A mix of snippets from different sources
#Remove history from branch
git checkout --orphan newBranch
git add . # Add all files and commit them
git commit -m "init commit"
git branch -D master # Deletes the master branch
git branch -m master # Rename the current branch to master
git push -f origin master # Force-push the new master branch to the remote (e.g. GitHub)
or
git push --mirror --force
#edit global git config
git config --edit --global
#show the location of each git config file
git config --list --show-origin
#show git config global properties
git config --global --list
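#For example, setting your identity in the global config (name and email below are placeholders):
git config --global user.name "Your Name"
git config --global user.email "you@example.com"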
#Fix "fatal: refusing to merge unrelated histories", which typically appears on the first pull after adding a new remote to an existing repository.
#The --allow-unrelated-histories flag lets the pull merge the two histories:
git pull origin branchname --allow-unrelated-histories
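#A typical sequence where this is needed (sketch only; the remote URL and branch name are placeholders):
git remote add origin https://github.com/example/repo.git
git pull origin master --allow-unrelated-histories
git push -u origin master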
#Tracking new files
> git add README
#Short Status
> git status -s
#compares what is in your working directory with what is in your staging area
> git diff
#compares your staged changes to your last commit
> git diff --staged
> git commit -m "Story 182: Fix benchmarks for speed"
#Makes Git automatically stage every file that is already tracked before doing the commit, letting you skip the git add part
> git commit -a -m 'added new benchmarks'
#Move/Rename file
> git mv README.md README
#Remove a file from both the staging area and the working directory
> git rm PROJECTS.md
#Remove a file from the staging area only, keeping it in the working directory
> git rm --cached PROJECTS.md
#lists the commits made in that repository in reverse chronological order
> git log
#shows the difference (the patch output) introduced in each commit (limited here to the last two entries)
> git log -p -2
#The oneline option prints each commit on a single line
> git log --pretty=oneline
#This option adds a nice little ASCII graph showing your branch and merge history
> git log --pretty=format:"%h %s" --graph
# list of commits made in the last two weeks
> git log --since=2.weeks
#Commits in the t/ directory authored by Junio C Hamano in October 2008, excluding merge commits
> git log --pretty="%h - %s" --author='Junio C Hamano' --since="2008-10-01" --before="2008-11-01" --no-merges -- t/
#Takes whatever is in your staging area and makes a new commit from it; the amended commit replaces the previous one
> git commit --amend
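#For example, when you forgot to stage a file before committing (forgotten_file is a placeholder):
> git commit -m 'Initial commit'
> git add forgotten_file
> git commit --amend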
#Unstaging a Staged File
> git reset HEAD CONTRIBUTING.md
#Unmodifying a Modified File
> git checkout -- CONTRIBUTING.md
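#On Git 2.23 and later, git restore covers both operations with clearer names:
> git restore --staged CONTRIBUTING.md
> git restore CONTRIBUTING.md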
#Show the URLs that Git has stored for each remote shortname
> git remote -v
#Add remote repository
> git remote add pb https://github.com/paulboone/ticgit
#git fetch command only downloads the data to your local repository; it doesn't automatically merge it
> git fetch pb
#push your master branch to your origin server
> git push origin master
#see more information about a particular remote
> git remote show origin
#Rename the remote pb to paul
> git remote rename pb paul
#Remove the remote paul
> git remote remove paul
#Create new branch
> git branch testing
#Switching branches
> git checkout testing
#create a new branch and switch to it at the same time
> git checkout -b iss53
#print out the history of your commits, showing where your branch pointers are and how your history has diverged
> git log --oneline --decorate --graph --all
#merge the hotfix branch back into master
> git checkout master
> git merge hotfix
#delete branch hotfix
> git branch -d hotfix
#Resolve merge conflicts; Git opens the configured merge tool (e.g. opendiff)
> git mergetool
#Show all branches with their last commit; * marks the current branch
> git branch -v
#Shows merged branches
> git branch --merged
#To see all the branches that contain work you haven’t yet merged in
> git branch --no-merged
#list all tags
> git tag
#List tags matching the 1.8.5 series
> git tag -l "v1.8.5*"
#Create annotated tag
> git tag -a v1.4 -m "my version 1.4"
#Info about tag
> git show v1.4
#Create Lightweight Tag
> git tag v1.4-lw
#Push the tag to remote
> git push origin v1.5
#Push group of tags to remote
> git push origin --tags
#delete tag locally
> git tag -d v1.4-lw
#Delete a tag on the remote
> git push origin :refs/tags/v1.4-lw
or
> git push origin --delete <tagname>
#Check out a tag (this puts the repository in a detached HEAD state)
> git checkout 2.0.0
#checkout tag and create new branch
> git checkout -b version2 v2.0.0
# Docker ps:
docker ps --all --format "table {{.ID}}\t{{.Names}}\t{{.CreatedAt}}\t{{.Status}}\t{{.RunningFor}}"
docker ps --all --format "table {{.ID}}\t{{.Names}}\t{{.CreatedAt}}\t{{.Status}}\t{{.Labels}}"
docker ps --all --format "table {{.ID}}\t{{.Names}}\t{{.CreatedAt}}\t{{.Status}}\t{{.Size}}\t{{.Command}}\t{{.Mounts}}\t{{.Ports}}\t{{.RunningFor}}" --filter=status=running
docker ps --all --format "table {{.ID}}\t{{.Names}}\t{{.Status}}\t{{.Size}}\t{{.Command}}\t{{.Mounts}}\t{{.RunningFor}}\t{{.CreatedAt}}" --filter=status=running
#List all Docker Compose projects currently running
docker ps --filter "label=com.docker.compose.project" -q | xargs docker inspect --format='{{index .Config.Labels "com.docker.compose.project"}}'| sort | uniq
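#With Docker Compose v2, the same list is available directly:
docker compose ls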
#Dangling images are layers that have no relationship to any tagged images. They no longer serve a purpose and consume disk space.
docker images -f dangling=true
#Delete dangling images
docker image prune
#List images according to pattern
docker images -a | grep "pattern"
#Remove images according to pattern
docker images -a | grep "pattern" | awk '{print $3}' | xargs docker rmi
#Remove all images, add the -q flag to pass the Image ID to docker rmi
docker rmi $(docker images -a -q)
#Run and remove containers after
docker run --rm image_name
#List of all exited containers
docker ps -a -f status=exited
#Remove all exited containers, -q to pass the IDs to the docker rm command.
docker rm $(docker ps -a -f status=exited -q)
#List of all containers using more than one filter
docker ps -a -f status=exited -f status=created
#Remove all containers using more than one filter, -q to pass the IDs to the docker rm command.
docker rm $(docker ps -a -f status=exited -f status=created -q)
#Stop all containers, -q to pass the IDs to the docker stop command.
docker stop $(docker ps -a -q)
#Remove all containers, -q to pass the IDs to the docker rm command.
docker rm $(docker ps -a -q)
#List dangling volumes - Docker 1.9 and later
docker volume ls -f dangling=true
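#To remove the dangling volumes listed above (both are standard Docker commands; verify on your version):
docker volume rm $(docker volume ls -f dangling=true -q)
#Or remove all unused local volumes in one step
docker volume prune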
#Remove a container and its volume
docker rm -v container_name
#Remove all unused networks created more than 12 hours ago
docker network prune --filter "until=12h"
#add container to network
docker network connect my-network my-container
#Remove dangling Docker images (alternative method)
docker rmi $(docker images --filter "dangling=true" -q --no-trunc)
docker images | grep "none"
docker rmi $(docker images | grep "none" | awk '/ / { print $3 }')
#Remove exited Docker containers (alternative method)
docker rm $(docker ps -qa --no-trunc --filter "status=exited")
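#Newer Docker versions also offer prune commands as one-step alternatives for this cleanup:
docker container prune # remove all stopped containers
docker image prune # remove dangling images
docker system prune # remove stopped containers, unused networks, dangling images and build cache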
//Install a dependency into the local Maven repo - with explicit coordinates, with a pom file, or file only
mvn install:install-file -Dfile=<path-to-file> -DgroupId=<group-id> -DartifactId=<artifact-id> -Dversion=<version> -Dpackaging=<packaging>
mvn install:install-file -Dfile=<path-to-file> -DpomFile=<path-to-pomfile>
mvn install:install-file -Dfile=<path-to-file>
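//For example, installing a local jar (coordinates and file name below are placeholders):
mvn install:install-file -Dfile=libs/foo-lib-1.2.3.jar -DgroupId=com.example -DartifactId=foo-lib -Dversion=1.2.3 -Dpackaging=jar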
//deploy dependency to nexus
mvn org.apache.maven.plugins:maven-deploy-plugin:2.8.2:deploy-file -DgroupId=de.kvtelematik.tools -DartifactId=git-branch-as-maven-version -Dversion=0.0.11 -Dpackaging=jar -Dfile=git-branch-as-maven-version-0.0.11-SNAPSHOT.jar -DgeneratePom=true -DrepositoryId=maven-public -Durl=http://kvd-dev.spdns.de:8081/repository/maven-public/
// Licensed under MIT
// author : Damien Nozay
// for all builds from build-flow-plugin whose parameters include a GIT_BRANCH parameter,
// change the displayName to include branch and build number
import com.cloudbees.plugins.flow.*;
jobs = Jenkins.instance.getAllItems(BuildFlow);
jobs.each { it ->
    it.builds.each { b ->
        GIT_BRANCH = b.envVars['GIT_BRANCH']
        ( GIT_BRANCH =~ /(?:refs\/remotes\/)?(.+)/ ).each { full, branch ->
            b.displayName = branch + ' (#' + b.number + ')'
        }
    }
}
// Licensed under MIT
// author : Damien Nozay
// ---------------------------------------------------------
// This script goes through all the jobs and checks the disk usage.
// prints some disk usage stats as well as the retention policy.
// ---------------------------------------------------------
// e.g.:
//
// JOB: playground/test-python
// -> lastbuild: #1 = FAILURE, time: 2015-02-12T20:56:16Z
// -> builds=12, average=8 KB, max=9 KB, total=97 KB, worstCase=113 KB
//
//
def printDiscarder(job) {
    d = job.buildDiscarder
    if (d) {
        println(" -> keep: builds=(${d.daysToKeep} days, ${d.numToKeep} total); artifacts=(${d.artifactDaysToKeep} days, ${d.artifactNumToKeep} total)")
    } else {
        println(" -> no retention policy.")
    }
}
import hudson.plugins.disk_usage.BuildDiskUsageAction
import hudson.plugins.disk_usage.DiskUsageUtil
def getDiskUsage(build) {
    usage = null
    build.getTransientActions().each { action ->
        if (action instanceof BuildDiskUsageAction) {
            // println action.buildUsageString
            // println action.allDiskUsage
            usage = action
        }
    }
    return usage
}
def printDiskUsage(job) {
    maxUsage = 0
    totalUsage = 0
    numBuilds = 0
    job.builds.each() { build ->
        usage = getDiskUsage(build)
        // skip builds that have no disk usage data yet
        if (usage == null) { return }
        if (usage.allDiskUsage > maxUsage) { maxUsage = usage.allDiskUsage }
        totalUsage += usage.allDiskUsage
        numBuilds += 1
        // println(" * build ${build.number} - ${usage.buildUsageString}")
    }
    averageUsage = 0
    if (numBuilds) { averageUsage = (totalUsage / numBuilds).longValue() }
    worstCase = numBuilds * maxUsage
    println(" -> builds=${numBuilds}, average=${DiskUsageUtil.getSizeString(averageUsage)}, max=${DiskUsageUtil.getSizeString(maxUsage)}, total=${DiskUsageUtil.getSizeString(totalUsage)}, worstCase=${DiskUsageUtil.getSizeString(worstCase)}")
}
jobs = Jenkins.instance.getAllItems()
jobs.each { j ->
    if (j instanceof com.cloudbees.hudson.plugins.folder.Folder) { return }
    numbuilds = j.builds.size()
    println 'JOB: ' + j.fullName
    if (numbuilds == 0) {
        println ' -> no build'
    } else {
        lastbuild = j.builds[numbuilds - 1]
        println ' -> lastbuild: ' + lastbuild.displayName + ' = ' + lastbuild.result + ', time: ' + lastbuild.timestampString2
    }
    printDiscarder(j)
    printDiskUsage(j)
    println ''
}
''
// adapted from https://github.com/tkrzeminski/jenkins-groovy-scripts/blob/master/show-all-credentials.groovy
// ---------------------------------------------------------
// This script goes through all the credentials and tries to find a matching item.
// Useful to find the secret that needs to be updated (e.g. it was leaked in log or similar).
// ---------------------------------------------------------
// find needle in credentials
needle = 'xxxxxxx'
import jenkins.model.*
import com.cloudbees.plugins.credentials.*
import com.cloudbees.plugins.credentials.impl.*
import com.cloudbees.plugins.credentials.domains.*
import com.cloudbees.jenkins.plugins.sshcredentials.impl.BasicSSHUserPrivateKey
import com.cloudbees.jenkins.plugins.awscredentials.AWSCredentialsImpl
import org.jenkinsci.plugins.plaincredentials.StringCredentials
import org.jenkinsci.plugins.plaincredentials.impl.FileCredentialsImpl
def showRow = { credentialType, secretId, content = null, username = null, password = null, description = null ->
    println("━" * 80)
    println("type".padLeft(20) + "┃ ${credentialType}")
    println("description".padLeft(20) + "┃ ${description}")
    println("secret id".padLeft(20) + "┃ ${secretId}")
    println("username".padLeft(20) + "┃ ${username}")
    println("password".padLeft(20) + "┃ ${password}")
    println("content:")
    println("┅" * 80)
    println(content)
    println("━" * 80)
}
// set Credentials domain name (null means the global domain)
domainName = null
credentialsStore = Jenkins.instance.getExtensionList('com.cloudbees.plugins.credentials.SystemCredentialsProvider')[0]?.getStore()
domain = new Domain(domainName, null, Collections.<DomainSpecification>emptyList())
credentialsStore?.getCredentials(domain).each {
    if (it instanceof UsernamePasswordCredentialsImpl && (it.username == needle || it.password?.getPlainText() == needle))
        showRow("user/password", it.id, null, it.username, it.password?.getPlainText(), it.description)
    if (it instanceof BasicSSHUserPrivateKey && (it.passphrase?.getPlainText() == needle))
        showRow("ssh priv key", it.id, null, it.passphrase?.getPlainText(), it.privateKeySource?.getPrivateKey(), it.description)
    if (it instanceof AWSCredentialsImpl && (it.accessKey == needle || it.secretKey?.getPlainText() == needle))
        showRow("aws", it.id, null, it.accessKey, it.secretKey?.getPlainText(), it.description)
    if (it instanceof StringCredentials && (it.secret?.getPlainText()?.contains(needle)))
        showRow("secret text", it.id, it.secret?.getPlainText(), null, null, it.description)
    if (it instanceof FileCredentialsImpl && (it.content?.text?.contains(needle)))
        showRow("secret file", it.id, it.content?.text, null, null, it.description)
}
return
// Licensed under MIT
// author : Damien Nozay
// scan all jobs and check if the last build was aborted (e.g. maintenance)
// and output user / timestamp
jobs = Jenkins.instance.getAllItems()
jobs.each { j ->
    if (j instanceof com.cloudbees.hudson.plugins.folder.Folder) { return }
    numbuilds = j.builds.size()
    if (numbuilds == 0) { return }
    lastbuild = j.builds[numbuilds - 1]
    if (lastbuild.result == Result.ABORTED) {
        println 'JOB: ' + j.fullName
        abortCause = lastbuild.getAction(InterruptedBuildAction).getCauses()[0]
        println ' -> lastbuild: ' + lastbuild.displayName + ' = ' + lastbuild.result + ', cause: ' + abortCause.shortDescription + ', time: ' + lastbuild.timestampString2
    }
}
''
// Licensed under MIT
// author : Damien Nozay
// list jobs and their last build.
jobs = Jenkins.instance.getAllItems()
jobs.each { j ->
    if (j instanceof com.cloudbees.hudson.plugins.folder.Folder) { return }
    println 'JOB: ' + j.fullName
    numbuilds = j.builds.size()
    if (numbuilds == 0) {
        println ' -> no build'
        return
    }
    lastbuild = j.builds[numbuilds - 1]
    println ' -> lastbuild: ' + lastbuild.displayName + ' = ' + lastbuild.result + ', time: ' + lastbuild.timestampString2
}
// returns blank
''
// Licensed under MIT
// author : Damien Nozay
// list jobs not run in the last N days / last N months
import groovy.time.TimeCategory
use (TimeCategory) {
    // e.g. find jobs not run in last 3 months
    sometimeago = (new Date() - 3.months)
}
jobs = Jenkins.instance.getAllItems()
lastabort = null
jobs.each { j ->
    if (j instanceof com.cloudbees.hudson.plugins.folder.Folder) { return }
    numbuilds = j.builds.size()
    if (numbuilds == 0) {
        println 'JOB: ' + j.fullName
        println ' -> no build'
        return
    }
    lastbuild = j.builds[numbuilds - 1]
    if (lastbuild.timestamp.getTime() < sometimeago) {
        println 'JOB: ' + j.fullName
        println ' -> lastbuild: ' + lastbuild.displayName + ' = ' + lastbuild.result + ', time: ' + lastbuild.timestampString2
    }
}
''
// Licensed under MIT
// author : Damien Nozay
// ---------------------------------------------------------
// This script cleans a subdir in all existing workspaces for a selected job.
// node -> check workspace (concurrent too) -> check subdir -> delete
// ---------------------------------------------------------
job = Jenkins.instance.getItemByFullName('SomeJobFolder/myJob')
subdir = 'dist'
println "Looking for job: " + job.fullName
import hudson.slaves.WorkspaceList
combinator = System.getProperty(WorkspaceList.class.getName(),"@");
for (node in Jenkins.instance.getNodes()) {
    println "Node: '" + node.getSelfLabel().toString() + "' (" + node.getAssignedLabels().join(",") + ")"
    workspacePathBase = node.getWorkspaceFor(job)
    // handle concurrent workspaces
    for (int i = 1; ; i++) {
        // they are suffixed...
        workspacePath = i == 1 ? workspacePathBase : workspacePathBase.withSuffix(combinator + i);
        // stop checking (unlikely to have higher suffix)
        if (!workspacePath.exists()) {
            break;
        } else {
            println " * found workspace: " + workspacePath.getRemote()
            targetDir = workspacePath.child(subdir)
            if (targetDir.exists()) {
                println " * found target directory"
                if (!job.isBuilding()) {
                    println " * removing directory (job is not building)"
                    targetDir.deleteRecursive()
                } else {
                    println " * not removing directory (job is building)"
                }
            }
        }
    }
}
// Licensed under MIT
// author : Damien Nozay
// ---------------------------------------------------------
// Retroactively add badges to promoted builds.
// ---------------------------------------------------------
import hudson.plugins.promoted_builds.*;
import org.jvnet.hudson.plugins.groovypostbuild.*;
// customize these
project_name = "project/full/name"
promotion_name = "promotion_process_name"
// look up promoted builds for project + promotion process.
project = Jenkins.instance.getItemByFullName(project_name)
action = project.getAction(PromotedProjectAction.class)
promotion = action.getProcess(promotion_name)
promoted_builds = action.getPromotions(promotion)
// check this other gist:
// https://gist.github.com/dnozay/fc528b43cf27755017cc
def add_release_version(promo_build) {
    target = promo_build.target;
    // access the promotion build environment and the RELEASE_VERSION parameter.
    release_version = promo_build.environment.get('RELEASE_VERSION');
    // create the summary with the gold star icon and attach to target.
    GroovyPostbuildSummaryAction action = new GroovyPostbuildSummaryAction("star-gold.png");
    target.getActions().add(action);
    // customize text for the summary.
    action.appendText("RELEASE VERSION = " + release_version, false);
    // also add a short text that will appear in the build history
    target.getActions().add(GroovyPostbuildAction.createShortText(release_version));
    // save build
    target.save();
}
// do stuff; e.g. add release version in the description.
for (Promotion promo : promoted_builds) {
    add_release_version(promo)
}
// Licensed under MIT
// author : Damien Nozay
// ---------------------------------------------------------
// This script goes through all the jobs and checks if they are using the Groovy Postbuild
// if they are, then it computes the hash value, and checks against the ones that are approved.
// ---------------------------------------------------------
import org.jenkinsci.plugins.scriptsecurity.scripts.*;
import org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.*;
import hudson.model.RootAction;
import org.jvnet.hudson.plugins.groovypostbuild.*;
// instance containing the approvals
// list of approved hashes: println instance.approvedScriptHashes
ScriptApproval instance = Jenkins.getInstance().getExtensionList(RootAction.class).get(ScriptApproval.class);
approvedScriptHashes = instance.approvedScriptHashes
import java.util.*
import java.security.MessageDigest;
def hash(String script, String language) {
    MessageDigest digest = MessageDigest.getInstance("SHA-1");
    digest.update(language.getBytes("UTF-8"));
    digest.update((byte) ':');
    digest.update(script.getBytes("UTF-8"));
    return Util.toHexString(digest.digest());
}
jobs = hudson.model.Hudson.instance.getAllItems(FreeStyleProject)
for (job in jobs) {
    for (publisher in job.publishersList) {
        if (publisher instanceof GroovyPostbuildRecorder) {
            hashval = hash(publisher.script.script, "groovy")
            println "#" * 80
            println "job: " + job.getFullName()
            println "script hash: " + hashval
            println "approved: " + (hashval in approvedScriptHashes)
            println "script: "
            println "-" * 80
            println publisher.script.script
            println "#" * 80
        }
    }
}