Diffstat (limited to 'plugins')
189 files changed, 10522 insertions, 824 deletions
diff --git a/plugins/ant/ant.plugin.zsh b/plugins/ant/ant.plugin.zsh
index 691d4d2db..45f2b06eb 100644
--- a/plugins/ant/ant.plugin.zsh
+++ b/plugins/ant/ant.plugin.zsh
@@ -1,17 +1,7 @@
-stat -f%m . > /dev/null 2>&1
-if [ "$?" = 0 ]; then
-  stat_cmd=(stat -f%m)
-else
-  stat_cmd=(stat -L --format=%Y)
-fi
-
 _ant_does_target_list_need_generating () {
-  if [ ! -f .ant_targets ]; then return 0;
-  else
-    accurate=$($stat_cmd .ant_targets)
-    changed=$($stat_cmd build.xml)
-    return $(expr $accurate '>=' $changed)
-  fi
+  [ ! -f .ant_targets ] && return 0;
+  [ .ant_targets -nt build.xml ] && return 0;
+  return 1;
 }
 
 _ant () {
diff --git a/plugins/archlinux/archlinux.plugin.zsh b/plugins/archlinux/archlinux.plugin.zsh
index ae92a0b4c..bffe9657a 100644
--- a/plugins/archlinux/archlinux.plugin.zsh
+++ b/plugins/archlinux/archlinux.plugin.zsh
@@ -8,7 +8,7 @@ if [[ -x `which yaourt` ]]; then
   }
   alias yaconf='yaourt -C' # Fix all configuration files with vimdiff
   # Pacman - https://wiki.archlinux.org/index.php/Pacman_Tips
-  alias yaupg='yaourt -Syu' # Synchronize with repositories before upgrading packages that are out of date on the local system.
+  alias yaupg='yaourt -Syua' # Synchronize with repositories before upgrading packages (AUR packages too) that are out of date on the local system.
   alias yasu='yaourt --sucre' # Same as yaupg, but without confirmation
   alias yain='yaourt -S' # Install specific package(s) from the repositories
   alias yains='yaourt -U' # Install specific package not from the repositories but from a file
@@ -18,6 +18,8 @@ if [[ -x `which yaourt` ]]; then
   alias yareps='yaourt -Ss' # Search for package(s) in the repositories
   alias yaloc='yaourt -Qi' # Display information about a given package in the local database
   alias yalocs='yaourt -Qs' # Search for package(s) in the local database
+  alias yalst='yaourt -Qe' # List installed packages, even those installed from AUR (they're tagged as "local")
+  alias yaorph='yaourt -Qtd' # Remove orphans using yaourt
   # Additional yaourt alias examples
   if [[ -x `which abs` ]]; then
     alias yaupd='yaourt -Sy && sudo abs' # Update and refresh the local package and ABS databases against repositories
diff --git a/plugins/atom/atom.plugin.zsh b/plugins/atom/atom.plugin.zsh
new file mode 100644
index 000000000..9adb9031a
--- /dev/null
+++ b/plugins/atom/atom.plugin.zsh
@@ -0,0 +1,14 @@
+local _atom_paths > /dev/null 2>&1
+_atom_paths=(
+  "$HOME/Applications/Atom.app"
+  "/Applications/Atom.app"
+)
+
+for _atom_path in $_atom_paths; do
+  if [[ -a $_atom_path ]]; then
+    alias at="open -a '$_atom_path'"
+    break
+  fi
+done
+
+alias att='at .'
diff --git a/plugins/autoenv/autoenv.plugin.zsh b/plugins/autoenv/autoenv.plugin.zsh
new file mode 100644
index 000000000..ca5666979
--- /dev/null
+++ b/plugins/autoenv/autoenv.plugin.zsh
@@ -0,0 +1,18 @@
+# The use_env call below is a reusable command to activate/create a new Python
+# virtualenv, requiring only a single declarative line of code in your .env files.
+# It only performs an action if the requested virtualenv is not the current one.
+use_env() {
+  typeset venv
+  venv="$1"
+  if [[ "${VIRTUAL_ENV:t}" != "$venv" ]]; then
+    if workon | grep -q "$venv"; then
+      workon "$venv"
+    else
+      echo -n "Create virtualenv $venv now? (Yn) "
+      read answer
+      if [[ "$answer" == "Y" ]]; then
+        mkvirtualenv "$venv"
+      fi
+    fi
+  fi
+}
diff --git a/plugins/autojump/autojump.plugin.zsh b/plugins/autojump/autojump.plugin.zsh
index 3894ccd2f..50a694764 100644
--- a/plugins/autojump/autojump.plugin.zsh
+++ b/plugins/autojump/autojump.plugin.zsh
@@ -1,10 +1,14 @@
 if [ $commands[autojump] ]; then # check if autojump is installed
-  if [ -f /usr/share/autojump/autojump.zsh ]; then # debian and ubuntu package
+  if [ -f $HOME/.autojump/etc/profile.d/autojump.zsh ]; then # manual user-local installation
+    . $HOME/.autojump/etc/profile.d/autojump.zsh
+  elif [ -f /usr/share/autojump/autojump.zsh ]; then # debian and ubuntu package
     . /usr/share/autojump/autojump.zsh
   elif [ -f /etc/profile.d/autojump.zsh ]; then # manual installation
     . /etc/profile.d/autojump.zsh
-  elif [ -f $HOME/.autojump/etc/profile.d/autojump.zsh ]; then # manual user-local installation
-    . $HOME/.autojump/etc/profile.d/autojump.zsh
+  elif [ -f /etc/profile.d/autojump.sh ]; then # gentoo installation
+    . /etc/profile.d/autojump.sh
+  elif [ -f /usr/local/share/autojump/autojump.zsh ]; then # freebsd installation
+    . /usr/local/share/autojump/autojump.zsh
   elif [ -f /opt/local/etc/profile.d/autojump.zsh ]; then # mac os x with ports
     . /opt/local/etc/profile.d/autojump.zsh
   elif [ $commands[brew] -a -f `brew --prefix`/etc/autojump.zsh ]; then # mac os x with brew
diff --git a/plugins/autopep8/_autopep8 b/plugins/autopep8/_autopep8
new file mode 100644
index 000000000..c14d06d66
--- /dev/null
+++ b/plugins/autopep8/_autopep8
@@ -0,0 +1,32 @@
+#compdef autopep8
+#
+# this is zsh completion function file.
+# generated by genzshcomp(ver: 0.5.1)
+#
+
+typeset -A opt_args
+local context state line
+
+_arguments -s -S \
+  "--help[show this help message and exit]:" \
+  "-h[show this help message and exit]:" \
+  "--version[show program's version number and exit]:" \
+  "--verbose[print verbose messages; multiple -v result in more verbose messages]" \
+  "-v[print verbose messages; multiple -v result in more verbose messages]" \
+  "--diff[print the diff for the fixed source]" \
+  "-d[print the diff for the fixed source]" \
+  "--in-place[make changes to files in place]" \
+  "-i[make changes to files in place]" \
+  "--recursive[run recursively; must be used with --in-place or --diff]" \
+  "-r[run recursively; must be used with --in-place or --diff]" \
+  "--jobs[number of parallel jobs; match CPU count if value is less than 1]::n number of parallel jobs; match CPU count if value is:_files" \
+  "-j[number of parallel jobs; match CPU count if value is less than 1]::n number of parallel jobs; match CPU count if value is:_files" \
+  "--pep8-passes[maximum number of additional pep8 passes (default: 100)]::n:_files" \
+  "-p[maximum number of additional pep8 passes (default: 100)]::n:_files" \
+  "-a[-a result in more aggressive changes]::result:_files" \
+  "--exclude[exclude files/directories that match these comma- separated globs]::globs:_files" \
+  "--list-fixes[list codes for fixes; used by --ignore and --select]" \
+  "--ignore[do not fix these errors/warnings (default E226,E24)]::errors:_files" \
+  "--select[fix only these errors/warnings (e.g.
E4,W)]::errors:_files" \ + "--max-line-length[set maximum allowed line length (default: 79)]::n:_files" \ + "*::args:_files" diff --git a/plugins/autopep8/autopep8.plugin.zsh b/plugins/autopep8/autopep8.plugin.zsh new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/plugins/autopep8/autopep8.plugin.zsh diff --git a/plugins/aws/aws.plugin.zsh b/plugins/aws/aws.plugin.zsh new file mode 100644 index 000000000..8b57d7db1 --- /dev/null +++ b/plugins/aws/aws.plugin.zsh @@ -0,0 +1,30 @@ +_homebrew-installed() { + type brew &> /dev/null +} + +_awscli-homebrew-installed() { + brew --prefix awscli &> /dev/null +} + +export AWS_HOME=~/.aws + +function agp { + echo $AWS_DEFAULT_PROFILE + +} +function asp { + export AWS_DEFAULT_PROFILE=$1 + export RPROMPT="<aws:$AWS_DEFAULT_PROFILE>" + +} +function aws_profiles { + reply=($(grep profile $AWS_HOME/config|sed -e 's/.*profile \([a-zA-Z0-9_-]*\).*/\1/')) +} + +compctl -K aws_profiles asp + +if _homebrew-installed && _awscli-homebrew-installed ; then + source $(brew --prefix)/opt/awscli/libexec/bin/aws_zsh_completer.sh +else + source `which aws_zsh_completer.sh` +fi diff --git a/plugins/battery/battery.plugin.zsh b/plugins/battery/battery.plugin.zsh index 95f890632..32dd4b624 100644 --- a/plugins/battery/battery.plugin.zsh +++ b/plugins/battery/battery.plugin.zsh @@ -10,21 +10,35 @@ if [[ $(uname) == "Darwin" ]] ; then + function battery_pct() { + local smart_battery_status="$(ioreg -rc "AppleSmartBattery")" + typeset -F maxcapacity=$(echo $smart_battery_status | grep '^.*"MaxCapacity"\ =\ ' | sed -e 's/^.*"MaxCapacity"\ =\ //') + typeset -F currentcapacity=$(echo $smart_battery_status | grep '^.*"CurrentCapacity"\ =\ ' | sed -e 's/^.*CurrentCapacity"\ =\ //') + integer i=$(((currentcapacity/maxcapacity) * 100)) + echo $i + } + + function plugged_in() { + [ $(ioreg -rc AppleSmartBattery | grep -c '^.*"ExternalConnected"\ =\ Yes') -eq 1 ] + } + function battery_pct_remaining() { - if [[ $(ioreg -rc AppleSmartBattery | grep -c '^.*"ExternalConnected"\ =\ No') -eq 1 ]] ; then - typeset -F maxcapacity=$(ioreg -rc "AppleSmartBattery"| grep '^.*"MaxCapacity"\ =\ ' | sed -e 's/^.*"MaxCapacity"\ =\ //') - typeset -F currentcapacity=$(ioreg -rc "AppleSmartBattery"| grep '^.*"CurrentCapacity"\ =\ ' | sed -e 's/^.*CurrentCapacity"\ =\ //') - integer i=$(((currentcapacity/maxcapacity) * 100)) - echo $i - else + if plugged_in ; then echo "External Power" + else + battery_pct fi } function battery_time_remaining() { - if [[ $(ioreg -rc AppleSmartBattery | grep -c '^.*"ExternalConnected"\ =\ No') -eq 1 ]] ; then - timeremaining=$(ioreg -rc "AppleSmartBattery"| grep '^.*"AvgTimeToEmpty"\ =\ ' | sed -e 's/^.*"AvgTimeToEmpty"\ =\ //') - echo "~$((timeremaining / 60)):$((timeremaining % 60))" + local smart_battery_status="$(ioreg -rc "AppleSmartBattery")" + if [[ $(echo $smart_battery_status | grep -c '^.*"ExternalConnected"\ =\ No') -eq 1 ]] ; then + timeremaining=$(echo $smart_battery_status | grep '^.*"AvgTimeToEmpty"\ =\ ' | sed -e 's/^.*"AvgTimeToEmpty"\ =\ //') + if [ $timeremaining -gt 720 ] ; then + echo "::" + else + echo "~$((timeremaining / 60)):$((timeremaining % 60))" + fi else echo "∞" fi @@ -42,17 +56,43 @@ if [[ $(uname) == "Darwin" ]] ; then fi echo "%{$fg[$color]%}[$(battery_pct_remaining)%%]%{$reset_color%}" else - echo "" + echo "∞" fi } + function battery_is_charging() { + [[ $(ioreg -rc "AppleSmartBattery"| grep '^.*"IsCharging"\ =\ ' | sed -e 's/^.*"IsCharging"\ =\ //') == "Yes" ]] + } + elif [[ $(uname) == "Linux" ]] ; then - if [[ 
$(acpi 2&>/dev/null | grep -c '^Battery.*Discharging') -gt 0 ]] ; then - function battery_pct_remaining() { echo "$(acpi | cut -f2 -d ',' | tr -cd '[:digit:]')" } - function battery_time_remaining() { echo $(acpi | cut -f3 -d ',') } - function battery_pct_prompt() { - b=$(battery_pct_remaining) + function battery_is_charging() { + ! [[ $(acpi 2&>/dev/null | grep -c '^Battery.*Discharging') -gt 0 ]] + } + + function battery_pct() { + if (( $+commands[acpi] )) ; then + echo "$(acpi | cut -f2 -d ',' | tr -cd '[:digit:]')" + fi + } + + function battery_pct_remaining() { + if [ ! $(battery_is_charging) ] ; then + battery_pct + else + echo "External Power" + fi + } + + function battery_time_remaining() { + if [[ $(acpi 2&>/dev/null | grep -c '^Battery.*Discharging') -gt 0 ]] ; then + echo $(acpi | cut -f3 -d ',') + fi + } + + function battery_pct_prompt() { + b=$(battery_pct_remaining) + if [[ $(acpi 2&>/dev/null | grep -c '^Battery.*Discharging') -gt 0 ]] ; then if [ $b -gt 50 ] ; then color='green' elif [ $b -gt 20 ] ; then @@ -61,11 +101,60 @@ elif [[ $(uname) == "Linux" ]] ; then color='red' fi echo "%{$fg[$color]%}[$(battery_pct_remaining)%%]%{$reset_color%}" - } + else + echo "∞" + fi + } + +else + # Empty functions so we don't cause errors in prompts + function battery_pct_remaining() { + } + + function battery_time_remaining() { + } + + function battery_pct_prompt() { + } +fi + +function battery_level_gauge() { + local gauge_slots=${BATTERY_GAUGE_SLOTS:-10}; + local green_threshold=${BATTERY_GREEN_THRESHOLD:-6}; + local yellow_threshold=${BATTERY_YELLOW_THRESHOLD:-4}; + local color_green=${BATTERY_COLOR_GREEN:-%F{green}}; + local color_yellow=${BATTERY_COLOR_YELLOW:-%F{yellow}}; + local color_red=${BATTERY_COLOR_RED:-%F{red}}; + local color_reset=${BATTERY_COLOR_RESET:-%{%f%k%b%}}; + local battery_prefix=${BATTERY_GAUGE_PREFIX:-'['}; + local battery_suffix=${BATTERY_GAUGE_SUFFIX:-']'}; + local filled_symbol=${BATTERY_GAUGE_FILLED_SYMBOL:-'▶'}; + local empty_symbol=${BATTERY_GAUGE_EMPTY_SYMBOL:-'▷'}; + local charging_color=${BATTERY_CHARGING_COLOR:-$color_yellow}; + local charging_symbol=${BATTERY_CHARGING_SYMBOL:-'⚡'}; + + local battery_remaining_percentage=$(battery_pct); + + if [[ $battery_remaining_percentage =~ [0-9]+ ]]; then + local filled=$(((( $battery_remaining_percentage + $gauge_slots - 1) / $gauge_slots))); + local empty=$(($gauge_slots - $filled)); + + if [[ $filled -gt $green_threshold ]]; then local gauge_color=$color_green; + elif [[ $filled -gt $yellow_threshold ]]; then local gauge_color=$color_yellow; + else local gauge_color=$color_red; + fi else - error_msg='no battery' - function battery_pct_remaining() { echo $error_msg } - function battery_time_remaining() { echo $error_msg } - function battery_pct_prompt() { echo '' } + local filled=$gauge_slots; + local empty=0; + filled_symbol=${BATTERY_UNKNOWN_SYMBOL:-'.'}; fi -fi + + local charging=' ' && battery_is_charging && charging=$charging_symbol; + + printf ${charging_color//\%/\%\%}$charging${color_reset//\%/\%\%}${battery_prefix//\%/\%\%}${gauge_color//\%/\%\%} + printf ${filled_symbol//\%/\%\%}'%.0s' {1..$filled} + [[ $filled -lt $gauge_slots ]] && printf ${empty_symbol//\%/\%\%}'%.0s' {1..$empty} + printf ${color_reset//\%/\%\%}${battery_suffix//\%/\%\%}${color_reset//\%/\%\%} +} + + diff --git a/plugins/bower/_bower b/plugins/bower/_bower new file mode 100644 index 000000000..ae0ca4a4e --- /dev/null +++ b/plugins/bower/_bower @@ -0,0 +1,58 @@ + + +# Credits to npm's awesome completion utility. 
+# +# Bower completion script, based on npm completion script. + +###-begin-bower-completion-### +# +# Installation: bower completion >> ~/.bashrc (or ~/.zshrc) +# Or, maybe: bower completion > /usr/local/etc/bash_completion.d/bower +# + +COMP_WORDBREAKS=${COMP_WORDBREAKS/=/} +COMP_WORDBREAKS=${COMP_WORDBREAKS/@/} +export COMP_WORDBREAKS + +if type complete &>/dev/null; then + _bower_completion () { + local si="$IFS" + IFS=$'\n' COMPREPLY=($(COMP_CWORD="$COMP_CWORD" \ + COMP_LINE="$COMP_LINE" \ + COMP_POINT="$COMP_POINT" \ + bower completion -- "${COMP_WORDS[@]}" \ + 2>/dev/null)) || return $? + IFS="$si" + } + complete -F _bower_completion bower +elif type compdef &>/dev/null; then + _bower_completion() { + si=$IFS + compadd -- $(COMP_CWORD=$((CURRENT-1)) \ + COMP_LINE=$BUFFER \ + COMP_POINT=0 \ + bower completion -- "${words[@]}" \ + 2>/dev/null) + IFS=$si + } + compdef _bower_completion bower +elif type compctl &>/dev/null; then + _bower_completion () { + local cword line point words si + read -Ac words + read -cn cword + let cword-=1 + read -l line + read -ln point + si="$IFS" + IFS=$'\n' reply=($(COMP_CWORD="$cword" \ + COMP_LINE="$line" \ + COMP_POINT="$point" \ + bower completion -- "${words[@]}" \ + 2>/dev/null)) || return $? + IFS="$si" + } + compctl -K _bower_completion bower +fi +###-end-bower-completion-### + diff --git a/plugins/bower/bower.plugin.zsh b/plugins/bower/bower.plugin.zsh new file mode 100644 index 000000000..68a67a3cc --- /dev/null +++ b/plugins/bower/bower.plugin.zsh @@ -0,0 +1,81 @@ +alias bi="bower install" +alias bl="bower list" +alias bs="bower search" + +_bower_installed_packages () { + bower_package_list=$(bower ls --no-color 2>/dev/null| awk 'NR>3{print p}{p=$0}'| cut -d ' ' -f 2|sed 's/#.*//') +} +_bower () +{ + local -a _1st_arguments _no_color _dopts _save_dev _force_lastest _production + local expl + typeset -A opt_args + + _no_color=('--no-color[Do not print colors (available in all commands)]') + + _dopts=( + '(--save)--save[Save installed packages into the project"s bower.json dependencies]' + '(--force)--force[Force fetching remote resources even if a local copy exists on disk]' + ) + + _save_dev=('(--save-dev)--save-dev[Save installed packages into the project"s bower.json devDependencies]') + + _force_lastest=('(--force-latest)--force-latest[Force latest version on conflict]') + + _production=('(--production)--production[Do not install project devDependencies]') + + _1st_arguments=( + 'cache-clean:Clean the Bower cache, or the specified package caches' \ + 'help:Display help information about Bower' \ + 'info:Version info and description of a particular package' \ + 'init:Interactively create a bower.json file' \ + 'install:Install a package locally' \ + 'link:Symlink a package folder' \ + 'lookup:Look up a package URL by name' \ + 'register:Register a package' \ + 'search:Search for a package by name' \ + 'uninstall:Remove a package' \ + 'update:Update a package' \ + {ls,list}:'[List all installed packages]' + ) + _arguments \ + $_no_color \ + '*:: :->subcmds' && return 0 + + if (( CURRENT == 1 )); then + _describe -t commands "bower subcommand" _1st_arguments + return + fi + + case "$words[1]" in + install) + _arguments \ + $_dopts \ + $_save_dev \ + $_force_lastest \ + $_no_color \ + $_production + ;; + update) + _arguments \ + $_dopts \ + $_no_color \ + $_force_lastest + _bower_installed_packages + compadd "$@" $(echo $bower_package_list) + ;; + uninstall) + _arguments \ + $_no_color \ + $_dopts + _bower_installed_packages + compadd "$@" 
$(echo $bower_package_list) + ;; + *) + $_no_color \ + ;; + esac + +} + +compdef _bower bower diff --git a/plugins/brew-cask/brew-cask.plugin.zsh b/plugins/brew-cask/brew-cask.plugin.zsh new file mode 100644 index 000000000..91ce0f498 --- /dev/null +++ b/plugins/brew-cask/brew-cask.plugin.zsh @@ -0,0 +1,84 @@ +# Autocompletion for homebrew-cask. +# +# This script intercepts calls to the brew plugin and adds autocompletion +# for the cask subcommand. +# +# Author: https://github.com/pstadler + +compdef _brew-cask brew + +_brew-cask() +{ + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + ':command:->command' \ + ':subcmd:->subcmd' \ + '*::options:->options' + + case $state in + (command) + __call_original_brew + cask_commands=( + 'cask:manage casks' + ) + _describe -t commands 'brew cask command' cask_commands ;; + + (subcmd) + case "$line[1]" in + cask) + if (( CURRENT == 3 )); then + local -a subcommands + subcommands=( + "alfred:used to modify Alfred's scope to include the Caskroom" + 'audit:verifies installability of casks' + 'checklinks:checks for bad cask links' + 'cleanup:cleans up cached downloads' + 'create:creates a cask of the given name and opens it in an editor' + 'doctor:checks for configuration issues' + 'edit:edits the cask of the given name' + 'fetch:downloads Cask resources to local cache' + 'home:opens the homepage of the cask of the given name' + 'info:displays information about the cask of the given name' + 'install:installs the cask of the given name' + 'list:with no args, lists installed casks; given installed casks, lists installed files' + 'search:searches all known casks' + 'uninstall:uninstalls the cask of the given name' + "update:a synonym for 'brew update'" + ) + _describe -t commands "brew cask subcommand" subcommands + fi ;; + + *) + __call_original_brew ;; + esac ;; + + (options) + local -a casks installed_casks + local expl + case "$line[2]" in + list|uninstall) + __brew_installed_casks + _wanted installed_casks expl 'installed casks' compadd -a installed_casks ;; + audit|edit|home|info|install) + __brew_all_casks + _wanted casks expl 'all casks' compadd -a casks ;; + esac ;; + esac +} + +__brew_all_casks() { + casks=(`brew cask search`) +} + +__brew_installed_casks() { + installed_casks=(`brew cask list`) +} + +__call_original_brew() +{ + local ret=1 + _call_function ret _brew + compdef _brew-cask brew +} diff --git a/plugins/brew/_brew b/plugins/brew/_brew index 1dcf0a4bf..40442a1d3 100644 --- a/plugins/brew/_brew +++ b/plugins/brew/_brew @@ -11,10 +11,25 @@ _brew_installed_formulae() { installed_formulae=(`brew list`) } +_brew_installed_taps() { + installed_taps=(`brew tap`) +} + +_brew_outdated_formulae() { + outdated_formulae=(`brew outdated`) +} + +_brew_running_services() { + running_services=(`brew services list | awk '{print $1}'`) +} + local -a _1st_arguments _1st_arguments=( + 'audit:check formulae for Homebrew coding style' + 'bundle:look for a Brewfile and run each line as a brew command' 'cat:display formula file for a formula' 'cleanup:uninstall unused and old versions of packages' + 'commands:show a list of commands' 'create:create a new formula' 'deps:list dependencies and dependants of a formula' 'doctor:audits your installation for common issues' @@ -22,23 +37,39 @@ _1st_arguments=( 'home:visit the homepage of a formula or the brew project' 'info:information about a formula' 'install:install a formula' + 'reinstall:install a formula anew; re-using its current options' 'link:link a formula' 'list:list 
files in a formula or not-installed formulae' 'log:git commit log for a formula' 'missing:check all installed formuale for missing dependencies.' - 'outdated:list formulas for which a newer version is available' + 'outdated:list formulae for which a newer version is available' + 'pin:pin specified formulae' 'prune:remove dead links' 'remove:remove a formula' 'search:search for a formula (/regex/ or string)' 'server:start a local web app that lets you browse formulae (requires Sinatra)' + 'services:small wrapper around `launchctl` for supported formulae' + 'tap:tap a new formula repository from GitHub, or list existing taps' + 'uninstall:uninstall a formula' 'unlink:unlink a formula' + 'unpin:unpin specified formulae' + 'untap:remove a tapped repository' 'update:freshen up links' 'upgrade:upgrade outdated formulae' - 'uses:show formulas which depend on a formula' + 'uses:show formulae which depend on a formula' +) + +local -a _service_arguments +_service_arguments=( + 'cleanup:get rid of stale services and unused plists' + 'list:list all services managed by `brew services`' + 'restart:gracefully restart selected service' + 'start:start selected service' + 'stop:stop selected service' ) local expl -local -a formulae installed_formulae +local -a formulae installed_formulae installed_taps outdated_formulae running_services _arguments \ '(-v)-v[verbose]' \ @@ -57,24 +88,41 @@ if (( CURRENT == 1 )); then fi case "$words[1]" in - search|-S) - _arguments \ - '(--macports)--macports[search the macports repository]' \ - '(--fink)--fink[search the fink repository]' ;; + install|reinstall|audit|home|homepage|log|info|abv|uses|cat|deps|edit|options) + _brew_all_formulae + _wanted formulae expl 'all formulae' compadd -a formulae ;; list|ls) _arguments \ '(--unbrewed)--unbrewed[files in brew --prefix not controlled by brew]' \ + '(--pinned)--pinned[list all versions of pinned formulae]' \ '(--versions)--versions[list all installed versions of a formula]' \ - '1: :->forms' && return 0 + '1: :->forms' && return 0 if [[ "$state" == forms ]]; then _brew_installed_formulae _wanted installed_formulae expl 'installed formulae' compadd -a installed_formulae fi ;; - install|home|homepage|log|info|abv|uses|cat|deps|edit|options) - _brew_all_formulae - _wanted formulae expl 'all formulae' compadd -a formulae ;; - remove|rm|uninstall|unlink|cleanup|link|ln) + remove|rm|uninstall|unlink|cleanup|link|ln|pin|unpin) _brew_installed_formulae _wanted installed_formulae expl 'installed formulae' compadd -a installed_formulae ;; + search|-S) + _arguments \ + '(--macports)--macports[search the macports repository]' \ + '(--fink)--fink[search the fink repository]' ;; + services) + if [[ -n "$words[2]" ]]; then + case "$words[2]" in + restart|start|stop) + _brew_running_services + _wanted running_services expl 'running services' compadd -a running_services ;; + esac + else + _describe -t commands "brew services subcommand" _service_arguments + fi ;; + untap) + _brew_installed_taps + _wanted installed_taps expl 'installed taps' compadd -a installed_taps ;; + upgrade) + _brew_outdated_formulae + _wanted outdated_formulae expl 'outdated formulae' compadd -a outdated_formulae ;; esac diff --git a/plugins/brew/brew.plugin.zsh b/plugins/brew/brew.plugin.zsh index c2e95884e..42fb80c9a 100644 --- a/plugins/brew/brew.plugin.zsh +++ b/plugins/brew/brew.plugin.zsh @@ -1 +1,2 @@ alias brews='brew list -1' +alias bubu="brew update && brew upgrade && brew cleanup" diff --git a/plugins/bundler/README.md b/plugins/bundler/README.md new file 
mode 100644 index 000000000..56f0c7176 --- /dev/null +++ b/plugins/bundler/README.md @@ -0,0 +1,49 @@ +# Bundler + +- adds completion for basic bundler commands +- adds short aliases for common bundler commands + - `be` aliased to `bundle exec` + - `bl` aliased to `bundle list` + - `bp` aliased to `bundle package` + - `bo` aliased to `bundle open` + - `bu` aliased to `bundle update` + - `bi` aliased to `bundle install --jobs=<cpu core count>` (only for bundler `>= 1.4.0`) +- adds a wrapper for common gems: + - looks for a binstub under `./bin/` and executes it (if present) + - calls `bundle exec <gem executable>` otherwise + +For a full list of *common gems* being wrapped by default please look at the `bundler.plugin.zsh` file. + +## Configuration + +Please use the exact name of the executable and not the gem name. + +### Add additional gems to be wrapped + +Add this before the plugin-list in your `.zshrc`: +```sh +BUNDLED_COMMANDS=(rubocop) +plugins=(... bundler ...) +``` +This will add the wrapper for the `rubocop` gem (i.e. the executable). + + +### Exclude gems from being wrapped + +Add this before the plugin-list in your `.zshrc`: +```sh +UNBUNDLED_COMMANDS=(foreman spin) +plugins=(... bundler ...) +``` +This will exclude the `foreman` and `spin` gems (i.e. their executable) from being wrapped. + +## Excluded gems + +These gems should not be called with `bundle exec`. Please see the Issues on GitHub for clarification. + +`berks` +`foreman` +`mailcatcher` +`rails` +`ruby` +`spin` diff --git a/plugins/bundler/_bundler b/plugins/bundler/_bundler index 5d22cac9a..ba647ab80 100644 --- a/plugins/bundler/_bundler +++ b/plugins/bundler/_bundler @@ -18,11 +18,14 @@ case $state in "check[Determine whether the requirements for your application are installed]" \ "list[Show all of the gems in the current bundle]" \ "show[Show the source location of a particular gem in the bundle]" \ + "outdated[Show all of the outdated gems in the current bundle]" \ "console[Start an IRB session in the context of the current bundle]" \ "open[Open an installed gem in the editor]" \ "viz[Generate a visual representation of your dependencies]" \ "init[Generate a simple Gemfile, placed in the current directory]" \ "gem[Create a simple gem, suitable for development with bundler]" \ + "platform[Displays platform compatibility information]" \ + "clean[Cleans up unused gems in your bundler directory]" \ "help[Describe available tasks or one specific task]" ret=0 ;; @@ -38,11 +41,13 @@ case $state in 'check' \ 'list' \ 'show' \ + 'outdated' \ 'console' \ 'open' \ 'viz' \ 'init' \ 'gem' \ + 'platform' \ 'help' && ret=0 ;; install) @@ -62,6 +67,23 @@ case $state in exec) _normal && ret=0 ;; + clean) + _arguments \ + '(--force)--force[forces clean even if --path is not set]' \ + '(--dry-run)--dry-run[only print out changes, do not actually clean gems]' \ + '(--no-color)--no-color[Disable colorization in output]' \ + '(--verbose)--verbose[Enable verbose output mode]' + ret=0 + ;; + outdated) + _arguments \ + '(--pre)--pre[Check for newer pre-release gems]' \ + '(--source)--source[Check against a specific source]' \ + '(--local)--local[Do not attempt to fetch gems remotely and use the gem cache instead]' \ + '(--no-color)--no-color[Disable colorization in output]' \ + '(--verbose)--verbose[Enable verbose output mode]' + ret=0 + ;; (open|show) _gems=( $(bundle show 2> /dev/null | sed -e '/^ \*/!d; s/^ \* \([^ ]*\) .*/\1/') ) if [[ $_gems != "" ]]; then diff --git a/plugins/bundler/bundler.plugin.zsh 
b/plugins/bundler/bundler.plugin.zsh index f9843696b..617dcde71 100644 --- a/plugins/bundler/bundler.plugin.zsh +++ b/plugins/bundler/bundler.plugin.zsh @@ -1,31 +1,98 @@ alias be="bundle exec" -alias bi="bundle install" alias bl="bundle list" alias bp="bundle package" +alias bo="bundle open" alias bu="bundle update" +alias bi="bundle_install" -# The following is based on https://github.com/gma/bundler-exec +bundled_commands=( + annotate + cap + capify + cucumber + foodcritic + guard + irb + jekyll + kitchen + knife + middleman + nanoc + pry + puma + rackup + rainbows + rake + rspec + shotgun + sidekiq + spec + spork + spring + strainer + tailor + taps + thin + thor + unicorn + unicorn_rails +) -bundled_commands=(annotate cap capify cucumber foreman guard middleman nanoc rackup rainbows rake rspec ruby shotgun spec spork thin thor unicorn unicorn_rails puma zeus) +# Remove $UNBUNDLED_COMMANDS from the bundled_commands list +for cmd in $UNBUNDLED_COMMANDS; do + bundled_commands=(${bundled_commands#$cmd}); +done + +# Add $BUNDLED_COMMANDS to the bundled_commands list +for cmd in $BUNDLED_COMMANDS; do + bundled_commands+=($cmd); +done ## Functions +bundle_install() { + if _bundler-installed && _within-bundled-project; then + local bundler_version=`bundle version | cut -d' ' -f3` + if [[ $bundler_version > '1.4.0' || $bundler_version = '1.4.0' ]]; then + if [[ "$(uname)" == 'Darwin' ]] + then + local cores_num="$(sysctl hw.ncpu | awk '{print $2}')" + else + local cores_num="$(nproc)" + fi + bundle install --jobs=$cores_num $@ + else + bundle install $@ + fi + else + echo "Can't 'bundle install' outside a bundled project" + fi +} + _bundler-installed() { which bundle > /dev/null 2>&1 } _within-bundled-project() { - local check_dir=$PWD - while [ $check_dir != "/" ]; do + local check_dir="$PWD" + while [ "$check_dir" != "/" ]; do [ -f "$check_dir/Gemfile" ] && return check_dir="$(dirname $check_dir)" done false } +_binstubbed() { + [ -f "./bin/${1}" ] +} + _run-with-bundler() { if _bundler-installed && _within-bundled-project; then - bundle exec $@ + if _binstubbed $1; then + ./bin/$@ + else + bundle exec $@ + fi else $@ fi @@ -33,10 +100,11 @@ _run-with-bundler() { ## Main program for cmd in $bundled_commands; do + eval "function unbundled_$cmd () { $cmd \$@ }" eval "function bundled_$cmd () { _run-with-bundler $cmd \$@}" alias $cmd=bundled_$cmd if which _$cmd > /dev/null 2>&1; then - compdef _$cmd bundled_$cmd=$cmd + compdef _$cmd bundled_$cmd=$cmd fi done diff --git a/plugins/cabal/cabal.plugin.zsh b/plugins/cabal/cabal.plugin.zsh new file mode 100644 index 000000000..9f76add7a --- /dev/null +++ b/plugins/cabal/cabal.plugin.zsh @@ -0,0 +1,47 @@ +function cabal_sandbox_info() { + cabal_files=(*.cabal(N)) + if [ $#cabal_files -gt 0 ]; then + if [ -f cabal.sandbox.config ]; then + echo "%{$fg[green]%}sandboxed%{$reset_color%}" + else + echo "%{$fg[red]%}not sandboxed%{$reset_color%}" + fi + fi +} + +function _cabal_commands() { + local ret=1 state + _arguments ':subcommand:->subcommand' && ret=0 + + case $state in + subcommand) + subcommands=( + "bench:Run the benchmark, if any (configure with UserHooks)" + "build:Make this package ready for installation" + "check:Check the package for common mistakes" + "clean:Clean up after a build" + "copy:Copy teh files into the install locations" + "configure:Prepare to build the package" + "fetch:Downloads packages for later installation" + "haddock:Generate HAddock HTML documentation" + "help:Help about commands" + "hscolour:Generate HsColour colourised 
code, in HTML format" + "info:Display detailed information about a particular package" + "init:Interactively create a .cabal file" + "install:Installs a list of packages" + "list:List packages matching a search string" + "register:Register this package with the compiler" + "report:Upload build reports to a remote server" + "sdist:Generate a source distribution file (.tar.gz)" + "test:Run the test suite, if any (configure with UserHooks)" + "unpack:Unpacks packages for user inspection" + "update:Updates list of known packages" + "upload:Uploads source packages to Hackage" + ) + _describe -t subcommands 'cabal subcommands' subcommands && ret=0 + esac + + return ret +} + +compdef _cabal_commands cabal diff --git a/plugins/cake/cake.plugin.zsh b/plugins/cake/cake.plugin.zsh index 1d0d196ee..44cc47470 100644 --- a/plugins/cake/cake.plugin.zsh +++ b/plugins/cake/cake.plugin.zsh @@ -14,12 +14,9 @@ _cake_does_target_list_need_generating () { return 1; fi - if [ ! -f ${_cake_task_cache_file} ]; then return 0; - else - accurate=$(stat -f%m $_cake_task_cache_file) - changed=$(stat -f%m Cakefile) - return $(expr $accurate '>=' $changed) - fi + [ ! -f ${_cake_task_cache_file} ] && return 0; + [ ${_cake_task_cache_file} -nt Cakefile ] && return 0; + return 1; } _cake () { @@ -33,4 +30,4 @@ _cake () { fi } -compdef _cake cake
\ No newline at end of file +compdef _cake cake diff --git a/plugins/capistrano/_capistrano b/plugins/capistrano/_capistrano index cf6b50c7f..3cadf3d54 100644 --- a/plugins/capistrano/_capistrano +++ b/plugins/capistrano/_capistrano @@ -1,10 +1,10 @@ #compdef cap #autoload -if [ -f config/deploy.rb ]; then +if [[ -f config/deploy.rb || -f Capfile ]]; then if [[ ! -f .cap_tasks~ || config/deploy.rb -nt .cap_tasks~ ]]; then echo "\nGenerating .cap_tasks~..." > /dev/stderr - cap --tasks | grep '#' | cut -d " " -f 2 > .cap_tasks~ + cap -v --tasks | grep '#' | cut -d " " -f 2 > .cap_tasks~ fi compadd `cat .cap_tasks~` fi diff --git a/plugins/catimg/catimg.plugin.zsh b/plugins/catimg/catimg.plugin.zsh new file mode 100644 index 000000000..cb92f5986 --- /dev/null +++ b/plugins/catimg/catimg.plugin.zsh @@ -0,0 +1,17 @@ +################################################################################ +# catimg script by Eduardo San Martin Morote aka Posva # +# http://posva.net # +# # +# Ouput the content of an image to the stdout using the 256 colors of the # +# terminal. # +# Github: https://github.com/posva/catimg # +################################################################################ + + +function catimg() { + if [[ -x `which convert` ]]; then + zsh $ZSH/plugins/catimg/catimg.sh $@ + else + echo "catimg need convert (ImageMagick) to work)" + fi +} diff --git a/plugins/catimg/catimg.sh b/plugins/catimg/catimg.sh new file mode 100755 index 000000000..cd0f2e333 --- /dev/null +++ b/plugins/catimg/catimg.sh @@ -0,0 +1,88 @@ +################################################################################ +# catimg script by Eduardo San Martin Morote aka Posva # +# http://posva.net # +# # +# Ouput the content of an image to the stdout using the 256 colors of the # +# terminal. # +# Github: https://github.com/posva/catimg # +################################################################################ + +function help() { + echo "Usage catimg [-h] [-w width] [-c char] img" + echo "By default char is \" \" and w is the terminal width" +} + +# VARIABLES +COLOR_FILE=$(dirname $0)/colors.png +CHAR=" " + +WIDTH="" +IMG="" + +while getopts qw:c:h opt; do + case "$opt" in + w) WIDTH="$OPTARG" ;; + c) CHAR="$OPTARG" ;; + h) help; exit ;; + *) help ; exit 1;; + esac + done + +while [ "$1" ]; do + IMG="$1" + shift +done + +if [ "$IMG" = "" -o ! -f "$IMG" ]; then + help + exit 1 +fi + +if [ ! "$WIDTH" ]; then + COLS=$(expr $(tput cols) "/" $(echo -n "$CHAR" | wc -c)) +else + COLS=$(expr $WIDTH "/" $(echo -n "$CHAR" | wc -c)) +fi +WIDTH=$(convert "$IMG" -print "%w\n" /dev/null) +if [ "$WIDTH" -gt "$COLS" ]; then + WIDTH=$COLS +fi + +REMAP="" +if convert "$IMG" -resize $COLS\> +dither -remap $COLOR_FILE /dev/null ; then + REMAP="-remap $COLOR_FILE" +else + echo "The version of convert is too old, don't expect good results :(" >&2 + #convert "$IMG" -colors 256 PNG8:tmp.png + #IMG="tmp.png" +fi + +# Display the image +I=0 +convert "$IMG" -resize $COLS\> +dither `echo $REMAP` txt:- 2>/dev/null | +sed -e 's/.*none.*/NO NO NO/g' -e '1d;s/^.*(\(.*\)[,)].*$/\1/g;y/,/ /' | +while read R G B f; do + if [ ! 
"$R" = "NO" ]; then + if [ "$R" -eq "$G" -a "$G" -eq "$B" ]; then + (( + I++, + IDX = 232 + R * 23 / 255 + )) + else + (( + I++, + IDX = 16 + + R * 5 / 255 * 36 + + G * 5 / 255 * 6 + + B * 5 / 255 + )) + fi + #echo "$R,$G,$B: $IDX" + echo -ne "\e[48;5;${IDX}m${CHAR}" + else + (( I++ )) + echo -ne "\e[0m${CHAR}" + fi + # New lines + (( $I % $WIDTH )) || echo -e "\e[0m" +done diff --git a/plugins/catimg/colors.png b/plugins/catimg/colors.png Binary files differnew file mode 100644 index 000000000..5f2c8126b --- /dev/null +++ b/plugins/catimg/colors.png diff --git a/plugins/celery/_celery b/plugins/celery/_celery new file mode 100644 index 000000000..63af9fad5 --- /dev/null +++ b/plugins/celery/_celery @@ -0,0 +1,129 @@ +#compdef celery +#autoload + +#celery zsh completion + +_celery () { +local -a _1st_arguments ifargs dopts controlargs + +typeset -A opt_args + +_1st_arguments=('worker' 'events' 'beat' 'shell' 'multi' 'amqp' 'status' 'inspect' \ + 'control' 'purge' 'list' 'migrate' 'call' 'result' 'report') +ifargs=('--app=' '--broker=' '--loader=' '--config=' '--version') +dopts=('--detach' '--umask=' '--gid=' '--uid=' '--pidfile=' '--logfile=' '--loglevel=') +controlargs=('--timeout' '--destination') +_arguments \ + '(-A --app=)'{-A,--app}'[app instance to use (e.g. module.attr_name):APP]' \ + '(-b --broker=)'{-b,--broker}'[url to broker. default is "amqp://guest@localhost//":BROKER]' \ + '(--loader)--loader[name of custom loader class to use.:LOADER]' \ + '(--config)--config[Name of the configuration module:CONFIG]' \ + '(--workdir)--workdir[Optional directory to change to after detaching.:WORKING_DIRECTORY]' \ + '(-q --quiet)'{-q,--quiet}'[Don"t show as much output.]' \ + '(-C --no-color)'{-C,--no-color}'[Don"t display colors.]' \ + '(--version)--version[show program"s version number and exit]' \ + '(- : *)'{-h,--help}'[show this help message and exit]' \ + '*:: :->subcmds' && return 0 + +if (( CURRENT == 1 )); then + _describe -t commands "celery subcommand" _1st_arguments + return +fi + +case "$words[1]" in + worker) + _arguments \ + '(-C --concurrency=)'{-C,--concurrency=}'[Number of child processes processing the queue. The default is the number of CPUs.]' \ + '(--pool)--pool=:::(processes eventlet gevent threads solo)' \ + '(--purge --discard)'{--discard,--purge}'[Purges all waiting tasks before the daemon is started.]' \ + '(-f --logfile=)'{-f,--logfile=}'[Path to log file. If no logfile is specified, stderr is used.]' \ + '(--loglevel=)--loglevel=:::(critical error warning info debug)' \ + '(-N --hostname=)'{-N,--hostname=}'[Set custom hostname, e.g. "foo.example.com".]' \ + '(-B --beat)'{-B,--beat}'[Also run the celerybeat periodic task scheduler.]' \ + '(-s --schedule=)'{-s,--schedule=}'[Path to the schedule database if running with the -B option. Defaults to celerybeat-schedule.]' \ + '(-S --statedb=)'{-S,--statedb=}'[Path to the state database.Default: None]' \ + '(-E --events)'{-E,--events}'[Send events that can be captured by monitors like celeryev, celerymon, and others.]' \ + '(--time-limit=)--time-limit=[nables a hard time limit (in seconds int/float) for tasks]' \ + '(--soft-time-limit=)--soft-time-limit=[Enables a soft time limit (in seconds int/float) for tasks]' \ + '(--maxtasksperchild=)--maxtasksperchild=[Maximum number of tasks a pool worker can execute before it"s terminated and replaced by a new worker.]' \ + '(-Q --queues=)'{-Q,--queues=}'[List of queues to enable for this worker, separated by comma. 
By default all configured queues are enabled.]' \ + '(-I --include=)'{-I,--include=}'[Comma separated list of additional modules to import.]' \ + '(--pidfile=)--pidfile=[Optional file used to store the process pid.]' \ + '(--autoscale=)--autoscale=[Enable autoscaling by providing max_concurrency, min_concurrency.]' \ + '(--autoreload)--autoreload[Enable autoreloading.]' \ + '(--no-execv)--no-execv[Don"t do execv after multiprocessing child fork.]' + compadd -a ifargs + ;; + inspect) + _values -s \ + 'active[dump active tasks (being processed)]' \ + 'active_queues[dump queues being consumed from]' \ + 'ping[ping worker(s)]' \ + 'registered[dump of registered tasks]' \ + 'report[get bugreport info]' \ + 'reserved[dump reserved tasks (waiting to be processed)]' \ + 'revoked[dump of revoked task ids]' \ + 'scheduled[dump scheduled tasks (eta/countdown/retry)]' \ + 'stats[dump worker statistics]' + compadd -a controlargs ifargs + ;; + control) + _values -s \ + 'add_consumer[tell worker(s) to start consuming a queue]' \ + 'autoscale[change autoscale settings]' \ + 'cancel_consumer[tell worker(s) to stop consuming a queue]' \ + 'disable_events[tell worker(s) to disable events]' \ + 'enable_events[tell worker(s) to enable events]' \ + 'pool_grow[start more pool processes]' \ + 'pool_shrink[use less pool processes]' \ + 'rate_limit[tell worker(s) to modify the rate limit for a task type]' \ + 'time_limit[tell worker(s) to modify the time limit for a task type.]' + compadd -a controlargs ifargs + ;; + multi) + _values -s \ + '--nosplash[Don"t display program info.]' \ + '--verbose[Show more output.]' \ + '--no-color[Don"t display colors.]' \ + '--quiet[Don"t show as much output.]' \ + 'start' 'restart' 'stopwait' 'stop' 'show' \ + 'names' 'expand' 'get' 'kill' + compadd -a ifargs + ;; + amqp) + _values -s \ + 'queue.declare' 'queue.purge' 'exchange.delete' 'basic.publish' \ + 'exchange.declare' 'queue.delete' 'queue.bind' 'basic.get' + ;; + list) + _values -s, 'bindings' + ;; + shell) + _values -s \ + '--ipython[force iPython.]' \ + '--bpython[force bpython.]' \ + '--python[force default Python shell.]' \ + '--without-tasks[don"t add tasks to locals.]' \ + '--eventlet[use eventlet.]' \ + '--gevent[use gevent.]' + compadd -a ifargs + ;; + beat) + _arguments \ + '(-s --schedule=)'{-s,--schedule=}'[Path to the schedule database. Defaults to celerybeat-schedule.]' \ + '(-S --scheduler=)'{-S,--scheduler=}'[Scheduler class to use. Default is celery.beat.PersistentScheduler.]' \ + '(--max-interval)--max-interval[]' + compadd -a dopts fargs + ;; + events) + _arguments \ + '(-d --dump)'{-d,--dump}'[Dump events to stdout.]' \ + '(-c --camera=)'{-c,--camera=}'[Take snapshots of events using this camera.]' \ + '(-F --frequency=)'{-F,--frequency=}'[Camera: Shutter frequency. Default is every 1.0 seconds.]' \ + '(-r --maxrate=)'{-r,--maxrate=}'[Camera: Optional shutter rate limit (e.g. 10/m).]' + compadd -a dopts fargs + ;; + *) + ;; + esac +} diff --git a/plugins/chruby/chruby.plugin.zsh b/plugins/chruby/chruby.plugin.zsh new file mode 100644 index 000000000..2a2c80cf6 --- /dev/null +++ b/plugins/chruby/chruby.plugin.zsh @@ -0,0 +1,99 @@ +# +# INSTRUCTIONS +# +# With either a manual or brew installed chruby things should just work. 
+# +# If you'd prefer to specify an explicit path to load chruby from +# you can set variables like so: +# +# zstyle :omz:plugins:chruby path /local/path/to/chruby.sh +# zstyle :omz:plugins:chruby auto /local/path/to/auto.sh +# +# TODO +# - autodetermine correct source path on non OS X systems +# - completion if ruby-install exists + +# rvm and rbenv plugins also provide this alias +alias rubies='chruby' + +local _chruby_path +local _chruby_auto + +_homebrew-installed() { + whence brew &> /dev/null +} + +_chruby-from-homebrew-installed() { + brew --prefix chruby &> /dev/null +} + +_ruby-build_installed() { + whence ruby-build &> /dev/null +} + +_ruby-install-installed() { + whence ruby-install &> /dev/null +} + +# Simple definition completer for ruby-build +if _ruby-build_installed; then + _ruby-build() { compadd $(ruby-build --definitions) } + compdef _ruby-build ruby-build +fi + +_source_from_omz_settings() { + zstyle -s :omz:plugins:chruby path _chruby_path + zstyle -s :omz:plugins:chruby auto _chruby_auto + + if _chruby_path && [[ -r _chruby_path ]]; then + source ${_chruby_path} + fi + + if _chruby_auto && [[ -r _chruby_auto ]]; then + source ${_chruby_auto} + fi +} + +_chruby_dirs() { + chrubydirs=($HOME/.rubies/ $PREFIX/opt/rubies) + for dir in chrubydirs; do + if [[ -d $dir ]]; then + RUBIES+=$dir + fi + done +} + +if _homebrew-installed && _chruby-from-homebrew-installed ; then + source $(brew --prefix chruby)/share/chruby/chruby.sh + source $(brew --prefix chruby)/share/chruby/auto.sh + _chruby_dirs +elif [[ -r "/usr/local/share/chruby/chruby.sh" ]] ; then + source /usr/local/share/chruby/chruby.sh + source /usr/local/share/chruby/auto.sh + _chruby_dirs +else + _source_from_omz_settings + _chruby_dirs +fi + +function ensure_chruby() { + $(whence chruby) +} + +function current_ruby() { + local _ruby + _ruby="$(chruby |grep \* |tr -d '* ')" + if [[ $(chruby |grep -c \*) -eq 1 ]]; then + echo ${_ruby} + else + echo "system" + fi +} + +function chruby_prompt_info() { + echo "$(current_ruby)" +} + +# complete on installed rubies +_chruby() { compadd $(chruby | tr -d '* ') } +compdef _chruby chruby diff --git a/plugins/coffee/_coffee b/plugins/coffee/_coffee index 5c8eb9a08..10b6b8164 100644 --- a/plugins/coffee/_coffee +++ b/plugins/coffee/_coffee @@ -35,27 +35,37 @@ # ------- # # * Mario Fernandez (https://github.com/sirech) +# * Dong Weiming (https://github.com/dongweiming) # # ------------------------------------------------------------------------------ -local curcontext="$curcontext" state line ret=1 +local curcontext="$curcontext" state line ret=1 version opts first second third typeset -A opt_args +version=(${(f)"$(_call_program version $words[1] --version)"}) +version=${${(z)${version[1]}}[3]} +first=$(echo $version|cut -d '.' -f 1) +second=$(echo $version|cut -d '.' -f 2) +third=$(echo $version|cut -d '.' 
-f 3) +if (( $first < 2 )) && (( $second < 7 )) && (( $third < 3 ));then + opts+=('(-l --lint)'{-l,--lint}'[pipe the compiled JavaScript through JavaScript Lint]' + '(-r --require)'{-r,--require}'[require a library before executing your script]:library') +fi + _arguments -C \ '(- *)'{-h,--help}'[display this help message]' \ '(- *)'{-v,--version}'[display the version number]' \ + $opts \ '(-b --bare)'{-b,--bare}'[compile without a top-level function wrapper]' \ '(-e --eval)'{-e,--eval}'[pass a string from the command line as input]:Inline Script' \ '(-i --interactive)'{-i,--interactive}'[run an interactive CoffeeScript REPL]' \ '(-j --join)'{-j,--join}'[concatenate the source CoffeeScript before compiling]:Destination JS file:_files -g "*.js"' \ - '(-l --lint)'{-l,--lint}'[pipe the compiled JavaScript through JavaScript Lint]' \ '(--nodejs)--nodejs[pass options directly to the "node" binary]' \ '(-c --compile)'{-c,--compile}'[compile to JavaScript and save as .js files]' \ '(-o --output)'{-o,--output}'[set the output directory for compiled JavaScript]:Output Directory:_files -/' \ '(-n -t -p)'{-n,--nodes}'[print out the parse tree that the parser produces]' \ '(-n -t -p)'{-p,--print}'[print out the compiled JavaScript]' \ '(-n -t -p)'{-t,--tokens}'[print out the tokens that the lexer/rewriter produce]' \ - '(-r --require)'{-r,--require}'[require a library before executing your script]:library' \ '(-s --stdio)'{-s,--stdio}'[listen for and compile scripts over stdio]' \ '(-w --watch)'{-w,--watch}'[watch scripts for changes and rerun commands]' \ '*:script or directory:_files' && ret=0 diff --git a/plugins/colemak/colemak.plugin.zsh b/plugins/colemak/colemak.plugin.zsh index 34d42c280..cb7cc5068 100644 --- a/plugins/colemak/colemak.plugin.zsh +++ b/plugins/colemak/colemak.plugin.zsh @@ -19,4 +19,4 @@ bindkey -a 'N' vi-join bindkey -a 'j' vi-forward-word-end bindkey -a 'J' vi-forward-blank-word-end -lesskey $ZSH_CUSTOM/plugins/colemak/colemak-less +lesskey $ZSH/plugins/colemak/colemak-less diff --git a/plugins/colored-man/colored-man.plugin.zsh b/plugins/colored-man/colored-man.plugin.zsh new file mode 100644 index 000000000..56056284a --- /dev/null +++ b/plugins/colored-man/colored-man.plugin.zsh @@ -0,0 +1,11 @@ +man() { + env \ + LESS_TERMCAP_mb=$(printf "\e[1;31m") \ + LESS_TERMCAP_md=$(printf "\e[1;31m") \ + LESS_TERMCAP_me=$(printf "\e[0m") \ + LESS_TERMCAP_se=$(printf "\e[0m") \ + LESS_TERMCAP_so=$(printf "\e[1;44;33m") \ + LESS_TERMCAP_ue=$(printf "\e[0m") \ + LESS_TERMCAP_us=$(printf "\e[1;32m") \ + man "$@" +} diff --git a/plugins/colorize/colorize.plugin.zsh b/plugins/colorize/colorize.plugin.zsh new file mode 100644 index 000000000..11b58e69d --- /dev/null +++ b/plugins/colorize/colorize.plugin.zsh @@ -0,0 +1,28 @@ +# Plugin for highlighting file content +# Plugin highlights file content based on the filename extension. +# If no highlighting method supported for given extension then it tries +# guess it by looking for file content. + +alias colorize='colorize_via_pygmentize' + +colorize_via_pygmentize() { + if [ ! -x "$(which pygmentize)" ]; then + echo "package \'pygmentize\' is not installed!" 
+ return -1 + fi + + if [ $# -eq 0 ]; then + pygmentize -g $@ + fi + + for FNAME in $@ + do + filename=$(basename "$FNAME") + lexer=`pygmentize -N \"$filename\"` + if [ "Z$lexer" != "Ztext" ]; then + pygmentize -l $lexer "$FNAME" + else + pygmentize -g "$FNAME" + fi + done +} diff --git a/plugins/command-not-found/command-not-found.plugin.zsh b/plugins/command-not-found/command-not-found.plugin.zsh index 567da1b45..f3d7ec2df 100644 --- a/plugins/command-not-found/command-not-found.plugin.zsh +++ b/plugins/command-not-found/command-not-found.plugin.zsh @@ -3,3 +3,7 @@ # this is installed in Ubuntu [[ -e /etc/zsh_command_not_found ]] && source /etc/zsh_command_not_found + +# Arch Linux command-not-found support, you must have package pkgfile installed +# https://wiki.archlinux.org/index.php/Pkgfile#.22Command_not_found.22_hook +[[ -e /usr/share/doc/pkgfile/command-not-found.zsh ]] && source /usr/share/doc/pkgfile/command-not-found.zsh diff --git a/plugins/common-aliases/common-aliases.plugin.zsh b/plugins/common-aliases/common-aliases.plugin.zsh new file mode 100644 index 000000000..228a39da0 --- /dev/null +++ b/plugins/common-aliases/common-aliases.plugin.zsh @@ -0,0 +1,93 @@ +# Advanced Aliases. +# Use with caution +# + +# ls, the common ones I use a lot shortened for rapid fire usage +alias l='ls -lFh' #size,show type,human readable +alias la='ls -lAFh' #long list,show almost all,show type,human readable +alias lr='ls -tRFh' #sorted by date,recursive,show type,human readable +alias lt='ls -ltFh' #long list,sorted by date,show type,human readable +alias ll='ls -l' #long list +alias ldot='ls -ld .*' +alias lS='ls -1FSsh' +alias lart='ls -1Fcart' +alias lrt='ls -1Fcrt' + +alias zshrc='vim ~/.zshrc' # Quick access to the ~/.zshrc file + +alias grep='grep --color' +alias sgrep='grep -R -n -H -C 5 --exclude-dir={.git,.svn,CVS} ' + +alias t='tail -f' + +# because typing 'cd' is A LOT of work!! +alias ..='cd ../' +alias ...='cd ../../' +alias ....='cd ../../../' +alias .....='cd ../../../../' + +# Command line head / tail shortcuts +alias -g H='| head' +alias -g T='| tail' +alias -g G='| grep' +alias -g L="| less" +alias -g M="| most" +alias -g LL="2>&1 | less" +alias -g CA="2>&1 | cat -A" +alias -g NE="2> /dev/null" +alias -g NUL="> /dev/null 2>&1" +alias -g P="2>&1| pygmentize -l pytb" + +alias dud='du -d 1 -h' +alias duf='du -sh *' +alias fd='find . -type d -name' +alias ff='find . 
-type f -name' + +alias h='history' +alias hgrep="fc -El 0 | grep" +alias help='man' +alias j='jobs' +alias p='ps -f' +alias sortnr='sort -n -r' +alias unexport='unset' + +alias whereami=display_info + +alias rm='rm -i' +alias cp='cp -i' +alias mv='mv -i' + +# zsh is able to auto-do some kungfoo +# depends on the SUFFIX :) +if [ ${ZSH_VERSION//\./} -ge 420 ]; then + # open browser on urls + _browser_fts=(htm html de org net com at cx nl se dk dk php) + for ft in $_browser_fts ; do alias -s $ft=$BROWSER ; done + + _editor_fts=(cpp cxx cc c hh h inl asc txt TXT tex) + for ft in $_editor_fts ; do alias -s $ft=$EDITOR ; done + + _image_fts=(jpg jpeg png gif mng tiff tif xpm) + for ft in $_image_fts ; do alias -s $ft=$XIVIEWER; done + + _media_fts=(ape avi flv mkv mov mp3 mpeg mpg ogg ogm rm wav webm) + for ft in $_media_fts ; do alias -s $ft=mplayer ; done + + #read documents + alias -s pdf=acroread + alias -s ps=gv + alias -s dvi=xdvi + alias -s chm=xchm + alias -s djvu=djview + + #list whats inside packed file + alias -s zip="unzip -l" + alias -s rar="unrar l" + alias -s tar="tar tf" + alias -s tar.gz="echo " + alias -s ace="unace l" +fi + +# Make zsh know about hosts already accessed by SSH +zstyle -e ':completion:*:(ssh|scp|sftp|rsh|rsync):hosts' hosts 'reply=(${=${${(f)"$(cat {/etc/ssh_,~/.ssh/known_}hosts(|2)(N) /dev/null)"}%%[# ]*}//,/ })' + diff --git a/plugins/composer/composer.plugin.zsh b/plugins/composer/composer.plugin.zsh index c9b762d07..2243dd3c1 100644 --- a/plugins/composer/composer.plugin.zsh +++ b/plugins/composer/composer.plugin.zsh @@ -10,9 +10,27 @@ _composer_get_command_list () { composer --no-ansi | sed "1,/Available commands/d" | awk '/^ [a-z]+/ { print $1 }' } +_composer_get_required_list () { + composer show -s --no-ansi | sed '1,/requires/d' | awk 'NF > 0 && !/^requires \(dev\)/{ print $1 }' +} + _composer () { + local curcontext="$curcontext" state line + typeset -A opt_args + _arguments \ + '1: :->command'\ + '*: :->args' if [ -f composer.json ]; then - compadd `_composer_get_command_list` + case $state in + command) + compadd `_composer_get_command_list` + ;; + *) + compadd `_composer_get_required_list` + ;; + esac + else + compadd create-project init search selfupdate show fi } @@ -24,6 +42,10 @@ alias csu='composer self-update' alias cu='composer update' alias ci='composer install' alias ccp='composer create-project' +alias cdu='composer dump-autoload' # install composer in the current directory -alias cget='curl -s https://getcomposer.org/installer | php'
\ No newline at end of file
+alias cget='curl -s https://getcomposer.org/installer | php'
+
+# Add Composer's global & local binaries to PATH
+export PATH=$PATH:~/.composer/vendor/bin:./bin
diff --git a/plugins/copydir/copydir.plugin.zsh b/plugins/copydir/copydir.plugin.zsh
new file mode 100644
index 000000000..37bb5e086
--- /dev/null
+++ b/plugins/copydir/copydir.plugin.zsh
@@ -0,0 +1,3 @@
+function copydir {
+  pwd | tr -d "\r\n" | pbcopy
+}
\ No newline at end of file diff --git a/plugins/copyfile/copyfile.plugin.zsh b/plugins/copyfile/copyfile.plugin.zsh new file mode 100644 index 000000000..944a903c6 --- /dev/null +++ b/plugins/copyfile/copyfile.plugin.zsh @@ -0,0 +1,5 @@ +function copyfile { + [[ "$#" != 1 ]] && return 1 + local file_to_copy=$1 + cat $file_to_copy | pbcopy +} diff --git a/plugins/debian/debian.plugin.zsh b/plugins/debian/debian.plugin.zsh index 55b90e379..6e45e0521 100644 --- a/plugins/debian/debian.plugin.zsh +++ b/plugins/debian/debian.plugin.zsh @@ -1,27 +1,28 @@ # Authors: # https://github.com/AlexBio # https://github.com/dbb +# https://github.com/Mappleconfusers # # Debian-related zsh aliases and functions for zsh # Use aptitude if installed, or apt-get if not. # You can just set apt_pref='apt-get' to override it. -if [[ -e $( which aptitude 2>&1 ) ]]; then +if [[ -e $( which -p aptitude 2>&1 ) ]]; then apt_pref='aptitude' else apt_pref='apt-get' fi # Use sudo by default if it's installed -if [[ -e $( which sudo 2>&1 ) ]]; then +if [[ -e $( which -p sudo 2>&1 ) ]]; then use_sudo=1 fi # Aliases ################################################################### # These are for more obscure uses of apt-get and aptitude that aren't covered # below. -alias ag='apt-get' -alias ap='aptitude' +alias age='apt-get' +alias api='aptitude' # Some self-explanatory aliases alias acs="apt-cache search" @@ -56,7 +57,7 @@ if [[ $use_sudo -eq 1 ]]; then alias ar='sudo $apt_pref remove' # apt-get only - alias ads='sudo $apt_pref dselect-upgrade' + alias ads='sudo apt-get dselect-upgrade' # Install all .deb files in the current directory. # Warning: you will need to put the glob in single quotes if you use: @@ -109,6 +110,38 @@ else ?not(~n`uname -r`))'\'' root' fi +# Completion ################################################################ + +# +# Registers a compdef for $1 that calls $apt_pref with the commands $2 +# To do that it creates a new completion function called _apt_pref_$2 +# +apt_pref_compdef() { + local f fb + f="_apt_pref_${2}" + + eval "function ${f}() { + shift words; + service=\"\$apt_pref\"; + words=(\"\$apt_pref\" '$2' \$words); + ((CURRENT++)) + test \"\${apt_pref}\" = 'aptitude' && _aptitude || _apt + }" + + compdef "$f" "$1" +} + +apt_pref_compdef aac "autoclean" +apt_pref_compdef abd "build-dep" +apt_pref_compdef ac "clean" +apt_pref_compdef ad "update" +apt_pref_compdef afu "update" +apt_pref_compdef ag "upgrade" +apt_pref_compdef ai "install" +apt_pref_compdef ail "install" +apt_pref_compdef ap "purge" +apt_pref_compdef ar "remove" +apt_pref_compdef ads "dselect-upgrade" # Misc. ##################################################################### # print all installed packages @@ -184,3 +217,11 @@ kerndeb () { "$revision" kernel_image kernel_headers } +# List packages by size +function apt-list-packages { + dpkg-query -W --showformat='${Installed-Size} ${Package} ${Status}\n' | \ + grep -v deinstall | \ + sort -n | \ + awk '{print $1" "$2}' +} + diff --git a/plugins/dirhistory/dirhistory.plugin.zsh b/plugins/dirhistory/dirhistory.plugin.zsh new file mode 100644 index 000000000..504d7ec14 --- /dev/null +++ b/plugins/dirhistory/dirhistory.plugin.zsh @@ -0,0 +1,132 @@ +## +# Navigate directory history using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories +# that the user has changed to in the past, and ALT-RIGHT undoes ALT-LEFT. 
+#
+
+dirhistory_past=(`pwd`)
+dirhistory_future=()
+export dirhistory_past
+export dirhistory_future
+
+export DIRHISTORY_SIZE=30
+
+# Pop the last element of dirhistory_past.
+# Pass the name of the variable to return the result in.
+# Returns the element if the array was not empty,
+# otherwise returns empty string.
+function pop_past() {
+  eval "$1='$dirhistory_past[$#dirhistory_past]'"
+  if [[ $#dirhistory_past -gt 0 ]]; then
+    dirhistory_past[$#dirhistory_past]=()
+  fi
+}
+
+function pop_future() {
+  eval "$1='$dirhistory_future[$#dirhistory_future]'"
+  if [[ $#dirhistory_future -gt 0 ]]; then
+    dirhistory_future[$#dirhistory_future]=()
+  fi
+}
+
+# Push a new element onto the end of dirhistory_past. If the size of the array
+# is >= DIRHISTORY_SIZE, the array is shifted
+function push_past() {
+  if [[ $#dirhistory_past -ge $DIRHISTORY_SIZE ]]; then
+    shift dirhistory_past
+  fi
+  if [[ $#dirhistory_past -eq 0 || $dirhistory_past[$#dirhistory_past] != "$1" ]]; then
+    dirhistory_past+=($1)
+  fi
+}
+
+function push_future() {
+  if [[ $#dirhistory_future -ge $DIRHISTORY_SIZE ]]; then
+    shift dirhistory_future
+  fi
+  if [[ $#dirhistory_future -eq 0 || $dirhistory_future[$#dirhistory_future] != "$1" ]]; then
+    dirhistory_future+=($1)
+  fi
+}
+
+# Called by zsh when the directory changes
+function chpwd() {
+  push_past `pwd`
+  # If DIRHISTORY_CD is not set...
+  if [[ -z "${DIRHISTORY_CD+x}" ]]; then
+    # ... clear future.
+    dirhistory_future=()
+  fi
+}
+
+function dirhistory_cd(){
+  DIRHISTORY_CD="1"
+  cd $1
+  unset DIRHISTORY_CD
+}
+
+# Move backward in directory history
+function dirhistory_back() {
+  local cw=""
+  local d=""
+  # Last element in dirhistory_past is the cwd.
+
+  pop_past cw
+  if [[ "" == "$cw" ]]; then
+    # Someone overwrote our variable. Recover it.
+ dirhistory_past=(`pwd`) + return + fi + + pop_past d + if [[ "" != "$d" ]]; then + dirhistory_cd $d + push_future $cw + else + push_past $cw + fi +} + + +# Move forward in directory history +function dirhistory_forward() { + local d="" + + pop_future d + if [[ "" != "$d" ]]; then + dirhistory_cd $d + push_past $d + fi +} + + +# Bind keys to history navigation +function dirhistory_zle_dirhistory_back() { + # Erase current line in buffer + zle kill-buffer + dirhistory_back + zle accept-line +} + +function dirhistory_zle_dirhistory_future() { + # Erase current line in buffer + zle kill-buffer + dirhistory_forward + zle accept-line +} + +zle -N dirhistory_zle_dirhistory_back +# xterm in normal mode +bindkey "\e[3D" dirhistory_zle_dirhistory_back +bindkey "\e[1;3D" dirhistory_zle_dirhistory_back +# Putty: +bindkey "\e\e[D" dirhistory_zle_dirhistory_back +# GNU screen: +bindkey "\eO3D" dirhistory_zle_dirhistory_back + +zle -N dirhistory_zle_dirhistory_future +bindkey "\e[3C" dirhistory_zle_dirhistory_future +bindkey "\e[1;3C" dirhistory_zle_dirhistory_future +bindkey "\e\e[C" dirhistory_zle_dirhistory_future +bindkey "\eO3C" dirhistory_zle_dirhistory_future + + diff --git a/plugins/dirpersist/dirpersist.plugin.zsh b/plugins/dirpersist/dirpersist.plugin.zsh index 0f6d9f435..88d1129d4 100644 --- a/plugins/dirpersist/dirpersist.plugin.zsh +++ b/plugins/dirpersist/dirpersist.plugin.zsh @@ -1,39 +1,19 @@ -#!/bin/zsh -# -# Make the dirstack more persistant -# -# Add dirpersist to $plugins in ~/.zshrc to load -# +# Save dirstack history to .zdirs +# adapted from: +# github.com/grml/grml-etc-core/blob/master/etc/zsh/zshrc#L1547 -# $zdirstore is the file used to persist the stack -zdirstore=~/.zdirstore +DIRSTACKSIZE=${DIRSTACKSIZE:-20} +dirstack_file=${dirstack_file:-${HOME}/.zdirs} -dirpersistinstall () { - if grep 'dirpersiststore' ~/.zlogout > /dev/null; then - else - if read -q \?"Would you like to set up your .zlogout file for use with dirspersist? (y/n) "; then - echo "# Store dirs stack\n# See $ZSH/plugins/dirspersist.plugin.zsh\ndirpersiststore" >> ~/.zlogout - else - echo "If you don't want this message to appear, remove dirspersist from \$plugins" - fi - fi -} - -dirpersiststore () { - dirs -p | perl -e 'foreach (reverse <STDIN>) {chomp;s/([& ])/\\$1/g ;print "if [ -d $_ ]; then pushd -q $_; fi\n"}' > $zdirstore -} +if [[ -f ${dirstack_file} ]] && [[ ${#dirstack[*]} -eq 0 ]] ; then + dirstack=( ${(f)"$(< $dirstack_file)"} ) + # "cd -" won't work after login by just setting $OLDPWD, so + [[ -d $dirstack[1] ]] && cd $dirstack[1] && cd $OLDPWD +fi -dirpersistrestore () { - if [ -f $zdirstore ]; then - source $zdirstore - fi +chpwd() { + if (( $DIRSTACKSIZE <= 0 )) || [[ -z $dirstack_file ]]; then return; fi + local -ax my_stack + my_stack=( ${PWD} ${dirstack} ) + builtin print -l ${(u)my_stack} >! 
${dirstack_file} } - -DIRSTACKSIZE=10 -setopt autopushd pushdminus pushdsilent pushdtohome pushdignoredups - -dirpersistinstall -dirpersistrestore - -# Make popd changes permanent without having to wait for logout -alias popd="popd;dirpersiststore" diff --git a/plugins/django/django.plugin.zsh b/plugins/django/django.plugin.zsh index 0bbd031fe..aaaa7d21d 100644 --- a/plugins/django/django.plugin.zsh +++ b/plugins/django/django.plugin.zsh @@ -20,6 +20,19 @@ _managepy-createcachetable(){ $nul_args && ret=0 } +_managepy-collectstatic(){ + _arguments -s : \ + '--link=-[Create a symbolic link to each file instead of copying.]:' \ + '--noinput=-[Do NOT prompt the user for input of any kind.]:' \ + '--no-post-process=-[Do NOT post process collected files.]:' \ + '--ignore=-[Ignore files or directories matching this glob-style pattern. Use multiple times to ignore more.]:' \ + '--dry-run=-[Do everything except modify the filesystem.]:' \ + '--clear=-[Clear the existing files using the storage before trying to copy or link the original file.]:' \ + '--link=-[Create a symbolic link to each file instead of copying.]:' \ + '--no-default-ignore=-[Do not ignore the common private glob-style patterns "CVS", ".*" and "*~".]:' \ + $nul_args && ret=0 +} + _managepy-dbshell(){ _arguments -s : \ $nul_args && ret=0 @@ -128,6 +141,7 @@ _managepy-sqlinitialdata(){} _managepy-sqlreset(){} _managepy-sqlsequencereset(){} _managepy-startapp(){} +_managepy-startproject(){} _managepy-syncdb() { _arguments -s : \ @@ -163,6 +177,7 @@ _managepy-commands() { commands=( 'adminindex:prints the admin-index template snippet for the given app name(s).' 'createcachetable:creates the table needed to use the SQL cache backend.' + 'collectstatic:Collect static files in a single location.' 'dbshell:runs the command-line client for the current DATABASE_ENGINE.' "diffsettings:displays differences between the current settings.py and Django's default settings." 'dumpdata:Output the contents of the database as a fixture of the given format.' @@ -184,6 +199,7 @@ _managepy-commands() { 'sqlreset:Prints the DROP TABLE SQL, then the CREATE TABLE SQL, for the given app name(s).' 'sqlsequencereset:Prints the SQL statements for resetting sequences for the given app name(s).' "startapp:Creates a Django app directory structure for the given app name in this project's directory." + "startproject:Creates a Django project directory structure for the given project name in this current directory." "syncdb:Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created." 'test:Runs the test suite for the specified applications, or the entire site if no apps are specified.' 'testserver:Runs a development server with data from the given fixture(s).' @@ -220,4 +236,5 @@ _managepy() { compdef _managepy manage.py compdef _managepy django +compdef _managepy django-admin.py compdef _managepy django-manage diff --git a/plugins/docker/README.md b/plugins/docker/README.md new file mode 100644 index 000000000..231a6dcf5 --- /dev/null +++ b/plugins/docker/README.md @@ -0,0 +1,19 @@ +## Docker autocomplete plugin + +- Adds autocomplete options for all docker commands. 
+- Will also show container IDs and image names where applicable
+
+#### Shows help for all commands
+![General Help](http://i.imgur.com/tUBO9jh.png "Help for all commands")
+
+
+#### Shows your downloaded images where applicable
+![Images](http://i.imgur.com/R8ZsWO1.png "Images")
+
+
+#### Shows your running containers where applicable
+![Containers](http://i.imgur.com/WQtbheg.png "Containers")
+
+
+
+Maintainer: Ahmed Azaan ([@aeonazaan](https://twitter.com/aeonazaan))
diff --git a/plugins/docker/_docker b/plugins/docker/_docker
new file mode 100644
index 000000000..28568a6e5
--- /dev/null
+++ b/plugins/docker/_docker
@@ -0,0 +1,356 @@
+#compdef docker
+
+# Docker autocompletion for oh-my-zsh
+# Requires: Docker installed
+# Author: Azaan (@aeonazaan)
+# Updates: Bob Maerten (@bobmaerten) for Docker v0.9+
+
+
+# ----- Helper functions
+# Output a selectable list of all running docker containers
+__docker_containers() {
+  declare -a cont_cmd
+  cont_cmd=($(docker ps | awk 'NR>1{print $1":[CON("$1")"$2"("$3")]"}'))
+  _describe 'containers' cont_cmd
+}
+
+# Output a selectable list of all docker images
+__docker_images() {
+  declare -a img_cmd
+  img_cmd=($(docker images | awk 'NR>1{print $1}'))
+  _describe 'images' img_cmd
+}
+
+# ----- Commands
+# Separate function for each command, makes extension easier later
+# ---------------------------
+__attach() {
+  _arguments \
+    '--no-stdin[Do not attach stdin]' \
+    '--sig-proxy[Proxify all received signal to the process (even in non-tty mode)]'
+  __docker_containers
+}
+
+__build() {
+  _arguments \
+    '--no-cache[Do not use cache when building the image]' \
+    '(-q,--quiet)'{-q,--quiet}'[Suppress the verbose output generated by the containers]' \
+    '--rm[Remove intermediate containers after a successful build]' \
+    '(-t,--tag=)'{-t,--tag=}'[Repository name (and optionally a tag) to be applied to the resulting image in case of success]' \
+    '*:files:_files'
+}
+
+__commit() {
+  _arguments \
+    '(-a,--author=)'{-a,--author=}'[Author (eg.
"John Hannibal Smith <hannibal@a-team.com>"]' \ + '(-m,--message=)'{-m,--message=}'[Commit message]' \ + '--run=[Config automatically applied when the image is run.]' + __docker_containers +} + +__cp() { + __docker_containers +} + +__diff() { + __docker_containers +} + +__events() { + _arguments \ + '--since=[Show previously created events and then stream.]' +} + +__export() { + __docker_containers +} + +__history() { + _arguments \ + '--no-trunc=[Don''t truncate output]' \ + '(-q,--quiet)'{-q,--quiet}'[Only show numeric IDs]' + __docker_images +} + +__images() { + _arguments \ + '(-a,--all)'{-a,--all}'[Show all images (by default filter out the intermediate images used to build)]' \ + '--no-trunc[Don''t truncate output]' \ + '(-q,--quiet=)'{-q,--quiet=}'[Only show numeric IDs]' \ + '(-t,--tree=)'{-t,--tree=}'[Output graph in tree format]' \ + '(-v,--viz=)'{-v,--viz=}'[Output graph in graphviz format]' + __docker_images +} + +__import() { + _arguments '*:files:_files' +} + +__info() { + # no arguments +} + +__insert() { + __docker_images + _arguments '*:files:_files' +} + +__inspect() { + __docker_images + __docker_containers +} + +__kill() { + __docker_containers +} + +__load() { + _arguments '*:files:_files' +} + +__login() { + _arguments \ + '(-e,--email=)'{-e,-email=}'[Email]' \ + '(-p,--password=)'{-p,-password=}'[Password]' \ + '(-u,--username=)'{-u,-username=}'[Username]' +} + +__logs() { + _arguments \ + '(-f,--follow)'{-f,-follow}'[Follow log output]' + __docker_containers +} + +__port() { + __docker_containers +} + +__top() { + __docker_containers +} + +__ps() { + _arguments \ + '(-a,--all)'{-a,--all}'[Show all containers. Only running containers are shown by default.]' \ + '--before-id=[Show only container created before Id, include non-running ones.]' \ + '(-l,--latest)'{-l,--latest}'[Show only the latest created container, include non-running ones.]' \ + '-n=[Show n last created containers, include non-running ones. default=-1.]' \ + '--no-trunc[Don''t truncate output]' \ + '(-q,--quiet)'{-q,--quiet}'[Only display numeric IDs]' \ + '(-s,--size)'{-s,--size}'[Display sizes]' \ + '--since-id=[Show only containers created since Id, include non-running ones.]' +} + +__pull() { + _arguments \ + '(-t,--tag=)'{-t,--tag=}'[Download tagged image in repository]' +} + +__push() { + # no arguments +} + +__restart() { + _arguments \ + '(-t,--time=)'{-t,--time=}'[Number of seconds to try to stop for before killing the container. Once killed it will then be restarted. 
Default=10]' + __docker_containers +} + +__rm() { + _arguments \ + '(-f,--force=)'{-f,--force=}'[Force removal of running container]' \ + '(-l,--link=)'{-l,--link=}'[Remove the specified link and not the underlying container]' \ + '(-v,--volumes=)'{-v,--volumes=}'[Remove the volumes associated to the container]' + __docker_containers +} + +__rmi() { + _arguments \ + '(-f,--force=)'{-f,--force=}'[Force]' + __docker_images +} + +__run() { + _arguments \ + '(-P,--publish-all=)'{-P,--publish-all=}'[Publish all exposed ports to the host interfaces]' \ + '(-a,--attach=)'{-a,--attach=}'[Attach to stdin, stdout or stderr.]' \ + '(-c,--cpu-shares=)'{-c,--cpu-shares=}': CPU shares (relative weight)]' \ + '--cidfile=[Write the container ID to the file]' \ + '(-d,--detach=)'{-d,--detach=}'[Detached mode: Run container in the background, print new container id]' \ + '--dns=[Set custom dns servers]' \ + '(-e,--env=)'{-e,--env=}'[Set environment variables]' \ + '--entrypoint=[Overwrite the default entrypoint of the image]' \ + '--expose=[Expose a port from the container without publishing it to your host]' \ + '(-h,--hostname=)'{-h,--hostname=}'[Container host name]' \ + '(-i,--interactive=)'{-i,--interactive=}'[Keep stdin open even if not attached]' \ + '--link=[Add link to another container (name:alias)]' \ + '--lxc-conf=[Add custom lxc options -lxc-conf="lxc.cgroup.cpuset.cpus = 0,1"]' \ + '(-m,--memory=)'{-m,--memory=}'[Memory limit (format: <number><optional unit>, where unit = b, k, m or g)]' \ + '(-n,--networking=)'{-n,--networking=}'[Enable networking for this container]' \ + '--name=[Assign a name to the container]' \ + '(-p,--publish=)'{-p,--publish=}'[Publish a container''s port to the host (format: ip:hostPort:containerPort | ip::containerPort | hostPort:containerPort) (use "docker port" to see the actual mapping)]' \ + '--privileged=[Give extended privileges to this container]' \ + '--rm=[Automatically remove the container when it exits (incompatible with -d)]' \ + '--sig-proxy=[Proxify all received signal to the process (even in non-tty mode)]' \ + '(-t,--tty=)'{-t,--tty=}'[Allocate a pseudo-tty]' \ + '(-u,--user=)'{-u,--user=}'[Username or UID]' \ + '(-v,--volume=)'{-v,--volume=}'[Bind mount a volume (e.g. 
from the host: -v /host:/container, from docker: -v /container)]' \ + '--volumes-from=[Mount volumes from the specified container(s)]' \ + '(-w,--workdir=)'{-w,--workdir=}'[Working directory inside the container]' + __docker_images +} + +__search() { + _arguments \ + '--no-trunc=[Don''t truncate output]' \ + '-s,--stars=)'{-s,--stars=}'[Only displays with at least xxx stars]' \ + '-t,--trusted=)'{-t,--trusted=}'[Only show trusted builds]' +} + +__save() { + __docker_images +} + +__start() { + _arguments \ + '(-a,--attach=)'{-a,--attach=}'[Attach container''s stdout/stderr and forward all signals to the process]' \ + '(-i,--interactive=)'{-i,--interactive=}'[Attach container''s stdin]' + __docker_containers +} + +__stop() { + _arguments \ + '(-t,--time=)'{-t,--time=}'[Number of seconds to wait for the container to stop before killing it.]' + __docker_containers +} + +__tag() { + _arguments \ + '(-f,--force=)'{-f,--force=}'[Force]' + __docker_images +} + +__version() { + # no arguments +} + +__wait() { + __docker_containers +} + +# end commands --------- +# ---------------------- + +local -a _1st_arguments +_1st_arguments=( + "attach":"Attach to a running container" + "build":"Build a container from a Dockerfile" + "commit":"Create a new image from a container's changes" + "cp":"Copy files/folders from the containers filesystem to the host path" + "diff":"Inspect changes on a container's filesystem" + "events":"Get real time events from the server" + "export":"Stream the contents of a container as a tar archive" + "history":"Show the history of an image" + "images":"List images" + "import":"Create a new filesystem image from the contents of a tarball" + "info":"Display system-wide information" + "insert":"Insert a file in an image" + "inspect":"Return low-level information on a container" + "kill":"Kill a running container" + "load":"Load an image from a tar archive" + "login":"Register or Login to the docker registry server" + "logs":"Fetch the logs of a container" + "port":"Lookup the public-facing port which is NAT-ed to PRIVATE_PORT" + "ps":"List containers" + "pull":"Pull an image or a repository from the docker registry server" + "push":"Push an image or a repository to the docker registry server" + "restart":"Restart a running container" + "rm":"Remove one or more containers" + "rmi":"Remove one or more images" + "run":"Run a command in a new container" + "save":"Save an image to a tar archive" + "search":"Search for an image in the docker index" + "start":"Start a stopped container" + "stop":"Stop a running container" + "tag":"Tag an image into a repository" + "top":"Lookup the running processes of a container" + "version":"Show the docker version information" + "wait":"Block until a container stops, then print its exit code" +) + +_arguments '*:: :->command' + +if (( CURRENT == 1 )); then + _describe -t commands "docker command" _1st_arguments + return +fi + +local -a _command_args +case "$words[1]" in + attach) + __attach ;; + build) + __build ;; + commit) + __commit ;; + cp) + __cp ;; + diff) + __diff ;; + events) + __events ;; + export) + __export ;; + history) + __history ;; + images) + __images ;; + import) + __import ;; + info) + __info ;; + insert) + __insert ;; + inspect) + __inspect ;; + kill) + __kill ;; + load) + __load ;; + login) + __login ;; + logs) + __logs ;; + port) + __port ;; + ps) + __ps ;; + pull) + __pull ;; + push) + __push ;; + restart) + __restart ;; + rm) + __rm ;; + rmi) + __rmi ;; + run) + __run ;; + save) + __save ;; + search) + __search ;; + start) + 
__start ;; + stop) + __stop ;; + tag) + __tag ;; + top) + __top ;; + version) + __version ;; + wait) + __wait ;; +esac diff --git a/plugins/emacs/emacs.plugin.zsh b/plugins/emacs/emacs.plugin.zsh new file mode 100644 index 000000000..a3f0085a8 --- /dev/null +++ b/plugins/emacs/emacs.plugin.zsh @@ -0,0 +1,56 @@ +# Emacs 23 daemon capability is a killing feature. +# One emacs process handles all your frames whether +# you use a frame opened in a terminal via a ssh connection or X frames +# opened on the same host. + +# Benefits are multiple +# - You don't have the cost of starting Emacs all the time anymore +# - Opening a file is as fast as Emacs does not have anything else to do. +# - You can share opened buffered across opened frames. +# - Configuration changes made at runtime are applied to all frames. + + +if "$ZSH/tools/require_tool.sh" emacs 23 2>/dev/null ; then + export EMACS_PLUGIN_LAUNCHER="$ZSH/plugins/emacs/emacsclient.sh" + + # set EDITOR if not already defined. + export EDITOR="${EDITOR:-${EMACS_PLUGIN_LAUNCHER}}" + + alias emacs="$EMACS_PLUGIN_LAUNCHER --no-wait" + alias e=emacs + + # same than M-x eval but from outside Emacs. + alias eeval="$EMACS_PLUGIN_LAUNCHER --eval" + # create a new X frame + alias eframe='emacsclient --alternate-editor "" --create-frame' + + # to code all night long + alias emasc=emacs + alias emcas=emacs + + # Write to standard output the path to the file + # opened in the current buffer. + function efile { + local cmd="(buffer-file-name (window-buffer))" + "$EMACS_PLUGIN_LAUNCHER" --eval "$cmd" | tr -d \" + } + + # Write to standard output the directory of the file + # opened in the the current buffer + function ecd { + local cmd="(let ((buf-name (buffer-file-name (window-buffer)))) + (if buf-name (file-name-directory buf-name)))" + + local dir="$($EMACS_PLUGIN_LAUNCHER --eval $cmd | tr -d \")" + if [ -n "$dir" ] ;then + echo "$dir" + else + echo "can not deduce current buffer filename." >/dev/stderr + return 1 + fi + } +fi + +## Local Variables: +## mode: sh +## End: diff --git a/plugins/emacs/emacsclient.sh b/plugins/emacs/emacsclient.sh new file mode 100755 index 000000000..625201a16 --- /dev/null +++ b/plugins/emacs/emacsclient.sh @@ -0,0 +1,12 @@ +#!/bin/sh + +# get list of available X windows. +x=`emacsclient --alternate-editor '' --eval '(x-display-list)' 2>/dev/null` + +if [ -z "$x" ] || [ "$x" = "nil" ] ;then + # Create one if there is no X window yet. + emacsclient --alternate-editor "" --create-frame "$@" +else + # prevent creating another X frame if there is at least one present. + emacsclient --alternate-editor "" "$@" +fi diff --git a/plugins/emoji-clock/emoji-clock.plugin.zsh b/plugins/emoji-clock/emoji-clock.plugin.zsh new file mode 100644 index 000000000..7351a02ec --- /dev/null +++ b/plugins/emoji-clock/emoji-clock.plugin.zsh @@ -0,0 +1,29 @@ +# ------------------------------------------------------------------------------ +# FILE: emoji-clock.plugin.zsh +# DESCRIPTION: The current time with half hour accuracy as an emoji symbol. 
+# Inspired by Andre Torrez' "Put A Burger In Your Shell" +# http://notes.torrez.org/2013/04/put-a-burger-in-your-shell.html +# AUTHOR: Alexis Hildebrandt (afh[at]surryhill.net) +# VERSION: 1.0.0 +# ----------------------------------------------------------------------------- + +function emoji-clock() { + hour=$(date '+%I') + minutes=$(date '+%M') + case $hour in + 01) clock="🕐"; [ $minutes -ge 30 ] && clock="🕜";; + 02) clock="🕑"; [ $minutes -ge 30 ] && clock="🕝";; + 03) clock="🕒"; [ $minutes -ge 30 ] && clock="🕞";; + 04) clock="🕓"; [ $minutes -ge 30 ] && clock="🕟";; + 05) clock="🕔"; [ $minutes -ge 30 ] && clock="🕠";; + 06) clock="🕕"; [ $minutes -ge 30 ] && clock="🕡";; + 07) clock="🕖"; [ $minutes -ge 30 ] && clock="🕢";; + 08) clock="🕗"; [ $minutes -ge 30 ] && clock="🕣";; + 09) clock="🕘"; [ $minutes -ge 30 ] && clock="🕤";; + 10) clock="🕙"; [ $minutes -ge 30 ] && clock="🕥";; + 11) clock="🕚"; [ $minutes -ge 30 ] && clock="🕦";; + 12) clock="🕛"; [ $minutes -ge 30 ] && clock="🕧";; + *) clock="⌛";; + esac + echo $clock +} diff --git a/plugins/encode64/encode64.plugin.zsh b/plugins/encode64/encode64.plugin.zsh index cfb7c6a18..4dbd1b453 100644 --- a/plugins/encode64/encode64.plugin.zsh +++ b/plugins/encode64/encode64.plugin.zsh @@ -1,2 +1,4 @@ encode64(){ echo -n $1 | base64 } -decode64(){ echo -n $1 | base64 -D }
\ No newline at end of file +decode64(){ echo -n $1 | base64 --decode } +alias e64=encode64 +alias d64=decode64 diff --git a/plugins/extract/extract.plugin.zsh b/plugins/extract/extract.plugin.zsh index 5c125e98b..a6e16ddf7 100644 --- a/plugins/extract/extract.plugin.zsh +++ b/plugins/extract/extract.plugin.zsh @@ -52,8 +52,8 @@ function extract() { (*.xz) unxz "$1" ;; (*.lzma) unlzma "$1" ;; (*.Z) uncompress "$1" ;; - (*.zip) unzip "$1" -d $extract_dir ;; - (*.rar) unrar e -ad "$1" ;; + (*.zip|*.war|*.jar|*.sublime-package) unzip "$1" -d $extract_dir ;; + (*.rar) unrar x -ad "$1" ;; (*.7z) 7za x "$1" ;; (*.deb) mkdir -p "$extract_dir/control" diff --git a/plugins/fabric/_fab b/plugins/fabric/_fab new file mode 100644 index 000000000..9628e1224 --- /dev/null +++ b/plugins/fabric/_fab @@ -0,0 +1,60 @@ +#compdef fab +#autoload + +local curcontext=$curcontext state line +declare -A opt_args + +declare target_list +target_list=(`fab --shortlist 2>/dev/null`) + +_targets() { + _describe -t commands "fabric targets" target_list +} + +output_levels=( + 'status: Status messages, i.e. noting when Fabric is done running, if the user used a keyboard interrupt, or when servers are disconnected from. These messages are almost always relevant and rarely verbose.' + 'aborts: Abort messages. Like status messages, these should really only be turned off when using Fabric as a library, and possibly not even then. Note that even if this output group is turned off, aborts will still occur – there just won’t be any output about why Fabric aborted!' + 'warnings: Warning messages. These are often turned off when one expects a given operation to fail, such as when using grep to test existence of text in a file. If paired with setting env.warn_only to True, this can result in fully silent warnings when remote programs fail. As with aborts, this setting does not control actual warning behavior, only whether warning messages are printed or hidden.' + 'running: Printouts of commands being executed or files transferred, e.g. [myserver] run: ls /var/www. Also controls printing of tasks being run, e.g. [myserver] Executing task ''foo''.' + 'stdout: Local, or remote, stdout, i.e. non-error output from commands.' + 'stderr: Local, or remote, stderr, i.e. error-related output from commands.' + 'user: User-generated output, i.e. local output printed by fabfile code via use of the fastprint or puts functions.' 
+) + +_arguments -w -S -C \ + '(-)'{-h,--help}'[show this help message and exit]: :->noargs' \ + '(-)'{-V,--version}'[show program''s version number and exit]: :->noargs' \ + '(-)--list[print list of possible commands and exit]: :->noargs' \ + '(-)--shortlist[print non-verbose list of possible commands and exit]: :->noargs' \ + '(--reject-unknown-hosts)--reject-unknown-hosts[reject unknown hosts]' \ + '(--no-pty)--no-pty[do not use pseudo-terminal in run/sudo]' \ + "(-d+ --display=-)"{-d+,--display=-}"[print detailed info about a given command]: :_targets" \ + '(-D --disable-known-hosts)'{-D,--disable-known-hosts}'[do not load user known_hosts file]' \ + '(-r --reject-unknown-hosts)'{-r,--reject-unknown-hosts}'[reject unknown hosts]' \ + '(-u+ --user=-)'{-u+,--user=-}'[username to use when connecting to remote hosts]: :' \ + '(-p+ --password=-)'{-p+,--password=-}'[password for use with authentication and/or sudo]: :' \ + '(-H+ --hosts=-)'{-H+,--hosts=-}'[comma separated list of hosts to operate on]: :' \ + '(-R+ --roles=-)'{-R+,--roles=-}'[comma separated list of roles to operate on]: :' \ + '(-a --no-agent)'{-a,--no-agent}'[don''t use the running SSH agent]' \ + '(-k --no-keys)'{-k,--no-keys}'[don''t load private key files from ~/.ssh/]' \ + '(-w --warn-only)'{-w,--warn-only}'[warn instead of abort, when commands fail]' \ + '-i+[path to SSH private key file. May be repeated]: :_files' \ + "(-f+ --fabfile=)"{-f+,--fabfile=}"[Python module file to import]: :_files -g *.py" \ + '(-c+ --config=-)'{-c+,--config=-}'[specify location of config file to use]: :_files' \ + '(-s+ --shell=-)'{-s+,--shell=-}'[specify a new shell, defaults to ''/bin/bash -l -c'']: :' \ + '(--hide=-)--hide=-[comma-separated list of output levels to hide]: :->levels' \ + '(--show=-)--show=-[comma-separated list of output levels to show]: :->levels' \ + '*::: :->subcmds' && return 0 + +if [[ CURRENT -ge 1 ]]; then + case $state in + noargs) + _message "nothing to complete";; + levels) + _describe -t commands "output levels" output_levels;; + *) + _targets;; + esac + + return +fi diff --git a/plugins/fabric/fabric.plugin.zsh b/plugins/fabric/fabric.plugin.zsh new file mode 100644 index 000000000..aca411329 --- /dev/null +++ b/plugins/fabric/fabric.plugin.zsh @@ -0,0 +1 @@ +# DECLARION: This plugin was created by vhbit. What I did is just making a portal from https://github.com/vhbit/fabric-zsh-autocomplete. diff --git a/plugins/fasd/fasd.plugin.zsh b/plugins/fasd/fasd.plugin.zsh index d42584f1a..8ad43fc23 100644 --- a/plugins/fasd/fasd.plugin.zsh +++ b/plugins/fasd/fasd.plugin.zsh @@ -1,5 +1,10 @@ if [ $commands[fasd] ]; then # check if fasd is installed - eval "$(fasd --init auto)" + fasd_cache="$HOME/.fasd-init-cache" + if [ "$(command -v fasd)" -nt "$fasd_cache" -o ! -s "$fasd_cache" ]; then + fasd --init auto >| "$fasd_cache" + fi + source "$fasd_cache" + unset fasd_cache alias v='f -e vim' alias o='a -e open' fi diff --git a/plugins/fastfile/fastfile.plugin.zsh b/plugins/fastfile/fastfile.plugin.zsh new file mode 100644 index 000000000..775e9483e --- /dev/null +++ b/plugins/fastfile/fastfile.plugin.zsh @@ -0,0 +1,138 @@ +################################################################################ +# FILE: fastfile.plugin.zsh +# DESCRIPTION: oh-my-zsh plugin file. +# AUTHOR: Michael Varner (musikmichael@web.de) +# VERSION: 1.0.0 +# +# This plugin adds the ability to on the fly generate and access file shortcuts. 
+# +################################################################################ + +########################### +# Settings + +# These can be overwritten any time. +# If they are not set yet, they will be +# overwritten with their default values + +default fastfile_dir "${HOME}/.fastfile/" +default fastfile_var_prefix "§" + +########################### +# Impl + +# +# Generate a shortcut +# +# Arguments: +# 1. name - The name of the shortcut (default: name of the file) +# 2. file - The file or directory to make the shortcut for +# STDOUT: +# => fastfle_print +# +function fastfile() { + test "$2" || 2="." + file=$(readlink -f "$2") + + test "$1" || 1="$(basename "$file")" + name=$(echo "$1" | tr " " "_") + + + mkdir -p "${fastfile_dir}" + echo "$file" > "$(fastfile_resolv "$name")" + + fastfile_sync + fastfile_print "$name" +} + +# +# Resolve the location of a shortcut file (the database file, where the value is written!) +# +# Arguments: +# 1. name - The name of the shortcut +# STDOUT: +# The path +# +function fastfile_resolv() { + echo "${fastfile_dir}${1}" +} + +# +# Get the real path of a shortcut +# +# Arguments: +# 1. name - The name of the shortcut +# STDOUT: +# The path +# +function fastfile_get() { + cat "$(fastfile_resolv "$1")" +} + +# +# Print a shortcut +# +# Arguments: +# 1. name - The name of the shortcut +# STDOUT: +# Name and value of the shortcut +# +function fastfile_print() { + echo "${fastfile_var_prefix}${1} -> $(fastfile_get "$1")" +} + +# +# List all shortcuts +# +# STDOUT: +# (=> fastfle_print) for each shortcut +# +function fastfile_ls() { + for f in "${fastfile_dir}"/*; do + file=`basename "$f"` # To enable simpler handeling of spaces in file names + varkey=`echo "$file" | tr " " "_"` + + # Special format for colums + echo "${fastfile_var_prefix}${varkey}|->|$(fastfile_get "$file")" + done | column -t -s "|" +} + +# +# Remove a shortcut +# +# Arguments: +# 1. name - The name of the shortcut (default: name of the file) +# 2. file - The file or directory to make the shortcut for +# STDOUT: +# => fastfle_print +# +function fastfile_rm() { + fastfile_print "$1" + rm "$(fastfile_resolv "$1")" +} + +# +# Generate the aliases for the shortcuts +# +function fastfile_sync() { + for f in "${fastfile_dir}"/*; do + file=`basename "$f"` # To enable simpler handeling of spaces in file names + varkey=`echo "$file" | tr " " "_"` + + alias -g "${fastfile_var_prefix}${varkey}"="'$(fastfile_get "$file")'" + done +} + +################################## +# Shortcuts + +alias ff=fastfile +alias ffp=fastfile_print +alias ffrm=fastfile_rm +alias ffls=fastfile_ls +alias ffsync=fastfile_sync + +################################## +# Init + +fastfile_sync
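To illustrate how the generated shortcuts are meant to be used, here is a short usage sketch (the project path is hypothetical; `§` is the default `fastfile_var_prefix` defined above):

```zsh
ff proj ~/code/my-project   # prints: §proj -> /home/user/code/my-project
cd §proj                    # the global alias expands to the stored path
ffls                        # list all shortcuts
ffrm proj                   # remove the shortcut again
```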
\ No newline at end of file
diff --git a/plugins/fbterm/fbterm.plugin.zsh b/plugins/fbterm/fbterm.plugin.zsh
new file mode 100644
index 000000000..4f0456016
--- /dev/null
+++ b/plugins/fbterm/fbterm.plugin.zsh
@@ -0,0 +1,6 @@
+# start fbterm automatically in /dev/tty*
+
+if [[ $(tty|grep -o '/dev/tty') = /dev/tty ]] ; then
+  fbterm
+  exit
+fi
diff --git a/plugins/forklift/forklift.plugin.zsh b/plugins/forklift/forklift.plugin.zsh
index 056069d36..b0e60a434 100644
--- a/plugins/forklift/forklift.plugin.zsh
+++ b/plugins/forklift/forklift.plugin.zsh
@@ -1,5 +1,6 @@
-# Open folder in ForkLift.app from console
+# Open folder in ForkLift.app or ForkLift2.app from console
 # Author: Adam Strzelecki nanoant.com, modified by Bodo Tasche bitboxer.de
+# Updated to support ForkLift2 by Johan Kaving
 #
 # Usage:
 #  fl [<folder>]
@@ -22,9 +23,33 @@ function fl {
     fi
   fi
   osascript 2>&1 1>/dev/null <<END
-    tell application "ForkLift"
-      activate
-    end tell
+
+    try
+      tell application "Finder"
+        set appName to name of application file id "com.binarynights.ForkLift2"
+      end tell
+    on error err_msg number err_num
+      tell application "Finder"
+        set appName to name of application file id "com.binarynights.ForkLift"
+      end tell
+    end try
+
+    if application appName is running
+      tell application appName
+        activate
+      end tell
+    else
+      tell application appName
+        activate
+      end tell
+      repeat until application appName is running
+        delay 1
+      end repeat
+      tell application appName
+        activate
+      end tell
+    end if
+
     tell application "System Events"
      tell application process "ForkLift"
       try
@@ -36,7 +61,7 @@ function fl {
        keystroke "g" using {command down, shift down}
        tell sheet 1 of topWindow
         set value of text field 1 to "$PWD"
-        keystroke return
+        keystroke return
        end tell
       end tell
      end tell
diff --git a/plugins/frontend-search/README.md b/plugins/frontend-search/README.md
new file mode 100644
index 000000000..32784d03b
--- /dev/null
+++ b/plugins/frontend-search/README.md
@@ -0,0 +1,82 @@
+## Rationale ##
+
+The idea of this plugin is to make it easy to search the most important frontend documentation and references straight from the terminal.
+
+## Installation ##
+
+I will send a Pull Request with this plugin to the official oh-my-zsh repository. Once it is accepted, you will only need to add the plugin to the plugins list in your ```.zshrc``` file.
+
+For now, you can clone this repository and add it to your ```custom/plugins``` folder
+
+```bash
+$ git clone git://github.com/willmendesneto/frontend-search.git ~/.oh-my-zsh/custom/plugins/frontend-search
+```
+
+After this, restart your terminal and the frontend-search plugin will be configured in your CLI.
+
+```bash
+...
+plugins=( <your-plugins-list>... frontend-search)
+...
+```
+
+## Commands ##
+
+All searches accept only the following format:
+
+* `frontend <search-content> <search-term>`
+
+The available search contents are:
+
+* `jquery <api.jquery.com>`
+* `mdn <developer.mozilla.org>`
+* `compass <compass-style.org>`
+* `html5please <html5please.com>`
+* `caniuse <caniuse.com>`
+* `aurajs <aurajs.com>`
+* `dartlang <api.dartlang.org/apidocs/channels/stable/dartdoc-viewer>`
+* `lodash <search>`
+* `qunit <api.qunitjs.com>`
+* `fontello <fontello.com>`
+* `bootsnipp <bootsnipp.com>`
+* `cssflow <cssflow.com>`
+* `codepen <codepen.io>`
+* `unheap <www.unheap.com>`
+* `bem <google.com/search?as_q=<search-term>&as_sitesearch=bem.info>`
+* `smacss <google.com/search?as_q=<search-term>&as_sitesearch=smacss.com>`
+* `angularjs <google.com/search?as_q=<search-term>&as_sitesearch=angularjs.org>`
+* `reactjs <google.com/search?as_q=<search-term>&as_sitesearch=facebook.github.io/react>`
+* `emberjs <emberjs.com>`
+
+
+## Aliases ##
+
+There are a few aliases provided as well:
+
+* `jquery` A shorthand for `frontend jquery`
+* `mdn` A shorthand for `frontend mdn`
+* `compass` A shorthand for `frontend compass`
+* `html5please` A shorthand for `frontend html5please`
+* `caniuse` A shorthand for `frontend caniuse`
+* `aurajs` A shorthand for `frontend aurajs`
+* `dartlang` A shorthand for `frontend dartlang`
+* `lodash` A shorthand for `frontend lodash`
+* `qunit` A shorthand for `frontend qunit`
+* `fontello` A shorthand for `frontend fontello`
+* `bootsnipp` A shorthand for `frontend bootsnipp`
+* `cssflow` A shorthand for `frontend cssflow`
+* `codepen` A shorthand for `frontend codepen`
+* `unheap` A shorthand for `frontend unheap`
+* `bem` A shorthand for `frontend bem`
+* `smacss` A shorthand for `frontend smacss`
+* `angularjs` A shorthand for `frontend angularjs`
+* `reactjs` A shorthand for `frontend reactjs`
+* `emberjs` A shorthand for `frontend emberjs`
+
+## Author
+
+**Wilson Mendes (willmendesneto)**
++ <https://twitter.com/willmendesneto>
++ <http://github.com/willmendesneto>
+
+New features coming soon.
diff --git a/plugins/frontend-search/frontend-search.plugin.zsh b/plugins/frontend-search/frontend-search.plugin.zsh
new file mode 100644
index 000000000..38b1a80ea
--- /dev/null
+++ b/plugins/frontend-search/frontend-search.plugin.zsh
@@ -0,0 +1,151 @@
+# frontend search from the terminal
+
+function frontend() {
+
+  # get the open command
+  local open_cmd
+  if [[ $(uname -s) == 'Darwin' ]]; then
+    open_cmd='open'
+  else
+    open_cmd='xdg-open'
+  fi
+
+  # no keyword provided, simply show how to call the function
+  if [[ $# -le 1 ]]; then
+    echo "Please provide a search-content and a search-term.\nEx:\nfrontend <search-content> <search-term>\n"
+    return 1
+  fi
+
+  # check whether the search content is supported
+  if [[ ! $1 =~ '(jquery|mdn|compass|html5please|caniuse|aurajs|dartlang|qunit|fontello|bootsnipp|cssflow|codepen|unheap|bem|smacss|angularjs|reactjs|emberjs)' ]];
+  then
+    echo "Search content $1 is not supported."
+ echo "Valid contents: (formats 'frontend <search-content>' or '<search-content>')" + echo "* jquery" + echo "* mdn" + echo "* compass" + echo "* html5please" + echo "* caniuse" + echo "* aurajs" + echo "* dartlang" + echo "* lodash" + echo "* qunit" + echo "* fontello" + echo "* bootsnipp" + echo "* cssflow" + echo "* codepen" + echo "* unheap" + echo "* bem" + echo "* smacss" + echo "* angularjs" + echo "* reactjs" + echo "* emberjs" + echo "" + + return 1 + fi + + local url="http://" + local query="" + + case "$1" in + "jquery") + url="${url}api.jquery.com" + url="${url}/?s=$2" ;; + "mdn") + url="${url}developer.mozilla.org" + url="${url}/search?q=$2" ;; + "compass") + url="${url}compass-style.org" + url="${url}/search?q=$2" ;; + "html5please") + url="${url}html5please.com" + url="${url}/#$2" ;; + "caniuse") + url="${url}caniuse.com" + url="${url}/#search=$2" ;; + "aurajs") + url="${url}aurajs.com" + url="${url}/api/#stq=$2" ;; + "dartlang") + url="${url}api.dartlang.org/apidocs/channels/stable/dartdoc-viewer" + url="${url}/dart-$2" ;; + "qunit") + url="${url}api.qunitjs.com" + url="${url}/?s=$2" ;; + "fontello") + url="${url}fontello.com" + url="${url}/#search=$2" ;; + "bootsnipp") + url="${url}bootsnipp.com" + url="${url}/search?q=$2" ;; + "cssflow") + url="${url}cssflow.com" + url="${url}/search?q=$2" ;; + "codepen") + url="${url}codepen.io" + url="${url}/search?q=$2" ;; + "unheap") + url="${url}www.unheap.com" + url="${url}/?s=$2" ;; + "bem") + url="${url}google.com" + url="${url}/search?as_q=$2&as_sitesearch=bem.info" ;; + "smacss") + url="${url}google.com" + url="${url}/search?as_q=$2&as_sitesearch=smacss.com" ;; + "angularjs") + url="${url}google.com" + url="${url}/search?as_q=$2&as_sitesearch=angularjs.org" ;; + "reactjs") + url="${url}google.com" + url="${url}/search?as_q=$2&as_sitesearch=facebook.github.io/react" ;; + "emberjs") + url="${url}emberjs.com" + url="${url}/api/#stq=$2&stp=1" ;; + *) echo "INVALID PARAM!" 
+ return ;; + esac + + echo "$url" + + $open_cmd "$url" + +} + +# javascript +alias jquery='frontend jquery' +alias mdn='frontend mdn' + +# pre processors frameworks +alias compass='frontend compass' + +# important links +alias html5please='frontend html5please' +alias caniuse='frontend caniuse' + +# components and libraries +alias aurajs='frontend aurajs' +alias dartlang='frontend dartlang' +alias lodash='frontend lodash' + +#tests +alias qunit='frontend qunit' + +#fonts +alias fontello='frontend fontello' + +# snippets +alias bootsnipp='frontend bootsnipp' +alias cssflow='frontend cssflow' +alias codepen='frontend codepen' +alias unheap='frontend unheap' + +# css architecture +alias bem='frontend bem' +alias smacss='frontend smacss' + +# frameworks +alias angularjs='frontend angularjs' +alias reactjs='frontend reactjs' +alias emberjs='frontend emberjs' diff --git a/plugins/gas/_gas b/plugins/gas/_gas index befdc9459..23e6d99aa 100644 --- a/plugins/gas/_gas +++ b/plugins/gas/_gas @@ -13,6 +13,7 @@ case $state in cmds=( "version:Prints Gas's version" "use:Uses author" + "ssh:Creates a new ssh key for an existing gas author" "show:Shows your current user" "list:Lists your authors" "import:Imports current user to gasconfig" @@ -25,8 +26,12 @@ case $state in args) case $line[1] in (use|delete) - _values -S , 'authors' $(cat ~/.gas | sed -n -e 's/^\[\(.*\)\]/\1/p') && ret=0 - ;; + VERSION=$(gas -v) + if [[ $VERSION == <1->.*.* ]] || [[ $VERSION == 0.<2->.* ]] || [[ $VERSION == 0.1.<6-> ]] then + _values -S , 'authors' $(cat ~/.gas/gas.authors | sed -n -e 's/^.*\[\(.*\)\]/\1/p') && ret=0 + else + _values -S , 'authors' $(cat ~/.gas | sed -n -e 's/^\[\(.*\)\]/\1/p') && ret=0 + fi esac ;; esac diff --git a/plugins/gem/_gem b/plugins/gem/_gem index 83cba40d1..92feebe95 100644 --- a/plugins/gem/_gem +++ b/plugins/gem/_gem @@ -4,13 +4,15 @@ # gem zsh completion, based on homebrew completion _gem_installed() { - installed_gems=(`gem list --local --no-versions`) + installed_gems=(${(f)"$(gem list --local --no-versions)"}) } local -a _1st_arguments + _1st_arguments=( + 'build:Build a gem from a gemspec' 'cert:Manage RubyGems certificates and signing settings' - 'check:Check installed gems' + 'check:Check a gem repository for added or missing files' 'cleanup:Clean up old versions of installed gems in the local repository' 'contents:Display the contents of the installed gems' 'dependency:Show the dependencies of an installed gem' @@ -21,7 +23,7 @@ _1st_arguments=( 'install:Install a gem into the local repository' 'list:Display gems whose name starts with STRING' 'lock:Generate a lockdown list of gems' - 'mirror:Mirror a gem repository' + 'mirror:Mirror all gem files (requires rubygems-mirror)' 'outdated:Display all gems that need updates' 'owner:Manage gem owners on RubyGems.org.' 
'pristine:Restores installed gems to pristine condition from files located in the gem cache' @@ -35,8 +37,9 @@ _1st_arguments=( 'stale:List gems along with access times' 'uninstall:Uninstall gems from the local repository' 'unpack:Unpack an installed gem to the current directory' - 'update:Update the named gems (or all installed gems) in the local repository' + 'update:Update installed gems to the latest version' 'which:Find the location of a library file you can require' + 'yank:Remove a specific gem version release from RubyGems.org' ) local expl @@ -53,6 +56,11 @@ if (( CURRENT == 1 )); then fi case "$words[1]" in + build) + _files -g "*.gemspec" + ;; + install) + _files ;; list) if [[ "$state" == forms ]]; then _gem_installed diff --git a/plugins/gem/gem.plugin.zsh b/plugins/gem/gem.plugin.zsh new file mode 100644 index 000000000..938f5c993 --- /dev/null +++ b/plugins/gem/gem.plugin.zsh @@ -0,0 +1,7 @@ +alias gemb="gem build *.gemspec" +alias gemp="gem push *.gem" + +# gemy GEM 0.0.0 = gem yank GEM -v 0.0.0 +function gemy { + gem yank $1 -v $2 +}
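A short usage sketch of the gem aliases added above (the gem name and version are hypothetical):

```zsh
gemb               # gem build *.gemspec
gemp               # gem push *.gem
gemy my_gem 0.1.3  # gem yank my_gem -v 0.1.3
```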
\ No newline at end of file diff --git a/plugins/git-flow-avh/git-flow-avh.plugin.zsh b/plugins/git-flow-avh/git-flow-avh.plugin.zsh new file mode 100644 index 000000000..ba98fff01 --- /dev/null +++ b/plugins/git-flow-avh/git-flow-avh.plugin.zsh @@ -0,0 +1,419 @@ +#!zsh +# +# Installation +# ------------ +# +# To achieve git-flow completion nirvana: +# +# 0. Update your zsh's git-completion module to the newest verion. +# From here. http://zsh.git.sourceforge.net/git/gitweb.cgi?p=zsh/zsh;a=blob_plain;f=Completion/Unix/Command/_git;hb=HEAD +# +# 1. Install this file. Either: +# +# a. Place it in your .zshrc: +# +# b. Or, copy it somewhere (e.g. ~/.git-flow-completion.zsh) and put the following line in +# your .zshrc: +# +# source ~/.git-flow-completion.zsh +# +# c. Or, use this file as a oh-my-zsh plugin. +# + +_git-flow () +{ + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + ':command:->command' \ + '*::options:->options' + + case $state in + (command) + + local -a subcommands + subcommands=( + 'init:Initialize a new git repo with support for the branching model.' + 'feature:Manage your feature branches.' + 'config:Manage your configuration.' + 'release:Manage your release branches.' + 'hotfix:Manage your hotfix branches.' + 'support:Manage your support branches.' + 'version:Shows version information.' + 'finish:Finish the branch you are currently on.' + 'delete:Delete the branch you are currently on.' + 'publish:Publish the branch you are currently on.' + ) + _describe -t commands 'git flow' subcommands + ;; + + (options) + case $line[1] in + + (init) + _arguments \ + -f'[Force setting of gitflow branches, even if already configured]' + ;; + + (version) + ;; + + (hotfix) + __git-flow-hotfix + ;; + + (release) + __git-flow-release + ;; + + (feature) + __git-flow-feature + ;; + (config) + __git-flow-config + ;; + + esac + ;; + esac +} + +__git-flow-release () +{ + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + ':command:->command' \ + '*::options:->options' + + case $state in + (command) + + local -a subcommands + subcommands=( + 'start:Start a new release branch.' + 'finish:Finish a release branch.' + 'list:List all your release branches. (Alias to `git flow release`)' + 'publish:Publish release branch to remote.' + 'track:Checkout remote release branch.' + 'delete:Delete a release branch.' 
+ ) + _describe -t commands 'git flow release' subcommands + _arguments \ + -v'[Verbose (more) output]' + ;; + + (options) + case $line[1] in + + (start) + _arguments \ + -F'[Fetch from origin before performing finish]'\ + ':version:__git_flow_version_list' + ;; + + (finish) + _arguments \ + -F'[Fetch from origin before performing finish]' \ + -s'[Sign the release tag cryptographically]'\ + -u'[Use the given GPG-key for the digital signature (implies -s)]'\ + -m'[Use the given tag message]'\ + -p'[Push to $ORIGIN after performing finish]'\ + ':version:__git_flow_version_list' + ;; + + (delete) + _arguments \ + -f'[Force deletion]' \ + -r'[Delete remote branch]' \ + ':version:__git_flow_version_list' + ;; + + (publish) + _arguments \ + ':version:__git_flow_version_list' + ;; + + (track) + _arguments \ + ':version:__git_flow_version_list' + ;; + + *) + _arguments \ + -v'[Verbose (more) output]' + ;; + esac + ;; + esac +} + +__git-flow-hotfix () +{ + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + ':command:->command' \ + '*::options:->options' + + case $state in + (command) + + local -a subcommands + subcommands=( + 'start:Start a new hotfix branch.' + 'finish:Finish a hotfix branch.' + 'delete:Delete a hotfix branch.' + 'list:List all your hotfix branches. (Alias to `git flow hotfix`)' + ) + _describe -t commands 'git flow hotfix' subcommands + _arguments \ + -v'[Verbose (more) output]' + ;; + + (options) + case $line[1] in + + (start) + _arguments \ + -F'[Fetch from origin before performing finish]'\ + ':hotfix:__git_flow_version_list'\ + ':branch-name:__git_branch_names' + ;; + + (finish) + _arguments \ + -F'[Fetch from origin before performing finish]' \ + -s'[Sign the release tag cryptographically]'\ + -u'[Use the given GPG-key for the digital signature (implies -s)]'\ + -m'[Use the given tag message]'\ + -p'[Push to $ORIGIN after performing finish]'\ + ':hotfix:__git_flow_hotfix_list' + ;; + + (delete) + _arguments \ + -f'[Force deletion]' \ + -r'[Delete remote branch]' \ + ':hotfix:__git_flow_hotfix_list' + ;; + + *) + _arguments \ + -v'[Verbose (more) output]' + ;; + esac + ;; + esac +} + +__git-flow-feature () +{ + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + ':command:->command' \ + '*::options:->options' + + case $state in + (command) + + local -a subcommands + subcommands=( + 'start:Start a new feature branch.' + 'finish:Finish a feature branch.' + 'delete:Delete a feature branch.' + 'list:List all your feature branches. (Alias to `git flow feature`)' + 'publish:Publish feature branch to remote.' + 'track:Checkout remote feature branch.' + 'diff:Show all changes.' + 'rebase:Rebase from integration branch.' + 'checkout:Checkout local feature branch.' + 'pull:Pull changes from remote.' 
+ ) + _describe -t commands 'git flow feature' subcommands + _arguments \ + -v'[Verbose (more) output]' + ;; + + (options) + case $line[1] in + + (start) + _arguments \ + -F'[Fetch from origin before performing finish]'\ + ':feature:__git_flow_feature_list'\ + ':branch-name:__git_branch_names' + ;; + + (finish) + _arguments \ + -F'[Fetch from origin before performing finish]' \ + -r'[Rebase instead of merge]'\ + ':feature:__git_flow_feature_list' + ;; + + (delete) + _arguments \ + -f'[Force deletion]' \ + -r'[Delete remote branch]' \ + ':feature:__git_flow_feature_list' + ;; + + (publish) + _arguments \ + ':feature:__git_flow_feature_list'\ + ;; + + (track) + _arguments \ + ':feature:__git_flow_feature_list'\ + ;; + + (diff) + _arguments \ + ':branch:__git_branch_names'\ + ;; + + (rebase) + _arguments \ + -i'[Do an interactive rebase]' \ + ':branch:__git_branch_names' + ;; + + (checkout) + _arguments \ + ':branch:__git_flow_feature_list'\ + ;; + + (pull) + _arguments \ + ':remote:__git_remotes'\ + ':branch:__git_branch_names' + ;; + + *) + _arguments \ + -v'[Verbose (more) output]' + ;; + esac + ;; + esac +} + +__git-flow-config () +{ + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + ':command:->command' \ + '*::options:->options' + + case $state in + (command) + + local -a subcommands + subcommands=( + 'list:List the configuration. (Alias to `git flow config`)' + 'set:Set the configuration option' + ) + _describe -t commands 'git flow config' subcommands + ;; + + (options) + case $line[1] in + + (set) + _arguments \ + --local'[Use repository config file]' \ + --global'[Use global config file]'\ + --system'[Use system config file]'\ + --file'[Use given config file]'\ + ':option:(master develop feature hotfix release support versiontagprefix)' + ;; + + *) + _arguments \ + --local'[Use repository config file]' \ + --global'[Use global config file]'\ + --system'[Use system config file]'\ + --file'[Use given config file]' + ;; + esac + ;; + esac +} +__git_flow_version_list () +{ + local expl + declare -a versions + + versions=(${${(f)"$(_call_program versions git flow release list 2> /dev/null | tr -d ' |*')"}}) + __git_command_successful || return + + _wanted versions expl 'version' compadd $versions +} + +__git_flow_feature_list () +{ + local expl + declare -a features + + features=(${${(f)"$(_call_program features git flow feature list 2> /dev/null | tr -d ' |*')"}}) + __git_command_successful || return + + _wanted features expl 'feature' compadd $features +} + +__git_remotes () { + local expl gitdir remotes + + gitdir=$(_call_program gitdir git rev-parse --git-dir 2>/dev/null) + __git_command_successful || return + + remotes=(${${(f)"$(_call_program remotes git config --get-regexp '"^remote\..*\.url$"')"}//#(#b)remote.(*).url */$match[1]}) + __git_command_successful || return + + # TODO: Should combine the two instead of either or. 
+ if (( $#remotes > 0 )); then + _wanted remotes expl remote compadd $* - $remotes + else + _wanted remotes expl remote _files $* - -W "($gitdir/remotes)" -g "$gitdir/remotes/*" + fi +} + +__git_flow_hotfix_list () +{ + local expl + declare -a hotfixes + + hotfixes=(${${(f)"$(_call_program hotfixes git flow hotfix list 2> /dev/null | tr -d ' |*')"}}) + __git_command_successful || return + + _wanted hotfixes expl 'hotfix' compadd $hotfixes +} + +__git_branch_names () { + local expl + declare -a branch_names + + branch_names=(${${(f)"$(_call_program branchrefs git for-each-ref --format='"%(refname)"' refs/heads 2>/dev/null)"}#refs/heads/}) + __git_command_successful || return + + _wanted branch-names expl branch-name compadd $* - $branch_names +} + +__git_command_successful () { + if (( ${#pipestatus:#0} > 0 )); then + _message 'not a git repository' + return 1 + fi + return 0 +} + +zstyle ':completion:*:*:git:*' user-commands flow:'provide high-level repository operations' diff --git a/plugins/git-flow/git-flow.plugin.zsh b/plugins/git-flow/git-flow.plugin.zsh index ec21019e6..b9ea06844 100644 --- a/plugins/git-flow/git-flow.plugin.zsh +++ b/plugins/git-flow/git-flow.plugin.zsh @@ -20,6 +20,12 @@ # c. Or, use this file as a oh-my-zsh plugin. # +#Alias +alias gf='git flow' +alias gcd='git checkout develop' +alias gch='git checkout hotfix' +alias gcr='git checkout release' + _git-flow () { local curcontext="$curcontext" state line @@ -88,6 +94,8 @@ __git-flow-release () 'start:Start a new release branch.' 'finish:Finish a release branch.' 'list:List all your release branches. (Alias to `git flow release`)' + 'publish: public' + 'track: track' ) _describe -t commands 'git flow release' subcommands _arguments \ @@ -115,6 +123,16 @@ __git-flow-release () ':version:__git_flow_version_list' ;; + (publish) + _arguments \ + ':version:__git_flow_version_list'\ + ;; + + (track) + _arguments \ + ':version:__git_flow_version_list'\ + ;; + *) _arguments \ -v'[Verbose (more) output]' diff --git a/plugins/git-prompt/git-prompt.plugin.zsh b/plugins/git-prompt/git-prompt.plugin.zsh new file mode 100644 index 000000000..01b8a88d9 --- /dev/null +++ b/plugins/git-prompt/git-prompt.plugin.zsh @@ -0,0 +1,60 @@ +# ZSH Git Prompt Plugin from: +# http://github.com/olivierverdier/zsh-git-prompt +# +export __GIT_PROMPT_DIR=$ZSH/plugins/git-prompt +# Initialize colors. +autoload -U colors +colors + +# Allow for functions in the prompt. +setopt PROMPT_SUBST + +## Enable auto-execution of functions. +typeset -ga preexec_functions +typeset -ga precmd_functions +typeset -ga chpwd_functions + +# Append git functions needed for prompt. 
+preexec_functions+='preexec_update_git_vars' +precmd_functions+='precmd_update_git_vars' +chpwd_functions+='chpwd_update_git_vars' + +## Function definitions +function preexec_update_git_vars() { + case "$2" in + git*) + __EXECUTED_GIT_COMMAND=1 + ;; + esac +} + +function precmd_update_git_vars() { + if [ -n "$__EXECUTED_GIT_COMMAND" ]; then + update_current_git_vars + unset __EXECUTED_GIT_COMMAND + fi +} + +function chpwd_update_git_vars() { + update_current_git_vars +} + +function update_current_git_vars() { + unset __CURRENT_GIT_STATUS + + local gitstatus="$__GIT_PROMPT_DIR/gitstatus.py" + _GIT_STATUS=`python ${gitstatus}` + __CURRENT_GIT_STATUS=("${(f)_GIT_STATUS}") +} + +function prompt_git_info() { + if [ -n "$__CURRENT_GIT_STATUS" ]; then + echo "(%{${fg[red]}%}$__CURRENT_GIT_STATUS[1]%{${fg[default]}%}$__CURRENT_GIT_STATUS[2]%{${fg[magenta]}%}$__CURRENT_GIT_STATUS[3]%{${fg[default]}%})" + fi +} + +# Set the prompt. +#PROMPT='%B%m%~%b$(prompt_git_info) %# ' +# for a right prompt: +#RPROMPT='%b$(prompt_git_info)' +RPROMPT='$(prompt_git_info)' diff --git a/plugins/git-prompt/gitstatus.py b/plugins/git-prompt/gitstatus.py new file mode 100644 index 000000000..256841432 --- /dev/null +++ b/plugins/git-prompt/gitstatus.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +from subprocess import Popen, PIPE +import re + +# change those symbols to whatever you prefer +symbols = { + 'ahead of': '↑', + 'behind': '↓', + 'staged': '♦', + 'changed': '‣', + 'untracked': '…', + 'clean': '⚡', + 'unmerged': '≠', + 'sha1': ':' +} + +output, error = Popen( + ['git', 'status'], stdout=PIPE, stderr=PIPE, universal_newlines=True).communicate() + +if error: + import sys + sys.exit(0) +lines = output.splitlines() + +behead_re = re.compile( + r"^# Your branch is (ahead of|behind) '(.*)' by (\d+) commit") +diverge_re = re.compile(r"^# and have (\d+) and (\d+) different") + +status = '' +staged = re.compile(r'^# Changes to be committed:$', re.MULTILINE) +changed = re.compile(r'^# Changed but not updated:$', re.MULTILINE) +untracked = re.compile(r'^# Untracked files:$', re.MULTILINE) +unmerged = re.compile(r'^# Unmerged paths:$', re.MULTILINE) + + +def execute(*command): + out, err = Popen(stdout=PIPE, stderr=PIPE, *command).communicate() + if not err: + nb = len(out.splitlines()) + else: + nb = '?' 
+ return nb + +if staged.search(output): + nb = execute( + ['git', 'diff', '--staged', '--name-only', '--diff-filter=ACDMRT']) + status += '%s%s' % (symbols['staged'], nb) +if unmerged.search(output): + nb = execute(['git', 'diff', '--staged', '--name-only', '--diff-filter=U']) + status += '%s%s' % (symbols['unmerged'], nb) +if changed.search(output): + nb = execute(['git', 'diff', '--name-only', '--diff-filter=ACDMRT']) + status += '%s%s' % (symbols['changed'], nb) +if untracked.search(output): + status += symbols['untracked'] +if status == '': + status = symbols['clean'] + +remote = '' + +bline = lines[0] +if bline.find('Not currently on any branch') != -1: + branch = symbols['sha1'] + Popen([ + 'git', + 'rev-parse', + '--short', + 'HEAD'], stdout=PIPE).communicate()[0][:-1] +else: + branch = bline.split(' ')[-1] + bstatusline = lines[1] + match = behead_re.match(bstatusline) + if match: + remote = symbols[match.groups()[0]] + remote += match.groups()[2] + elif lines[2:]: + div_match = diverge_re.match(lines[2]) + if div_match: + remote = "{behind}{1}{ahead of}{0}".format( + *div_match.groups(), **symbols) + +print('\n'.join([branch, remote, status])) diff --git a/plugins/git-remote-branch/git-remote-branch.plugin.zsh b/plugins/git-remote-branch/git-remote-branch.plugin.zsh index ff98cbf87..6c5ab8f70 100644 --- a/plugins/git-remote-branch/git-remote-branch.plugin.zsh +++ b/plugins/git-remote-branch/git-remote-branch.plugin.zsh @@ -6,7 +6,8 @@ _git_remote_branch() { compadd create publish rename delete track elif (( CURRENT == 3 )); then # second arg: remote branch name - compadd `git branch -r | grep -v HEAD | sed "s/.*\///" | sed "s/ //g"` + remotes=`git remote | tr '\n' '|' | sed "s/\|$//g"` + compadd `git branch -r | grep -v HEAD | sed "s/$remotes\///" | sed "s/ //g"` elif (( CURRENT == 4 )); then # third arg: remote name compadd `git remote` diff --git a/plugins/git/README.md b/plugins/git/README.md new file mode 100644 index 000000000..8462dda1c --- /dev/null +++ b/plugins/git/README.md @@ -0,0 +1,4 @@ +## git +**Maintainer:** [Stibbons](https://github.com/Stibbons) + +This plugin adds several git aliases and increase the completion function provided by zsh diff --git a/plugins/git/_git-branch b/plugins/git/_git-branch new file mode 100644 index 000000000..6b9c1a483 --- /dev/null +++ b/plugins/git/_git-branch @@ -0,0 +1,83 @@ +#compdef git-branch + +_git-branch () +{ + declare l c m d + + l='--color --no-color -r -a --all -v --verbose --abbrev --no-abbrev' + c='-l -f --force -t --track --no-track --set-upstream --contains --merged --no-merged' + m='-m -M' + d='-d -D' + + declare -a dependent_creation_args + if (( words[(I)-r] == 0 )); then + dependent_creation_args=( + "($l $m $d): :__git_branch_names" + "::start-point:__git_revisions") + fi + + declare -a dependent_deletion_args + if (( words[(I)-d] || words[(I)-D] )); then + dependent_creation_args= + dependent_deletion_args=( + '-r[delete only remote-tracking branches]') + if (( words[(I)-r] )); then + dependent_deletion_args+='*: :__git_ignore_line_inside_arguments __git_remote_branch_names' + else + dependent_deletion_args+='*: :__git_ignore_line_inside_arguments __git_branch_names' + fi + fi + + declare -a dependent_modification_args + if (( words[(I)-m] || words[(I)-M] )); then + dependent_creation_args= + dependent_modification_args=( + ':old or new branch name:__git_branch_names' + '::new branch name:__git_branch_names') + fi + + _arguments -w -S -s \ + "($c $m $d --no-color :)--color=-[turn on branch coloring]:: 
:__git_color_whens" \ + "($c $m $d : --color)--no-color[turn off branch coloring]" \ + "($c $m -a --all)-r[list or delete only remote-tracking branches]" \ + "($c $m $d : -r)"{-a,--all}"[list both remote-tracking branches and local branches]" \ + "($c $m $d : -v --verbose)"{-v,--verbose}'[show SHA1 and commit subject line for each head]' \ + "($c $m $d :)--abbrev=[set minimum SHA1 display-length]: :__git_guard_number length" \ + "($c $m $d :)--no-abbrev[do not abbreviate sha1s]" \ + "($l $m $d)-l[create the branch's reflog]" \ + "($l $m $d -f --force)"{-f,--force}"[force the creation of a new branch]" \ + "($l $m $d -t --track)"{-t,--track}"[set up configuration so that pull merges from the start point]" \ + "($l $m $d)--no-track[override the branch.autosetupmerge configuration variable]" \ + "($l $m $d)--set-upstream[set up configuration so that pull merges]" \ + "($l $m $d)--contains=[only list branches which contain the specified commit]: :__git_committishs" \ + "($l $m $d)--merged=[only list branches which are fully contained by HEAD]: :__git_committishs" \ + "($l $m $d)--no-merged=[do not list branches which are fully contained by HEAD]: :__git_committishs" \ + $dependent_creation_args \ + "($l $c $d -M)-m[rename a branch and the corresponding reflog]" \ + "($l $c $d -m)-M[rename a branch even if the new branch-name already exists]" \ + $dependent_modification_args \ + "($l $c $m -D)-d[delete a fully merged branch]" \ + "($l $c $m -d)-D[delete a branch]" \ + $dependent_deletion_args +} + +(( $+functions[__git_ignore_line] )) || +__git_ignore_line () { + declare -a ignored + ignored=() + ((CURRENT > 1)) && + ignored+=(${line[1,CURRENT-1]//(#m)[\[\]()\\*?#<>~\^]/\\$MATCH}) + ((CURRENT < $#line)) && + ignored+=(${line[CURRENT+1,-1]//(#m)[\[\]()\\*?#<>~\^]/\\$MATCH}) + $* -F ignored +} + +(( $+functions[__git_ignore_line_inside_arguments] )) || +__git_ignore_line_inside_arguments () { + declare -a compadd_opts + + zparseopts -D -E -a compadd_opts V: J: 1 2 n f X: M: P: S: r: R: q F: + + __git_ignore_line $* $compadd_opts +} + diff --git a/plugins/git/_git-remote b/plugins/git/_git-remote new file mode 100644 index 000000000..4ba62a357 --- /dev/null +++ b/plugins/git/_git-remote @@ -0,0 +1,74 @@ +#compdef git-remote + +# NOTE: --track is undocumented. +# TODO: --track, -t, --master, and -m should take remote branches, I guess. +# NOTE: --master is undocumented. +# NOTE: --fetch is undocumented. +_git-remote () { + local curcontext=$curcontext state line + declare -A opt_args + + _arguments -C \ + ':command:->command' \ + '*::options:->options' && ret=0 + + case $state in + (command) + declare -a commands + + commands=( + 'add:add a new remote' + 'show:show information about a given remote' + 'prune:delete all stale tracking branches for a given remote' + 'update:fetch updates for a set of remotes' + 'rm:remove a remote from .git/config and all associated tracking branches' + 'rename:rename a remote from .git/config and update all associated tracking branches' + 'set-head:sets or deletes the default branch' + 'set-branches:changes the list of branches tracked by the named remote.' + 'set-url:changes URL remote points to.' 
+  )
+
+  _describe -t commands 'sub-command' commands && ret=0
+  ;;
+  (options)
+  case $line[1] in
+    (add)
+    _arguments \
+      '*'{--track,-t}'[track given branch instead of default glob refspec]:branch:__git_branch_names' \
+      '(--master -m)'{--master,-m}'[set the remote'\''s HEAD to point to given master branch]:branch:__git_branch_names' \
+      '(--fetch -f)'{--fetch,-f}'[run git-fetch on the new remote after it has been created]' \
+      ':branch name:__git_remotes' \
+      ':url:_urls' && ret=0
+    ;;
+    (show)
+    _arguments \
+      '-n[do not contact the remote for a list of branches]' \
+      ':remote:__git_remotes' && ret=0
+    ;;
+    (prune)
+    _arguments \
+      '(--dry-run -n)'{-n,--dry-run}'[do not actually prune, only list what would be done]' \
+      ':remote:__git_remotes' && ret=0
+    ;;
+    (update)
+    __git_remote-groups && ret=0
+    ;;
+    (rm)
+    __git_remotes && ret=0
+    ;;
+    (rename)
+    __git_remotes && ret=0
+    ;;
+    (set-url)
+    _arguments \
+      '*--push[manipulate push URLs]' \
+      '(--add)--add[add URL]' \
+      '(--delete)--delete[delete URLs]' \
+      ':branch name:__git_remotes' \
+      ':url:_urls' && ret=0
+    ;;
+
+  esac
+  ;;
+  esac
+}
diff --git a/plugins/git/git.plugin.zsh b/plugins/git/git.plugin.zsh
index 79300a166..1ec4d6310 100644
--- a/plugins/git/git.plugin.zsh
+++ b/plugins/git/git.plugin.zsh
@@ -3,6 +3,12 @@ alias g='git'
 compdef g=git
 alias gst='git status'
 compdef _git gst=git-status
+alias gd='git diff'
+compdef _git gd=git-diff
+alias gdc='git diff --cached'
+compdef _git gdc=git-diff
+alias gdt='git diff-tree --no-commit-id --name-only -r'
+compdef _git gdt=git-diff-tree
 alias gl='git pull'
 compdef _git gl=git-pull
 alias gup='git pull --rebase'
@@ -14,8 +20,14 @@ gdv() { git diff -w "$@" | view - }
 compdef _git gdv=git-diff
 alias gc='git commit -v'
 compdef _git gc=git-commit
+alias gc!='git commit -v --amend'
+compdef _git gc!=git-commit
 alias gca='git commit -v -a'
-compdef _git gca=git-commit
+compdef _git gc=git-commit
+alias gca!='git commit -v -a --amend'
+compdef _git gca!=git-commit
+alias gcmsg='git commit -m'
+compdef _git gcmsg=git-commit
 alias gco='git checkout'
 compdef _git gco=git-checkout
 alias gcm='git checkout master'
@@ -31,6 +43,12 @@ alias grset='git remote set-url'
 compdef _git grset=git-remote
 alias grup='git remote update'
 compdef _git grset=git-remote
+alias grbi='git rebase -i'
+compdef _git grbi=git-rebase
+alias grbc='git rebase --continue'
+compdef _git grbc=git-rebase
+alias grba='git rebase --abort'
+compdef _git grba=git-rebase
 alias gb='git branch'
 compdef _git gb=git-branch
 alias gba='git branch -a'
@@ -40,12 +58,16 @@ compdef gcount=git
 alias gcl='git config --list'
 alias gcp='git cherry-pick'
 compdef _git gcp=git-cherry-pick
-alias glg='git log --stat --max-count=5'
+alias glg='git log --stat --max-count=10'
 compdef _git glg=git-log
-alias glgg='git log --graph --max-count=5'
+alias glgg='git log --graph --max-count=10'
 compdef _git glgg=git-log
 alias glgga='git log --graph --decorate --all'
 compdef _git glgga=git-log
+alias glo='git log --oneline --decorate --color'
+compdef _git glo=git-log
+alias glog='git log --oneline --decorate --color --graph'
+compdef _git glog=git-log
 alias gss='git status -s'
 compdef _git gss=git-status
 alias ga='git add'
@@ -54,9 +76,24 @@ alias gm='git merge'
 compdef _git gm=git-merge
 alias grh='git reset HEAD'
 alias grhh='git reset HEAD --hard'
+alias gclean='git reset --hard && git clean -dfx'
 alias gwc='git whatchanged -p --abbrev-commit --pretty=medium'
-alias gf='git ls-files | grep'
+
+#remove the gf alias
+#alias gf='git ls-files | grep'
+
 alias gpoat='git push origin --all && git push origin --tags'
+alias gmt='git mergetool --no-prompt'
+compdef _git gmt=git-mergetool
+
+alias gg='git gui citool'
+alias gga='git gui citool --amend'
+alias gk='gitk --all --branches'
+
+alias gsts='git stash show --text'
+alias gsta='git stash'
+alias gstp='git stash pop'
+alias gstd='git stash drop'
 
 # Will cd into the top of the current repository
 # or submodule.
@@ -89,7 +126,41 @@ function current_repository() {
 # these aliases take advantage of the previous function
 alias ggpull='git pull origin $(current_branch)'
 compdef ggpull=git
+alias ggpur='git pull --rebase origin $(current_branch)'
+compdef ggpur=git
 alias ggpush='git push origin $(current_branch)'
 compdef ggpush=git
 alias ggpnp='git pull origin $(current_branch) && git push origin $(current_branch)'
 compdef ggpnp=git
+
+# Pretty log messages
+function _git_log_prettily(){
+  if ! [ -z $1 ]; then
+    git log --pretty=$1
+  fi
+}
+alias glp="_git_log_prettily"
+compdef _git glp=git-log
+
+# Work In Progress (wip)
+# These features allow you to pause development on a branch and switch to another one (wip)
+# When you want to go back to work, just unwip it
+#
+# This function returns a warning if the current branch is a wip
+function work_in_progress() {
+  if $(git log -n 1 2>/dev/null | grep -q -c "\-\-wip\-\-"); then
+    echo "WIP!!"
+  fi
+}
+# these aliases commit and uncommit wip branches
+alias gwip='git add -A; git ls-files --deleted -z | xargs -r0 git rm; git commit -m "--wip--"'
+alias gunwip='git log -n 1 | grep -q -c "\-\-wip\-\-" && git reset HEAD~1'
+
+# these aliases ignore changes to a file
+alias gignore='git update-index --assume-unchanged'
+alias gunignore='git update-index --no-assume-unchanged'
+# list temporarily ignored files
+alias gignored='git ls-files -v | grep "^[[:lower:]]"'
+
+
+
diff --git a/plugins/gitfast/_git b/plugins/gitfast/_git
index 45775021f..fac5e711e 100644
--- a/plugins/gitfast/_git
+++ b/plugins/gitfast/_git
@@ -2,18 +2,19 @@
 # zsh completion wrapper for git
 #
-# You need git's bash completion script installed somewhere, by default on the
-# same directory as this script.
+# Copyright (c) 2012-2013 Felipe Contreras <felipe.contreras@gmail.com>
 #
-# If your script is on ~/.git-completion.sh instead, you can configure it on
-# your ~/.zshrc:
+# You need git's bash completion script installed somewhere, by default it
+# would be the location bash-completion uses.
+# +# If your script is somewhere else, you can configure it on your ~/.zshrc: # # zstyle ':completion:*:*:git:*' script ~/.git-completion.sh # -# The recommended way to install this script is to copy to -# '~/.zsh/completion/_git', and then add the following to your ~/.zshrc file: +# The recommended way to install this script is to copy to '~/.zsh/_git', and +# then add the following to your ~/.zshrc file: # -# fpath=(~/.zsh/completion $fpath) +# fpath=(~/.zsh $fpath) complete () { @@ -21,8 +22,23 @@ complete () return 0 } +zstyle -T ':completion:*:*:git:*' tag-order && \ + zstyle ':completion:*:*:git:*' tag-order 'common-commands' + zstyle -s ":completion:*:*:git:*" script script -test -z "$script" && script="$(dirname ${funcsourcetrace[1]%:*})"/git-completion.bash +if [ -z "$script" ]; then + local -a locations + local e + locations=( + '/etc/bash_completion.d/git' # fedora, old debian + '/usr/share/bash-completion/completions/git' # arch, ubuntu, new debian + '/usr/share/bash-completion/git' # gentoo + $(dirname ${funcsourcetrace[1]%:*})/git-completion.bash + ) + for e in $locations; do + test -f $e && script="$e" && break + done +fi ZSH_VERSION='' . "$script" __gitcomp () @@ -60,18 +76,140 @@ __gitcomp_nl () compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0 } +__gitcomp_file () +{ + emulate -L zsh + + local IFS=$'\n' + compset -P '*[=:]' + compadd -Q -p "${2-}" -f -- ${=1} && _ret=0 +} + +__git_zsh_bash_func () +{ + emulate -L ksh + + local command=$1 + + local completion_func="_git_${command//-/_}" + declare -f $completion_func >/dev/null && $completion_func && return + + local expansion=$(__git_aliased_command "$command") + if [ -n "$expansion" ]; then + completion_func="_git_${expansion//-/_}" + declare -f $completion_func >/dev/null && $completion_func + fi +} + +__git_zsh_cmd_common () +{ + local -a list + list=( + add:'add file contents to the index' + bisect:'find by binary search the change that introduced a bug' + branch:'list, create, or delete branches' + checkout:'checkout a branch or paths to the working tree' + clone:'clone a repository into a new directory' + commit:'record changes to the repository' + diff:'show changes between commits, commit and working tree, etc' + fetch:'download objects and refs from another repository' + grep:'print lines matching a pattern' + init:'create an empty Git repository or reinitialize an existing one' + log:'show commit logs' + merge:'join two or more development histories together' + mv:'move or rename a file, a directory, or a symlink' + pull:'fetch from and merge with another repository or a local branch' + push:'update remote refs along with associated objects' + rebase:'forward-port local commits to the updated upstream head' + reset:'reset current HEAD to the specified state' + rm:'remove files from the working tree and from the index' + show:'show various types of objects' + status:'show the working tree status' + tag:'create, list, delete or verify a tag object signed with GPG') + _describe -t common-commands 'common commands' list && _ret=0 +} + +__git_zsh_cmd_alias () +{ + local -a list + list=(${${${(0)"$(git config -z --get-regexp '^alias\.')"}#alias.}%$'\n'*}) + _describe -t alias-commands 'aliases' list $* && _ret=0 +} + +__git_zsh_cmd_all () +{ + local -a list + emulate ksh -c __git_compute_all_commands + list=( ${=__git_all_commands} ) + _describe -t all-commands 'all commands' list && _ret=0 +} + +__git_zsh_main () +{ + local curcontext="$curcontext" state state_descr line + typeset -A opt_args + local -a orig_words 
+ + orig_words=( ${words[@]} ) + + _arguments -C \ + '(-p --paginate --no-pager)'{-p,--paginate}'[pipe all output into ''less'']' \ + '(-p --paginate)--no-pager[do not pipe git output into a pager]' \ + '--git-dir=-[set the path to the repository]: :_directories' \ + '--bare[treat the repository as a bare repository]' \ + '(- :)--version[prints the git suite version]' \ + '--exec-path=-[path to where your core git programs are installed]:: :_directories' \ + '--html-path[print the path where git''s HTML documentation is installed]' \ + '--info-path[print the path where the Info files are installed]' \ + '--man-path[print the manpath (see `man(1)`) for the man pages]' \ + '--work-tree=-[set the path to the working tree]: :_directories' \ + '--namespace=-[set the git namespace]' \ + '--no-replace-objects[do not use replacement refs to replace git objects]' \ + '(- :)--help[prints the synopsis and a list of the most commonly used commands]: :->arg' \ + '(-): :->command' \ + '(-)*:: :->arg' && return + + case $state in + (command) + _alternative \ + 'alias-commands:alias:__git_zsh_cmd_alias' \ + 'common-commands:common:__git_zsh_cmd_common' \ + 'all-commands:all:__git_zsh_cmd_all' && _ret=0 + ;; + (arg) + local command="${words[1]}" __git_dir + + if (( $+opt_args[--bare] )); then + __git_dir='.' + else + __git_dir=${opt_args[--git-dir]} + fi + + (( $+opt_args[--help] )) && command='help' + + words=( ${orig_words[@]} ) + + __git_zsh_bash_func $command + ;; + esac +} + _git () { local _ret=1 - () { - emulate -L ksh - local cur cword prev - cur=${words[CURRENT-1]} - prev=${words[CURRENT-2]} - let cword=CURRENT-1 - __${service}_main - } - let _ret && _default -S '' && _ret=0 + local cur cword prev + + cur=${words[CURRENT]} + prev=${words[CURRENT-1]} + let cword=CURRENT-1 + + if (( $+functions[__${service}_zsh_main] )); then + __${service}_zsh_main + else + emulate ksh -c __${service}_main + fi + + let _ret && _default && _ret=0 return _ret } diff --git a/plugins/gitfast/git-completion.bash b/plugins/gitfast/git-completion.bash index be800e09b..5da920ecd 100644 --- a/plugins/gitfast/git-completion.bash +++ b/plugins/gitfast/git-completion.bash @@ -13,6 +13,7 @@ # *) .git/remotes file names # *) git 'subcommands' # *) tree paths within 'ref:path/to/file' expressions +# *) file paths within current working directory and index # *) common --long-options # # To use these routines: @@ -23,10 +24,6 @@ # 3) Consider changing your PS1 to also show the current branch, # see git-prompt.sh for details. -if [[ -n ${ZSH_VERSION-} ]]; then - autoload -U +X bashcompinit && bashcompinit -fi - case "$COMP_WORDBREAKS" in *:*) : great ;; *) COMP_WORDBREAKS="$COMP_WORDBREAKS:" @@ -36,8 +33,6 @@ esac # returns location of .git repo __gitdir () { - # Note: this function is duplicated in git-prompt.sh - # When updating it, make sure you update the other one to match. if [ -z "${1-}" ]; then if [ -n "${__git_dir-}" ]; then echo "$__git_dir" @@ -56,19 +51,6 @@ __gitdir () fi } -__gitcomp_1 () -{ - local c IFS=$' \t\n' - for c in $1; do - c="$c$2" - case $c in - --*=*|*.) ;; - *) c="$c " ;; - esac - printf '%s\n' "$c" - done -} - # The following function is based on code from: # # bash_completion - programmable completion functions for bash 3.2+ @@ -169,7 +151,6 @@ __git_reassemble_comp_words_by_ref() } if ! 
type _get_comp_words_by_ref >/dev/null 2>&1; then -if [[ -z ${ZSH_VERSION:+set} ]]; then _get_comp_words_by_ref () { local exclude cur_ words_ cword_ @@ -197,36 +178,20 @@ _get_comp_words_by_ref () shift done } -else -_get_comp_words_by_ref () +fi + +__gitcompadd () { - while [ $# -gt 0 ]; do - case "$1" in - cur) - cur=${COMP_WORDS[COMP_CWORD]} - ;; - prev) - prev=${COMP_WORDS[COMP_CWORD-1]} - ;; - words) - words=("${COMP_WORDS[@]}") - ;; - cword) - cword=$COMP_CWORD - ;; - -n) - # assume COMP_WORDBREAKS is already set sanely - shift - ;; - esac - shift + local i=0 + for x in $1; do + if [[ "$x" == "$3"* ]]; then + COMPREPLY[i++]="$2$x$4" + fi done } -fi -fi -# Generates completion reply with compgen, appending a space to possible -# completion words, if necessary. +# Generates completion reply, appending a space to possible completion words, +# if necessary. # It accepts 1 to 4 arguments: # 1: List of possible completion words. # 2: A prefix to be added to each possible completion word (optional). @@ -238,19 +203,25 @@ __gitcomp () case "$cur_" in --*=) - COMPREPLY=() ;; *) - local IFS=$'\n' - COMPREPLY=($(compgen -P "${2-}" \ - -W "$(__gitcomp_1 "${1-}" "${4-}")" \ - -- "$cur_")) + local c i=0 IFS=$' \t\n' + for c in $1; do + c="$c${4-}" + if [[ $c == "$cur_"* ]]; then + case $c in + --*=*|*.) ;; + *) c="$c " ;; + esac + COMPREPLY[i++]="${2-}$c" + fi + done ;; esac } -# Generates completion reply with compgen from newline-separated possible -# completion words by appending a space to all of them. +# Generates completion reply from newline-separated possible completion words +# by appending a space to all of them. # It accepts 1 to 4 arguments: # 1: List of possible completion words, separated by a single newline. # 2: A prefix to be added to each possible completion word (optional). @@ -261,7 +232,69 @@ __gitcomp () __gitcomp_nl () { local IFS=$'\n' - COMPREPLY=($(compgen -P "${2-}" -S "${4- }" -W "$1" -- "${3-$cur}")) + __gitcompadd "$1" "${2-}" "${3-$cur}" "${4- }" +} + +# Generates completion reply with compgen from newline-separated possible +# completion filenames. +# It accepts 1 to 3 arguments: +# 1: List of possible completion filenames, separated by a single newline. +# 2: A directory prefix to be added to each possible completion filename +# (optional). +# 3: Generate possible completion matches for this word (optional). +__gitcomp_file () +{ + local IFS=$'\n' + + # XXX does not work when the directory prefix contains a tilde, + # since tilde expansion is not applied. + # This means that COMPREPLY will be empty and Bash default + # completion will be used. + __gitcompadd "$1" "${2-}" "${3-$cur}" "" + + # use a hack to enable file mode in bash < 4 + compopt -o filenames +o nospace 2>/dev/null || + compgen -f /non-existing-dir/ > /dev/null +} + +# Execute 'git ls-files', unless the --committable option is specified, in +# which case it runs 'git diff-index' to find out the files that can be +# committed. It return paths relative to the directory specified in the first +# argument, and using the options specified in the second argument. +__git_ls_files_helper () +{ + ( + test -n "${CDPATH+set}" && unset CDPATH + cd "$1" + if [ "$2" == "--committable" ]; then + git diff-index --name-only --relative HEAD + else + # NOTE: $2 is not quoted in order to support multiple options + git ls-files --exclude-standard $2 + fi + ) 2>/dev/null +} + + +# __git_index_files accepts 1 or 2 arguments: +# 1: Options to pass to ls-files (required). +# 2: A directory path (optional). 
+# If provided, only files within the specified directory are listed. +# Sub directories are never recursed. Path must have a trailing +# slash. +__git_index_files () +{ + local dir="$(__gitdir)" root="${2-.}" file + + if [ -d "$dir" ]; then + __git_ls_files_helper "$root" "$1" | + while read -r file; do + case "$file" in + ?*/*) echo "${file%%/*}" ;; + *) echo "$file" ;; + esac + done | sort | uniq + fi } __git_heads () @@ -321,7 +354,7 @@ __git_refs () if [[ "$ref" == "$cur"* ]]; then echo "$ref" fi - done | uniq -u + done | sort | uniq -u fi return fi @@ -336,14 +369,8 @@ __git_refs () done ;; *) - git ls-remote "$dir" HEAD ORIG_HEAD 'refs/tags/*' 'refs/heads/*' 'refs/remotes/*' 2>/dev/null | \ - while read -r hash i; do - case "$i" in - *^{}) ;; - refs/*) echo "${i#refs/*/}" ;; - *) echo "$i" ;; - esac - done + echo "HEAD" + git for-each-ref --format="%(refname:short)" -- "refs/remotes/$dir/" | sed -e "s#^$dir/##" ;; esac } @@ -428,7 +455,7 @@ __git_complete_revlist_file () *) pfx="$ref:$pfx" ;; esac - __gitcomp_nl "$(git --git-dir="$(__gitdir)" ls-tree "$ls" \ + __gitcomp_nl "$(git --git-dir="$(__gitdir)" ls-tree "$ls" 2>/dev/null \ | sed '/^100... blob /{ s,^.* ,, s,$, , @@ -461,6 +488,25 @@ __git_complete_revlist_file () } +# __git_complete_index_file requires 1 argument: +# 1: the options to pass to ls-file +# +# The exception is --committable, which finds the files appropriate commit. +__git_complete_index_file () +{ + local pfx="" cur_="$cur" + + case "$cur_" in + ?*/*) + pfx="${cur_%/*}" + cur_="${cur_##*/}" + pfx="${pfx}/" + ;; + esac + + __gitcomp_file "$(__git_index_files "$1" "$pfx")" "$pfx" "$cur_" +} + __git_complete_file () { __git_complete_revlist_file @@ -486,7 +532,6 @@ __git_complete_remote_or_refspec () case "$cmd" in push) no_complete_refspec=1 ;; fetch) - COMPREPLY=() return ;; *) ;; @@ -502,7 +547,6 @@ __git_complete_remote_or_refspec () return fi if [ $no_complete_refspec = 1 ]; then - COMPREPLY=() return fi [ "$remote" = "." ] && remote= @@ -562,10 +606,19 @@ __git_complete_strategy () return 1 } +__git_commands () { + if test -n "${GIT_TESTING_COMMAND_COMPLETION:-}" + then + printf "%s" "${GIT_TESTING_COMMAND_COMPLETION}" + else + git help -a|egrep '^ [a-zA-Z0-9]' + fi +} + __git_list_all_commands () { local i IFS=" "$'\n' - for i in $(git help -a|egrep '^ [a-zA-Z0-9]') + for i in $(__git_commands) do case $i in *--*) : helper pattern;; @@ -585,7 +638,7 @@ __git_list_porcelain_commands () { local i IFS=" "$'\n' __git_compute_all_commands - for i in "help" $__git_all_commands + for i in $__git_all_commands do case $i in *--*) : helper pattern;; @@ -594,6 +647,8 @@ __git_list_porcelain_commands () archimport) : import;; cat-file) : plumbing;; check-attr) : plumbing;; + check-ignore) : plumbing;; + check-mailmap) : plumbing;; check-ref-format) : plumbing;; checkout-index) : plumbing;; commit-tree) : plumbing;; @@ -753,6 +808,43 @@ __git_has_doubledash () return 1 } +# Try to count non option arguments passed on the command line for the +# specified git command. +# When options are used, it is necessary to use the special -- option to +# tell the implementation were non option arguments begin. +# XXX this can not be improved, since options can appear everywhere, as +# an example: +# git mv x -n y +# +# __git_count_arguments requires 1 argument: the git command executed. 
+__git_count_arguments () +{ + local word i c=0 + + # Skip "git" (first argument) + for ((i=1; i < ${#words[@]}; i++)); do + word="${words[i]}" + + case "$word" in + --) + # Good; we can assume that the following are only non + # option arguments. + ((c = 0)) + ;; + "$1") + # Skip the specified git command and discard git + # main options + ((c = 0)) + ;; + ?*) + ((c++)) + ;; + esac + done + + printf "%d" $c +} + __git_whitespacelist="nowarn warn error error-all fix" _git_am () @@ -776,7 +868,6 @@ _git_am () " return esac - COMPREPLY=() } _git_apply () @@ -796,13 +887,10 @@ _git_apply () " return esac - COMPREPLY=() } _git_add () { - __git_has_doubledash && return - case "$cur" in --*) __gitcomp " @@ -811,7 +899,9 @@ _git_add () " return esac - COMPREPLY=() + + # XXX should we check for --update and --all options ? + __git_complete_index_file "--others --modified" } _git_archive () @@ -856,7 +946,6 @@ _git_bisect () __gitcomp_nl "$(__git_refs)" ;; *) - COMPREPLY=() ;; esac } @@ -949,9 +1038,14 @@ _git_cherry () _git_cherry_pick () { + local dir="$(__gitdir)" + if [ -f "$dir"/CHERRY_PICK_HEAD ]; then + __gitcomp "--continue --quit --abort" + return + fi case "$cur" in --*) - __gitcomp "--edit --no-commit" + __gitcomp "--edit --no-commit --signoff --strategy= --mainline" ;; *) __gitcomp_nl "$(__git_refs)" @@ -961,15 +1055,15 @@ _git_cherry_pick () _git_clean () { - __git_has_doubledash && return - case "$cur" in --*) __gitcomp "--dry-run --quiet" return ;; esac - COMPREPLY=() + + # XXX should we check for -x option ? + __git_complete_index_file "--others" } _git_clone () @@ -989,16 +1083,22 @@ _git_clone () --upload-pack --template= --depth + --single-branch + --branch " return ;; esac - COMPREPLY=() } _git_commit () { - __git_has_doubledash && return + case "$prev" in + -c|-C) + __gitcomp_nl "$(__git_refs)" "" "${cur}" + return + ;; + esac case "$cur" in --cleanup=*) @@ -1027,7 +1127,13 @@ _git_commit () " return esac - COMPREPLY=() + + if git rev-parse --verify --quiet HEAD >/dev/null; then + __git_complete_index_file "--committable" + else + # This is the first commit + __git_complete_index_file "--cached" + fi } _git_describe () @@ -1043,6 +1149,8 @@ _git_describe () __gitcomp_nl "$(__git_refs)" } +__git_diff_algorithms="myers minimal patience histogram" + __git_diff_common_options="--stat --numstat --shortstat --summary --patch-with-stat --name-only --name-status --color --no-color --color-words --no-renames --check @@ -1053,10 +1161,11 @@ __git_diff_common_options="--stat --numstat --shortstat --summary --no-ext-diff --no-prefix --src-prefix= --dst-prefix= --inter-hunk-context= - --patience - --raw + --patience --histogram --minimal + --raw --word-diff --dirstat --dirstat= --dirstat-by-file --dirstat-by-file= --cumulative + --diff-algorithm= " _git_diff () @@ -1064,6 +1173,10 @@ _git_diff () __git_has_doubledash && return case "$cur" in + --diff-algorithm=*) + __gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}" + return + ;; --*) __gitcomp "--cached --staged --pickaxe-all --pickaxe-regex --base --ours --theirs --no-index @@ -1097,7 +1210,7 @@ _git_difftool () return ;; esac - __git_complete_file + __git_complete_revlist_file } __git_fetch_options=" @@ -1116,6 +1229,15 @@ _git_fetch () __git_complete_remote_or_refspec } +__git_format_patch_options=" + --stdout --attach --no-attach --thread --thread= --no-thread + --numbered --start-number --numbered-files --keep-subject --signoff + --signature --no-signature --in-reply-to= --cc= --full-index --binary + --not --all 
--cover-letter --no-prefix --src-prefix= --dst-prefix= + --inline --suffix= --ignore-if-in-upstream --subject-prefix= + --output-directory --reroll-count --to= --quiet --notes +" + _git_format_patch () { case "$cur" in @@ -1126,21 +1248,7 @@ _git_format_patch () return ;; --*) - __gitcomp " - --stdout --attach --no-attach --thread --thread= - --output-directory - --numbered --start-number - --numbered-files - --keep-subject - --signoff --signature --no-signature - --in-reply-to= --cc= - --full-index --binary - --not --all - --cover-letter - --no-prefix --src-prefix= --dst-prefix= - --inline --suffix= --ignore-if-in-upstream - --subject-prefix= - " + __gitcomp "$__git_format_patch_options" return ;; esac @@ -1158,7 +1266,6 @@ _git_fsck () return ;; esac - COMPREPLY=() } _git_gc () @@ -1169,7 +1276,6 @@ _git_gc () return ;; esac - COMPREPLY=() } _git_gitk () @@ -1246,13 +1352,10 @@ _git_init () return ;; esac - COMPREPLY=() } _git_ls_files () { - __git_has_doubledash && return - case "$cur" in --*) __gitcomp "--cached --deleted --modified --others --ignored @@ -1265,7 +1368,10 @@ _git_ls_files () return ;; esac - COMPREPLY=() + + # XXX ignore options like --modified and always suggest all cached + # files. + __git_complete_index_file "--cached" } _git_ls_remote () @@ -1381,7 +1487,6 @@ _git_mergetool () return ;; esac - COMPREPLY=() } _git_merge_base () @@ -1397,7 +1502,14 @@ _git_mv () return ;; esac - COMPREPLY=() + + if [ $(__git_count_arguments "mv") -gt 0 ]; then + # We need to show both cached and untracked files (including + # empty directories) since this may not be the last argument. + __git_complete_index_file "--cached --others --directory" + else + __git_complete_index_file "--cached" + fi } _git_name_rev () @@ -1554,6 +1666,12 @@ _git_send_email () __gitcomp "ssl tls" "" "${cur##--smtp-encryption=}" return ;; + --thread=*) + __gitcomp " + deep shallow + " "" "${cur##--thread=}" + return + ;; --*) __gitcomp "--annotate --bcc --cc --cc-cmd --chain-reply-to --compose --confirm= --dry-run --envelope-sender @@ -1563,11 +1681,12 @@ _git_send_email () --signed-off-by-cc --smtp-pass --smtp-server --smtp-server-port --smtp-encryption= --smtp-user --subject --suppress-cc= --suppress-from --thread --to - --validate --no-validate" + --validate --no-validate + $__git_format_patch_options" return ;; esac - COMPREPLY=() + __git_complete_revlist } _git_stage () @@ -1581,7 +1700,7 @@ __git_config_get_set_variables () while [ $c -gt 1 ]; do word="${words[c]}" case "$word" in - --global|--system|--file=*) + --system|--global|--local|--file=*) config_file="$word" break ;; @@ -1608,7 +1727,7 @@ __git_config_get_set_variables () _git_config () { case "$prev" in - branch.*.remote) + branch.*.remote|branch.*.pushremote) __gitcomp_nl "$(__git_remotes)" return ;; @@ -1616,11 +1735,19 @@ _git_config () __gitcomp_nl "$(__git_refs)" return ;; + branch.*.rebase) + __gitcomp "false true" + return + ;; + remote.pushdefault) + __gitcomp_nl "$(__git_remotes)" + return + ;; remote.*.fetch) local remote="${prev#remote.}" remote="${remote%.fetch}" if [ -z "$cur" ]; then - COMPREPLY=("refs/heads/") + __gitcomp_nl "refs/heads/" "" "" "" return fi __gitcomp_nl "$(__git_refs_remotes "$remote")" @@ -1655,6 +1782,10 @@ _git_config () " return ;; + diff.submodule) + __gitcomp "log short" + return + ;; help.format) __gitcomp "man info web html" return @@ -1680,14 +1811,13 @@ _git_config () return ;; *.*) - COMPREPLY=() return ;; esac case "$cur" in --*) __gitcomp " - --global --system --file= + --system --global --local 
--file= --list --replace-all --get --get-all --get-regexp --add --unset --unset-all @@ -1697,7 +1827,7 @@ _git_config () ;; branch.*.*) local pfx="${cur%.*}." cur_="${cur##*.}" - __gitcomp "remote merge mergeoptions rebase" "$pfx" "$cur_" + __gitcomp "remote pushremote merge mergeoptions rebase" "$pfx" "$cur_" return ;; branch.*) @@ -1828,7 +1958,6 @@ _git_config () core.fileMode core.fsyncobjectfiles core.gitProxy - core.ignoreCygwinFSTricks core.ignoreStat core.ignorecase core.logAllRefUpdates @@ -1850,16 +1979,18 @@ _git_config () core.whitespace core.worktree diff.autorefreshindex - diff.statGraphWidth diff.external diff.ignoreSubmodules diff.mnemonicprefix diff.noprefix diff.renameLimit diff.renames + diff.statGraphWidth + diff.submodule diff.suppressBlankEmpty diff.tool diff.wordRegex + diff.algorithm difftool. difftool.prompt fetch.recurseSubmodules @@ -1990,6 +2121,7 @@ _git_config () receive.fsckObjects receive.unpackLimit receive.updateserverinfo + remote.pushdefault remotes. repack.usedeltabaseoffset rerere.autoupdate @@ -2060,7 +2192,6 @@ _git_remote () __gitcomp "$c" ;; *) - COMPREPLY=() ;; esac } @@ -2096,15 +2227,14 @@ _git_revert () _git_rm () { - __git_has_doubledash && return - case "$cur" in --*) __gitcomp "--cached --dry-run --ignore-unmatch --quiet" return ;; esac - COMPREPLY=() + + __git_complete_index_file "--cached" } _git_shortlog () @@ -2134,6 +2264,10 @@ _git_show () " "" "${cur#*=}" return ;; + --diff-algorithm=*) + __gitcomp "$__git_diff_algorithms" "" "${cur##--diff-algorithm=}" + return + ;; --*) __gitcomp "--pretty= --format= --abbrev-commit --oneline $__git_diff_common_options @@ -2141,7 +2275,7 @@ _git_show () return ;; esac - __git_complete_file + __git_complete_revlist_file } _git_show_branch () @@ -2173,8 +2307,6 @@ _git_stash () *) if [ -z "$(__git_find_on_cmdline "$save_opts")" ]; then __gitcomp "$subcommands" - else - COMPREPLY=() fi ;; esac @@ -2187,14 +2319,12 @@ _git_stash () __gitcomp "--index --quiet" ;; show,--*|drop,--*|branch,--*) - COMPREPLY=() ;; show,*|apply,*|drop,*|pop,*|branch,*) __gitcomp_nl "$(git --git-dir="$(__gitdir)" stash list \ | sed -n -e 's/:.*//p')" ;; *) - COMPREPLY=() ;; esac fi @@ -2204,7 +2334,7 @@ _git_submodule () { __git_has_doubledash && return - local subcommands="add status init update summary foreach sync" + local subcommands="add status init deinit update summary foreach sync" if [ -z "$(__git_find_on_cmdline "$subcommands")" ]; then case "$cur" in --*) @@ -2236,7 +2366,7 @@ _git_svn () --no-metadata --use-svm-props --use-svnsync-props --log-window-size= --no-checkout --quiet --repack-flags --use-log-author --localtime - --ignore-paths= $remote_opts + --ignore-paths= --include-paths= $remote_opts " local init_opts=" --template= --shared= --trunk= --tags= @@ -2311,7 +2441,6 @@ _git_svn () __gitcomp "--revision= --parent" ;; *) - COMPREPLY=() ;; esac fi @@ -2336,13 +2465,10 @@ _git_tag () case "$prev" in -m|-F) - COMPREPLY=() ;; -*|tag) if [ $f = 1 ]; then __gitcomp_nl "$(__git_tags)" - else - COMPREPLY=() fi ;; *) @@ -2364,9 +2490,10 @@ __git_main () i="${words[c]}" case "$i" in --git-dir=*) __git_dir="${i#--git-dir=}" ;; + --git-dir) ((c++)) ; __git_dir="${words[c]}" ;; --bare) __git_dir="." 
;; --help) command="help"; break ;; - -c) c=$((++c)) ;; + -c|--work-tree|--namespace) ((c++)) ;; -*) ;; *) command="$i"; break ;; esac @@ -2384,6 +2511,7 @@ __git_main () --exec-path --exec-path= --html-path + --man-path --info-path --work-tree= --namespace= @@ -2429,20 +2557,72 @@ __gitk_main () __git_complete_revlist } -__git_func_wrap () -{ - if [[ -n ${ZSH_VERSION-} ]]; then - emulate -L bash - setopt KSH_TYPESET +if [[ -n ${ZSH_VERSION-} ]]; then + echo "WARNING: this script is deprecated, please see git-completion.zsh" 1>&2 - # workaround zsh's bug that leaves 'words' as a special - # variable in versions < 4.3.12 - typeset -h words + autoload -U +X compinit && compinit - # workaround zsh's bug that quotes spaces in the COMPREPLY - # array if IFS doesn't contain spaces. - typeset -h IFS - fi + __gitcomp () + { + emulate -L zsh + + local cur_="${3-$cur}" + + case "$cur_" in + --*=) + ;; + *) + local c IFS=$' \t\n' + local -a array + for c in ${=1}; do + c="$c${4-}" + case $c in + --*=*|*.) ;; + *) c="$c " ;; + esac + array[$#array+1]="$c" + done + compset -P '*[=:]' + compadd -Q -S '' -p "${2-}" -a -- array && _ret=0 + ;; + esac + } + + __gitcomp_nl () + { + emulate -L zsh + + local IFS=$'\n' + compset -P '*[=:]' + compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0 + } + + __gitcomp_file () + { + emulate -L zsh + + local IFS=$'\n' + compset -P '*[=:]' + compadd -Q -p "${2-}" -f -- ${=1} && _ret=0 + } + + _git () + { + local _ret=1 cur cword prev + cur=${words[CURRENT]} + prev=${words[CURRENT-1]} + let cword=CURRENT-1 + emulate ksh -c __${service}_main + let _ret && _default && _ret=0 + return _ret + } + + compdef _git git gitk + return +fi + +__git_func_wrap () +{ local cur words cword prev _get_comp_words_by_ref -n =: cur words cword prev $1 diff --git a/plugins/gitfast/git-prompt.sh b/plugins/gitfast/git-prompt.sh index bf20491ec..a81ef5a48 100644 --- a/plugins/gitfast/git-prompt.sh +++ b/plugins/gitfast/git-prompt.sh @@ -3,20 +3,37 @@ # Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org> # Distributed under the GNU General Public License, version 2.0. # -# This script allows you to see the current branch in your prompt. +# This script allows you to see repository status in your prompt. # # To enable: # # 1) Copy this file to somewhere (e.g. ~/.git-prompt.sh). # 2) Add the following line to your .bashrc/.zshrc: # source ~/.git-prompt.sh -# 3) Change your PS1 to also show the current branch: -# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ ' -# ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ ' +# 3a) Change your PS1 to call __git_ps1 as +# command-substitution: +# Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ ' +# ZSH: setopt PROMPT_SUBST ; PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ ' +# the optional argument will be used as format string. +# 3b) Alternatively, for a slightly faster prompt, __git_ps1 can +# be used for PROMPT_COMMAND in Bash or for precmd() in Zsh +# with two parameters, <pre> and <post>, which are strings +# you would put in $PS1 before and after the status string +# generated by the git-prompt machinery. e.g. +# Bash: PROMPT_COMMAND='__git_ps1 "\u@\h:\w" "\\\$ "' +# will show username, at-sign, host, colon, cwd, then +# various status string, followed by dollar and SP, as +# your prompt. +# ZSH: precmd () { __git_ps1 "%n" ":%~$ " "|%s" } +# will show username, pipe, then various status string, +# followed by colon, cwd, dollar and SP, as your prompt. 
+# Optionally, you can supply a third argument with a printf +# format string to finetune the output of the branch status # -# The argument to __git_ps1 will be displayed only if you are currently -# in a git repository. The %s token will be the name of the current -# branch. +# The repository status will be displayed only if you are currently in a +# git repository. The %s token is the placeholder for the shown status. +# +# The prompt status always includes the current branch name. # # In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty value, # unstaged (*) and staged (+) changes will be shown next to the branch @@ -30,7 +47,10 @@ # # If you would like to see if there're untracked files, then you can set # GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there're untracked -# files, then a '%' will be shown next to the branch name. +# files, then a '%' will be shown next to the branch name. You can +# configure this per-repository with the bash.showUntrackedFiles +# variable, which defaults to true once GIT_PS1_SHOWUNTRACKEDFILES is +# enabled. # # If you would like to see the difference between HEAD and its upstream, # set GIT_PS1_SHOWUPSTREAM="auto". A "<" indicates you are behind, ">" @@ -49,30 +69,20 @@ # find one, or @{upstream} otherwise. Once you have set # GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by # setting the bash.showUpstream config variable. - -# __gitdir accepts 0 or 1 arguments (i.e., location) -# returns location of .git repo -__gitdir () -{ - # Note: this function is duplicated in git-completion.bash - # When updating it, make sure you update the other one to match. - if [ -z "${1-}" ]; then - if [ -n "${__git_dir-}" ]; then - echo "$__git_dir" - elif [ -n "${GIT_DIR-}" ]; then - test -d "${GIT_DIR-}" || return 1 - echo "$GIT_DIR" - elif [ -d .git ]; then - echo .git - else - git rev-parse --git-dir 2>/dev/null - fi - elif [ -d "$1/.git" ]; then - echo "$1/.git" - else - echo "$1" - fi -} +# +# If you would like to see more information about the identity of +# commits checked out as a detached HEAD, set GIT_PS1_DESCRIBE_STYLE +# to one of these values: +# +# contains relative to newer annotated tag (v1.6.3.2~35) +# branch relative to newer tag or branch (master~4) +# describe relative to older annotated tag (v1.6.3.1-13-gdd42c2f) +# default exactly matching tag +# +# If you would like a colored hint about the current dirty state, set +# GIT_PS1_SHOWCOLORHINTS to a nonempty value. The colors are based on +# the colored output of "git status -sb" and are available only when +# using __git_ps1 for PROMPT_COMMAND or precmd. # stores the divergence from upstream in $p # used by GIT_PS1_SHOWUPSTREAM @@ -95,7 +105,7 @@ __git_ps1_show_upstream () fi ;; svn-remote.*.url) - svn_remote[ $((${#svn_remote[@]} + 1)) ]="$value" + svn_remote[$((${#svn_remote[@]} + 1))]="$value" svn_url_pattern+="\\|$value" upstream=svn+git # default upstream is SVN if available, else git ;; @@ -117,10 +127,11 @@ __git_ps1_show_upstream () svn*) # get the upstream from the "git-svn-id: ..." 
in a commit message # (git-svn uses essentially the same procedure internally) - local svn_upstream=($(git log --first-parent -1 \ + local -a svn_upstream + svn_upstream=($(git log --first-parent -1 \ --grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null)) if [[ 0 -ne ${#svn_upstream[@]} ]]; then - svn_upstream=${svn_upstream[ ${#svn_upstream[@]} - 2 ]} + svn_upstream=${svn_upstream[${#svn_upstream[@]} - 2]} svn_upstream=${svn_upstream%@*} local n_stop="${#svn_remote[@]}" for ((n=1; n <= n_stop; n++)); do @@ -193,40 +204,161 @@ __git_ps1_show_upstream () } +# Helper function that is meant to be called from __git_ps1. It +# injects color codes into the appropriate gitstring variables used +# to build a gitstring. +__git_ps1_colorize_gitstring () +{ + if [[ -n ${ZSH_VERSION-} ]]; then + local c_red='%F{red}' + local c_green='%F{green}' + local c_lblue='%F{blue}' + local c_clear='%f' + else + # Using \[ and \] around colors is necessary to prevent + # issues with command line editing/browsing/completion! + local c_red='\[\e[31m\]' + local c_green='\[\e[32m\]' + local c_lblue='\[\e[1;34m\]' + local c_clear='\[\e[0m\]' + fi + local bad_color=$c_red + local ok_color=$c_green + local flags_color="$c_lblue" + + local branch_color="" + if [ $detached = no ]; then + branch_color="$ok_color" + else + branch_color="$bad_color" + fi + c="$branch_color$c" + + z="$c_clear$z" + if [ "$w" = "*" ]; then + w="$bad_color$w" + fi + if [ -n "$i" ]; then + i="$ok_color$i" + fi + if [ -n "$s" ]; then + s="$flags_color$s" + fi + if [ -n "$u" ]; then + u="$bad_color$u" + fi + r="$c_clear$r" +} # __git_ps1 accepts 0 or 1 arguments (i.e., format string) -# returns text to add to bash PS1 prompt (includes branch name) +# when called from PS1 using command substitution +# in this mode it prints text to add to bash PS1 prompt (includes branch name) +# +# __git_ps1 requires 2 or 3 arguments when called from PROMPT_COMMAND (pc) +# in that case it _sets_ PS1. The arguments are parts of a PS1 string. +# when two arguments are given, the first is prepended and the second appended +# to the state string when assigned to PS1. +# The optional third parameter will be used as printf format string to further +# customize the output of the git-status string. +# In this mode you can request colored hints using GIT_PS1_SHOWCOLORHINTS=true __git_ps1 () { - local g="$(__gitdir)" - if [ -n "$g" ]; then - local r="" - local b="" + local pcmode=no + local detached=no + local ps1pc_start='\u@\h:\w ' + local ps1pc_end='\$ ' + local printf_format=' (%s)' + + case "$#" in + 2|3) pcmode=yes + ps1pc_start="$1" + ps1pc_end="$2" + printf_format="${3:-$printf_format}" + ;; + 0|1) printf_format="${1:-$printf_format}" + ;; + *) return + ;; + esac + + local repo_info rev_parse_exit_code + repo_info="$(git rev-parse --git-dir --is-inside-git-dir \ + --is-bare-repository --is-inside-work-tree \ + --short HEAD 2>/dev/null)" + rev_parse_exit_code="$?" 
+ + if [ -z "$repo_info" ]; then + if [ $pcmode = yes ]; then + #In PC mode PS1 always needs to be set + PS1="$ps1pc_start$ps1pc_end" + fi + return + fi + + local short_sha + if [ "$rev_parse_exit_code" = "0" ]; then + short_sha="${repo_info##*$'\n'}" + repo_info="${repo_info%$'\n'*}" + fi + local inside_worktree="${repo_info##*$'\n'}" + repo_info="${repo_info%$'\n'*}" + local bare_repo="${repo_info##*$'\n'}" + repo_info="${repo_info%$'\n'*}" + local inside_gitdir="${repo_info##*$'\n'}" + local g="${repo_info%$'\n'*}" + + local r="" + local b="" + local step="" + local total="" + if [ -d "$g/rebase-merge" ]; then + read b 2>/dev/null <"$g/rebase-merge/head-name" + read step 2>/dev/null <"$g/rebase-merge/msgnum" + read total 2>/dev/null <"$g/rebase-merge/end" if [ -f "$g/rebase-merge/interactive" ]; then r="|REBASE-i" - b="$(cat "$g/rebase-merge/head-name")" - elif [ -d "$g/rebase-merge" ]; then + else r="|REBASE-m" - b="$(cat "$g/rebase-merge/head-name")" + fi + else + if [ -d "$g/rebase-apply" ]; then + read step 2>/dev/null <"$g/rebase-apply/next" + read total 2>/dev/null <"$g/rebase-apply/last" + if [ -f "$g/rebase-apply/rebasing" ]; then + read b 2>/dev/null <"$g/rebase-apply/head-name" + r="|REBASE" + elif [ -f "$g/rebase-apply/applying" ]; then + r="|AM" + else + r="|AM/REBASE" + fi + elif [ -f "$g/MERGE_HEAD" ]; then + r="|MERGING" + elif [ -f "$g/CHERRY_PICK_HEAD" ]; then + r="|CHERRY-PICKING" + elif [ -f "$g/REVERT_HEAD" ]; then + r="|REVERTING" + elif [ -f "$g/BISECT_LOG" ]; then + r="|BISECTING" + fi + + if [ -n "$b" ]; then + : + elif [ -h "$g/HEAD" ]; then + # symlink symbolic ref + b="$(git symbolic-ref HEAD 2>/dev/null)" else - if [ -d "$g/rebase-apply" ]; then - if [ -f "$g/rebase-apply/rebasing" ]; then - r="|REBASE" - elif [ -f "$g/rebase-apply/applying" ]; then - r="|AM" - else - r="|AM/REBASE" + local head="" + if ! read head 2>/dev/null <"$g/HEAD"; then + if [ $pcmode = yes ]; then + PS1="$ps1pc_start$ps1pc_end" fi - elif [ -f "$g/MERGE_HEAD" ]; then - r="|MERGING" - elif [ -f "$g/CHERRY_PICK_HEAD" ]; then - r="|CHERRY-PICKING" - elif [ -f "$g/BISECT_LOG" ]; then - r="|BISECTING" + return fi - - b="$(git symbolic-ref HEAD 2>/dev/null)" || { - + # is it a symbolic ref? + b="${head#ref: }" + if [ "$head" = "$b" ]; then + detached=yes b="$( case "${GIT_PS1_DESCRIBE_STYLE-}" in (contains) @@ -239,52 +371,75 @@ __git_ps1 () git describe --tags --exact-match HEAD ;; esac 2>/dev/null)" || - b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." || - b="unknown" + b="$short_sha..." b="($b)" - } + fi fi + fi + + if [ -n "$step" ] && [ -n "$total" ]; then + r="$r $step/$total" + fi - local w="" - local i="" - local s="" - local u="" - local c="" - local p="" + local w="" + local i="" + local s="" + local u="" + local c="" + local p="" - if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then - if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then - c="BARE:" + if [ "true" = "$inside_gitdir" ]; then + if [ "true" = "$bare_repo" ]; then + c="BARE:" + else + b="GIT_DIR!" + fi + elif [ "true" = "$inside_worktree" ]; then + if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ] && + [ "$(git config --bool bash.showDirtyState)" != "false" ] + then + git diff --no-ext-diff --quiet --exit-code || w="*" + if [ -n "$short_sha" ]; then + git diff-index --cached --quiet HEAD -- || i="+" else - b="GIT_DIR!" 
- fi - elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then - if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ]; then - if [ "$(git config --bool bash.showDirtyState)" != "false" ]; then - git diff --no-ext-diff --quiet --exit-code || w="*" - if git rev-parse --quiet --verify HEAD >/dev/null; then - git diff-index --cached --quiet HEAD -- || i="+" - else - i="#" - fi - fi - fi - if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then - git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$" + i="#" fi + fi + if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ] && + [ -r "$g/refs/stash" ]; then + s="$" + fi - if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ]; then - if [ -n "$(git ls-files --others --exclude-standard)" ]; then - u="%" - fi - fi + if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ] && + [ "$(git config --bool bash.showUntrackedFiles)" != "false" ] && + git ls-files --others --exclude-standard --error-unmatch -- '*' >/dev/null 2>/dev/null + then + u="%${ZSH_VERSION+%}" + fi - if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then - __git_ps1_show_upstream - fi + if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then + __git_ps1_show_upstream fi + fi + + local z="${GIT_PS1_STATESEPARATOR-" "}" + + # NO color option unless in PROMPT_COMMAND mode + if [ $pcmode = yes ] && [ -n "${GIT_PS1_SHOWCOLORHINTS-}" ]; then + __git_ps1_colorize_gitstring + fi - local f="$w$i$s$u" - printf -- "${1:- (%s)}" "$c${b##refs/heads/}${f:+ $f}$r$p" + local f="$w$i$s$u" + local gitstring="$c${b##refs/heads/}${f:+$z$f}$r$p" + + if [ $pcmode = yes ]; then + if [[ -n ${ZSH_VERSION-} ]]; then + gitstring=$(printf -- "$printf_format" "$gitstring") + else + printf -v gitstring -- "$printf_format" "$gitstring" + fi + PS1="$ps1pc_start$gitstring$ps1pc_end" + else + printf -- "$printf_format" "$gitstring" fi } diff --git a/plugins/gitfast/gitfast.plugin.zsh b/plugins/gitfast/gitfast.plugin.zsh index 7e50cf721..dba1b1315 100644 --- a/plugins/gitfast/gitfast.plugin.zsh +++ b/plugins/gitfast/gitfast.plugin.zsh @@ -3,5 +3,6 @@ source $dir/../git/git.plugin.zsh source $dir/git-prompt.sh function git_prompt_info() { - __git_ps1 "${ZSH_THEME_GIT_PROMPT_PREFIX//\%/%%}%s${ZSH_THEME_GIT_PROMPT_SUFFIX//\%/%%}" + dirty="$(parse_git_dirty)" + __git_ps1 "${ZSH_THEME_GIT_PROMPT_PREFIX//\%/%%}%s${dirty//\%/%%}${ZSH_THEME_GIT_PROMPT_SUFFIX//\%/%%}" } diff --git a/plugins/github/github.plugin.zsh b/plugins/github/github.plugin.zsh index 598b059c1..bd69b1bd5 100644 --- a/plugins/github/github.plugin.zsh +++ b/plugins/github/github.plugin.zsh @@ -1,5 +1,22 @@ # Setup hub function for git, if it is available; http://github.com/defunkt/hub if [ "$commands[(I)hub]" ] && [ "$commands[(I)ruby]" ]; then + # Autoload _git completion functions + if declare -f _git > /dev/null; then + _git + fi + + if declare -f _git_commands > /dev/null; then + _hub_commands=( + 'alias:show shell instructions for wrapping git' + 'pull-request:open a pull request on GitHub' + 'fork:fork origin repo on GitHub' + 'create:create new repo on GitHub for the current project' + 'browse:browse the project on GitHub' + 'compare:open GitHub compare view' + ) + # Extend the '_git_commands' function with hub commands + eval "$(declare -f _git_commands | sed -e 's/base_commands=(/base_commands=(${_hub_commands} /')" + fi # eval `hub alias -s zsh` function git(){ if ! (( $+_has_working_hub )); then @@ -23,7 +40,7 @@ fi # # Use this when creating a new repo from scratch. 
empty_gh() { # [NAME_OF_REPO] - repo = $1 + repo=$1 ghuser=$( git config github.user ) mkdir "$repo" @@ -67,5 +84,14 @@ exist_gh() { # [DIRECTORY] git push -u origin master } +# git.io "GitHub URL" +# +# Shorten GitHub url, example: +# https://github.com/nvogel/dotzsh > http://git.io/8nU25w +# source: https://github.com/nvogel/dotzsh +# documentation: https://github.com/blog/985-git-io-github-url-shortener +# +git.io() {curl -i -s http://git.io -F "url=$1" | grep "Location" | cut -f 2 -d " "} + # End Functions ############################################################# diff --git a/plugins/gitignore/gitignore.plugin.zsh b/plugins/gitignore/gitignore.plugin.zsh new file mode 100644 index 000000000..be037d87a --- /dev/null +++ b/plugins/gitignore/gitignore.plugin.zsh @@ -0,0 +1,12 @@ +function gi() { curl http://www.gitignore.io/api/$@ ;} + +_gitignireio_get_command_list() { + curl -s http://www.gitignore.io/api/list | tr "," "\n" +} + +_gitignireio () { + compset -P '*,' + compadd -S '' `_gitignireio_get_command_list` +} + +compdef _gitignireio gi diff --git a/plugins/glassfish/_asadmin b/plugins/glassfish/_asadmin new file mode 100644 index 000000000..a6a7af549 --- /dev/null +++ b/plugins/glassfish/_asadmin @@ -0,0 +1,1150 @@ +#compdef asadmin +#autoload + +local -a _1st_arguments +_1st_arguments=( + "add-library:adds one or more library JAR files to GlassFish Server" + "add-resources:creates the resources specified in an XML file" + "apply-http-lb-changes:applies load balancer configuration changes to the load balancer" + "backup-domain:performs a backup on the domain" + "change-admin-password:changes the administrator password" + "change-master-broker:changes the master broker in a Message Queue cluster providing JMS services for a GlassFish Server cluster." 
+ "change-master-password:changes the master password" + "collect-log-files:creates a ZIP archive of all available log files" + "configure-jms-cluster:configures the Message Queue cluster providing JMS services to a GlassFish Server cluster" + "configure-lb-weight:sets load balancing weights for clustered instances" + "configure-ldap-for-admin:configures the authentication realm named admin-realm for the given LDAP" + "copy-config:copies an existing named configuration to create another configuration" + "create-admin-object:adds the administered object with the specified JNDI name for a resource adapter" + "create-application-ref:creates a reference to an application" + "create-audit-module:adds an audit module" + "create-auth-realm:adds the named authentication realm" + "create-cluster:creates a GlassFish Server cluster" + "create-connector-connection-pool:adds a connection pool with the specified connection pool name" + "create-connector-resource:registers the connector resource with the specified JNDI name" + "create-connector-security-map:creates a security map for the specified connector connection pool" + "create-connector-work-security-map:creates a work security map for the specified resource adapter" + "create-custom-resource:creates a custom resource" + "create-domain:creates a domain" + "create-file-user:creates a new file user" + "create-http:sets HTTP parameters for a protocol" + "create-http-health-checker:creates a health-checker for a specified load balancer configuration" + "create-http-lb:creates a load balancer" + "create-http-lb-config:creates a configuration for the load balancer" + "create-http-lb-ref:adds an existing cluster or server instance to an existing load balancer configuration or load balancer" + "create-http-listener:adds a new HTTP network listener socket" + "create-http-redirect:adds a new HTTP redirect" + "create-iiop-listener:adds an IIOP listener" + "create-instance:creates a GlassFish Server instance" + "create-jacc-provider:enables administrators to create a JACC provider that can be used by third-party authorization modules for applications running in GlassFish Server" + "create-javamail-resource:creates a JavaMail session resource" + "create-jdbc-connection-pool:registers a JDBC connection pool" + "create-jdbc-resource:creates a JDBC resource with the specified JNDI name" + "create-jms-host:creates a JMS host" + "create-jms-resource:creates a JMS resource" + "create-jmsdest:creates a JMS physical destination" + "create-jndi-resource:registers a JNDI resource" + "create-jvm-options:creates options for the Java application launcher" + "create-lifecycle-module:creates a lifecycle module" + "create-local-instance:creates a GlassFish Server instance on the host where the subcommand is run" + "create-message-security-provider:enables administrators to create a message security provider, which specifies how SOAP messages will be secured." 
+  "create-network-listener:adds a new network listener socket"
+  "create-node-config:creates a node that is not enabled for remote communication"
+  "create-node-dcom:creates a node that is enabled for communication over DCOM"
+  "create-node-ssh:creates a node that is enabled for communication over SSH"
+  "create-password-alias:creates a password alias"
+  "create-profiler:creates the profiler element"
+  "create-protocol:adds a new protocol"
+  "create-protocol-filter:adds a new protocol filter"
+  "create-protocol-finder:adds a new protocol finder"
+  "create-resource-adapter-config:creates the configuration information for the connector module"
+  "create-resource-ref:creates a reference to a resource"
+  "create-service:configures the starting of a DAS or a GlassFish Server instance on an unattended boot"
+  "create-ssl:creates and configures the SSL element in the selected HTTP listener, IIOP listener, or IIOP service"
+  "create-system-properties:adds one or more system property elements that can be referenced elsewhere in the configuration."
+  "create-threadpool:adds a thread pool"
+  "create-transport:adds a new transport"
+  "create-virtual-server:creates the named virtual server"
+  "delete-admin-object:removes the administered object with the specified JNDI name."
+  "delete-application-ref:removes a reference to an application"
+  "delete-audit-module:removes the named audit-module"
+  "delete-auth-realm:removes the named authentication realm"
+  "delete-cluster:deletes a GlassFish Server cluster"
+  "delete-config:deletes an existing named configuration"
+  "delete-connector-connection-pool:removes the specified connector connection pool"
+  "delete-connector-resource:removes the connector resource with the specified JNDI name"
+  "delete-connector-security-map:deletes a security map for the specified connector connection pool"
+  "delete-connector-work-security-map:deletes a work security map for the specified resource adapter"
+  "delete-custom-resource:removes a custom resource"
+  "delete-domain:deletes a domain"
+  "delete-file-user:removes the named file user"
+  "delete-http:removes HTTP parameters from a protocol"
+  "delete-http-health-checker:deletes the health-checker for a specified load balancer configuration"
+  "delete-http-lb:deletes a load balancer"
+  "delete-http-lb-config:deletes a load balancer configuration"
+  "delete-http-lb-ref:deletes the cluster or server instance from a load balancer"
+  "delete-http-listener:removes a network listener"
+  "delete-http-redirect:removes an HTTP redirect"
+  "delete-iiop-listener:removes an IIOP listener"
+  "delete-instance:deletes a GlassFish Server instance"
+  "delete-jacc-provider:enables administrators to delete JACC providers defined for a domain"
+  "delete-javamail-resource:removes a JavaMail session resource"
+  "delete-jdbc-connection-pool:removes the specified JDBC connection pool"
+  "delete-jdbc-resource:removes a JDBC resource with the specified JNDI name"
+  "delete-jms-host:removes a JMS host"
+  "delete-jms-resource:removes a JMS resource"
+  "delete-jmsdest:removes a JMS physical destination"
+  "delete-jndi-resource:removes a JNDI resource"
+  "delete-jvm-options:removes one or more options for the Java application launcher"
+  "delete-lifecycle-module:removes the lifecycle module"
+  "delete-local-instance:deletes a GlassFish Server instance on the machine where the subcommand is run"
+  "delete-log-levels:"
+  "delete-message-security-provider:enables administrators to delete a message security provider"
+  "delete-network-listener:removes a network listener"
+  "delete-node-config:deletes a node that is not enabled for remote communication"
+  "delete-node-dcom:deletes a node that is enabled for communication over DCOM"
+  "delete-node-ssh:deletes a node that is enabled for communication over SSH"
+  "delete-password-alias:deletes a password alias"
+  "delete-profiler:removes the profiler element"
+  "delete-protocol:removes a protocol"
+  "delete-protocol-filter:removes a protocol filter"
+  "delete-protocol-finder:removes a protocol finder"
+  "delete-resource-adapter-config:deletes the resource adapter configuration"
+  "delete-resource-ref:removes a reference to a resource"
+  "delete-ssl:deletes the SSL element in the selected HTTP listener, IIOP listener, or IIOP service"
+  "delete-system-property:removes a system property of the domain, configuration, cluster, or server instance, one at a time"
+  "delete-threadpool:removes a thread pool"
+  "delete-transport:removes a transport"
+  "delete-virtual-server:removes a virtual server"
+  "deploy:deploys the specified component"
+  "deploydir:deploys an exploded format of application archive"
+  "environment variable"
+  "disable:disables the component"
+  "disable-http-lb-application:disables an application managed by a load balancer"
+  "disable-http-lb-server:disables a server or cluster managed by a load balancer"
+  "disable-monitoring:disables monitoring for the server or for specific monitorable modules"
+  "disable-secure-admin:disables secure admin if it is already enabled."
+  "disable-secure-admin-internal-user:Instructs the GlassFish Server DAS and instances to not use the specified admin user to authenticate with each other and to authorize admin operations."
+  "disable-secure-admin-principal:disables the certificate for authorizing access in secure administration."
+  "enable:enables the component"
+  "enable-http-lb-application:enables a previously-disabled application managed by a load balancer"
+  "enable-http-lb-server:enables a previously disabled server or cluster managed by a load balancer"
+  "enable-monitoring:enables monitoring for the server or for specific monitorable modules"
+  "enable-secure-admin:enables secure admin (if it is not already enabled), optionally changing the alias used for DAS-to-instance admin messages or the alias used for instance-to-DAS admin messages."
+  "enable-secure-admin-internal-user:Instructs the GlassFish Server DAS and instances to use the specified admin user and the password associated with the password alias to authenticate with each other and to authorize admin operations."
+  "enable-secure-admin-principal:Instructs GlassFish Server, when secure admin is enabled, to accept admin requests from clients identified by the specified SSL certificate."
+  "export:marks a variable name for automatic export to the environment of subsequent commands in multimode"
+  "export-http-lb-config:exports the load balancer configuration or load balancer to a file"
+  "export-sync-bundle:exports the configuration data of a cluster or standalone instance to an archive file"
+  "flush-connection-pool:reinitializes all connections established in the specified connection pool"
+  "flush-jmsdest:purges messages in a JMS destination."
+  "freeze-transaction-service:freezes the transaction subsystem"
+  "generate-domain-schema:"
+  "generate-jvm-report:shows the JVM machine statistics for a given target instance"
+  "get:gets the values of configurable or monitorable attributes"
+  "get-client-stubs:retrieves the application JAR files needed to launch the application client."
+ "get-health:provides information on the cluster health" + "help" + "asadmin:utility for performing administrative tasks for Oracle GlassFish Server" + "import-sync-bundle:imports the configuration data of a clustered instance or standalone instance from an archive file" + "install-node:installs GlassFish Server software on specified SSH-enabled hosts" + "install-node-dcom:installs GlassFish Server software on specified DCOM-enabled hosts" + "install-node-ssh:installs GlassFish Server software on specified SSH-enabled hosts" + "jms-ping:checks if the JMS service is up and running" + "list:lists configurable or monitorable elements" + "list-admin-objects:gets all the administered objects" + "list-application-refs:lists the existing application references" + "list-applications:lists deployed applications" + "list-audit-modules:gets all audit modules and displays them" + "list-auth-realms:lists the authentication realms" + "list-backups:lists all backups" + "list-clusters:lists existing clusters in a domain" + "list-commands:lists available commands" + "list-components:lists deployed components" + "list-configs:lists named configurations" + "list-connector-connection-pools:lists the existing connector connection pools" + "list-connector-resources:lists all connector resources" + "list-connector-security-maps:lists the security maps belonging to the specified connector connection pool" + "list-connector-work-security-maps:lists the work security maps belonging to the specified resource adapter" + "list-containers:lists application containers" + "list-custom-resources:gets all custom resources" + "list-domains:lists the domains in the specified directory" + "list-file-groups:lists file groups" + "list-file-users:lists the file users" + "list-http-lb-configs:lists load balancer configurations" + "list-http-lbs:lists load balancers" + "list-http-listeners:lists the existing network listeners" + "list-iiop-listeners:lists the existing IIOP listeners" + "list-instances:lists GlassFish Server instances in a domain" + "list-jacc-providers:enables administrators to list JACC providers defined for a domain" + "list-javamail-resources:lists the existing JavaMail session resources" + "list-jdbc-connection-pools:lists all JDBC connection pools" + "list-jdbc-resources:lists all JDBC resources" + "list-jms-hosts:lists the existing JMS hosts" + "list-jms-resources:lists the JMS resources" + "list-jmsdest:lists the existing JMS physical destinations" + "list-jndi-entries:browses and queries the JNDI tree" + "list-jndi-resources:lists all existing JNDI resources" + "list-jvm-options:lists options for the Java application launcher" + "list-libraries:lists library JAR files on GlassFish Server" + "list-lifecycle-modules:lists the lifecycle modules" + "list-log-attributes:lists all logging attributes defined for a specified target in a domain" + "list-log-levels:lists the loggers and their log levels" + "list-message-security-providers:lists all security message providers for the given message layer" + "list-modules:lists GlassFish Server modules" + "list-network-listeners:lists the existing network listeners" + "list-nodes:lists all GlassFish Server nodes in a domain" + "list-nodes-config:lists all GlassFish Server nodes that do not support remote communication in a domain" + "list-nodes-dcom:lists all GlassFish Server nodes that support communication over DCOM in a domain" + "list-nodes-ssh:lists all GlassFish Server nodes that support communication over SSH in a domain" + "list-password-aliases:lists all 
password aliases" + "list-persistence-types:lists registered persistence types for HTTP sessions and SFSB instances" + "list-protocol-filters:lists the existing protocol filters" + "list-protocol-finders:lists the existing protocol finders" + "list-protocols:lists the existing protocols" + "list-resource-adapter-configs:lists the names of the current resource adapter configurations" + "list-resource-refs:lists existing resource references" + "list-secure-admin-internal-users:lists the user names that the GlassFish Server DAS and instances use to authenticate with each other and to authorize admin operations." + "list-secure-admin-principals:lists the certificates for which GlassFish Server accepts admin requests from clients." + "list-sub-components:lists EJB or servlet components in a deployed module or module of a deployed application" + "list-supported-cipher-suites:enables administrators to list the cipher suites that are supported and available to a specified GlassFish Server target" + "list-system-properties:lists the system properties of the domain, configuration, cluster, or server instance" + "list-threadpools:lists all the thread pools" + "list-timers:lists all of the persistent timers owned by server instance(s)" + "list-transports:lists the existing transports" + "list-virtual-servers:lists the existing virtual servers" + "list-web-context-param:lists servlet contextinitialization parameters of a deployed web application or module" + "list-web-env-entry:lists environment entries for a deployed web application or module" + "login:logs you into a domain" + "migrate-timers:moves EJB timers when a clustered instance was stopped or has crashed" + "monitor:displays monitoring data for commonly used components and services" + "multimode:allows multiple subcommands to be run while preserving environment settings and remaining in the asadmin utility" + "ping-connection-pool:tests if a connection pool is usable" + "ping-node-dcom:tests if a node that is enabled for communication over DCOM is usable" + "ping-node-ssh:tests if a node that is enabled for communication over SSH is usable" + "recover-transactions:manually recovers pending transactions" + "redeploy:redeploys the specified component" + "remove-library:removes one or more library JAR files from GlassFish Server" + "restart-domain:restarts the DAS of the specified domain" + "restart-instance:restarts a running GlassFish Server instance" + "restart-local-instance:restarts a running GlassFish Server instance on the host where the subcommand is run" + "restore-domain:restores files from backup" + "rollback-transaction:rolls back the named transaction" + "rotate-log:rotates the log file" + "set:sets the values of configurable attributes" + "set-log-attributes:sets the logging attributes for one or more loggers" + "set-log-levels:sets the log level for one or more loggers" + "set-web-context-param:sets a servlet context initialization parameter of a deployed web application or module" + "set-web-env-entry:sets an environment entry for a deployed web application or module" + "setup-ssh:sets up an SSH key on specified hosts" + "show-component-status:displays the status of the deployed component" + "start-cluster:starts a cluster" + "start-database:starts the Java DB" + "start-domain:starts the DAS of the specified domain" + "start-instance:starts a GlassFish Server instance" + "start-local-instance:starts a GlassFish Server instance on the host where the subcommand is run" + "stop-cluster:stops a GlassFish Server cluster" + 
"stop-database:stops the Java DB" + "stop-domain:stops the Domain Administration Server of the specified domain" + "stop-instance:stops a running GlassFish Server instance" + "stop-local-instance:stops a GlassFish Server instance on the machine where the subcommand is run" + "undeploy:removes a deployed component" + "unfreeze-transaction-service:resumes all suspended transactions" + "uninstall-node:uninstalls GlassFish Server software from specified hosts" + "uninstall-node-dcom:uninstalls GlassFish Server software from specified DCOM-enabled hosts" + "uninstall-node-ssh:uninstalls GlassFish Server software from specified SSH-enabled hosts" + "unset:removes one or more variables from the multimode environment" + "unset-web-context-param:unsets a servlet context initialization parameter of a deployed web application or module" + "unset-web-env-entry:unsets an environment entry for a deployed web application or module" + "update-connector-security-map:modifies a security map for the specified connector connection pool" + "update-connector-work-security-map:modifies a work security map for the specified resource adapter" + "update-file-user:updates a current file user as specified" + "update-node-config:updates the configuration data of anode" + "update-node-dcom:updates the configuration data of a node" + "update-node-ssh:updates the configuration data of a node" + "update-password-alias:updates a password alias" + "uptime:returns the length of time that the DAS has been running" + "validate-dcom:tests the connection over DCOM to a remote host" + "validate-multicast:validates that multicast transport is available for clusters" + "verify-domain-xml:verifies the content of the domain.xml file" + "version:displays version information forGlassFish Server" +) + +_arguments '*:: :->command' + +if (( CURRENT == 1 )); then + _describe -t commands "asadmin command" _1st_arguments + return +fi + +local -a _command_args +case "$words[1]" in + add-library) + _command_args=('*:directory:_files' '--host+:' '--port+:' '--type+:type:(common ext app)') + ;; + add-resources) + _command_args=('*:directory:_files' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + apply-http-lb-changes) + _command_args=('--host+:' '--ping+:' '--port+:') + ;; + backup-domain) + _command_args=('--backupconfig+:' '--backupdir+:' '--description+:' '--domaindir+:' '--long+:long:(true false)') + ;; + change-admin-password) + _command_args=('--domain_name+:' '--domaindir+:') + ;; + change-master-broker) + _command_args=('--host+:' '--port+:') + ;; + change-master-password) + _command_args=('--domaindir+:' '--nodedir+:' '--savemasterpassword+:savemasterpassword:(true false)') + ;; + collect-log-files) + _command_args=('--host+:' '--port+:' '--retrieve+:retrieve:(true false)' '--target+:') + ;; + configure-jms-cluster) + _command_args=('--clustertype+:' '--configstoretype+:' '--dburl+:' '--dbuser+:' '--dbvendor+:' '--host+:' '--jmsdbpassword+:' '--messagestoretype+:' '--port+:' '--property+:') + ;; + configure-lb-weight) + _command_args=('--cluster+:cluster:_asadmin_clusters' '--host+:' '--port+:') + ;; + configure-ldap-for-admin) + _command_args=('--basedn+:' '--host+:' '--ldap-group+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--url+:') + ;; + copy-config) + _command_args=('--host+:' '--port+:' '--systemproperties+:') + ;; + create-admin-object) + _command_args=('--classname+:' '--description+:' '--enabled+:enabled:(true false)' 
'--host+:' '--port+:' '--property+:' '--raname+:' '--restype+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-application-ref) + _command_args=('--enabled+:enabled:(true false)' '--host+:' '--lbenabled+:lbenabled:(true false)' '--port+:' '--target+:target:_asadmin_targets_cluster_das_standalone_instance' '--virtualservers+:') + ;; + create-audit-module) + _command_args=('--classname+:' '--host+:' '--port+:' '--property+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-auth-realm) + _command_args=('--classname+:' '--host+:' '--port+:' '--property+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-connector-connection-pool) + _command_args=('--associatewiththread+:associatewiththread:(true false)' '--connectiondefinition+:' '--creationretryattempts+:' '--creationretryinterval+:' '--description+:' '--failconnection+:failconnection:(true false)' '--host+:' '--idletimeout+:' '--isconnectvalidatereq+:isconnectvalidatereq:(true false)' '--lazyconnectionassociation+:lazyconnectionassociation:(true false)' '--lazyconnectionenlistment+:lazyconnectionenlistment:(true false)' '--leakreclaim+:leakreclaim:(true false)' '--leaktimeout+:' '--matchconnections+:matchconnections:(true false)' '--maxconnectionusagecount+:' '--maxpoolsize+:' '--maxwait+:' '--ping+:ping:(true false)' '--pooling+:pooling:(true false)' '--poolresize+:' '--port+:' '--property+:' '--raname+:' '--steadypoolsize+:' '--target+:' '--transactionsupport+:transactionsupport:(XATransaction LocalTransaction NoTransaction)' '--validateatmostonceperiod+:') + ;; + create-connector-resource) + _command_args=('--description+:' '--enabled+:enabled:(true false)' '--host+:' '--objecttype+:' '--poolname+:' '--port+:' '--property+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + create-connector-security-map) + _command_args=('--host+:' '--mappedpassword+:' '--mappedusername+:' '--poolname+:' '--port+:' '--principals+:' '--target+:' '--usergroups+:') + ;; + create-connector-work-security-map) + _command_args=('--description+:' '--groupsmap+:' '--host+:' '--port+:' '--principalsmap+:' '--raname+:') + ;; + create-custom-resource) + _command_args=('--description+:' '--enabled+:enabled:(true false)' '--factoryclass+:' '--host+:' '--port+:' '--property+:' '--restype+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + create-domain) + _command_args=('--adminport+:' '--checkports+:checkports:(true false)' '--domaindir+:' '--domainproperties+:' '--instanceport+:' '--keytooloptions+:' '--nopassword+:nopassword:(true false)' '--portbase+:' '--profile+:' '--savelogin+:savelogin:(true false)' '--savemasterpassword+:savemasterpassword:(true false)' '--template+:' '--usemasterpassword+:usemasterpassword:(true false)') + ;; + create-file-user) + _command_args=('--authrealmname+:' '--groups+:' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--userpassword+:') + ;; + create-http) + _command_args=('--default-virtual-server+:' '--dns-lookup-enabled+:dns-lookup-enabled:(true false)' '--host+:' '--max-connection+:' '--port+:' '--request-timeout-seconds+:' '--servername+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--timeout-seconds+:' '--xpowered+:xpowered:(true false)') + ;; + create-http-health-checker) + _command_args=('--config+:' '--host+:' '--interval+:' '--port+:' '--timeout+:' 
'--url+:') + ;; + create-http-lb) + _command_args=('--autoapplyenabled+:autoapplyenabled:(true false)' '--devicehost+:' '--deviceport+:' '--healthcheckerinterval+:' '--healthcheckertimeout+:' '--healthcheckerurl+:' '--host+:' '--httpsrouting+:httpsrouting:(true false)' '--lbenableallapplications+:' '--lbenableallinstances+:' '--lbpolicy+:' '--lbpolicymodule+:' '--lbweight+:' '--monitor+:monitor:(true false)' '--port+:' '--property+:' '--reloadinterval+:' '--responsetimeout+:' '--routecookie+:routecookie:(true false)' '--sslproxyhost+:' '--sslproxyport+:' '--target+:target:_asadmin_targets_cluster_standalone_instance') + ;; + create-http-lb-ref) + _command_args=('--config+:' '--healthcheckerinterval+:' '--healthcheckertimeout+:' '--healthcheckerurl+:' '--host+:' '--lbenableallapplications+:' '--lbenableallinstances+:' '--lbname+:' '--lbpolicy+:' '--lbpolicymodule+:' '--lbweight+:' '--port+:') + ;; + create-http-listener) + _command_args=('--acceptorthreads+:' '--default-virtual-server+:' '--defaultvs+:' '--enabled+:enabled:(true false)' '--host+:' '--listeneraddress+:' '--listenerport+:' '--port+:' '--redirectport+:' '--secure+:secure:(true false)' '--securityenabled+:securityenabled:(true false)' '--servername+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--xpowered+:xpowered:(true false)') + ;; + create-http-redirect) + _command_args=('--host+:' '--port+:' '--redirect-port+:' '--secure-redirect+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-iiop-listener) + _command_args=('--enabled+:enabled:(true false)' '--host+:' '--iiopport+:' '--listeneraddress+:' '--port+:' '--property+:' '--securityenabled+:securityenabled:(true false)' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-instance) + _command_args=('--checkports+:checkports:(true false)' '--cluster+:cluster:_asadmin_clusters' '--config+:' '--host+:' '--lbenabled+:lbenabled:(true false)' '--node+:node:_asadmin_nodes' '--port+:' '--portbase+:' '--systemproperties+:' '--terse+:terse:(true false)') + ;; + create-jacc-provider) + _command_args=('--host+:' '--policyconfigfactoryclass+:' '--policyproviderclass+:' '--port+:' '--property+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-javamail-resource) + _command_args=('--debug+:debug:(true false)' '--description+:' '--enabled+:enabled:(true false)' '--fromaddress+:' '--host+:' '--mailhost+:' '--mailuser+:' '--port+:' '--property+:' '--storeprotocol+:' '--storeprotocolclass+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance' '--transprotocol+:' '--transprotocolclass+:') + ;; + create-jdbc-connection-pool) + _command_args=('--allownoncomponentcallers+:allownoncomponentcallers:(true false)' '--associatewiththread+:associatewiththread:(true false)' '--creationretryattempts+:' '--creationretryinterval+:' '--datasourceclassname+:' '--description+:' '--driverclassname+:' '--failconnection+:failconnection:(true false)' '--host+:' '--idletimeout+:' '--initsql+:' '--isconnectvalidatereq+:isconnectvalidatereq:(true false)' '--isisolationguaranteed+:isisolationguaranteed:(true false)' '--isolationlevel+:' '--lazyconnectionassociation+:lazyconnectionassociation:(true false)' '--lazyconnectionenlistment+:lazyconnectionenlistment:(true false)' '--leakreclaim+:leakreclaim:(true false)' '--leaktimeout+:' '--matchconnections+:matchconnections:(true false)' '--maxconnectionusagecount+:' '--maxpoolsize+:' '--maxwait+:' 
'--nontransactionalconnections+:nontransactionalconnections:(true false)' '--ping+:ping:(true false)' '--pooling+:pooling:(true false)' '--poolresize+:' '--port+:' '--property+:' '--restype+:restype:(javax.sql.DataSource javax.sql.XADataSource javax.sql.ConnectionPoolDataSource java.sql.Driver)' '--sqltracelisteners+:' '--statementcachesize+:' '--statementleakreclaim+:statementleakreclaim:(true false)' '--statementleaktimeout+:' '--statementtimeout+:' '--steadypoolsize+:' '--target+:' '--validateatmostonceperiod+:' '--validationclassname+:' '--validationmethod+:validationmethod:(auto-commit meta-data table custom-validation)' '--validationtable+:' '--wrapjdbcobjects+:wrapjdbcobjects:(true false)') + ;; + create-jdbc-resource) + _command_args=('--connectionpoolid+:' '--description+:' '--enabled+:enabled:(true false)' '--host+:' '--port+:' '--property+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + create-jms-host) + _command_args=('--host+:' '--mqhost+:' '--mqpassword+:' '--mqport+:' '--mquser+:' '--port+:' '--property+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-jms-resource) + _command_args=('--description+:' '--enabled+:enabled:(true false)' '--host+:' '--port+:' '--property+:' '--restype+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + create-jmsdest) + _command_args=('--desttype+:' '--host+:' '--port+:' '--property+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-jndi-resource) + _command_args=('--description+:' '--enabled+:enabled:(true false)' '--factoryclass+:' '--host+:' '--jndilookupname+:' '--port+:' '--property+:' '--restype+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + create-jvm-options) + _command_args=('--host+:' '--port+:' '--profiler+:profiler:(true false)' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-lifecycle-module) + _command_args=('--classname+:' '--classpath+:' '--description+:' '--enabled+:enabled:(true false)' '--failurefatal+:failurefatal:(true false)' '--host+:' '--loadorder+:' '--port+:' '--property+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + create-local-instance) + _command_args=('--checkports+:checkports:(true false)' '--cluster+:cluster:_asadmin_clusters' '--config+:' '--lbenabled+:lbenabled:(true false)' '--node+:node:_asadmin_nodes' '--nodedir+:' '--portbase+:' '--savemasterpassword+:savemasterpassword:(true false)' '--systemproperties+:' '--usemasterpassword+:usemasterpassword:(true false)') + ;; + create-message-security-provider) + _command_args=('--classname+:' '--host+:' '--isdefaultprovider+:isdefaultprovider:(true false)' '--layer+:layer:(SOAP HttpServlet)' '--port+:' '--property+:' '--providertype+:providertype:(client server client-server)' '--requestauthrecipient+:' '--requestauthsource+:' '--responseauthrecipient+:' '--responseauthsource+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-network-listener) + _command_args=('--address+:' '--enabled+:enabled:(true false)' '--host+:' '--jkenabled+:jkenabled:(true false)' '--listenerport+:' '--port+:' '--protocol+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--threadpool+:' '--transport+:') + ;; + create-node-config) + _command_args=('--host+:' '--installdir+:' '--nodedir+:' '--nodehost+:' '--port+:') + ;; + create-node-dcom) + 
_command_args=('--archive+:' '--force+:force:(true false)' '--host+:' '--install+:install:(true false)' '--installdir+:' '--nodedir+:' '--nodehost+:' '--port+:' '--windowsdomain+:' '--windowspassword+:' '--windowsuser+:') + ;; + create-node-ssh) + _command_args=('--archive+:' '--force+:force:(true false)' '--host+:' '--install+:install:(true false)' '--installdir+:' '--nodedir+:' '--nodehost+:' '--port+:' '--sshkeyfile+:' '--sshkeypassphrase+:' '--sshpassword+:' '--sshport+:' '--sshuser+:') + ;; + create-password-alias) + _command_args=('--aliaspassword+:' '--host+:' '--port+:') + ;; + create-profiler) + _command_args=('--classpath+:' '--enabled+:enabled:(true false)' '--host+:' '--nativelibrarypath+:' '--port+:' '--property+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-protocol) + _command_args=('--host+:' '--port+:' '--securityenabled+:securityenabled:(true false)' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-protocol-filter) + _command_args=('--classname+:' '--host+:' '--port+:' '--protocol+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + create-protocol-finder) + _command_args=('--classname+:' '--host+:' '--port+:' '--protocol+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--targetprotocol+:') + ;; + create-resource-adapter-config) + _command_args=('--host+:' '--objecttype+:' '--port+:' '--property+:' '--target+:' '--threadpoolid+:') + ;; + create-resource-ref) + _command_args=('--enabled+:enabled:(true false)' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_standalone_instance') + ;; + create-service) + _command_args=('*:instances:_asadmin_instances' '--domaindir+:domaindir:directory:_files' '--dry-run+:dry-run:(true false)' '--force+:force:(true false)' '--name+:' '--node+:node:_asadmin_nodes' '--nodedir+:' '--serviceproperties+:' '--serviceuser+:') + ;; + create-ssl) + _command_args=('--certname+:' '--clientauthenabled+:clientauthenabled:(true false)' '--host+:' '--port+:' '--ssl2ciphers+:' '--ssl2enabled+:ssl2enabled:(true false)' '--ssl3enabled+:ssl3enabled:(true false)' '--ssl3tlsciphers+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--tlsenabled+:tlsenabled:(true false)' '--tlsrollbackenabled+:tlsrollbackenabled:(true false)' '--type+:type:(network-listener http-listener iiop-listener iiop-service jmx-connector)') + ;; + create-system-properties) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_config_das_domain_standalone_instance') + ;; + create-threadpool) + _command_args=('--host+:' '--idletimeout+:' '--maxqueuesize+:' '--maxthreadpoolsize+:' '--minthreadpoolsize+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--workqueues+:') + ;; + create-transport) + _command_args=('--acceptorthreads+:' '--buffersizebytes+:' '--bytebuffertype+:' '--classname+:' '--displayconfiguration+:displayconfiguration:(true false)' '--enablesnoop+:enablesnoop:(true false)' '--host+:' '--idlekeytimeoutseconds+:' '--maxconnectionscount+:' '--port+:' '--readtimeoutmillis+:' '--selectionkeyhandler+:' '--selectorpolltimeoutmillis+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--tcpnodelay+:tcpnodelay:(true false)' '--writetimeoutmillis+:') + ;; + create-virtual-server) + _command_args=('--defaultwebmodule+:' '--host+:' '--hosts+:' '--httplisteners+:' '--logfile+:' 
'--networklisteners+:' '--port+:' '--property+:' '--state+:state:(on off disabled)' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-admin-object) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-application-ref) + _command_args=('--cascade+:cascade:(true false)' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_standalone_instance') + ;; + delete-audit-module) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-auth-realm) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-config) + _command_args=('--host+:' '--port+:') + ;; + delete-connector-connection-pool) + _command_args=('--cascade+:cascade:(true false)' '--host+:' '--port+:' '--target+:') + ;; + delete-connector-resource) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + delete-connector-security-map) + _command_args=('--host+:' '--poolname+:' '--port+:' '--target+:') + ;; + delete-connector-work-security-map) + _command_args=('--host+:' '--port+:' '--raname+:') + ;; + delete-custom-resource) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + delete-domain) + _command_args=('--domaindir+:') + ;; + delete-file-user) + _command_args=('--authrealmname+:' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-http) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-http-health-checker) + _command_args=('--config+:' '--host+:' '--port+:') + ;; + delete-http-lb-ref) + _command_args=('--config+:' '--force+:' '--host+:' '--lbname+:' '--port+:') + ;; + delete-http-listener) + _command_args=('--host+:' '--port+:' '--secure+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-http-redirect) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-iiop-listener) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-instance) + _command_args=('*:instances:_asadmin_instances' '--host+:' '--port+:' '--terse+:terse:(true false)') + ;; + delete-jacc-provider) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-javamail-resource) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + delete-jdbc-connection-pool) + _command_args=('--cascade+:cascade:(true false)' '--host+:' '--port+:' '--target+:') + ;; + delete-jdbc-resource) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + delete-jms-host) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-jms-resource) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + delete-jmsdest) + _command_args=('--desttype+:' '--host+:' '--port+:' 
'--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-jndi-resource) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + delete-jvm-options) + _command_args=('--host+:' '--port+:' '--profiler+:profiler:(true false)' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-lifecycle-module) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + delete-local-instance) + _command_args=('*:instances:_asadmin_instances' '--node+:node:_asadmin_nodes' '--nodedir+:') + ;; + delete-log-levels) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-message-security-provider) + _command_args=('--host+:' '--layer+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-network-listener) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-node-config) + _command_args=('*:nodes:_asadmin_nodes_config' '--host+:' '--port+:') + ;; + delete-node-dcom) + _command_args=('*:nodes:_asadmin_nodes_dcom' '--force+:force:(true false)' '--host+:' '--port+:' '--uninstall+:uninstall:(true false)') + ;; + delete-node-ssh) + _command_args=('*:nodes:_asadmin_nodes_ssh' '--force+:force:(true false)' '--host+:' '--port+:' '--uninstall+:uninstall:(true false)') + ;; + delete-password-alias) + _command_args=('--host+:' '--port+:') + ;; + delete-profiler) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-protocol) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-protocol-filter) + _command_args=('--host+:' '--port+:' '--protocol+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-protocol-finder) + _command_args=('--host+:' '--port+:' '--protocol+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-resource-adapter-config) + _command_args=('--host+:' '--port+:' '--target+:') + ;; + delete-resource-ref) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_standalone_instance') + ;; + delete-ssl) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--type+:type:(network-listener http-listener iiop-listener iiop-service jmx-connector)') + ;; + delete-system-property) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_config_das_domain_standalone_instance') + ;; + delete-threadpool) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-transport) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + delete-virtual-server) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + deploy) + _command_args=('*:directory:_files' '--asyncreplication+:asyncreplication:(true false)' '--availabilityenabled+:availabilityenabled:(true false)' '--contextroot+:' '--createtables+:createtables:(true false)' '--dbvendorname+:' 
'--deploymentplan+:deploymentplan:directory:_files' '--description+:' '--dropandcreatetables+:dropandcreatetables:(true false)' '--enabled+:enabled:(true false)' '--force+:force:(true false)' '--generatermistubs+:generatermistubs:(true false)' '--host+:' '--isredeploy+:isredeploy:(true false)' '--keepfailedstubs+:keepfailedstubs:(true false)' '--keepreposdir+:keepreposdir:(true false)' '--keepstate+:keepstate:(true false)' '--lbenabled+:lbenabled:(true false)' '--libraries+:' '--logreportederrors+:logreportederrors:(true false)' '--name+:' '--port+:' '--precompilejsp+:precompilejsp:(true false)' '--properties+:' '--property+:' '--retrieve+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance' '--type+:' '--uniquetablenames+:uniquetablenames:(true false)' '--verify+:verify:(true false)' '--virtualservers+:') + ;; + deploydir) + _command_args=('*:directory:_files' '--asyncreplication+:asyncreplication:(true false)' '--availabilityenabled+:availabilityenabled:(true false)' '--contextroot+:' '--createtables+:createtables:(true false)' '--dbvendorname+:' '--deploymentplan+:deploymentplan:directory:_files' '--description+:' '--dropandcreatetables+:dropandcreatetables:(true false)' '--enabled+:enabled:(true false)' '--force+:force:(true false)' '--generatermistubs+:generatermistubs:(true false)' '--host+:' '--isredeploy+:isredeploy:(true false)' '--keepfailedstubs+:keepfailedstubs:(true false)' '--keepreposdir+:keepreposdir:(true false)' '--keepstate+:keepstate:(true false)' '--lbenabled+:lbenabled:(true false)' '--libraries+:' '--logreportederrors+:logreportederrors:(true false)' '--name+:' '--port+:' '--precompilejsp+:precompilejsp:(true false)' '--properties+:' '--property+:' '--retrieve+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance' '--type+:' '--uniquetablenames+:uniquetablenames:(true false)' '--verify+:verify:(true false)' '--virtualservers+:') + ;; + disable) + _command_args=('--cascade+:cascade:(true false)' '--droptables+:droptables:(true false)' '--host+:' '--isredeploy+:isredeploy:(true false)' '--isundeploy+:isundeploy:(true false)' '--keepreposdir+:keepreposdir:(true false)' '--keepstate+:keepstate:(true false)' '--port+:' '--properties+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance') + ;; + disable-http-lb-application) + _command_args=('--host+:' '--name+:' '--port+:' '--timeout+:') + ;; + disable-http-lb-server) + _command_args=('--host+:' '--port+:' '--timeout+:') + ;; + disable-monitoring) + _command_args=('--host+:' '--modules+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + disable-secure-admin) + _command_args=('--host+:' '--port+:') + ;; + enable) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance') + ;; + enable-http-lb-application) + _command_args=('--host+:' '--name+:' '--port+:') + ;; + enable-http-lb-server) + _command_args=('--host+:' '--port+:') + ;; + enable-monitoring) + _command_args=('--dtrace+:dtrace:(true false)' '--host+:' '--mbean+:mbean:(true false)' '--modules+:' '--options+:' '--pid+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + enable-secure-admin) + _command_args=('--adminalias+:' '--host+:' '--instancealias+:' '--port+:') + ;; + export-http-lb-config) + _command_args=('--config+:' '--host+:' '--lbname+:' '--lbtargets+:' '--port+:' '--property+:' 
'--retrievefile+:retrievefile:(true false)') + ;; + export-sync-bundle) + _command_args=('--host+:' '--port+:' '--retrieve+:retrieve:(true false)' '--target+:') + ;; + flush-connection-pool) + _command_args=('--appname+:' '--host+:' '--modulename+:' '--port+:') + ;; + flush-jmsdest) + _command_args=('--desttype+:' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + freeze-transaction-service) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance') + ;; + generate-domain-schema) + _command_args=('--format+:' '--host+:' '--port+:' '--showdeprecated+:showdeprecated:(true false)' '--showsubclasses+:showsubclasses:(true false)') + ;; + generate-jvm-report) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_clustered_instance_das_standalone_instance' '--type+:type:(summary thread class memory log)') + ;; + get) + _command_args=('--host+:' '--monitor+:monitor:(true false)' '--port+:') + ;; + get-client-stubs) + _command_args=('--appname+:' '--host+:' '--port+:') + ;; + get-health) + _command_args=('--host+:' '--port+:') + ;; + help) + _describe -t help-commands "asadmin help command" _1st_arguments + ;; + import-sync-bundle) + _command_args=('--instance+:' '--node+:node:_asadmin_nodes' '--nodedir+:') + ;; + install-node) + _command_args=('--archive+:' '--create+:create:(true false)' '--force+:force:(true false)' '--installdir+:' '--save+:save:(true false)' '--sshkeyfile+:' '--sshport+:' '--sshuser+:') + ;; + install-node-dcom) + _command_args=('--archive+:' '--create+:create:(true false)' '--force+:force:(true false)' '--installdir+:' '--save+:save:(true false)' '--windowsdomain+:' '--windowsuser+:') + ;; + install-node-ssh) + _command_args=('--archive+:' '--create+:create:(true false)' '--force+:force:(true false)' '--installdir+:' '--save+:save:(true false)' '--sshkeyfile+:' '--sshport+:' '--sshuser+:') + ;; + jms-ping) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance') + ;; + list) + _command_args=('--host+:' '--monitor+:monitor:(true false)' '--port+:') + ;; + list-admin-objects) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance' '--host+:' '--port+:') + ;; + list-application-refs) + _command_args=('*:targets:_asadmin_targets_cluster_das_standalone_instance' '--host+:' '--long+:long:(true false)' '--port+:' '--terse+:terse:(true false)') + ;; + list-applications) + _command_args=('*:targets:_asadmin_targets_cluster_das_domain_standalone_instance' '--host+:' '--long+:long:(true false)' '--port+:' '--resources+:resources:(true false)' '--subcomponents+:subcomponents:(true false)' '--terse+:terse:(true false)' '--type+:') + ;; + list-audit-modules) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance' '--host+:' '--port+:') + ;; + list-auth-realms) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance' '--host+:' '--port+:') + ;; + list-backups) + _command_args=('--backupconfig+:' '--backupdir+:' '--domaindir+:' '--long+:long:(true false)') + ;; + list-clusters) + _command_args=('--host+:' '--port+:') + ;; + list-commands) + _command_args=('--localonly+:localonly:(true false)' '--remoteonly+:remoteonly:(true false)') + ;; + list-components) + 
_command_args=('*:targets:_asadmin_targets_cluster_das_domain_standalone_instance' '--host+:' '--long+:long:(true false)' '--port+:' '--resources+:resources:(true false)' '--subcomponents+:subcomponents:(true false)' '--terse+:terse:(true false)' '--type+:') + ;; + list-configs) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_domain_standalone_instance' '--host+:' '--port+:') + ;; + list-connector-connection-pools) + _command_args=('--host+:' '--port+:') + ;; + list-connector-resources) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance' '--host+:' '--port+:') + ;; + list-connector-security-maps) + _command_args=('--host+:' '--long+:long:(true false)' '--port+:' '--securitymap+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance') + ;; + list-connector-work-security-maps) + _command_args=('--host+:' '--port+:' '--securitymap+:') + ;; + list-containers) + _command_args=('--host+:' '--port+:') + ;; + list-custom-resources) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance') + ;; + list-descriptors) + _command_args=('--host+:' '--port+:') + ;; + list-domains) + _command_args=('--domaindir+:') + ;; + list-file-groups) + _command_args=('--authrealmname+:' '--host+:' '--name+:' '--port+:') + ;; + list-file-users) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance' '--authrealmname+:' '--host+:' '--port+:') + ;; + list-http-lb-configs) + _command_args=('--host+:' '--port+:') + ;; + list-http-listeners) + _command_args=('*:targets:_asadmin_targets_cluster_config_das_standalone_instance' '--host+:' '--long+:long:(true false)' '--port+:') + ;; + list-iiop-listeners) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_domain_standalone_instance' '--host+:' '--port+:') + ;; + list-instances) + _command_args=('--host+:' '--long+:long:(true false)' '--nostatus+:nostatus:(true false)' '--port+:' '--standaloneonly+:standaloneonly:(true false)' '--timeoutmsec+:') + ;; + list-jacc-providers) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance' '--host+:' '--port+:') + ;; + list-javamail-resources) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance') + ;; + list-jdbc-connection-pools) + _command_args=('--host+:' '--port+:') + ;; + list-jdbc-resources) + _command_args=('--host+:' '--port+:') + ;; + list-jms-hosts) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + list-jms-resources) + _command_args=('*:targets:_asadmin_targets_cluster_das_domain_standalone_instance' '--host+:' '--port+:' '--restype+:') + ;; + list-jmsdest) + _command_args=('*:targets:_asadmin_targets_cluster_config_das_standalone_instance' '--desttype+:' '--host+:' '--port+:' '--property+:') + ;; + list-jndi-entries) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance' '--context+:' '--host+:' '--port+:') + ;; + list-jndi-resources) + 
_command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance') + ;; + list-jvm-options) + _command_args=('--host+:' '--port+:' '--profiler+:profiler:(true false)' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + list-libraries) + _command_args=('--host+:' '--port+:' '--type+:type:(common ext app)') + ;; + list-lifecycle-modules) + _command_args=('*:targets:_asadmin_targets_cluster_das_domain_standalone_instance' '--host+:' '--port+:' '--terse+:terse:(true false)') + ;; + list-log-attributes) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance' '--host+:' '--port+:') + ;; + list-log-levels) + _command_args=('--host+:' '--port+:') + ;; + list-message-security-providers) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance' '--host+:' '--layer+:layer:(SOAP HttpServlet)' '--port+:') + ;; + list-modules) + _command_args=('--host+:' '--port+:') + ;; + list-network-listeners) + _command_args=('*:targets:_asadmin_targets_cluster_config_das_standalone_instance' '--host+:' '--port+:') + ;; + list-nodes) + _command_args=('--host+:' '--long+:long:(true false)' '--port+:' '--terse+:terse:(true false)') + ;; + list-nodes-config) + _command_args=('--host+:' '--long+:long:(true false)' '--port+:' '--terse+:terse:(true false)') + ;; + list-nodes-dcom) + _command_args=('--host+:' '--long+:long:(true false)' '--port+:' '--terse+:terse:(true false)') + ;; + list-nodes-ssh) + _command_args=('--host+:' '--long+:long:(true false)' '--port+:' '--terse+:terse:(true false)') + ;; + list-password-aliases) + _command_args=('--host+:' '--port+:') + ;; + list-persistence-types) + _command_args=('--host+:' '--port+:' '--type+:') + ;; + list-protocol-filters) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + list-protocol-finders) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + list-protocols) + _command_args=('*:targets:_asadmin_targets_cluster_config_das_standalone_instance' '--host+:' '--port+:') + ;; + list-resource-adapter-configs) + _command_args=('--host+:' '--long+:long:(true false)' '--port+:' '--raname+:') + ;; + list-resource-refs) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_das_standalone_instance' '--host+:' '--port+:') + ;; + list-sub-components) + _command_args=('--appname+:' '--host+:' '--port+:' '--resources+:resources:(true false)' '--terse+:terse:(true false)' '--type+:') + ;; + list-supported-cipher-suites) + _command_args=('--host+:' '--port+:') + ;; + list-system-properties) + _command_args=('*:targets:_asadmin_targets_cluster_clustered_instance_config_das_domain_standalone_instance' '--host+:' '--port+:') + ;; + list-threadpools) + _command_args=('--host+:' '--port+:') + ;; + list-timers) + _command_args=('*:targets:_asadmin_targets_cluster_das_standalone_instance' '--host+:' '--port+:') + ;; + list-transports) + _command_args=('*:targets:_asadmin_targets_cluster_config_das_standalone_instance' '--host+:' '--port+:') + ;; + list-virtual-servers) + _command_args=('*:targets:_asadmin_targets_cluster_config_das_standalone_instance' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + 
list-web-context-param) + _command_args=('--host+:' '--name+:' '--port+:') + ;; + list-web-env-entry) + _command_args=('--host+:' '--name+:' '--port+:') + ;; + login) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_das_standalone_instance') + ;; + migrate-timers) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_clustered_instance_das') + ;; + monitor) + _command_args=('--filename+:filename:directory:_files' '--filter+:' '--interval+:' '--type+:') + ;; + multimode) + _command_args=('--encoding+:' '--file+:file:directory:_files' '--printprompt+:printprompt:(true false)') + ;; + ping-connection-pool) + _command_args=('--appname+:' '--host+:' '--modulename+:' '--port+:' '--target+:') + ;; + ping-node-dcom) + _command_args=('*:nodes:_asadmin_nodes_dcom' '--host+:' '--port+:' '--validate+:validate:(true false)') + ;; + ping-node-ssh) + _command_args=('*:nodes:_asadmin_nodes_ssh' '--host+:' '--port+:' '--validate+:validate:(true false)') + ;; + recover-transactions) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_clustered_instance_das_standalone_instance' '--transactionlogdir+:') + ;; + redeploy) + _command_args=('*:directory:_files' '--asyncreplication+:asyncreplication:(true false)' '--availabilityenabled+:availabilityenabled:(true false)' '--contextroot+:' '--createtables+:createtables:(true false)' '--dbvendorname+:' '--deploymentplan+:deploymentplan:directory:_files' '--description+:' '--dropandcreatetables+:dropandcreatetables:(true false)' '--enabled+:enabled:(true false)' '--force+:force:(true false)' '--generatermistubs+:generatermistubs:(true false)' '--host+:' '--isredeploy+:isredeploy:(true false)' '--keepfailedstubs+:keepfailedstubs:(true false)' '--keepreposdir+:keepreposdir:(true false)' '--keepstate+:keepstate:(true false)' '--lbenabled+:lbenabled:(true false)' '--libraries+:' '--logreportederrors+:logreportederrors:(true false)' '--name+:' '--port+:' '--precompilejsp+:precompilejsp:(true false)' '--properties+:' '--property+:' '--retrieve+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance' '--type+:' '--uniquetablenames+:uniquetablenames:(true false)' '--verify+:verify:(true false)' '--virtualservers+:') + ;; + remove-library) + _command_args=('*:libraries:_asadmin_libraries' '--host+:' '--port+:' '--type+:type:(common ext app)') + ;; + restart-domain) + _command_args=('--debug+:debug:(true false)' '--domaindir+:' '--force+:force:(true false)' '--kill+:kill:(true false)') + ;; + restart-instance) + _command_args=('*:instances:_asadmin_instances' '--debug+:' '--host+:' '--port+:') + ;; + restart-local-instance) + _command_args=('*:instances:_asadmin_instances' '--debug+:debug:(true false)' '--force+:force:(true false)' '--kill+:kill:(true false)' '--node+:node:_asadmin_nodes' '--nodedir+:') + ;; + restore-domain) + _command_args=('--backupconfig+:' '--backupdir+:' '--description+:' '--domaindir+:' '--filename+:' '--force+:force:(true false)' '--long+:long:(true false)') + ;; + rollback-transaction) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_clustered_instance_das_standalone_instance' '--transaction_id+:') + ;; + rotate-log) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_das_standalone_instance') + ;; + set) + _command_args=('--host+:' '--port+:') + ;; + set-log-attributes) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + 
;; + set-log-levels) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance') + ;; + set-web-context-param) + _command_args=('--description+:' '--host+:' '--ignoredescriptoritem+:ignoredescriptoritem:(true false)' '--name+:' '--port+:' '--value+:') + ;; + set-web-env-entry) + _command_args=('--description+:' '--host+:' '--ignoredescriptoritem+:ignoredescriptoritem:(true false)' '--name+:' '--port+:' '--type+:' '--value+:') + ;; + setup-ssh) + _command_args=('--generatekey+:generatekey:(true false)' '--sshkeyfile+:' '--sshport+:' '--sshpublickeyfile+:' '--sshuser+:') + ;; + show-component-status) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance') + ;; + start-cluster) + _command_args=('*:clusters:_asadmin_clusters' '--autohadboverride+:autohadboverride:(true false)' '--host+:' '--port+:' '--verbose+:verbose:(true false)') + ;; + start-database) + _command_args=('--dbhome+:' '--dbhost+:' '--dbport+:' '--jvmoptions+:') + ;; + start-domain) + _command_args=('--debug+:debug:(true false)' '--domaindir+:' '--upgrade+:upgrade:(true false)' '--verbose+:verbose:(true false)') + ;; + start-instance) + _command_args=('*:instances:_asadmin_instances' '--debug+:debug:(true false)' '--host+:' '--port+:' '--setenv+:' '--sync+:sync:(none normal full)' '--terse+:terse:(true false)') + ;; + start-local-instance) + _command_args=('*:instances:_asadmin_instances' '--debug+:debug:(true false)' '--node+:node:_asadmin_nodes' '--nodedir+:' '--sync+:sync:(none normal full)' '--verbose+:verbose:(true false)') + ;; + stop-cluster) + _command_args=('*:clusters:_asadmin_clusters' '--autohadboverride+:autohadboverride:(true false)' '--host+:' '--kill+:kill:(true false)' '--port+:' '--verbose+:verbose:(true false)') + ;; + stop-database) + _command_args=('--dbhost+:' '--dbport+:' '--dbuser+:') + ;; + stop-domain) + _command_args=('--domaindir+:' '--force+:force:(true false)' '--kill+:kill:(true false)') + ;; + stop-instance) + _command_args=('*:instances:_asadmin_instances' '--force+:force:(true false)' '--host+:' '--kill+:kill:(true false)' '--port+:') + ;; + stop-local-instance) + _command_args=('*:instances:_asadmin_instances' '--force+:force:(true false)' '--kill+:kill:(true false)' '--node+:node:_asadmin_nodes' '--nodedir+:') + ;; + test-upgrade) + _command_args=('--host+:' '--port+:') + ;; + undeploy) + _command_args=('*:applications:_asadmin_applications' '--cascade+:cascade:(true false)' '--droptables+:droptables:(true false)' '--host+:' '--isredeploy+:isredeploy:(true false)' '--keepreposdir+:keepreposdir:(true false)' '--keepstate+:keepstate:(true false)' '--port+:' '--properties+:' '--target+:target:_asadmin_targets_cluster_das_domain_standalone_instance') + ;; + unfreeze-transaction-service) + _command_args=('--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance') + ;; + uninstall-node) + _command_args=('--force+:force:(true false)' '--installdir+:' '--sshkeyfile+:' '--sshport+:' '--sshuser+:') + ;; + uninstall-node-dcom) + _command_args=('--force+:force:(true false)' '--installdir+:' '--windowsdomain+:' '--windowsuser+:') + ;; + uninstall-node-ssh) + _command_args=('--force+:force:(true false)' '--installdir+:' '--sshkeyfile+:' '--sshport+:' '--sshuser+:') + ;; + unset-web-context-param) + _command_args=('--host+:' '--name+:' '--port+:') + ;; + unset-web-env-entry) + _command_args=('--host+:' '--name+:' 
'--port+:') + ;; + update-connector-security-map) + _command_args=('--addprincipals+:' '--addusergroups+:' '--host+:' '--mappedpassword+:' '--mappedusername+:' '--poolname+:' '--port+:' '--removeprincipals+:' '--removeusergroups+:' '--target+:') + ;; + update-connector-work-security-map) + _command_args=('--addgroups+:' '--addprincipals+:' '--host+:' '--port+:' '--raname+:' '--removegroups+:' '--removeprincipals+:') + ;; + update-file-user) + _command_args=('--authrealmname+:' '--groups+:' '--host+:' '--port+:' '--target+:target:_asadmin_targets_cluster_config_das_standalone_instance' '--userpassword+:') + ;; + update-node-config) + _command_args=('*:nodes:_asadmin_nodes_config' '--host+:' '--installdir+:' '--nodedir+:' '--nodehost+:' '--port+:') + ;; + update-node-dcom) + _command_args=('*:nodes:_asadmin_nodes_dcom' '--force+:force:(true false)' '--host+:' '--installdir+:' '--nodedir+:' '--nodehost+:' '--port+:' '--windowsdomain+:' '--windowspassword+:' '--windowsuser+:') + ;; + update-node-ssh) + _command_args=('*:nodes:_asadmin_nodes_ssh' '--force+:force:(true false)' '--host+:' '--installdir+:' '--nodedir+:' '--nodehost+:' '--port+:' '--sshkeyfile+:' '--sshkeypassphrase+:' '--sshpassword+:' '--sshport+:' '--sshuser+:') + ;; + update-password-alias) + _command_args=('--aliaspassword+:' '--host+:' '--port+:') + ;; + uptime) + _command_args=('--host+:' '--milliseconds+:milliseconds:(true false)' '--port+:') + ;; + validate-dcom) + _command_args=('--host+:' '--port+:' '--remotetestdir+:' '--verbose+:verbose:(true false)' '--windowsdomain+:' '--windowspassword+:' '--windowsuser+:') + ;; + validate-multicast) + _command_args=('--bindaddress+:' '--multicastaddress+:' '--multicastport+:' '--sendperiod+:' '--timeout+:' '--timetolive+:' '--verbose+:verbose:(true false)') + ;; + verify-domain-xml) + _command_args=('--domaindir+:') + ;; + version) + _command_args=('--local+:local:(true false)' '--terse+:terse:(true false)' '--verbose+:verbose:(true false)') + ;; +esac + + +_asadmin_applications() { + compadd $(command asadmin list-applications --terse | sed 's/\s.*//') +} + +_asadmin_clusters() { + compadd $(command asadmin list-clusters --terse | sed 's/\s.*//') +} + +_asadmin_configs() { + compadd $(command asadmin list-configs --terse) +} + +_asadmin_instances() { + compadd $(command asadmin list-instances --terse --nostatus domain) +} + +_asadmin_instances_standalone() { + compadd $(command asadmin list-instances --terse --standaloneonly --nostatus domain) +} + +_asadmin_libraries() { + compadd $(command asadmin list-libraries --terse) +} + +_asadmin_nodes() { + compadd $(command asadmin list-nodes --terse) +} + +_asadmin_nodes_config() { + compadd $(command asadmin list-nodes-config --terse) +} + +_asadmin_nodes_dcom() { + compadd $(command asadmin list-nodes-dcom --terse) +} + +_asadmin_nodes_ssh() { + compadd $(command asadmin list-nodes-ssh --terse) +} + +_asadmin_targets() { + _asadmin_instances + _asadmin_clusters + _asadmin_configs + compadd domain server +} + +_asadmin_targets_cluster_clustered_instance_config_das_domain_standalone_instance() { + _asadmin_instances + _asadmin_clusters + _asadmin_configs + compadd domain server +} + +_asadmin_targets_cluster_clustered_instance_config_das_standalone_instance() { + _asadmin_instances + _asadmin_clusters + _asadmin_configs + compadd server +} + +_asadmin_targets_cluster_clustered_instance_das_domain_standalone_instance() { + _asadmin_instances + _asadmin_clusters + compadd domain server +} + 
+_asadmin_targets_cluster_clustered_instance_das_standalone_instance() { + _asadmin_instances + _asadmin_clusters + _asadmin_configs + compadd server +} + +_asadmin_targets_cluster_config_das_standalone_instance() { + _asadmin_instances_standalone + _asadmin_clusters + _asadmin_configs + compadd server +} + +_asadmin_targets_cluster_das_domain_standalone_instance() { + _asadmin_instances_standalone + _asadmin_clusters + compadd domain server +} + +_asadmin_targets_cluster_das_standalone_instance() { + _asadmin_instances_standalone + _asadmin_clusters + compadd server +} + +_asadmin_targets_clustered_instance_das() { + _asadmin_instances + compadd server +} + +_asadmin_targets_clustered_instance_das_standalone_instance() { + _asadmin_instances + compadd server +} + +_asadmin_targets_cluster_standalone_instance() { + _asadmin_clusters + _asadmin_instances_standalone +} + + +compadd '--help' +_arguments \ + $_command_args \ + && return 0; diff --git a/plugins/glassfish/glassfish.plugin.zsh b/plugins/glassfish/glassfish.plugin.zsh new file mode 100644 index 000000000..fde2edb2a --- /dev/null +++ b/plugins/glassfish/glassfish.plugin.zsh @@ -0,0 +1,3 @@ +# if there is a user named 'glassfish' on the system, we'll assume +# that is the user asadmin should be run as +# grep -e '^glassfish' /etc/passwd > /dev/null && alias asadmin='sudo -u glassfish asadmin'
\ No newline at end of file diff --git a/plugins/go/go.plugin.zsh b/plugins/go/go.plugin.zsh new file mode 120000 index 000000000..cf943e2e1 --- /dev/null +++ b/plugins/go/go.plugin.zsh @@ -0,0 +1 @@ +../golang/golang.plugin.zsh
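Note on the go/golang pair above: mode 120000 marks go.plugin.zsh as a symlink whose target is ../golang/golang.plugin.zsh, so enabling either plugin name loads the same file. A minimal sketch of the .zshrc side (the plugin list shown is illustrative):

```sh
# ~/.zshrc: either name resolves to the same completions defined below
plugins=(golang)   # or: plugins=(go)
```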
\ No newline at end of file diff --git a/plugins/golang/golang.plugin.zsh b/plugins/golang/golang.plugin.zsh new file mode 100644 index 000000000..f2be6ca9c --- /dev/null +++ b/plugins/golang/golang.plugin.zsh @@ -0,0 +1,163 @@ +# install in /etc/zsh/zshrc or your personal .zshrc + +# gc +prefixes=(5 6 8) +for p in $prefixes; do + compctl -g "*.${p}" ${p}l + compctl -g "*.go" ${p}g +done + +# standard go tools +compctl -g "*.go" gofmt + +# gccgo +compctl -g "*.go" gccgo + +# go tool +__go_tool_complete() { + typeset -a commands build_flags + commands+=( + 'build[compile packages and dependencies]' + 'clean[remove object files]' + 'doc[run godoc on package sources]' + 'env[print Go environment information]' + 'fix[run go tool fix on packages]' + 'fmt[run gofmt on package sources]' + 'get[download and install packages and dependencies]' + 'help[display help]' + 'install[compile and install packages and dependencies]' + 'list[list packages]' + 'run[compile and run Go program]' + 'test[test packages]' + 'tool[run specified go tool]' + 'version[print Go version]' + 'vet[run go tool vet on packages]' + ) + if (( CURRENT == 2 )); then + # explain go commands + _values 'go tool commands' ${commands[@]} + return + fi + build_flags=( + '-a[force reinstallation of packages that are already up-to-date]' + '-n[print the commands but do not run them]' + '-p[number of parallel builds]:number' + '-race[enable data race detection]' + '-x[print the commands]' + '-work[print temporary directory name and keep it]' + '-ccflags[flags for 5c/6c/8c]:flags' + '-gcflags[flags for 5g/6g/8g]:flags' + '-ldflags[flags for 5l/6l/8l]:flags' + '-gccgoflags[flags for gccgo]:flags' + '-compiler[name of compiler to use]:name' + '-installsuffix[suffix to add to package directory]:suffix' + '-tags[list of build tags to consider satisfied]:tags' + ) + __go_list() { + local expl importpaths + declare -a importpaths + importpaths=($(go list ${words[$CURRENT]}... 
2>/dev/null)) + _wanted importpaths expl 'import paths' compadd "$@" - "${importpaths[@]}" + } + case ${words[2]} in + clean|doc) + _arguments -s -w : '*:importpaths:__go_list' + ;; + fix|fmt|list|vet) + _alternative ':importpaths:__go_list' ':files:_path_files -g "*.go"' + ;; + install) + _arguments -s -w : ${build_flags[@]} \ + "-v[show package names]" \ + '*:importpaths:__go_list' + ;; + get) + _arguments -s -w : \ + ${build_flags[@]} + ;; + build) + _arguments -s -w : \ + ${build_flags[@]} \ + "-v[show package names]" \ + "-o[output file]:file:_files" \ + "*:args:{ _alternative ':importpaths:__go_list' ':files:_path_files -g \"*.go\"' }" + ;; + test) + _arguments -s -w : \ + ${build_flags[@]} \ + "-c[do not run, compile the test binary]" \ + "-i[do not run, install dependencies]" \ + "-v[print test output]" \ + "-x[print the commands]" \ + "-short[use short mode]" \ + "-parallel[number of parallel tests]:number" \ + "-cpu[values of GOMAXPROCS to use]:number list" \ + "-run[run tests and examples matching regexp]:regexp" \ + "-bench[run benchmarks matching regexp]:regexp" \ + "-benchmem[print memory allocation stats]" \ + "-benchtime[run each benchmark until taking this long]:duration" \ + "-blockprofile[write goroutine blocking profile to file]:file" \ + "-blockprofilerate[set sampling rate of goroutine blocking profile]:number" \ + "-timeout[kill test after that duration]:duration" \ + "-cpuprofile[write CPU profile to file]:file:_files" \ + "-memprofile[write heap profile to file]:file:_files" \ + "-memprofilerate[set heap profiling rate]:number" \ + "*:args:{ _alternative ':importpaths:__go_list' ':files:_path_files -g \"*.go\"' }" + ;; + help) + _values "${commands[@]}" \ + 'gopath[GOPATH environment variable]' \ + 'packages[description of package lists]' \ + 'remote[remote import path syntax]' \ + 'testflag[description of testing flags]' \ + 'testfunc[description of testing functions]' + ;; + run) + _arguments -s -w : \ + ${build_flags[@]} \ + '*:file:_path_files -g "*.go"' + ;; + tool) + if (( CURRENT == 3 )); then + _values "go tool" $(go tool) + return + fi + case ${words[3]} in + [568]g) + _arguments -s -w : \ + '-I[search for packages in DIR]:includes:_path_files -/' \ + '-L[show full path in file:line prints]' \ + '-S[print the assembly language]' \ + '-V[print the compiler version]' \ + '-e[no limit on number of errors printed]' \ + '-h[panic on an error]' \ + '-l[disable inlining]' \ + '-m[print optimization decisions]' \ + '-o[file specify output file]:file' \ + '-p[assumed import path for this code]:importpath' \ + '-u[disable package unsafe]' \ + "*:file:_files -g '*.go'" + ;; + [568]l) + local O=${words[3]%l} + _arguments -s -w : \ + '-o[file specify output file]:file' \ + '-L[search for packages in DIR]:includes:_path_files -/' \ + "*:file:_files -g '*.[ao$O]'" + ;; + dist) + _values "dist tool" banner bootstrap clean env install version + ;; + *) + # use files by default + _files + ;; + esac + ;; + esac +} + +compdef __go_tool_complete go + +# aliases +alias gfa='go fmt . ./...' diff --git a/plugins/gpg-agent/gpg-agent.plugin.zsh b/plugins/gpg-agent/gpg-agent.plugin.zsh index 8cc71fd57..3e6a34f42 100644 --- a/plugins/gpg-agent/gpg-agent.plugin.zsh +++ b/plugins/gpg-agent/gpg-agent.plugin.zsh @@ -1,26 +1,41 @@ -# Based on ssh-agent code - local GPG_ENV=$HOME/.gnupg/gpg-agent.env -function start_agent { - /usr/bin/env gpg-agent --daemon --enable-ssh-support --write-env-file ${GPG_ENV} > /dev/null - chmod 600 ${GPG_ENV} - . 
${GPG_ENV} > /dev/null +function start_agent_nossh { + eval $(/usr/bin/env gpg-agent --quiet --daemon --write-env-file ${GPG_ENV} 2> /dev/null) + chmod 600 ${GPG_ENV} + export GPG_AGENT_INFO } -# Source GPG agent settings, if applicable -if [ -f "${GPG_ENV}" ]; then - . ${GPG_ENV} > /dev/null - ps -ef | grep ${SSH_AGENT_PID} | grep gpg-agent > /dev/null || { - start_agent; - } -else - start_agent; -fi +function start_agent_withssh { + eval $(/usr/bin/env gpg-agent --quiet --daemon --enable-ssh-support --write-env-file ${GPG_ENV} 2> /dev/null) + chmod 600 ${GPG_ENV} + export GPG_AGENT_INFO + export SSH_AUTH_SOCK + export SSH_AGENT_PID +} -export GPG_AGENT_INFO -export SSH_AUTH_SOCK -export SSH_AGENT_PID +# check if another agent is running +if ! gpg-connect-agent --quiet /bye > /dev/null 2> /dev/null; then + # source settings of old agent, if applicable + if [ -f "${GPG_ENV}" ]; then + . ${GPG_ENV} > /dev/null + export GPG_AGENT_INFO + export SSH_AUTH_SOCK + export SSH_AGENT_PID + fi + + # check again if another agent is running using the newly sourced settings + if ! gpg-connect-agent --quiet /bye > /dev/null 2> /dev/null; then + # check for existing ssh-agent + if ssh-add -l > /dev/null 2> /dev/null; then + # ssh-agent running, start gpg-agent without ssh support + start_agent_nossh; + else + # otherwise start gpg-agent with ssh support + start_agent_withssh; + fi + fi +fi GPG_TTY=$(tty) export GPG_TTY diff --git a/plugins/gradle/gradle.plugin.zsh b/plugins/gradle/gradle.plugin.zsh index fc4c78c50..97bf50b43 100644 --- a/plugins/gradle/gradle.plugin.zsh +++ b/plugins/gradle/gradle.plugin.zsh @@ -54,27 +54,14 @@ function in_gradle() { fi } -############################################################################ -# Define the stat_cmd command based on platform behavior -########################################################################## -stat -f%m . > /dev/null 2>&1 -if [ "$?" = 0 ]; then - stat_cmd=(stat -f%m) -else - stat_cmd=(stat -L --format=%Y) -fi - ############################################################################## Examine the build.gradle file to see if its # timestamp has changed, and if so, regen # the .gradle_tasks cache file ############################################################################ _gradle_does_task_list_need_generating () { - if [ ! -f .gradletasknamecache ]; then return 0; - else - accurate=$($stat_cmd .gradletasknamecache) - changed=$($stat_cmd build.gradle) - return $(expr $accurate '>=' $changed) - fi + [ ! 
-f .gradletasknamecache ] && return 0; + [ build.gradle -nt .gradletasknamecache ] && return 0; + return 1; } diff --git a/plugins/grails/grails.plugin.zsh b/plugins/grails/grails.plugin.zsh index cc6f9c53b..11777738c 100755..100644 --- a/plugins/grails/grails.plugin.zsh +++ b/plugins/grails/grails.plugin.zsh @@ -24,17 +24,23 @@ _enumerateGrailsScripts() { return fi - # - Strip the path - # - Remove all scripts with a leading '_' - # - PackagePlugin_.groovy -> PackagePlugin - # - PackagePlugin -> Package-Plugin - # - Package-Plugin -> package-plugin - basename $files \ - | sed -E -e 's/^_?([^_]+)_?.groovy/\1/'\ - -e 's/([a-z])([A-Z])/\1-\2/g' \ - | tr "[:upper:]" "[:lower:]" \ - | sort \ - | uniq + scripts=() + for file in $files + do + # - Strip the path + # - Remove all scripts with a leading '_' + # - PackagePlugin_.groovy -> PackagePlugin + # - PackagePlugin -> Package-Plugin + # - Package-Plugin -> package-plugin + command=$(basename $file \ + | sed -E -e 's/^_?([^_]+)_?.groovy/\1/'\ + -e 's/([a-z])([A-Z])/\1-\2/g' \ + | tr "[:upper:]" "[:lower:]" \ + | sort \ + | uniq) + scripts+=($command) + done + echo $scripts } _grails() { diff --git a/plugins/heroku/_heroku b/plugins/heroku/_heroku index a95c38647..46663303a 100644 --- a/plugins/heroku/_heroku +++ b/plugins/heroku/_heroku @@ -23,8 +23,10 @@ _1st_arguments=( "auth\:login":"log in with your heroku credentials" "auth\:logout":"clear local authentication credentials" "config":"display the config vars for an app" - "config\:add":"add one or more config vars" - "config\:remove":"remove a config var" + "config\:pull":"pull heroku config vars down to the local environment" + "config\:push":"push local config vars to heroku" + "config\:set":"set one or more config vars" + "config\:unset":"unset one or more config vars" "db\:push":"push local data up to your app" "db\:pull":"pull heroku data down into your local database" "domains":"list custom domains for an app" diff --git a/plugins/history-substring-search/history-substring-search.zsh b/plugins/history-substring-search/history-substring-search.zsh index 53f707c79..22f03dd6d 100644 --- a/plugins/history-substring-search/history-substring-search.zsh +++ b/plugins/history-substring-search/history-substring-search.zsh @@ -163,8 +163,13 @@ function history-substring-search-down() { zle -N history-substring-search-up zle -N history-substring-search-down -bindkey '\e[A' history-substring-search-up -bindkey '\e[B' history-substring-search-down +zmodload zsh/terminfo +if [[ -n "$terminfo[kcuu1]" ]]; then + bindkey "$terminfo[kcuu1]" history-substring-search-up +fi +if [[ -n "$terminfo[kcud1]" ]]; then + bindkey "$terminfo[kcud1]" history-substring-search-down +fi #----------------------------------------------------------------------------- # implementation details diff --git a/plugins/iwhois/iwhois.plugin.zsh b/plugins/iwhois/iwhois.plugin.zsh new file mode 100644 index 000000000..38790bf28 --- /dev/null +++ b/plugins/iwhois/iwhois.plugin.zsh @@ -0,0 +1,8 @@ +# provide a whois command with a more accurate and up to date list of whois +# servers using CNAMES via whois.geek.nz + +function iwhois() { + resolver="whois.geek.nz" + tld=`echo ${@: -1} | awk -F "." 
'{print $NF}'` + whois -h ${tld}.${resolver} "$@" ; +} diff --git a/plugins/jira/jira.plugin.zsh b/plugins/jira/jira.plugin.zsh index b91f93c95..3d510e430 100644 --- a/plugins/jira/jira.plugin.zsh +++ b/plugins/jira/jira.plugin.zsh @@ -3,7 +3,7 @@ # .jira-url in the current directory takes precedence # # If you use Rapid Board, set: -#JIRA_RAPID_BOARD="yes" +#JIRA_RAPID_BOARD="true" # in you .zshrc # # Setup: cd to/my/project @@ -11,6 +11,13 @@ # Usage: jira # opens a new issue # jira ABC-123 # Opens an existing issue open_jira_issue () { + local open_cmd + if [[ $(uname -s) == 'Darwin' ]]; then + open_cmd='open' + else + open_cmd='xdg-open' + fi + if [ -f .jira-url ]; then jira_url=$(cat .jira-url) elif [ -f ~/.jira-url ]; then @@ -22,15 +29,23 @@ open_jira_issue () { return 0 fi + if [ -f .jira-prefix ]; then + jira_prefix=$(cat .jira-prefix) + elif [ -f ~/.jira-prefix ]; then + jira_prefix=$(cat ~/.jira-prefix) + else + jira_prefix="" + fi + if [ -z "$1" ]; then echo "Opening new issue" - `open $jira_url/secure/CreateIssue!default.jspa` + $open_cmd "$jira_url/secure/CreateIssue!default.jspa" else echo "Opening issue #$1" - if [[ "x$JIRA_RAPID_BOARD" = "yes" ]]; then - `open $jira_url/issues/$1` + if [[ "x$JIRA_RAPID_BOARD" = "xtrue" ]]; then + $open_cmd "$jira_url/issues/$jira_prefix$1" else - `open $jira_url/browse/$1` + $open_cmd "$jira_url/browse/$jira_prefix$1" fi fi } diff --git a/plugins/jruby/jruby.plugin.zsh b/plugins/jruby/jruby.plugin.zsh index bb7975b10..bb7975b10 100755..100644 --- a/plugins/jruby/jruby.plugin.zsh +++ b/plugins/jruby/jruby.plugin.zsh diff --git a/plugins/jsontools/README.md b/plugins/jsontools/README.md new file mode 100644 index 000000000..4faf58b98 --- /dev/null +++ b/plugins/jsontools/README.md @@ -0,0 +1,42 @@ +# jsontools + +Handy command line tools for dealing with json data. + +## Tools + +- **pp_json** - pretty prints json +- **is_json** - returns true if valid json; false otherwise +- **urlencode_json** - returns a url encoded string for the given json +- **urldecode_json** - returns decoded json for the given url encoded string + +## Usage +Usage is simple...just take your json data and pipe it into the appropriate jsontool. +```sh +<json data> | <jsontools tool> +``` +## Examples + +##### pp_json + +```sh +# curl json data and pretty print the results +curl https://coderwall.com/bobwilliams.json | pp_json +``` + +##### is_json +```sh +# pretty print the contents of an existing json file +less data.json | is_json +``` + +##### urlencode_json +```sh +# json data directly from the command line +echo '{"b":2, "a":1}' | urlencode_json +``` + +##### urldecode_json +```sh +# url encoded string to decode +echo '%7B%22b%22:2,%20%22a%22:1%7D%0A' | urldecode_json +```
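For the jira plugin change above, the URL and the new optional prefix are read from plain files, and Rapid Board style links are switched on via JIRA_RAPID_BOARD; a sketch of a per-project setup (the hostname and prefix are examples):

```sh
# inside a project checkout (values are illustrative)
echo "https://jira.example.com" > .jira-url   # read by open_jira_issue
echo "ABC-" > .jira-prefix                    # prepended to the issue number

# optionally, in ~/.zshrc, for Rapid Board links:
# JIRA_RAPID_BOARD="true"

jira 123   # opens https://jira.example.com/browse/ABC-123
```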
\ No newline at end of file diff --git a/plugins/jsontools/jsontools.plugin.zsh b/plugins/jsontools/jsontools.plugin.zsh new file mode 100644 index 000000000..20d5eb1c9 --- /dev/null +++ b/plugins/jsontools/jsontools.plugin.zsh @@ -0,0 +1,39 @@ +# JSON Tools +# Adds command line aliases useful for dealing with JSON + +if [[ $(whence $JSONTOOLS_METHOD) = "" ]]; then + JSONTOOLS_METHOD="" +fi + +if [[ $(whence node) != "" && ( "x$JSONTOOLS_METHOD" = "x" || "x$JSONTOOLS_METHOD" = "xnode" ) ]]; then + alias pp_json='xargs -0 node -e "console.log(JSON.stringify(JSON.parse(process.argv[1]), null, 4));"' + alias is_json='xargs -0 node -e "try {json = JSON.parse(process.argv[1]);} catch (e) { console.log(false); json = null; } if(json) { console.log(true); }"' + alias urlencode_json='xargs -0 node -e "console.log(encodeURIComponent(process.argv[1]))"' + alias urldecode_json='xargs -0 node -e "console.log(decodeURIComponent(process.argv[1]))"' +elif [[ $(whence python) != "" && ( "x$JSONTOOLS_METHOD" = "x" || "x$JSONTOOLS_METHOD" = "xpython" ) ]]; then + alias pp_json='python -mjson.tool' + alias is_json='python -c " +import json, sys; +try: + json.loads(sys.stdin.read()) +except ValueError, e: + print False +else: + print True +sys.exit(0)"' + alias urlencode_json='python -c " +import urllib, json, sys; +print urllib.quote_plus(sys.stdin.read()) +sys.exit(0)"' + alias urldecode_json='python -c " +import urllib, json, sys; +print urllib.unquote_plus(sys.stdin.read()) +sys.exit(0)"' +elif [[ $(whence ruby) != "" && ( "x$JSONTOOLS_METHOD" = "x" || "x$JSONTOOLS_METHOD" = "xruby" ) ]]; then + alias pp_json='ruby -e "require \"json\"; require \"yaml\"; puts JSON.parse(STDIN.read).to_yaml"' + alias is_json='ruby -e "require \"json\"; begin; JSON.parse(STDIN.read); puts true; rescue Exception => e; puts false; end"' + alias urlencode_json='ruby -e "require \"uri\"; puts URI.escape(STDIN.read)"' + alias urldecode_json='ruby -e "require \"uri\"; puts URI.unescape(STDIN.read)"' +fi + +unset JSONTOOLS_METHOD
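jsontools picks the first backend it finds (node, then python, then ruby) unless JSONTOOLS_METHOD names one explicitly; a sketch of pinning the backend, assuming it is installed (the plugin list is illustrative):

```sh
# ~/.zshrc: set before oh-my-zsh loads the plugin
JSONTOOLS_METHOD="python"      # or "node" / "ruby"
plugins=(jsontools)            # plus whatever other plugins you use

# afterwards, in a shell:
echo '{"b":2, "a":1}' | pp_json   # pretty-printed via python -mjson.tool
```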
\ No newline at end of file diff --git a/plugins/jump/jump.plugin.zsh b/plugins/jump/jump.plugin.zsh new file mode 100644 index 000000000..1b23b5d42 --- /dev/null +++ b/plugins/jump/jump.plugin.zsh @@ -0,0 +1,58 @@ +# Easily jump around the file system by manually adding marks +# marks are stored as symbolic links in the directory $MARKPATH (default $HOME/.marks) +# +# jump FOO: jump to a mark named FOO +# mark FOO: create a mark named FOO +# unmark FOO: delete a mark +# marks: lists all marks +# +export MARKPATH=$HOME/.marks + +jump() { + cd -P "$MARKPATH/$1" 2>/dev/null || echo "No such mark: $1" +} + +mark() { + if (( $# == 0 )); then + MARK=$(basename "$(pwd)") + else + MARK="$1" + fi + if read -q \?"Mark $(pwd) as ${MARK}? (y/n) "; then + mkdir -p "$MARKPATH"; ln -s "$(pwd)" "$MARKPATH/$MARK" + fi +} + +unmark() { + rm -i "$MARKPATH/$1" +} + +autoload colors +marks() { + for link in $MARKPATH/*(@); do + local markname="$fg[cyan]${link:t}$reset_color" + local markpath="$fg[blue]$(readlink $link)$reset_color" + printf "%s\t" $markname + printf "-> %s \t\n" $markpath + done +} + +_completemarks() { + if [[ $(ls "${MARKPATH}" | wc -l) -gt 1 ]]; then + reply=($(ls $MARKPATH/**/*(-) | grep : | sed -E 's/(.*)\/([_a-zA-Z0-9\.\-]*):$/\2/g')) + else + if readlink -e "${MARKPATH}"/* &>/dev/null; then + reply=($(ls "${MARKPATH}")) + fi + fi +} +compctl -K _completemarks jump +compctl -K _completemarks unmark + +_mark_expansion() { + setopt extendedglob + autoload -U modify-current-argument + modify-current-argument '$(readlink "$MARKPATH/$ARG")' +} +zle -N _mark_expansion +bindkey "^g" _mark_expansion diff --git a/plugins/knife/_knife b/plugins/knife/_knife index dec491257..163149267 100644 --- a/plugins/knife/_knife +++ b/plugins/knife/_knife @@ -1,5 +1,10 @@ #compdef knife +# You can override the path to knife.rb and your cookbooks by setting +# KNIFE_CONF_PATH=/path/to/my/.chef/knife.rb +# KNIFE_COOKBOOK_PATH=/path/to/my/chef/cookbooks +# Read around where these are used for more detail. 
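A sketch of the override described in the comment just above, for a chef repository that does not live in ./.chef or ~/.chef (paths are placeholders):

```sh
# ~/.zshrc
KNIFE_CONF_PATH=/path/to/my/.chef/knife.rb
KNIFE_COOKBOOK_PATH=/path/to/my/chef/cookbooks
```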
+ # These flags should be available everywhere according to man knife knife_general_flags=( --help --server-url --key --config --editor --format --log_level --logfile --no-editor --user --print-after --version --yes ) @@ -26,7 +31,7 @@ _knife() { case $state in knifecmd) - compadd -Q "$@" bootstrap client configure cookbook "cookbook site" "data bag" exec environment index node recipe role search ssh status windows $cloudproviders + compadd -Q "$@" bootstrap client configure cookbook "cookbook site" "data bag" diff exec environment index node recipe role search ssh status upload vault windows $cloudproviders ;; knifesubcmd) case $words[2] in @@ -42,9 +47,12 @@ _knife() { cookbook) compadd -Q "$@" test list create download delete "metadata from" show "bulk delete" metadata upload ;; - environment) + diff) + _arguments '*:file or directory:_files -g "*"' + ;; + environment) compadd -Q "$@" list create delete edit show "from file" - ;; + ;; node) compadd -Q "$@" "from file" create show edit delete list run_list "bulk delete" ;; @@ -54,6 +62,12 @@ _knife() { role) compadd -Q "$@" "bulk delete" create delete edit "from file" list show ;; + upload) + _arguments '*:file or directory:_files -g "*"' + ;; + vault) + compadd -Q "$@" create decrypt delete edit remove "rotate all keys" "rotate keys" show update + ;; windows) compadd "$@" bootstrap ;; @@ -170,11 +184,13 @@ _chef_environments_remote() { # The chef_x_local functions use the knife config to find the paths of relevant objects x to be uploaded to the server _chef_cookbooks_local() { - local knife_rb="$HOME/.chef/knife.rb" - if [ -f ./.chef/knife.rb ]; then - knife_rb="./.chef/knife.rb" - fi - (for i in $( grep cookbook_path $knife_rb | awk 'BEGIN {FS = "[" }; {print $2}' | sed 's/\,//g' | sed "s/'//g" | sed 's/\(.*\)]/\1/' ); do ls $i; done) + + local knife_rb=${KNIFE_CONF_PATH:-${HOME}/.chef/knife.rb} + if [ -f ./.chef/knife.rb ]; then + knife_rb="./.chef/knife.rb" + fi + local cookbook_path=${KNIFE_COOKBOOK_PATH:-$(grep cookbook_path $knife_rb | awk 'BEGIN {FS = "[" }; {print $2}' | sed 's/\,//g' | sed "s/'//g" | sed 's/\(.*\)]/\1/' )} + (for i in $cookbook_path; do ls $i; done) } # This function extracts the available cookbook versions on the chef server diff --git a/plugins/knife_ssh/knife_ssh.plugin.zsh b/plugins/knife_ssh/knife_ssh.plugin.zsh new file mode 100644 index 000000000..7fdd42a1e --- /dev/null +++ b/plugins/knife_ssh/knife_ssh.plugin.zsh @@ -0,0 +1,18 @@ +function knife_ssh() { + grep -q $1 ~/.knife_comp~ 2> /dev/null || rm -f ~/.knife_comp~; + ssh $(knife node show $1 | awk '/IP:/{print $2}') +} + +_knife_ssh() { + if hash knife 2>/dev/null; then + if [[ ! -f ~/.knife_comp~ ]]; then + echo "\nGenerating ~/.knife_comp~..." 
>/dev/stderr + knife node list > ~/.knife_comp~ + fi + compadd $(<~/.knife_comp~) + else + echo "Could not find knife" > /dev/stderr; + fi +} + +compdef _knife_ssh knife_ssh diff --git a/plugins/laravel4/laravel4.plugin.zsh b/plugins/laravel4/laravel4.plugin.zsh new file mode 100644 index 000000000..4b1022b66 --- /dev/null +++ b/plugins/laravel4/laravel4.plugin.zsh @@ -0,0 +1,20 @@ +# Laravel4 basic command completion +_laravel4_get_command_list () { + php artisan --no-ansi | sed "1,/Available commands/d" | awk '/^ [a-z]+/ { print $1 }' +} + +_laravel4 () { + if [ -f artisan ]; then + compadd `_laravel4_get_command_list` + fi +} + +compdef _laravel4 artisan +compdef _laravel4 la4 + +#Alias +alias la4='php artisan' + +alias la4dump='php artisan dump-autoload' +alias la4cache='php artisan cache:clear' +alias la4routes='php artisan routes' diff --git a/plugins/last-working-dir/last-working-dir.plugin.zsh b/plugins/last-working-dir/last-working-dir.plugin.zsh index 190bc279d..bc36c80db 100644 --- a/plugins/last-working-dir/last-working-dir.plugin.zsh +++ b/plugins/last-working-dir/last-working-dir.plugin.zsh @@ -9,7 +9,8 @@ local cache_file="$ZSH/cache/last-working-dir" # Updates the last directory once directory is changed. function chpwd() { - echo "$PWD" > "$cache_file" + # Use >| in case noclobber is set to avoid "file exists" error + pwd >| "$cache_file" } # Changes directory to the last working directory. diff --git a/plugins/lein/lein.plugin.zsh b/plugins/lein/lein.plugin.zsh index 19af3556a..11c92979b 100644 --- a/plugins/lein/lein.plugin.zsh +++ b/plugins/lein/lein.plugin.zsh @@ -5,15 +5,29 @@ function _lein_commands() { case $state in subcommand) subcommands=( + "classpath:print the classpath of the current project" "clean:remove compiled files and dependencies from project" "compile:ahead-of-time compile the project" + "deploy:build jar and deploy to remote repository" "deps:download and install all dependencies" "help:display a list of tasks or help for a given task" "install:install the project and its dependencies in your local repository" + "int:enter an interactive task shell" + "interactive:enter an interactive task shell" + "jack-in:jack in to a clojure slime session from emacs." "jar:create a jar file containing the compiled .class files" + "javac:compile java source files" "new:create a new project skeleton" + "plugin:manage user-level plugins" "pom:write a pom.xml file to disk for maven interop" + "repl:start a repl session either with the current project or standalone" + "retest:run only the test namespaces which failed last time around" + "run:run the project's -main function" + "search:search remote maven repositories for matching jars" + "swank:launch swank server for Emacs to connect" "test:run the project's tests" + "test!:run a project's tests after cleaning and fetching dependencies" + "trampoline:run a task without nesting the project's JVM inside Leiningen's." 
"uberjar:Create a jar including the contents of each of deps" "upgrade:upgrade leiningen to the latest stable release" "version:print leiningen's version" diff --git a/plugins/lol/lol.plugin.zsh b/plugins/lol/lol.plugin.zsh index ae065c12f..1b32ec2e4 100644 --- a/plugins/lol/lol.plugin.zsh +++ b/plugins/lol/lol.plugin.zsh @@ -36,3 +36,15 @@ alias nomnom='killall' alias byes='exit' alias cya='reboot' alias kthxbai='halt' + +alias pwned='ssh' + +alias hackzor='git init' +alias rulz='git push' +alias bringz='git pull' +alias chicken='git add' +alias oanward='git commit -m' +alias ooanward='git commit -am' +alias letcat='git checkout' +alias violenz='git rebase' + diff --git a/plugins/mercurial/mercurial.plugin.zsh b/plugins/mercurial/mercurial.plugin.zsh index a3a6ff8b6..ff95d5e40 100644 --- a/plugins/mercurial/mercurial.plugin.zsh +++ b/plugins/mercurial/mercurial.plugin.zsh @@ -1,20 +1,66 @@ - # Mercurial alias hgc='hg commit' alias hgb='hg branch' alias hgba='hg branches' +alias hgbk='hg bookmarks' alias hgco='hg checkout' alias hgd='hg diff' alias hged='hg diffmerge' # pull and update +alias hgi='hg incoming' alias hgl='hg pull -u' +alias hglr='hg pull --rebase' +alias hgo='hg outgoing' alias hgp='hg push' alias hgs='hg status' +alias hgsl='hg log --limit 20 --template "{node|short} | {date|isodatesec} | {author|user}: {desc|strip|firstline}\n" ' # this is the 'git commit --amend' equivalent alias hgca='hg qimport -r tip ; hg qrefresh -e ; hg qfinish tip' +# list unresolved files (since hg does not list unmerged files in the status command) +alias hgun='hg resolve --list' + +function in_hg() { + if [[ -d .hg ]] || $(hg summary > /dev/null 2>&1); then + echo 1 + fi +} + +function hg_get_branch_name() { + if [ $(in_hg) ]; then + echo $(hg branch) + fi +} -function hg_current_branch() { - if [ -d .hg ]; then - echo hg:$(hg branch) +function hg_prompt_info { + if [ $(in_hg) ]; then + _DISPLAY=$(hg_get_branch_name) + echo "$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_HG_PROMPT_PREFIX\ +$ZSH_THEME_REPO_NAME_COLOR$_DISPLAY$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_HG_PROMPT_SUFFIX$ZSH_PROMPT_BASE_COLOR$(hg_dirty)$ZSH_PROMPT_BASE_COLOR" + unset _DISPLAY fi -}
\ No newline at end of file +} + +function hg_dirty_choose { + if [ $(in_hg) ]; then + hg status 2> /dev/null | grep -Eq '^\s*[ACDIM!?L]' + if [ $pipestatus[-1] -eq 0 ]; then + # Grep exits with 0 when "One or more lines were selected", return "dirty". + echo $1 + else + # Otherwise, no lines were found, or an error occurred. Return clean. + echo $2 + fi + fi +} + +function hg_dirty { + hg_dirty_choose $ZSH_THEME_HG_PROMPT_DIRTY $ZSH_THEME_HG_PROMPT_CLEAN +} + +function hgic() { + hg incoming "$@" | grep "changeset" | wc -l +} + +function hgoc() { + hg outgoing "$@" | grep "changeset" | wc -l +} diff --git a/plugins/meteor/_meteor b/plugins/meteor/_meteor new file mode 100644 index 000000000..cd7fc304f --- /dev/null +++ b/plugins/meteor/_meteor @@ -0,0 +1,48 @@ +#compdef meteor +#autoload + +# Meteor Autocomplete plugin for Oh-My-Zsh, based on homebrew completion +# Original author: Dimitri JORGE (https://github.com/jorge-d) + +_meteor_all_packages() { + packages=(`meteor list | cut -d" " -f1`) +} +_meteor_installed_packages() { + installed_packages=(`meteor list --using`) +} + +local -a _1st_arguments +_1st_arguments=( + 'run:[Default] Run this project in local development mode' + 'create:Create a new project' + 'update:Upgrade this project to the latest version of Meteor' + 'add:Add a package to this project' + 'remove:Remove a package from this project' + 'list:List available packages' + 'help:Display Meteor help' + 'bundle:Pack this project up into a tarball' + 'mongo:Connect to the Mongo database for the specified site' + 'deploy:Deploy this project to Meteor' + 'logs:Show logs for specified site' + 'reset:Reset the project state. Erases the local database.' + 'test-packages:Test one or more packages' +) + +local expl +local -a packages installed_packages + +if (( CURRENT == 2 )); then + _describe -t commands "meteor subcommand" _1st_arguments + return +fi + +case "$words[2]" in + help) + _describe -t commands "meteor subcommand" _1st_arguments ;; + remove) + _meteor_installed_packages + _wanted installed_packages expl 'installed packages' compadd -a installed_packages ;; + add) + _meteor_all_packages + _wanted packages expl 'all packages' compadd -a packages ;; +esac
\ No newline at end of file diff --git a/plugins/mix/_mix b/plugins/mix/_mix new file mode 100644 index 000000000..602f5ffa0 --- /dev/null +++ b/plugins/mix/_mix @@ -0,0 +1,63 @@ +#compdef mix +#autoload + +# Elixir mix zsh completion + +local -a _1st_arguments +_1st_arguments=( + 'archive:Archive this project into a .ez file' + 'clean:Clean generated application files' + 'compile:Compile source files' + 'deps:List dependencies and their status' + "deps.clean:Remove dependencies' files" + 'deps.compile:Compile dependencies' + 'deps.get:Get all out of date dependencies' + 'deps.unlock:Unlock the given dependencies' + 'deps.update:Update dependencies' + 'do:Executes the commands separated by comma' + 'escriptize:Generates an escript for the project' + 'help:Print help information for tasks' + 'local:List local tasks' + 'local.install:Install a task or an archive locally' + 'local.rebar:Install rebar locally' + 'local.uninstall:Uninstall local tasks or archives' + 'new:Creates a new Elixir project' + 'run:Run the given file or expression' + "test:Run a project's tests" + '--help:Describe available tasks' + '--version:Prints the Elixir version information' +) + +__task_list () +{ + local expl + declare -a tasks + + tasks=(archive clean compile deps deps.clean deps.compile deps.get deps.unlock deps.update do escriptize help local local.install local.rebar local.uninstall new run test) + + _wanted tasks expl 'help' compadd $tasks +} + +local expl + +local curcontext="$curcontext" state line +typeset -A opt_args + +_arguments -C \ + ':command:->command' \ + '*::options:->options' + +case $state in + (command) + _describe -t commands "mix subcommand" _1st_arguments + return + ;; + + (options) + case $line[1] in + (help) + _arguments ':feature:__task_list' + esac + ;; +esac + diff --git a/plugins/mosh/mosh.plugin.zsh b/plugins/mosh/mosh.plugin.zsh new file mode 100644 index 000000000..ea36b7ee9 --- /dev/null +++ b/plugins/mosh/mosh.plugin.zsh @@ -0,0 +1,2 @@ +# Allow SSH tab completion for mosh hostnames +compdef mosh=ssh diff --git a/plugins/mvn/mvn.plugin.zsh b/plugins/mvn/mvn.plugin.zsh index da29b4f0a..a70625fcf 100644 --- a/plugins/mvn/mvn.plugin.zsh +++ b/plugins/mvn/mvn.plugin.zsh @@ -24,16 +24,18 @@ export RESET_FORMATTING=`tput sgr0` # Wrapper function for Maven's mvn command. mvn-color() { - # Filter mvn output using sed - mvn $@ | sed -e "s/\(\[INFO\]\ \-.*\)/${TEXT_BLUE}${BOLD}\1/g" \ - -e "s/\(\[INFO\]\ \[.*\)/${RESET_FORMATTING}${BOLD}\1${RESET_FORMATTING}/g" \ + ( + # Filter mvn output using sed. Before filtering set the locale to C, so invalid characters won't break some sed implementations + unset LANG + LC_CTYPE=C mvn $@ | sed -e "s/\(\[INFO\]\)\(.*\)/${TEXT_BLUE}${BOLD}\1${RESET_FORMATTING}\2/g" \ -e "s/\(\[INFO\]\ BUILD SUCCESSFUL\)/${BOLD}${TEXT_GREEN}\1${RESET_FORMATTING}/g" \ - -e "s/\(\[WARNING\].*\)/${BOLD}${TEXT_YELLOW}\1${RESET_FORMATTING}/g" \ - -e "s/\(\[ERROR\].*\)/${BOLD}${TEXT_RED}\1${RESET_FORMATTING}/g" \ + -e "s/\(\[WARNING\]\)\(.*\)/${BOLD}${TEXT_YELLOW}\1${RESET_FORMATTING}\2/g" \ + -e "s/\(\[ERROR\]\)\(.*\)/${BOLD}${TEXT_RED}\1${RESET_FORMATTING}\2/g" \ -e "s/Tests run: \([^,]*\), Failures: \([^,]*\), Errors: \([^,]*\), Skipped: \([^,]*\)/${BOLD}${TEXT_GREEN}Tests run: \1${RESET_FORMATTING}, Failures: ${BOLD}${TEXT_RED}\2${RESET_FORMATTING}, Errors: ${BOLD}${TEXT_RED}\3${RESET_FORMATTING}, Skipped: ${BOLD}${TEXT_YELLOW}\4${RESET_FORMATTING}/g" # Make sure formatting is reset echo -ne ${RESET_FORMATTING} + ) } # Override the mvn command with the colorized one. 
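The override that the comment above refers to lies outside this hunk; applying it by hand would be a one-liner along these lines (whether the plugin enables it by default is not shown here):

```sh
# route plain mvn invocations through the colorizing wrapper
alias mvn="mvn-color"
```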
@@ -42,14 +44,22 @@ mvn-color() # aliases alias mvncie='mvn clean install eclipse:eclipse' alias mvnci='mvn clean install' +alias mvncist='mvn clean install -DskipTests' alias mvne='mvn eclipse:eclipse' alias mvnce='mvn clean eclipse:clean eclipse:eclipse' alias mvnd='mvn deploy' alias mvnp='mvn package' alias mvnc='mvn clean' alias mvncom='mvn compile' +alias mvnct='mvn clean test' alias mvnt='mvn test' alias mvnag='mvn archetype:generate' +alias mvn-updates='mvn versions:display-dependency-updates' +alias mvntc7='mvn tomcat7:run' +alias mvntc='mvn tomcat:run' +alias mvnjetty='mvn jetty:run' +alias mvndt='mvn dependency:tree' +alias mvns='mvn site' function listMavenCompletions { reply=( @@ -114,11 +124,13 @@ function listMavenCompletions { # jboss jboss:start jboss:stop jboss:deploy jboss:undeploy jboss:redeploy # tomcat - tomcat:start tomcat:stop tomcat:deploy tomcat:undeploy tomcat:undeploy + tomcat:start tomcat:stop tomcat:deploy tomcat:undeploy tomcat:redeploy # tomcat6 tomcat6:run tomcat6:run-war tomcat6:run-war-only tomcat6:stop tomcat6:deploy tomcat6:undeploy # tomcat7 tomcat7:run tomcat7:run-war tomcat7:run-war-only tomcat7:deploy + # spring-boot + spring-boot:run spring-boot:repackage # exec exec:exec exec:java # versions @@ -163,7 +175,7 @@ function listMavenCompletions { cli:execute cli:execute-phase archetype:generate generate-sources cobertura:cobertura - -Dtest= `if [ -d ./src ] ; then find ./src/test/java -type f -name '*.java' | grep -v svn | sed 's?.*/\([^/]*\)\..*?-Dtest=\1?' ; fi` + -Dtest= `if [ -d ./src/test/java ] ; then find ./src/test/java -type f -name '*.java' | grep -v svn | sed 's?.*/\([^/]*\)\..*?-Dtest=\1?' ; fi` ); } diff --git a/plugins/node/node.plugin.zsh b/plugins/node/node.plugin.zsh index 3bbed6f04..2d78f2b4c 100644 --- a/plugins/node/node.plugin.zsh +++ b/plugins/node/node.plugin.zsh @@ -1,5 +1,13 @@ # Open the node api for your current version to the optional section. # TODO: Make the section part easier to use. function node-docs { - open "http://nodejs.org/docs/$(node --version)/api/all.html#all_$1" + # get the open command + local open_cmd + if [[ $(uname -s) == 'Darwin' ]]; then + open_cmd='open' + else + open_cmd='xdg-open' + fi + + $open_cmd "http://nodejs.org/docs/$(node --version)/api/all.html#all_$1" } diff --git a/plugins/nvm/_nvm b/plugins/nvm/_nvm new file mode 100644 index 000000000..a95c9e375 --- /dev/null +++ b/plugins/nvm/_nvm @@ -0,0 +1,26 @@ +#compdef nvm +#autoload + +[[ -s ~/.nvm/nvm.sh ]] || return 0 + +local -a _1st_arguments +_1st_arguments=( + 'help:show help' + 'install:download and install a version' + 'uninstall:uninstall a version' + 'use:modify PATH to use version' + 'run:run version with given arguments' + 'ls:list installed versions or versions matching a given description' + 'ls-remote:list remote versions available for install' + 'deactivate:undo effects of NVM on current shell' + 'alias:show or set aliases' + 'unalias:deletes an alias' + 'copy-packages:install global NPM packages to current version' +) + +_arguments -C '*:: :->subcmds' && return 0 + +if (( CURRENT == 1 )); then + _describe -t commands "nvm subcommand" _1st_arguments + return +fi
\ No newline at end of file diff --git a/plugins/nvm/nvm.plugin.zsh b/plugins/nvm/nvm.plugin.zsh new file mode 100644 index 000000000..9709719fe --- /dev/null +++ b/plugins/nvm/nvm.plugin.zsh @@ -0,0 +1,3 @@ +# The addition 'nvm install' attempts in ~/.profile + +[[ -s ~/.nvm/nvm.sh ]] && . ~/.nvm/nvm.sh diff --git a/plugins/nyan/nyan.plugin.zsh b/plugins/nyan/nyan.plugin.zsh index 6321e5f5a..ac9d0017e 100644 --- a/plugins/nyan/nyan.plugin.zsh +++ b/plugins/nyan/nyan.plugin.zsh @@ -1,5 +1,5 @@ if [[ -x `which nc` ]]; then - alias nyan='nc -v miku.acm.uiuc.edu 23' # nyan cat + alias nyan='nc -v nyancat.dakko.us 23' # nyan cat fi diff --git a/plugins/osx/osx.plugin.zsh b/plugins/osx/osx.plugin.zsh index f278d4f8d..a63f0ee05 100644 --- a/plugins/osx/osx.plugin.zsh +++ b/plugins/osx/osx.plugin.zsh @@ -6,7 +6,7 @@ # ------------------------------------------------------------------------------ function tab() { - local command="cd \\\"$PWD\\\"" + local command="cd \\\"$PWD\\\"; clear; " (( $# > 0 )) && command="${command}; $*" the_app=$( @@ -34,7 +34,7 @@ EOF launch session "Default Session" set current_session to current session tell current_session - write text "${command}; clear;" + write text "${command}" end tell end tell end tell @@ -154,3 +154,43 @@ function trash() { IFS=$temp_ifs } +function vncviewer() { + open vnc://$@ +} + +# iTunes control function +function itunes() { + local opt=$1 + shift + case "$opt" in + launch|play|pause|stop|rewind|resume|quit) + ;; + mute) + opt="set mute to true" + ;; + unmute) + opt="set mute to false" + ;; + next|previous) + opt="$opt track" + ;; + vol) + opt="set sound volume to $1" #$1 Due to the shift + ;; + ""|-h|--help) + echo "Usage: itunes <option>" + echo "option:" + echo "\tlaunch|play|pause|stop|rewind|resume|quit" + echo "\tmute|unmute\tcontrol volume set" + echo "\tnext|previous\tplay next or previous track" + echo "\tvol\tSet the volume, takes an argument from 0 to 100" + echo "\thelp\tshow this message and exit" + return 0 + ;; + *) + print "Unknown option: $opt" + return 1 + ;; + esac + osascript -e "tell application \"iTunes\" to $opt" +} diff --git a/plugins/pass/_pass b/plugins/pass/_pass index f6c1a6c4b..7a9b1f955 100644 --- a/plugins/pass/_pass +++ b/plugins/pass/_pass @@ -1,13 +1,16 @@ #compdef pass #autoload -# Copyright (C) 2012: +# Copyright (C) 2012 - 2014: # Johan Venant <jvenant@invicem.pro> # Brian Mattern <rephorm@rephorm.com> # Jason A. Donenfeld <Jason@zx2c4.com>. -# Santiago Borrazás <sanbor@gmail.com> # All Rights Reserved. -# This file is licensed under the GPLv2+. Please see COPYING for more information. +# +# This file is licensed under the GPLv2+. +# Please visit http://git.zx2c4.com/password-store/tree/COPYING for more information. 
+# +# Oh my zsh plugin maintainer: Santiago Borrazás <sanbor@gmail.com> _pass () { @@ -23,8 +26,8 @@ _pass () { case "${cmd}" in init) _arguments : \ - "-r[re-encrypt existing passwords]" \ - "--reencrypt[re-encrypt existing passwords]" + "-p[gpg-id will only be applied to this subfolder]" \ + "--path[gpg-id will only be applied to this subfolder]" _pass_complete_keys ;; ls|list|edit) @@ -43,9 +46,19 @@ _pass () { "-n[don't include symbols in password]" \ "--no-symbols[don't include symbols in password]" \ "-c[copy password to the clipboard]" \ - "--clip[copy password to the clipboard]" + "--clip[copy password to the clipboard]" \ + "-f[force overwrite]" \ + "--force[force overwrite]" \ + "-i[replace first line]" \ + "--in-place[replace first line]" _pass_complete_entries_with_subdirs ;; + cp|copy|mv|rename) + _arguments : \ + "-f[force rename]" \ + "--force[force rename]" + _pass_complete_entries_with_subdirs + ;; rm) _arguments : \ "-f[force deletion]" \ @@ -75,10 +88,14 @@ _pass () { subcommands=( "init:Initialize new password storage" "ls:List passwords" + "find:Find password files or directories based on pattern" + "grep:Search inside decrypted password files for matching pattern" "show:Decrypt and print a password" "insert:Insert a new password" "generate:Generate a new password using pwgen" "edit:Edit a password with \$EDITOR" + "mv:Rename the password" + "cp:Copy the password" "rm:Remove the password" "git:Call git on the password store" "version:Output version information" @@ -101,7 +118,7 @@ _pass_cmd_show () { _pass_complete_entries_helper () { local IFS=$'\n' local prefix="${PASSWORD_STORE_DIR:-$HOME/.password-store}" - _values -C 'passwords' $(find "$prefix" \( -name .git -o -name .gpg-id \) -prune -o $@ -print | sed -e "s#${prefix}.##" -e 's#\.gpg##' | sort) + _values -C 'passwords' ${$(find -L "$prefix" \( -name .git -o -name .gpg-id \) -prune -o $@ -print 2>/dev/null | sed -e "s#${prefix}/\{0,1\}##" -e 's#\.gpg##' | sort):-""} } _pass_complete_entries_with_subdirs () { @@ -117,3 +134,5 @@ _pass_complete_keys () { # Extract names and email addresses from gpg --list-keys _values 'gpg keys' $(gpg2 --list-secret-keys --with-colons | cut -d : -f 10 | sort -u | sed '/^$/d') } + +_pass diff --git a/plugins/pep8/_pep8 b/plugins/pep8/_pep8 new file mode 100644 index 000000000..ce19951dc --- /dev/null +++ b/plugins/pep8/_pep8 @@ -0,0 +1,34 @@ +#compdef pep8 +# +# this is zsh completion function file. +# generated by genzshcomp(ver: 0.5.1) +# + +typeset -A opt_args +local context state line + +_arguments -s -S \ + "--help[show this help message and exit]:" \ + "-h[show this help message and exit]:" \ + "--version[show program's version number and exit]:" \ + "--verbose[print status messages, or debug with -vv]" \ + "-v[print status messages, or debug with -vv]" \ + "--quiet[report only file names, or nothing with -qq]" \ + "-q[report only file names, or nothing with -qq]" \ + "--repeat[(obsolete) show all occurrences of the same error]" \ + "-r[(obsolete) show all occurrences of the same error]" \ + "--first[show first occurrence of each error]" \ + "--exclude[exclude files or directories which match these comma separated patterns (default: .svn,CVS,.bzr,.hg,.git,__pycache__)]::patterns:_files" \ + "--filename[when parsing directories, only check filenames matching these comma separated patterns (default: *.py)]::patterns:_files" \ + "--select[select errors and warnings (e.g. E,W6)]::errors:_files" \ + "--ignore[skip errors and warnings (e.g. 
E4,W)]::errors:_files" \ + "--show-source[show source code for each error]" \ + "--show-pep8[show text of PEP 8 for each error (implies --first)]" \ + "--statistics[count errors and warnings]" \ + "--count[print total number of errors and warnings to standard error and set exit code to 1 if total is not null]" \ + "--max-line-length[set maximum allowed line length (default: 79)]::n:_files" \ + "--format[set the error format \[default|pylint|<custom>\]]::format:_files" \ + "--diff[report only lines changed according to the unified diff received on STDIN]" \ + "--benchmark[measure processing speed are read from the \[pep8\] section of the tox.ini fg file located in any parent folder of the path(s) llowed options are: exclude, filename, select, ngth, count, format, quiet, show-pep8, show-source, .]" \ + "--config[user config file location (default: /home/gsemet/.config/pep8)]::path:_files" \ + "*::args:_files" diff --git a/plugins/per-directory-history/README.md b/plugins/per-directory-history/README.md new file mode 100644 index 000000000..d8ff93dc0 --- /dev/null +++ b/plugins/per-directory-history/README.md @@ -0,0 +1,56 @@ +[Per-Directory-History][6] +========================= + +Per directory history for zsh, as well as global history, and the +ability to toggle between them with ^G. + +This is an implementation of per directory history for zsh, some +implementations of which exist in bash[1][],[2][]. It also implements +a per-directory-history-toggle-history function to change from using the +directory history to using the global history. In both cases the history is +always saved to both the global history and the directory history, so the +toggle state will not affect the saved histories. Being able to switch +between global and directory histories on the fly is a novel feature as far +as I am aware. + +This is a standalone repository for the script; however, it is also included in +[oh-my-zsh][4] as a plugin. + +---------------------------------------------------------------------------- +Usage +---------------------------------------------------------------------------- + +1. Load this script into your interactive ZSH session: + + % source zsh-per-directory-history.zsh + +2. The default mode is per directory history; interact with your history as normal. + +3. Press ^G (the Control and G keys simultaneously) to toggle between local + and global histories. + + + +------------------------------------------------------------------------------- +Configuration +------------------------------------------------------------------------------- + +* HISTORY_BASE a global variable that defines the base directory in which the + directory histories are stored +* per-directory-history-toggle-history is the function to toggle the history + +------------------------------------------------------------------------------- +History +------------------------------------------------------------------------------- + +The idea/inspiration for a per directory history is from [Stewart MacArthur][1] +and [Dieter][2], the implementation idea is from [Bart Schaefer][3]. The +implementation is by [Jim Hester][5] in September 2012. 
+ +[1]: http://www.compbiome.com/2010/07/bash-per-directory-bash-history.html +[2]: http://dieter.plaetinck.be/per_directory_bash +[3]: http://www.zsh.org/mla/users/1997/msg00226.html +[4]: https://github.com/robbyrussell/oh-my-zsh +[5]: http://jimhester.com +[6]: http://github.com/jimhester/per-directory-history + diff --git a/plugins/per-directory-history/per-directory-history.plugin.zsh b/plugins/per-directory-history/per-directory-history.plugin.zsh index 61e8b5a62..142d9541d 100644..120000 --- a/plugins/per-directory-history/per-directory-history.plugin.zsh +++ b/plugins/per-directory-history/per-directory-history.plugin.zsh @@ -1,149 +1 @@ -#!/usr/bin/env zsh -# -# This is a implementation of per directory history for zsh, some -# implementations of which exist in bash[1,2]. It also implements -# a per-directory-history-toggle-history function to change from using the -# directory history to using the global history. In both cases the history is -# always saved to both the global history and the directory history, so the -# toggle state will not effect the saved histories. Being able to switch -# between global and directory histories on the fly is a novel feature as far -# as I am aware. -# -#------------------------------------------------------------------------------- -# Configuration -#------------------------------------------------------------------------------- -# -# HISTORY_BASE a global variable that defines the base directory in which the -# directory histories are stored -# -#------------------------------------------------------------------------------- -# History -#------------------------------------------------------------------------------- -# -# The idea/inspiration for a per directory history is from Stewart MacArthur[1] -# and Dieter[2], the implementation idea is from Bart Schaefer on the the zsh -# mailing list[3]. The implementation is by Jim Hester in September 2012. -# -# [1]: http://www.compbiome.com/2010/07/bash-per-directory-bash-history.html -# [2]: http://dieter.plaetinck.be/per_directory_bash -# [3]: http://www.zsh.org/mla/users/1997/msg00226.html -# -################################################################################ -# -# Copyright (c) 2012 Jim Hester -# -# This software is provided 'as-is', without any express or implied warranty. -# In no event will the authors be held liable for any damages arising from the -# use of this software. -# -# Permission is granted to anyone to use this software for any purpose, -# including commercial applications, and to alter it and redistribute it -# freely, subject to the following restrictions: -# -# 1. The origin of this software must not be misrepresented; you must not claim -# that you wrote the original software. If you use this software in a product, -# an acknowledgment in the product documentation would be appreciated but is -# not required. -# -# 2. Altered source versions must be plainly marked as such, and must not be -# misrepresented as being the original software. -# -# 3. This notice may not be removed or altered from any source distribution.. 
-# -################################################################################ - -#------------------------------------------------------------------------------- -# configuration, the base under which the directory histories are stored -#------------------------------------------------------------------------------- - -[[ -z $HISTORY_BASE ]] && HISTORY_BASE="$HOME/.directory_history" - -#------------------------------------------------------------------------------- -# toggle global/directory history used for searching - ctrl-G by default -#------------------------------------------------------------------------------- - -function per-directory-history-toggle-history() { - if [[ $_per_directory_history_is_global == true ]]; then - _per-directory-history-set-directory-history - print "\nusing local history\n" - else - _per-directory-history-set-global-history - print "\nusing global history\n" - fi - zle .push-line - zle .accept-line -} - -autoload per-directory-history-toggle-history -zle -N per-directory-history-toggle-history -bindkey '^G' per-directory-history-toggle-history - -#------------------------------------------------------------------------------- -# implementation details -#------------------------------------------------------------------------------- - -_per_directory_history_directory="$HISTORY_BASE${PWD:A}/history" - -function _per-directory-history-change-directory() { - _per_directory_history_directory="$HISTORY_BASE${PWD:A}/history" - mkdir -p ${_per_directory_history_directory:h} - if [[ $_per_directory_history_is_global == false ]]; then - #save to the global history - fc -AI $HISTFILE - #save history to previous file - local prev="$HISTORY_BASE${OLDPWD:A}/history" - mkdir -p ${prev:h} - fc -AI $prev - - #discard previous directory's history - local original_histsize=$HISTSIZE - HISTSIZE=0 - HISTSIZE=$original_histsize - - #read history in new file - if [[ -e $_per_directory_history_directory ]]; then - fc -R $_per_directory_history_directory - fi - fi -} - -function _per-directory-history-addhistory() { - print -sr -- ${1%%$'\n'} - fc -p $_per_directory_history_directory -} - - -function _per-directory-history-set-directory-history() { - if [[ $_per_directory_history_is_global == true ]]; then - fc -AI $HISTFILE - local original_histsize=$HISTSIZE - HISTSIZE=0 - HISTSIZE=$original_histsize - if [[ -e "$_per_directory_history_directory" ]]; then - fc -R "$_per_directory_history_directory" - fi - fi - _per_directory_history_is_global=false -} -function _per-directory-history-set-global-history() { - if [[ $_per_directory_history_is_global == false ]]; then - fc -AI $_per_directory_history_directory - local original_histsize=$HISTSIZE - HISTSIZE=0 - HISTSIZE=$original_histsize - if [[ -e "$HISTFILE" ]]; then - fc -R "$HISTFILE" - fi - fi - _per_directory_history_is_global=true -} - - -#add functions to the exec list for chpwd and zshaddhistory -chpwd_functions=(${chpwd_functions[@]} "_per-directory-history-change-directory") -zshaddhistory_functions=(${zshaddhistory_functions[@]} "_per-directory-history-addhistory") - -#start in directory mode -mkdir -p ${_per_directory_history_directory:h} -_per_directory_history_is_global=true -_per-directory-history-set-directory-history +per-directory-history.zsh
\ No newline at end of file diff --git a/plugins/per-directory-history/per-directory-history.zsh b/plugins/per-directory-history/per-directory-history.zsh new file mode 100644 index 000000000..bdee341bd --- /dev/null +++ b/plugins/per-directory-history/per-directory-history.zsh @@ -0,0 +1,149 @@ +#!/usr/bin/env zsh +# +# This is a implementation of per directory history for zsh, some +# implementations of which exist in bash[1,2]. It also implements +# a per-directory-history-toggle-history function to change from using the +# directory history to using the global history. In both cases the history is +# always saved to both the global history and the directory history, so the +# toggle state will not effect the saved histories. Being able to switch +# between global and directory histories on the fly is a novel feature as far +# as I am aware. +# +#------------------------------------------------------------------------------- +# Configuration +#------------------------------------------------------------------------------- +# +# HISTORY_BASE a global variable that defines the base directory in which the +# directory histories are stored +# +#------------------------------------------------------------------------------- +# History +#------------------------------------------------------------------------------- +# +# The idea/inspiration for a per directory history is from Stewart MacArthur[1] +# and Dieter[2], the implementation idea is from Bart Schaefer on the the zsh +# mailing list[3]. The implementation is by Jim Hester in September 2012. +# +# [1]: http://www.compbiome.com/2010/07/bash-per-directory-bash-history.html +# [2]: http://dieter.plaetinck.be/per_directory_bash +# [3]: http://www.zsh.org/mla/users/1997/msg00226.html +# +################################################################################ +# +# Copyright (c) 2012 Jim Hester +# +# This software is provided 'as-is', without any express or implied warranty. +# In no event will the authors be held liable for any damages arising from the +# use of this software. +# +# Permission is granted to anyone to use this software for any purpose, +# including commercial applications, and to alter it and redistribute it +# freely, subject to the following restrictions: +# +# 1. The origin of this software must not be misrepresented; you must not claim +# that you wrote the original software. If you use this software in a product, +# an acknowledgment in the product documentation would be appreciated but is +# not required. +# +# 2. Altered source versions must be plainly marked as such, and must not be +# misrepresented as being the original software. +# +# 3. This notice may not be removed or altered from any source distribution.. 
+# +################################################################################ + +#------------------------------------------------------------------------------- +# configuration, the base under which the directory histories are stored +#------------------------------------------------------------------------------- + +[[ -z $HISTORY_BASE ]] && HISTORY_BASE="$HOME/.directory_history" + +#------------------------------------------------------------------------------- +# toggle global/directory history used for searching - ctrl-G by default +#------------------------------------------------------------------------------- + +function per-directory-history-toggle-history() { + if [[ $_per_directory_history_is_global == true ]]; then + _per-directory-history-set-directory-history + print -n "\nusing local history" + else + _per-directory-history-set-global-history + print -n "\nusing global history" + fi + zle .push-line + zle .accept-line +} + +autoload per-directory-history-toggle-history +zle -N per-directory-history-toggle-history +bindkey '^G' per-directory-history-toggle-history + +#------------------------------------------------------------------------------- +# implementation details +#------------------------------------------------------------------------------- + +_per_directory_history_directory="$HISTORY_BASE${PWD:A}/history" + +function _per-directory-history-change-directory() { + _per_directory_history_directory="$HISTORY_BASE${PWD:A}/history" + mkdir -p ${_per_directory_history_directory:h} + if [[ $_per_directory_history_is_global == false ]]; then + #save to the global history + fc -AI $HISTFILE + #save history to previous file + local prev="$HISTORY_BASE${OLDPWD:A}/history" + mkdir -p ${prev:h} + fc -AI $prev + + #discard previous directory's history + local original_histsize=$HISTSIZE + HISTSIZE=0 + HISTSIZE=$original_histsize + + #read history in new file + if [[ -e $_per_directory_history_directory ]]; then + fc -R $_per_directory_history_directory + fi + fi +} + +function _per-directory-history-addhistory() { + print -Sr -- ${1%%$'\n'} + fc -p $_per_directory_history_directory +} + + +function _per-directory-history-set-directory-history() { + if [[ $_per_directory_history_is_global == true ]]; then + fc -AI $HISTFILE + local original_histsize=$HISTSIZE + HISTSIZE=0 + HISTSIZE=$original_histsize + if [[ -e "$_per_directory_history_directory" ]]; then + fc -R "$_per_directory_history_directory" + fi + fi + _per_directory_history_is_global=false +} +function _per-directory-history-set-global-history() { + if [[ $_per_directory_history_is_global == false ]]; then + fc -AI $_per_directory_history_directory + local original_histsize=$HISTSIZE + HISTSIZE=0 + HISTSIZE=$original_histsize + if [[ -e "$HISTFILE" ]]; then + fc -R "$HISTFILE" + fi + fi + _per_directory_history_is_global=true +} + + +#add functions to the exec list for chpwd and zshaddhistory +chpwd_functions=(${chpwd_functions[@]} "_per-directory-history-change-directory") +zshaddhistory_functions=(${zshaddhistory_functions[@]} "_per-directory-history-addhistory") + +#start in directory mode +mkdir -p ${_per_directory_history_directory:h} +_per_directory_history_is_global=true +_per-directory-history-set-directory-history diff --git a/plugins/phing/phing.plugin.zsh b/plugins/phing/phing.plugin.zsh index 8f4adca08..795f1db85 100644 --- a/plugins/phing/phing.plugin.zsh +++ b/plugins/phing/phing.plugin.zsh @@ -1,16 +1,13 @@ _phing_does_target_list_need_generating () { - if [ ! 
-f .phing_targets ]; then return 0; - else - accurate=$(stat -f%m .phing_targets) - changed=$(stat -f%m build.xml) - return $(expr $accurate '>=' $changed) - fi + [ ! -f .phing_targets ] && return 0; + [ .phing_targets -nt build.xml ] && return 0; + return 1; } _phing () { if [ -f build.xml ]; then if _phing_does_target_list_need_generating; then - phing -l |grep -v ":" |grep -v "^$"|grep -v "\-" > .phing_targets + phing -l |grep -v ":$" |grep -v "^-*$" > .phing_targets fi compadd `cat .phing_targets` fi diff --git a/plugins/pip/_pip b/plugins/pip/_pip index df53ba5ce..cb155e5f4 100644 --- a/plugins/pip/_pip +++ b/plugins/pip/_pip @@ -1,4 +1,4 @@ -#compdef pip +#compdef pip pip2 pip-2.7 pip3 pip-3.2 pip-3.3 pip-3.4 #autoload # pip zsh completion, based on homebrew completion @@ -6,8 +6,8 @@ _pip_all() { # we cache the list of packages (originally from the macports plugin) if (( ! $+piplist )); then - echo -n " (caching package index...)" - piplist=($(pip search * | cut -d ' ' -f 1 | tr '[A-Z]' '[a-z]')) + zsh-pip-cache-packages + piplist=($(cat $ZSH_PIP_CACHE_FILE)) fi } @@ -20,6 +20,7 @@ _1st_arguments=( 'bundle:create pybundles (archives containing multiple packages)' 'freeze:output all currently installed packages (exact versions) to stdout' 'help:show available commands' + 'show:show information about installed packages' 'install:install packages' 'search:search PyPI' 'uninstall:uninstall packages' @@ -58,12 +59,18 @@ case "$words[1]" in _arguments \ '(-U --upgrade)'{-U,--upgrade}'[upgrade all packages to the newest available version]' \ '(-f --find-links)'{-f,--find-links}'[URL for finding packages]' \ + '(-r --requirement)'{-r,--requirement}'[Requirements file for packages to install]:File:_files' \ '(--no-deps --no-dependencies)'{--no-deps,--no-dependencies}'[iIgnore package dependencies]' \ '(--no-install)--no-install[only download packages]' \ '(--no-download)--no-download[only install downloaded packages]' \ '(--install-option)--install-option[extra arguments to be supplied to the setup.py]' \ + '(--single-version-externally-managed)--single-version-externally-managed[do not download/install dependencies. requires --record or --root]'\ + '(--root)--root[treat this path as a fake chroot, installing into it. implies --single-version-externally-managed]'\ + '(--record)--record[file to record all installed files to.]'\ + '(-r --requirement)'{-r,--requirement}'[requirements file]: :_files'\ + '(-e --editable)'{-e,--editable}'[path of or url to source to link to instead of installing.]: :_files -/'\ '1: :->packages' && return 0 - + if [[ "$state" == packages ]]; then _pip_all _wanted piplist expl 'packages' compadd -a piplist @@ -71,4 +78,7 @@ case "$words[1]" in uninstall) _pip_installed _wanted installed_pkgs expl 'installed packages' compadd -a installed_pkgs ;; + show) + _pip_installed + _wanted installed_pkgs expl 'installed packages' compadd -a installed_pkgs ;; esac diff --git a/plugins/pip/pip.plugin.zsh b/plugins/pip/pip.plugin.zsh new file mode 100644 index 000000000..b5433ae9d --- /dev/null +++ b/plugins/pip/pip.plugin.zsh @@ -0,0 +1,78 @@ +# Usage: +# Just add pip to your installed plugins. + +# If you would like to change the cheeseshops used for autocomplete set +# ZSH_PIP_INDEXES in your zshrc. If one of your indexes are bogus you won't get +# any kind of error message, pip will just not autocomplete from them. Double +# check! +# +# If you would like to clear your cache, go ahead and do a +# "zsh-pip-clear-cache". 
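# As a concrete illustration of the configuration described above (the mirror
# URL is purely hypothetical), a ~/.zshrc could add an extra cheeseshop and
# then rebuild the cache so completion picks it up:
#
#   ZSH_PIP_INDEXES=(https://pypi.python.org/simple/ https://pypi.mirror.example.com/simple/)
#   zsh-pip-clear-cache; zsh-pip-cache-packages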
+ +ZSH_PIP_CACHE_FILE=~/.pip/zsh-cache +ZSH_PIP_INDEXES=(https://pypi.python.org/simple/) + +zsh-pip-clear-cache() { + rm $ZSH_PIP_CACHE_FILE + unset piplist +} + +zsh-pip-clean-packages() { + sed -n '/<a href/ s/.*>\([^<]\{1,\}\).*/\1/p' +} + +zsh-pip-cache-packages() { + if [[ ! -d ${ZSH_PIP_CACHE_FILE:h} ]]; then + mkdir -p ${ZSH_PIP_CACHE_FILE:h} + fi + + if [[ ! -f $ZSH_PIP_CACHE_FILE ]]; then + echo -n "(...caching package index...)" + tmp_cache=/tmp/zsh_tmp_cache + for index in $ZSH_PIP_INDEXES ; do + # well... I've already got two problems + curl $index 2>/dev/null | \ + zsh-pip-clean-packages \ + >> $tmp_cache + done + sort $tmp_cache | uniq | tr '\n' ' ' > $ZSH_PIP_CACHE_FILE + rm $tmp_cache + fi +} + +# A test function that validates the regex against known forms of the simple +# index. If you modify the regex to make it work for you, you should add a test +# case in here and make sure that your changes don't break things for someone +# else. +zsh-pip-test-clean-packages() { + local expected + local actual + expected="0x10c-asm +1009558_nester" + + actual=$(echo -n "<html><head><title>Simple Index</title><meta name=\"api-version\" value=\"2\" /></head><body> +<a href='0x10c-asm'>0x10c-asm</a><br/> +<a href='1009558_nester'>1009558_nester</a><br/> +</body></html>" | zsh-pip-clean-packages) + + if [[ $actual != $expected ]] ; then + echo -e "python's simple index is broken:\n$actual\n !=\n$expected" + else + echo "python's simple index is fine" + fi + + actual=$(echo -n '<html> + <head> + <title>Simple Package Index</title> + </head> + <body> + <a href="0x10c-asm">0x10c-asm</a><br/> + <a href="1009558_nester">1009558_nester</a><br/> +</body></html>' | zsh-pip-clean-packages) + + if [[ $actual != $expected ]] ; then + echo -e "the djangopypi2 index is broken:\n$actual\n !=\n$expected" + else + echo "the djangopypi2 index is fine" + fi +} diff --git a/plugins/pj/pj.plugin.zsh b/plugins/pj/pj.plugin.zsh new file mode 100644 index 000000000..ba3765b83 --- /dev/null +++ b/plugins/pj/pj.plugin.zsh @@ -0,0 +1,42 @@ +#!/bin/zsh + +# +# Original idea by DefV (Jan De Poorter) +# Source: https://gist.github.com/pjaspers/368394#comment-1016 +# +# Usage: +# - Set `$PROJECT_PATHS` in your ~/.zshrc +# e.g.: PROJECT_PATHS=(~/src ~/work) +# - In ZSH you now can open a project directory with the command: `pj my-project` +# the plugin will locate the `my-project` directory in one of the $PROJECT_PATHS +# Also tab completion is supported. 
+# - `pjo my-project` will open the directory in $EDITOR +# + +function pj() { + cmd="cd" + file=$1 + + if [[ "open" == "$file" ]] then + file=$2 + cmd=(${(s: :)EDITOR}) + fi + + for project in $PROJECT_PATHS; do + if [[ -d $project/$file ]] then + $cmd "$project/$file" + unset project # Unset project var + return + fi + done + + echo "No such project $1" +} + +alias pjo="pj open" + +function _pj () { + compadd `/bin/ls -l $PROJECT_PATHS 2>/dev/null | awk '{ print $9 }'` +} + +compdef _pj pj diff --git a/plugins/pod/_pod b/plugins/pod/_pod new file mode 100644 index 000000000..8c0f4460f --- /dev/null +++ b/plugins/pod/_pod @@ -0,0 +1,389 @@ +#compdef pod +#autoload + +# ----------------------------------------------------------------------------- +# FILE: _pod +# DESCRIPTION: Cocoapods (0.27.1) autocomplete plugin for Oh-My-Zsh +# http://cocoapods.org +# AUTHOR: Alexandre Joly (alexandre.joly@mekanics.ch) +# GITHUB: https://github.com/mekanics +# TWITTER: @jolyAlexandre +# VERSION: 0.0.3 +# LICENSE: MIT +# ----------------------------------------------------------------------------- + +local -a _1st_arguments +_1st_arguments=( + 'help:Show help for the given command' + 'init:Generate a Podfile for the current directory' + 'install:Install project dependencies' + 'ipc:Inter-process communication' + 'list:List pods' + 'outdated:Show outdated project dependencies' + 'podfile-info:Shows information on installed Pods' + 'push:Push new specifications to a spec-repo' + 'repo:Manage spec-repositories' + 'search:Searches for pods' + 'setup:Setup the CocoaPods environment' + 'spec:Manage pod specs' + 'update:Update outdated project dependencies' +) + +local -a _repo_arguments +_repo_arguments=( + 'add:Add a spec repo' + 'lint:Validates all specs in a repo' + 'remove:Remove a spec repo.' 
+ 'update:Update a spec repo' +) + +local -a _spec_arguments +_spec_arguments=( + 'cat:Prints a spec file' + 'create:Create spec file stub' + 'edit:Edit a spec file' + 'lint:Validates a spec file' + 'which:Prints the path of the given spec' +) + +local -a _ipc_arguments +_ipc_arguments=( + 'list:Lists the specifications know to CocoaPods' + 'podfile:Converts a Podfile to YAML' + 'repl:The repl listens to commands on standard input' + 'spec:Converts a podspec to YAML' + 'update-search-index:Updates the search index' +) + +local -a _list_arguments +_list_arguments=( + 'new:Lists pods introduced in the master spec-repo since the last check' +) + +local -a _inherited_options +_inherited_options=( + '(--silent)--silent[Show nothing]' \ + '(--version)--version[Show the version of CocoaPods]' \ + '(--no-color)--no-color[Show output without color]' \ + '(--verbose)--verbose[Show more debugging information]' \ + '(--help)--help[Show help banner of specified command]' +) + +local -a _install_options +_install_options=( + '(--no-clean)--no-clean[Leave SCM dirs like `.git` and `.svn` intact after downloading]' \ + '(--no-integrate)--no-integrate[Skip integration of the Pods libraries in the Xcode project(s)]' \ + '(--no-repo-update)--no-repo-update[Skip running `pod repo update` before install]' +) + +local -a _update_options +_update_options=( + '(--no-clean)--no-clean[Leave SCM dirs like `.git` and `.svn intact after downloading]' \ + '(--no-integrate)--no-integrate[Skip integration of the Pods libraries in the Xcode project(s)]' \ + '(--no-repo-update)--no-repo-update[Skip running `pod repo update before install]' +) + +local -a _outdated_options +_outdated_options=( + '(--no-repo-update)--no-repo-update[Skip running `pod repo update` before install]' +) + +local -a _search_options +_search_options=( + '(--full)--full[Search by name, summary, and description]' \ + '(--stats)--stats[Show additional stats (like GitHub watchers and forks)]' \ + '(--ios)--ios[Restricts the search to Pods supported on iOS]' \ + '(--osx)--osx[Restricts the search to Pods supported on OS X]' +) + +local -a _list_options +_list_options=( + '(--update)--update[Run `pod repo update` before listing]' +) + +local -a _podfile_info_options +_podfile_info_options=( + '(--all)--all[Show information about all Pods with dependencies that are used in a project]' \ + '(--md)--md[Output information in Markdown format]' \ + '*:script or directory:_files' +) + +local -a _push_options +_push_options=( + '(--allow-warnings)--allow-warnings[Allows pushing even if there are warnings]' \ + '(--local-only)--local-only[Does not perform the step of pushing REPO to its remote]' \ + '*:script or directory:_files' +) + +local -a _repo_lint_options +_repo_lint_options=( + '(--only-errors)--only-errors[Lint presents only the errors]' +) + +local -a _setup_options +_setup_options=( + '(--push)--push[Use this option to enable push access once granted]' +) + +local -a _spec_lint_options +_spec_lint_options=( + '(--quick)--quick[Lint skips checks that would require to download and build the spec]' \ + '(--only-errors)--only-errors[Lint validates even if warnings are present]' \ + '(--no-clean)--no-clean[Lint leaves the build directory intact for inspection]' \ + '*:script or directory:_files' +) + +local -a _spec_cat_options +_spec_cat_options=( + '(--show-all)--show-all[Pick from all versions of the given podspec]' +) + +local -a _spec_which_options +_spec_which_options=( + '(--show-all)--show-all[Print all versions of the given podspec]' +) + +local -a 
_spec_edit_options +_spec_edit_options=( + '(--show-all)--show-all[Pick which spec to edit from all available versions of the given podspec]' +) + + +__first_command_list () +{ + local expl + declare -a tasks + + tasks=(install ipc list outdated podfile-info push repo search setup spec update) + + _wanted tasks expl 'help' compadd $tasks +} + +__repo_list() { + _wanted application expl 'repo' compadd $(command ls -1 ~/.cocoapods/repos 2>/dev/null | sed -e 's/ /\\ /g') +} + +__pod-repo() { + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + ':command:->command' \ + '*::options:->options' + + case $state in + (command) + _describe -t commands "pod repo" _repo_arguments + return + ;; + + (options) + case $line[1] in + (lint) + _arguments \ + $_inherited_options \ + $_repo_lint_options \ + ':feature:__repo_list' + ;; + + (update) + _arguments \ + $_inherited_options \ + ':feature:__repo_list' + ;; + + (add) + _arguments \ + $_inherited_options + + (remove) + _arguments \ + $_inherited_options \ + ':feature:__repo_list' + ;; + esac + ;; + esac +} + +__pod-spec() { + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + ':command:->command' \ + '*::options:->options' + + case $state in + (command) + _describe -t commands "pod spec" _spec_arguments + return + ;; + + (options) + case $line[1] in + (create) + _arguments \ + $_inherited_options + ;; + + (lint) + _arguments \ + $_inherited_options \ + $_spec_lint_options + ;; + + (cat) + _arguments \ + $_inherited_options \ + $_spec_cat_options + ;; + + (which) + _arguments \ + $_inherited_options \ + $_spec_which_options + ;; + + (edit) + _arguments \ + $_inherited_options \ + $_spec_edit_options + ;; + esac + return + ;; + esac +} + +__pod-ipc() { + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + ':command:->command' \ + '*::options:->options' + + case $state in + (command) + _describe -t commands "pod ipc" _ipc_arguments + return + ;; + + (options) + _arguments -C \ + $_inherited_options + return + ;; + esac +} + +__pod-list() { + local curcontext="$curcontext" state line + typeset -A opt_args + + _arguments -C \ + $_inherited_options \ + $_list_options \ + ':command:->command' \ + '*::options:->options' + + case $state in + (command) + _describe -t commands "pod list" _list_arguments + return + ;; + + (options) + _arguments -C \ + $_inherited_options \ + $_list_options + return + ;; + esac +} + +local curcontext="$curcontext" state line +typeset -A opt_args + +_arguments -C \ + $_inherited_options \ + ':command:->command' \ + '*::options:->options' + +case $state in + (command) + _describe -t commands "pod" _1st_arguments + return + ;; + + (options) + case $line[1] in + (help) + _arguments \ + $_inherited_options \ + ':help:__first_command_list' + ;; + + (push) + _arguments \ + $_inherited_options \ + $_push_options \ + ':repo:__repo_list' + ;; + + (repo) + __pod-repo + ;; + + (spec) + __pod-spec + ;; + + (ipc) + __pod-ipc + ;; + + (list) + __pod-list + ;; + + (install) + _arguments \ + $_inherited_options \ + $_install_options + ;; + + (update) + _arguments \ + $_inherited_options \ + $_update_options + ;; + + (outdated) + _arguments \ + $_inherited_options \ + $_outdated_options + ;; + + (search) + _arguments \ + $_inherited_options \ + $_search_options + ;; + + (podfile-info) + _arguments \ + $_inherited_options \ + $_podfile_info_options + ;; + + (setup) + _arguments \ + $_inherited_options \ + $_setup_options + ;; + + esac + ;; +esac 
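Each sub-command handler in the completion above follows the same zsh idiom: `_arguments -C` routes the first word into a `->command` state and the remaining words into `->options`, and `_describe` presents the name:description pairs. A minimal sketch of that pattern for a hypothetical `tool` command (the command and its options are invented for illustration):

#compdef tool
local curcontext="$curcontext" state line
typeset -A opt_args

local -a _tool_commands
_tool_commands=(
  'build:compile the project'
  'clean:remove build artifacts'
)

_arguments -C \
  ':command:->command' \
  '*::options:->options'

case $state in
  (command)
    # offer the name:description pairs as the first word
    _describe -t commands "tool command" _tool_commands
    ;;
  (options)
    # dispatch on the sub-command that was already typed
    case $line[1] in
      (build) _arguments '(--verbose)--verbose[print extra output]' ;;
      (clean) _arguments '(--force)--force[delete without prompting]' ;;
    esac
    ;;
esac

Keeping each sub-command's options in its own array, as the completion above does, keeps the dispatch case short and lets several sub-commands share option lists.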
diff --git a/plugins/postgres/postgres.plugin.zsh b/plugins/postgres/postgres.plugin.zsh new file mode 100644 index 000000000..c2dbef244 --- /dev/null +++ b/plugins/postgres/postgres.plugin.zsh @@ -0,0 +1,8 @@ +# Aliases to control Postgres +# Paths noted below are for Postgres installed via Homebrew on OS X + +alias startpost='pg_ctl -D /usr/local/var/postgres -l /usr/local/var/postgres/server.log start' +alias stoppost='pg_ctl -D /usr/local/var/postgres stop -s -m fast' +alias restartpost='stoppost && sleep 1 && startpost' +alias reloadpost='pg_ctl reload -D /usr/local/var/postgres -s' +alias statuspost='pg_ctl status -D /usr/local/var/postgres -s'
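The aliases above hard-code the Homebrew data directory. On a machine whose cluster lives elsewhere, the same pg_ctl invocations can be built around a variable; the ~/pgdata path below is only an assumed example:

# hypothetical data directory; adjust to the local cluster location
export PGDATA="$HOME/pgdata"
alias startpost="pg_ctl -D $PGDATA -l $PGDATA/server.log start"
alias stoppost="pg_ctl -D $PGDATA stop -s -m fast"
alias statuspost="pg_ctl status -D $PGDATA -s"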
\ No newline at end of file diff --git a/plugins/pow/pow.plugin.zsh b/plugins/pow/pow.plugin.zsh index 399a54cb0..d85c88777 100644 --- a/plugins/pow/pow.plugin.zsh +++ b/plugins/pow/pow.plugin.zsh @@ -8,21 +8,21 @@ # Supports command completion. # # If you are not already using completion you might need to enable it with -# +# # autoload -U compinit compinit # # Changes: # -# Defaults to the current application, and will walk up the tree to find +# Defaults to the current application, and will walk up the tree to find # a config.ru file and restart the corresponding app # -# Will Detect if a app does not exist in pow and print a (slightly) helpful +# Will Detect if a app does not exist in pow and print a (slightly) helpful # error message -rack_root_detect(){ +rack_root(){ setopt chaselinks - local orgdir=$(pwd) - local basedir=$(pwd) + local orgdir="$(pwd)" + local basedir="$(pwd)" while [[ $basedir != '/' ]]; do test -e "$basedir/config.ru" && break @@ -30,8 +30,13 @@ rack_root_detect(){ basedir="$(pwd)" done - builtin cd $orgdir 2>/dev/null + builtin cd "$orgdir" 2>/dev/null [[ ${basedir} == "/" ]] && return 1 + echo $basedir +} + +rack_root_detect(){ + basedir=$(rack_root) echo `basename $basedir | sed -E "s/.(com|net|org)//"` } @@ -51,16 +56,30 @@ kapow(){ compctl -W ~/.pow -/ kapow powit(){ - local basedir=$(pwd) + local basedir="$(pwd)" local vhost=$1 [ ! -n "$vhost" ] && vhost=$(rack_root_detect) if [ ! -h ~/.pow/$vhost ] - then - echo "pow: Symlinking your app with pow. ${vhost}" - [ ! -d ~/.pow/${vhost} ] && ln -s $basedir ~/.pow/$vhost + then + echo "pow: Symlinking your app with pow. ${vhost}" + [ ! -d ~/.pow/${vhost} ] && ln -s "$basedir" ~/.pow/$vhost return 1 fi } +powed(){ + local basedir="$(rack_root)" + find ~/.pow/ -type l -lname "*$basedir*" -exec basename {}'.dev' \; +} + +# Restart pow process +# taken from http://www.matthewratzloff.com/blog/2011/12/23/restarting-pow-when-dns-stops-responding +repow(){ + lsof | grep 20560 | awk '{print $2}' | xargs kill -9 + launchctl unload ~/Library/LaunchAgents/cx.pow.powd.plist + launchctl load ~/Library/LaunchAgents/cx.pow.powd.plist + echo "restarted pow" +} + # View the standard out (puts) from any pow app alias kaput="tail -f ~/Library/Logs/Pow/apps/*" diff --git a/plugins/powify/_powify b/plugins/powify/_powify new file mode 100644 index 000000000..9507f400e --- /dev/null +++ b/plugins/powify/_powify @@ -0,0 +1,55 @@ +#compdef powify + +_powify_all_servers() { + all_servers=(`ls $HOME/.pow/ 2>/dev/null`) +} + +local -a all_servers + +local -a _1st_arguments +_1st_arguments=( + 'server:server specific commands' + 'utils:manage powify' + 'create:creates a pow app from the current directory (to change the name append name as an argument)' + 'destroy:destroys the pow app linked to the current directory' + 'restart:restarts the pow app linked to the current directory' + 'always_restart:reload the pow app after each request' + 'always_restart_off:do not reload the pow app after each request' + 'rename:rename the current pow app to [NAME] or renmae [OLD] to [NEW]' + 'environment:run the this pow app in a different environment (aliased `env`)' + 'browse:opens and navigates the default browser to this app' + 'logs:tail the application logs' +) + +_arguments '*:: :->command' + +if (( CURRENT == 1 )); then + _describe -t commands "powify command" _1st_arguments + return +fi + +case "$words[1]" in + server) + _values , \ + 'install[install pow server]' \ + 'reinstall[reinstall pow server]' \ + 'update[update pow server]' \ + 
'uninstall[uninstall pow server]' \ + 'list[list all pow apps]' \ + 'start[start the pow server]' \ + 'stop[stop the pow server]' \ + 'restart[restart the pow server]' \ + 'host[adds all pow apps to /etc/hosts file]' \ + 'unhost[removes all pow apps from /etc/hosts file]' \ + 'status[print the current server status]' \ + 'config[print the current server configuration]' \ + 'logs[tails the pow server logs]' ;; + utils) + _values , \ + 'install[install powify.dev server management tool]' \ + 'reinstall[reinstall powify.dev server management tool]' \ + 'uninstall[uninstall powify.dev server management tool]' ;; + destroy|restart|always_restart|always_restart_off|rename|browse|logs) + _powify_all_servers + _wanted all_servers expl 'all pow servers' compadd -a all_servers ;; +esac diff --git a/plugins/profiles/profiles.plugin.zsh b/plugins/profiles/profiles.plugin.zsh new file mode 100644 index 000000000..8faae6b53 --- /dev/null +++ b/plugins/profiles/profiles.plugin.zsh @@ -0,0 +1,12 @@ +# You will probably want to list this plugin as the first in your .zshrc. + +# This will look for a custom profile for the local machine and each domain or +# subdomain it belongs to. (e.g. com, example.com and foo.example.com) +parts=(${(s:.:)$(hostname)}) +for i in {${#parts}..1}; do + profile=${(j:.:)${parts[$i,${#parts}]}} + file=$ZSH_CUSTOM/profiles/$profile + if [ -f $file ]; then + source $file + fi +done diff --git a/plugins/pyenv/pyenv.plugin.zsh b/plugins/pyenv/pyenv.plugin.zsh new file mode 100644 index 000000000..b3dc7aa17 --- /dev/null +++ b/plugins/pyenv/pyenv.plugin.zsh @@ -0,0 +1,31 @@ +_homebrew-installed() { + type brew &> /dev/null +} + +_pyenv-from-homebrew-installed() { + brew --prefix pyenv &> /dev/null +} + +FOUND_PYENV=0 +pyenvdirs=("$HOME/.pyenv" "/usr/local/pyenv" "/opt/pyenv") +if _homebrew-installed && _pyenv-from-homebrew-installed ; then + pyenvdirs=($(brew --prefix pyenv) "${pyenvdirs[@]}") +fi + +for pyenvdir in "${pyenvdirs[@]}" ; do + if [ -d $pyenvdir/bin -a $FOUND_PYENV -eq 0 ] ; then + FOUND_PYENV=1 + export PYENV_ROOT=$pyenvdir + export PATH=${pyenvdir}/bin:$PATH + eval "$(pyenv init --no-rehash - zsh)" + + function pyenv_prompt_info() { + echo "$(pyenv version-name)" + } + fi +done +unset pyenvdir + +if [ $FOUND_PYENV -eq 0 ] ; then + function pyenv_prompt_info() { echo "system: $(python -V 2>&1 | cut -f 2 -d ' ')" } +fi diff --git a/plugins/pylint/_pylint b/plugins/pylint/_pylint new file mode 100644 index 000000000..e466d051b --- /dev/null +++ b/plugins/pylint/_pylint @@ -0,0 +1,31 @@ +#compdef pylint +# +# this is zsh completion function file. +# generated by genzshcomp(ver: 0.5.1) +# + +typeset -A opt_args +local context state line + +_arguments -s -S \ + "--help[show this help message and exit]:" \ + "-h[show this help message and exit]:" \ + "--version[show program's version number and exit]:" \ + "--long-help[more verbose help.]" \ + "--rcfile[Specify a configuration file.]::<file>:_files" \ + "--errors-only[In error mode, checkers without error messages are disabled and for others, only the ERROR messages are displayed, and no reports are done by default]" \ + "-E[In error mode, checkers without error messages are disabled and for others, only the ERROR messages are displayed, and no reports are done by default]" \ + "--ignore[Add files or directories to the blacklist. They should be base names, not paths. \[current: CVS\]]::<file>[,<file>...]:_files" \ + "--help-msg[Display a help message for the given message id and exit. 
The value may be a comma separated list of message ids.]::<msg-id>:_files" \ + "--generate-rcfile[Generate a sample configuration file according to the current configuration. You can put other options before this one to get them in the generated configuration.]" \ + "--enable[Enable the message, report, category or checker with the given id(s). You can either give multiple identifier separated by comma (,) or put this option multiple time.]::<msg ids>:_files" \ + "-e[Enable the message, report, category or checker with the given id(s). You can either give multiple identifier separated by comma (,) or put this option multiple time.]::<msg ids>:_files" \ + "--disable[Disable the message, report, category or checker with the given id(s). You can either give multiple identifier separated by comma (,) or put this option multiple time (only on the command line, not in the configuration file where it should appear only once).]::<msg ids>:_files" \ + "-d[Disable the message, report, category or checker with the given id(s). You can either give multiple identifier separated by comma (,) or put this option multiple time (only on the command line, not in the configuration file where it should appear only once).]::<msg ids>:_files" \ + "--output-format[Set the output format. Available formats are text, parseable, colorized, msvs (visual studio) and html \[current: text\]]::<format>:_files" \ + "-f[Set the output format. Available formats are text, parseable, colorized, msvs (visual studio) and html \[current: text\]]::<format>:_files" \ + "--include-ids[Include message's id in output \[current: no\]]::<y_or_n>:_files" \ + "-i[Include message's id in output \[current: no\]]::<y_or_n>:_files" \ + "--reports[Tells whether to display a full report or only the messages \[current: yes\]]::<y_or_n>:_files" \ + "-r[Tells whether to display a full report or only the messages \[current: yes\]]::<y_or_n>:_files" \ + "*::args:_files" diff --git a/plugins/pylint/pylint.plugin.zsh b/plugins/pylint/pylint.plugin.zsh new file mode 100644 index 000000000..6760c67b0 --- /dev/null +++ b/plugins/pylint/pylint.plugin.zsh @@ -0,0 +1,3 @@ +# Aliases +alias pylint-quick='pylint --reports=n --include-ids=y' +compdef _pylint-quick pylint-quick='pylint --reports=n --include-ids=y'
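To make the hostname-splitting loop in the profiles plugin above concrete, here is how it resolves for a hypothetical host named foo.example.com — the least specific profile is sourced first, so more specific profiles can override it:

# hostname foo.example.com  ->  parts=(foo example com), ${#parts}=3
# i=3: profile="com"              -> source $ZSH_CUSTOM/profiles/com              (if it exists)
# i=2: profile="example.com"      -> source $ZSH_CUSTOM/profiles/example.com      (if it exists)
# i=1: profile="foo.example.com"  -> source $ZSH_CUSTOM/profiles/foo.example.com  (if it exists)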
\ No newline at end of file diff --git a/plugins/python/_python b/plugins/python/_python new file mode 100644 index 000000000..f517d4806 --- /dev/null +++ b/plugins/python/_python @@ -0,0 +1,54 @@ +#compdef python + +# Python 2.6 +# Python 3.0 + +local curcontext="$curcontext" state line expl +typeset -A opt_args + +local -a args + +if _pick_variant python3=Python\ 3 python2 --version; then + args=( + '(-bb)-b[issue warnings about str(bytes_instance), str(bytearray_instance) and comparing bytes/bytearray with str]' + '(-b)-bb[issue errors about str(bytes_instance), str(bytearray_instance) and comparing bytes/bytearray with str]' + ) +else + args=( + '-Q+[division options]:division option:(old warn warnall new)' + '(-tt)-t[issue warnings about inconsistent tab usage]' + '(-t)-tt[issue errors about inconsistent tab usage]' + '-3[warn about Python 3.x incompatibilities]' + ) +fi + +_arguments -C -s -S "$args[@]" \ + "-B[don't write .py\[co\] files on import]" \ + '(1 -)-c+[program passed in as string (terminates option list)]:python command:' \ + '-d[debug output from parser]' \ + '-E[ignore PYTHON* environment variables (such as PYTHONPATH)]' \ + '(1 * -)-h[display help information]' \ + '-i[inspect interactively after running script]' \ + '(1 * -)-m[run library module as a script (terminates option list)]:module:->modules' \ + '-O[optimize generated bytecode slightly]' \ + '-OO[remove doc-strings in addition to the -O optimizations]' \ + "-s[don't add user site directory to sys.path]" \ + "-S[don't imply 'import site' on initialization]" \ + '-u[unbuffered binary stdout and stderr]' \ + '-v[verbose (trace import statements)]' \ + '(1 * -)'{-V,--version}'[display version information]' \ + '-W+[warning control]:warning filter (action\:message\:category\:module\:lineno):(default always ignore module once error)' \ + '-x[skip first line of source, allowing use of non-Unix forms of #!cmd]' \ + '(-)1:script file:_files -g "*.py(|c|o)(-.)"' \ + '*::script argument: _normal' && return + +if [[ "$state" = modules ]]; then + local -a modules + modules=( + ${${=${(f)"$(_call_program modules $words[1] -c \ + 'from\ pydoc\ import\ help\;\ help\(\"modules\"\)')"}[2,-3]}:#\(package\)} + ) + _wanted modules expl module compadd -a modules && return +fi + +return 1 diff --git a/plugins/python/python.plugin.zsh b/plugins/python/python.plugin.zsh index 852c8b919..a10c06fd3 100644 --- a/plugins/python/python.plugin.zsh +++ b/plugins/python/python.plugin.zsh @@ -6,7 +6,9 @@ alias pyfind='find . -name "*.py"' function pyclean() { ZSH_PYCLEAN_PLACES=${*:-'.'} find ${ZSH_PYCLEAN_PLACES} -type f -name "*.py[co]" -delete + find ${ZSH_PYCLEAN_PLACES} -type d -name "__pycache__" -delete } # Grep among .py files alias pygrep='grep --include="*.py"' + diff --git a/plugins/rails/_rails b/plugins/rails/_rails new file mode 100644 index 000000000..96f57ce64 --- /dev/null +++ b/plugins/rails/_rails @@ -0,0 +1,63 @@ +#compdef rails +#autoload + +local -a _1st_arguments +_1st_arguments=( + 'generate:Generate new code (short-cut alias: "g")' + 'console:Start the Rails console (short-cut alias: "c")' + 'server:Start the Rails server (short-cut alias: "s")' + 'dbconsole:Start a console for the database specified in config/database.yml (short-cut alias: "db")' + 'new:Create a new Rails application. 
"rails new my_app" creates a new application called MyApp in "./my_app"' + 'application:Generate the Rails application code' + 'destroy:Undo code generated with "generate"' + + 'benchmarker:See how fast a piece of code runs' + 'profiler:Get profile information from a piece of code' + 'plugin:Install a plugin' + + 'plugin new:Generates skeleton for developing a Rails plugin' + 'runner:Run a piece of code in the application environment (short-cut alias: "r")' +) + +_rails_generate_arguments() { + generate_arguments=( + assets + controller + decorator + generator + helper + integration_test + mailer + migration + model + observer + performance_test + plugin + resource + scaffold + scaffold_controller + session_migration + stylesheets + task + ) +} + + +_arguments \ + '(--version)--version[show version]' \ + '(--help)--help[show help]' \ + '*:: :->subcmds' && return 0 + +if (( CURRENT == 1 )); then + _describe -t commands "rails subcommand" _1st_arguments + return +fi + +case "$words[1]" in + g|generate) + _rails_generate_arguments + _wanted generate_arguments expl 'all generate' compadd -a generate_arguments ;; + d|destroy) + _rails_generate_arguments + _wanted generate_arguments expl 'all generate' compadd -a generate_arguments ;; +esac diff --git a/plugins/rails/rails.plugin.zsh b/plugins/rails/rails.plugin.zsh index d3849b4f4..fb46cdcf0 100644 --- a/plugins/rails/rails.plugin.zsh +++ b/plugins/rails/rails.plugin.zsh @@ -1,15 +1,71 @@ +function _rails_command () { + if [ -e "bin/rails" ]; then + bin/rails $@ + elif [ -e "script/rails" ]; then + ruby script/rails $@ + elif [ -e "script/server" ]; then + ruby script/$@ + else + rails $@ + fi +} + +function _rake_command () { + if [ -e "bin/rake" ]; then + bin/rake $@ + else + rake $@ + fi +} + +alias rails='_rails_command' +compdef _rails_command=rails + +alias rake='_rake_command' +compdef _rake_command=rake + +alias devlog='tail -f log/development.log' +alias prodlog='tail -f log/production.log' +alias testlog='tail -f log/test.log' + +alias -g RED='RAILS_ENV=development' +alias -g REP='RAILS_ENV=production' +alias -g RET='RAILS_ENV=test' + +# Rails aliases +alias rc='rails console' +alias rd='rails destroy' +alias rdb='rails dbconsole' +alias rg='rails generate' +alias rgm='rails generate migration' +alias rp='rails plugin' +alias ru='rails runner' +alias rs='rails server' +alias rsd='rails server --debugger' + +# Rake aliases +alias rdm='rake db:migrate' +alias rdr='rake db:rollback' +alias rdc='rake db:create' +alias rds='rake db:seed' +alias rdd='rake db:drop' +alias rdtc='rake db:test:clone' +alias rdtp='rake db:test:prepare' +alias rdmtc='rake db:migrate db:test:clone' + +alias rlc='rake log:clear' +alias rn='rake notes' +alias rr='rake routes' + +# legacy stuff alias ss='thin --stats "/thin/stats" start' alias sg='ruby script/generate' alias sd='ruby script/destroy' alias sp='ruby script/plugin' alias sr='ruby script/runner' alias ssp='ruby script/spec' -alias rdbm='rake db:migrate' -alias rdbtp='rake db:test:prepare' -alias migrate='rake db:migrate && rake db:test:prepare' alias sc='ruby script/console' alias sd='ruby script/server --debugger' -alias devlog='tail -f log/development.log' function remote_console() { /usr/bin/env ssh $1 "( cd $2 && ruby script/console production )" diff --git a/plugins/rails3/rails3.plugin.zsh b/plugins/rails3/rails3.plugin.zsh index 237d0594b..261b92108 100644 --- a/plugins/rails3/rails3.plugin.zsh +++ b/plugins/rails3/rails3.plugin.zsh @@ -1,23 +1,4 @@ -# Rails 3 aliases, backwards-compatible with 
Rails 2. - -function _rails_command () { - if [ -e "script/server" ]; then - ruby script/$@ - else - ruby script/rails $@ - fi -} - -alias rc='_rails_command console' -alias rd='_rails_command destroy' -alias rdb='_rails_command dbconsole' -alias rdbm='rake db:migrate db:test:clone' -alias rg='_rails_command generate' -alias rgm='_rails_command generate migration' -alias rp='_rails_command plugin' -alias ru='_rails_command runner' -alias rs='_rails_command server' -alias rsd='_rails_command server --debugger' -alias devlog='tail -f log/development.log' -alias rdm='rake db:migrate' -alias rdr='rake db:rollback' +echo "It looks like you have been using the 'rails3' plugin," +echo "which has been deprecated in favor of a newly consolidated 'rails' plugin." +echo "You will want to modify your ~/.zshrc configuration to begin using it." +echo "Learn more at https://github.com/robbyrussell/oh-my-zsh/pull/2240" diff --git a/plugins/rails4/rails4.plugin.zsh b/plugins/rails4/rails4.plugin.zsh new file mode 100644 index 000000000..5452c242c --- /dev/null +++ b/plugins/rails4/rails4.plugin.zsh @@ -0,0 +1,4 @@ +echo "It looks like you have been using the 'rails4' plugin," +echo "which has been deprecated in favor of a newly consolidated 'rails' plugin." +echo "You will want to modify your ~/.zshrc configuration to begin using it." +echo "Learn more at https://github.com/robbyrussell/oh-my-zsh/pull/2240" diff --git a/plugins/rake-fast/README.md b/plugins/rake-fast/README.md new file mode 100644 index 000000000..f56142f69 --- /dev/null +++ b/plugins/rake-fast/README.md @@ -0,0 +1,23 @@ +# rake-fast + +Fast rake autocompletion plugin. + +This script caches the output for later usage and significantly speeds it up. It generates a .rake_tasks cache file in parallel to the Rakefile. It also checks the file modification dates to see if it needs to regenerate the cache file. + +This is entirely based on [this pull request by Ullrich Schäfer](https://github.com/robb/.dotfiles/pull/10/), which is inspired by [this Ruby on Rails trick from 2006](http://weblog.rubyonrails.org/2006/3/9/fast-rake-task-completion-for-zsh/). + +Think about that. 2006. + +## Installation + +Just add the plugin to your `.zshrc`: + +```bash +plugins=(foo bar rake-fast) +``` + +You might consider adding `.rake_tasks` to your [global .gitignore](https://help.github.com/articles/ignoring-files#global-gitignore) + +## Usage + +`rake`, then press tab diff --git a/plugins/rake-fast/rake-fast.plugin.zsh b/plugins/rake-fast/rake-fast.plugin.zsh new file mode 100644 index 000000000..cb84f69a1 --- /dev/null +++ b/plugins/rake-fast/rake-fast.plugin.zsh @@ -0,0 +1,39 @@ +_rake_refresh () { + if [ -f .rake_tasks ]; then + rm .rake_tasks + fi + echo "Generating .rake_tasks..." > /dev/stderr + _rake_generate + cat .rake_tasks +} + +_rake_does_task_list_need_generating () { + if [ ! -f .rake_tasks ]; then return 0; + else + if [[ $(uname -s) == 'Darwin' ]]; then + accurate=$(stat -f%m .rake_tasks) + changed=$(stat -f%m Rakefile) + else + accurate=$(stat -c%Y .rake_tasks) + changed=$(stat -c%Y Rakefile) + fi + return $(expr $accurate '>=' $changed) + fi +} + +_rake_generate () { + rake --silent --tasks | cut -d " " -f 2 > .rake_tasks +} + +_rake () { + if [ -f Rakefile ]; then + if _rake_does_task_list_need_generating; then + echo "\nGenerating .rake_tasks..." 
> /dev/stderr + _rake_generate + fi + compadd `cat .rake_tasks` + fi +} + +compdef _rake rake +alias rake_refresh='_rake_refresh' diff --git a/plugins/rake/rake.plugin.zsh b/plugins/rake/rake.plugin.zsh index 16b933c14..121150017 100644 --- a/plugins/rake/rake.plugin.zsh +++ b/plugins/rake/rake.plugin.zsh @@ -1,3 +1,7 @@ +# Thank you Jim for everything you contributed to the Ruby and open source community +# over the years. We will miss you dearly. +alias jimweirich="rake" + alias rake="noglob rake" # allows square brackts for rake task invocation alias brake='noglob bundle exec rake' # execute the bundled rake gem alias srake='noglob sudo rake' # noglob must come before sudo diff --git a/plugins/rand-quote/rand-quote.plugin.zsh b/plugins/rand-quote/rand-quote.plugin.zsh new file mode 100644 index 000000000..c3bf6234e --- /dev/null +++ b/plugins/rand-quote/rand-quote.plugin.zsh @@ -0,0 +1,28 @@ +# Get a random quote fron the site http://www.quotationspage.com/random.php3 +# Created by Eduardo San Martin Morote aka Posva +# http://posva.github.io +# Sun Jun 09 10:59:36 CEST 2013 +# Don't remove this header, thank you +# Usage: quote + +WHO_COLOR="\e[0;33m" +TEXT_COLOR="\e[0;35m" +COLON_COLOR="\e[0;35m" +END_COLOR="\e[m" + +if [[ -x `which curl` ]]; then + function quote() + { + Q=$(curl -s --connect-timeout 2 "http://www.quotationspage.com/random.php3" | iconv -c -f ISO-8859-1 -t UTF-8 | grep -m 1 "dt ") + TXT=$(echo "$Q" | sed -e 's/<\/dt>.*//g' -e 's/.*html//g' -e 's/^[^a-zA-Z]*//' -e 's/<\/a..*$//g') + W=$(echo "$Q" | sed -e 's/.*\/quotes\///g' -e 's/<.*//g' -e 's/.*">//g') + if [ "$W" -a "$TXT" ]; then + echo "${WHO_COLOR}${W}${COLON_COLOR}: ${TEXT_COLOR}“${TXT}”${END_COLOR}" + else + quote + fi + } + #quote +else + echo "rand-quote plugin needs curl to work" >&2 +fi diff --git a/plugins/rbenv/rbenv.plugin.zsh b/plugins/rbenv/rbenv.plugin.zsh index d855c2445..213e1beb0 100644 --- a/plugins/rbenv/rbenv.plugin.zsh +++ b/plugins/rbenv/rbenv.plugin.zsh @@ -7,7 +7,7 @@ _rbenv-from-homebrew-installed() { } FOUND_RBENV=0 -rbenvdirs=("$HOME/.rbenv" "/usr/local/rbenv" "/opt/rbenv") +rbenvdirs=("$HOME/.rbenv" "/usr/local/rbenv" "/opt/rbenv" "/usr/local/opt/rbenv") if _homebrew-installed && _rbenv-from-homebrew-installed ; then rbenvdirs=($(brew --prefix rbenv) "${rbenvdirs[@]}") fi @@ -15,9 +15,12 @@ fi for rbenvdir in "${rbenvdirs[@]}" ; do if [ -d $rbenvdir/bin -a $FOUND_RBENV -eq 0 ] ; then FOUND_RBENV=1 - export RBENV_ROOT=$rbenvdir + if [[ $RBENV_ROOT = '' ]]; then + RBENV_ROOT=$rbenvdir + fi + export RBENV_ROOT export PATH=${rbenvdir}/bin:$PATH - eval "$(rbenv init - zsh)" + eval "$(rbenv init --no-rehash - zsh)" alias rubies="rbenv versions" alias gemsets="rbenv gemset list" @@ -32,11 +35,11 @@ for rbenvdir in "${rbenvdirs[@]}" ; do function gems { local rbenv_path=$(rbenv prefix) - gem list $@ | sed \ - -Ee "s/\([0-9\.]+( .+)?\)/$fg[blue]&$reset_color/g" \ - -Ee "s|$(echo $rbenv_path)|$fg[magenta]\$rbenv_path$reset_color|g" \ - -Ee "s/$current_ruby@global/$fg[yellow]&$reset_color/g" \ - -Ee "s/$current_ruby$current_gemset$/$fg[green]&$reset_color/g" + gem list $@ | sed -E \ + -e "s/\([0-9a-z, \.]+( .+)?\)/$fg[blue]&$reset_color/g" \ + -e "s|$(echo $rbenv_path)|$fg[magenta]\$rbenv_path$reset_color|g" \ + -e "s/$current_ruby@global/$fg[yellow]&$reset_color/g" \ + -e "s/$current_ruby$current_gemset$/$fg[green]&$reset_color/g" } function rbenv_prompt_info() { diff --git a/plugins/rebar/_rebar b/plugins/rebar/_rebar new file mode 100644 index 000000000..7ac5a510c --- /dev/null +++ 
b/plugins/rebar/_rebar @@ -0,0 +1,79 @@ +#compdef rebar + +local curcontext=$curcontext state ret=1 +typeset -ga _rebar_global_opts + +_rebar_global_opts=( + '(--help -h)'{--help,-h}'[Show the program options]' + '(--commands -c)'{--commands,-c}'[Show available commands]' + '(--version -V)'{--version,-V}'[Show version information]' + '(-vvv -vv -v)'--verbose+'[Verbosity level. Default: 0]:verbosity level:(0 1 2 3)' + '(-vvv)-v[Slightly more verbose output]' + '(-vvv)-vv[More verbose output]' + '(-v -vv)-vvv[Most verbose output]' + '(--force -f)'{--force,-f}'[Force]' + '-D+[Define compiler macro]' + '(--jobs -j)'{--jobs+,-j+}'[Number of concurrent workers a command may use. Default: 3]:workers:(1 2 3 4 5 6 7 8 9)' + '(--config -C)'{--config,-C}'[Rebar config file to use]:files:_files' + '(--profile -p)'{--profile,-p}'[Profile this run of rebar]' + '(--keep-going -k)'{--keep-going,-k}'[Keep running after a command fails]' +) + +_rebar () { + _arguments -C $_rebar_global_opts \ + '*::command and variable:->cmd_and_var' \ + && return + + case $state in + cmd_and_var) + _values -S = 'variables' \ + 'clean[Clean]' \ + 'compile[Compile sources]' \ + 'create[Create skel based on template and vars]' \ + 'create-app[Create simple app skel]' \ + 'create-node[Create simple node skel]' \ + 'list-template[List avaiavle templates]' \ + 'doc[Generate Erlang program documentation]' \ + 'check-deps[Display to be fetched dependencies]' \ + 'get-deps[Fetch dependencies]' \ + 'update-deps[Update fetched dependencies]' \ + 'delete-deps[Delete fetched dependencies]' \ + 'list-deps[List dependencies]' \ + 'generate[Build release with reltool]' \ + 'overlay[Run reltool overlays only]' \ + 'generate-appups[Generate appup files]' \ + 'generate-upgrade[Build an upgrade package]' \ + 'eunit[Run eunit tests]' \ + 'ct[Run common_test suites]' \ + 'qc[Test QuickCheck properties]' \ + 'xref[Run cross reference analysis]' \ + 'help[Show the program options]' \ + 'version[Show version information]' \ + 'apps[Application names to process]:' \ + 'case[Common Test case]:' \ + 'dump_spec[Dump reltool spec]:' \ + 'jobs[Number of workers]::workers:(0 1 2 3 4 5 6 7 8 9)' \ + 'suites[Common Test suites]::suite name:_path_files -W "(src test)" -g "*.erl(:r)"' \ + 'verbose[Verbosity level]::verbosity level:(0 1 2 3)' \ + 'appid[Application id]:' \ + 'previous_release[Previous release path]:' \ + 'nodeid[Node id]:' \ + 'root_dir[Reltool config root directory]::directory:_files -/' \ + 'skip_deps[Skip deps]::flag:(true false)' \ + 'skip_apps[Application names to not process]::flag:(true false)' \ + 'template[Template name]:' \ + 'template_dir[Template directory]::directory:_files -/' \ + && ret=0 + ;; + esac +} + +_rebar + +# Local variables: +# mode: shell-script +# sh-basic-offset: 2 +# sh-indent-comment: t +# indent-tabs-mode: nil +# End: +# ex: sw=2 ts=2 et filetype=sh diff --git a/plugins/repo/README.md b/plugins/repo/README.md new file mode 100644 index 000000000..0b77e6d48 --- /dev/null +++ b/plugins/repo/README.md @@ -0,0 +1,7 @@ +## repo +**Maintainer:** [Stibbons](https://github.com/Stibbons) + +This plugin mainly add support automatic completion for the repo command line tool: +http://code.google.com/p/git-repo/ + +* `r` aliases `repo` diff --git a/plugins/repo/_repo b/plugins/repo/_repo new file mode 100644 index 000000000..59e39c954 --- /dev/null +++ b/plugins/repo/_repo @@ -0,0 +1,272 @@ +#compdef repo + + +__git_apply_whitespace_strategies () +{ + declare -a strategies + + strategies=( + 'nowarn:turn off the 
trailing-whitespace warning' + 'warn:output trailing-whitespace warning, but apply patch' + 'fix:output trailing-whitespace warning and strip trailing whitespace' + 'error:output trailing-whitespace warning and refuse to apply patch' + 'error-all:same as "error", but output warnings for all files') + + _describe -t strategies 'trailing-whitespace resolution strategy' strategies $* +} + + +_repo() +{ + local context state state_descr line curcontext="$curcontext" + typeset -A opt_args + + local ret=1 + + _arguments -C \ + '(- 1 *)--help[show usage]'\ + '1:command:->command'\ + '*::args:->args' && ret=0 + + case $state in + (command) + repo list 2> /dev/null > /dev/null + if [[ $? == 0 ]]; then + local commands; + commands=( + 'abandon:Permanently abandon a development branch' + 'branch:View current topic branches' + 'branches:View current topic branches' + 'checkout:Checkout a branch for development' + 'cherry-pick:Cherry-pick a change.' + 'diff:Show changes between commit and working tree' + 'download:Download and checkout a change' + 'forall:execute command on several project' + 'grep:Print lines matching a pattern' + 'help:Display detailed help on a command' + 'init:Initialize repo in the current directory' + 'list:List projects and their associated directories' + 'manifest:Manifest inspection utility' + 'overview:Display overview of unmerged project branches' + 'prune:Prune (delete) already merged topics' + 'rebase:Rebase local branches on upstream branch' + 'selfupdate:Update repo to the latest version' + 'smartsync:Update working tree to the latest known good revision' + 'stage:Stage file(s) for commit' + 'start:Start a new branch for development' + 'status:Show the working tree status' + 'sync:Update working tree to the latest revision' + 'upload:Upload changes for code review' + 'version:Display the version of repo' + ) + _describe -t commands 'command' commands && ret=0 + else + local commands; + commands=( + 'init:Install repo in the current working directory' + 'help:Display detailed help on a command' + ) + _describe -t commands 'command' commands && ret=0 + fi + ;; + (args) + case $words[1] in + (branch | branches) + # TODO : list available projects and add them in list to feed compadd with + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + ': :__repo_projects' \ + && ret=0 + ;; + (abandon) + # TODO : list available projects and add them in list to feed compadd with + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + ':branch name:__repo_branch' \ + ': :__repo_projects'\ + && ret=0 + ;; + (checkout) + # TODO : list available projects and add them in list to feed compadd with + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + ':branch name:__repo_branch' \ + ': :__repo_projects'\ + && ret=0 + ;; + (init) + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + "(-q --quiet)"{-q,--quiet}"[be quiet]" \ + "(-u --manifest-url)"{-u,--manifest-url=}"[manifest repository location]":url:__repo_url_prompt \ + "(-b --manifest-branch)"{-b,--manifest-branch=}"[manifest branch or revision]":branch:__repo_branch\ + "(-m --manifest-name)"{-m,--manifest-name=}"[initial manifest file]":manifest_name:__repo_manifest_name\ + "(--mirror)--mirror[mirror the forrest]"\ + "(--reference)--reference=[location of mirror directory]":dir:_dirs\ + "(--depth)--depth=[create a shallow clone with given depth; see git clone]":depth:__repo_depth_prompt\ + "(-g --group=)"{-g,--group=}"[restrict manifest projects to ones with a specified group]":group:_group\ + "(-p 
--platform=)"{-p,--platform=}"[restrict manifest projects to ones with a specified platform group(auto|all|none|linux|darwin|...)]":platform:"(auto all none linux darwin)"\ + "(--repo-url)--repo-url=[repo repository location]":url:__repo_url_prompt\ + "(--repo-branch)--repo-branch[repo branch or revision]":branch_or_rev:__repo__repo_branch_or_rev\ + "(--no-repo-verify)--no-repo-verify[do not verify repo source code]"\ + "(--config-name)--config-name[Always prompt for name/e-mail]"\ + && ret=0 + ;; + (start) + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + "(--all)--all=[begin branch in all projects]"\ + ':branch name:__repo_new__repo_branch_name' \ + ':projects:__repo_projects_or_all' \ + && ret=0 + ;; + (rebase) + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + "(-i --interactive)"{-i,--interactive}"[interactive rebase (single project only)]: :__repo_projects" \ + "(-f --force-rebase)"{-f,--force-rebase}"[Pass --force-rebase to git rebase]" \ + "(--no-ff)--no-ff=[Pass --no-ff to git rebase]"\ + "(-q --quiet)"{-q,--quiet}"[Pass --quiet to git rebase]" \ + "(--autosquash)--no-ff[Pass --autosquash to git rebase]"\ + "(--whitespace=)--whitespace=[Pass --whitespace to git rebase]: :__git_apply_whitespace_strategies"\ + "(--auto-stash)--auto-stash[Stash local modifications before starting]"\ + && ret=0 + ;; + (checkout) + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + ':branch name:__git_branch_names' \ + ':projects:__repo_projects' \ + && ret=0 + ;; + (list) + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + && ret=0 + ;; + (status) + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + "(-j --jobs)"{-j,--jobs}"[number of projects to check simultaneously]" \ + ':projects:__repo_projects' \ + && ret=0 + ;; + (sync) + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + "(--no-force-broken)--no-force-broken[stop sync if a project fails to sync (probably because of permissions)]" \ + "(-l --local-only)"{-l,--local-only}"[only update working tree, don't fetch]" \ + "(-n --network-only)"{-n,--network-branch}"[fetch only, don't update working tree]" \ + "(-d --detach)"{-d,--detach}"[detach projects back to manifest revision]" \ + "(-c --current-branch)"{-c,--current-branch}"[fetch only current branch from server]" \ + "(-q --quiet)"{-q,--quiet}"[be more quiet]" \ + "(-j --jobs=)"{-j,--jobs=}"[projects to fetch simultaneously (default 1) (limited to 5)]:projects to fetch simultaneously (default 1) (limited to 5)" \ + "(-m --manifest-name=)"{-m,--manifest-name=}"[temporary manifest to use for this sync]:manifest xml file:_files -g *.xml" \ + "(--no-clone-bundle)--no-clone-bundle[disable use of /clone.bundle on HTTP/HTTPS]" \ + "(-s --smart-sync)"{-s,--smart-sync=}"[smart sync using manifest from a known tag]:tag:" \ + '(--no-repo-verify)--no-repo-verify[do not verify repo source code]' \ + ': :__repo_projects' \ + && ret=0 + ;; + (upload) + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + "(-t)-t[Send local branch name to Gerrit Code Review]" \ + "(--re= --reviewers=)"{--re=,--reviewers=}"[Request reviews from these people]:Request reviews from these people:" \ + "(--cc=)--cc=[Also send email to these email addresses.]:email addresses:_email_addresses" \ + "(--br=)--br=[Branch to upload.]:branch:__repo_branch" \ + "(--cbr --current-branch)"{--cbr,--current-branch}"[Upload current git branch]" \ + "(-d --draft)"{-d,--draft}"[If specified, upload as a draft.]" \ + "(--verify --no-verify)--no-verify[Do not run the upload hook.]" \ + 
'(--verify --no-verify)--verify[Run the upload hook without prompting]' \ + ': :__repo_projects' \ + && ret=0 + ;; + (forall) + _arguments : \ + "(-h --help)"{-h,--help}"[Show help]" \ + "(-v --verbose)"{-v,--verbose}"[Show command error messages]" \ + '(-p)-p[Show project headers before output]' \ + ': :__repo_projects_mandatory' \ + "(-c --command -h --help -v --verbose -p)"{-c,--command}"[Command (and arguments) to execute]" \ + && ret=0 + ;; + *) + ret=0 + esac + ;; + esac + + return $ret +} + +__repo_reviewers() +{ + # _message -e url 'reviewers' +} + +__repo_url_prompt() +{ + _message -e url 'url' +} + +__repo_manifest_name() +{ + _message -e manifest_name 'manifest name' +} + +_group() +{ + _message -e group 'group' +} + +__repo_branch() +{ + #_message -e branch 'Repo branch' + branches=($(repo branches| cut -c4- | grep '|' | cut -d' ' -f1)) + _describe -t branches 'Select repo branch' branches +} + +__repo__repo_branch_or_rev() +{ + _message -e branch_or_rev 'repo branch or revision' +} + +__repo_depth_prompt() +{ + _message -e depth 'depth' +} + +__repo_projects() +{ + _message -e depth 'Optional option : <projects>...' + projects=($(repo list | cut -d' ' -f1)) + _describe -t projects 'Select projects (keep empty for selecting all projects)' projects +} + +__repo_projects_mandatory() +{ + projects=($(repo list | cut -d' ' -f1)) + #_describe -t projects 'Select projects to apply commands' projects + _values -s ' ' "Select projects to apply commands" $projects +} + +__repo_new__repo_branch_name() +{ + branches=($(repo branches| cut -c4- | grep '|' | cut -d' ' -f1)) + _describe "" branches + _message -e "branch name" 'Enter new branch name or select an existing repo branch' +} + +__repo_projects_or_all() +{ + #_message -e depth '[--all | <project>...]' + + projects=(--all $(repo list | cut -d' ' -f1)) + _describe -t projects 'Select projects or --all' projects + _describe -t --all 'All projects' +} + +_repo "$@" +return $? 
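For reference, the project names offered by __repo_projects and __repo_projects_mandatory above come from `repo list`, which prints one "path : name" line per project; a hypothetical run might look like:

  build/make : platform/build
  frameworks/base : platform/frameworks/base

`cut -d' ' -f1` keeps only the first field, so completion would offer build/make and frameworks/base as <projects> arguments.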
+ diff --git a/plugins/repo/repo.plugin.zsh b/plugins/repo/repo.plugin.zsh new file mode 100644 index 000000000..33f4195c7 --- /dev/null +++ b/plugins/repo/repo.plugin.zsh @@ -0,0 +1,18 @@ +# Aliases +alias r='repo' +compdef _repo r=repo + +alias rra='repo rebase --auto-stash' +compdef _repo rra='repo rebase --auto-stash' + +alias rs='repo sync' +compdef _repo rs='repo sync' + +alias rsrra='repo sync ; repo rebase --auto-stash' +compdef _repo rsrra='repo sync ; repo rebase --auto-stash' + +alias ru='repo upload' +compdef _repo ru='repo upload' + +alias rst='repo status' +compdef _repo rst='repo status' diff --git a/plugins/rsync/rsync.plugin.zsh b/plugins/rsync/rsync.plugin.zsh index 33a31a5c1..1a3bb4cc3 100644 --- a/plugins/rsync/rsync.plugin.zsh +++ b/plugins/rsync/rsync.plugin.zsh @@ -1,4 +1,4 @@ -alias rsync-copy="rsync -av --progress -h" -alias rsync-move="rsync -av --progress -h --remove-source-files" -alias rsync-update="rsync -avu --progress -h" -alias rsync-synchronize="rsync -avu --delete --progress -h" +alias rsync-copy="rsync -avz --progress -h" +alias rsync-move="rsync -avz --progress -h --remove-source-files" +alias rsync-update="rsync -avzu --progress -h" +alias rsync-synchronize="rsync -avzu --delete --progress -h" diff --git a/plugins/rvm/rvm.plugin.zsh b/plugins/rvm/rvm.plugin.zsh index 1ab800b0f..234ac1642 100644 --- a/plugins/rvm/rvm.plugin.zsh +++ b/plugins/rvm/rvm.plugin.zsh @@ -3,8 +3,10 @@ fpath=($rvm_path/scripts/zsh/Completion $fpath) alias rubies='rvm list rubies' alias gemsets='rvm gemset list' -local ruby18='ruby-1.8.7-p334' -local ruby19='ruby-1.9.3-p194' +local ruby18='ruby-1.8.7' +local ruby19='ruby-1.9.3' +local ruby20='ruby-2.0.0' +local ruby21='ruby-2.1.2' function rb18 { if [ -z "$1" ]; then @@ -28,9 +30,30 @@ function rb19 { _rb19() {compadd `ls -1 $rvm_path/gems | grep "^$ruby19@" | sed -e "s/^$ruby19@//" | awk '{print $1}'`} compdef _rb19 rb19 +function rb20 { + if [ -z "$1" ]; then + rvm use "$ruby20" + else + rvm use "$ruby20@$1" + fi +} + +_rb20() {compadd `ls -1 $rvm_path/gems | grep "^$ruby20@" | sed -e "s/^$ruby20@//" | awk '{print $1}'`} +compdef _rb20 rb20 + +function rb21 { + if [ -z "$1" ]; then + rvm use "$ruby21" + else + rvm use "$ruby21@$1" + fi +} + +_rb21() {compadd `ls -1 $rvm_path/gems | grep "^$ruby21@" | sed -e "s/^$ruby21@//" | awk '{print $1}'`} +compdef _rb21 rb21 + function rvm-update { rvm get head - rvm reload # TODO: Reload rvm completion? } # TODO: Make this usable w/o rvm. diff --git a/plugins/safe-paste/safe-paste.plugin.zsh b/plugins/safe-paste/safe-paste.plugin.zsh new file mode 100644 index 000000000..17c212c19 --- /dev/null +++ b/plugins/safe-paste/safe-paste.plugin.zsh @@ -0,0 +1,54 @@ +# Code from Mikael Magnusson: http://www.zsh.org/mla/users/2011/msg00367.html +# +# Requires xterm, urxvt, iTerm2 or any other terminal that supports bracketed +# paste mode as documented: http://www.xfree86.org/current/ctlseqs.html + +# create a new keymap to use while pasting +bindkey -N paste +# make everything in this keymap call our custom widget +bindkey -R -M paste "^@"-"\M-^?" paste-insert +# these are the codes sent around the pasted text in bracketed +# paste mode. 
+# do the first one with both -M viins and -M vicmd in vi mode +bindkey '^[[200~' _start_paste +bindkey -M paste '^[[201~' _end_paste +# insert newlines rather than carriage returns when pasting newlines +bindkey -M paste -s '^M' '^J' + +zle -N _start_paste +zle -N _end_paste +zle -N zle-line-init _zle_line_init +zle -N zle-line-finish _zle_line_finish +zle -N paste-insert _paste_insert + +# switch the active keymap to paste mode +function _start_paste() { + bindkey -A paste main +} + +# go back to our normal keymap, and insert all the pasted text in the +# command line. this has the nice effect of making the whole paste be +# a single undo/redo event. +function _end_paste() { +#use bindkey -v here with vi mode probably. maybe you want to track +#if you were in ins or cmd mode and restore the right one. + bindkey -e + LBUFFER+=$_paste_content + unset _paste_content +} + +function _paste_insert() { + _paste_content+=$KEYS +} + +function _zle_line_init() { + # Tell terminal to send escape codes around pastes. + [[ $TERM == rxvt-unicode || $TERM == xterm || $TERM = xterm-256color || $TERM = screen || $TERM = screen-256color ]] && printf '\e[?2004h' +} + +function _zle_line_finish() { + # Tell it to stop when we leave zle, so pasting in other programs + # doesn't get the ^[[200~ codes around the pasted text. + [[ $TERM == rxvt-unicode || $TERM == xterm || $TERM = xterm-256color || $TERM = screen || $TERM = screen-256color ]] && printf '\e[?2004l' +} + diff --git a/plugins/sbt/_sbt b/plugins/sbt/_sbt new file mode 100644 index 000000000..91372aa72 --- /dev/null +++ b/plugins/sbt/_sbt @@ -0,0 +1,55 @@ +#compdef sbt +#autoload + +local -a _sbt_commands +_sbt_commands=( + 'clean:delete files produced by the build' + 'compile:compile sources' + 'console:start the Scala REPL with project classes on the classpath' + 'console-quick:start the Scala REPL with project deps on the classpath' + 'console-project:start the Scala REPL w/sbt+build-def on the classpath' + 'dist:generate distribution artifacts' + 'dist\:clean:clean distribution artifacts' + 'doc:generate API documentation' + 'gen-idea:generate Intellij Idea project files' + 'package:produce the main artifact, such as a binary jar' + 'package-doc:produce a doc artifact, such as a jar containing API docs' + 'package-src:produce a source artifact, such as a jar containing sources' + 'publish:publish artifacts to a repository' + 'publish-local:publish artifacts to the local repository' + 'run:run a main class' + 'run-main:run the main class selected by the first argument' + 'test:execute all tests' + 'test-only:execute the tests provided as arguments' + 'test-quick:execute previously failed tests' + 'update:resolve and optionally retrieve dependencies' +) + +local expl + +_arguments \ + '(-help)-h[prints an help message]' \ + '(-h)-help[prints an help message]' \ + '(-verbose)-v[this runner is chattier]' \ + '(-v)-verbose[this runner is chattier]' \ + '(-debug)-d[set sbt log level to debug]' \ + '(-d)-debug[set sbt log level to debug]' \ + '-no-colors[disable ANSI color codes]' \ + '-sbt-create[start even if current dir contains no sbt project]' \ + '-sbt-dir[path to global settings/plugins dir (default: ~/.sbt)]' \ + '-sbt-boot[path to shared boot dir (default: ~/.sbt/boot)]' \ + '-ivy[path to local Ivy repository (default: ~/.ivy2)]' \ + '-mem[set memory options]' \ + '-no-share[use all local caches; no sharing]' \ + '-no-global[use global caches, but do not use global ~/.sbt dir]' \ + '-jvm-debug[turn on JVM debugging, open at the given port]' \ 
+ '-batch[disable interactive mode]' \ + '-sbt-version[use the specified version of sbt]' \ + '-sbt-jar[use the specified jar as the sbt launcher]' \ + '(-sbt-snapshot)-sbt-rc[use an RC version of sbt]' \ + '(-sbt-rc)-sbt-snapshot[use a snapshot version of sbt]' \ + '-java-home[alternate JAVA_HOME]' \ + '*:: :->subcmds' && return 0 + +_describe -t commands "sbt subcommand" _sbt_commands +return diff --git a/plugins/sbt/sbt.plugin.zsh b/plugins/sbt/sbt.plugin.zsh new file mode 100644 index 000000000..203c691f5 --- /dev/null +++ b/plugins/sbt/sbt.plugin.zsh @@ -0,0 +1,23 @@ +# ------------------------------------------------------------------------------ +# FILE: sbt.plugin.zsh +# DESCRIPTION: oh-my-zsh plugin file. +# AUTHOR: Mirko Caserta (mirko.caserta@gmail.com) +# VERSION: 1.0.2 +# ------------------------------------------------------------------------------ + +# aliases - mnemonic: prefix is 'sb' +alias sbc='sbt compile' +alias sbco='sbt console' +alias sbcq='sbt console-quick' +alias sbcl='sbt clean' +alias sbcp='sbt console-project' +alias sbd='sbt doc' +alias sbdc='sbt dist:clean' +alias sbdi='sbt dist' +alias sbgi='sbt gen-idea' +alias sbp='sbt publish' +alias sbpl='sbt publish-local' +alias sbr='sbt run' +alias sbrm='sbt run-main' +alias sbu='sbt update' +alias sbx='sbt test' diff --git a/plugins/scala/_scala b/plugins/scala/_scala new file mode 100644 index 000000000..c4ccb37d3 --- /dev/null +++ b/plugins/scala/_scala @@ -0,0 +1,249 @@ +#compdef scala scalac +# ------------------------------------------------------------------------------ +# Copyright (c) 2012 Github zsh-users - http://github.com/zsh-users +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the zsh-users nor the +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# ------------------------------------------------------------------------------ +# Description +# ----------- +# +# Completion script for scala and scalac (http://www.scala-lang.org/). 
+# +# ------------------------------------------------------------------------------ +# Authors +# ------- +# +# * Tony Sloane <inkytonik@gmail.com> +# +# ------------------------------------------------------------------------------ + +typeset -A opt_args +local context state line + +_scala_features () { + compadd "postfixOps" "reflectiveCalls" "implicitConversions" "higherKinds" \ + "existentials" "experimental.macros" "_" +} + +_scala_phases () { + compadd "parser" "namer" "packageobjects" "typer" "patmat" "superaccessors" \ + "extmethods" "pickler" "refchecks" "selectiveanf" "selectivecps" "uncurry" \ + "tailcalls" "specialize" "explicitouter" "erasure" "posterasure" "lazyvals" \ + "lambdalift" "constructors" "flatten" "mixin" "cleanup" "icode" "inliner" \ + "inlineExceptionHandlers" "closelim" "dce" "jvm" "terminal" +} + +local -a shared_opts +shared_opts=( + "-bootclasspath+[Override location of bootstrap class files]:bootstrap class directory:_files -/" + "-classpath+[Specify where to find user class files]:directory:_files -/" + "-D-[Pass -Dproperty=value directly to the runtime system]" + "-d+[Destination for generated classfiles]: directory or jar file:_files" + "-dependencyfile+[Set dependency tracking file]:dependency tracking file:_files" + "-deprecation[Emit warning and location for usages of deprecated APIs]" + "-encoding+[Specify character encoding used by source files]:encoding:" + "-explaintypes[Explain type errors in more detail]" + "-extdirs+[Override location of installed extensions]:extensions directory:_files -/" + "-g\:-[Set level of generated debugging info (default\: vars)]:debugging info level:(none source line vars notailcalls)" + "-help[Print a synopsis of standard options]" + "-J-[pass argument directly to Java runtime system]:JVM argument:" + "-javabootclasspath+[Override java boot classpath]:Java boot class path directory]:_files -/" + "-javaextdirs+[Override java extdirs classpath]:Java extdirs directory:_files -/" + "-language\:-[Enable one or more language features]:feature:_scala_features" + "-no-specialization[Ignore @specialize annotations]" + "-nobootcp[Do not use the boot classpath for the scala jars]" + "-nowarn[Generate no warnings]" + "-optimise[Generate faster bytecode by applying optimisations to the program]" + "-P\:-[Pass an option to a plugin (written plugin\:opt)]:plugin option:" + "-print[Print program with Scala-specific features removed]" + "-sourcepath+[Specify location(s) of source files]:source file directory:_files -/" + "-target\:-[Target platform for object files (default\: jvm-1.5)]:platform name:(jvm-1.5 msil)" + "-toolcp+[Add to the runner classpath]:directory:_files -/" + "-unchecked[Enable detailed unchecked (erasure) warnings]" + "-uniqid[Uniquely tag all identifiers in debugging output]" + "-usejavacp[Utilize the java.class.path in classpath resolution]" + "-verbose[Output messages about what the compiler is doing]" + "-version[Print product version and exit]" + "-X[Print a synopsis of advanced options]" + "-Y[Print a synopsis of private options]" +) + +local -a X_opts +X_opts=( + "-Xcheck-null[Warn upon selection of nullable reference]" + "-Xcheckinit[Wrap field accessors to throw an exception on uninitialized access]" + "-Xdisable-assertions[Generate no assertions or assumptions]" + "-Xelide-below+[Calls to @elidable methods are omitted if method priority is lower than integer argument]" + "-Xexperimental[Enable experimental extensions]" + "-Xfatal-warnings[Fail the compilation if there are any warnings]" + 
"-Xfull-lubs[Retains pre 2.10 behavior of less aggressive truncation of least upper bounds]" + "-Xfuture[Turn on future language features]" + "-Xgenerate-phase-graph+[Generate the phase graphs (outputs .dot files) to fileX.dot]:output file:_files" + "-Xlint[Enable recommended additional warnings]" + "-Xlog-free-terms[Print a message when reification creates a free term]" + "-Xlog-free-types[Print a message when reification resorts to generating a free type]" + "-Xlog-implicits[Show more detail on why some implicits are not applicable]" + "-Xlog-implicit-conversions[Print a message whenever an implicit conversion is inserted]" + "-Xlog-reflective-calls[Print a message when a reflective method call is generated]" + "-Xmacro-settings\:-[Custom settings for macros]:option" + "-Xmain-class+[Class for manifest's Main-Class entry (only useful with -d jar)]:path:" + "-Xmax-classfile-name+[Maximum filename length for generated classes]" + "-Xmigration[Warn about constructs whose behavior may have changed]" + "-Xno-forwarders[Do not generate static forwarders in mirror classes]" + "-Xno-patmat-analysis[Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation]" + "-Xno-uescape[Disable handling of \u unicode escapes]" + "-Xnojline[Do not use JLine for editing]" + "-Xoldpatmat[Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10]" + "-Xprint\:-[Print out program after <phase>]:phase name:_scala_phases" + "-Xprint-icode\:-[Log internal icode to *.icode files after phase (default\: icode)]:phase name:_scala_phases" + "-Xprint-pos[Print tree positions, as offsets]" + "-Xprint-types[Print tree types (debugging option)]" + "-Xprompt[Display a prompt after each error (debugging option)]" + "-Xresident[Compiler stays resident: read source filenames from standard input]" + "-Xscript+[Treat the source file as a script and wrap it in a main method]:main object name" + "-Xshow-class+[Show internal representation of class]:class name" + "-Xshow-object+[Show internal representation of object]:object name" + "-Xshow-phases[Print a synopsis of compiler phases]" + "-Xsource-reader+[Specify a class name for a custom method of reading source files]:class name" + "-Xverify[Verify generic signatures in generated bytecode]" + + "-Xassem-extdirs+[List of directories containing assemblies (requires -target:msil) (default\: lib)]:assembly directory:_files -/" + "-Xassem-name+[Name of the output assembly (requires -target:msil)]:assembly name:_files" + "-Xassem-path+[List of assemblies referenced by the program (requires -target:msil)]:assembly path:_files" + "-Xsourcedir+[Mirror source folder structure in output directory (requires -target:msil)]:source directory:_files -/" + + "-Xplugin\:-[Load one or more plugins from file]:plugin file:_files" + "-Xpluginsdir+[Path to search compiler plugins]:plugin directory:_files -/" + "-Xplugin-list[Print a synopsis of loaded plugins]" + "-Xplugin-disable\:-[Disable the given plugin(s)]" + "-Xplugin-require\:-[Abort unless the given plugin(s) are available]" +) + +local -a Y_opts +Y_opts=( + "-Y[Print a synopsis of private options]" + "-Ybuild-manager-debug[Generate debug information for the Refined Build Manager compiler]" + "-Ybuilder-debug\:-[Compile using the specified build manager (default\: none)]:build manager:(none refined simple)" + "-Yclosure-elim[Perform closure elimination]" + "-Ycompact-trees[Use compact tree printer when displaying trees]" + "-Ydead-code[Perform dead code elimination]" + 
"-Ydependent-method-types[Allow dependent method types]" + "-Ydump-classes+[Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders)]:output directory:_files -/" + "-Yeta-expand-keeps-star[Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.]" + "-Ygen-javap+[Generate a parallel output directory of .javap files]:output directory:_files -/" + "-Yinfer-argument-types[Infer types for arguments of overriden methods]" + "-Yinline[Perform inlining when possible]" + "-Yinline-handlers[Perform exception handler inlining when possible]" + "-Yinline-warnings[Emit inlining warnings (normally surpressed due to high volume)]" + "-Yinvalidate+[Invalidate classpath entry before run]:classpath entry" + "-Ylinearizer\:-[Linearizer to use (default\: rpo)]:linearizer:(normal dfs rpo dump)" + "-Ylog-classpath[Output information about what classpath is being applied]" + "-Yno-adapted-args[Do not adapt an argument list (either by inserting unit or creating a tuple) to match the receiver]" + "-Ymacro-debug-lite[Trace essential macro-related activities]" + "-Ymacro-debug-verbose[Trace all macro-related activities: compilation, generation of synthetics, classloading, expansion, exceptions]" + "-Yno-completion[Disable tab-completion in the REPL]" + "-Yno-generic-signatures[Suppress generation of generic signatures for Java]" + "-Yno-imports[Compile without any implicit imports]" + "-Yno-predef[Compile without importing Predef]" + "-Yno-self-type-checks[Suppress check for self-type conformance among inherited members]" + "-Yno-squeeze[Disable creation of compact code in matching]" + "-Ynotnull[Enable (experimental and incomplete) scala.NotNull]" + "-Yoverride-objects[Allow member objects to be overridden]" + "-Yoverride-vars[Allow vars to be overridden]" + "-Ypmat-naive[Desugar matches as naively as possible]" + "-Ypresentation-delay+[Wait number of ms after typing before starting typechecking]" + "-Ypresentation-log+[Log presentation compiler events into file]:log file:_files" + "-Ypresentation-replay+[Replay presentation compiler events from file]:log file:_files" + "-Ypresentation-strict[Do not report type errors in sources with syntax errors]" + "-Ypresentation-verbose[Print information about presentation compiler tasks]" + "-Yprofile-class+[Specify name of profiler class]:profiler class name" + "-Yprofile-memory[Heap snapshot after compiler run (requires jgpagent on JVM -agentpath)]" + "-Yrangepos[Use range positions for syntax trees]" + "-Yrecursion+[Set recursion depth used when locking symbols]" + "-Yreify-copypaste[Dump the reified trees in copypasteable representation]" + "-Yrepl-sync[Do not use asynchronous code for REPL startup]" + "-Yresolve-term-conflict\:-[Resolve term conflicts (default\: error)]:resolution strategy:(package object error)" + "-Yself-in-annots[Include a \"self\" identifier inside of annotations]" + "-Yshow\:-[Show after <phase> (requires -Xshow-class or -Xshow-object)]:phase name:_scala_phases" + "-Yshow-syms[Print the AST symbol hierarchy after each phase]" + "-Yshow-symkinds[Print abbreviated symbol kinds next to symbol names]" + "-Yshow-trees[Print detailed ASTs (requires -Xprint\:phase)]" + "-Yshow-trees-compact[Print detailed ASTs in compact form (requires -Xprint\:)]" + "-Yshow-trees-stringified[Print stringifications along with detailed ASTs (requires -Xprint\:)]" + "-Ystatistics[Print compiler statistics]" + "-Ystruct-dispatch\:-[Structural method dispatch policy 
(default\: poly-cache)]:policy name:(no-cache mono-cache poly-cache invoke-dynamic)" + + "-Ybrowse\:-[Browse the abstract syntax tree after <phase>]:phase name:_scala_phases" + "-Ycheck\:-[Check the tree at the end of <phase>]:phase name:_scala_phases" + "-Ylog\:-[Log operations during <phase>]:phase name:_scala_phases" + "-Yprofile\:-[Profile CPU usage of given phases (requires jgpagent on JVM -agentpath)]:phase name:_scala_phases" + "-Yskip\:-[Skip <phase>]:phase name:_scala_phases" + "-Ystop-after\:-[Stop after given phase <phase>]:phase name:_scala_phases" + "-Ystop-before\:-[Stop before given phase <phase>]:phase name:_scala_phases" + + "-Ywarn-adapted-args[Warn if an argument list is modified to match the receiver]" + "-Ywarn-all[Enable all -Y warnings]" + "-Ywarn-dead-code[Warn when dead code is identified]" + "-Ywarn-inaccessible[Warn about inaccessible types in method signatures]" + "-Ywarn-nullary-override[Warn when non-nullary overrides nullary, e.g. def foo() over def foo]" + "-Ywarn-nullary-unit[Warn when nullary methods return Unit]" + "-Ywarn-numeric-widen[Warn when numerics are widened]" + "-Ywarn-value-discard[Warn when non-Unit expression results are unused]" + + "-Ybuild-manager-debug[Generate debug information for the Refined Build Manager compiler]" + "-Ybuilder-debug\:-[Compile using the specified build manager (default\: none)]:manager:(none refined simple)" + "-Ycompletion-debug[Trace all tab completion activity]" + "-Ydebug[Increase the quantity of debugging output]" + "-Ydoc-debug[Trace all scaladoc activity]" + "-Yide-debug[Generate, validate and output trees using the interactive compiler]" + "-Yinfer-debug[Trace type inference and implicit search]" + "-Yissue-debug[Print stack traces when a context issues an error]" + "-Ypatmat-debug[Trace pattern matching translation]" + "-Ypmat-debug[Trace all pattern matcher activity]" + "-Ypos-debug[Trace position validation]" + "-Ypresentation-debug[Enable debugging output for the presentation compiler]" + "-Yreify-debug[Trace reification]" + "-Yrepl-debug[Trace all REPL activity]" + "-Ytyper-debug[Trace all type assignments]" +) + +local -a scala_opts +scala_opts=( + "-e+[execute <string> as if entered in the repl]:string" \ + "-howtorun+[what to run (default\: guess)]:execution mode:(script object jar guess)" \ + "-i+[preload <file> before starting the repl]:file to preload:_files" \ + "-nc[no compilation daemon\: do not use the fsc offline compiler]" \ + "-save[save the compiled script in a jar for future use]" +) + +case $words[$CURRENT] in + -X*) _arguments $X_opts;; + -Y*) _arguments $Y_opts;; + *) case $service in + scala) _arguments $scala_opts $shared_opts "*::filename:_files";; + scalac) _arguments $shared_opts "*::filename:_files";; + esac +esac + +return 0 diff --git a/plugins/scd/README.md b/plugins/scd/README.md new file mode 100644 index 000000000..197cea50a --- /dev/null +++ b/plugins/scd/README.md @@ -0,0 +1,122 @@ +# scd - smart change of directory + +Define `scd` shell function for changing to any directory with +a few keystrokes. + +`scd` keeps history of the visited directories, which serves as an index of +the known paths. The directory index is updated after every `cd` command in +the shell and can be also filled manually by running `scd -a`. To switch to +some directory, `scd` needs few fragments of the desired path to match with +the index. A selection menu is displayed in case of several matches, with a +preference given to recently visited paths. 
`scd` can create permanent +directory aliases, which appear as named directories in zsh session. + +## INSTALLATION + +For oh-my-zsh, add `scd` to the `plugins` array in the ~/.zshrc file as in the +[template file](../../templates/zshrc.zsh-template#L45). + +Besides zsh, `scd` can be used with *bash*, *dash* or *tcsh* +shells and is also available as [Vim](http://www.vim.org/) plugin and +[IPython](http://ipython.org/) extension. For installation details, see +https://github.com/pavoljuhas/smart-change-directory. + +## SYNOPSIS + +```sh +scd [options] [pattern1 pattern2 ...] +``` + +## OPTIONS + +<dl><dt> +-a, --add</dt><dd> + add specified directories to the directory index.</dd><dt> + +--unindex</dt><dd> + remove specified directories from the index.</dd><dt> + +-r, --recursive</dt><dd> + apply options <em>--add</em> or <em>--unindex</em> recursively.</dd><dt> + +--alias=ALIAS</dt><dd> + create alias for the current or specified directory and save it to + <em>~/.scdalias.zsh</em>.</dd><dt> + +--unalias</dt><dd> + remove ALIAS definition for the current or specified directory from + <em>~/.scdalias.zsh</em>.</dd><dt> + +--list</dt><dd> + show matching directories and exit.</dd><dt> + +-v, --verbose</dt><dd> + display directory rank in the selection menu.</dd><dt> + +-h, --help</dt><dd> + display this options summary and exit.</dd> +</dl> + +## Examples + +```sh +# Index recursively some paths for the very first run +scd -ar ~/Documents/ + +# Change to a directory path matching "doc" +scd doc + +# Change to a path matching all of "a", "b" and "c" +scd a b c + +# Change to a directory path that ends with "ts" +scd "ts(#e)" + +# Show selection menu and ranking of 20 most likely directories +scd -v + +# Alias current directory as "xray" +scd --alias=xray + +# Jump to a previously defined aliased directory +scd xray +``` + +# FILES + +<dl><dt> +~/.scdhistory</dt><dd> + time-stamped index of visited directories.</dd><dt> + +~/.scdalias.zsh</dt><dd> + scd-generated definitions of directory aliases.</dd> +</dl> + +# ENVIRONMENT + +<dl><dt> +SCD_HISTFILE</dt><dd> + path to the scd index file (by default ~/.scdhistory).</dd><dt> + +SCD_HISTSIZE</dt><dd> + maximum number of entries in the index (5000). Index is trimmed when it + exceeds <em>SCD_HISTSIZE</em> by more than 20%.</dd><dt> + +SCD_MENUSIZE</dt><dd> + maximum number of items for directory selection menu (20).</dd><dt> + +SCD_MEANLIFE</dt><dd> + mean lifetime in seconds for exponential decay of directory + likelihood (86400).</dd><dt> + +SCD_THRESHOLD</dt><dd> + threshold for cumulative directory likelihood. Directories with + a lower likelihood compared to the best match are excluded (0.005). + </dd><dt> + +SCD_SCRIPT</dt><dd> + command script file where scd writes the final <code>cd</code> + command. This variable must be defined when scd runs in its own + process rather than as a shell function. It is up to the + scd caller to use the output in <em>SCD_SCRIPT</em>.</dd> +</dl> diff --git a/plugins/scd/scd b/plugins/scd/scd new file mode 100755 index 000000000..1567d2736 --- /dev/null +++ b/plugins/scd/scd @@ -0,0 +1,353 @@ +#!/bin/zsh -f + +emulate -L zsh +local EXIT=return +if [[ $(whence -w $0) == *:' 'command ]]; then + emulate -R zsh + local RUNNING_AS_COMMAND=1 + EXIT=exit +fi + +local DOC='scd -- smart change to a recently used directory +usage: scd [options] [pattern1 pattern2 ...] +Go to a directory path that contains all fixed string patterns. 
Prefer +recently visited directories and directories with patterns in their tail +component. Display a selection menu in case of multiple matches. + +Options: + -a, --add add specified directories to the directory index + --unindex remove specified directories from the index + -r, --recursive apply options --add or --unindex recursively + --alias=ALIAS create alias for the current or specified directory and + store it in ~/.scdalias.zsh + --unalias remove ALIAS definition for the current or specified + directory from ~/.scdalias.zsh + --list show matching directories and exit + -v, --verbose display directory rank in the selection menu + -h, --help display this message and exit +' + +local SCD_HISTFILE=${SCD_HISTFILE:-${HOME}/.scdhistory} +local SCD_HISTSIZE=${SCD_HISTSIZE:-5000} +local SCD_MENUSIZE=${SCD_MENUSIZE:-20} +local SCD_MEANLIFE=${SCD_MEANLIFE:-86400} +local SCD_THRESHOLD=${SCD_THRESHOLD:-0.005} +local SCD_SCRIPT=${RUNNING_AS_COMMAND:+$SCD_SCRIPT} +local SCD_ALIAS=~/.scdalias.zsh + +local ICASE a d m p i tdir maxrank threshold +local opt_help opt_add opt_unindex opt_recursive opt_verbose +local opt_alias opt_unalias opt_list +local -A drank dalias +local dmatching +local last_directory + +setopt extendedhistory extendedglob noautonamedirs brace_ccl + +# If SCD_SCRIPT is defined make sure the file exists and is empty. +# This removes any previous old commands. +[[ -n "$SCD_SCRIPT" ]] && [[ -s $SCD_SCRIPT || ! -f $SCD_SCRIPT ]] && ( + umask 077 + : >| $SCD_SCRIPT +) + +# process command line options +zmodload -i zsh/zutil +zmodload -i zsh/datetime +zparseopts -D -- a=opt_add -add=opt_add -unindex=opt_unindex \ + r=opt_recursive -recursive=opt_recursive \ + -alias:=opt_alias -unalias=opt_unalias -list=opt_list \ + v=opt_verbose -verbose=opt_verbose h=opt_help -help=opt_help \ + || $EXIT $? + +if [[ -n $opt_help ]]; then + print $DOC + $EXIT +fi + +# load directory aliases if they exist +[[ -r $SCD_ALIAS ]] && source $SCD_ALIAS + +# works faster than the (:a) modifier and is compatible with zsh 4.2.6 +_scd_Y19oug_abspath() { + set -A $1 ${(ps:\0:)"$( + unfunction -m "*"; shift + for d; do + cd $d && print -Nr -- $PWD && cd $OLDPWD + done + )"} +} + +# define directory alias +if [[ -n $opt_alias ]]; then + if [[ -n $1 && ! -d $1 ]]; then + print -u2 "'$1' is not a directory." + $EXIT 1 + fi + a=${opt_alias[-1]#=} + _scd_Y19oug_abspath d ${1:-$PWD} + # alias in the current shell, update alias file if successful + hash -d -- $a=$d && + ( + umask 077 + hash -dr + [[ -r $SCD_ALIAS ]] && source $SCD_ALIAS + hash -d -- $a=$d + hash -dL >| $SCD_ALIAS + ) + $EXIT $? +fi + +# undefine directory alias +if [[ -n $opt_unalias ]]; then + if [[ -n $1 && ! -d $1 ]]; then + print -u2 "'$1' is not a directory." + $EXIT 1 + fi + _scd_Y19oug_abspath a ${1:-$PWD} + a=$(print -rD ${a}) + if [[ $a != [~][^/]## ]]; then + $EXIT + fi + a=${a#[~]} + # unalias in the current shell, update alias file if successful + if unhash -d -- $a 2>/dev/null && [[ -r $SCD_ALIAS ]]; then + ( + umask 077 + hash -dr + source $SCD_ALIAS + unhash -d -- $a 2>/dev/null && + hash -dL >| $SCD_ALIAS + ) + fi + $EXIT $? 
+fi + +# Rewrite directory index if it is at least 20% oversized +if [[ -s $SCD_HISTFILE ]] && \ +(( $(wc -l <$SCD_HISTFILE) > 1.2 * $SCD_HISTSIZE )); then + m=( ${(f)"$(<$SCD_HISTFILE)"} ) + print -lr -- ${m[-$SCD_HISTSIZE,-1]} >| ${SCD_HISTFILE} +fi + +# Determine the last recorded directory +if [[ -s ${SCD_HISTFILE} ]]; then + last_directory=${"$(tail -1 ${SCD_HISTFILE})"#*;} +fi + +# Internal functions are prefixed with "_scd_Y19oug_". +# The "record" function adds its arguments to the directory index. +_scd_Y19oug_record() { + while [[ -n $last_directory && $1 == $last_directory ]]; do + shift + done + if [[ $# -gt 0 ]]; then + ( umask 077 + p=": ${EPOCHSECONDS}:0;" + print -lr -- ${p}${^*} >>| $SCD_HISTFILE ) + fi +} + +if [[ -n $opt_add ]]; then + for d; do + if [[ ! -d $d ]]; then + print -u2 "Directory '$d' does not exist." + $EXIT 2 + fi + done + _scd_Y19oug_abspath m ${*:-$PWD} + _scd_Y19oug_record $m + if [[ -n $opt_recursive ]]; then + for d in $m; do + print -n "scanning ${d} ... " + _scd_Y19oug_record ${d}/**/*(-/N) + print "[done]" + done + fi + $EXIT +fi + +# take care of removing entries from the directory index +if [[ -n $opt_unindex ]]; then + if [[ ! -s $SCD_HISTFILE ]]; then + $EXIT + fi + # expand existing directories in the argument list + for i in {1..$#}; do + if [[ -d ${argv[i]} ]]; then + _scd_Y19oug_abspath d ${argv[i]} + argv[i]=${d} + fi + done + m="$(awk -v recursive=${opt_recursive} ' + BEGIN { + for (i = 2; i < ARGC; ++i) { + argset[ARGV[i]] = 1; + delete ARGV[i]; + } + } + 1 { + d = $0; sub(/^[^;]*;/, "", d); + if (d in argset) next; + } + recursive { + for (a in argset) { + if (substr(d, 1, length(a) + 1) == a"/") next; + } + } + { print $0 } + ' $SCD_HISTFILE ${*:-$PWD} )" || $EXIT $? + : >| ${SCD_HISTFILE} + [[ ${#m} == 0 ]] || print -r -- $m >> ${SCD_HISTFILE} + $EXIT +fi + +# The "action" function is called when there is just one target directory. +_scd_Y19oug_action() { + cd $1 || return $? + if [[ -z $SCD_SCRIPT && -n $RUNNING_AS_COMMAND ]]; then + print -u2 "Warning: running as command with SCD_SCRIPT undefined." 
+ fi + if [[ -n $SCD_SCRIPT ]]; then + print -r "cd ${(q)1}" >| $SCD_SCRIPT + fi +} + +# Match and rank patterns to the index file +# set global arrays dmatching and drank +_scd_Y19oug_match() { + ## single argument that is an existing directory or directory alias + if [[ $# == 1 ]] && \ + [[ -d ${d::=$1} || -d ${d::=${nameddirs[$1]}} ]] && [[ -x $d ]]; + then + _scd_Y19oug_abspath dmatching $d + drank[${dmatching[1]}]=1 + return + fi + + # ignore case unless there is an argument with an uppercase letter + [[ "$*" == *[[:upper:]]* ]] || ICASE='(#i)' + + # calculate rank of all directories in the SCD_HISTFILE and keep it as drank + # include a dummy entry for splitting of an empty string is buggy + [[ -s $SCD_HISTFILE ]] && drank=( ${(f)"$( + print -l /dev/null -10 + <$SCD_HISTFILE \ + awk -v epochseconds=$EPOCHSECONDS -v meanlife=$SCD_MEANLIFE ' + BEGIN { FS = "[:;]"; } + length($0) < 4096 && $2 > 0 { + tau = 1.0 * ($2 - epochseconds) / meanlife; + if (tau < -4.61) tau = -4.61; + prec = exp(tau); + sub(/^[^;]*;/, ""); + if (NF) ptot[$0] += prec; + } + END { for (di in ptot) { print di; print ptot[di]; } }' + )"} + ) + unset "drank[/dev/null]" + + # filter drank to the entries that match all arguments + for a; do + p=${ICASE}"*${a}*" + drank=( ${(kv)drank[(I)${~p}]} ) + done + + # build a list of matching directories reverse-sorted by their probabilities + dmatching=( ${(f)"$( + for d p in ${(kv)drank}; do + print -r -- "$p $d"; + done | sort -grk1 | cut -d ' ' -f 2- + )"} + ) + + # if some directory paths match all patterns in order, discard all others + p=${ICASE}"*${(j:*:)argv}*" + m=( ${(M)dmatching:#${~p}} ) + [[ -d ${m[1]} ]] && dmatching=( $m ) + # if some directory names match last pattern, discard all others + p=${ICASE}"*${(j:*:)argv}[^/]#" + m=( ${(M)dmatching:#${~p}} ) + [[ -d ${m[1]} ]] && dmatching=( $m ) + # if some directory names match all patterns, discard all others + m=( $dmatching ) + for a; do + p=${ICASE}"*/[^/]#${a}[^/]#" + m=( ${(M)m:#${~p}} ) + done + [[ -d ${m[1]} ]] && dmatching=( $m ) + # if some directory names match all patterns in order, discard all others + p=${ICASE}"/*${(j:[^/]#:)argv}[^/]#" + m=( ${(M)dmatching:#${~p}} ) + [[ -d ${m[1]} ]] && dmatching=( $m ) + + # do not match $HOME or $PWD when run without arguments + if [[ $# == 0 ]]; then + dmatching=( ${dmatching:#(${HOME}|${PWD})} ) + fi + + # keep at most SCD_MENUSIZE of matching and valid directories + m=( ) + for d in $dmatching; do + [[ ${#m} == $SCD_MENUSIZE ]] && break + [[ -d $d && -x $d ]] && m+=$d + done + dmatching=( $m ) + + # find the maximum rank + maxrank=0.0 + for d in $dmatching; do + [[ ${drank[$d]} -lt maxrank ]] || maxrank=${drank[$d]} + done + + # discard all directories below the rank threshold + threshold=$(( maxrank * SCD_THRESHOLD )) + dmatching=( ${^dmatching}(Ne:'(( ${drank[$REPLY]} >= threshold ))':) ) +} + +_scd_Y19oug_match $* + +## process whatever directories that remained +if [[ ${#dmatching} == 0 ]]; then + print -u2 "No matching directory." + $EXIT 1 +fi + +## build formatted directory aliases for selection menu or list display +for d in $dmatching; do + if [[ -n ${opt_verbose} ]]; then + dalias[$d]=$(printf "%.3g %s" ${drank[$d]} $d) + else + dalias[$d]=$(print -Dr -- $d) + fi +done + +## process the --list option +if [[ -n $opt_list ]]; then + for d in $dmatching; do + print -r -- "# ${dalias[$d]}" + print -r -- $d + done + $EXIT +fi + +## process single directory match +if [[ ${#dmatching} == 1 ]]; then + _scd_Y19oug_action $dmatching + $EXIT $? 
+fi + +## here we have multiple matches - display selection menu +a=( {a-z} {A-Z} ) +p=( ) +for i in {1..${#dmatching}}; do + [[ -n ${a[i]} ]] || break + p+="${a[i]}) ${dalias[${dmatching[i]}]}" +done + +print -c -r -- $p + +if read -s -k 1 d && [[ ${i::=${a[(I)$d]}} -gt 0 ]]; then + _scd_Y19oug_action ${dmatching[i]} + $EXIT $? +fi diff --git a/plugins/scd/scd.plugin.zsh b/plugins/scd/scd.plugin.zsh new file mode 100644 index 000000000..0197c53a1 --- /dev/null +++ b/plugins/scd/scd.plugin.zsh @@ -0,0 +1,19 @@ +## The scd script should autoload as a shell function. +autoload scd + + +## If the scd function exists, define a change-directory-hook function +## to record visited directories in the scd index. +if [[ ${+functions[scd]} == 1 ]]; then + scd_chpwd_hook() { scd --add $PWD } + autoload add-zsh-hook + add-zsh-hook chpwd scd_chpwd_hook +fi + + +## Allow scd usage with unquoted wildcard characters such as "*" or "?". +alias scd='noglob scd' + + +## Load the directory aliases created by scd if any. +if [[ -s ~/.scdalias.zsh ]]; then source ~/.scdalias.zsh; fi diff --git a/plugins/sfffe/sfffe.plugin.zsh b/plugins/sfffe/sfffe.plugin.zsh new file mode 100644 index 000000000..a0f034908 --- /dev/null +++ b/plugins/sfffe/sfffe.plugin.zsh @@ -0,0 +1,28 @@ +# ------------------------------------------------------------------------------ +# FILE: sfffe.plugin.zsh +# DESCRIPTION: search file for FE +# AUTHOR: yleo77 (ylep77@gmail.com) +# VERSION: 0.1 +# REQUIRE: ack +# ------------------------------------------------------------------------------ + +if [ ! -x $(which ack) ]; then + echo \'ack\' is not installed! + exit -1 +fi + +ajs() { + ack "$@" --type js +} + +acss() { + ack "$@" --type css +} + +fjs() { + find ./ -name "$@*" -type f | grep '\.js' +} + +fcss() { + find ./ -name "$@*" -type f | grep '\.css' +} diff --git a/plugins/singlechar/singlechar.plugin.zsh b/plugins/singlechar/singlechar.plugin.zsh new file mode 100644 index 000000000..44bd998aa --- /dev/null +++ b/plugins/singlechar/singlechar.plugin.zsh @@ -0,0 +1,133 @@ +################################################################################ +# FILE: singlechar.plugin.zsh +# DESCRIPTION: oh-my-zsh plugin file. +# AUTHOR: Michael Varner (musikmichael@web.de) +# VERSION: 1.0.0 +# +# This plugin adds single char shortcuts (and combinations) for some commands. +# +################################################################################ + +########################### +# Settings + +# These can be overwritten any time. +# If they are not set yet, they will be +# overwritten with their default values + +default GREP grep +default ROOT sudo +default WGET wget +default CURL curl + +env_default PAGER less + +########################### +# Alias + +# CAT, GREP, CURL, WGET + +alias y='"$GREP" -Ri' +alias n='"$GREP" -Rvi' + +alias f.='find . 
| "$GREP"' +alias f:='find' + +alias f='"$GREP" -Rli' +alias fn='"$GREP" -Rlvi' + +alias w='echo >' +alias a='echo >>' + +alias c='cat' +alias p='"$PAGER"' + +alias m='man' + +alias d='"$WGET"' +alias u='"$CURL"' + +# enhanced writing + +alias w:='cat >' +alias a:='cat >>' + +# XARGS + +alias x='xargs' + +alias xy='xargs "$GREP" -Ri' +alias xn='xargs "$GREP" -Riv' + +alias xf.='xargs find | "$GREP"' +alias xf:='xargs find' + +alias xf='xargs "$GREP" -Rli' +alias xfn='xargs "$GREP" -Rlvi' + +alias xw='xargs echo >' +alias xa='xargs echo >>' + +alias xc='xargs cat' +alias xp='xargs "$PAGER"' + +alias xm='xargs man' + +alias xd='xargs "$WGET"' +alias xu='xargs "$CURL"' + +alias xw:='xargs cat >' +alias xa:='xargs >>' + +# SUDO + +alias s='"$ROOT"' + +alias sy='"$ROOT" "$GREP" -Ri' +alias sn='"$ROOT" "$GREP" -Riv' + +alias sf.='"$ROOT" find . | "$GREP"' +alias sf:='"$ROOT" find' + +alias sf='"$ROOT" "$GREP" -Rli' +alias sfn='"$ROOT" "$GREP" -Rlvi' + +alias sw='"$ROOT" echo >' +alias sa='"$ROOT" echo >>' + +alias sc='"$ROOT" cat' +alias sp='"$ROOT" "$PAGER"' + +alias sm='"$ROOT" man' + +alias sd='"$ROOT" "$WGET"' + +alias sw:='"$ROOT" cat >' +alias sa:='"$ROOT" cat >>' + +# SUDO-XARGS + +alias sx='"$ROOT" xargs' + +alias sxy='"$ROOT" xargs "$GREP" -Ri' +alias sxn='"$ROOT" xargs "$GREP" -Riv' + +alias sxf.='"$ROOT" xargs find | "$GREP"' +alias sxf:='"$ROOT" xargs find' + +alias sxf='"$ROOT" xargs "$GREP" -li' +alias sxfn='"$ROOT" xargs "$GREP" -lvi' + +alias sxw='"$ROOT" xargs echo >' +alias sxa='"$ROOT" xargs echo >>' + +alias sxc='"$ROOT" xargs cat' +alias sxp='"$ROOT" xargs "$PAGER"' + +alias sxm='"$ROOT" xargs man' + +alias sxd='"$ROOT" xargs "$WGET"' +alias sxu='"$ROOT" xargs "$CURL"' + +alias sxw:='"$ROOT" xargs cat >' +alias sxa:='"$ROOT" xargs cat >>'
\ No newline at end of file diff --git a/plugins/sprunge/sprunge.plugin.zsh b/plugins/sprunge/sprunge.plugin.zsh index 9f9432ac8..fcc9004f8 100644 --- a/plugins/sprunge/sprunge.plugin.zsh +++ b/plugins/sprunge/sprunge.plugin.zsh @@ -57,8 +57,6 @@ sprunge() { fi else echo Using input from a pipe or STDIN redirection... >&2 - while read -r line ; do - echo $line - done | curl -F 'sprunge=<-' http://sprunge.us + curl -F 'sprunge=<-' http://sprunge.us fi } diff --git a/plugins/ssh-agent/ssh-agent.plugin.zsh b/plugins/ssh-agent/ssh-agent.plugin.zsh index c4e92a1fe..610ad34dc 100644 --- a/plugins/ssh-agent/ssh-agent.plugin.zsh +++ b/plugins/ssh-agent/ssh-agent.plugin.zsh @@ -1,16 +1,22 @@ # # INSTRUCTIONS # -# To enabled agent forwarding support add the following to +# To enable agent forwarding support add the following to # your .zshrc file: # # zstyle :omz:plugins:ssh-agent agent-forwarding on # -# To load multiple identies use the identities style, For +# To load multiple identities use the identities style, For # example: # -# zstyle :omz:plugins:ssh-agent id_rsa id_rsa2 id_github +# zstyle :omz:plugins:ssh-agent identities id_rsa id_rsa2 id_github # +# To set the maximum lifetime of the identities, use the +# lifetime style. The lifetime may be specified in seconds +# or as described in sshd_config(5) (see TIME FORMATS) +# If left unspecified, the default lifetime is forever. +# +# zstyle :omz:plugins:ssh-agent lifetime 4h # # CREDITS # @@ -21,24 +27,35 @@ # Florent Thoumie and Jonas Pfenniger # -local _plugin__ssh_env=$HOME/.ssh/environment-$HOST +local _plugin__ssh_env local _plugin__forwarding function _plugin__start_agent() { local -a identities + local lifetime + zstyle -s :omz:plugins:ssh-agent lifetime lifetime # start ssh-agent and setup environment - /usr/bin/env ssh-agent | sed 's/^echo/#echo/' > ${_plugin__ssh_env} + /usr/bin/env ssh-agent ${lifetime:+-t} ${lifetime} | sed 's/^echo/#echo/' > ${_plugin__ssh_env} chmod 600 ${_plugin__ssh_env} . ${_plugin__ssh_env} > /dev/null # load identies - zstyle -a :omz:plugins:ssh-agent identities identities - echo starting... + zstyle -a :omz:plugins:ssh-agent identities identities + echo starting ssh-agent... + /usr/bin/ssh-add $HOME/.ssh/${^identities} } +# Get the filename to store/lookup the environment from +if (( $+commands[scutil] )); then + # It's OS X! + _plugin__ssh_env="$HOME/.ssh/environment-$(scutil --get ComputerName)" +else + _plugin__ssh_env="$HOME/.ssh/environment-$HOST" +fi + # test if agent-forwarding is enabled zstyle -b :omz:plugins:ssh-agent agent-forwarding _plugin__forwarding if [[ ${_plugin__forwarding} == "yes" && -n "$SSH_AUTH_SOCK" ]]; then @@ -48,7 +65,7 @@ if [[ ${_plugin__forwarding} == "yes" && -n "$SSH_AUTH_SOCK" ]]; then elif [ -f "${_plugin__ssh_env}" ]; then # Source SSH settings, if applicable . 
${_plugin__ssh_env} > /dev/null - ps -ef | grep ${SSH_AGENT_PID} | grep ssh-agent$ > /dev/null || { + ps x | grep ${SSH_AGENT_PID} | grep ssh-agent > /dev/null || { _plugin__start_agent; } else diff --git a/plugins/sublime/sublime.plugin.zsh b/plugins/sublime/sublime.plugin.zsh index 1b2cbcdce..438f386fb 100755..100644 --- a/plugins/sublime/sublime.plugin.zsh +++ b/plugins/sublime/sublime.plugin.zsh @@ -1,21 +1,41 @@ # Sublime Text 2 Aliases -#unamestr = 'uname' - -local _sublime_darwin_subl=/Applications/Sublime\ Text\ 2.app/Contents/SharedSupport/bin/subl if [[ $('uname') == 'Linux' ]]; then - if [ -f '/usr/bin/sublime_text' ]; then - st_run() { nohup /usr/bin/sublime_text $@ > /dev/null & } - else - st_run() { nohup /usr/bin/sublime-text $@ > /dev/null & } - fi -alias st=st_run + local _sublime_linux_paths > /dev/null 2>&1 + _sublime_linux_paths=( + "$HOME/bin/sublime_text" + "/opt/sublime_text/sublime_text" + "/usr/bin/sublime_text" + "/usr/local/bin/sublime_text" + "/usr/bin/subl" + ) + for _sublime_path in $_sublime_linux_paths; do + if [[ -a $_sublime_path ]]; then + st_run() { $_sublime_path $@ >/dev/null 2>&1 &| } + alias st=st_run + break + fi + done + elif [[ $('uname') == 'Darwin' ]]; then - # Check if Sublime is installed in user's home application directory - if [[ -a $HOME/${_sublime_darwin_subl} ]]; then - alias st='$HOME/${_sublime_darwin_subl}' - else - alias st='${_sublime_darwin_subl}' - fi + local _sublime_darwin_paths > /dev/null 2>&1 + _sublime_darwin_paths=( + "/usr/local/bin/subl" + "/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl" + "/Applications/Sublime Text 3.app/Contents/SharedSupport/bin/subl" + "/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl" + "$HOME/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl" + "$HOME/Applications/Sublime Text 3.app/Contents/SharedSupport/bin/subl" + "$HOME/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl" + ) + + for _sublime_path in $_sublime_darwin_paths; do + if [[ -a $_sublime_path ]]; then + alias subl="'$_sublime_path'" + alias st=subl + break + fi + done fi + alias stt='st .' 
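For context, the reworked ssh-agent plugin above is driven entirely by `zstyle` settings read from `~/.zshrc`. A minimal sketch using only the styles named in that hunk (the identity file names are just the examples from its comments, not requirements):

```zsh
# ~/.zshrc — example ssh-agent plugin configuration
zstyle :omz:plugins:ssh-agent agent-forwarding on                    # reuse a forwarded agent when one is present
zstyle :omz:plugins:ssh-agent identities id_rsa id_rsa2 id_github   # key files under ~/.ssh to load
zstyle :omz:plugins:ssh-agent lifetime 4h                            # optional; identities are kept forever if unset

plugins=(git ssh-agent)
```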
diff --git a/plugins/sudo/sudo.plugin.zsh b/plugins/sudo/sudo.plugin.zsh new file mode 100644 index 000000000..d12e06853 --- /dev/null +++ b/plugins/sudo/sudo.plugin.zsh @@ -0,0 +1,22 @@ +# ------------------------------------------------------------------------------ +# Description +# ----------- +# +# sudo will be inserted before the command +# +# ------------------------------------------------------------------------------ +# Authors +# ------- +# +# * Dongweiming <ciici123@gmail.com> +# +# ------------------------------------------------------------------------------ + +sudo-command-line() { +[[ -z $BUFFER ]] && zle up-history +[[ $BUFFER != sudo\ * ]] && BUFFER="sudo $BUFFER" +zle end-of-line +} +zle -N sudo-command-line +# Defined shortcut keys: [Esc] [Esc] +bindkey "\e\e" sudo-command-line diff --git a/plugins/supervisor/_supervisord b/plugins/supervisor/_supervisord index 34d27805d..e0cb670e1 100644 --- a/plugins/supervisor/_supervisord +++ b/plugins/supervisor/_supervisord @@ -7,6 +7,7 @@ _arguments \ {--configuration,-c}"[configuration file]:FILENAME:_files" \ {--nodaemon,-n}"[run in the foreground (same as 'nodaemon true' in config file)]" \ {--help,-h}"[print this usage message and exit]:" \ + {--version,-v}"[print supervisord version number and exit]:" \ {--user,-u}"[run supervisord as this user]:USER:_users" \ {--umask,-m}"[use this umask for daemon subprocess (default is 022)]" \ {--directory,-d}"[directory to chdir to when daemonized]" \ diff --git a/plugins/suse/suse.plugin.zsh b/plugins/suse/suse.plugin.zsh index d46286948..afd8ecabd 100644 --- a/plugins/suse/suse.plugin.zsh +++ b/plugins/suse/suse.plugin.zsh @@ -1,7 +1,61 @@ -alias zi='sudo zypper install' -alias zrf='sudo zypper refresh' -alias zs='zypper search' -alias zup='sudo zypper dist-upgrade' -alias zrm='sudo zypper remove' -alias zp='sudo zypper patch' -alias zps='sudo zypper ps' +#Alias for Zypper according to the offical Zypper's alias + +#Main commands +alias z='sudo zypper' #call zypper +alias zh='sudo zypper -h' #print help +alias zhse='sudo zypper -h se' #print help for the search command +alias zlicenses='sudo zypper licenses' #prints a report about licenses and EULAs of installed packages +alias zps='sudo zypper ps' #list process using deleted files +alias zshell='sudo zypper shell' #open a zypper shell session +alias zsource-download='sudo zypper source-download' #download source rpms for all installed packages +alias ztos='sudo zypper tos' #shows the ID string of the target operating system +alias zvcmp='sudo zypper vcmp' #tell whether version1 is older or newer than version2 + +#Packages commands +alias zin='sudo zypper in' #install packages +alias zinr='sudo zypper inr' #install newly added packages recommended by already installed ones +alias zrm='sudo zypper rm' #remove packages +alias zsi='sudo zypper si' #install source of a package +alias zve='sudo zypper ve' #verify dependencies of installed packages + +#Updates commands +alias zdup='sudo zypper dup' #upgrade packages +alias zlp='sudo zypper lp' #list necessary patchs +alias zlu='sudo zypper lu' #list updates +alias zpchk='sudo zypper pchk' #check for patches +alias zup='sudo zypper up' #update packages +alias zpatch='sudo zypper patch' #install patches + +#Request commands +alias zif='sudo zypper if' #display info about packages +alias zpa='sudo zypper pa' #list packages +alias zpatch-info='sudo zypper patch-info' #display info about patches +alias zpattern-info='sudo zypper patch-info' #display info about patterns +alias zproduct-info='sudo 
zypper patch-info' #display info about products
+alias zpch='sudo zypper pch' #list all patches
+alias zpd='sudo zypper pd' #list products
+alias zpt='sudo zypper pt' #list patterns
+alias zse='sudo zypper se' #search for packages
+alias zwp='sudo zypper wp' #list all packages providing the specified capability
+
+#Repositories commands
+alias zar='sudo zypper ar' #add a repository
+alias zcl='sudo zypper clean' #clean cache
+alias zlr='sudo zypper lr' #list repositories
+alias zmr='sudo zypper mr' #modify repositories
+alias znr='sudo zypper nr' #rename repositories (for the alias only)
+alias zref='sudo zypper ref' #refresh repositories
+alias zrr='sudo zypper rr' #remove repositories
+
+#Services commands
+alias zas='sudo zypper as' #add a service specified by URI to the system
+alias zms='sudo zypper ms' #modify properties of specified services
+alias zrefs='sudo zypper refs' #refreshing a service means executing the service's special task
+alias zrs='sudo zypper rs' #remove specified repository index service from the system
+alias zls='sudo zypper ls' #list services defined on the system
+
+#Package Locks Management commands
+alias zal='sudo zypper al' #add a package lock
+alias zcl='sudo zypper cl' #remove unused locks
+alias zll='sudo zypper ll' #list currently active package locks
+alias zrl='sudo zypper rl' #remove specified package lock
diff --git a/plugins/svn-fast-info/svn-fast-info.plugin.zsh b/plugins/svn-fast-info/svn-fast-info.plugin.zsh
new file mode 100644
index 000000000..ea19bcea0
--- /dev/null
+++ b/plugins/svn-fast-info/svn-fast-info.plugin.zsh
@@ -0,0 +1,73 @@
+# vim:ft=zsh ts=2 sw=2 sts=2 et
+#
+# Faster alternative to the current SVN plugin implementation.
+#
+# Works with svn 1.6, 1.7, 1.8.
+# Use the `svn_prompt_info` function to query the svn data.
+# It's faster because of its efficient use of svn (a single svn call), which saves a lot of time on a huge codebase.
+# It displays the current status of the local files (added, deleted, modified, replaced, and so on).
+# +# Use as a drop-in replacement of the svn plugin not as complementary plugin + +function svn_prompt_info() { + local info + info=$(svn info 2>&1) || return 1; # capture stdout and stderr + local repo_need_upgrade=$(svn_repo_need_upgrade $info) + + if [[ -n $repo_need_upgrade ]]; then + printf '%s%s%s%s%s%s%s\n' \ + $ZSH_PROMPT_BASE_COLOR \ + $ZSH_THEME_SVN_PROMPT_PREFIX \ + $ZSH_PROMPT_BASE_COLOR \ + $repo_need_upgrade \ + $ZSH_PROMPT_BASE_COLOR \ + $ZSH_THEME_SVN_PROMPT_SUFFIX \ + $ZSH_PROMPT_BASE_COLOR + else + printf '%s%s%s %s%s:%s%s%s%s%s' \ + $ZSH_PROMPT_BASE_COLOR \ + $ZSH_THEME_SVN_PROMPT_PREFIX \ + \ + "$(svn_status_info $info)" \ + $ZSH_PROMPT_BASE_COLOR \ + \ + $ZSH_THEME_BRANCH_NAME_COLOR \ + $(svn_current_branch_name $info) \ + $ZSH_PROMPT_BASE_COLOR \ + \ + $(svn_current_revision $info) \ + $ZSH_PROMPT_BASE_COLOR \ + \ + $ZSH_THEME_SVN_PROMPT_SUFFIX \ + $ZSH_PROMPT_BASE_COLOR + fi +} + +function svn_repo_need_upgrade() { + grep -q "E155036" <<< ${1:-$(svn info 2> /dev/null)} && \ + echo "E155036: upgrade repo with svn upgrade" +} + +function svn_current_branch_name() { + grep '^URL:' <<< "${1:-$(svn info 2> /dev/null)}" | egrep -o '(tags|branches)/[^/]+|trunk' +} + +function svn_repo_root_name() { + grep '^Repository\ Root:' <<< "${1:-$(svn info 2> /dev/null)}" | sed 's#.*/##' +} + +function svn_current_revision() { + echo "${1:-$(svn info 2> /dev/null)}" | sed -n 's/Revision: //p' +} + +function svn_status_info() { + local svn_status_string="$ZSH_THEME_SVN_PROMPT_CLEAN" + local svn_status="$(svn status 2> /dev/null)"; + if grep -E '^\s*A' &> /dev/null <<< $svn_status; then svn_status_string="$svn_status_string ${ZSH_THEME_SVN_PROMPT_ADDITIONS:-+}"; fi + if grep -E '^\s*D' &> /dev/null <<< $svn_status; then svn_status_string="$svn_status_string ${ZSH_THEME_SVN_PROMPT_DELETIONS:-✖}"; fi + if grep -E '^\s*M' &> /dev/null <<< $svn_status; then svn_status_string="$svn_status_string ${ZSH_THEME_SVN_PROMPT_MODIFICATIONS:-✎}"; fi + if grep -E '^\s*[R~]' &> /dev/null <<< $svn_status; then svn_status_string="$svn_status_string ${ZSH_THEME_SVN_PROMPT_REPLACEMENTS:-∿}"; fi + if grep -E '^\s*\?' 
&> /dev/null <<< $svn_status; then svn_status_string="$svn_status_string ${ZSH_THEME_SVN_PROMPT_UNTRACKED:-?}"; fi + if grep -E '^\s*[CI!L]' &> /dev/null <<< $svn_status; then svn_status_string="$svn_status_string ${ZSH_THEME_SVN_PROMPT_DIRTY:-'!'}"; fi + echo $svn_status_string +} diff --git a/plugins/svn/svn.plugin.zsh b/plugins/svn/svn.plugin.zsh index e38e8920b..ef6da5bd3 100644 --- a/plugins/svn/svn.plugin.zsh +++ b/plugins/svn/svn.plugin.zsh @@ -1,62 +1,95 @@ - -function svn_prompt_info { - if [ $(in_svn) ]; then - if [ "x$SVN_SHOW_BRANCH" = "xtrue" ]; then - unset SVN_SHOW_BRANCH - _DISPLAY=$(svn_get_branch_name) - else - _DISPLAY=$(svn_get_repo_name) - fi - echo "$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_SVN_PROMPT_PREFIX\ -$ZSH_THEME_REPO_NAME_COLOR$_DISPLAY$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_SVN_PROMPT_SUFFIX$ZSH_PROMPT_BASE_COLOR$(svn_dirty)$ZSH_PROMPT_BASE_COLOR" - unset _DISPLAY +# vim:ft=zsh ts=2 sw=2 sts=2 +# +function svn_prompt_info() { + if in_svn; then + if [ "x$SVN_SHOW_BRANCH" = "xtrue" ]; then + unset SVN_SHOW_BRANCH + _DISPLAY=$(svn_get_branch_name) + else + _DISPLAY=$(svn_get_repo_name) fi + echo "$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_SVN_PROMPT_PREFIX\ +$ZSH_THEME_REPO_NAME_COLOR$_DISPLAY$ZSH_PROMPT_BASE_COLOR$ZSH_THEME_SVN_PROMPT_SUFFIX$ZSH_PROMPT_BASE_COLOR$(svn_dirty)$(svn_dirty_pwd)$ZSH_PROMPT_BASE_COLOR" + unset _DISPLAY + fi } function in_svn() { - if [[ -d .svn ]]; then - echo 1 - fi + if $(svn info >/dev/null 2>&1); then + return 0 + fi + return 1 } -function svn_get_repo_name { - if [ $(in_svn) ]; then - svn info | sed -n 's/Repository\ Root:\ .*\///p' | read SVN_ROOT +function svn_get_repo_name() { + if in_svn; then + svn info | sed -n 's/Repository\ Root:\ .*\///p' | read SVN_ROOT + svn info | sed -n "s/URL:\ .*$SVN_ROOT\///p" + fi +} - svn info | sed -n "s/URL:\ .*$SVN_ROOT\///p" - fi +function svn_get_branch_name() { + _DISPLAY=$( + svn info 2> /dev/null | \ + awk -F/ \ + '/^URL:/ { \ + for (i=0; i<=NF; i++) { \ + if ($i == "branches" || $i == "tags" ) { \ + print $(i+1); \ + break;\ + }; \ + if ($i == "trunk") { print $i; break; } \ + } \ + }' + ) + + if [ "x$_DISPLAY" = "x" ]; then + svn_get_repo_name + else + echo $_DISPLAY + fi + unset _DISPLAY } -function svn_get_branch_name { - _DISPLAY=$(svn info 2> /dev/null | awk -F/ '/^URL:/ { for (i=0; i<=NF; i++) { if ($i == "branches" || $i == "tags" ) { print $(i+1); break }; if ($i == "trunk") { print $i; break } } }') - if [ "x$_DISPLAY" = "x" ]; then - svn_get_repo_name +function svn_get_rev_nr() { + if in_svn; then + svn info 2> /dev/null | sed -n 's/Revision:\ //p' + fi +} + +function svn_dirty_choose() { + if in_svn; then + root=`svn info 2> /dev/null | sed -n 's/^Working Copy Root Path: //p'` + if $(svn status $root 2> /dev/null | grep -Eq '^\s*[ACDIM!?L]'); then + # Grep exits with 0 when "One or more lines were selected", return "dirty". + echo $1 else - echo $_DISPLAY + # Otherwise, no lines were found, or an error occurred. Return clean. + echo $2 fi - unset _DISPLAY + fi } -function svn_get_rev_nr { - if [ $(in_svn) ]; then - svn info 2> /dev/null | sed -n s/Revision:\ //p - fi +function svn_dirty() { + svn_dirty_choose $ZSH_THEME_SVN_PROMPT_DIRTY $ZSH_THEME_SVN_PROMPT_CLEAN } -function svn_dirty_choose { - if [ $(in_svn) ]; then - svn status 2> /dev/null | grep -Eq '^\s*[ACDIM!?L]' - if [ $pipestatus[-1] -eq 0 ]; then - # Grep exits with 0 when "One or more lines were selected", return "dirty". - echo $1 - else - # Otherwise, no lines were found, or an error occurred. Return clean. 
- echo $2 - fi +function svn_dirty_choose_pwd () { + if in_svn; then + root=`pwd` + if $(svn status $root 2> /dev/null | grep -Eq '^\s*[ACDIM!?L]'); then + # Grep exits with 0 when "One or more lines were selected", return "dirty". + echo $1 + else + # Otherwise, no lines were found, or an error occurred. Return clean. + echo $2 fi + fi } -function svn_dirty { - svn_dirty_choose $ZSH_THEME_SVN_PROMPT_DIRTY $ZSH_THEME_SVN_PROMPT_CLEAN +function svn_dirty_pwd () { + svn_dirty_choose_pwd $ZSH_THEME_SVN_PROMPT_DIRTY_PWD $ZSH_THEME_SVN_PROMPT_CLEAN_PWD } + + diff --git a/plugins/symfony/symfony.plugin.zsh b/plugins/symfony/symfony.plugin.zsh index 9de767548..f070e9e47 100644 --- a/plugins/symfony/symfony.plugin.zsh +++ b/plugins/symfony/symfony.plugin.zsh @@ -1,7 +1,7 @@ # symfony basic command completion _symfony_get_command_list () { - ./symfony | sed "1,/Available tasks/d" | awk 'BEGIN { cat=null; } /^[A-Za-z]+$/ { cat = $1; } /^ :[a-z]+/ { print cat $1; }' + php symfony | sed "1,/Available tasks/d" | awk 'BEGIN { cat=null; } /^[A-Za-z]+$/ { cat = $1; } /^ :[a-z]+/ { print cat $1; }' } _symfony () { diff --git a/plugins/symfony2/symfony2.plugin.zsh b/plugins/symfony2/symfony2.plugin.zsh index cc9ffebc0..1d5177e6d 100644 --- a/plugins/symfony2/symfony2.plugin.zsh +++ b/plugins/symfony2/symfony2.plugin.zsh @@ -1,19 +1,23 @@ # Symfony2 basic command completion _symfony2_get_command_list () { - app/console --no-ansi | sed "1,/Available commands/d" | awk '/^ [a-z]+/ { print $1 }' + php $(find . -maxdepth 2 -mindepth 1 -name 'console') --no-ansi | sed "1,/Available commands/d" | awk '/^ [a-z]+/ { print $1 }' } _symfony2 () { - if [ -f app/console ]; then + if [ -f $(find . -maxdepth 2 -mindepth 1 -name 'console') ]; then compadd `_symfony2_get_command_list` fi } -compdef _symfony2 app/console +compdef _symfony2 $(find . -maxdepth 2 -mindepth 1 -name 'console') compdef _symfony2 sf #Alias -alias sf2='php app/console' -alias sf2clear='php app/console cache:clear' +alias sf='php $(find . -maxdepth 2 -mindepth 1 -name 'console') ' +alias sfcl='php $(find . -maxdepth 2 -mindepth 1 -name 'console') cache:clear' +alias sfcw='php $(find . -maxdepth 2 -mindepth 1 -name 'console') cache:warmup' +alias sfroute='php $(find . -maxdepth 2 -mindepth 1 -name 'console') router:debug' +alias sfcontainer='php $(find . -maxdepth 2 -mindepth 1 -name 'console') container:debug' +alias sfgb='php $(find . -maxdepth 2 -mindepth 1 -name 'console') generate:bundle' diff --git a/plugins/systemadmin/systemadmin.plugin.zsh b/plugins/systemadmin/systemadmin.plugin.zsh new file mode 100644 index 000000000..f5e44c66f --- /dev/null +++ b/plugins/systemadmin/systemadmin.plugin.zsh @@ -0,0 +1,159 @@ +# ------------------------------------------------------------------------------ +# Description +# ----------- +# +# This is one for the system administrator, operation and maintenance. +# Some of which come from http://justinlilly.com/dotfiles/zsh.html +# +# ------------------------------------------------------------------------------ +# Authors +# ------- +# +# * Dongweiming <ciici123@gmail.com> +# +# ------------------------------------------------------------------------------ + +function retval() { + if [[ -z $1 ]];then + echo '.' 
+    else
+        echo $1
+    fi
+}
+
+function retlog() {
+    if [[ -z $1 ]];then
+        echo '/var/log/nginx/access.log'
+    else
+        echo $1
+    fi
+}
+
+alias ping='ping -c 5'
+alias clr='clear;echo "Currently logged in on $(tty), as $(whoami) in directory $(pwd)."'
+alias path='echo -e ${PATH//:/\\n}'
+alias mkdir='mkdir -pv'
+# top processes by memory usage
+alias psmem='ps -e -orss=,args= | sort -b -k1,1n'
+alias psmem10='ps -e -orss=,args= | sort -b -k1,1n| head -10'
+# top processes by cpu usage; if this does not work, try executing: export LC_ALL='C'
+alias pscpu='ps -e -o pcpu,cpu,nice,state,cputime,args|sort -k1 -nr'
+alias pscpu10='ps -e -o pcpu,cpu,nice,state,cputime,args|sort -k1 -nr | head -10'
+# top 10 commands in the shell history
+alias hist10='print -l ${(o)history%% *} | uniq -c | sort -nr | head -n 10'
+
+# list only the directories in the current directory
+dls () {
+    ls -l | grep "^d" | awk '{ print $9 }' | tr -d "/"
+}
+psgrep() {
+    ps aux | grep "$(retval $1)" | grep -v grep
+}
+# kill any process that matches a regexp passed to it
+killit() {
+    ps aux | grep -v "grep" | grep "$@" | awk '{print $2}' | xargs sudo kill
+}
+
+# list contents of directories in a tree-like format
+if [ -z "$(which tree)" ]; then
+  tree () {
+      find $@ -print | sed -e 's;[^/]*/;|____;g;s;____|; |;g'
+  }
+fi
+
+# count network connections grouped by state
+sortcons() {
+    netstat -nat |awk '{print $6}'|sort|uniq -c|sort -rn
+}
+
+# count all connections on port 80
+con80() {
+    netstat -nat|grep -i ":80"|wc -l
+}
+
+# list connected IPs sorted by the number of connections
+sortconip() {
+    netstat -ntu | awk '{print $5}' | cut -d: -f1 | sort | uniq -c | sort -n
+}
+
+# top 20 IPs by number of requests on port 80
+req20() {
+    netstat -anlp|grep 80|grep tcp|awk '{print $5}'|awk -F: '{print $1}'|sort|uniq -c|sort -nr|head -n20
+}
+
+# top 20 IPs accessing port 80, sampled with tcpdump
+http20() {
+    sudo tcpdump -i eth0 -tnn dst port 80 -c 1000 | awk -F"." '{print $1"."$2"."$3"."$4}' | sort | uniq -c | sort -nr |head -20
+}
+
+# top 20 IPs with connections in TIME_WAIT
+timewait20() {
+    netstat -n|grep TIME_WAIT|awk '{print $5}'|sort|uniq -c|sort -rn|head -n20
+}
+
+# top 20 IPs with connections in SYN state
+syn20() {
+    netstat -an | grep SYN | awk '{print $5}' | awk -F: '{print $1}' | sort | uniq -c | sort -nr|head -n20
+}
+
+# print the process listening on a given port number
+port_pro() {
+    netstat -ntlp | grep "$(retval $1)" | awk '{print $7}' | cut -d/ -f1
+}
+
+# top 10 client IP addresses in the access log
+accessip10() {
+    awk '{counts[$(11)]+=1}; END {for(url in counts) print counts[url], url}' "$(retlog)"
+}
+
+# top 20 most visited files or pages
+visitpage20() {
+    awk '{print $11}' "$(retlog)"|sort|uniq -c|sort -nr|head -20
+}
+
+# top 100 most time-consuming pages (taking more than 60 seconds) and the number of times they occur
+consume100() {
+    awk '($NF > 60 && $7~/\.php/){print $7}' "$(retlog)" |sort -n|uniq -c|sort -nr|head -100
+    # for a django site, or any other site whose URLs have no file suffix, use:
+    # awk '{print $7}' "$(retlog)" |sort -n|uniq -c|sort -nr|head -100
+}
+
+# website traffic statistics (in GB)
+webtraffic() {
+    awk "{sum+=$10} END {print sum/1024/1024/1024}" "$(retlog)"
+}
+
+# list requests that returned a 404 status
+c404() {
+    awk '($9 ~/404/)' "$(retlog)" | awk '{print $9,$7}' | sort
+}
+
+# count requests by HTTP status code
+httpstatus() { + awk '{counts[$(9)]+=1}; END {for(code in counts) print code, counts[code]}' "$(retlog)" +} + +# Delete 0 byte file +d0() { + find "$(retval $1)" -type f -size 0 -exec rm -rf {} \; +} + +# gather external ip address +geteip() { + curl http://ifconfig.me +} + +# determine local IP address +getip() { + ifconfig | grep 'inet addr:'| grep -v '127.0.0.1' | cut -d: -f2 | awk '{ print $1}' +} + +# Clear zombie processes +clrz() { + ps -eal | awk '{ if ($2 == "Z") {print $4}}' | kill -9 +} + +# Second concurrent +conssec() { + awk '{if($9~/200|30|404/)COUNT[$4]++}END{for( a in COUNT) print a,COUNT[a]}' "$(retlog)"|sort -k 2 -nr|head -n10 +} diff --git a/plugins/terminalapp/terminalapp.plugin.zsh b/plugins/terminalapp/terminalapp.plugin.zsh index 2249b1e2f..6e47ee188 100644 --- a/plugins/terminalapp/terminalapp.plugin.zsh +++ b/plugins/terminalapp/terminalapp.plugin.zsh @@ -32,7 +32,7 @@ if [[ "$TERM_PROGRAM" == "Apple_Terminal" ]] && [[ -z "$INSIDE_EMACS" ]]; then # Register the function so it is called whenever the working # directory changes. autoload add-zsh-hook - add-zsh-hook chpwd update_terminal_cwd + add-zsh-hook precmd update_terminal_cwd # Tell the terminal about the initial directory. update_terminal_cwd diff --git a/plugins/textmate/textmate.plugin.zsh b/plugins/textmate/textmate.plugin.zsh index a11a097f5..773c4f8d2 100644 --- a/plugins/textmate/textmate.plugin.zsh +++ b/plugins/textmate/textmate.plugin.zsh @@ -6,7 +6,16 @@ alias etts='mate app config lib db public script spec test vendor/plugins vendor # Edit Ruby app in TextMate alias mr='mate CHANGELOG app config db lib public script spec test' +# If the tm command is called without an argument, open TextMate in the current directory +# If tm is passed a directory, cd to it and open it in TextMate +# If tm is passed a file, open it in TextMate function tm() { - cd $1 - mate $1 + if [[ -z $1 ]]; then + mate . + else + mate $1 + if [[ -d $1 ]]; then + cd $1 + fi + fi } diff --git a/plugins/themes/themes.plugin.zsh b/plugins/themes/themes.plugin.zsh index 8bab257ea..7519b0253 100644 --- a/plugins/themes/themes.plugin.zsh +++ b/plugins/themes/themes.plugin.zsh @@ -1,6 +1,6 @@ function theme { - if [ "$1" = "random" ]; then + if [ -z "$1" ] || [ "$1" = "random" ]; then themes=($ZSH/themes/*zsh-theme) N=${#themes[@]} ((N=(RANDOM%N)+1)) diff --git a/plugins/tmux/tmux.extra.conf b/plugins/tmux/tmux.extra.conf new file mode 100644 index 000000000..beffd380c --- /dev/null +++ b/plugins/tmux/tmux.extra.conf @@ -0,0 +1,2 @@ +set -g default-terminal $ZSH_TMUX_TERM +source $HOME/.tmux.conf diff --git a/plugins/tmux/tmux.only.conf b/plugins/tmux/tmux.only.conf new file mode 100644 index 000000000..0734df3e1 --- /dev/null +++ b/plugins/tmux/tmux.only.conf @@ -0,0 +1 @@ +set -g default-terminal $ZSH_TMUX_TERM diff --git a/plugins/tmux/tmux.plugin.zsh b/plugins/tmux/tmux.plugin.zsh new file mode 100644 index 000000000..626c41f3d --- /dev/null +++ b/plugins/tmux/tmux.plugin.zsh @@ -0,0 +1,95 @@ +# +# Aliases +# + +alias ta='tmux attach -t' +alias ts='tmux new-session -s' +alias tl='tmux list-sessions' + +# Only run if tmux is actually installed +if which tmux &> /dev/null + then + # Configuration variables + # + # Automatically start tmux + [[ -n "$ZSH_TMUX_AUTOSTART" ]] || ZSH_TMUX_AUTOSTART=false + # Only autostart once. If set to false, tmux will attempt to + # autostart every time your zsh configs are reloaded. 
+ [[ -n "$ZSH_TMUX_AUTOSTART_ONCE" ]] || ZSH_TMUX_AUTOSTART_ONCE=true + # Automatically connect to a previous session if it exists + [[ -n "$ZSH_TMUX_AUTOCONNECT" ]] || ZSH_TMUX_AUTOCONNECT=true + # Automatically close the terminal when tmux exits + [[ -n "$ZSH_TMUX_AUTOQUIT" ]] || ZSH_TMUX_AUTOQUIT=$ZSH_TMUX_AUTOSTART + # Set term to screen or screen-256color based on current terminal support + [[ -n "$ZSH_TMUX_FIXTERM" ]] || ZSH_TMUX_FIXTERM=true + # Set '-CC' option for iTerm2 tmux integration + [[ -n "$ZSH_TMUX_ITERM2" ]] || ZSH_TMUX_ITERM2=false + # The TERM to use for non-256 color terminals. + # Tmux states this should be screen, but you may need to change it on + # systems without the proper terminfo + [[ -n "$ZSH_TMUX_FIXTERM_WITHOUT_256COLOR" ]] || ZSH_TMUX_FIXTERM_WITHOUT_256COLOR="screen" + # The TERM to use for 256 color terminals. + # Tmux states this should be screen-256color, but you may need to change it on + # systems without the proper terminfo + [[ -n "$ZSH_TMUX_FIXTERM_WITH_256COLOR" ]] || ZSH_TMUX_FIXTERM_WITH_256COLOR="screen-256color" + + + # Get the absolute path to the current directory + local zsh_tmux_plugin_path="$(cd "$(dirname "$0")" && pwd)" + + # Determine if the terminal supports 256 colors + if [[ `tput colors` == "256" ]] + then + export ZSH_TMUX_TERM=$ZSH_TMUX_FIXTERM_WITH_256COLOR + else + export ZSH_TMUX_TERM=$ZSH_TMUX_FIXTERM_WITHOUT_256COLOR + fi + + # Set the correct local config file to use. + if [[ "$ZSH_TMUX_ITERM2" == "false" ]] && [[ -f $HOME/.tmux.conf || -h $HOME/.tmux.conf ]] + then + #use this when they have a ~/.tmux.conf + export _ZSH_TMUX_FIXED_CONFIG="$zsh_tmux_plugin_path/tmux.extra.conf" + else + #use this when they don't have a ~/.tmux.conf + export _ZSH_TMUX_FIXED_CONFIG="$zsh_tmux_plugin_path/tmux.only.conf" + fi + + # Wrapper function for tmux. + function _zsh_tmux_plugin_run() + { + # We have other arguments, just run them + if [[ -n "$@" ]] + then + \tmux $@ + # Try to connect to an existing session. + elif [[ "$ZSH_TMUX_AUTOCONNECT" == "true" ]] + then + \tmux `[[ "$ZSH_TMUX_ITERM2" == "true" ]] && echo '-CC '` attach || \tmux `[[ "$ZSH_TMUX_ITERM2" == "true" ]] && echo '-CC '` `[[ "$ZSH_TMUX_FIXTERM" == "true" ]] && echo '-f '$_ZSH_TMUX_FIXED_CONFIG` new-session + [[ "$ZSH_TMUX_AUTOQUIT" == "true" ]] && exit + # Just run tmux, fixing the TERM variable if requested. + else + \tmux `[[ "$ZSH_TMUX_ITERM2" == "true" ]] && echo '-CC '` `[[ "$ZSH_TMUX_FIXTERM" == "true" ]] && echo '-f '$_ZSH_TMUX_FIXED_CONFIG` + [[ "$ZSH_TMUX_AUTOQUIT" == "true" ]] && exit + fi + } + + # Use the completions for tmux for our function + compdef _tmux _zsh_tmux_plugin_run + + # Alias tmux to our wrapper function. + alias tmux=_zsh_tmux_plugin_run + + # Autostart if not already in tmux and enabled. + if [[ ! -n "$TMUX" && "$ZSH_TMUX_AUTOSTART" == "true" ]] + then + # Actually don't autostart if we already did and multiple autostarts are disabled. + if [[ "$ZSH_TMUX_AUTOSTART_ONCE" == "false" || "$ZSH_TMUX_AUTOSTARTED" != "true" ]] + then + export ZSH_TMUX_AUTOSTARTED=true + _zsh_tmux_plugin_run + fi + fi +else + print "zsh tmux plugin: tmux not found. Please install tmux before using this plugin." 
+fi diff --git a/plugins/tmuxinator/_tmuxinator b/plugins/tmuxinator/_tmuxinator new file mode 100644 index 000000000..e4f8b6ce0 --- /dev/null +++ b/plugins/tmuxinator/_tmuxinator @@ -0,0 +1,38 @@ +#compdef tmuxinator mux +#autoload + +local curcontext="$curcontext" state line ret=1 +local -a _configs + +_arguments -C \ + '1: :->cmds' \ + '2:: :->args' && ret=0 + +case $state in + cmds) + _values "tmuxinator command" \ + "new[create a new project file and open it in your editor]" \ + "start[start a tmux session using project's tmuxinator config]" \ + "open[create a new project file and open it in your editor]" \ + "copy[copy source_project project file to a new project called new_project]" \ + "delete[deletes the project called project_name]" \ + "debug[output the shell commands generated by a projet]" \ + "implode[deletes all existing projects!]" \ + "list[list all existing projects]" \ + "doctor[look for problems in your configuration]" \ + "help[shows this help document]" \ + "version[shows tmuxinator version number]" + ret=0 + ;; + args) + case $line[1] in + start|open|copy|delete|debug) + _configs=(`find ~/.tmuxinator -name \*.yml | cut -d/ -f5 | sed s:.yml::g`) + [[ -n "$_configs" ]] && _values 'configs' $_configs + ret=0 + ;; + esac + ;; +esac + +return ret diff --git a/plugins/torrent/torrent.plugin.zsh b/plugins/torrent/torrent.plugin.zsh new file mode 100644 index 000000000..656e337de --- /dev/null +++ b/plugins/torrent/torrent.plugin.zsh @@ -0,0 +1,17 @@ +# +# Algorithm borrowed from http://wiki.rtorrent.org/MagnetUri and adapted to work with zsh. +# + +function magnet_to_torrent() { + [[ "$1" =~ xt=urn:btih:([^\&/]+) ]] || return 1 + + hashh=${match[1]} + + if [[ "$1" =~ dn=([^\&/]+) ]];then + filename=${match[1]} + else + filename=$hashh + fi + + echo "d10:magnet-uri${#1}:${1}e" > "$filename.torrent" +}
\ No newline at end of file diff --git a/plugins/urltools/urltools.plugin.zsh b/plugins/urltools/urltools.plugin.zsh index 4ddfff8ce..22327334d 100644 --- a/plugins/urltools/urltools.plugin.zsh +++ b/plugins/urltools/urltools.plugin.zsh @@ -14,6 +14,9 @@ if [[ $(whence node) != "" && ( "x$URLTOOLS_METHOD" = "x" || "x$URLTOOLS_METHOD elif [[ $(whence python) != "" && ( "x$URLTOOLS_METHOD" = "x" || "x$URLTOOLS_METHOD" = "xpython" ) ]]; then alias urlencode='python -c "import sys, urllib as ul; print ul.quote_plus(sys.argv[1])"' alias urldecode='python -c "import sys, urllib as ul; print ul.unquote_plus(sys.argv[1])"' +elif [[ $(whence xxd) != "" && ( "x$URLTOOLS_METHOD" = "x" || "x$URLTOOLS_METHOD" = "xshell" ) ]]; then + function urlencode() {echo $@ | tr -d "\n" | xxd -plain | sed "s/\(..\)/%\1/g"} + function urldecode() {printf $(echo -n $@ | sed 's/\\/\\\\/g;s/\(%\)\([0-9a-fA-F][0-9a-fA-F]\)/\\x\2/g')"\n"} elif [[ $(whence ruby) != "" && ( "x$URLTOOLS_METHOD" = "x" || "x$URLTOOLS_METHOD" = "xruby" ) ]]; then alias urlencode='ruby -r cgi -e "puts CGI.escape(ARGV[0])"' alias urldecode='ruby -r cgi -e "puts CGI.unescape(ARGV[0])"' @@ -33,4 +36,4 @@ elif [[ $(whence perl) != "" && ( "x$URLTOOLS_METHOD" = "x" || "x$URLTOOLS_METHO fi fi -unset URLTOOLS_METHOD
\ No newline at end of file +unset URLTOOLS_METHOD diff --git a/plugins/vagrant/_vagrant b/plugins/vagrant/_vagrant index 9bed1e3c6..9ddfa1be7 100644 --- a/plugins/vagrant/_vagrant +++ b/plugins/vagrant/_vagrant @@ -6,20 +6,28 @@ local -a _1st_arguments _1st_arguments=( 'box:Box commands' + 'connect:Connects to a shared, remote Vagrant environment' 'destroy:Destroys the vagrant environment' + 'docker-logs:Shows Docker logs' + 'docker-run:Run one-off commands against a Docker container' + 'global-status:Reports the status of all active Vagrant environments on the system' 'halt:Halts the currently running vagrant environment' - 'help:[TASK] Describe available tasks or one specific task' 'init:[box_name] [box_url] Initializes current folder for Vagrant usage' + 'login:Authenticates against a Vagrant Cloud server to access protected boxes' 'package:Packages a vagrant environment for distribution' + 'plugin:Plugin commands' 'provision:Run the provisioner' 'reload:Reload the vagrant environment' 'resume:Resumes a suspend vagrant environment' + 'share:Shares the Vagrant environment and allows remote access' 'ssh:SSH into the currently running environment' - 'ssh_config:outputs .ssh/config valid syntax for connecting to this environment via ssh.' - 'status:Shows the status of the current Vagrant environment.' + 'ssh-config:outputs .ssh/config valid syntax for connecting to this environment via ssh' + 'status:Shows the status of the current Vagrant environment' 'suspend:Suspends the currently running vagrant environment' 'up:Creates the vagrant environment' - 'version:Prints the Vagrant version information' + 'version:Prints the currently installed Vagrant version and checks for new updates' + '--help:[TASK] Describe available tasks or one specific task' + '--version:Prints the Vagrant version information' ) local -a _box_arguments @@ -43,7 +51,7 @@ __task_list () __box_list () { - _wanted application expl 'command' compadd $(command ls -1 $HOME/.vagrant/boxes 2>/dev/null| sed -e 's/ /\\ /g') + _wanted application expl 'command' compadd $(command vagrant box list | sed -e 's/ /\\ /g') } __vm_list () diff --git a/plugins/vi-mode/vi-mode.plugin.zsh b/plugins/vi-mode/vi-mode.plugin.zsh index f91be70e4..3ed32b3fb 100644 --- a/plugins/vi-mode/vi-mode.plugin.zsh +++ b/plugins/vi-mode/vi-mode.plugin.zsh @@ -3,18 +3,13 @@ function zle-keymap-select zle-line-init zle-line-finish { # The terminal must be in application mode when ZLE is active for $terminfo # values to be valid. - if (( $+terminfo[smkx] && $+terminfo[rmkx] )); then - case "$0" in - (zle-line-init) - # Enable terminal application mode. - echoti smkx - ;; - (zle-line-finish) - # Disable terminal application mode. 
- echoti rmkx - ;; - esac + if (( ${+terminfo[smkx]} )); then + printf '%s' ${terminfo[smkx]} fi + if (( ${+terminfo[rmkx]} )); then + printf '%s' ${terminfo[rmkx]} + fi + zle reset-prompt zle -R } @@ -22,9 +17,15 @@ function zle-keymap-select zle-line-init zle-line-finish { zle -N zle-line-init zle -N zle-line-finish zle -N zle-keymap-select +zle -N edit-command-line + bindkey -v +# allow v to edit the command line (standard behaviour) +autoload -Uz edit-command-line +bindkey -M vicmd 'v' edit-command-line + # if mode indicator wasn't setup by theme, define default if [[ "$MODE_INDICATOR" == "" ]]; then MODE_INDICATOR="%{$fg_bold[red]%}<%{$fg[red]%}<<%{$reset_color%}" diff --git a/plugins/vim-interaction/README.md b/plugins/vim-interaction/README.md new file mode 100644 index 000000000..681648018 --- /dev/null +++ b/plugins/vim-interaction/README.md @@ -0,0 +1,82 @@ +# Vim Interaction # + +The plugin presents a function called `callvim` whose usage is: + + usage: callvim [-b cmd] [-a cmd] [file ... fileN] + + -b cmd Run this command in GVIM before editing the first file + -a cmd Run this command in GVIM after editing the first file + file The file to edit + ... fileN The other files to add to the argslist + +## Rationale ## + +The idea for this script is to give you some decent interaction with a running +GVim session. Normally you'll be running around your filesystem doing any +number of amazing things and you'll need to load some files into GVim for +editing, inspecting, destruction, or other bits of mayhem. This script lets you +do that. + +## Aliases ## + +There are a few aliases presented as well: + +* `v` A shorthand for `callvim` +* `vvsp` Edits the passed in file but first makes a vertical split +* `vhsp` Edits the passed in file but first makes a horizontal split + +## Post Callout ## + +At the end of the `callvim` function we invoke the `postCallVim` function if it +exists. If you're using MacVim, for example, you could define a function that +brings window focus to it after the file is loaded: + + function postCallVim + { + osascript -e 'tell application "MacVim" to activate' + } + +This'll be different depending on your OS / Window Manager. + +## Examples ## + +This will load `/tmp/myfile.scala` into the running GVim session: + + > v /tmp/myfile.scala + +This will load it after first doing a vertical split: + + > vvsp /tmp/myfile.scala + or + > v -b':vsp' /tmp/myfile.scala + +This will load it after doing a horizontal split, then moving to the bottom of +the file: + + > vhsp -aG /tmp/myfile.scala + or + > v -b':sp' -aG /tmp/myfile.scala + +This will load the file and then copy the first line to the end (Why you would +ever want to do this... 
I dunno): + + > v -a':1t$' /tmp/myfile.scala + +And this will load all of the `*.txt` files into the args list: + + > v *.txt + +If you want to load files into areas that are already split, use one of the +aliases for that: + + # Do a ':wincmd h' first + > vh /tmp/myfile.scala + + # Do a ':wincmd j' first + > vj /tmp/myfile.scala + + # Do a ':wincmd k' first + > vk /tmp/myfile.scala + + # Do a ':wincmd l' first + > vl /tmp/myfile.scala diff --git a/plugins/vim-interaction/vim-interaction.plugin.zsh b/plugins/vim-interaction/vim-interaction.plugin.zsh new file mode 100644 index 000000000..5142f1f9b --- /dev/null +++ b/plugins/vim-interaction/vim-interaction.plugin.zsh @@ -0,0 +1,72 @@ +# +# See README.md +# +# Derek Wyatt (derek@{myfirstnamemylastname}.org +# + +function resolveFile +{ + if [ -f "$1" ]; then + echo $(readlink -f "$1") + elif [[ "${1#/}" == "$1" ]]; then + echo "$(pwd)/$1" + else + echo $1 + fi +} + +function callvim +{ + if [[ $# == 0 ]]; then + cat <<EOH +usage: callvim [-b cmd] [-a cmd] [file ... fileN] + + -b cmd Run this command in GVIM before editing the first file + -a cmd Run this command in GVIM after editing the first file + file The file to edit + ... fileN The other files to add to the argslist +EOH + return 0 + fi + + local cmd="" + local before="<esc>" + local after="" + while getopts ":b:a:" option + do + case $option in + a) after="$OPTARG" + ;; + b) before="$OPTARG" + ;; + esac + done + shift $((OPTIND-1)) + if [[ ${after#:} != $after && ${after%<cr>} == $after ]]; then + after="$after<cr>" + fi + if [[ ${before#:} != $before && ${before%<cr>} == $before ]]; then + before="$before<cr>" + fi + local files="" + for f in $@ + do + files="$files $(resolveFile $f)" + done + if [[ -n $files ]]; then + files=':args! '"$files<cr>" + fi + cmd="$before$files$after" + gvim --remote-send "$cmd" + if typeset -f postCallVim > /dev/null; then + postCallVim + fi +} + +alias v=callvim +alias vvsp="callvim -b':vsp'" +alias vhsp="callvim -b':sp'" +alias vk="callvim -b':wincmd k'" +alias vj="callvim -b':wincmd j'" +alias vl="callvim -b':wincmd l'" +alias vh="callvim -b':wincmd h'" diff --git a/plugins/virtualenv/virtualenv.plugin.zsh b/plugins/virtualenv/virtualenv.plugin.zsh new file mode 100644 index 000000000..8e06450b1 --- /dev/null +++ b/plugins/virtualenv/virtualenv.plugin.zsh @@ -0,0 +1,8 @@ +function virtualenv_prompt_info(){ + if [[ -n $VIRTUAL_ENV ]]; then + printf "%s[%s] " "%{${fg[yellow]}%}" ${${VIRTUAL_ENV}:t} + fi +} + +# disables prompt mangling in virtual_env/bin/activate +export VIRTUAL_ENV_DISABLE_PROMPT=1 diff --git a/plugins/virtualenvwrapper/virtualenvwrapper.plugin.zsh b/plugins/virtualenvwrapper/virtualenvwrapper.plugin.zsh index 0ed2565b4..f58bda1ad 100644 --- a/plugins/virtualenvwrapper/virtualenvwrapper.plugin.zsh +++ b/plugins/virtualenvwrapper/virtualenvwrapper.plugin.zsh @@ -1,40 +1,59 @@ -wrapsource=`which virtualenvwrapper_lazy.sh` - -if [[ -f "$wrapsource" ]]; then - source $wrapsource +virtualenvwrapper='virtualenvwrapper.sh' +if (( $+commands[$virtualenvwrapper] )); then + source ${${virtualenvwrapper}:c} if [[ ! $DISABLE_VENV_CD -eq 1 ]]; then - # Automatically activate Git projects' virtual environments based on the + # Automatically activate Git projects's virtual environments based on the # directory name of the project. 
Virtual environment name can be overridden # by placing a .venv file in the project root with a virtualenv name in it function workon_cwd { - # Check that this is a Git repo - PROJECT_ROOT=`git rev-parse --show-toplevel 2> /dev/null` - if (( $? == 0 )); then + if [ ! $WORKON_CWD ]; then + WORKON_CWD=1 + # Check if this is a Git repo + PROJECT_ROOT=`git rev-parse --show-toplevel 2> /dev/null` + if (( $? != 0 )); then + PROJECT_ROOT="." + fi # Check for virtualenv name override - ENV_NAME=`basename "$PROJECT_ROOT"` if [[ -f "$PROJECT_ROOT/.venv" ]]; then ENV_NAME=`cat "$PROJECT_ROOT/.venv"` + elif [[ -f "$PROJECT_ROOT/.venv/bin/activate" ]];then + ENV_NAME="$PROJECT_ROOT/.venv" + elif [[ "$PROJECT_ROOT" != "." ]]; then + ENV_NAME=`basename "$PROJECT_ROOT"` + else + ENV_NAME="" fi - # Activate the environment only if it is not already active - if [[ "$VIRTUAL_ENV" != "$WORKON_HOME/$ENV_NAME" ]]; then - if [[ -e "$WORKON_HOME/$ENV_NAME/bin/activate" ]]; then - workon "$ENV_NAME" && export CD_VIRTUAL_ENV="$ENV_NAME" + if [[ "$ENV_NAME" != "" ]]; then + # Activate the environment only if it is not already active + if [[ "$VIRTUAL_ENV" != "$WORKON_HOME/$ENV_NAME" ]]; then + if [[ -e "$WORKON_HOME/$ENV_NAME/bin/activate" ]]; then + workon "$ENV_NAME" && export CD_VIRTUAL_ENV="$ENV_NAME" + elif [[ -e "$ENV_NAME/bin/activate" ]]; then + source $ENV_NAME/bin/activate && export CD_VIRTUAL_ENV="$ENV_NAME" + fi fi + elif [ $CD_VIRTUAL_ENV ]; then + # We've just left the repo, deactivate the environment + # Note: this only happens if the virtualenv was activated automatically + deactivate && unset CD_VIRTUAL_ENV fi - elif [ $CD_VIRTUAL_ENV ]; then - # We've just left the repo, deactivate the environment - # Note: this only happens if the virtualenv was activated automatically - deactivate && unset CD_VIRTUAL_ENV + unset PROJECT_ROOT + unset WORKON_CWD fi - unset PROJECT_ROOT } - # New cd function that does the virtualenv magic - function cd { - builtin cd "$@" && workon_cwd - } + # Append workon_cwd to the chpwd_functions array, so it will be called on cd + # http://zsh.sourceforge.net/Doc/Release/Functions.html + # TODO: replace with 'add-zsh-hook chpwd workon_cwd' when oh-my-zsh min version is raised above 4.3.4 + if (( ${+chpwd_functions} )); then + if (( $chpwd_functions[(I)workon_cwd] == 0 )); then + set -A chpwd_functions $chpwd_functions workon_cwd + fi + else + set -A chpwd_functions workon_cwd + fi fi else - print "zsh virtualenvwrapper plugin: Cannot find virtualenvwrapper_lazy.sh. Please install with \`pip install virtualenvwrapper\`." + print "zsh virtualenvwrapper plugin: Cannot find ${virtualenvwrapper}. Please install with \`pip install virtualenvwrapper\`." fi diff --git a/plugins/vundle/vundle.plugin.zsh b/plugins/vundle/vundle.plugin.zsh index 005a58476..830774fe3 100644 --- a/plugins/vundle/vundle.plugin.zsh +++ b/plugins/vundle/vundle.plugin.zsh @@ -4,7 +4,7 @@ function vundle-init () { mkdir -p ~/.vim/bundle/vundle/ fi - if [ ! -d ~/.vim/bundle/vundle/.git/ ] + if [ ! -d ~/.vim/bundle/vundle/.git ] && [ ! 
-f ~/.vim/bundle/vundle/.git ] then git clone http://github.com/gmarik/vundle.git ~/.vim/bundle/vundle echo "\n\tRead about vim configuration for vundle at https://github.com/gmarik/vundle\n" @@ -16,8 +16,12 @@ function vundle () { vim -c "execute \"BundleInstall\" | q | q" } - function vundle-update () { vundle-init vim -c "execute \"BundleInstall!\" | q | q" } + +function vundle-clean () { + vundle-init + vim -c "execute \"BundleClean!\" | q | q" +} diff --git a/plugins/wd/LICENSE b/plugins/wd/LICENSE new file mode 100644 index 000000000..8caa6c6ce --- /dev/null +++ b/plugins/wd/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Markus Færevaag + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
\ No newline at end of file diff --git a/plugins/wd/README.md b/plugins/wd/README.md new file mode 100644 index 000000000..bc0ebe334 --- /dev/null +++ b/plugins/wd/README.md @@ -0,0 +1,40 @@ +## wd + +**Maintainer:** [mfaerevaag](https://github.com/mfaerevaag) + +`wd` (*warp directory*) lets you jump to custom directories in zsh, without using `cd`. Why? Because `cd` seems ineffecient when the folder is frequently visited or has a long path. [Source](https://github.com/mfaerevaag/wd) + +### Usage + + * Add warp point to current working directory: + + $ wd add foo + + If a warp point with the same name exists, use `add!` to overwrite it. + + Note, a warp point cannot contain colons, or only consist of only spaces and dots. The first will conflict in how `wd` stores the warp points, and the second will conflict other features, as below. + + * From an other directory (not necessarily), warp to `foo` with: + + $ wd foo + + * You can warp back to previous directory, and so on, with this dot syntax: + + $ wd .. + $ wd ... + + This is a wrapper for the zsh `dirs` function. + + * Remove warp point test point: + + $ wd rm foo + + * List all warp points (stored in `~/.warprc`): + + $ wd ls + + * List warp points to current directory + + $ wd show + + * Print usage with no opts or the `help` argument. diff --git a/plugins/wd/_wd.sh b/plugins/wd/_wd.sh new file mode 100644 index 000000000..0b03d8fff --- /dev/null +++ b/plugins/wd/_wd.sh @@ -0,0 +1,61 @@ +#compdef wd + +zstyle ':completion:*:descriptions' format '%B%d%b' +zstyle ':completion::complete:wd:*:commands' group-name commands +zstyle ':completion::complete:wd:*:warp_points' group-name warp_points +zstyle ':completion::complete:wd::' list-grouped + +zmodload zsh/mapfile + +function _wd() { + local CONFIG=$HOME/.warprc + local ret=1 + + local -a commands + local -a warp_points + + warp_points=( "${(f)mapfile[$CONFIG]//$HOME/~}" ) + + commands=( + 'add:Adds the current working directory to your warp points' + 'add!:Overwrites existing warp point' + 'rm:Removes the given warp point' + 'ls:Outputs all stored warp points' + 'show:Outputs all warp points that point to the current directory' + 'help:Show this extremely helpful text' + '..:Go back to last directory' + ) + + _arguments -C \ + '1: :->first_arg' \ + '2: :->second_arg' && ret=0 + + case $state in + first_arg) + _describe -t warp_points "Warp points" warp_points && ret=0 + _describe -t commands "Commands" commands && ret=0 + ;; + second_arg) + case $words[2] in + add\!|rm) + _describe -t points "Warp points" warp_points && ret=0 + ;; + add) + _message 'Write the name of your warp point' && ret=0 + ;; + esac + ;; + esac + + return $ret +} + +_wd "$@" + +# Local Variables: +# mode: Shell-Script +# sh-indentation: 2 +# indent-tabs-mode: nil +# sh-basic-offset: 2 +# End: +# vim: ft=zsh sw=2 ts=2 et diff --git a/plugins/wd/wd.plugin.zsh b/plugins/wd/wd.plugin.zsh new file mode 100644 index 000000000..c0559293d --- /dev/null +++ b/plugins/wd/wd.plugin.zsh @@ -0,0 +1,11 @@ +#!/bin/zsh + +# WARP DIRECTORY +# ============== +# oh-my-zsh plugin +# +# @github.com/mfaerevaag/wd + +wd() { + . $ZSH/plugins/wd/wd.sh +} diff --git a/plugins/wd/wd.sh b/plugins/wd/wd.sh new file mode 100755 index 000000000..dfb9ad89a --- /dev/null +++ b/plugins/wd/wd.sh @@ -0,0 +1,236 @@ +#!/bin/zsh + +# WARP DIRECTORY +# ============== +# Jump to custom directories in terminal +# because `cd` takes too long... 
+# +# @github.com/mfaerevaag/wd + + +## variables +readonly CONFIG=$HOME/.warprc + +# colors +readonly BLUE="\033[96m" +readonly GREEN="\033[92m" +readonly YELLOW="\033[93m" +readonly RED="\033[91m" +readonly NOC="\033[m" + + +## init + +# check if config file exists +if [ ! -e $CONFIG ] +then + # if not, create config file + touch $CONFIG +fi + +# load warp points +typeset -A points +while read -r line +do + arr=(${(s,:,)line}) + key=${arr[1]} + val=${arr[2]} + + points[$key]=$val +done < $CONFIG + + +## functions + +wd_warp() +{ + local point=$1 + + if [[ $point =~ "^\.+$" ]] + then + if [ $#1 < 2 ] + then + wd_print_msg $YELLOW "Warping to current directory?" + else + (( n = $#1 - 1 )) + cd -$n > /dev/null + fi + elif [[ ${points[$point]} != "" ]] + then + cd ${points[$point]} + else + wd_print_msg $RED "Unknown warp point '${point}'" + fi +} + +wd_add() +{ + local force=$1 + local point=$2 + + if [[ $point =~ "^[\.]+$" ]] + then + wd_print_msg $RED "Warp point cannot be just dots" + elif [[ $point =~ "(\s|\ )+" ]] + then + wd_print_msg $RED "Warp point should not contain whitespace" + elif [[ $point == *:* ]] + then + wd_print_msg $RED "Warp point cannot contain colons" + elif [[ $point == "" ]] + then + wd_print_msg $RED "Warp point cannot be empty" + elif [[ ${points[$2]} == "" ]] || $force + then + wd_remove $point > /dev/null + printf "%q:%q\n" "${point}" "${PWD}" >> $CONFIG + + wd_print_msg $GREEN "Warp point added" + else + wd_print_msg $YELLOW "Warp point '${point}' already exists. Use 'add!' to overwrite." + fi +} + +wd_remove() +{ + local point=$1 + + if [[ ${points[$point]} != "" ]] + then + if sed -i.bak "s,^${point}:.*$,,g" $CONFIG + then + wd_print_msg $GREEN "Warp point removed" + else + wd_print_msg $RED "Something bad happened! Sorry." + fi + else + wd_print_msg $RED "Warp point was not found" + fi +} + +wd_list_all() +{ + wd_print_msg $BLUE "All warp points:" + + while IFS= read -r line + do + if [[ $line != "" ]] + then + arr=(${(s,:,)line}) + key=${arr[1]} + val=${arr[2]} + + printf "%20s -> %s\n" $key $val + fi + done <<< $(sed "s:${HOME}:~:g" $CONFIG) +} + +wd_show() +{ + local cwd=$(print $PWD | sed "s:^${HOME}:~:") + + wd_print_msg $BLUE "Warp points to current directory:" + wd_list_all | grep -e "${cwd}$" +} + +wd_print_msg() +{ + local color=$1 + local msg=$2 + + if [[ $color == "" || $msg == "" ]] + then + print " ${RED}*${NOC} Could not print message. Sorry!" + else + print " ${color}*${NOC} ${msg}" + fi +} + +wd_print_usage() +{ + cat <<- EOF +Usage: wd [add|-a|--add] [rm|-r|--remove] <point> + +Commands: + add Adds the current working directory to your warp points + add! Overwrites existing warp point + rm Removes the given warp point + show Outputs warp points to current directory + ls Outputs all stored warp points + help Show this extremely helpful text +EOF +} + + +## run + +# get opts +args=$(getopt -o a:r:lhs -l add:,rm:,ls,help,show -- $*) + +# check if no arguments were given +if [[ $? -ne 0 || $#* -eq 0 ]] +then + wd_print_usage + +# check if config file is writeable +elif [ ! -w $CONFIG ] +then + # do nothing + # can't run `exit`, as this would exit the executing shell + wd_print_msg $RED "\'$CONFIG\' is not writeable." + +else + for o + do + case "$o" + in + -a|--add|add) + wd_add false $2 + break + ;; + -a!|--add!|add!) 
+ wd_add true $2 + break + ;; + -r|--remove|rm) + wd_remove $2 + break + ;; + -l|--list|ls) + wd_list_all + break + ;; + -h|--help|help) + wd_print_usage + break + ;; + -s|--show|show) + wd_show + break + ;; + *) + wd_warp $o + break + ;; + --) + break + ;; + esac + done +fi + +## garbage collection +# if not, next time warp will pick up variables from this run +# remember, there's no sub shell + +unset wd_warp +unset wd_add +unset wd_remove +unset wd_show +unset wd_list_all +unset wd_print_msg +unset wd_print_usage + +unset args +unset points +unset val &> /dev/null # fixes issue #1 diff --git a/plugins/web-search/web-search.plugin.zsh b/plugins/web-search/web-search.plugin.zsh new file mode 100644 index 000000000..8eedb90ee --- /dev/null +++ b/plugins/web-search/web-search.plugin.zsh @@ -0,0 +1,56 @@ +# web_search from terminal + +function web_search() { + + # get the open command + local open_cmd + if [[ $(uname -s) == 'Darwin' ]]; then + open_cmd='open' + else + open_cmd='xdg-open' + fi + + # check whether the search engine is supported + if [[ ! $1 =~ '(google|bing|yahoo|duckduckgo)' ]]; + then + echo "Search engine $1 not supported." + return 1 + fi + + local url="http://www.$1.com" + + # no keyword provided, simply open the search engine homepage + if [[ $# -le 1 ]]; then + $open_cmd "$url" + return + fi + if [[ $1 == 'duckduckgo' ]]; then + #slightly different search syntax for DDG + url="${url}/?q=" + else + url="${url}/search?q=" + fi + shift # shift out $1 + + while [[ $# -gt 0 ]]; do + url="${url}$1+" + shift + done + + url="${url%?}" # remove the last '+' + + $open_cmd "$url" +} + + +alias bing='web_search bing' +alias google='web_search google' +alias yahoo='web_search yahoo' +alias ddg='web_search duckduckgo' +#add your own !bang searches here +alias wiki='web_search duckduckgo \!w' +alias news='web_search duckduckgo \!n' +alias youtube='web_search duckduckgo \!yt' +alias map='web_search duckduckgo \!m' +alias image='web_search duckduckgo \!i' +alias ducky='web_search duckduckgo \!' diff --git a/plugins/xcode/xcode.plugin.zsh b/plugins/xcode/xcode.plugin.zsh new file mode 100644 index 000000000..e59bee8c7 --- /dev/null +++ b/plugins/xcode/xcode.plugin.zsh @@ -0,0 +1,19 @@ +#xc function courtesy of http://gist.github.com/subdigital/5420709 +function xc { + xcode_proj=`ls | grep "\.xc" | sort -r | head -1` + if [[ `echo -n $xcode_proj | wc -m` == 0 ]] + then + echo "No xcworkspace/xcodeproj file found in the current directory." 
+ else + echo "Found $xcode_proj" + open "$xcode_proj" + fi +} + +function xcsel { + sudo xcode-select --switch "$*" +} + +alias xcb='xcodebuild' +alias xcp='xcode-select --print-path' +alias simulator='open $(xcode-select -p)/Platforms/iPhoneSimulator.platform/Developer/Applications/iPhone\ Simulator.app' diff --git a/plugins/yii/yii.plugin.zsh b/plugins/yii/yii.plugin.zsh new file mode 100644 index 000000000..b816160f0 --- /dev/null +++ b/plugins/yii/yii.plugin.zsh @@ -0,0 +1,17 @@ +# Yii basic command completion + +_yii_get_command_list () { + protected/yiic | awk '/^ - [a-z]+/ { print $2 }' +} + +_yii () { + if [ -f protected/yiic ]; then + compadd `_yii_get_command_list` + fi +} + +compdef _yii protected/yiic +compdef _yii yiic + +# Aliases +alias yiic='protected/yiic' diff --git a/plugins/z/Makefile b/plugins/z/Makefile new file mode 100644 index 000000000..dcf433d40 --- /dev/null +++ b/plugins/z/Makefile @@ -0,0 +1,4 @@ +readme: + @groff -man -Tascii z.1 | col -bx + +.PHONY: readme diff --git a/plugins/z/README b/plugins/z/README new file mode 100644 index 000000000..ec5abc6f5 --- /dev/null +++ b/plugins/z/README @@ -0,0 +1,135 @@ +Z(1) User Commands Z(1) + + + +NAME + z - jump around + +SYNOPSIS + z [-chlrt] [regex1 regex2 ... regexn] + +AVAILABILITY + bash, zsh + +DESCRIPTION + Tracks your most used directories, based on 'frecency'. + + After a short learning phase, z will take you to the most 'frecent' + directory that matches ALL of the regexes given on the command line. + +OPTIONS + -c restrict matches to subdirectories of the current directory. + + -h show a brief help message + + -l list only + + -r match by rank only + + -t match by recent access only + +EXAMPLES + z foo cd to most frecent dir matching foo + + z foo bar cd to most frecent dir matching foo and bar + + z -r foo cd to highest ranked dir matching foo + + z -t foo cd to most recently accessed dir matching foo + + z -l foo list all dirs matching foo (by frecency) + +NOTES + Installation: + Put something like this in your $HOME/.bashrc or $HOME/.zshrc: + + . /path/to/z.sh + + cd around for a while to build up the db. + + PROFIT!! + + Optionally: + Set $_Z_CMD to change the command name (default z). + Set $_Z_DATA to change the datafile (default $HOME/.z). + Set $_Z_NO_RESOLVE_SYMLINKS to prevent symlink resolution. + Set $_Z_NO_PROMPT_COMMAND to handle PROMPT_COMMAND/precmd your- + self. + Set $_Z_EXCLUDE_DIRS to an array of directories to exclude. + (These settings should go in .bashrc/.zshrc before the lines + added above.) + Install the provided man page z.1 somewhere like + /usr/local/man/man1. + + Aging: + The rank of directories maintained by z undergoes aging based on a sim- + ple formula. The rank of each entry is incremented every time it is + accessed. When the sum of ranks is greater than 6000, all ranks are + multiplied by 0.99. Entries with a rank lower than 1 are forgotten. + + Frecency: + Frecency is a portmantaeu of 'recent' and 'frequency'. It is a weighted + rank that depends on how often and how recently something occured. As + far as I know, Mozilla came up with the term. + + To z, a directory that has low ranking but has been accessed recently + will quickly have higher rank than a directory accessed frequently a + long time ago. + + Frecency is determined at runtime. + + Common: + When multiple directories match all queries, and they all have a common + prefix, z will cd to the shortest matching directory, without regard to + priority. 
This has been in effect, if undocumented, for quite some + time, but should probably be configurable or reconsidered. + + Tab Completion: + z supports tab completion. After any number of arguments, press TAB to + complete on directories that match each argument. Due to limitations of + the completion implementations, only the last argument will be com- + pleted in the shell. + + Internally, z decides you've requested a completion if the last argu- + ment passed is an absolute path to an existing directory. This may + cause unexpected behavior if the last argument to z begins with /. + +ENVIRONMENT + A function _z() is defined. + + The contents of the variable $_Z_CMD is aliased to _z 2>&1. If not set, + $_Z_CMD defaults to z. + + The environment variable $_Z_DATA can be used to control the datafile + location. If it is not defined, the location defaults to $HOME/.z. + + The environment variable $_Z_NO_RESOLVE_SYMLINKS can be set to prevent + resolving of symlinks. If it is not set, symbolic links will be + resolved when added to the datafile. + + In bash, z prepends a command to the PROMPT_COMMAND environment vari- + able to maintain its database. In zsh, z appends a function _z_precmd + to the precmd_functions array. + + The environment variable $_Z_NO_PROMPT_COMMAND can be set if you want + to handle PROMPT_COMMAND or precmd yourself. + + The environment variable $_Z_EXCLUDE_DIRS can be set to an array of + directories to exclude from tracking. $HOME is always excluded. Direc- + tories must be full paths without trailing slashes. + +FILES + Data is stored in $HOME/.z. This can be overridden by setting the + $_Z_DATA environment variable. When initialized, z will raise an error + if this path is a directory, and not function correctly. + + A man page (z.1) is provided. + +SEE ALSO + regex(7), pushd, popd, autojump, cdargs + + Please file bugs at https://github.com/rupa/z/ + + + +z January 2013 Z(1) diff --git a/plugins/z/z.1 b/plugins/z/z.1 new file mode 100644 index 000000000..022a4b35d --- /dev/null +++ b/plugins/z/z.1 @@ -0,0 +1,155 @@ +.TH "Z" "1" "January 2013" "z" "User Commands" +.SH +NAME +z \- jump around +.SH +SYNOPSIS +z [\-chlrt] [regex1 regex2 ... regexn] +.SH +AVAILABILITY +bash, zsh +.SH +DESCRIPTION +Tracks your most used directories, based on 'frecency'. +.P +After a short learning phase, \fBz\fR will take you to the most 'frecent' +directory that matches ALL of the regexes given on the command line. +.SH +OPTIONS +.TP +\fB\-c\fR +restrict matches to subdirectories of the current directory. +.TP +\fB\-h\fR +show a brief help message +.TP +\fB\-l\fR +list only +.TP +\fB\-r\fR +match by rank only +.TP +\fB\-t\fR +match by recent access only +.SH EXAMPLES +.TP 14 +\fBz foo\fR +cd to most frecent dir matching foo +.TP 14 +\fBz foo bar\fR +cd to most frecent dir matching foo and bar +.TP 14 +\fBz -r foo\fR +cd to highest ranked dir matching foo +.TP 14 +\fBz -t foo\fR +cd to most recently accessed dir matching foo +.TP 14 +\fBz -l foo\fR +list all dirs matching foo (by frecency) +.SH +NOTES +.SS +Installation: +.P +Put something like this in your \fB$HOME/.bashrc\fR or \fB$HOME/.zshrc\fR: +.RS +.P +\fB. /path/to/z.sh\fR +.RE +.P +\fBcd\fR around for a while to build up the db. +.P +PROFIT!! +.P +Optionally: +.RS +Set \fB$_Z_CMD\fR to change the command name (default \fBz\fR). +.RE +.RS +Set \fB$_Z_DATA\fR to change the datafile (default \fB$HOME/.z\fR). +.RE +.RS +Set \fB$_Z_NO_RESOLVE_SYMLINKS\fR to prevent symlink resolution. 
+.RE +.RS +Set \fB$_Z_NO_PROMPT_COMMAND\fR to handle \fBPROMPT_COMMAND/precmd\fR yourself. +.RE +.RS +Set \fB$_Z_EXCLUDE_DIRS\fR to an array of directories to exclude. +.RE +.RS +(These settings should go in .bashrc/.zshrc before the lines added above.) +.RE +.RS +Install the provided man page \fBz.1\fR somewhere like \fB/usr/local/man/man1\fR. +.RE +.SS +Aging: +The rank of directories maintained by \fBz\fR undergoes aging based on a simple +formula. The rank of each entry is incremented every time it is accessed. When +the sum of ranks is greater than 6000, all ranks are multiplied by 0.99. Entries +with a rank lower than 1 are forgotten. +.SS +Frecency: +Frecency is a portmantaeu of 'recent' and 'frequency'. It is a weighted rank +that depends on how often and how recently something occured. As far as I +know, Mozilla came up with the term. +.P +To \fBz\fR, a directory that has low ranking but has been accessed recently +will quickly have higher rank than a directory accessed frequently a long time +ago. +.P +Frecency is determined at runtime. +.SS +Common: +When multiple directories match all queries, and they all have a common prefix, +\fBz\fR will cd to the shortest matching directory, without regard to priority. +This has been in effect, if undocumented, for quite some time, but should +probably be configurable or reconsidered. +.SS +Tab Completion: +\fBz\fR supports tab completion. After any number of arguments, press TAB to +complete on directories that match each argument. Due to limitations of the +completion implementations, only the last argument will be completed in the +shell. +.P +Internally, \fBz\fR decides you've requested a completion if the last argument +passed is an absolute path to an existing directory. This may cause unexpected +behavior if the last argument to \fBz\fR begins with \fB/\fR. +.SH +ENVIRONMENT +A function \fB_z()\fR is defined. +.P +The contents of the variable \fB$_Z_CMD\fR is aliased to \fB_z 2>&1\fR. If not +set, \fB$_Z_CMD\fR defaults to \fBz\fR. +.P +The environment variable \fB$_Z_DATA\fR can be used to control the datafile +location. If it is not defined, the location defaults to \fB$HOME/.z\fR. +.P +The environment variable \fB$_Z_NO_RESOLVE_SYMLINKS\fR can be set to prevent +resolving of symlinks. If it is not set, symbolic links will be resolved when +added to the datafile. +.P +In bash, \fBz\fR prepends a command to the \fBPROMPT_COMMAND\fR environment +variable to maintain its database. In zsh, \fBz\fR appends a function +\fB_z_precmd\fR to the \fBprecmd_functions\fR array. +.P +The environment variable \fB$_Z_NO_PROMPT_COMMAND\fR can be set if you want to +handle \fBPROMPT_COMMAND\fR or \fBprecmd\fR yourself. +.P +The environment variable \fB$_Z_EXCLUDE_DIRS\fR can be set to an array of +directories to exclude from tracking. \fB$HOME\fR is always excluded. +Directories must be full paths without trailing slashes. +.SH +FILES +Data is stored in \fB$HOME/.z\fR. This can be overridden by setting the +\fB$_Z_DATA\fR environment variable. When initialized, \fBz\fR will raise an +error if this path is a directory, and not function correctly. +.P +A man page (\fBz.1\fR) is provided. 
+.SH +SEE ALSO +regex(7), pushd, popd, autojump, cdargs +.P +Please file bugs at https://github.com/rupa/z/ diff --git a/plugins/z/z.plugin.zsh b/plugins/z/z.plugin.zsh new file mode 100644 index 000000000..196b88b12 --- /dev/null +++ b/plugins/z/z.plugin.zsh @@ -0,0 +1,6 @@ +_load_z() { + source $1/z.sh +} + +[[ -f $ZSH_CUSTOM/plugins/z/z.plugin.zsh ]] && _load_z $ZSH_CUSTOM/plugins/z +[[ -f $ZSH/plugins/z/z.plugin.zsh ]] && _load_z $ZSH/plugins/z diff --git a/plugins/z/z.sh b/plugins/z/z.sh new file mode 100644 index 000000000..7e444ef46 --- /dev/null +++ b/plugins/z/z.sh @@ -0,0 +1,228 @@ +# Copyright (c) 2009 rupa deadwyler under the WTFPL license + +# maintains a jump-list of the directories you actually use +# +# INSTALL: +# * put something like this in your .bashrc/.zshrc: +# . /path/to/z.sh +# * cd around for a while to build up the db +# * PROFIT!! +# * optionally: +# set $_Z_CMD in .bashrc/.zshrc to change the command (default z). +# set $_Z_DATA in .bashrc/.zshrc to change the datafile (default ~/.z). +# set $_Z_NO_RESOLVE_SYMLINKS to prevent symlink resolution. +# set $_Z_NO_PROMPT_COMMAND if you're handling PROMPT_COMMAND yourself. +# set $_Z_EXCLUDE_DIRS to an array of directories to exclude. +# +# USE: +# * z foo # cd to most frecent dir matching foo +# * z foo bar # cd to most frecent dir matching foo and bar +# * z -r foo # cd to highest ranked dir matching foo +# * z -t foo # cd to most recently accessed dir matching foo +# * z -l foo # list matches instead of cd +# * z -c foo # restrict matches to subdirs of $PWD + +case $- in + *i*) ;; + *) echo 'ERROR: z.sh is meant to be sourced, not directly executed.' +esac + +[ -d "${_Z_DATA:-$HOME/.z}" ] && { + echo "ERROR: z.sh's datafile (${_Z_DATA:-$HOME/.z}) is a directory." +} + +_z() { + + local datafile="${_Z_DATA:-$HOME/.z}" + + # bail out if we don't own ~/.z (we're another user but our ENV is still set) + [ -f "$datafile" -a ! -O "$datafile" ] && return + + # add entries + if [ "$1" = "--add" ]; then + shift + + # $HOME isn't worth matching + [ "$*" = "$HOME" ] && return + + # don't track excluded dirs + local exclude + for exclude in "${_Z_EXCLUDE_DIRS[@]}"; do + [ "$*" = "$exclude" ] && return + done + + # maintain the file + local tempfile + tempfile="$(mktemp "$datafile.XXXXXX")" || return + while read line; do + [ -d "${line%%\|*}" ] && echo $line + done < "$datafile" | awk -v path="$*" -v now="$(date +%s)" -F"|" ' + BEGIN { + rank[path] = 1 + time[path] = now + } + $2 >= 1 { + if( $1 == path ) { + rank[$1] = $2 + 1 + time[$1] = now + } else { + rank[$1] = $2 + time[$1] = $3 + } + count += $2 + } + END { + if( count > 6000 ) { + for( i in rank ) print i "|" 0.99*rank[i] "|" time[i] # aging + } else for( i in rank ) print i "|" rank[i] "|" time[i] + } + ' 2>/dev/null >| "$tempfile" + if [ $? 
-ne 0 -a -f "$datafile" ]; then + env rm -f "$tempfile" + else + env mv -f "$tempfile" "$datafile" + fi + + # tab completion + elif [ "$1" = "--complete" ]; then + while read line; do + [ -d "${line%%\|*}" ] && echo $line + done < "$datafile" | awk -v q="$2" -F"|" ' + BEGIN { + if( q == tolower(q) ) nocase = 1 + split(substr(q,3),fnd," ") + } + { + if( nocase ) { + for( i in fnd ) tolower($1) !~ tolower(fnd[i]) && $1 = "" + } else { + for( i in fnd ) $1 !~ fnd[i] && $1 = "" + } + if( $1 ) print $1 + } + ' 2>/dev/null + + else + # list/go + while [ "$1" ]; do case "$1" in + --) while [ "$1" ]; do shift; local fnd="$fnd $1";done;; + -*) local opt=${1:1}; while [ "$opt" ]; do case ${opt:0:1} in + c) local fnd="^$PWD $fnd";; + h) echo "${_Z_CMD:-z} [-chlrt] args" >&2; return;; + l) local list=1;; + r) local typ="rank";; + t) local typ="recent";; + esac; opt=${opt:1}; done;; + *) local fnd="$fnd $1";; + esac; local last=$1; shift; done + [ "$fnd" -a "$fnd" != "^$PWD " ] || local list=1 + + # if we hit enter on a completion just go there + case "$last" in + # completions will always start with / + /*) [ -z "$list" -a -d "$last" ] && cd "$last" && return;; + esac + + # no file yet + [ -f "$datafile" ] || return + + local cd + cd="$(while read line; do + [ -d "${line%%\|*}" ] && echo $line + done < "$datafile" | awk -v t="$(date +%s)" -v list="$list" -v typ="$typ" -v q="$fnd" -F"|" ' + function frecent(rank, time) { + dx = t-time + if( dx < 3600 ) return rank*4 + if( dx < 86400 ) return rank*2 + if( dx < 604800 ) return rank/2 + return rank/4 + } + function output(files, toopen, override) { + if( list ) { + cmd = "sort -n >&2" + for( i in files ) if( files[i] ) printf "%-10s %s\n", files[i], i | cmd + if( override ) printf "%-10s %s\n", "common:", override > "/dev/stderr" + } else { + if( override ) toopen = override + print toopen + } + } + function common(matches) { + # shortest match + for( i in matches ) { + if( matches[i] && (!short || length(i) < length(short)) ) short = i + } + if( short == "/" ) return + # shortest match must be common to each match. escape special characters in + # a copy when testing, so we can return the original. + clean_short = short + gsub(/[\(\)\[\]\|]/, "\\\\&", clean_short) + for( i in matches ) if( matches[i] && i !~ clean_short ) return + return short + } + BEGIN { split(q, a, " "); oldf = noldf = -9999999999 } + { + if( typ == "rank" ) { + f = $2 + } else if( typ == "recent" ) { + f = $3-t + } else f = frecent($2, $3) + wcase[$1] = nocase[$1] = f + for( i in a ) { + if( $1 !~ a[i] ) delete wcase[$1] + if( tolower($1) !~ tolower(a[i]) ) delete nocase[$1] + } + if( wcase[$1] && wcase[$1] > oldf ) { + cx = $1 + oldf = wcase[$1] + } else if( nocase[$1] && nocase[$1] > noldf ) { + ncx = $1 + noldf = nocase[$1] + } + } + END { + if( cx ) { + output(wcase, cx, common(wcase)) + } else if( ncx ) output(nocase, ncx, common(nocase)) + } + ')" + [ $? 
-gt 0 ] && return + [ "$cd" ] && cd "$cd" + fi +} + +alias ${_Z_CMD:-z}='_z 2>&1' + +[ "$_Z_NO_RESOLVE_SYMLINKS" ] || _Z_RESOLVE_SYMLINKS="-P" + +if compctl &> /dev/null; then + [ "$_Z_NO_PROMPT_COMMAND" ] || { + # zsh populate directory list, avoid clobbering any other precmds + if [ "$_Z_NO_RESOLVE_SYMLINKS" ]; then + _z_precmd() { + _z --add "${PWD:a}" + } + else + _z_precmd() { + _z --add "${PWD:A}" + } + fi + precmd_functions+=(_z_precmd) + } + # zsh tab completion + _z_zsh_tab_completion() { + local compl + read -l compl + reply=(${(f)"$(_z --complete "$compl")"}) + } + compctl -U -K _z_zsh_tab_completion _z +elif complete &> /dev/null; then + # bash tab completion + complete -o filenames -C '_z --complete "$COMP_LINE"' ${_Z_CMD:-z} + [ "$_Z_NO_PROMPT_COMMAND" ] || { + # bash populate directory list. avoid clobbering other PROMPT_COMMANDs. + echo $PROMPT_COMMAND | grep -q "_z --add" || { + PROMPT_COMMAND='_z --add "$(pwd '$_Z_RESOLVE_SYMLINKS' 2>/dev/null)" 2>/dev/null;'"$PROMPT_COMMAND" + } + } +fi diff --git a/plugins/zeus/README.md b/plugins/zeus/README.md index 4409943fe..8964eaaec 100644 --- a/plugins/zeus/README.md +++ b/plugins/zeus/README.md @@ -25,6 +25,8 @@ * `zcu` aliases `zeus cucumber` * `zucumber` aliases `zeus cucumber` +* `zspec` aliases `zeus rspec` + * `zt` aliases `zeus test` * `zest` aliases `zeus test` @@ -39,3 +41,13 @@ * `zsw` aliases `rm .zeus.sock` * `zweep` aliases `rm .zeus.sock` + +`zdbr` aliases `zeus rake db:reset db:test:prepare` +`zdbreset` aliases `zeus rake db:reset db:test:prepare` + +`zdbm` aliases `zeus rake db:migrate db:test:prepare` +`zdbmigrate` aliases `zeus rake db:migrate db:test:prepare` + +`zdbc` aliases `zeus rake db:create` + +`zdbcm` aliases `zeus rake db:create db:migrate db:test:prepare` diff --git a/plugins/zeus/_zeus b/plugins/zeus/_zeus new file mode 100644 index 000000000..5a13bd9ec --- /dev/null +++ b/plugins/zeus/_zeus @@ -0,0 +1,34 @@ +#compdef zeus +#autoload + +# in order to make this work, you will need to have the gem zeus installed + +# zeus zsh completion, based on adb completion + +local -a _1st_arguments +_1st_arguments=( +'console:Lets you interact with your Rails application from the command line. (alias = c)' +'cucumber:Runs cucumber.' +'dbconsole:Figures out which database you are using and drops you into whichever command line interface.' +'destroy:Figures out what generate did, and undoes it. (alias = d)' +'generate:Uses templates to create a whole lot of things. (alias = g)' +'rake:Execute rake tasks.' +'runner:Runs Ruby code in the context of Rails non-interactively. (alias = r)' +'server:Launches a small web server named WEBrick which comes bundled with Ruby. (alias = s)' +'start:Preloads the zeus environment' +'test:Runs RSpec tests. (alias = rspec, testrb)' +'version:Shows the version number.' +) + +local expl +local -a pkgs installed_pkgs + +_arguments \ + '*:: :->subcmds' && return 0 + +if (( CURRENT == 1 )); then + _describe -t commands "zeus subcommand" _1st_arguments + return +fi + +_files diff --git a/plugins/zeus/zeus.plugin.zsh b/plugins/zeus/zeus.plugin.zsh index 2fc7e1ebf..5ec9fa579 100644 --- a/plugins/zeus/zeus.plugin.zsh +++ b/plugins/zeus/zeus.plugin.zsh @@ -2,12 +2,6 @@ # Zeus preloads your Rails environment and forks that process whenever # needed. This effectively speeds up Rails' boot process to under 1 sec. -# Always use bundler. -# Rails depends on bundler, so we can be pretty sure, that there are no -# problems with this command. 
For all the other aliases I provided an -# alternative, in case people have conflicts with other plugins (e.g. suse). -alias zeus='bundle exec zeus' - # Init alias zi='zeus init' alias zinit='zeus init' @@ -40,6 +34,9 @@ alias zunner='zeus runner' alias zcu='zeus cucumber' alias zucumber='zeus cucumber' +# Rspec +alias zspec='zeus rspec' + # Test alias zt='zeus test' alias zest='zeus test' @@ -56,3 +53,17 @@ alias zall='zeus test test/unit/*; zeus test test/functional/; zeus cucumber' # Clean up crashed zeus instances. alias zsw='rm .zeus.sock' alias zweep='rm .zeus.sock' + +# Reset database +alias zdbr='zeus rake db:reset db:test:prepare' +alias zdbreset='zeus rake db:reset db:test:prepare' + +# Migrate and prepare database +alias zdbm='zeus rake db:migrate db:test:prepare' +alias zdbmigrate='zeus rake db:migrate db:test:prepare' + +# Create database +alias zdbc='zeus rake db:create' + +# Create, migrate and prepare database +alias zdbcm='zeus rake db:create db:migrate db:test:prepare'
\ No newline at end of file diff --git a/plugins/zsh_reload/zsh_reload.plugin.zsh b/plugins/zsh_reload/zsh_reload.plugin.zsh new file mode 100644 index 000000000..3f44b99c6 --- /dev/null +++ b/plugins/zsh_reload/zsh_reload.plugin.zsh @@ -0,0 +1,13 @@ +# reload zshrc +function src() +{ + local cache="$ZSH/cache" + autoload -U compinit zrecompile + compinit -d "$cache/zcomp-$HOST" + + for f in ~/.zshrc "$cache/zcomp-$HOST"; do + zrecompile -p $f && command rm -f $f.zwc.old + done + + source ~/.zshrc +}
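Editor's note: the z plugin added above describes its "frecency" rule in prose (README and z.1) and implements it inside an awk script in z.sh, where the weighting thresholds are easy to miss. The standalone zsh sketch below restates that same weighting only as an illustration; it is not part of the plugin, and the name frecency_weight is made up for this note.

# A minimal sketch of z's frecency weighting (mirrors the awk frecent()
# helper in z.sh above); frecency_weight is a hypothetical name, not part
# of the plugin.
zmodload zsh/datetime    # provides $EPOCHSECONDS

# Usage: frecency_weight <rank> <last-access-epoch-seconds>
frecency_weight() {
  local rank=$1 last=$2
  local dx=$(( EPOCHSECONDS - last ))       # seconds since the last visit
  if   (( dx < 3600 ));   then print $(( rank * 4 ))    # within the last hour
  elif (( dx < 86400 ));  then print $(( rank * 2 ))    # within the last day
  elif (( dx < 604800 )); then print $(( rank / 2.0 ))  # within the last week
  else                         print $(( rank / 4.0 ))  # older than a week
  fi
}

# Example: a directory with rank 3 visited ten minutes ago scores 12, while one
# with rank 10 last visited a month ago scores 2.5 -- which is why a recently
# used, low-ranked directory can outrank an old favourite, as the README notes.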