Dataset columns and value statistics:

| Column | Type | Values |
|---|---|---|
| commit | stringlengths | 40–40 |
| old_file | stringlengths | 4–184 |
| new_file | stringlengths | 4–184 |
| old_contents | stringlengths | 1–3.6k |
| new_contents | stringlengths | 5–3.38k |
| subject | stringlengths | 15–778 |
| message | stringlengths | 16–6.74k |
| lang | stringclasses | 201 values |
| license | stringclasses | 13 values |
| repos | stringlengths | 6–116k |
| config | stringclasses | 201 values |
| content | stringlengths | 137–7.24k |
| diff | stringlengths | 26–5.55k |
| diff_length | int64 | 1–123 |
| relative_diff_length | float64 | 0.01–89 |
| n_lines_added | int64 | 0–108 |
| n_lines_deleted | int64 | 0–106 |
7f441dc502ef5d83147feeb06981df7bc7257565
src/manifest.json
src/manifest.json
{ "name": "Picture-in-Picture", "description": "Watch video using Picture-in-Picture", "version": "1.1", "icons": { "128": "assets/icon128.png" }, "background": { "persistent": false, "scripts": ["background.js"] }, "browser_action": { "default_icon": { "19": "assets/icon19.png", "38": "assets/icon38.png" } }, "permissions": [ "<all_urls>" ], "minimum_chrome_version": "69.0.3483.0", "manifest_version": 2 }
{ "name": "Picture-in-Picture Extension (by Google)", "description": "Watch video using Picture-in-Picture", "version": "1.1", "icons": { "128": "assets/icon128.png" }, "background": { "persistent": false, "scripts": ["background.js"] }, "browser_action": { "default_icon": { "19": "assets/icon19.png", "38": "assets/icon38.png" } }, "permissions": [ "<all_urls>" ], "minimum_chrome_version": "69.0.3483.0", "manifest_version": 2 }
Add "(by Google") to name
Add "(by Google") to name
JSON
apache-2.0
GoogleChromeLabs/picture-in-picture-chrome-extension,GoogleChromeLabs/picture-in-picture-chrome-extension
json
## Code Before: { "name": "Picture-in-Picture", "description": "Watch video using Picture-in-Picture", "version": "1.1", "icons": { "128": "assets/icon128.png" }, "background": { "persistent": false, "scripts": ["background.js"] }, "browser_action": { "default_icon": { "19": "assets/icon19.png", "38": "assets/icon38.png" } }, "permissions": [ "<all_urls>" ], "minimum_chrome_version": "69.0.3483.0", "manifest_version": 2 } ## Instruction: Add "(by Google") to name ## Code After: { "name": "Picture-in-Picture Extension (by Google)", "description": "Watch video using Picture-in-Picture", "version": "1.1", "icons": { "128": "assets/icon128.png" }, "background": { "persistent": false, "scripts": ["background.js"] }, "browser_action": { "default_icon": { "19": "assets/icon19.png", "38": "assets/icon38.png" } }, "permissions": [ "<all_urls>" ], "minimum_chrome_version": "69.0.3483.0", "manifest_version": 2 }
{ - "name": "Picture-in-Picture", + "name": "Picture-in-Picture Extension (by Google)", "description": "Watch video using Picture-in-Picture", "version": "1.1", "icons": { "128": "assets/icon128.png" }, "background": { "persistent": false, "scripts": ["background.js"] }, "browser_action": { "default_icon": { "19": "assets/icon19.png", "38": "assets/icon38.png" } }, "permissions": [ "<all_urls>" ], "minimum_chrome_version": "69.0.3483.0", "manifest_version": 2 }
2
0.083333
1
1
8350f22c0f0a7fa5ae57e613621c111df4e48d7d
desktop/src/main/java/bisq/desktop/common/view/CachingViewLoader.java
desktop/src/main/java/bisq/desktop/common/view/CachingViewLoader.java
/* * This file is part of Bisq. * * Bisq is free software: you can redistribute it and/or modify it * under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or (at * your option) any later version. * * Bisq is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public * License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Bisq. If not, see <http://www.gnu.org/licenses/>. */ package bisq.desktop.common.view; import javax.inject.Inject; import javax.inject.Singleton; import java.util.HashMap; @Singleton public class CachingViewLoader implements ViewLoader { private final HashMap<Object, View> cache = new HashMap<>(); private final ViewLoader viewLoader; @Inject public CachingViewLoader(ViewLoader viewLoader) { this.viewLoader = viewLoader; } @Override public View load(Class<? extends View> viewClass) { if (cache.containsKey(viewClass)) return cache.get(viewClass); View view = viewLoader.load(viewClass); cache.put(viewClass, view); return view; } }
/* * This file is part of Bisq. * * Bisq is free software: you can redistribute it and/or modify it * under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or (at * your option) any later version. * * Bisq is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public * License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Bisq. If not, see <http://www.gnu.org/licenses/>. */ package bisq.desktop.common.view; import javax.inject.Inject; import javax.inject.Singleton; import java.util.HashMap; import java.util.Map; @Singleton public class CachingViewLoader implements ViewLoader { private final Map<Class<? extends View>, View> cache = new HashMap<>(); private final ViewLoader viewLoader; @Inject public CachingViewLoader(ViewLoader viewLoader) { this.viewLoader = viewLoader; } @Override public View load(Class<? extends View> viewClass) { if (cache.containsKey(viewClass)) return cache.get(viewClass); View view = viewLoader.load(viewClass); cache.put(viewClass, view); return view; } }
Set correct type of cache map
Set correct type of cache map
Java
agpl-3.0
bitsquare/bitsquare,bitsquare/bitsquare,bisq-network/exchange,bisq-network/exchange
java
## Code Before: /* * This file is part of Bisq. * * Bisq is free software: you can redistribute it and/or modify it * under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or (at * your option) any later version. * * Bisq is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public * License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Bisq. If not, see <http://www.gnu.org/licenses/>. */ package bisq.desktop.common.view; import javax.inject.Inject; import javax.inject.Singleton; import java.util.HashMap; @Singleton public class CachingViewLoader implements ViewLoader { private final HashMap<Object, View> cache = new HashMap<>(); private final ViewLoader viewLoader; @Inject public CachingViewLoader(ViewLoader viewLoader) { this.viewLoader = viewLoader; } @Override public View load(Class<? extends View> viewClass) { if (cache.containsKey(viewClass)) return cache.get(viewClass); View view = viewLoader.load(viewClass); cache.put(viewClass, view); return view; } } ## Instruction: Set correct type of cache map ## Code After: /* * This file is part of Bisq. * * Bisq is free software: you can redistribute it and/or modify it * under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or (at * your option) any later version. * * Bisq is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public * License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Bisq. If not, see <http://www.gnu.org/licenses/>. */ package bisq.desktop.common.view; import javax.inject.Inject; import javax.inject.Singleton; import java.util.HashMap; import java.util.Map; @Singleton public class CachingViewLoader implements ViewLoader { private final Map<Class<? extends View>, View> cache = new HashMap<>(); private final ViewLoader viewLoader; @Inject public CachingViewLoader(ViewLoader viewLoader) { this.viewLoader = viewLoader; } @Override public View load(Class<? extends View> viewClass) { if (cache.containsKey(viewClass)) return cache.get(viewClass); View view = viewLoader.load(viewClass); cache.put(viewClass, view); return view; } }
/* * This file is part of Bisq. * * Bisq is free software: you can redistribute it and/or modify it * under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or (at * your option) any later version. * * Bisq is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public * License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Bisq. If not, see <http://www.gnu.org/licenses/>. */ package bisq.desktop.common.view; import javax.inject.Inject; import javax.inject.Singleton; import java.util.HashMap; + import java.util.Map; @Singleton public class CachingViewLoader implements ViewLoader { - private final HashMap<Object, View> cache = new HashMap<>(); ? ---- ^^^ ^ + private final Map<Class<? extends View>, View> cache = new HashMap<>(); ? ^^^^^^^^ ^ ++++++++++ private final ViewLoader viewLoader; @Inject public CachingViewLoader(ViewLoader viewLoader) { this.viewLoader = viewLoader; } @Override public View load(Class<? extends View> viewClass) { if (cache.containsKey(viewClass)) return cache.get(viewClass); View view = viewLoader.load(viewClass); cache.put(viewClass, view); return view; } }
3
0.066667
2
1
0cffaa75d8f7dc0b44add26bd97495abcf37485b
cmake/project_version.cmake
cmake/project_version.cmake
find_package(Git) if(GIT_EXECUTABLE) execute_process( COMMAND ${GIT_EXECUTABLE} describe --match "v[0-9]*.[0-9]*.[0-9]*" --always --tags --dirty OUTPUT_VARIABLE PROJECT_VERSION ERROR_QUIET ) # v{VERSION}-{N}-g{HASH} -> {VERSION}-{HASH} string(STRIP ${PROJECT_VERSION} PROJECT_VERSION) string(REGEX REPLACE "^v?([0-9]*.[0-9]*.[0-9]*)-[0-9]+-g([0-9a-f]*)" "\\1-\\2" PROJECT_VERSION ${PROJECT_VERSION} ) else() set(PROJECT_VERSION "0.0.0") endif()
find_package(Git) if(GIT_EXECUTABLE) execute_process( COMMAND ${GIT_EXECUTABLE} describe --match "v[0-9]*.[0-9]*.[0-9]*" --always --tags --dirty WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} OUTPUT_VARIABLE PROJECT_VERSION ERROR_QUIET ) # v{VERSION}-{N}-g{HASH} -> {VERSION}-{HASH} string(STRIP ${PROJECT_VERSION} PROJECT_VERSION) string(REGEX REPLACE "^v?([0-9]*.[0-9]*.[0-9]*)-[0-9]+-g([0-9a-f]*)" "\\1-\\2" PROJECT_VERSION ${PROJECT_VERSION} ) else() set(PROJECT_VERSION "0.0.0") endif()
Fix version detection for out-of-source builds
Fix version detection for out-of-source builds
CMake
mit
svens/sal,svens/sal
cmake
## Code Before: find_package(Git) if(GIT_EXECUTABLE) execute_process( COMMAND ${GIT_EXECUTABLE} describe --match "v[0-9]*.[0-9]*.[0-9]*" --always --tags --dirty OUTPUT_VARIABLE PROJECT_VERSION ERROR_QUIET ) # v{VERSION}-{N}-g{HASH} -> {VERSION}-{HASH} string(STRIP ${PROJECT_VERSION} PROJECT_VERSION) string(REGEX REPLACE "^v?([0-9]*.[0-9]*.[0-9]*)-[0-9]+-g([0-9a-f]*)" "\\1-\\2" PROJECT_VERSION ${PROJECT_VERSION} ) else() set(PROJECT_VERSION "0.0.0") endif() ## Instruction: Fix version detection for out-of-source builds ## Code After: find_package(Git) if(GIT_EXECUTABLE) execute_process( COMMAND ${GIT_EXECUTABLE} describe --match "v[0-9]*.[0-9]*.[0-9]*" --always --tags --dirty WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} OUTPUT_VARIABLE PROJECT_VERSION ERROR_QUIET ) # v{VERSION}-{N}-g{HASH} -> {VERSION}-{HASH} string(STRIP ${PROJECT_VERSION} PROJECT_VERSION) string(REGEX REPLACE "^v?([0-9]*.[0-9]*.[0-9]*)-[0-9]+-g([0-9a-f]*)" "\\1-\\2" PROJECT_VERSION ${PROJECT_VERSION} ) else() set(PROJECT_VERSION "0.0.0") endif()
find_package(Git) if(GIT_EXECUTABLE) execute_process( COMMAND ${GIT_EXECUTABLE} describe --match "v[0-9]*.[0-9]*.[0-9]*" --always --tags --dirty + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} OUTPUT_VARIABLE PROJECT_VERSION ERROR_QUIET ) # v{VERSION}-{N}-g{HASH} -> {VERSION}-{HASH} string(STRIP ${PROJECT_VERSION} PROJECT_VERSION) string(REGEX REPLACE "^v?([0-9]*.[0-9]*.[0-9]*)-[0-9]+-g([0-9a-f]*)" "\\1-\\2" PROJECT_VERSION ${PROJECT_VERSION} ) else() set(PROJECT_VERSION "0.0.0") endif()
1
0.055556
1
0
c1cf84504f1a4a0c41ea764c9f86e64158193f54
user_api/com/mdi/storage/collection_definitions_mapping.rb
user_api/com/mdi/storage/collection_definitions_mapping.rb
module UserApis module Mdi module Storage # @api private class CollectionDefinitionsMappingClass def initialize(apis) @user_apis = apis end def user_api @user_apis end # return a collection definition structs array def get_all() RagentApi::CollectionDefinitionMapping.get_all(user_api.account) end end end #Storage end #Mdi end #UserApis
module UserApis module Mdi module Storage # @api private class CollectionDefinitionsMappingClass def initialize(apis) @user_apis = apis end def user_api @user_apis end # return a collection definition structs array def get_all() @all_definitions ||= RagentApi::CollectionDefinitionMapping.get_all(user_api.account) end def get_for_asset_with_type(imei, type) asset_definitions = [] definitions = self.get_all() definitions.each do |definition| if (definition.assets == [] || definition.assets.include? imei) && definition.collects.include? type asset_definitions << definition end end asset_definitions end end end #Storage end #Mdi end #UserApis
Add method to get definitions for an asset
Add method to get definitions for an asset
Ruby
mit
mobile-devices/ragent_bay,mobile-devices/ragent_bay,mobile-devices/ragent_bay,mobile-devices/ragent_bay
ruby
## Code Before: module UserApis module Mdi module Storage # @api private class CollectionDefinitionsMappingClass def initialize(apis) @user_apis = apis end def user_api @user_apis end # return a collection definition structs array def get_all() RagentApi::CollectionDefinitionMapping.get_all(user_api.account) end end end #Storage end #Mdi end #UserApis ## Instruction: Add method to get definitions for an asset ## Code After: module UserApis module Mdi module Storage # @api private class CollectionDefinitionsMappingClass def initialize(apis) @user_apis = apis end def user_api @user_apis end # return a collection definition structs array def get_all() @all_definitions ||= RagentApi::CollectionDefinitionMapping.get_all(user_api.account) end def get_for_asset_with_type(imei, type) asset_definitions = [] definitions = self.get_all() definitions.each do |definition| if (definition.assets == [] || definition.assets.include? imei) && definition.collects.include? type asset_definitions << definition end end asset_definitions end end end #Storage end #Mdi end #UserApis
module UserApis module Mdi module Storage # @api private class CollectionDefinitionsMappingClass def initialize(apis) @user_apis = apis end def user_api @user_apis end # return a collection definition structs array def get_all() - RagentApi::CollectionDefinitionMapping.get_all(user_api.account) + @all_definitions ||= RagentApi::CollectionDefinitionMapping.get_all(user_api.account) ? +++++++++++++++++++++ end + def get_for_asset_with_type(imei, type) + asset_definitions = [] + definitions = self.get_all() + + definitions.each do |definition| + if (definition.assets == [] || definition.assets.include? imei) && definition.collects.include? type + asset_definitions << definition + end + end + asset_definitions + end end end #Storage end #Mdi end #UserApis
13
0.481481
12
1
c19e0bea9f17d44dc1d26f1ee7bbaa2f2a4dc257
lib/filepicker/rails/policy.rb
lib/filepicker/rails/policy.rb
require 'base64' require 'openssl' module Filepicker module Rails class Policy attr_accessor :expiry, :call, :handle, :maxsize, :minsize def initialize(options = {}) [:expiry, :call, :handle, :maxsize, :minsize].each do |input| send("#{input}=", options[input]) unless options[input].nil? end end def policy Base64.urlsafe_encode64(json_policy) end def signature OpenSSL::HMAC.hexdigest('sha256', ::Rails.application.config.filepicker_rails.secret_key, policy) end private def json_policy hash = Hash.new @expiry ||= Time.now.to_i + ::Rails.application.config.filepicker_rails.default_expiry [:expiry, :call, :handle, :maxsize, :minsize].each do |input| hash[input] = send(input) unless send(input).nil? end MultiJson.dump(hash) end end end end
require 'base64' require 'openssl' module Filepicker module Rails class Policy attr_accessor :expiry, :call, :handle, :maxsize, :minsize, :path def initialize(options = {}) [:expiry, :call, :handle, :maxsize, :minsize, :path].each do |input| send("#{input}=", options[input]) unless options[input].nil? end end def policy Base64.urlsafe_encode64(json_policy) end def signature OpenSSL::HMAC.hexdigest('sha256', ::Rails.application.config.filepicker_rails.secret_key, policy) end private def json_policy hash = Hash.new @expiry ||= Time.now.to_i + ::Rails.application.config.filepicker_rails.default_expiry [:expiry, :call, :handle, :maxsize, :minsize, :path].each do |input| hash[input] = send(input) unless send(input).nil? end MultiJson.dump(hash) end end end end
Add support for new path option
Add support for new path option
Ruby
mit
SchoolKeep/filepicker-rails,SchoolKeep/filepicker-rails,SchoolKeep/filepicker-rails,Ink/filepicker-rails,munirent/filepicker-rails,Ink/filepicker-rails,munirent/filepicker-rails,munirent/filepicker-rails,Ink/filepicker-rails
ruby
## Code Before: require 'base64' require 'openssl' module Filepicker module Rails class Policy attr_accessor :expiry, :call, :handle, :maxsize, :minsize def initialize(options = {}) [:expiry, :call, :handle, :maxsize, :minsize].each do |input| send("#{input}=", options[input]) unless options[input].nil? end end def policy Base64.urlsafe_encode64(json_policy) end def signature OpenSSL::HMAC.hexdigest('sha256', ::Rails.application.config.filepicker_rails.secret_key, policy) end private def json_policy hash = Hash.new @expiry ||= Time.now.to_i + ::Rails.application.config.filepicker_rails.default_expiry [:expiry, :call, :handle, :maxsize, :minsize].each do |input| hash[input] = send(input) unless send(input).nil? end MultiJson.dump(hash) end end end end ## Instruction: Add support for new path option ## Code After: require 'base64' require 'openssl' module Filepicker module Rails class Policy attr_accessor :expiry, :call, :handle, :maxsize, :minsize, :path def initialize(options = {}) [:expiry, :call, :handle, :maxsize, :minsize, :path].each do |input| send("#{input}=", options[input]) unless options[input].nil? end end def policy Base64.urlsafe_encode64(json_policy) end def signature OpenSSL::HMAC.hexdigest('sha256', ::Rails.application.config.filepicker_rails.secret_key, policy) end private def json_policy hash = Hash.new @expiry ||= Time.now.to_i + ::Rails.application.config.filepicker_rails.default_expiry [:expiry, :call, :handle, :maxsize, :minsize, :path].each do |input| hash[input] = send(input) unless send(input).nil? end MultiJson.dump(hash) end end end end
require 'base64' require 'openssl' module Filepicker module Rails class Policy - attr_accessor :expiry, :call, :handle, :maxsize, :minsize + attr_accessor :expiry, :call, :handle, :maxsize, :minsize, :path ? +++++++ def initialize(options = {}) - [:expiry, :call, :handle, :maxsize, :minsize].each do |input| + [:expiry, :call, :handle, :maxsize, :minsize, :path].each do |input| ? +++++++ send("#{input}=", options[input]) unless options[input].nil? end end def policy Base64.urlsafe_encode64(json_policy) end def signature OpenSSL::HMAC.hexdigest('sha256', ::Rails.application.config.filepicker_rails.secret_key, policy) end private def json_policy hash = Hash.new @expiry ||= Time.now.to_i + ::Rails.application.config.filepicker_rails.default_expiry - [:expiry, :call, :handle, :maxsize, :minsize].each do |input| + [:expiry, :call, :handle, :maxsize, :minsize, :path].each do |input| ? +++++++ hash[input] = send(input) unless send(input).nil? end MultiJson.dump(hash) end end end end
6
0.162162
3
3
eb3bc986f8b48cb3d59a60c73bdbc4244aa10922
src/test/steps/gvm/aeroplane_mode_steps.groovy
src/test/steps/gvm/aeroplane_mode_steps.groovy
package gvm import cucumber.runtime.PendingException import static cucumber.api.groovy.EN.* final SERVICE_DOWN = "http://localhost:0" final FAKE_JDK_PATH = "/path/to/my/openjdk" Given(~'^the internet is not reachable$') {-> bash = new BashEnv(gvmBaseEnv, [GVM_DIR: gvmDirEnv, GVM_SERVICE: SERVICE_DOWN, JAVA_HOME: FAKE_JDK_PATH]) bash.start() bash.execute("source $binDir/gvm-init.sh") } And(~'^the internet is reachable$') {-> bash = new BashEnv(gvmBaseEnv, [GVM_DIR: gvmDirEnv, GVM_SERVICE: serviceUrlEnv, JAVA_HOME: FAKE_JDK_PATH]) bash.start() bash.execute("source $binDir/gvm-init.sh") } And(~'^offline mode is disabled$') {-> // Express the Regexp above with the code you wish you had throw new PendingException() } And(~'^offline mode is enabled$') {-> // Express the Regexp above with the code you wish you had throw new PendingException() }
package gvm import static cucumber.api.groovy.EN.And import static cucumber.api.groovy.EN.Given final SERVICE_DOWN = "http://localhost:0" final FAKE_JDK_PATH = "/path/to/my/openjdk" And(~'^offline mode is disabled$') {-> forceOffline = false } And(~'^offline mode is enabled$') {-> forceOffline = true } Given(~'^the internet is not reachable$') {-> def online = "false" def forceOffline = forceOffline ?: "false" initialiseEnvironment(gvmBaseEnv, gvmDirEnv, online, forceOffline, SERVICE_DOWN, FAKE_JDK_PATH) } And(~'^the internet is reachable$') {-> def online = "true" def forceOffline = forceOffline ?: "false" initialiseEnvironment(gvmBaseEnv, gvmDirEnv, online, forceOffline, serviceUrlEnv, FAKE_JDK_PATH) } private initialiseEnvironment(gvmBaseEnv, gvmDirEnv, online, forceOffline, serviceUrlEnv, javaHome){ bash = new BashEnv(gvmBaseEnv, [GVM_DIR: gvmDirEnv, GVM_ONLINE:online, GVM_FORCE_OFFLINE: forceOffline, GVM_SERVICE: serviceUrlEnv, JAVA_HOME: javaHome]) bash.start() bash.execute("source $gvmDirEnv/bin/gvm-init.sh") }
Introduce forced offline mode step defs, extract environment initialisation to private method.
Introduce forced offline mode step defs, extract environment initialisation to private method.
Groovy
apache-2.0
shanman190/sdkman-cli,nobeans/gvm-cli,gvmtool/gvm-cli,skpal/sdkman-cli,jbovet/gvm,GsusRecovery/sdkman-cli,DealerDotCom/gvm-cli,sdkman/sdkman-cli,skpal/sdkman-cli,GsusRecovery/sdkman-cli,nobeans/gvm-cli,DealerDotCom/gvm-cli,busches/gvm-cli
groovy
## Code Before: package gvm import cucumber.runtime.PendingException import static cucumber.api.groovy.EN.* final SERVICE_DOWN = "http://localhost:0" final FAKE_JDK_PATH = "/path/to/my/openjdk" Given(~'^the internet is not reachable$') {-> bash = new BashEnv(gvmBaseEnv, [GVM_DIR: gvmDirEnv, GVM_SERVICE: SERVICE_DOWN, JAVA_HOME: FAKE_JDK_PATH]) bash.start() bash.execute("source $binDir/gvm-init.sh") } And(~'^the internet is reachable$') {-> bash = new BashEnv(gvmBaseEnv, [GVM_DIR: gvmDirEnv, GVM_SERVICE: serviceUrlEnv, JAVA_HOME: FAKE_JDK_PATH]) bash.start() bash.execute("source $binDir/gvm-init.sh") } And(~'^offline mode is disabled$') {-> // Express the Regexp above with the code you wish you had throw new PendingException() } And(~'^offline mode is enabled$') {-> // Express the Regexp above with the code you wish you had throw new PendingException() } ## Instruction: Introduce forced offline mode step defs, extract environment initialisation to private method. ## Code After: package gvm import static cucumber.api.groovy.EN.And import static cucumber.api.groovy.EN.Given final SERVICE_DOWN = "http://localhost:0" final FAKE_JDK_PATH = "/path/to/my/openjdk" And(~'^offline mode is disabled$') {-> forceOffline = false } And(~'^offline mode is enabled$') {-> forceOffline = true } Given(~'^the internet is not reachable$') {-> def online = "false" def forceOffline = forceOffline ?: "false" initialiseEnvironment(gvmBaseEnv, gvmDirEnv, online, forceOffline, SERVICE_DOWN, FAKE_JDK_PATH) } And(~'^the internet is reachable$') {-> def online = "true" def forceOffline = forceOffline ?: "false" initialiseEnvironment(gvmBaseEnv, gvmDirEnv, online, forceOffline, serviceUrlEnv, FAKE_JDK_PATH) } private initialiseEnvironment(gvmBaseEnv, gvmDirEnv, online, forceOffline, serviceUrlEnv, javaHome){ bash = new BashEnv(gvmBaseEnv, [GVM_DIR: gvmDirEnv, GVM_ONLINE:online, GVM_FORCE_OFFLINE: forceOffline, GVM_SERVICE: serviceUrlEnv, JAVA_HOME: javaHome]) bash.start() bash.execute("source $gvmDirEnv/bin/gvm-init.sh") }
package gvm - import cucumber.runtime.PendingException - - import static cucumber.api.groovy.EN.* ? ^ + import static cucumber.api.groovy.EN.And ? ^^^ + import static cucumber.api.groovy.EN.Given final SERVICE_DOWN = "http://localhost:0" final FAKE_JDK_PATH = "/path/to/my/openjdk" + And(~'^offline mode is disabled$') {-> + forceOffline = false + } + + And(~'^offline mode is enabled$') {-> + forceOffline = true + } + Given(~'^the internet is not reachable$') {-> - bash = new BashEnv(gvmBaseEnv, [GVM_DIR: gvmDirEnv, GVM_SERVICE: SERVICE_DOWN, JAVA_HOME: FAKE_JDK_PATH]) - bash.start() - bash.execute("source $binDir/gvm-init.sh") + def online = "false" + def forceOffline = forceOffline ?: "false" + initialiseEnvironment(gvmBaseEnv, gvmDirEnv, online, forceOffline, SERVICE_DOWN, FAKE_JDK_PATH) } And(~'^the internet is reachable$') {-> - bash = new BashEnv(gvmBaseEnv, [GVM_DIR: gvmDirEnv, GVM_SERVICE: serviceUrlEnv, JAVA_HOME: FAKE_JDK_PATH]) - bash.start() - bash.execute("source $binDir/gvm-init.sh") + def online = "true" + def forceOffline = forceOffline ?: "false" + initialiseEnvironment(gvmBaseEnv, gvmDirEnv, online, forceOffline, serviceUrlEnv, FAKE_JDK_PATH) } - And(~'^offline mode is disabled$') {-> - // Express the Regexp above with the code you wish you had - throw new PendingException() + private initialiseEnvironment(gvmBaseEnv, gvmDirEnv, online, forceOffline, serviceUrlEnv, javaHome){ + bash = new BashEnv(gvmBaseEnv, [GVM_DIR: gvmDirEnv, GVM_ONLINE:online, GVM_FORCE_OFFLINE: forceOffline, GVM_SERVICE: serviceUrlEnv, JAVA_HOME: javaHome]) + bash.start() + bash.execute("source $gvmDirEnv/bin/gvm-init.sh") } - - And(~'^offline mode is enabled$') {-> - // Express the Regexp above with the code you wish you had - throw new PendingException() - }
37
1.193548
20
17
a2bb23d7f717d19d3c6d4b2aabbd9fb6d0425fd6
mmtf-serialization/src/main/java/org/rcsb/mmtf/serialization/MessagePackSerialization.java
mmtf-serialization/src/main/java/org/rcsb/mmtf/serialization/MessagePackSerialization.java
package org.rcsb.mmtf.serialization; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.msgpack.jackson.dataformat.MessagePackFactory; import org.rcsb.mmtf.dataholders.MmtfStructure; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; /** * A message pack implementation of the {@link MmtfStructure} serializer / deserializer. * @author Anthony Bradley * */ public class MessagePackSerialization implements MmtfStructureSerializationInterface { @Override public MmtfStructure deserialize(InputStream inputStream){ MmtfStructure mmtfBean = null; try { mmtfBean = new ObjectMapper(new MessagePackFactory()).readValue(inputStream, MmtfStructure.class); } catch (IOException e) { e.printStackTrace(); } return mmtfBean; } @Override public void serialize(MmtfStructure mmtfStructure, OutputStream outputStream) { ObjectMapper objectMapper = new ObjectMapper(new MessagePackFactory()); objectMapper.setSerializationInclusion(Include.NON_NULL); try { objectMapper.writeValue(outputStream, mmtfStructure); } catch (IOException e) { e.printStackTrace(); } } }
package org.rcsb.mmtf.serialization; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.msgpack.jackson.dataformat.MessagePackFactory; import org.rcsb.mmtf.dataholders.MmtfStructure; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; /** * A message pack implementation of the {@link MmtfStructure} serializer / deserializer. * @author Anthony Bradley * */ public class MessagePackSerialization implements MmtfStructureSerializationInterface { private ObjectMapper objectMapper; /** * Constructor for the {@link MessagePackSerialization} class. * Generates {@link ObjectMapper} and sets to include non-null. */ public MessagePackSerialization() { objectMapper = new ObjectMapper(new MessagePackFactory()); objectMapper.setSerializationInclusion(Include.NON_NULL); } @Override public MmtfStructure deserialize(InputStream inputStream){ MmtfStructure mmtfBean = null; try { mmtfBean = objectMapper.readValue(inputStream, MmtfStructure.class); } catch (IOException e) { e.printStackTrace(); } return mmtfBean; } @Override public void serialize(MmtfStructure mmtfStructure, OutputStream outputStream) { try { objectMapper.writeValue(outputStream, mmtfStructure); } catch (IOException e) { e.printStackTrace(); } } }
Update to the serialization module - only construct object mapper once.
Update to the serialization module - only construct object mapper once.
Java
apache-2.0
pwrose/mmtf-java,josemduarte/mmtf-java,rcsb/mmtf-java
java
## Code Before: package org.rcsb.mmtf.serialization; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.msgpack.jackson.dataformat.MessagePackFactory; import org.rcsb.mmtf.dataholders.MmtfStructure; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; /** * A message pack implementation of the {@link MmtfStructure} serializer / deserializer. * @author Anthony Bradley * */ public class MessagePackSerialization implements MmtfStructureSerializationInterface { @Override public MmtfStructure deserialize(InputStream inputStream){ MmtfStructure mmtfBean = null; try { mmtfBean = new ObjectMapper(new MessagePackFactory()).readValue(inputStream, MmtfStructure.class); } catch (IOException e) { e.printStackTrace(); } return mmtfBean; } @Override public void serialize(MmtfStructure mmtfStructure, OutputStream outputStream) { ObjectMapper objectMapper = new ObjectMapper(new MessagePackFactory()); objectMapper.setSerializationInclusion(Include.NON_NULL); try { objectMapper.writeValue(outputStream, mmtfStructure); } catch (IOException e) { e.printStackTrace(); } } } ## Instruction: Update to the serialization module - only construct object mapper once. ## Code After: package org.rcsb.mmtf.serialization; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.msgpack.jackson.dataformat.MessagePackFactory; import org.rcsb.mmtf.dataholders.MmtfStructure; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; /** * A message pack implementation of the {@link MmtfStructure} serializer / deserializer. * @author Anthony Bradley * */ public class MessagePackSerialization implements MmtfStructureSerializationInterface { private ObjectMapper objectMapper; /** * Constructor for the {@link MessagePackSerialization} class. * Generates {@link ObjectMapper} and sets to include non-null. */ public MessagePackSerialization() { objectMapper = new ObjectMapper(new MessagePackFactory()); objectMapper.setSerializationInclusion(Include.NON_NULL); } @Override public MmtfStructure deserialize(InputStream inputStream){ MmtfStructure mmtfBean = null; try { mmtfBean = objectMapper.readValue(inputStream, MmtfStructure.class); } catch (IOException e) { e.printStackTrace(); } return mmtfBean; } @Override public void serialize(MmtfStructure mmtfStructure, OutputStream outputStream) { try { objectMapper.writeValue(outputStream, mmtfStructure); } catch (IOException e) { e.printStackTrace(); } } }
package org.rcsb.mmtf.serialization; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.msgpack.jackson.dataformat.MessagePackFactory; import org.rcsb.mmtf.dataholders.MmtfStructure; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; /** * A message pack implementation of the {@link MmtfStructure} serializer / deserializer. * @author Anthony Bradley * */ public class MessagePackSerialization implements MmtfStructureSerializationInterface { + private ObjectMapper objectMapper; + + /** + * Constructor for the {@link MessagePackSerialization} class. + * Generates {@link ObjectMapper} and sets to include non-null. + */ + public MessagePackSerialization() { + objectMapper = new ObjectMapper(new MessagePackFactory()); + objectMapper.setSerializationInclusion(Include.NON_NULL); + } + @Override public MmtfStructure deserialize(InputStream inputStream){ MmtfStructure mmtfBean = null; try { - mmtfBean = new ObjectMapper(new MessagePackFactory()).readValue(inputStream, MmtfStructure.class); ? ^^^^^ -------------------------- + mmtfBean = objectMapper.readValue(inputStream, MmtfStructure.class); ? ^ } catch (IOException e) { e.printStackTrace(); } return mmtfBean; } @Override public void serialize(MmtfStructure mmtfStructure, OutputStream outputStream) { - ObjectMapper objectMapper = new ObjectMapper(new MessagePackFactory()); - objectMapper.setSerializationInclusion(Include.NON_NULL); try { objectMapper.writeValue(outputStream, mmtfStructure); } catch (IOException e) { e.printStackTrace(); } } }
15
0.357143
12
3
d55ad9757411f41f6df2aceb3facd6bb332c605e
.travis.yml
.travis.yml
language: python sudo: false python: - "2.7" - "3.3" - "3.4" - "3.5" - "3.6-dev" - "nightly" - "pypy" - "pypy3" matrix: allow_failures: - python: "3.6-dev" - python: "nightly" install: - pip install codecov tox tox-travis script: - tox after_success: - codecov -e TOXENV notifications: email: false deploy: provider: pypi user: Bruno.Alla distributions: bdist_wheel sdist on: branch: master repo: browniebroke/deezer-python tags: true condition: "$TOXENV = py27" password: secure: gPn7phpAJc0WUAueMM4Drz4Uds/oYghLA8qiyG2GyFP5i2HnJiQDd+ZZVaSsLSVdiAubfR8RPBuKAbpZ3+g1RzHIiACNLh96G0q/rthytlPowxLxrI3c4BfjBTZ7qDdg1GzWcddrSAjKLN1v1OswuZZ7/T4YG7mdT8SMrLJq+8s=
language: python sudo: false python: - "2.7" - "3.3" - "3.4" - "3.5" - "3.6-dev" - "nightly" - "pypy" - "pypy3" matrix: allow_failures: - python: "3.6-dev" - python: "nightly" - python: "pypy3" install: - pip install codecov tox tox-travis script: - tox after_success: - codecov -e TOXENV notifications: email: false deploy: provider: pypi user: Bruno.Alla distributions: bdist_wheel sdist on: branch: master repo: browniebroke/deezer-python tags: true condition: "$TOXENV = py27" password: secure: gPn7phpAJc0WUAueMM4Drz4Uds/oYghLA8qiyG2GyFP5i2HnJiQDd+ZZVaSsLSVdiAubfR8RPBuKAbpZ3+g1RzHIiACNLh96G0q/rthytlPowxLxrI3c4BfjBTZ7qDdg1GzWcddrSAjKLN1v1OswuZZ7/T4YG7mdT8SMrLJq+8s=
Add pypy3 to allowed failures as it's not working yet
Add pypy3 to allowed failures as it's not working yet
YAML
mit
browniebroke/deezer-python,browniebroke/deezer-python,browniebroke/deezer-python
yaml
## Code Before: language: python sudo: false python: - "2.7" - "3.3" - "3.4" - "3.5" - "3.6-dev" - "nightly" - "pypy" - "pypy3" matrix: allow_failures: - python: "3.6-dev" - python: "nightly" install: - pip install codecov tox tox-travis script: - tox after_success: - codecov -e TOXENV notifications: email: false deploy: provider: pypi user: Bruno.Alla distributions: bdist_wheel sdist on: branch: master repo: browniebroke/deezer-python tags: true condition: "$TOXENV = py27" password: secure: gPn7phpAJc0WUAueMM4Drz4Uds/oYghLA8qiyG2GyFP5i2HnJiQDd+ZZVaSsLSVdiAubfR8RPBuKAbpZ3+g1RzHIiACNLh96G0q/rthytlPowxLxrI3c4BfjBTZ7qDdg1GzWcddrSAjKLN1v1OswuZZ7/T4YG7mdT8SMrLJq+8s= ## Instruction: Add pypy3 to allowed failures as it's not working yet ## Code After: language: python sudo: false python: - "2.7" - "3.3" - "3.4" - "3.5" - "3.6-dev" - "nightly" - "pypy" - "pypy3" matrix: allow_failures: - python: "3.6-dev" - python: "nightly" - python: "pypy3" install: - pip install codecov tox tox-travis script: - tox after_success: - codecov -e TOXENV notifications: email: false deploy: provider: pypi user: Bruno.Alla distributions: bdist_wheel sdist on: branch: master repo: browniebroke/deezer-python tags: true condition: "$TOXENV = py27" password: secure: gPn7phpAJc0WUAueMM4Drz4Uds/oYghLA8qiyG2GyFP5i2HnJiQDd+ZZVaSsLSVdiAubfR8RPBuKAbpZ3+g1RzHIiACNLh96G0q/rthytlPowxLxrI3c4BfjBTZ7qDdg1GzWcddrSAjKLN1v1OswuZZ7/T4YG7mdT8SMrLJq+8s=
language: python sudo: false python: - "2.7" - "3.3" - "3.4" - "3.5" - "3.6-dev" - "nightly" - "pypy" - "pypy3" matrix: allow_failures: - python: "3.6-dev" - python: "nightly" + - python: "pypy3" install: - pip install codecov tox tox-travis script: - tox after_success: - codecov -e TOXENV notifications: email: false deploy: provider: pypi user: Bruno.Alla distributions: bdist_wheel sdist on: branch: master repo: browniebroke/deezer-python tags: true condition: "$TOXENV = py27" password: secure: gPn7phpAJc0WUAueMM4Drz4Uds/oYghLA8qiyG2GyFP5i2HnJiQDd+ZZVaSsLSVdiAubfR8RPBuKAbpZ3+g1RzHIiACNLh96G0q/rthytlPowxLxrI3c4BfjBTZ7qDdg1GzWcddrSAjKLN1v1OswuZZ7/T4YG7mdT8SMrLJq+8s=
1
0.02381
1
0
adb14248ea2a0e60a5a8bb0ff1adac2fde2a3c46
glib/Android.mk
glib/Android.mk
LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ gdir.c \ gerror.c \ giochannel.c \ gkeyfile.c \ gmain.c \ gmem.c \ goption.c \ gslice.c \ gslist.c \ gstring.c \ gstrfuncs.c \ gtimer.c \ giounix.c \ gmessages.c \ gutf8.c \ gfileutils.c \ gconvert.c \ gdataset.c \ gtestutils.c \ ghash.c \ glist.c \ gthread.c \ garray.c \ gutils.c \ gatomic.c \ gprintf.c \ gpattern.c \ guniprop.c \ gpoll.c \ grand.c \ gunidecomp.c \ gqsort.c \ gstdio.c LOCAL_C_INCLUDES:= \ $(LOCAL_PATH)/../ \ $(LOCAL_PATH) LOCAL_CFLAGS:= \ -DANDROID_STUB LOCAL_MODULE:=libglib include $(BUILD_SHARED_LIBRARY)
LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ gdir.c \ gerror.c \ giochannel.c \ gkeyfile.c \ gmain.c \ gmem.c \ goption.c \ gslice.c \ gslist.c \ gstring.c \ gstrfuncs.c \ gtimer.c \ giounix.c \ gmessages.c \ gutf8.c \ gfileutils.c \ gconvert.c \ gdataset.c \ gtestutils.c \ ghash.c \ glist.c \ gthread.c \ garray.c \ gutils.c \ gatomic.c \ gprintf.c \ gpattern.c \ guniprop.c \ gpoll.c \ grand.c \ gunidecomp.c \ gqsort.c \ gstdio.c LOCAL_C_INCLUDES:= \ $(LOCAL_PATH)/../ \ $(LOCAL_PATH) LOCAL_CFLAGS:= \ -DANDROID_STUB LOCAL_MODULE:=libglib LOCAL_PRELINK_MODULE := false include $(BUILD_SHARED_LIBRARY)
Fix build - don't prelink glib - do not merge
Fix build - don't prelink glib - do not merge Change-Id: I6b98022ddc8710e1cb65ba7c4cca114ffd14c4a4
Makefile
lgpl-2.1
bhargavkumar040/android-source-browsing.platform--external--bluetooth--glib,codewalkerster/external_bluetooth_glib,tguillem/android-glib,codewalkerster/external_bluetooth_glib,CyanogenMod/android_external_bluetooth_glib,android-ia/platform_external_bluetooth_glib,Pankaj-Sakariya/android-source-browsing.platform--external--bluetooth--glib,android-ia/platform_external_bluetooth_glib,Pankaj-Sakariya/android-source-browsing.platform--external--bluetooth--glib,CyanogenMod/android_external_bluetooth_glib,android-ia/platform_external_bluetooth_glib,ThangBK2009/android-source-browsing.platform--external--bluetooth--glib,AOKP/external_bluetooth_glib,CyanogenMod/android_external_bluetooth_glib,tguillem/android-glib,AOKP/external_bluetooth_glib,CyanogenMod/android_external_bluetooth_glib,Pankaj-Sakariya/android-source-browsing.platform--external--bluetooth--glib,xin3liang/platform_external_bluetooth_glib,cubieboard/openbox_external_bluetooth_glib,codewalkerster/android_external_bluetooth_glib,bhargavkumar040/android-source-browsing.platform--external--bluetooth--glib,AOKP/external_bluetooth_glib,ThangBK2009/android-source-browsing.platform--external--bluetooth--glib,codewalkerster/android_external_bluetooth_glib,cubieboard/openbox_external_bluetooth_glib,cubieboard/openbox_external_bluetooth_glib,xin3liang/platform_external_bluetooth_glib,codewalkerster/android_external_bluetooth_glib,codewalkerster/external_bluetooth_glib,codewalkerster/external_bluetooth_glib,android-ia/platform_external_bluetooth_glib,AOKP/external_bluetooth_glib,ThangBK2009/android-source-browsing.platform--external--bluetooth--glib,cubieboard/openbox_external_bluetooth_glib,bhargavkumar040/android-source-browsing.platform--external--bluetooth--glib,ThangBK2009/android-source-browsing.platform--external--bluetooth--glib,bhargavkumar040/android-source-browsing.platform--external--bluetooth--glib,xin3liang/platform_external_bluetooth_glib,tguillem/android-glib,xin3liang/platform_external_bluetooth_glib,Pankaj-Sakariya/android-source-browsing.platform--external--bluetooth--glib,tguillem/android-glib,codewalkerster/android_external_bluetooth_glib
makefile
## Code Before: LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ gdir.c \ gerror.c \ giochannel.c \ gkeyfile.c \ gmain.c \ gmem.c \ goption.c \ gslice.c \ gslist.c \ gstring.c \ gstrfuncs.c \ gtimer.c \ giounix.c \ gmessages.c \ gutf8.c \ gfileutils.c \ gconvert.c \ gdataset.c \ gtestutils.c \ ghash.c \ glist.c \ gthread.c \ garray.c \ gutils.c \ gatomic.c \ gprintf.c \ gpattern.c \ guniprop.c \ gpoll.c \ grand.c \ gunidecomp.c \ gqsort.c \ gstdio.c LOCAL_C_INCLUDES:= \ $(LOCAL_PATH)/../ \ $(LOCAL_PATH) LOCAL_CFLAGS:= \ -DANDROID_STUB LOCAL_MODULE:=libglib include $(BUILD_SHARED_LIBRARY) ## Instruction: Fix build - don't prelink glib - do not merge Change-Id: I6b98022ddc8710e1cb65ba7c4cca114ffd14c4a4 ## Code After: LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ gdir.c \ gerror.c \ giochannel.c \ gkeyfile.c \ gmain.c \ gmem.c \ goption.c \ gslice.c \ gslist.c \ gstring.c \ gstrfuncs.c \ gtimer.c \ giounix.c \ gmessages.c \ gutf8.c \ gfileutils.c \ gconvert.c \ gdataset.c \ gtestutils.c \ ghash.c \ glist.c \ gthread.c \ garray.c \ gutils.c \ gatomic.c \ gprintf.c \ gpattern.c \ guniprop.c \ gpoll.c \ grand.c \ gunidecomp.c \ gqsort.c \ gstdio.c LOCAL_C_INCLUDES:= \ $(LOCAL_PATH)/../ \ $(LOCAL_PATH) LOCAL_CFLAGS:= \ -DANDROID_STUB LOCAL_MODULE:=libglib LOCAL_PRELINK_MODULE := false include $(BUILD_SHARED_LIBRARY)
LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ gdir.c \ gerror.c \ giochannel.c \ gkeyfile.c \ gmain.c \ gmem.c \ goption.c \ gslice.c \ gslist.c \ gstring.c \ gstrfuncs.c \ gtimer.c \ giounix.c \ gmessages.c \ gutf8.c \ gfileutils.c \ gconvert.c \ gdataset.c \ gtestutils.c \ ghash.c \ glist.c \ gthread.c \ garray.c \ gutils.c \ gatomic.c \ gprintf.c \ gpattern.c \ guniprop.c \ gpoll.c \ grand.c \ gunidecomp.c \ gqsort.c \ gstdio.c LOCAL_C_INCLUDES:= \ $(LOCAL_PATH)/../ \ $(LOCAL_PATH) LOCAL_CFLAGS:= \ -DANDROID_STUB LOCAL_MODULE:=libglib + LOCAL_PRELINK_MODULE := false + include $(BUILD_SHARED_LIBRARY)
2
0.041667
2
0
23b38b89d05fb8c676b80c04d668596bd2ce2509
.travis.yml
.travis.yml
language: node_js cache: npm node_js: - "6.2" os: - linux - osx - windows script: - npm run test
language: node_js cache: npm node_js: - "6.2" os: - windows - osx - linux script: - npm run test - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then npm run build-win; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then npm run build-mac; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then npm run build-linux; fi
Build app in Travis CI on each platform
Build app in Travis CI on each platform
YAML
apache-2.0
pluralsight/mob-timer,pluralsight/mob-timer
yaml
## Code Before: language: node_js cache: npm node_js: - "6.2" os: - linux - osx - windows script: - npm run test ## Instruction: Build app in Travis CI on each platform ## Code After: language: node_js cache: npm node_js: - "6.2" os: - windows - osx - linux script: - npm run test - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then npm run build-win; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then npm run build-mac; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then npm run build-linux; fi
language: node_js cache: npm node_js: - "6.2" os: + - windows + - osx - linux - - osx - - windows script: - npm run test + - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then npm run build-win; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then npm run build-mac; fi + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then npm run build-linux; fi
7
0.7
5
2
547f4da202fd457eec44c20ef15df7508ec911f7
app/models/ecm/cms/navigation.rb
app/models/ecm/cms/navigation.rb
class Ecm::Cms::Navigation < ActiveRecord::Base # associations has_many :ecm_cms_navigation_items, class_name: 'Ecm::Cms::NavigationItem', dependent: :destroy, foreign_key: 'ecm_cms_navigation_id' # validations validates :locale, inclusion: I18n.available_locales.map(&:to_s), allow_nil: true validates :name, presence: true, uniqueness: { scope: [:locale] } delegate :count, to: :ecm_cms_navigation_items, prefix: true def to_s "#{name} (#{locale})" end end
class Ecm::Cms::Navigation < ActiveRecord::Base # associations has_many :ecm_cms_navigation_items, class_name: 'Ecm::Cms::NavigationItem', dependent: :destroy, foreign_key: 'ecm_cms_navigation_id' # validations validates :locale, inclusion: I18n.available_locales.map(&:to_s), allow_nil: true, allow_blank: true validates :name, presence: true, uniqueness: { scope: [:locale] } delegate :count, to: :ecm_cms_navigation_items, prefix: true def to_s "#{name} (#{locale})" end end
Fix validations in rails 5.
Fix validations in rails 5.
Ruby
mit
robotex82/ecm_cms2,robotex82/ecm_cms2,robotex82/ecm_cms2
ruby
## Code Before: class Ecm::Cms::Navigation < ActiveRecord::Base # associations has_many :ecm_cms_navigation_items, class_name: 'Ecm::Cms::NavigationItem', dependent: :destroy, foreign_key: 'ecm_cms_navigation_id' # validations validates :locale, inclusion: I18n.available_locales.map(&:to_s), allow_nil: true validates :name, presence: true, uniqueness: { scope: [:locale] } delegate :count, to: :ecm_cms_navigation_items, prefix: true def to_s "#{name} (#{locale})" end end ## Instruction: Fix validations in rails 5. ## Code After: class Ecm::Cms::Navigation < ActiveRecord::Base # associations has_many :ecm_cms_navigation_items, class_name: 'Ecm::Cms::NavigationItem', dependent: :destroy, foreign_key: 'ecm_cms_navigation_id' # validations validates :locale, inclusion: I18n.available_locales.map(&:to_s), allow_nil: true, allow_blank: true validates :name, presence: true, uniqueness: { scope: [:locale] } delegate :count, to: :ecm_cms_navigation_items, prefix: true def to_s "#{name} (#{locale})" end end
class Ecm::Cms::Navigation < ActiveRecord::Base # associations has_many :ecm_cms_navigation_items, class_name: 'Ecm::Cms::NavigationItem', dependent: :destroy, foreign_key: 'ecm_cms_navigation_id' # validations validates :locale, inclusion: I18n.available_locales.map(&:to_s), - allow_nil: true + allow_nil: true, ? + + allow_blank: true validates :name, presence: true, uniqueness: { scope: [:locale] } delegate :count, to: :ecm_cms_navigation_items, prefix: true def to_s "#{name} (#{locale})" end end
3
0.157895
2
1
5749d5413c1afaa8ac34b1f1a424ce17fe77cb60
requirements.txt
requirements.txt
argparse six>=1.7.0
pbr>=0.6,!=0.7,<1.0 argparse six>=1.7.0
Add pbr to dependency list
Add pbr to dependency list We use pbr to install stevedore, and because of the way setuptools deals with install-time requirements we also need to include it as a runtime dependency to ensure it is installed by pip and not easy_install. Change-Id: I0a39bb50218a1a7cdb2d42fb474a9b8f6d6ac32a Closes-Bug: #1384919
Text
apache-2.0
varunarya10/stevedore,nelsnelson/stevedore,mandeepdhami/stevedore,varunarya10/stevedore,openstack/stevedore,JioCloud/stevedore,JioCloud/stevedore,nelsnelson/stevedore,mandeepdhami/stevedore
text
## Code Before: argparse six>=1.7.0 ## Instruction: Add pbr to dependency list We use pbr to install stevedore, and because of the way setuptools deals with install-time requirements we also need to include it as a runtime dependency to ensure it is installed by pip and not easy_install. Change-Id: I0a39bb50218a1a7cdb2d42fb474a9b8f6d6ac32a Closes-Bug: #1384919 ## Code After: pbr>=0.6,!=0.7,<1.0 argparse six>=1.7.0
+ pbr>=0.6,!=0.7,<1.0 argparse six>=1.7.0
1
0.333333
1
0
9d8d426c452492fb3d5e255d31f2c5f96f257b8d
setup.py
setup.py
from setuptools import setup, find_packages def parse_requirements(requirement_file): with open(requirement_file) as f: return f.readlines() setup( name="swimlane", author="Swimlane LLC", author_email="[email protected]", url="https://github.com/swimlane/swimlane-python", packages=find_packages(exclude=('tests', 'tests.*')), description="A Python client for Swimlane.", install_requires=parse_requirements('./requirements.txt'), setup_requires=[ 'setuptools_scm', 'pytest-runner' ], use_scm_version=True, tests_require=parse_requirements('./test-requirements.txt'), classifiers=[ "License :: OSI Approved :: GNU Affero General Public License v3", "Development Status :: 4 - Beta", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: PyPy" ] )
from setuptools import setup, find_packages def parse_requirements(requirement_file): with open(requirement_file) as f: return f.readlines() setup( name="swimlane", author="Swimlane LLC", author_email="[email protected]", url="https://github.com/swimlane/swimlane-python", packages=find_packages(exclude=('tests', 'tests.*')), description="A Python client for Swimlane.", install_requires=parse_requirements('./requirements.txt'), setup_requires=[ 'setuptools_scm', 'pytest-runner' ], use_scm_version=True, tests_require=parse_requirements('./test-requirements.txt'), classifiers=[ "License :: OSI Approved :: GNU Affero General Public License v3", "Development Status :: 4 - Beta", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", ] )
Remove Pypy from list of supported Python versions
Remove Pypy from list of supported Python versions
Python
mit
Swimlane/sw-python-client
python
## Code Before: from setuptools import setup, find_packages def parse_requirements(requirement_file): with open(requirement_file) as f: return f.readlines() setup( name="swimlane", author="Swimlane LLC", author_email="[email protected]", url="https://github.com/swimlane/swimlane-python", packages=find_packages(exclude=('tests', 'tests.*')), description="A Python client for Swimlane.", install_requires=parse_requirements('./requirements.txt'), setup_requires=[ 'setuptools_scm', 'pytest-runner' ], use_scm_version=True, tests_require=parse_requirements('./test-requirements.txt'), classifiers=[ "License :: OSI Approved :: GNU Affero General Public License v3", "Development Status :: 4 - Beta", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: PyPy" ] ) ## Instruction: Remove Pypy from list of supported Python versions ## Code After: from setuptools import setup, find_packages def parse_requirements(requirement_file): with open(requirement_file) as f: return f.readlines() setup( name="swimlane", author="Swimlane LLC", author_email="[email protected]", url="https://github.com/swimlane/swimlane-python", packages=find_packages(exclude=('tests', 'tests.*')), description="A Python client for Swimlane.", install_requires=parse_requirements('./requirements.txt'), setup_requires=[ 'setuptools_scm', 'pytest-runner' ], use_scm_version=True, tests_require=parse_requirements('./test-requirements.txt'), classifiers=[ "License :: OSI Approved :: GNU Affero General Public License v3", "Development Status :: 4 - Beta", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", ] )
from setuptools import setup, find_packages def parse_requirements(requirement_file): with open(requirement_file) as f: return f.readlines() setup( name="swimlane", author="Swimlane LLC", author_email="[email protected]", url="https://github.com/swimlane/swimlane-python", packages=find_packages(exclude=('tests', 'tests.*')), description="A Python client for Swimlane.", install_requires=parse_requirements('./requirements.txt'), setup_requires=[ 'setuptools_scm', 'pytest-runner' ], use_scm_version=True, tests_require=parse_requirements('./test-requirements.txt'), classifiers=[ "License :: OSI Approved :: GNU Affero General Public License v3", "Development Status :: 4 - Beta", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: Implementation :: PyPy" ] )
1
0.030303
0
1
02365cb9154097ab437c092a7ad5fa620a30a306
src/CollegeCrazies/Bundle/MainBundle/Resources/views/Game/list.html.twig
src/CollegeCrazies/Bundle/MainBundle/Resources/views/Game/list.html.twig
<ul class="game-list unstyled"> {% for game in games %} <li class="game"> <h5>{{ game.name}} {% if game.complete %} (F){% endif %}</h5> <small>{{ game.gameDate|date('m/d/Y') }}</small> {% if game.complete %} {% if game.winner == game.homeTeam %} {% set homeClass = "success" %} {% set awayClass = "important" %} {% else %} {% set homeClass = "important" %} {% set awayClass = "success" %} {% endif %} <p> <span class="label {{ homeClass }}"><strong>{{ game.homeTeam.id }} ({{ game.homeTeamScore }})</strong></span> vs <span class="label {{ awayClass }}"><em>{{ game.awayTeam.id }} ({{ game.awayTeamScore }})</em></span> </p> {% else %} <p>{{ game.homeTeam.id}} vs. {{ game.awayTeam.id }} on <i> {{ game.network }}</i> <time>{{ game.gameDate|date('g:i a') }}</time></p> {% endif %} </li> {% endfor %} </ul>
<ul class="game-list unstyled"> {% for game in games %} <li class="game"> <h5>{{ game.name}} {% if game.complete %} (F){% endif %}</h5> <small>{{ game.gameDate|date('m/d/Y') }}</small> {% if game.complete %} {% if game.winner == game.homeTeam %} {% set homeClass = "success" %} {% set awayClass = "important" %} {% else %} {% set homeClass = "important" %} {% set awayClass = "success" %} {% endif %} <p> <span class="label {{ homeClass }}">{{ game.homeTeam.id }} ({{ game.homeTeamScore }})</span> vs <span class="label {{ awayClass }}">{{ game.awayTeam.id }} ({{ game.awayTeamScore }})</span> </p> {% else %} <p>{{ game.homeTeam.id}} vs. {{ game.awayTeam.id }} on <i> {{ game.network }}</i> <time>{{ game.gameDate|date('g:i a') }}</time></p> {% endif %} </li> {% endfor %} </ul>
Remove markup from game list
Remove markup from game list
Twig
mit
jsuggs/CollegeCrazies,jsuggs/CollegeCrazies,jsuggs/CollegeCrazies,jsuggs/CollegeCrazies,jsuggs/CollegeCrazies,jsuggs/CollegeCrazies
twig
## Code Before: <ul class="game-list unstyled"> {% for game in games %} <li class="game"> <h5>{{ game.name}} {% if game.complete %} (F){% endif %}</h5> <small>{{ game.gameDate|date('m/d/Y') }}</small> {% if game.complete %} {% if game.winner == game.homeTeam %} {% set homeClass = "success" %} {% set awayClass = "important" %} {% else %} {% set homeClass = "important" %} {% set awayClass = "success" %} {% endif %} <p> <span class="label {{ homeClass }}"><strong>{{ game.homeTeam.id }} ({{ game.homeTeamScore }})</strong></span> vs <span class="label {{ awayClass }}"><em>{{ game.awayTeam.id }} ({{ game.awayTeamScore }})</em></span> </p> {% else %} <p>{{ game.homeTeam.id}} vs. {{ game.awayTeam.id }} on <i> {{ game.network }}</i> <time>{{ game.gameDate|date('g:i a') }}</time></p> {% endif %} </li> {% endfor %} </ul> ## Instruction: Remove markup from game list ## Code After: <ul class="game-list unstyled"> {% for game in games %} <li class="game"> <h5>{{ game.name}} {% if game.complete %} (F){% endif %}</h5> <small>{{ game.gameDate|date('m/d/Y') }}</small> {% if game.complete %} {% if game.winner == game.homeTeam %} {% set homeClass = "success" %} {% set awayClass = "important" %} {% else %} {% set homeClass = "important" %} {% set awayClass = "success" %} {% endif %} <p> <span class="label {{ homeClass }}">{{ game.homeTeam.id }} ({{ game.homeTeamScore }})</span> vs <span class="label {{ awayClass }}">{{ game.awayTeam.id }} ({{ game.awayTeamScore }})</span> </p> {% else %} <p>{{ game.homeTeam.id}} vs. {{ game.awayTeam.id }} on <i> {{ game.network }}</i> <time>{{ game.gameDate|date('g:i a') }}</time></p> {% endif %} </li> {% endfor %} </ul>
<ul class="game-list unstyled"> {% for game in games %} <li class="game"> <h5>{{ game.name}} {% if game.complete %} (F){% endif %}</h5> <small>{{ game.gameDate|date('m/d/Y') }}</small> {% if game.complete %} {% if game.winner == game.homeTeam %} {% set homeClass = "success" %} {% set awayClass = "important" %} {% else %} {% set homeClass = "important" %} {% set awayClass = "success" %} {% endif %} <p> - <span class="label {{ homeClass }}"><strong>{{ game.homeTeam.id }} ({{ game.homeTeamScore }})</strong></span> ? -------- --------- + <span class="label {{ homeClass }}">{{ game.homeTeam.id }} ({{ game.homeTeamScore }})</span> vs - <span class="label {{ awayClass }}"><em>{{ game.awayTeam.id }} ({{ game.awayTeamScore }})</em></span> ? ---- ----- + <span class="label {{ awayClass }}">{{ game.awayTeam.id }} ({{ game.awayTeamScore }})</span> </p> {% else %} <p>{{ game.homeTeam.id}} vs. {{ game.awayTeam.id }} on <i> {{ game.network }}</i> <time>{{ game.gameDate|date('g:i a') }}</time></p> {% endif %} </li> {% endfor %} </ul>
4
0.16
2
2
e6918c3e528eca6ea79cc699b7ea8ba964d4e24f
data_bags/users/STAGINGDMZ.json
data_bags/users/STAGINGDMZ.json
{ "id": "admin", "groups": [ "staging" ], "uid": 2001, "shell": "\/bin\/bash", "nagios": { "pager": "[email protected]", "email": "[email protected]" }, }
{ "id": "admin", "groups": [ "staging" ], "htpasswd": "$1$wuvwulpl$e5qocnltmnyysfq.yw8y41", "nagios": { "pager": "[email protected]", "email": "[email protected]" }, }
Remove unnecessary values and add an htpasswd hash so admin can login to Nagios with admin/Bijoux01
Remove unnecessary values and add an htpasswd hash so admin can login to Nagios with admin/Bijoux01 Former-commit-id: 91b1b5ddb0e151035c428d21d3fd5a669fd9658a [formerly 1c22aa2f81554a319baf92c0bc615d480a812cd0] [formerly e98b1cbae6112c8fbde2d32f1e47149c4e983e63 [formerly 8ca4f2a48a5587566dcf9dfce82aa29f5a0a8959]] Former-commit-id: e5f25e9fc75e43457c14ad25cf8c4639df3c077c [formerly f04306966219075ce3ea1ffa88a71a6678d3b106] Former-commit-id: 9addc1f6d878c194ce05c7d0c2ce44001ad2021a
JSON
apache-2.0
marthag8/rundeck,tas50/rundeck,ARentz07/rundeck,edwlarkey/rundeck,marthag8/rundeck,ARentz07/rundeck,ARentz07/rundeck,ronabop/rundeck,tas50/rundeck,marthag8/rundeck,edwlarkey/rundeck,edwlarkey/rundeck,ronabop/rundeck,ronabop/rundeck,tas50/rundeck
json
## Code Before: { "id": "admin", "groups": [ "staging" ], "uid": 2001, "shell": "\/bin\/bash", "nagios": { "pager": "[email protected]", "email": "[email protected]" }, } ## Instruction: Remove unnecessary values and add an htpasswd hash so admin can login to Nagios with admin/Bijoux01 Former-commit-id: 91b1b5ddb0e151035c428d21d3fd5a669fd9658a [formerly 1c22aa2f81554a319baf92c0bc615d480a812cd0] [formerly e98b1cbae6112c8fbde2d32f1e47149c4e983e63 [formerly 8ca4f2a48a5587566dcf9dfce82aa29f5a0a8959]] Former-commit-id: e5f25e9fc75e43457c14ad25cf8c4639df3c077c [formerly f04306966219075ce3ea1ffa88a71a6678d3b106] Former-commit-id: 9addc1f6d878c194ce05c7d0c2ce44001ad2021a ## Code After: { "id": "admin", "groups": [ "staging" ], "htpasswd": "$1$wuvwulpl$e5qocnltmnyysfq.yw8y41", "nagios": { "pager": "[email protected]", "email": "[email protected]" }, }
{ "id": "admin", "groups": [ "staging" ], + "htpasswd": "$1$wuvwulpl$e5qocnltmnyysfq.yw8y41", - "uid": 2001, - "shell": "\/bin\/bash", "nagios": { "pager": "[email protected]", "email": "[email protected]" }, }
3
0.3
1
2
0ac080271a93820605e031d44a6b117e15f9c0b2
docs/index.rst
docs/index.rst
**This documentation has moved to GitHub_.** .. _GitHub: https://simpleflow.readthedocs.io/
This documentation has moved to `GitHub`_. .. _GitHub: https://botify-labs.github.io/simpleflow/
Make redirection link work on readthedocs (2) + fix link
Make redirection link work on readthedocs (2) + fix link
reStructuredText
mit
botify-labs/simpleflow,botify-labs/simpleflow
restructuredtext
## Code Before: **This documentation has moved to GitHub_.** .. _GitHub: https://simpleflow.readthedocs.io/ ## Instruction: Make redirection link work on readthedocs (2) + fix link ## Code After: This documentation has moved to `GitHub`_. .. _GitHub: https://botify-labs.github.io/simpleflow/
- **This documentation has moved to GitHub_.** ? -- -- + This documentation has moved to `GitHub`_. ? + + - .. _GitHub: https://simpleflow.readthedocs.io/ + .. _GitHub: https://botify-labs.github.io/simpleflow/
4
1.333333
2
2
b7919d1e830bbae832c13cef49e3b50750b439da
las_input/run.bash
las_input/run.bash
unzip ./Lasfiles.zip
unzip ./Lasfiles.zip #Rename to remove spaces for simplicity mv ./Lasfiles/Penobscot\ B-41\ LASOut_W4.las ./Lasfiles/Penobscot_B-41_LASOut_W4.las mv ./Lasfiles/Penobscot\ L-30\ LASOut_W7.las ./Lasfiles/Penobscot_L-30_LASOut_W7.las
Rename LAS file to remove spaces from file names.
Rename LAS file to remove spaces from file names.
Shell
mit
MrHiccups/open-well-ties,MrHiccups/open-well-ties,MrHiccups/open-well-ties
shell
## Code Before: unzip ./Lasfiles.zip ## Instruction: Rename LAS file to remove spaces from file names. ## Code After: unzip ./Lasfiles.zip #Rename to remove spaces for simplicity mv ./Lasfiles/Penobscot\ B-41\ LASOut_W4.las ./Lasfiles/Penobscot_B-41_LASOut_W4.las mv ./Lasfiles/Penobscot\ L-30\ LASOut_W7.las ./Lasfiles/Penobscot_L-30_LASOut_W7.las
unzip ./Lasfiles.zip + #Rename to remove spaces for simplicity + mv ./Lasfiles/Penobscot\ B-41\ LASOut_W4.las ./Lasfiles/Penobscot_B-41_LASOut_W4.las + mv ./Lasfiles/Penobscot\ L-30\ LASOut_W7.las ./Lasfiles/Penobscot_L-30_LASOut_W7.las +
4
2
4
0
e24e5ff5e510cab05cc503de0a5c42e84ae508a8
scripts/commands/cfn/stack-set-instances.bash
scripts/commands/cfn/stack-set-instances.bash
while getopts "a:r:" opt; do case "$opt" in a) STACK_SET_ACCOUNT="--stack-instance-account $OPTARG" ;; r) STACK_SET_REGION="--stack-instance-region $OPTARG" ;; esac done shift $(($OPTIND-1)) split_args "$@" STACKSET_LISTING=$(awscli cloudformation list-stack-sets --status ACTIVE --output text --query "sort_by(Summaries,&StackSetName)[$(auto_filter StackSetName -- $FIRST_RESOURCE)].[StackSetName]") select_one StackSet "$STACKSET_LISTING" awscli cloudformation list-stack-instances --max-results 100 --stack-set-name $SELECTED ${STACK_SET_ACCOUNT:-} ${STACK_SET_REGION:-} --output json --query "sort_by(Summaries,&join('',[@.Account,@.Region]))[$(auto_filter Account Region StackId Status -- $SECOND_RESOURCE)].{ \"1.Account\":Account, \"2.Region\":Region, \"3.StackName\":StackId, \"4.Status\":Status, \"4.StatusReason\":StatusReason}" | sed "s/arn.*stack\/\(.*\)\/.*\"/\1\"/g" | print_table ListStackInstances
while getopts "a:r:" opt; do case "$opt" in a) STACK_SET_ACCOUNT="--stack-instance-account $OPTARG" ;; r) STACK_SET_REGION="--stack-instance-region $OPTARG" ;; esac done shift $(($OPTIND-1)) split_args "$@" STACKSET_LISTING=$(awscli cloudformation list-stack-sets --status ACTIVE --output text --query "sort_by(Summaries,&StackSetName)[$(auto_filter StackSetName -- $FIRST_RESOURCE)].[StackSetName]") select_one StackSet "$STACKSET_LISTING" awscli cloudformation list-stack-instances --stack-set-name $SELECTED ${STACK_SET_ACCOUNT:-} ${STACK_SET_REGION:-} --output json --query "sort_by(Summaries,&join('',[@.Account,@.Region]))[$(auto_filter Account Region StackId Status StatusReason -- $SECOND_RESOURCE)].{ \"1.Account\":Account, \"2.Region\":Region, \"3.StackName\":StackId, \"4.Status\":Status, \"4.StatusReason\":StatusReason}" | sed "s/arn.*stack\/\(.*\)\/.*\"/\1\"/g" | print_table ListStackInstances
Remove Stack Results from cfn stack listing
Remove Stack Results from cfn stack listing
Shell
mit
flomotlik/awsinfo,flomotlik/awsinfo
shell
## Code Before: while getopts "a:r:" opt; do case "$opt" in a) STACK_SET_ACCOUNT="--stack-instance-account $OPTARG" ;; r) STACK_SET_REGION="--stack-instance-region $OPTARG" ;; esac done shift $(($OPTIND-1)) split_args "$@" STACKSET_LISTING=$(awscli cloudformation list-stack-sets --status ACTIVE --output text --query "sort_by(Summaries,&StackSetName)[$(auto_filter StackSetName -- $FIRST_RESOURCE)].[StackSetName]") select_one StackSet "$STACKSET_LISTING" awscli cloudformation list-stack-instances --max-results 100 --stack-set-name $SELECTED ${STACK_SET_ACCOUNT:-} ${STACK_SET_REGION:-} --output json --query "sort_by(Summaries,&join('',[@.Account,@.Region]))[$(auto_filter Account Region StackId Status -- $SECOND_RESOURCE)].{ \"1.Account\":Account, \"2.Region\":Region, \"3.StackName\":StackId, \"4.Status\":Status, \"4.StatusReason\":StatusReason}" | sed "s/arn.*stack\/\(.*\)\/.*\"/\1\"/g" | print_table ListStackInstances ## Instruction: Remove Stack Results from cfn stack listing ## Code After: while getopts "a:r:" opt; do case "$opt" in a) STACK_SET_ACCOUNT="--stack-instance-account $OPTARG" ;; r) STACK_SET_REGION="--stack-instance-region $OPTARG" ;; esac done shift $(($OPTIND-1)) split_args "$@" STACKSET_LISTING=$(awscli cloudformation list-stack-sets --status ACTIVE --output text --query "sort_by(Summaries,&StackSetName)[$(auto_filter StackSetName -- $FIRST_RESOURCE)].[StackSetName]") select_one StackSet "$STACKSET_LISTING" awscli cloudformation list-stack-instances --stack-set-name $SELECTED ${STACK_SET_ACCOUNT:-} ${STACK_SET_REGION:-} --output json --query "sort_by(Summaries,&join('',[@.Account,@.Region]))[$(auto_filter Account Region StackId Status StatusReason -- $SECOND_RESOURCE)].{ \"1.Account\":Account, \"2.Region\":Region, \"3.StackName\":StackId, \"4.Status\":Status, \"4.StatusReason\":StatusReason}" | sed "s/arn.*stack\/\(.*\)\/.*\"/\1\"/g" | print_table ListStackInstances
while getopts "a:r:" opt; do case "$opt" in a) STACK_SET_ACCOUNT="--stack-instance-account $OPTARG" ;; r) STACK_SET_REGION="--stack-instance-region $OPTARG" ;; esac done shift $(($OPTIND-1)) split_args "$@" STACKSET_LISTING=$(awscli cloudformation list-stack-sets --status ACTIVE --output text --query "sort_by(Summaries,&StackSetName)[$(auto_filter StackSetName -- $FIRST_RESOURCE)].[StackSetName]") select_one StackSet "$STACKSET_LISTING" - awscli cloudformation list-stack-instances --max-results 100 --stack-set-name $SELECTED ${STACK_SET_ACCOUNT:-} ${STACK_SET_REGION:-} --output json --query "sort_by(Summaries,&join('',[@.Account,@.Region]))[$(auto_filter Account Region StackId Status -- $SECOND_RESOURCE)].{ ? ------------------ + awscli cloudformation list-stack-instances --stack-set-name $SELECTED ${STACK_SET_ACCOUNT:-} ${STACK_SET_REGION:-} --output json --query "sort_by(Summaries,&join('',[@.Account,@.Region]))[$(auto_filter Account Region StackId Status StatusReason -- $SECOND_RESOURCE)].{ ? +++++++++++++ \"1.Account\":Account, \"2.Region\":Region, \"3.StackName\":StackId, \"4.Status\":Status, \"4.StatusReason\":StatusReason}" | sed "s/arn.*stack\/\(.*\)\/.*\"/\1\"/g" | print_table ListStackInstances
2
0.1
1
1
fca77e996741ee780342247e7ad2fe75db84133a
.travis.yml
.travis.yml
sudo: false language: node_js node_js: - "4.5" - "6.4" - "0.12"
sudo: false language: node_js node_js: - "0.12" - "4.8.0" - "6.10.0" - "7.7.1"
Update NodeJS versions which are used in Travis CI
Update NodeJS versions which are used in Travis CI
YAML
mit
lautis/rollup-plugin-coffee-script
yaml
## Code Before: sudo: false language: node_js node_js: - "4.5" - "6.4" - "0.12" ## Instruction: Update NodeJS versions which are used in Travis CI ## Code After: sudo: false language: node_js node_js: - "0.12" - "4.8.0" - "6.10.0" - "7.7.1"
sudo: false language: node_js node_js: - - "4.5" - - "6.4" - "0.12" + - "4.8.0" + - "6.10.0" + - "7.7.1"
5
0.833333
3
2
4635fcd9fa04709f156a492fb9141e48917c5fc0
source/layouts/layout.haml
source/layouts/layout.haml
!!! 5 %html{ lang: 'en' } %head %meta{ charset: 'utf-8' } %meta{ content: 'IE=edge,chrome=1', 'http-equiv' => 'X-UA-Compatible' } %meta{ name: 'viewport', content: 'width=device-width, initial-scale=1.0' } / Use title if it's in the page YAML frontmatter %title= current_page.data.title || 'The Middleman' = stylesheet_link_tag 'application' %body{ class: page_classes } = yield = javascript_include_tag 'vendor' = javascript_include_tag 'application'
!!! 5 %html{ lang: 'en' } %head %meta{ charset: 'utf-8' } %meta{ content: 'IE=edge,chrome=1', 'http-equiv' => 'X-UA-Compatible' } %meta{ name: 'viewport', content: 'width=device-width, initial-scale=1.0' } / Use title if it's in the page YAML frontmatter %title= current_page.data.title || 'The Middleman' = stylesheet_link_tag 'application' %body = yield = javascript_include_tag 'vendor' = javascript_include_tag 'application'
Remove page_classes method from the body tag
Remove page_classes method from the body tag
Haml
mit
drewbarontini/baseman,drewbarontini/baseman,drewbarontini/baseman
haml
## Code Before: !!! 5 %html{ lang: 'en' } %head %meta{ charset: 'utf-8' } %meta{ content: 'IE=edge,chrome=1', 'http-equiv' => 'X-UA-Compatible' } %meta{ name: 'viewport', content: 'width=device-width, initial-scale=1.0' } / Use title if it's in the page YAML frontmatter %title= current_page.data.title || 'The Middleman' = stylesheet_link_tag 'application' %body{ class: page_classes } = yield = javascript_include_tag 'vendor' = javascript_include_tag 'application' ## Instruction: Remove page_classes method from the body tag ## Code After: !!! 5 %html{ lang: 'en' } %head %meta{ charset: 'utf-8' } %meta{ content: 'IE=edge,chrome=1', 'http-equiv' => 'X-UA-Compatible' } %meta{ name: 'viewport', content: 'width=device-width, initial-scale=1.0' } / Use title if it's in the page YAML frontmatter %title= current_page.data.title || 'The Middleman' = stylesheet_link_tag 'application' %body = yield = javascript_include_tag 'vendor' = javascript_include_tag 'application'
!!! 5 %html{ lang: 'en' } %head %meta{ charset: 'utf-8' } %meta{ content: 'IE=edge,chrome=1', 'http-equiv' => 'X-UA-Compatible' } %meta{ name: 'viewport', content: 'width=device-width, initial-scale=1.0' } / Use title if it's in the page YAML frontmatter %title= current_page.data.title || 'The Middleman' = stylesheet_link_tag 'application' - %body{ class: page_classes } + %body = yield = javascript_include_tag 'vendor' = javascript_include_tag 'application'
2
0.111111
1
1
83c880a69cecde81b0e38404df0f4e31dd5c48c3
app/services/diff_outputs.rb
app/services/diff_outputs.rb
class DiffOutputs def initialize filename1, filename2 @filename1 = filename1 @filename2 = filename2 end def call line_counter = 0 File.readlines(filename1).zip(File.readlines(filename2)).each do |line1, line2| line_counter += 1 stripped_line1 = line1.nil? ? line1 : line1.strip stripped_line2 = line2.nil? ? line2 : line2.strip if stripped_line1 != stripped_line2 unless stripped_line1.blank? && stripped_line2.blank? puts "Difference at line #{ line_counter }:" puts "=== #{ filename1 }" puts "> #{ line1 }" puts "=== #{ filename2 }" puts "< #{ line2 }" return false end end end true end private attr_reader :filename1, :filename2 end
class DiffOutputs def initialize filename1, filename2 @filename1 = filename1 @filename2 = filename2 end def call line_counter = 0 File.readlines(filename1).zip(File.readlines(filename2)).each do |line1, line2| line_counter += 1 stripped_line1 = line1.nil? ? '' : line1.strip stripped_line2 = line2.nil? ? '' : line2.strip if stripped_line1 != stripped_line2 unless stripped_line1.blank? && stripped_line2.blank? puts "Difference at line #{ line_counter }:" puts "=== #{ filename1 }" puts "> #{ stripped_line1[0..100] }" puts "=== #{ filename2 }" puts "< #{ stripped_line2[0..100] }" return false end end end true end private attr_reader :filename1, :filename2 end
Fix the file differ to handle long outputs.
Fix the file differ to handle long outputs.
Ruby
mit
antonrd/codemarathon-grader,antonrd/codemarathon-grader,antonrd/codemarathon-grader,antonrd/codemarathon-grader,antonrd/codemarathon-grader
ruby
## Code Before: class DiffOutputs def initialize filename1, filename2 @filename1 = filename1 @filename2 = filename2 end def call line_counter = 0 File.readlines(filename1).zip(File.readlines(filename2)).each do |line1, line2| line_counter += 1 stripped_line1 = line1.nil? ? line1 : line1.strip stripped_line2 = line2.nil? ? line2 : line2.strip if stripped_line1 != stripped_line2 unless stripped_line1.blank? && stripped_line2.blank? puts "Difference at line #{ line_counter }:" puts "=== #{ filename1 }" puts "> #{ line1 }" puts "=== #{ filename2 }" puts "< #{ line2 }" return false end end end true end private attr_reader :filename1, :filename2 end ## Instruction: Fix the file differ to handle long outputs. ## Code After: class DiffOutputs def initialize filename1, filename2 @filename1 = filename1 @filename2 = filename2 end def call line_counter = 0 File.readlines(filename1).zip(File.readlines(filename2)).each do |line1, line2| line_counter += 1 stripped_line1 = line1.nil? ? '' : line1.strip stripped_line2 = line2.nil? ? '' : line2.strip if stripped_line1 != stripped_line2 unless stripped_line1.blank? && stripped_line2.blank? puts "Difference at line #{ line_counter }:" puts "=== #{ filename1 }" puts "> #{ stripped_line1[0..100] }" puts "=== #{ filename2 }" puts "< #{ stripped_line2[0..100] }" return false end end end true end private attr_reader :filename1, :filename2 end
class DiffOutputs def initialize filename1, filename2 @filename1 = filename1 @filename2 = filename2 end def call line_counter = 0 File.readlines(filename1).zip(File.readlines(filename2)).each do |line1, line2| line_counter += 1 - stripped_line1 = line1.nil? ? line1 : line1.strip ? ^^^^^ + stripped_line1 = line1.nil? ? '' : line1.strip ? ^^ - stripped_line2 = line2.nil? ? line2 : line2.strip ? ^^^^^ + stripped_line2 = line2.nil? ? '' : line2.strip ? ^^ if stripped_line1 != stripped_line2 unless stripped_line1.blank? && stripped_line2.blank? puts "Difference at line #{ line_counter }:" puts "=== #{ filename1 }" - puts "> #{ line1 }" + puts "> #{ stripped_line1[0..100] }" ? +++++++++ ++++++++ puts "=== #{ filename2 }" - puts "< #{ line2 }" + puts "< #{ stripped_line2[0..100] }" ? +++++++++ ++++++++ return false end end end true end private attr_reader :filename1, :filename2 end
8
0.242424
4
4
064ea4945629ab3694c995c49542e96d2fb447c3
.travis.yml
.travis.yml
language: objective-c osx_image: xcode7.2b1 xcode_project: SwiftMongoDB.xcodeproj xcode_scheme: SwiftMongoDB-OSX before_install: - brew update - brew install carthage install: - carthage bootstrap --verbose script: - xcodebuild -project SwiftMongoDB.xcodeproj -scheme SwiftMongoDB-OSX test -sdk macosx | xcpretty -c - carthage build --no-skip-current
language: objective-c osx_image: xcode7.2b1 xcode_project: SwiftMongoDB.xcodeproj xcode_scheme: SwiftMongoDB-OSX before_install: - brew update - brew install carthage install: - carthage bootstrap --verbose --platform macosx,ios | xcpretty -c script: - xcodebuild -project SwiftMongoDB.xcodeproj -scheme SwiftMongoDB-OSX test -sdk macosx | xcpretty -c - carthage build --platform macosx,ios --no-skip-current --verbose | xcpretty -c
Update Carthage build parameters in Travis
Update Carthage build parameters in Travis
YAML
mit
Danappelxx/SwiftMongoDB
yaml
## Code Before: language: objective-c osx_image: xcode7.2b1 xcode_project: SwiftMongoDB.xcodeproj xcode_scheme: SwiftMongoDB-OSX before_install: - brew update - brew install carthage install: - carthage bootstrap --verbose script: - xcodebuild -project SwiftMongoDB.xcodeproj -scheme SwiftMongoDB-OSX test -sdk macosx | xcpretty -c - carthage build --no-skip-current ## Instruction: Update Carthage build parameters in Travis ## Code After: language: objective-c osx_image: xcode7.2b1 xcode_project: SwiftMongoDB.xcodeproj xcode_scheme: SwiftMongoDB-OSX before_install: - brew update - brew install carthage install: - carthage bootstrap --verbose --platform macosx,ios | xcpretty -c script: - xcodebuild -project SwiftMongoDB.xcodeproj -scheme SwiftMongoDB-OSX test -sdk macosx | xcpretty -c - carthage build --platform macosx,ios --no-skip-current --verbose | xcpretty -c
language: objective-c osx_image: xcode7.2b1 xcode_project: SwiftMongoDB.xcodeproj xcode_scheme: SwiftMongoDB-OSX before_install: - brew update - brew install carthage install: - - carthage bootstrap --verbose + - carthage bootstrap --verbose --platform macosx,ios | xcpretty -c script: - xcodebuild -project SwiftMongoDB.xcodeproj -scheme SwiftMongoDB-OSX test -sdk macosx | xcpretty -c - - carthage build --no-skip-current + - carthage build --platform macosx,ios --no-skip-current --verbose | xcpretty -c
4
0.333333
2
2
05b506a4ae67133501bfdb18701d533f50d8a301
terminal/bash/functions.sh
terminal/bash/functions.sh
reload() { deactivate 2> /dev/null exec $SHELL -l } # Makes new Python virtualenv for current directory mkvirtualenv() { virtualenv -p "$1" "$VIRTUAL_ENV_NAME" # Activate virtualenv so packages can be installed source ./"$VIRTUAL_ENV_NAME"/bin/activate } # Removes existing Python virtualenv rmvirtualenv() { rm -r ./"$VIRTUAL_ENV_NAME" } # Flushes all DNS caches for OS X 10.10.4 and onward flushdns() { sudo dscacheutil -flushcache sudo killall -HUP mDNSResponder }
''#!/bin/bash # functions.sh # Caleb Evans # Reloads entire shell, including .bash_profile and any activated virtualenv reload() { deactivate 2> /dev/null exec $SHELL -l } # Makes new Python virtualenv for current directory mkvirtualenv() { virtualenv --python="$1" "$VIRTUAL_ENV_NAME" # Activate virtualenv so packages can be installed source ./"$VIRTUAL_ENV_NAME"/bin/activate } # Removes existing Python virtualenv rmvirtualenv() { rm -r ./"$VIRTUAL_ENV_NAME" } # Flushes all DNS caches for OS X 10.10.4 and onward flushdns() { sudo dscacheutil -flushcache sudo killall -HUP mDNSResponder }
Expand -p to --python for virtualenv command
Expand -p to --python for virtualenv command
Shell
mit
caleb531/dotfiles,caleb531/dotfiles,caleb531/dotfiles,caleb531/dotfiles
shell
## Code Before: reload() { deactivate 2> /dev/null exec $SHELL -l } # Makes new Python virtualenv for current directory mkvirtualenv() { virtualenv -p "$1" "$VIRTUAL_ENV_NAME" # Activate virtualenv so packages can be installed source ./"$VIRTUAL_ENV_NAME"/bin/activate } # Removes existing Python virtualenv rmvirtualenv() { rm -r ./"$VIRTUAL_ENV_NAME" } # Flushes all DNS caches for OS X 10.10.4 and onward flushdns() { sudo dscacheutil -flushcache sudo killall -HUP mDNSResponder } ## Instruction: Expand -p to --python for virtualenv command ## Code After: ''#!/bin/bash # functions.sh # Caleb Evans # Reloads entire shell, including .bash_profile and any activated virtualenv reload() { deactivate 2> /dev/null exec $SHELL -l } # Makes new Python virtualenv for current directory mkvirtualenv() { virtualenv --python="$1" "$VIRTUAL_ENV_NAME" # Activate virtualenv so packages can be installed source ./"$VIRTUAL_ENV_NAME"/bin/activate } # Removes existing Python virtualenv rmvirtualenv() { rm -r ./"$VIRTUAL_ENV_NAME" } # Flushes all DNS caches for OS X 10.10.4 and onward flushdns() { sudo dscacheutil -flushcache sudo killall -HUP mDNSResponder }
+ ''#!/bin/bash + # functions.sh + # Caleb Evans + + # Reloads entire shell, including .bash_profile and any activated virtualenv reload() { deactivate 2> /dev/null exec $SHELL -l } # Makes new Python virtualenv for current directory mkvirtualenv() { - virtualenv -p "$1" "$VIRTUAL_ENV_NAME" ? ^ + virtualenv --python="$1" "$VIRTUAL_ENV_NAME" ? + ^^^^^^ # Activate virtualenv so packages can be installed source ./"$VIRTUAL_ENV_NAME"/bin/activate } # Removes existing Python virtualenv rmvirtualenv() { rm -r ./"$VIRTUAL_ENV_NAME" } # Flushes all DNS caches for OS X 10.10.4 and onward flushdns() { sudo dscacheutil -flushcache sudo killall -HUP mDNSResponder }
7
0.318182
6
1
23bf8e9097c1cdf62dee9f2568401dfcbe7f1bbe
macosx-miniconda/.bash.d/50-macosx-miniconda.sh
macosx-miniconda/.bash.d/50-macosx-miniconda.sh
is_interactive_shell || return ## Set up PATH for Anaconda miniconda Python distribution # Actually, I prefer to have the "system" Python in PATH by default, and # choose to use the conda ones only by explicitly setting a conda env #PATH=$(path_prepend "$PATH" "~/miniconda/bin") ## Convenience function for Anaconda miniconda Python environments cenv () { source ~/miniconda/bin/activate $* }
is_interactive_shell || return ## Set up PATH for Anaconda miniconda Python distribution # Actually, I prefer to have the "system" Python in PATH by default, and # choose to use the conda ones only by explicitly setting a conda env #PATH=$(path_prepend "$PATH" "~/miniconda/bin") ## Convenience function for Anaconda miniconda Python environments cenv () { if [ -z "$*" ]; then echo "CONDA_DEFAULT_ENV: ${CONDA_DEFAULT_ENV:-[none]}" else source ~/miniconda/bin/activate $* fi }
Make conda cenv alias show current environment
Make conda cenv alias show current environment ... if no environment is given as argument.
Shell
mit
ehdr/dotfiles,ehdr/dotfiles
shell
## Code Before: is_interactive_shell || return ## Set up PATH for Anaconda miniconda Python distribution # Actually, I prefer to have the "system" Python in PATH by default, and # choose to use the conda ones only by explicitly setting a conda env #PATH=$(path_prepend "$PATH" "~/miniconda/bin") ## Convenience function for Anaconda miniconda Python environments cenv () { source ~/miniconda/bin/activate $* } ## Instruction: Make conda cenv alias show current environment ... if no environment is given as argument. ## Code After: is_interactive_shell || return ## Set up PATH for Anaconda miniconda Python distribution # Actually, I prefer to have the "system" Python in PATH by default, and # choose to use the conda ones only by explicitly setting a conda env #PATH=$(path_prepend "$PATH" "~/miniconda/bin") ## Convenience function for Anaconda miniconda Python environments cenv () { if [ -z "$*" ]; then echo "CONDA_DEFAULT_ENV: ${CONDA_DEFAULT_ENV:-[none]}" else source ~/miniconda/bin/activate $* fi }
is_interactive_shell || return ## Set up PATH for Anaconda miniconda Python distribution # Actually, I prefer to have the "system" Python in PATH by default, and # choose to use the conda ones only by explicitly setting a conda env #PATH=$(path_prepend "$PATH" "~/miniconda/bin") ## Convenience function for Anaconda miniconda Python environments cenv () { + if [ -z "$*" ]; then + echo "CONDA_DEFAULT_ENV: ${CONDA_DEFAULT_ENV:-[none]}" + else - source ~/miniconda/bin/activate $* + source ~/miniconda/bin/activate $* ? ++ + fi }
6
0.545455
5
1
4e63db0d699eeb7a313708f82c129637222e1014
src/penn_chime/utils.py
src/penn_chime/utils.py
"""Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe to a base64-encoded CSV representation of that data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(str_excel_filename) -> str: data = open(str_excel_filename, 'rb').read() b64 = b64encode(data).decode() return b64
"""Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe into csv base64-encoded data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(filename: str) -> str: """Converts an excel document into base64-encoded data.""" with open(filename, 'rb') as fin: return b64encode(fin.read()).decode()
Update excel_to_base64 to always close file handles
Update excel_to_base64 to always close file handles
Python
mit
CodeForPhilly/chime,CodeForPhilly/chime,CodeForPhilly/chime
python
## Code Before: """Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe to a base64-encoded CSV representation of that data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(str_excel_filename) -> str: data = open(str_excel_filename, 'rb').read() b64 = b64encode(data).decode() return b64 ## Instruction: Update excel_to_base64 to always close file handles ## Code After: """Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe into csv base64-encoded data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(filename: str) -> str: """Converts an excel document into base64-encoded data.""" with open(filename, 'rb') as fin: return b64encode(fin.read()).decode()
"""Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: - """Converts a dataframe to a base64-encoded CSV representation of that data. ? ^ --------------------------- + """Converts a dataframe into csv base64-encoded data. ? ++ ^^^ This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 - def excel_to_base64(str_excel_filename) -> str: ? ---------- + def excel_to_base64(filename: str) -> str: ? +++++ - data = open(str_excel_filename, 'rb').read() - b64 = b64encode(data).decode() - return b64 + """Converts an excel document into base64-encoded data.""" + with open(filename, 'rb') as fin: + return b64encode(fin.read()).decode()
10
0.416667
5
5
d4e5af537be36bd50405e60fdb46f31b88537916
src/commoner_i/views.py
src/commoner_i/views.py
from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User from django.http import HttpResponse def badge(request, username, size=''): # serve the inactive badge by default filename = 'images/badge/%sinactive.png' % size # get a handle for the user profile profile = get_object_or_404(User, username=username) profile = profile.get_profile() if profile.active: # serve the active badge filename = 'images/badge%s/active.png' % size # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), content_type='image/png')
from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User from django.http import HttpResponse, Http404 def badge(request, username, size=''): # serve the inactive badge by default filename = 'images/badge/%sinactive.png' % size # get a handle for the user profile profile = get_object_or_404(User, username=username) profile = profile.get_profile() if profile.free: # return a 404 for FREE profiles raise Http404 if profile.active: # serve the active badge filename = 'images/badge%s/active.png' % size # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), content_type='image/png')
Raise a 404 for FREE profile badge requests
Raise a 404 for FREE profile badge requests
Python
agpl-3.0
cc-archive/commoner,cc-archive/commoner
python
## Code Before: from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User from django.http import HttpResponse def badge(request, username, size=''): # serve the inactive badge by default filename = 'images/badge/%sinactive.png' % size # get a handle for the user profile profile = get_object_or_404(User, username=username) profile = profile.get_profile() if profile.active: # serve the active badge filename = 'images/badge%s/active.png' % size # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), content_type='image/png') ## Instruction: Raise a 404 when for FREE profile badge requests ## Code After: from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User from django.http import HttpResponse, Http404 def badge(request, username, size=''): # serve the inactive badge by default filename = 'images/badge/%sinactive.png' % size # get a handle for the user profile profile = get_object_or_404(User, username=username) profile = profile.get_profile() if profile.free: # return a 404 for FREE profiles raise Http404 if profile.active: # serve the active badge filename = 'images/badge%s/active.png' % size # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), content_type='image/png')
from django.core.files.storage import default_storage from django.shortcuts import get_object_or_404 from django.contrib.auth.models import User - from django.http import HttpResponse + from django.http import HttpResponse, Http404 ? +++++++++ def badge(request, username, size=''): # serve the inactive badge by default filename = 'images/badge/%sinactive.png' % size # get a handle for the user profile profile = get_object_or_404(User, username=username) profile = profile.get_profile() + if profile.free: + # return a 404 for FREE profiles + raise Http404 + if profile.active: - # serve the active badge + # serve the active badge ? + - filename = 'images/badge%s/active.png' % size + filename = 'images/badge%s/active.png' % size ? + # set the content type appropriately return HttpResponse(default_storage.open(filename).read(), content_type='image/png')
10
0.47619
7
3
f31f1e5263f317ad0a1a27c02bb6fe19948ac0e5
resources/views/inc/photo_masonry.js.twig
resources/views/inc/photo_masonry.js.twig
<figure class="gallery-item-masonry masonry-150"> \ <div class="gallery-item-content"> \ <div class="gallery-item-content-images"> \ <a href="{{ image.url_800 }}" rel="gallery-0" title="{{ image.title|escape }}"> \ <img src="{{ image.url_thumb }}" width="{{ image.dimensions.width }}" height="{{ image.dimensions.height }}"> \ </a> \ </div> \ <figcaption class="wp-caption-text showcase-caption">{{ image.caption.title|escape }}<br><span class="wp-caption-credit">Credit: {{ image.caption.credit|escape }}</span></figcaption> \ </div> \ </figure>
<figure class="gallery-item-masonry masonry-150"> \ <div class="gallery-item-content"> \ <div class="gallery-item-content-images"> \ <a href="{{ image.url_800 }}" rel="gallery-0" title="{{ image.title|escape }}"> \ <img src="{{ image.url_thumb }}" width="{{ image.dimensions.width }}" height="{{ image.dimensions.height }}"> \ </a> \ </div> \ <figcaption class="wp-caption-text showcase-caption">{{ image.caption.title|trim|escape }}<br><span class="wp-caption-credit">Credit: {{ image.caption.credit|trim|escape }}</span></figcaption> \ </div> \ </figure>
Make sure we trim the output
Make sure we trim the output
Twig
bsd-3-clause
petervanderdoes/AVH-Raritan-Photographic-Society,petervanderdoes/AVH-Raritan-Photographic-Society,petervanderdoes/AVH-Raritan-Photographic-Society,petervanderdoes/AVH-Raritan-Photographic-Society
twig
## Code Before: <figure class="gallery-item-masonry masonry-150"> \ <div class="gallery-item-content"> \ <div class="gallery-item-content-images"> \ <a href="{{ image.url_800 }}" rel="gallery-0" title="{{ image.title|escape }}"> \ <img src="{{ image.url_thumb }}" width="{{ image.dimensions.width }}" height="{{ image.dimensions.height }}"> \ </a> \ </div> \ <figcaption class="wp-caption-text showcase-caption">{{ image.caption.title|escape }}<br><span class="wp-caption-credit">Credit: {{ image.caption.credit|escape }}</span></figcaption> \ </div> \ </figure> ## Instruction: Make sure we trim the output ## Code After: <figure class="gallery-item-masonry masonry-150"> \ <div class="gallery-item-content"> \ <div class="gallery-item-content-images"> \ <a href="{{ image.url_800 }}" rel="gallery-0" title="{{ image.title|escape }}"> \ <img src="{{ image.url_thumb }}" width="{{ image.dimensions.width }}" height="{{ image.dimensions.height }}"> \ </a> \ </div> \ <figcaption class="wp-caption-text showcase-caption">{{ image.caption.title|trim|escape }}<br><span class="wp-caption-credit">Credit: {{ image.caption.credit|trim|escape }}</span></figcaption> \ </div> \ </figure>
<figure class="gallery-item-masonry masonry-150"> \ <div class="gallery-item-content"> \ <div class="gallery-item-content-images"> \ <a href="{{ image.url_800 }}" rel="gallery-0" title="{{ image.title|escape }}"> \ <img src="{{ image.url_thumb }}" width="{{ image.dimensions.width }}" height="{{ image.dimensions.height }}"> \ </a> \ </div> \ - <figcaption class="wp-caption-text showcase-caption">{{ image.caption.title|escape }}<br><span class="wp-caption-credit">Credit: {{ image.caption.credit|escape }}</span></figcaption> \ + <figcaption class="wp-caption-text showcase-caption">{{ image.caption.title|trim|escape }}<br><span class="wp-caption-credit">Credit: {{ image.caption.credit|trim|escape }}</span></figcaption> \ ? +++++ +++++ </div> \ </figure>
2
0.2
1
1
3c9aa011f815a9fe7e81b3efddfacf0566da21cb
app/controllers/calendars_controller.rb
app/controllers/calendars_controller.rb
class CalendarsController < ApplicationController def show raise ActionController::RoutingError.new('Not Found') if current_region.nil? @start_selector = StartSelector.new(start_date) @calendar = Calendar.new(start_date, current_region, current_user) end private def start_date @start_date ||= params[:start].present? ? Date.parse(params[:start]) : Date.today end end
class CalendarsController < ApplicationController before_action :require_region!, only: [ :show ] def show @start_selector = StartSelector.new(start_date) @calendar = Calendar.new(start_date, current_region, current_user) end private def start_date @start_date ||= params[:start].present? ? Date.parse(params[:start]) : Date.today end # Raise a Not Found Routing Exception if no region was set def require_region! raise ActionController::RoutingError.new('Not Found') if current_region.nil? end end
Move checking for the route to a before_filter
Move checking for the route to a before_filter
Ruby
mit
hacken-in/hacken-in,hacken-in/website,hacken-in/hacken-in,hacken-in/website,hacken-in/hacken-in,hacken-in/hacken-in,hacken-in/website,hacken-in/website
ruby
## Code Before: class CalendarsController < ApplicationController def show raise ActionController::RoutingError.new('Not Found') if current_region.nil? @start_selector = StartSelector.new(start_date) @calendar = Calendar.new(start_date, current_region, current_user) end private def start_date @start_date ||= params[:start].present? ? Date.parse(params[:start]) : Date.today end end ## Instruction: Move checking for the route to a before_filter ## Code After: class CalendarsController < ApplicationController before_action :require_region!, only: [ :show ] def show @start_selector = StartSelector.new(start_date) @calendar = Calendar.new(start_date, current_region, current_user) end private def start_date @start_date ||= params[:start].present? ? Date.parse(params[:start]) : Date.today end # Raise a Not Found Routing Exception if no region was set def require_region! raise ActionController::RoutingError.new('Not Found') if current_region.nil? end end
class CalendarsController < ApplicationController + before_action :require_region!, only: [ :show ] def show - raise ActionController::RoutingError.new('Not Found') if current_region.nil? - @start_selector = StartSelector.new(start_date) @calendar = Calendar.new(start_date, current_region, current_user) end private def start_date @start_date ||= params[:start].present? ? Date.parse(params[:start]) : Date.today end + + # Raise a Not Found Routing Exception if no region was set + def require_region! + raise ActionController::RoutingError.new('Not Found') if current_region.nil? + end end
8
0.533333
6
2
7db480aaa2cb7d54c90bb4d804b5a71cd5295432
src/ethanjones/cubes/launcher/UncaughtExceptionHandler.java
src/ethanjones/cubes/launcher/UncaughtExceptionHandler.java
package ethanjones.cubes.launcher; import javax.swing.JOptionPane; public class UncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { public static final UncaughtExceptionHandler INSTANCE = new UncaughtExceptionHandler(); private UncaughtExceptionHandler() { } @Override public void uncaughtException(Thread t, Throwable e) { JOptionPane.showMessageDialog(null, e.toString(), "Cubes Launcher Error", JOptionPane.ERROR_MESSAGE); } }
package ethanjones.cubes.launcher; import javax.swing.JOptionPane; public class UncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { public static final UncaughtExceptionHandler INSTANCE = new UncaughtExceptionHandler(); private UncaughtExceptionHandler() { } @Override public void uncaughtException(Thread t, Throwable e) { try { System.err.println("Uncaught exception:"); e.printStackTrace(); } catch (Exception ignored) { } try { JOptionPane.showMessageDialog(null, e.toString(), "Cubes Launcher Error", JOptionPane.ERROR_MESSAGE); } catch (Exception ignored) { } } }
Print uncaught exceptions to System.err as well as displaying dialog box
Print uncaught exceptions to System.err as well as displaying dialog box
Java
mit
ictrobot/CubesLauncher
java
## Code Before: package ethanjones.cubes.launcher; import javax.swing.JOptionPane; public class UncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { public static final UncaughtExceptionHandler INSTANCE = new UncaughtExceptionHandler(); private UncaughtExceptionHandler() { } @Override public void uncaughtException(Thread t, Throwable e) { JOptionPane.showMessageDialog(null, e.toString(), "Cubes Launcher Error", JOptionPane.ERROR_MESSAGE); } } ## Instruction: Print uncaught exceptions to System.err as well as displaying dialog box ## Code After: package ethanjones.cubes.launcher; import javax.swing.JOptionPane; public class UncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { public static final UncaughtExceptionHandler INSTANCE = new UncaughtExceptionHandler(); private UncaughtExceptionHandler() { } @Override public void uncaughtException(Thread t, Throwable e) { try { System.err.println("Uncaught exception:"); e.printStackTrace(); } catch (Exception ignored) { } try { JOptionPane.showMessageDialog(null, e.toString(), "Cubes Launcher Error", JOptionPane.ERROR_MESSAGE); } catch (Exception ignored) { } } }
package ethanjones.cubes.launcher; import javax.swing.JOptionPane; public class UncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { public static final UncaughtExceptionHandler INSTANCE = new UncaughtExceptionHandler(); private UncaughtExceptionHandler() { } @Override public void uncaughtException(Thread t, Throwable e) { + try { + System.err.println("Uncaught exception:"); + e.printStackTrace(); + } catch (Exception ignored) { + } + try { - JOptionPane.showMessageDialog(null, e.toString(), "Cubes Launcher Error", JOptionPane.ERROR_MESSAGE); + JOptionPane.showMessageDialog(null, e.toString(), "Cubes Launcher Error", JOptionPane.ERROR_MESSAGE); ? ++ + } catch (Exception ignored) { + } } }
10
0.625
9
1
0d8751046a31520215a4c741eeabe1d0edb7be92
src/com/samovich/java/basics/collections/list/arraylist/ArrayListForEach.java
src/com/samovich/java/basics/collections/list/arraylist/ArrayListForEach.java
package com.samovich.java.basics.collections.list.arraylist; /** * Created by Samovich on 1/20/2015. */ public class ArrayListForEach { }
/** * @file ArrayListForEach.java * @author Valery Samovich * @version 1 * @date 2014/01/09 */ /* * Following code shows an example how to create a Collection of type List * which is parameterized with <String> to indicate to the Java compiler * that only Strings are allowed in this list. */ package com.samovich.java.basics.collections.list.arraylist; import java.util.ArrayList; import java.util.List; public class ArrayListForEach { public static void main(String[] args) { // Create a list, use is ArrayList as concrete type // ArrayList<> infers the String type from the left side List<String> myList = new ArrayList<String>(); myList.add("string1"); myList.add("string2"); myList.add("string3"); // Loop over it and print the result to the console for(String value : myList) { System.out.println(value); } } }
Refactor the code and add the ArrayList with a For Statement
Refactor the code and add the ArrayList with a For Statement
Java
unknown
vsamov/java-technologies,vsamov/java-technologies,valerysamovich/java-technologies
java
## Code Before: package com.samovich.java.basics.collections.list.arraylist; /** * Created by Samovich on 1/20/2015. */ public class ArrayListForEach { } ## Instruction: Refactor the code and add the ArrayList with For Statement ## Code After: /** * @file ArrayListForEach.java * @author Valery Samovich * @version 1 * @date 2014/01/09 */ /* * Following code shows an example how to create a Collection of type List * which is parameterized with <String> to indicate to the Java compiler * that only Strings are allowed in this list. */ package com.samovich.java.basics.collections.list.arraylist; import java.util.ArrayList; import java.util.List; public class ArrayListForEach { public static void main(String[] args) { // Create a list, use is ArrayList as concrete type // ArrayList<> infers the String type from the left side List<String> myList = new ArrayList<String>(); myList.add("string1"); myList.add("string2"); myList.add("string3"); // Loop over it and print the result to the console for(String value : myList) { System.out.println(value); } } }
+ /** + * @file ArrayListForEach.java + * @author Valery Samovich + * @version 1 + * @date 2014/01/09 + */ + + /* + * Following code shows an example how to create a Collection of type List + * which is parameterized with <String> to indicate to the Java compiler + * that only Strings are allowed in this list. + */ package com.samovich.java.basics.collections.list.arraylist; - /** - * Created by Samovich on 1/20/2015. - */ + import java.util.ArrayList; + import java.util.List; + public class ArrayListForEach { + + public static void main(String[] args) { + + // Create a list, use is ArrayList as concrete type + // ArrayList<> infers the String type from the left side + List<String> myList = new ArrayList<String>(); + + myList.add("string1"); + myList.add("string2"); + myList.add("string3"); + + // Loop over it and print the result to the console + for(String value : myList) { + System.out.println(value); + } + } }
34
4.857143
31
3
beb5f824bf0c404eace53cede68c6f325bfc379d
app/Http/Controllers/DashboardController.php
app/Http/Controllers/DashboardController.php
<?php namespace App\Http\Controllers; use App\DuesTransaction; use Illuminate\Http\Request; class DashboardController extends Controller { /** * Returns view with data for the user dashboard * * @param Request $request * @return mixed */ public function index(Request $request) { $user = $request->user(); $needsPayment = $user->needsPayment; $transactions = DuesTransaction::where('user_id', $user->id) ->whereHas('package', function ($q) { $q->whereDate('effective_start', '<=', date('Y-m-d')) ->whereDate('effective_end', '>=', date('Y-m-d')); })->get(); $needsTransaction = (count($transactions) == 0); $data = ['needsTransaction' => $needsTransaction, 'needsPayment' => $needsPayment]; return view('welcome', $data); } }
<?php namespace App\Http\Controllers; use App\DuesTransaction; use Illuminate\Http\Request; class DashboardController extends Controller { /** * Returns view with data for the user dashboard * * @param Request $request * @return mixed */ public function index(Request $request) { //User needs a transaction if they don't have one for an active dues package $user = $request->user(); $transactionsQuery = DuesTransaction::where('user_id', $user->id) ->whereHas('package', function ($q) { $q->whereDate('effective_start', '<=', date('Y-m-d')) ->whereDate('effective_end', '>=', date('Y-m-d')); }); $needsTransaction = (count($transactionsQuery->get()) == 0); /* User needs a payment if they either: * (1) Have a DuesTransaction for an active DuesPackage with payment less than payable amount, OR * (2) Have a DuesTransaction for an active DuesPackage without any payment attempts */ //Get transactions with payments $txnWithPayment = $transactionsQuery->whereHas('payment')->get(); if (count($txnWithPayment) > 0) { //Compare sum of payments for last transaction to package payable amount $paidSum = $txnWithPayment->last()->payment->sum('amount'); $needsPayment = ($paidSum < $txnWithPayment->last()->getPayableAmount()); } elseif ($needsTransaction == false && count($txnWithPayment) == 0) { //Transaction already exists, but no payment attempts have been made $needsPayment = true; } else { //Transaction already exists, full amount has been paid //I don't think we'll ever make it to this part of the conditional $needsPayment = false; } $data = ['needsTransaction' => $needsTransaction, 'needsPayment' => $needsPayment]; return view('welcome', $data); } }
Put payment logic here instead of in the user model; Allow for multiple payments instead of a single payment
Put payment logic here instead of in the user model; Allow for multiple payments instead of a single payment
PHP
apache-2.0
RoboJackets/apiary,RoboJackets/apiary
php
## Code Before: <?php namespace App\Http\Controllers; use App\DuesTransaction; use Illuminate\Http\Request; class DashboardController extends Controller { /** * Returns view with data for the user dashboard * * @param Request $request * @return mixed */ public function index(Request $request) { $user = $request->user(); $needsPayment = $user->needsPayment; $transactions = DuesTransaction::where('user_id', $user->id) ->whereHas('package', function ($q) { $q->whereDate('effective_start', '<=', date('Y-m-d')) ->whereDate('effective_end', '>=', date('Y-m-d')); })->get(); $needsTransaction = (count($transactions) == 0); $data = ['needsTransaction' => $needsTransaction, 'needsPayment' => $needsPayment]; return view('welcome', $data); } } ## Instruction: Put payment logic here instead of in the user model; Allow for multiple payments instead of a single payment ## Code After: <?php namespace App\Http\Controllers; use App\DuesTransaction; use Illuminate\Http\Request; class DashboardController extends Controller { /** * Returns view with data for the user dashboard * * @param Request $request * @return mixed */ public function index(Request $request) { //User needs a transaction if they don't have one for an active dues package $user = $request->user(); $transactionsQuery = DuesTransaction::where('user_id', $user->id) ->whereHas('package', function ($q) { $q->whereDate('effective_start', '<=', date('Y-m-d')) ->whereDate('effective_end', '>=', date('Y-m-d')); }); $needsTransaction = (count($transactionsQuery->get()) == 0); /* User needs a payment if they either: * (1) Have a DuesTransaction for an active DuesPackage with payment less than payable amount, OR * (2) Have a DuesTransaction for an active DuesPackage without any payment attempts */ //Get transactions with payments $txnWithPayment = $transactionsQuery->whereHas('payment')->get(); if (count($txnWithPayment) > 0) { //Compare sum of payments for last transaction to package payable amount $paidSum = $txnWithPayment->last()->payment->sum('amount'); $needsPayment = ($paidSum < $txnWithPayment->last()->getPayableAmount()); } elseif ($needsTransaction == false && count($txnWithPayment) == 0) { //Transaction already exists, but no payment attempts have been made $needsPayment = true; } else { //Transaction already exists, full amount has been paid //I don't think we'll ever make it to this part of the conditional $needsPayment = false; } $data = ['needsTransaction' => $needsTransaction, 'needsPayment' => $needsPayment]; return view('welcome', $data); } }
<?php namespace App\Http\Controllers; use App\DuesTransaction; use Illuminate\Http\Request; class DashboardController extends Controller { /** * Returns view with data for the user dashboard * * @param Request $request * @return mixed */ public function index(Request $request) { + //User needs a transaction if they don't have one for an active dues package $user = $request->user(); - $needsPayment = $user->needsPayment; - $transactions = DuesTransaction::where('user_id', $user->id) + $transactionsQuery = DuesTransaction::where('user_id', $user->id) ? +++++ ->whereHas('package', function ($q) { $q->whereDate('effective_start', '<=', date('Y-m-d')) ->whereDate('effective_end', '>=', date('Y-m-d')); - })->get(); ? ------- + }); - $needsTransaction = (count($transactions) == 0); + $needsTransaction = (count($transactionsQuery->get()) == 0); ? ++++++++++++ + + /* User needs a payment if they either: + * (1) Have a DuesTransaction for an active DuesPackage with payment less than payable amount, OR + * (2) Have a DuesTransaction for an active DuesPackage without any payment attempts + */ + //Get transactions with payments + $txnWithPayment = $transactionsQuery->whereHas('payment')->get(); + if (count($txnWithPayment) > 0) { + //Compare sum of payments for last transaction to package payable amount + $paidSum = $txnWithPayment->last()->payment->sum('amount'); + $needsPayment = ($paidSum < $txnWithPayment->last()->getPayableAmount()); + } elseif ($needsTransaction == false && count($txnWithPayment) == 0) { + //Transaction already exists, but no payment attempts have been made + $needsPayment = true; + } else { + //Transaction already exists, full amount has been paid + //I don't think we'll ever make it to this part of the conditional + $needsPayment = false; + } + $data = ['needsTransaction' => $needsTransaction, 'needsPayment' => $needsPayment]; return view('welcome', $data); } }
28
0.965517
24
4
78df0a964c8c19d1833cd6855392bf0f37559caf
showrss.go
showrss.go
package main import ( "fmt" "log" "net/http" "os" "os/signal" "showrss/handlers" "flag" "syscall" "github.com/braintree/manners" ) const version = "1.0.0" func main() { var httpAddr = flag.String("http", "localhost:7000", "HTTP service address") flag.Parse() log.Println("Starting server ...") log.Printf("HTTP service listening on %s", *httpAddr) errChan := make(chan error, 10) mux := http.NewServeMux() mux.HandleFunc("/", handlers.EpisodeHandler) httpServer := manners.NewServer() httpServer.Addr = *httpAddr httpServer.Handler = handlers.LoggingHandler(mux) go func() { errChan <- httpServer.ListenAndServe() }() signalChan := make(chan os.Signal, 1) signal.Notify(signalChan, syscall.SIGINT, syscall.SIGTERM) for { select { case err := <-errChan: if err != nil { log.Fatal(err) } case s := <-signalChan: log.Println(fmt.Sprintf("Captured %v. Exiting...", s)) httpServer.BlockingClose() os.Exit(0) } } }
package main import ( "fmt" "log" "net/http" "os" "os/signal" "showrss/handlers" "flag" "syscall" "github.com/braintree/manners" ) const version = "1.0.0" func main() { var httpAddr = flag.String("http", "0.0.0.0:8000", "HTTP service address") flag.Parse() log.Println("Starting server ...") log.Printf("HTTP service listening on %s", *httpAddr) errChan := make(chan error, 10) mux := http.NewServeMux() mux.HandleFunc("/", handlers.HelloHandler) httpServer := manners.NewServer() httpServer.Addr = *httpAddr httpServer.Handler = handlers.LoggingHandler(mux) go func() { errChan <- httpServer.ListenAndServe() }() signalChan := make(chan os.Signal, 1) signal.Notify(signalChan, syscall.SIGINT, syscall.SIGTERM) for { select { case err := <-errChan: if err != nil { log.Fatal(err) } case s := <-signalChan: log.Println(fmt.Sprintf("Captured %v. Exiting...", s)) httpServer.BlockingClose() os.Exit(0) } } }
Change default addr for http server
Change default addr for http server
Go
mit
teambrookie/showrss,teambrookie/showrss
go
## Code Before: package main import ( "fmt" "log" "net/http" "os" "os/signal" "showrss/handlers" "flag" "syscall" "github.com/braintree/manners" ) const version = "1.0.0" func main() { var httpAddr = flag.String("http", "localhost:7000", "HTTP service address") flag.Parse() log.Println("Starting server ...") log.Printf("HTTP service listening on %s", *httpAddr) errChan := make(chan error, 10) mux := http.NewServeMux() mux.HandleFunc("/", handlers.EpisodeHandler) httpServer := manners.NewServer() httpServer.Addr = *httpAddr httpServer.Handler = handlers.LoggingHandler(mux) go func() { errChan <- httpServer.ListenAndServe() }() signalChan := make(chan os.Signal, 1) signal.Notify(signalChan, syscall.SIGINT, syscall.SIGTERM) for { select { case err := <-errChan: if err != nil { log.Fatal(err) } case s := <-signalChan: log.Println(fmt.Sprintf("Captured %v. Exiting...", s)) httpServer.BlockingClose() os.Exit(0) } } } ## Instruction: Change default addr for http server ## Code After: package main import ( "fmt" "log" "net/http" "os" "os/signal" "showrss/handlers" "flag" "syscall" "github.com/braintree/manners" ) const version = "1.0.0" func main() { var httpAddr = flag.String("http", "0.0.0.0:8000", "HTTP service address") flag.Parse() log.Println("Starting server ...") log.Printf("HTTP service listening on %s", *httpAddr) errChan := make(chan error, 10) mux := http.NewServeMux() mux.HandleFunc("/", handlers.HelloHandler) httpServer := manners.NewServer() httpServer.Addr = *httpAddr httpServer.Handler = handlers.LoggingHandler(mux) go func() { errChan <- httpServer.ListenAndServe() }() signalChan := make(chan os.Signal, 1) signal.Notify(signalChan, syscall.SIGINT, syscall.SIGTERM) for { select { case err := <-errChan: if err != nil { log.Fatal(err) } case s := <-signalChan: log.Println(fmt.Sprintf("Captured %v. Exiting...", s)) httpServer.BlockingClose() os.Exit(0) } } }
package main import ( "fmt" "log" "net/http" "os" "os/signal" "showrss/handlers" "flag" "syscall" "github.com/braintree/manners" ) const version = "1.0.0" func main() { - var httpAddr = flag.String("http", "localhost:7000", "HTTP service address") ? ^^^^^^^^^ ^ + var httpAddr = flag.String("http", "0.0.0.0:8000", "HTTP service address") ? ^^^^^^^ ^ flag.Parse() log.Println("Starting server ...") log.Printf("HTTP service listening on %s", *httpAddr) errChan := make(chan error, 10) mux := http.NewServeMux() - mux.HandleFunc("/", handlers.EpisodeHandler) ? ^^^^ -- + mux.HandleFunc("/", handlers.HelloHandler) ? ^^^^ httpServer := manners.NewServer() httpServer.Addr = *httpAddr httpServer.Handler = handlers.LoggingHandler(mux) go func() { errChan <- httpServer.ListenAndServe() }() signalChan := make(chan os.Signal, 1) signal.Notify(signalChan, syscall.SIGINT, syscall.SIGTERM) for { select { case err := <-errChan: if err != nil { log.Fatal(err) } case s := <-signalChan: log.Println(fmt.Sprintf("Captured %v. Exiting...", s)) httpServer.BlockingClose() os.Exit(0) } } }
4
0.072727
2
2
e66505d2205383d1d37c45271043cedeef2a306f
README.md
README.md
This uses Jekyll and GitHub pages. To compile the sources to static HTML follow the instructions from GitHub: https://help.github.com/articles/using-jekyll-with-pages/ To run the site locally install ruby bundler and run: ``` bundle exec jekyll serve ```
This uses Jekyll and GitHub pages. To compile the sources to static HTML follow the instructions from GitHub: https://help.github.com/articles/using-jekyll-with-pages/ To run the site locally install ruby bundler and run: ``` bundle exec jekyll serve ``` ##### Avoid the nav bar overlapping headers in the content (in-document links) - #5 This is achieved by the following CSS snippet in [theme.css](css/theme.css). It introduces an invisible pseudo element before the element in question. ``` .anchor:before { display: block; content: " "; margin-top: -30px; height: 30px; visibility: hidden; } ``` Use this by assigning class `anchor` to the target element. E.g., `<article class="anchor" id="download">`.
Add documentation for overlap fix
Add documentation for overlap fix
Markdown
apache-2.0
corpus-tools/corpus-tools.github.io,corpus-tools/corpus-tools.github.io,corpus-tools/corpus-tools.github.io,corpus-tools/corpus-tools.github.io
markdown
## Code Before: This uses Jekyll and GitHub pages. To compile the sources to static HTML follow the instructions from GitHub: https://help.github.com/articles/using-jekyll-with-pages/ To run the site locally install ruby bundler and run: ``` bundle exec jekyll serve ``` ## Instruction: Add documentation for overlap fix ## Code After: This uses Jekyll and GitHub pages. To compile the sources to static HTML follow the instructions from GitHub: https://help.github.com/articles/using-jekyll-with-pages/ To run the site locally install ruby bundler and run: ``` bundle exec jekyll serve ``` ##### Avoid the nav bar overlapping headers in the content (in-document links) - #5 This is achieved by the following CSS snippet in [theme.css](css/theme.css). It introduces an invisible pseudo element before the element in question. ``` .anchor:before { display: block; content: " "; margin-top: -30px; height: 30px; visibility: hidden; } ``` Use this by assigning class `anchor` to the target element. E.g., `<article class="anchor" id="download">`.
This uses Jekyll and GitHub pages. To compile the sources to static HTML follow the instructions from GitHub: https://help.github.com/articles/using-jekyll-with-pages/ To run the site locally install ruby bundler and run: ``` bundle exec jekyll serve ``` + + ##### Avoid the nav bar overlapping headers in the content (in-document links) - #5 + + This is achieved by the following CSS snippet in [theme.css](css/theme.css). It introduces an invisible pseudo element before the element in question. + + ``` + .anchor:before { + display: block; + content: " "; + margin-top: -30px; + height: 30px; + visibility: hidden; + } + ``` + + Use this by assigning class `anchor` to the target element. E.g., `<article class="anchor" id="download">`.
16
1.777778
16
0
29ce3458d8474870805dd1439cf857d19519bfb1
crypto/engine/eng_all.c
crypto/engine/eng_all.c
/* * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ #include "internal/cryptlib.h" #include "eng_int.h" void ENGINE_load_builtin_engines(void) { /* Some ENGINEs need this */ OPENSSL_cpuid_setup(); OPENSSL_init_crypto(OPENSSL_INIT_ENGINE_ALL_BUILTIN, NULL); } #if (defined(__OpenBSD__) || defined(__FreeBSD__) || defined(__DragonFly__)) \ && !OPENSSL_API_1_1_0 void ENGINE_setup_bsd_cryptodev(void) { } #endif
/* * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ #include "internal/cryptlib.h" #include "eng_int.h" void ENGINE_load_builtin_engines(void) { OPENSSL_init_crypto(OPENSSL_INIT_ENGINE_ALL_BUILTIN, NULL); } #if (defined(__OpenBSD__) || defined(__FreeBSD__) || defined(__DragonFly__)) \ && !OPENSSL_API_1_1_0 void ENGINE_setup_bsd_cryptodev(void) { } #endif
Remove superfluous call to OPENSSL_cpuid_setup
Remove superfluous call to OPENSSL_cpuid_setup Signed-off-by: Patrick Steuer <[email protected]> Reviewed-by: Kurt Roeckx <[email protected]> Reviewed-by: Matt Caswell <[email protected]> (Merged from https://github.com/openssl/openssl/pull/9417)
C
apache-2.0
openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl
c
## Code Before: /* * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ #include "internal/cryptlib.h" #include "eng_int.h" void ENGINE_load_builtin_engines(void) { /* Some ENGINEs need this */ OPENSSL_cpuid_setup(); OPENSSL_init_crypto(OPENSSL_INIT_ENGINE_ALL_BUILTIN, NULL); } #if (defined(__OpenBSD__) || defined(__FreeBSD__) || defined(__DragonFly__)) \ && !OPENSSL_API_1_1_0 void ENGINE_setup_bsd_cryptodev(void) { } #endif ## Instruction: Remove superfluous call to OPENSSL_cpuid_setup Signed-off-by: Patrick Steuer <[email protected]> Reviewed-by: Kurt Roeckx <[email protected]> Reviewed-by: Matt Caswell <[email protected]> (Merged from https://github.com/openssl/openssl/pull/9417) ## Code After: /* * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ #include "internal/cryptlib.h" #include "eng_int.h" void ENGINE_load_builtin_engines(void) { OPENSSL_init_crypto(OPENSSL_INIT_ENGINE_ALL_BUILTIN, NULL); } #if (defined(__OpenBSD__) || defined(__FreeBSD__) || defined(__DragonFly__)) \ && !OPENSSL_API_1_1_0 void ENGINE_setup_bsd_cryptodev(void) { } #endif
/* * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ #include "internal/cryptlib.h" #include "eng_int.h" void ENGINE_load_builtin_engines(void) { - /* Some ENGINEs need this */ - OPENSSL_cpuid_setup(); - OPENSSL_init_crypto(OPENSSL_INIT_ENGINE_ALL_BUILTIN, NULL); } #if (defined(__OpenBSD__) || defined(__FreeBSD__) || defined(__DragonFly__)) \ && !OPENSSL_API_1_1_0 void ENGINE_setup_bsd_cryptodev(void) { } #endif
3
0.115385
0
3
8413df377e4517c29d6627be7c03af6c99754c27
lib/index.js
lib/index.js
/** * Dependencies * --------------------------------------------------------------------------*/ var Promise = require('bluebird'), _ = require('underscore'); /** * Returns a promise of a document's save operation */ function saveP(doc) { return Promise.promisify(doc.save, doc)() .then(_.last); } /** * Returns a promise of a document's remove operation */ function removeP(doc) { return Promise.promisify(doc.remove, doc)() .then(_.last); } /** * Returns a promise of a Model's findOne operation, with an optional not found * error message */ function findOneP(model, query, not_found_msg) { var findOne = Promise.promisify(model.findOne, model), docP = findOne(query); if(not_found_msg) { return docP.then(function(doc) { if(!doc){ throw _.extend(new Error(not_found_msg), { status: 404 }); } else return doc; }); } else return docP; } function findByIdP(model, id, not_found_msg) { return findOneP(model, id, not_found_msg); } module.exports = { saveP: saveP, removeP: removeP, findOneP: findOneP, findByIdP: findByIdP };
/** * Dependencies * --------------------------------------------------------------------------*/ var Promise = require('bluebird'), _ = require('underscore'); /** * Returns a promise of a document's save operation */ function saveP(doc) { return Promise.promisify(doc.save, doc)() .then(_.last); } /** * Returns a promise of a document's remove operation */ function removeP(doc) { return Promise.promisify(doc.remove, doc)() .then(_.last); } /** * Returns a promise of a Model's findOne operation, with an optional not found * error message */ function findOneP(model, query, not_found_msg) { var findOne = model.findOneAsync || Promise.promisify(model.findOne, model), docP = findOne(query); if(not_found_msg) { return docP.then(function(doc) { if(!doc) throw _.extend(new Error(not_found_msg), { status: 404 }); else return doc; }); } else return docP; } function findByIdP(model, id, not_found_msg) { return findOneP(model, id, not_found_msg); } module.exports = { saveP: saveP, removeP: removeP, findOneP: findOneP, findByIdP: findByIdP };
Tweak the findOneP function slightly
Tweak the findOneP function slightly
JavaScript
mit
yamadapc/mongoose-bluebird-utils
javascript
## Code Before: /** * Dependencies * --------------------------------------------------------------------------*/ var Promise = require('bluebird'), _ = require('underscore'); /** * Returns a promise of a document's save operation */ function saveP(doc) { return Promise.promisify(doc.save, doc)() .then(_.last); } /** * Returns a promise of a document's remove operation */ function removeP(doc) { return Promise.promisify(doc.remove, doc)() .then(_.last); } /** * Returns a promise of a Model's findOne operation, with an optional not found * error message */ function findOneP(model, query, not_found_msg) { var findOne = Promise.promisify(model.findOne, model), docP = findOne(query); if(not_found_msg) { return docP.then(function(doc) { if(!doc){ throw _.extend(new Error(not_found_msg), { status: 404 }); } else return doc; }); } else return docP; } function findByIdP(model, id, not_found_msg) { return findOneP(model, id, not_found_msg); } module.exports = { saveP: saveP, removeP: removeP, findOneP: findOneP, findByIdP: findByIdP }; ## Instruction: Tweak the findOneP function slightly ## Code After: /** * Dependencies * --------------------------------------------------------------------------*/ var Promise = require('bluebird'), _ = require('underscore'); /** * Returns a promise of a document's save operation */ function saveP(doc) { return Promise.promisify(doc.save, doc)() .then(_.last); } /** * Returns a promise of a document's remove operation */ function removeP(doc) { return Promise.promisify(doc.remove, doc)() .then(_.last); } /** * Returns a promise of a Model's findOne operation, with an optional not found * error message */ function findOneP(model, query, not_found_msg) { var findOne = model.findOneAsync || Promise.promisify(model.findOne, model), docP = findOne(query); if(not_found_msg) { return docP.then(function(doc) { if(!doc) throw _.extend(new Error(not_found_msg), { status: 404 }); else return doc; }); } else return docP; } function findByIdP(model, id, not_found_msg) { return findOneP(model, id, not_found_msg); } module.exports = { saveP: saveP, removeP: removeP, findOneP: findOneP, findByIdP: findByIdP };
/** * Dependencies * --------------------------------------------------------------------------*/ var Promise = require('bluebird'), _ = require('underscore'); /** * Returns a promise of a document's save operation */ function saveP(doc) { return Promise.promisify(doc.save, doc)() .then(_.last); } /** * Returns a promise of a document's remove operation */ function removeP(doc) { return Promise.promisify(doc.remove, doc)() .then(_.last); } /** * Returns a promise of a Model's findOne operation, with an optional not found * error message */ function findOneP(model, query, not_found_msg) { - var findOne = Promise.promisify(model.findOne, model), + var findOne = model.findOneAsync || Promise.promisify(model.findOne, model), ? ++++++++++++++++++++++ docP = findOne(query); if(not_found_msg) { return docP.then(function(doc) { - if(!doc){ - throw _.extend(new Error(not_found_msg), ? ^ + if(!doc) throw _.extend(new Error(not_found_msg), { status: 404 }); ? ^^^^^^^^ ++++++++++++++++++ - { status: 404 }); - } else return doc; ? -- + else return doc; ? ++++ }); } else return docP; } function findByIdP(model, id, not_found_msg) { return findOneP(model, id, not_found_msg); } module.exports = { saveP: saveP, removeP: removeP, findOneP: findOneP, findByIdP: findByIdP };
8
0.145455
3
5
c42b456db02cb3f785045307004efbd872be7bd2
app/helpers/admin/avatar_helper.rb
app/helpers/admin/avatar_helper.rb
module Admin::AvatarHelper def avatar_url(email, size) gravatar_id = Digest::MD5.hexdigest(email.downcase) "http://gravatar.com/avatar/#{gravatar_id}.png?s=#{size}" end def small_avatar_image(user) image_tag avatar_url(user.email, 25) end def large_avatar_image(user) image_tag avatar_url(user.email, 50) end end
module Admin::AvatarHelper def avatar_url(user, size) gravatar_id = Digest::MD5.hexdigest(user.email.downcase) "http://gravatar.com/avatar/#{gravatar_id}.png?s=#{size}" end def small_avatar_image(user) image_tag avatar_url(user, 25) end def large_avatar_image(user) image_tag avatar_url(user, 50) end end
Make interface of avatar helper methods consistent.
Make interface of avatar helper methods consistent.
Ruby
mit
fw42/cubecomp,fw42/cubecomp,fw42/cubecomp,fw42/cubecomp
ruby
## Code Before: module Admin::AvatarHelper def avatar_url(email, size) gravatar_id = Digest::MD5.hexdigest(email.downcase) "http://gravatar.com/avatar/#{gravatar_id}.png?s=#{size}" end def small_avatar_image(user) image_tag avatar_url(user.email, 25) end def large_avatar_image(user) image_tag avatar_url(user.email, 50) end end ## Instruction: Make interface of avatar helper methods consistent. ## Code After: module Admin::AvatarHelper def avatar_url(user, size) gravatar_id = Digest::MD5.hexdigest(user.email.downcase) "http://gravatar.com/avatar/#{gravatar_id}.png?s=#{size}" end def small_avatar_image(user) image_tag avatar_url(user, 25) end def large_avatar_image(user) image_tag avatar_url(user, 50) end end
module Admin::AvatarHelper - def avatar_url(email, size) ? ^^^^ + def avatar_url(user, size) ? ++ ^ - gravatar_id = Digest::MD5.hexdigest(email.downcase) + gravatar_id = Digest::MD5.hexdigest(user.email.downcase) ? +++++ "http://gravatar.com/avatar/#{gravatar_id}.png?s=#{size}" end def small_avatar_image(user) - image_tag avatar_url(user.email, 25) ? ------ + image_tag avatar_url(user, 25) end def large_avatar_image(user) - image_tag avatar_url(user.email, 50) ? ------ + image_tag avatar_url(user, 50) end end
8
0.571429
4
4
90cbf4c99471c8f9a6415a665186c9e7de0305c6
.publish-docs.sh
.publish-docs.sh
set -e echo TRAVIS_PULL_REQUEST=$TRAVIS_PULL_REQUEST echo TRAVIS_BRANCH=$TRAVIS_BRANCH if [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_BRANCH" == "master" ]; then echo "Publishing generated documentation..." DIST_DIR="$(pwd)/dist" # Clone gh-pages cd git config --global user.email "[email protected]" git config --global user.name "travis-ci" git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/troykinsella/junkie gh-pages > /dev/null # Update gh-pages cd gh-pages git rm -rf coverage > /dev/null git rm -rf docs > /dev/null cp -R $DIST_DIR/coverage coverage cp -R $DIST_DIR/docs docs # Commit and push changes git add -f . git commit -m "Generated docs for master build $TRAVIS_BUILD_NUMBER" git push -fq origin gh-pages > /dev/null echo "Successfully published generated documentation" fi
set -e if [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_BRANCH" == "master" ]; then echo "Publishing generated documentation..." DIST_DIR="$(pwd)/dist" # Clone gh-pages cd git config --global user.email "[email protected]" git config --global user.name "travis-ci" git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/troykinsella/junkie gh-pages > /dev/null # Update gh-pages cd gh-pages git rm -rf coverage > /dev/null git rm -rf docs > /dev/null cp -R $DIST_DIR/coverage coverage cp -R $DIST_DIR/docs docs # Commit and push changes git add -f . git commit -m "Generated docs for master build $TRAVIS_BUILD_NUMBER" git push -fq origin gh-pages > /dev/null echo "Successfully published generated documentation" fi
Revert debugging code for travis jsdoc publication troubleshooting.
Revert debugging code for travis jsdoc publication troubleshooting.
Shell
mit
troykinsella/junkie,troykinsella/junkie,troykinsella/junkie
shell
## Code Before: set -e echo TRAVIS_PULL_REQUEST=$TRAVIS_PULL_REQUEST echo TRAVIS_BRANCH=$TRAVIS_BRANCH if [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_BRANCH" == "master" ]; then echo "Publishing generated documentation..." DIST_DIR="$(pwd)/dist" # Clone gh-pages cd git config --global user.email "[email protected]" git config --global user.name "travis-ci" git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/troykinsella/junkie gh-pages > /dev/null # Update gh-pages cd gh-pages git rm -rf coverage > /dev/null git rm -rf docs > /dev/null cp -R $DIST_DIR/coverage coverage cp -R $DIST_DIR/docs docs # Commit and push changes git add -f . git commit -m "Generated docs for master build $TRAVIS_BUILD_NUMBER" git push -fq origin gh-pages > /dev/null echo "Successfully published generated documentation" fi ## Instruction: Revert debugging code for travis jsdoc publication troubleshooting. ## Code After: set -e if [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_BRANCH" == "master" ]; then echo "Publishing generated documentation..." DIST_DIR="$(pwd)/dist" # Clone gh-pages cd git config --global user.email "[email protected]" git config --global user.name "travis-ci" git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/troykinsella/junkie gh-pages > /dev/null # Update gh-pages cd gh-pages git rm -rf coverage > /dev/null git rm -rf docs > /dev/null cp -R $DIST_DIR/coverage coverage cp -R $DIST_DIR/docs docs # Commit and push changes git add -f . git commit -m "Generated docs for master build $TRAVIS_BUILD_NUMBER" git push -fq origin gh-pages > /dev/null echo "Successfully published generated documentation" fi
set -e - - echo TRAVIS_PULL_REQUEST=$TRAVIS_PULL_REQUEST - echo TRAVIS_BRANCH=$TRAVIS_BRANCH if [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_BRANCH" == "master" ]; then echo "Publishing generated documentation..." DIST_DIR="$(pwd)/dist" # Clone gh-pages cd git config --global user.email "[email protected]" git config --global user.name "travis-ci" git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/troykinsella/junkie gh-pages > /dev/null # Update gh-pages cd gh-pages git rm -rf coverage > /dev/null git rm -rf docs > /dev/null cp -R $DIST_DIR/coverage coverage cp -R $DIST_DIR/docs docs # Commit and push changes git add -f . git commit -m "Generated docs for master build $TRAVIS_BUILD_NUMBER" git push -fq origin gh-pages > /dev/null echo "Successfully published generated documentation" fi
3
0.09375
0
3
7d888d853f4f6c1fd8ee8c80fa7598fe071628a1
sample/src/main/java/com/squareup/seismic/sample/Demo.java
sample/src/main/java/com/squareup/seismic/sample/Demo.java
package com.squareup.seismic.sample; import android.app.Activity; import android.hardware.SensorManager; import android.os.Bundle; import android.widget.TextView; import android.widget.Toast; import com.squareup.seismic.ShakeDetector; import static android.view.Gravity.CENTER; import static android.view.ViewGroup.LayoutParams; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; public class Demo extends Activity implements ShakeDetector.Listener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); SensorManager sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); ShakeDetector sd = new ShakeDetector(this); sd.start(sensorManager); TextView tv = new TextView(this); tv.setGravity(CENTER); tv.setText("Shake me, bro!"); setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } public void hearShake() { Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } }
package com.squareup.seismic.sample; import android.app.Activity; import android.hardware.SensorManager; import android.os.Bundle; import android.widget.TextView; import android.widget.Toast; import com.squareup.seismic.ShakeDetector; import static android.view.Gravity.CENTER; import static android.view.ViewGroup.LayoutParams; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; public class Demo extends Activity implements ShakeDetector.Listener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); SensorManager sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); ShakeDetector sd = new ShakeDetector(this); sd.start(sensorManager); TextView tv = new TextView(this); tv.setGravity(CENTER); tv.setText("Shake me, bro!"); setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } @Override public void hearShake() { Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } }
Add @Override annotation for implemented method
Add @Override annotation for implemented method `hearShake` is implemented from `ShakeDetector.Listener`.
Java
apache-2.0
square/seismic,square/seismic
java
## Code Before: package com.squareup.seismic.sample; import android.app.Activity; import android.hardware.SensorManager; import android.os.Bundle; import android.widget.TextView; import android.widget.Toast; import com.squareup.seismic.ShakeDetector; import static android.view.Gravity.CENTER; import static android.view.ViewGroup.LayoutParams; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; public class Demo extends Activity implements ShakeDetector.Listener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); SensorManager sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); ShakeDetector sd = new ShakeDetector(this); sd.start(sensorManager); TextView tv = new TextView(this); tv.setGravity(CENTER); tv.setText("Shake me, bro!"); setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } public void hearShake() { Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } } ## Instruction: Add @Override annotation for implemented method `hearShake` is implemented from `ShakeDetector.Listener`. ## Code After: package com.squareup.seismic.sample; import android.app.Activity; import android.hardware.SensorManager; import android.os.Bundle; import android.widget.TextView; import android.widget.Toast; import com.squareup.seismic.ShakeDetector; import static android.view.Gravity.CENTER; import static android.view.ViewGroup.LayoutParams; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; public class Demo extends Activity implements ShakeDetector.Listener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); SensorManager sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); ShakeDetector sd = new ShakeDetector(this); sd.start(sensorManager); TextView tv = new TextView(this); tv.setGravity(CENTER); tv.setText("Shake me, bro!"); setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } @Override public void hearShake() { Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } }
package com.squareup.seismic.sample; import android.app.Activity; import android.hardware.SensorManager; import android.os.Bundle; import android.widget.TextView; import android.widget.Toast; import com.squareup.seismic.ShakeDetector; import static android.view.Gravity.CENTER; import static android.view.ViewGroup.LayoutParams; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; public class Demo extends Activity implements ShakeDetector.Listener { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); SensorManager sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); ShakeDetector sd = new ShakeDetector(this); sd.start(sensorManager); TextView tv = new TextView(this); tv.setGravity(CENTER); tv.setText("Shake me, bro!"); setContentView(tv, new LayoutParams(MATCH_PARENT, MATCH_PARENT)); } - public void hearShake() { + @Override public void hearShake() { ? ++++++++++ Toast.makeText(this, "Don't shake me, bro!", Toast.LENGTH_SHORT).show(); } }
2
0.064516
1
1
5e6d62ce7a567282a88530a2db80b775c9c4406e
swf/core.py
swf/core.py
import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): settings_ = {k: v for k, v in SETTINGS.iteritems()} settings_.update(kwargs) self.region = (settings_.pop('region', None) or boto.swf.layer1.Layer1.DefaultRegionName) self.connection = boto.swf.connect_to_region(self.region, **settings_) if self.connection is None: raise ValueError('invalid region: {}'.format(self.region))
import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in ('aws_access_key_id', 'aws_secret_access_key')} self.region = (SETTINGS.get('region') or kwargs.get('region') or boto.swf.layer1.Layer1.DefaultRegionName) self.connection = (kwargs.pop('connection', None) or boto.swf.connect_to_region(self.region, **settings_)) if self.connection is None: raise ValueError('invalid region: {}'.format(self.region))
Fix ConnectedSWFObject: restrict attributes set by constructor
Fix ConnectedSWFObject: restrict attributes set by constructor - credentials: SETTINGS | kwargs - region: SETTINGS | kwargs | boto.swf.layer1.Layer1.DefaultRegionName - connection: kwargs
Python
mit
botify-labs/python-simple-workflow,botify-labs/python-simple-workflow
python
## Code Before: import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): settings_ = {k: v for k, v in SETTINGS.iteritems()} settings_.update(kwargs) self.region = (settings_.pop('region', None) or boto.swf.layer1.Layer1.DefaultRegionName) self.connection = boto.swf.connect_to_region(self.region, **settings_) if self.connection is None: raise ValueError('invalid region: {}'.format(self.region)) ## Instruction: Fix ConnectedSWFObject: restrict attributes set by constructor - credentials: SETTINGS | kwargs - region: SETTINGS | kwargs | boto.swf.layer1.Layer1.DefaultRegionName - connection: kwargs ## Code After: import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in ('aws_access_key_id', 'aws_secret_access_key')} self.region = (SETTINGS.get('region') or kwargs.get('region') or boto.swf.layer1.Layer1.DefaultRegionName) self.connection = (kwargs.pop('connection', None) or boto.swf.connect_to_region(self.region, **settings_)) if self.connection is None: raise ValueError('invalid region: {}'.format(self.region))
import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): - settings_ = {k: v for k, v in SETTINGS.iteritems()} - settings_.update(kwargs) + settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in + ('aws_access_key_id', + 'aws_secret_access_key')} - self.region = (settings_.pop('region', None) or + self.region = (SETTINGS.get('region') or + kwargs.get('region') or boto.swf.layer1.Layer1.DefaultRegionName) + self.connection = (kwargs.pop('connection', None) or - self.connection = boto.swf.connect_to_region(self.region, **settings_) ? --------------- ^ + boto.swf.connect_to_region(self.region, **settings_)) ? ^^^^^^^^^^^^^^^^^ + if self.connection is None: raise ValueError('invalid region: {}'.format(self.region))
11
0.333333
7
4
31afb24d29bf9b6c238929029e8384abdd72dea3
README.md
README.md
[![Build status](https://travis-ci.org/pagarme/pagarme-python.svg?branch=master)](https://secure.travis-ci.org/pagarme/pagarme-python) [![Coverage](https://coveralls.io/repos/pagarme/pagarme-python/badge.svg?branch=master&service=github)](https://coveralls.io/github/pagarme/pagarme-python) Pagar.me Python library ## Installing This lib can be found on [pip](https://pypi.python.org/pypi/pagarme-python). To install it, use: ``` $ pip install pagarme-python ``` ## Documentation * [API Guide](http://pagar.me/docs) ## Support If you have any problem or suggestion please open an issue [here](https://github.com/pagarme/pagarme-python/issues). ## License Check [here](LICENSE).
[![Build status](https://travis-ci.org/pagarme/pagarme-python.svg?branch=master)](https://secure.travis-ci.org/pagarme/pagarme-python) [![Coverage](https://coveralls.io/repos/pagarme/pagarme-python/badge.svg?branch=master&service=github)](https://coveralls.io/github/pagarme/pagarme-python) Pagar.me Python library ## Installing This lib can be found on [pip](https://pypi.python.org/pypi/pagarme-python). To install it, use: ``` $ pip install pagarme-python ``` ## Documentation * [API Guide](https://docs.pagar.me/) ## Support If you have any problem or suggestion please open an issue [here](https://github.com/pagarme/pagarme-python/issues). ## License Check [here](LICENSE).
Change broken link to docs
Change broken link to docs Changed the link to Pagar.me's docs. It was http://pagar.me/docs, returning 404 error. Changed to https://docs.pagar.me/, right link.
Markdown
mit
pagarme/pagarme-python
markdown
## Code Before: [![Build status](https://travis-ci.org/pagarme/pagarme-python.svg?branch=master)](https://secure.travis-ci.org/pagarme/pagarme-python) [![Coverage](https://coveralls.io/repos/pagarme/pagarme-python/badge.svg?branch=master&service=github)](https://coveralls.io/github/pagarme/pagarme-python) Pagar.me Python library ## Installing This lib can be found on [pip](https://pypi.python.org/pypi/pagarme-python). To install it, use: ``` $ pip install pagarme-python ``` ## Documentation * [API Guide](http://pagar.me/docs) ## Support If you have any problem or suggestion please open an issue [here](https://github.com/pagarme/pagarme-python/issues). ## License Check [here](LICENSE). ## Instruction: Change broken link to docs Changed the link to Pagar.me's docs. It was http://pagar.me/docs, returning 404 error. Changed to https://docs.pagar.me/, right link. ## Code After: [![Build status](https://travis-ci.org/pagarme/pagarme-python.svg?branch=master)](https://secure.travis-ci.org/pagarme/pagarme-python) [![Coverage](https://coveralls.io/repos/pagarme/pagarme-python/badge.svg?branch=master&service=github)](https://coveralls.io/github/pagarme/pagarme-python) Pagar.me Python library ## Installing This lib can be found on [pip](https://pypi.python.org/pypi/pagarme-python). To install it, use: ``` $ pip install pagarme-python ``` ## Documentation * [API Guide](https://docs.pagar.me/) ## Support If you have any problem or suggestion please open an issue [here](https://github.com/pagarme/pagarme-python/issues). ## License Check [here](LICENSE).
[![Build status](https://travis-ci.org/pagarme/pagarme-python.svg?branch=master)](https://secure.travis-ci.org/pagarme/pagarme-python) [![Coverage](https://coveralls.io/repos/pagarme/pagarme-python/badge.svg?branch=master&service=github)](https://coveralls.io/github/pagarme/pagarme-python) Pagar.me Python library ## Installing This lib can be found on [pip](https://pypi.python.org/pypi/pagarme-python). To install it, use: ``` $ pip install pagarme-python ``` ## Documentation - * [API Guide](http://pagar.me/docs) ? ---- + * [API Guide](https://docs.pagar.me/) ? + +++++ ## Support If you have any problem or suggestion please open an issue [here](https://github.com/pagarme/pagarme-python/issues). ## License Check [here](LICENSE).
2
0.086957
1
1
8dc80a493011b7d41d81473211635c4927c8e5f4
common.sh
common.sh
sudo -v # We need to distinguish sources and binary packages for Brew & Cask on OSX COMMON_PACKAGES="git vim jnettop hfsutils unrar subversion colordiff faac flac lame x264 inkscape graphviz qemu lftp shntool testdisk fdupes recode pngcrush exiftool rtmpdump optipng colortail colorsvn mercurial" BIN_PACKAGES="audacity avidemux dropbox firefox gimp inkscape vlc blender thunderbird virtualbox bitcoin-qt wireshark" # Define global Python packages PYTHON_PACKAGES="readline pip setuptools virtualenv distribute pep8 pyflakes" # Sync dot files rsync --exclude ".git/" --exclude ".DS_Store" --exclude ".gitignore" --exclude ".gitmodules" --exclude "*.sh" --exclude "*.swp" --exclude "*.md" -av --no-perms . ~
sudo -v # Keep-alive: update existing `sudo` time stamp until `.osx` has finished while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null & # We need to distinguish sources and binary packages for Brew & Cask on OSX COMMON_PACKAGES="git vim jnettop hfsutils unrar subversion ack colordiff faac flac lame x264 inkscape graphviz qemu lftp shntool testdisk fdupes recode pngcrush exiftool rtmpdump optipng colortail colorsvn mercurial" BIN_PACKAGES="audacity avidemux dropbox firefox gimp inkscape vlc blender thunderbird virtualbox bitcoin-qt wireshark" # Define global Python packages PYTHON_PACKAGES="readline pip setuptools distribute virtualenv virtualenvwrapper pep8 pylint pyflakes coverage" # Sync dot files rsync --exclude ".git/" --exclude ".DS_Store" --exclude ".gitignore" --exclude ".gitmodules" --exclude "*.sh" --exclude "*.swp" --exclude "*.md" -av --no-perms . ~
Add some more Python packages.
Add some more Python packages.
Shell
bsd-2-clause
kdeldycke/dotfiles,kdeldycke/dotfiles
shell
## Code Before: sudo -v # We need to distinguish sources and binary packages for Brew & Cask on OSX COMMON_PACKAGES="git vim jnettop hfsutils unrar subversion colordiff faac flac lame x264 inkscape graphviz qemu lftp shntool testdisk fdupes recode pngcrush exiftool rtmpdump optipng colortail colorsvn mercurial" BIN_PACKAGES="audacity avidemux dropbox firefox gimp inkscape vlc blender thunderbird virtualbox bitcoin-qt wireshark" # Define global Python packages PYTHON_PACKAGES="readline pip setuptools virtualenv distribute pep8 pyflakes" # Sync dot files rsync --exclude ".git/" --exclude ".DS_Store" --exclude ".gitignore" --exclude ".gitmodules" --exclude "*.sh" --exclude "*.swp" --exclude "*.md" -av --no-perms . ~ ## Instruction: Add some more Python packages. ## Code After: sudo -v # Keep-alive: update existing `sudo` time stamp until `.osx` has finished while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null & # We need to distinguish sources and binary packages for Brew & Cask on OSX COMMON_PACKAGES="git vim jnettop hfsutils unrar subversion ack colordiff faac flac lame x264 inkscape graphviz qemu lftp shntool testdisk fdupes recode pngcrush exiftool rtmpdump optipng colortail colorsvn mercurial" BIN_PACKAGES="audacity avidemux dropbox firefox gimp inkscape vlc blender thunderbird virtualbox bitcoin-qt wireshark" # Define global Python packages PYTHON_PACKAGES="readline pip setuptools distribute virtualenv virtualenvwrapper pep8 pylint pyflakes coverage" # Sync dot files rsync --exclude ".git/" --exclude ".DS_Store" --exclude ".gitignore" --exclude ".gitmodules" --exclude "*.sh" --exclude "*.swp" --exclude "*.md" -av --no-perms . ~
sudo -v + # Keep-alive: update existing `sudo` time stamp until `.osx` has finished + while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null & + # We need to distinguish sources and binary packages for Brew & Cask on OSX - COMMON_PACKAGES="git vim jnettop hfsutils unrar subversion colordiff faac flac lame x264 inkscape graphviz qemu lftp shntool testdisk fdupes recode pngcrush exiftool rtmpdump optipng colortail colorsvn mercurial" + COMMON_PACKAGES="git vim jnettop hfsutils unrar subversion ack colordiff faac flac lame x264 inkscape graphviz qemu lftp shntool testdisk fdupes recode pngcrush exiftool rtmpdump optipng colortail colorsvn mercurial" ? ++++ BIN_PACKAGES="audacity avidemux dropbox firefox gimp inkscape vlc blender thunderbird virtualbox bitcoin-qt wireshark" # Define global Python packages - PYTHON_PACKAGES="readline pip setuptools virtualenv distribute pep8 pyflakes" ? ^ ^ ^^^^ + PYTHON_PACKAGES="readline pip setuptools distribute virtualenv virtualenvwrapper pep8 pylint pyflakes coverage" ? +++++++++++ ^ ^ +++++++ ^^^ + +++++++ +++++++++ # Sync dot files rsync --exclude ".git/" --exclude ".DS_Store" --exclude ".gitignore" --exclude ".gitmodules" --exclude "*.sh" --exclude "*.swp" --exclude "*.md" -av --no-perms . ~
7
0.636364
5
2
c55d38a42a3e733bb52ada6081e0ed26d998d2fe
features/project-gem.feature
features/project-gem.feature
Feature: project gem To test that dependencies are installed correctly in a clean gemset, it is necessary to be able to install the project under test's gem into the clean gemset while the dependencies are fetched using normal procedures, like 'gem install'. Scenario: When I build gem from project's "aruba.gemspec" Then a file matching %r<aruba-.*\.gem> should exist @gem-install Scenario: Given I'm using a clean gemset "aruba-installer" And I build gem from project's "aruba.gemspec" And I install latest local "aruba" gem When I run `gem list` Then the output from "gem list" should contain "aruba"
Feature: project gem To test that dependencies are installed correctly in a clean gemset, it is necessary to be able to install the project under test's gem into the clean gemset while the dependencies are fetched using normal procedures, like 'gem install'. Scenario: When I build gem from project's "aruba.gemspec" Then a file matching %r<aruba-.*\.gem> should exist @gem-install Scenario: Given I build gem from project's "aruba.gemspec" And I'm using a clean gemset "aruba-installer" And I install latest local "aruba" gem When I run `gem list` Then the output from "gem list" should contain "aruba"
Build gem in project gemset instead of clean gemset
Build gem in project gemset instead of clean gemset MSP-11400 It shouldn't be expected that a gem can be built outside its native gemset, so reorder steps so building the gem happens before using a clean gemset.
Cucumber
mit
rapid7/aruba,rapid7/aruba
cucumber
## Code Before: Feature: project gem To test that dependencies are installed correctly in a clean gemset, it is necessary to be able to install the project under test's gem into the clean gemset while the dependencies are fetched using normal procedures, like 'gem install'. Scenario: When I build gem from project's "aruba.gemspec" Then a file matching %r<aruba-.*\.gem> should exist @gem-install Scenario: Given I'm using a clean gemset "aruba-installer" And I build gem from project's "aruba.gemspec" And I install latest local "aruba" gem When I run `gem list` Then the output from "gem list" should contain "aruba" ## Instruction: Build gem in project gemset instead of clean gemset MSP-11400 It shouldn't be expected that a gem can be built outside its native gemset, so reorder steps so building the gem happens before using a clean gemset. ## Code After: Feature: project gem To test that dependencies are installed correctly in a clean gemset, it is necessary to be able to install the project under test's gem into the clean gemset while the dependencies are fetched using normal procedures, like 'gem install'. Scenario: When I build gem from project's "aruba.gemspec" Then a file matching %r<aruba-.*\.gem> should exist @gem-install Scenario: Given I build gem from project's "aruba.gemspec" And I'm using a clean gemset "aruba-installer" And I install latest local "aruba" gem When I run `gem list` Then the output from "gem list" should contain "aruba"
Feature: project gem To test that dependencies are installed correctly in a clean gemset, it is necessary to be able to install the project under test's gem into the clean gemset while the dependencies are fetched using normal procedures, like 'gem install'. Scenario: When I build gem from project's "aruba.gemspec" Then a file matching %r<aruba-.*\.gem> should exist @gem-install Scenario: - Given I'm using a clean gemset "aruba-installer" - And I build gem from project's "aruba.gemspec" ? ^ - + Given I build gem from project's "aruba.gemspec" ? ^^^^ + And I'm using a clean gemset "aruba-installer" And I install latest local "aruba" gem When I run `gem list` Then the output from "gem list" should contain "aruba"
4
0.235294
2
2
489f8ad90e228bc1d87c6094f3856b1eab85bc8a
app/views/groups/show.html.erb
app/views/groups/show.html.erb
<div class="container"> <div class="row-fluid"> <article class="span12"> <h3><%= @group.name %></h3> <p>is meeting at <strong><%= @group.address %></strong><br> on <strong><%= @group.time %>.</strong> </p> <p>There are <strong><%= @group.number_of_members %></strong> members.</p> </article> </div> <div class="row-fluid"> <div class="span12"> <%= link_to groups_path do %> <i class="icon-arrow-left"></i> back to groups <% end %> </div> </div> </div>
<div class="container"> <div class="row-fluid"> <article class="span12"> <h3><%= @group.name %></h3> <p>is meeting at <strong><%= @group.address %></strong><br> on <strong><%= @group.time %>.</strong> </p> <p>We prefer to have up to <strong><%= @group.number_of_members %></strong> members.</p> </article> </div> <div class="row-fluid"> <div class="span12"> <%= link_to groups_path do %> <i class="icon-arrow-left"></i> back to groups <% end %> </div> </div> </div>
Change statement of max participants
Change statement of max participants
HTML+ERB
agpl-3.0
rubycorns/rorganize.it,dondonz/rorganize.it,dondonz/rorganize.it,rubycorns/rorganize.it,dondonz/rorganize.it,rubycorns/rorganize.it,dondonz/rorganize.it,rubycorns/rorganize.it
html+erb
## Code Before: <div class="container"> <div class="row-fluid"> <article class="span12"> <h3><%= @group.name %></h3> <p>is meeting at <strong><%= @group.address %></strong><br> on <strong><%= @group.time %>.</strong> </p> <p>There are <strong><%= @group.number_of_members %></strong> members.</p> </article> </div> <div class="row-fluid"> <div class="span12"> <%= link_to groups_path do %> <i class="icon-arrow-left"></i> back to groups <% end %> </div> </div> </div> ## Instruction: Change statement of max participants ## Code After: <div class="container"> <div class="row-fluid"> <article class="span12"> <h3><%= @group.name %></h3> <p>is meeting at <strong><%= @group.address %></strong><br> on <strong><%= @group.time %>.</strong> </p> <p>We prefer to have up to <strong><%= @group.number_of_members %></strong> members.</p> </article> </div> <div class="row-fluid"> <div class="span12"> <%= link_to groups_path do %> <i class="icon-arrow-left"></i> back to groups <% end %> </div> </div> </div>
<div class="container"> <div class="row-fluid"> <article class="span12"> <h3><%= @group.name %></h3> <p>is meeting at <strong><%= @group.address %></strong><br> on <strong><%= @group.time %>.</strong> </p> - <p>There are <strong><%= @group.number_of_members %></strong> members.</p> ? ^^ ^^^ + <p>We prefer to have up to <strong><%= @group.number_of_members %></strong> members.</p> ? ^^^^^^^ +++++++ ^^^^^ </article> </div> <div class="row-fluid"> <div class="span12"> <%= link_to groups_path do %> <i class="icon-arrow-left"></i> back to groups <% end %> </div> </div> </div>
2
0.111111
1
1
03213983d4ab505b4534a5f3940014d5b5670e43
native/make_native.sh
native/make_native.sh
if [[ $SL_BUILD_NATIVE == "false" ]]; then echo "Skipping the native image build because SL_BUILD_NATIVE is set to false." exit 0 fi "$JAVA_HOME"/bin/native-image --tool:truffle -H:MaxRuntimeCompileMethods=1200 \ -cp ../language/target/simplelanguage.jar:../launcher/target/launcher-19.0.0-SNAPSHOT.jar \ com.oracle.truffle.sl.launcher.SLMain \ slnative
if [[ $SL_BUILD_NATIVE == "false" ]]; then echo "Skipping the native image build because SL_BUILD_NATIVE is set to false." exit 0 fi "$JAVA_HOME"/bin/native-image \ --macro:truffle --no-fallback --initialize-at-build-time \ -cp ../language/target/simplelanguage.jar:../launcher/target/launcher-19.0.0-SNAPSHOT.jar \ com.oracle.truffle.sl.launcher.SLMain \ slnative
Use new native image flags.
Use new native image flags.
Shell
mit
m50d/amalie
shell
## Code Before: if [[ $SL_BUILD_NATIVE == "false" ]]; then echo "Skipping the native image build because SL_BUILD_NATIVE is set to false." exit 0 fi "$JAVA_HOME"/bin/native-image --tool:truffle -H:MaxRuntimeCompileMethods=1200 \ -cp ../language/target/simplelanguage.jar:../launcher/target/launcher-19.0.0-SNAPSHOT.jar \ com.oracle.truffle.sl.launcher.SLMain \ slnative ## Instruction: Use new native image flags. ## Code After: if [[ $SL_BUILD_NATIVE == "false" ]]; then echo "Skipping the native image build because SL_BUILD_NATIVE is set to false." exit 0 fi "$JAVA_HOME"/bin/native-image \ --macro:truffle --no-fallback --initialize-at-build-time \ -cp ../language/target/simplelanguage.jar:../launcher/target/launcher-19.0.0-SNAPSHOT.jar \ com.oracle.truffle.sl.launcher.SLMain \ slnative
if [[ $SL_BUILD_NATIVE == "false" ]]; then echo "Skipping the native image build because SL_BUILD_NATIVE is set to false." exit 0 fi - "$JAVA_HOME"/bin/native-image --tool:truffle -H:MaxRuntimeCompileMethods=1200 \ + "$JAVA_HOME"/bin/native-image \ + --macro:truffle --no-fallback --initialize-at-build-time \ -cp ../language/target/simplelanguage.jar:../launcher/target/launcher-19.0.0-SNAPSHOT.jar \ com.oracle.truffle.sl.launcher.SLMain \ slnative
3
0.375
2
1
28dc472fab5f9657957fb8b0c12f607a6a71af38
src/scripts/modules/media/footer/metadata/metadata.less
src/scripts/modules/media/footer/metadata/metadata.less
@import "../../../../../styles/variables.less"; .media-footer > .metadata { position: relative; dl { margin-bottom: 0; > dt { padding: 10px 0; width: 225px; text-align: left; color: @gray-dark; } > dd { margin: 0; border-bottom: 1px solid @gray-lighter; &:last-child { border: none; } > div { padding: 10px 10px 10px 225px; > ul { padding: 0 0 0 15px; } } } } .name { &::after { content: ","; } &:last-child::after { content: ""; } } }
@import "../../../../../styles/variables.less"; .media-footer > .metadata { position: relative; > div > dl { margin-bottom: 0; > dt { padding: 10px 0; width: 225px; text-align: left; color: @gray-dark; } > dd { margin: 0; border-bottom: 1px solid @gray-lighter; &:last-child { border: none; } > div { padding: 10px 10px 10px 225px; > ul { padding: 0 0 0 15px; } > .name { &::after { content: ","; } &:last-child::after { content: ""; } } } } } }
Use child selectors when possible
Use child selectors when possible
Less
agpl-3.0
katalysteducation/webview,katalysteducation/webview,katalysteducation/webview,carolinelane10/webview,katalysteducation/webview,Connexions/webview,dak/webview,Connexions/webview,dak/webview,Connexions/webview,dak/webview,Connexions/webview
less
## Code Before: @import "../../../../../styles/variables.less"; .media-footer > .metadata { position: relative; dl { margin-bottom: 0; > dt { padding: 10px 0; width: 225px; text-align: left; color: @gray-dark; } > dd { margin: 0; border-bottom: 1px solid @gray-lighter; &:last-child { border: none; } > div { padding: 10px 10px 10px 225px; > ul { padding: 0 0 0 15px; } } } } .name { &::after { content: ","; } &:last-child::after { content: ""; } } } ## Instruction: Use child selectors when possible ## Code After: @import "../../../../../styles/variables.less"; .media-footer > .metadata { position: relative; > div > dl { margin-bottom: 0; > dt { padding: 10px 0; width: 225px; text-align: left; color: @gray-dark; } > dd { margin: 0; border-bottom: 1px solid @gray-lighter; &:last-child { border: none; } > div { padding: 10px 10px 10px 225px; > ul { padding: 0 0 0 15px; } > .name { &::after { content: ","; } &:last-child::after { content: ""; } } } } } }
@import "../../../../../styles/variables.less"; .media-footer > .metadata { position: relative; - dl { + > div > dl { margin-bottom: 0; > dt { padding: 10px 0; width: 225px; text-align: left; color: @gray-dark; } > dd { margin: 0; border-bottom: 1px solid @gray-lighter; &:last-child { border: none; } > div { padding: 10px 10px 10px 225px; > ul { padding: 0 0 0 15px; } + + > .name { + &::after { + content: ","; + } + + &:last-child::after { + content: ""; + } + } } } } - - .name { - &::after { - content: ","; - } - - &:last-child::after { - content: ""; - } - } }
22
0.511628
11
11
63518f439577632667e653e7097f2938a33fb99e
res/xml/preferences_device.xml
res/xml/preferences_device.xml
<?xml version="1.0" encoding="utf-8"?> <PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android"> <PreferenceCategory android:title="Device"> <CheckBoxPreference android:key="screenlock" android:defaultValue="false" android:title="Prevent display sleep"/> <ListPreference android:entries="@array/pref_timeout_names" android:title="Connection timeout" android:key="timeout" android:entryValues="@array/pref_timeout_values" android:summary="Remain connected to IRCCloud while running in the background" android:defaultValue="300000" /> </PreferenceCategory> </PreferenceScreen>
<?xml version="1.0" encoding="utf-8"?> <PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android"> <PreferenceCategory android:title="Device"> <CheckBoxPreference android:key="screenlock" android:defaultValue="false" android:title="Prevent display sleep"/> <ListPreference android:entries="@array/pref_timeout_names" android:title="Connection timeout" android:key="timeout" android:entryValues="@array/pref_timeout_values" android:summary="Remain connected to IRCCloud while running in the background" android:defaultValue="300000" /> <CheckBoxPreference android:key="acra.enable" android:title="Crash Reporting" android:summaryOn="Crash reports will be sent to IRCCloud" android:summaryOff="Crash reports will not be sent to IRCCloud" android:defaultValue="true"/> </PreferenceCategory> </PreferenceScreen>
Add a setting to disable sending ACRA crash reports
Add a setting to disable sending ACRA crash reports
XML
apache-2.0
irccloud/android,irccloud/android,irccloud/android,irccloud/android,irccloud/android
xml
## Code Before: <?xml version="1.0" encoding="utf-8"?> <PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android"> <PreferenceCategory android:title="Device"> <CheckBoxPreference android:key="screenlock" android:defaultValue="false" android:title="Prevent display sleep"/> <ListPreference android:entries="@array/pref_timeout_names" android:title="Connection timeout" android:key="timeout" android:entryValues="@array/pref_timeout_values" android:summary="Remain connected to IRCCloud while running in the background" android:defaultValue="300000" /> </PreferenceCategory> </PreferenceScreen> ## Instruction: Add a setting to disable sending ACRA crash reports ## Code After: <?xml version="1.0" encoding="utf-8"?> <PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android"> <PreferenceCategory android:title="Device"> <CheckBoxPreference android:key="screenlock" android:defaultValue="false" android:title="Prevent display sleep"/> <ListPreference android:entries="@array/pref_timeout_names" android:title="Connection timeout" android:key="timeout" android:entryValues="@array/pref_timeout_values" android:summary="Remain connected to IRCCloud while running in the background" android:defaultValue="300000" /> <CheckBoxPreference android:key="acra.enable" android:title="Crash Reporting" android:summaryOn="Crash reports will be sent to IRCCloud" android:summaryOff="Crash reports will not be sent to IRCCloud" android:defaultValue="true"/> </PreferenceCategory> </PreferenceScreen>
<?xml version="1.0" encoding="utf-8"?> <PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android"> <PreferenceCategory android:title="Device"> <CheckBoxPreference android:key="screenlock" android:defaultValue="false" android:title="Prevent display sleep"/> <ListPreference android:entries="@array/pref_timeout_names" android:title="Connection timeout" android:key="timeout" android:entryValues="@array/pref_timeout_values" android:summary="Remain connected to IRCCloud while running in the background" android:defaultValue="300000" /> + <CheckBoxPreference android:key="acra.enable" + android:title="Crash Reporting" + android:summaryOn="Crash reports will be sent to IRCCloud" + android:summaryOff="Crash reports will not be sent to IRCCloud" + android:defaultValue="true"/> </PreferenceCategory> </PreferenceScreen>
5
0.3125
5
0
6889946ebb1c1559e0e1c7b83e1d7b1d6896e0b0
tests/test_train_dictionary.py
tests/test_train_dictionary.py
import unittest import zstd class TestTrainDictionary(unittest.TestCase): def test_no_args(self): with self.assertRaises(TypeError): zstd.train_dictionary() def test_bad_args(self): with self.assertRaises(TypeError): zstd.train_dictionary(8192, u'foo') with self.assertRaises(ValueError): zstd.train_dictionary(8192, [u'foo']) def test_basic(self): samples = [] for i in range(128): samples.append(b'foo' * 64) samples.append(b'bar' * 64) samples.append(b'foobar' * 64) samples.append(b'baz' * 64) samples.append(b'foobaz' * 64) samples.append(b'bazfoo' * 64) d = zstd.train_dictionary(8192, samples) self.assertLessEqual(len(d), 8192) dict_id = zstd.dictionary_id(d) self.assertIsInstance(dict_id, int)
import sys import unittest import zstd if sys.version_info[0] >= 3: int_type = int else: int_type = long class TestTrainDictionary(unittest.TestCase): def test_no_args(self): with self.assertRaises(TypeError): zstd.train_dictionary() def test_bad_args(self): with self.assertRaises(TypeError): zstd.train_dictionary(8192, u'foo') with self.assertRaises(ValueError): zstd.train_dictionary(8192, [u'foo']) def test_basic(self): samples = [] for i in range(128): samples.append(b'foo' * 64) samples.append(b'bar' * 64) samples.append(b'foobar' * 64) samples.append(b'baz' * 64) samples.append(b'foobaz' * 64) samples.append(b'bazfoo' * 64) d = zstd.train_dictionary(8192, samples) self.assertLessEqual(len(d), 8192) dict_id = zstd.dictionary_id(d) self.assertIsInstance(dict_id, int_type)
Check for appropriate long type on Python 2
Check for appropriate long type on Python 2 The extension always returns a long, which is not an "int" on Python 2. Fix the test.
Python
bsd-3-clause
terrelln/python-zstandard,terrelln/python-zstandard,terrelln/python-zstandard,indygreg/python-zstandard,terrelln/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard,indygreg/python-zstandard
python
## Code Before: import unittest import zstd class TestTrainDictionary(unittest.TestCase): def test_no_args(self): with self.assertRaises(TypeError): zstd.train_dictionary() def test_bad_args(self): with self.assertRaises(TypeError): zstd.train_dictionary(8192, u'foo') with self.assertRaises(ValueError): zstd.train_dictionary(8192, [u'foo']) def test_basic(self): samples = [] for i in range(128): samples.append(b'foo' * 64) samples.append(b'bar' * 64) samples.append(b'foobar' * 64) samples.append(b'baz' * 64) samples.append(b'foobaz' * 64) samples.append(b'bazfoo' * 64) d = zstd.train_dictionary(8192, samples) self.assertLessEqual(len(d), 8192) dict_id = zstd.dictionary_id(d) self.assertIsInstance(dict_id, int) ## Instruction: Check for appropriate long type on Python 2 The extension always returns a long, which is not an "int" on Python 2. Fix the test. ## Code After: import sys import unittest import zstd if sys.version_info[0] >= 3: int_type = int else: int_type = long class TestTrainDictionary(unittest.TestCase): def test_no_args(self): with self.assertRaises(TypeError): zstd.train_dictionary() def test_bad_args(self): with self.assertRaises(TypeError): zstd.train_dictionary(8192, u'foo') with self.assertRaises(ValueError): zstd.train_dictionary(8192, [u'foo']) def test_basic(self): samples = [] for i in range(128): samples.append(b'foo' * 64) samples.append(b'bar' * 64) samples.append(b'foobar' * 64) samples.append(b'baz' * 64) samples.append(b'foobaz' * 64) samples.append(b'bazfoo' * 64) d = zstd.train_dictionary(8192, samples) self.assertLessEqual(len(d), 8192) dict_id = zstd.dictionary_id(d) self.assertIsInstance(dict_id, int_type)
+ import sys import unittest import zstd + + + if sys.version_info[0] >= 3: + int_type = int + else: + int_type = long class TestTrainDictionary(unittest.TestCase): def test_no_args(self): with self.assertRaises(TypeError): zstd.train_dictionary() def test_bad_args(self): with self.assertRaises(TypeError): zstd.train_dictionary(8192, u'foo') with self.assertRaises(ValueError): zstd.train_dictionary(8192, [u'foo']) def test_basic(self): samples = [] for i in range(128): samples.append(b'foo' * 64) samples.append(b'bar' * 64) samples.append(b'foobar' * 64) samples.append(b'baz' * 64) samples.append(b'foobaz' * 64) samples.append(b'bazfoo' * 64) d = zstd.train_dictionary(8192, samples) self.assertLessEqual(len(d), 8192) dict_id = zstd.dictionary_id(d) - self.assertIsInstance(dict_id, int) + self.assertIsInstance(dict_id, int_type) ? +++++
9
0.28125
8
1
bd4e1c3f511ac1163e39d99fdc8e70f261023c44
setup/create_player_seasons.py
setup/create_player_seasons.py
import concurrent.futures from db.common import session_scope from db.player import Player from utils.player_data_retriever import PlayerDataRetriever def create_player_seasons(simulation=False): data_retriever = PlayerDataRetriever() with session_scope() as session: players = session.query(Player).all()[:25] with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads: future_tasks = { threads.submit( data_retriever.retrieve_player_seasons, player.player_id, simulation ): player for player in players } for future in concurrent.futures.as_completed(future_tasks): try: plr_seasons = future.result() print(len(plr_seasons)) except Exception as e: print("Concurrent task generated an exception: %s" % e)
import concurrent.futures from db.common import session_scope from db.player import Player from utils.player_data_retriever import PlayerDataRetriever def create_player_seasons(simulation=False): data_retriever = PlayerDataRetriever() with session_scope() as session: players = session.query(Player).all()[:] with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads: future_tasks = { threads.submit( data_retriever.retrieve_player_seasons, player.player_id, simulation ): player for player in players } for future in concurrent.futures.as_completed(future_tasks): try: plr_seasons = future.result() except Exception as e: print("Concurrent task generated an exception: %s" % e)
Update player season retrieval function
Update player season retrieval function
Python
mit
leaffan/pynhldb
python
## Code Before: import concurrent.futures from db.common import session_scope from db.player import Player from utils.player_data_retriever import PlayerDataRetriever def create_player_seasons(simulation=False): data_retriever = PlayerDataRetriever() with session_scope() as session: players = session.query(Player).all()[:25] with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads: future_tasks = { threads.submit( data_retriever.retrieve_player_seasons, player.player_id, simulation ): player for player in players } for future in concurrent.futures.as_completed(future_tasks): try: plr_seasons = future.result() print(len(plr_seasons)) except Exception as e: print("Concurrent task generated an exception: %s" % e) ## Instruction: Update player season retrieval function ## Code After: import concurrent.futures from db.common import session_scope from db.player import Player from utils.player_data_retriever import PlayerDataRetriever def create_player_seasons(simulation=False): data_retriever = PlayerDataRetriever() with session_scope() as session: players = session.query(Player).all()[:] with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads: future_tasks = { threads.submit( data_retriever.retrieve_player_seasons, player.player_id, simulation ): player for player in players } for future in concurrent.futures.as_completed(future_tasks): try: plr_seasons = future.result() except Exception as e: print("Concurrent task generated an exception: %s" % e)
import concurrent.futures from db.common import session_scope from db.player import Player from utils.player_data_retriever import PlayerDataRetriever def create_player_seasons(simulation=False): data_retriever = PlayerDataRetriever() with session_scope() as session: - players = session.query(Player).all()[:25] ? -- + players = session.query(Player).all()[:] with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads: future_tasks = { threads.submit( data_retriever.retrieve_player_seasons, player.player_id, simulation ): player for player in players } for future in concurrent.futures.as_completed(future_tasks): try: plr_seasons = future.result() - print(len(plr_seasons)) except Exception as e: print("Concurrent task generated an exception: %s" % e)
3
0.1
1
2
6844cab28d4cc5c6126eede3b49c88e403989350
Applications/opensense/CMakeLists.txt
Applications/opensense/CMakeLists.txt
if(OPENSIM_BUILD_INDIVIDUAL_APPS) OpenSimAddApplication(NAME opensense) endif() if(BUILD_TESTING) subdirs(test) endif(BUILD_TESTING)
OpenSimAddApplication(NAME opensense) if(BUILD_TESTING) subdirs(test) endif(BUILD_TESTING)
Build opensense executable regardless of the value of BUILD_INDIVIDUAL_APPS CMake variable
Build opensense executable regardless of the value of BUILD_INDIVIDUAL_APPS CMake variable
Text
apache-2.0
opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core
text
## Code Before: if(OPENSIM_BUILD_INDIVIDUAL_APPS) OpenSimAddApplication(NAME opensense) endif() if(BUILD_TESTING) subdirs(test) endif(BUILD_TESTING) ## Instruction: Build opensense executable regardless of the value of BUILD_INDIVIDUAL_APPS CMake variable ## Code After: OpenSimAddApplication(NAME opensense) if(BUILD_TESTING) subdirs(test) endif(BUILD_TESTING)
- if(OPENSIM_BUILD_INDIVIDUAL_APPS) - OpenSimAddApplication(NAME opensense) ? ---- + OpenSimAddApplication(NAME opensense) - endif() if(BUILD_TESTING) subdirs(test) endif(BUILD_TESTING)
4
0.571429
1
3
a52a9ad69a86c54910870071a4fe788ec9117b51
01-Go/01_hello_world/main.go
01-Go/01_hello_world/main.go
package main import "fmt" func main() { fmt.Printf("Hello World\n") }
package main import "fmt" func main() { str := "Hello World" fmt.Println(str) }
Update hello world to include a variable
Update hello world to include a variable
Go
mit
sampov2/one-language-per-month-2015,sampov2/one-language-per-month-2015,sampov2/one-language-per-month-2015
go
## Code Before: package main import "fmt" func main() { fmt.Printf("Hello World\n") } ## Instruction: Update hello world to include a variable ## Code After: package main import "fmt" func main() { str := "Hello World" fmt.Println(str) }
package main import "fmt" func main() { - fmt.Printf("Hello World\n") + str := "Hello World" + fmt.Println(str) }
3
0.428571
2
1
9157ef16b964ea01633bd648b41df57a9422616b
groovy/LibraryJobs.groovy
groovy/LibraryJobs.groovy
/* * Copyright (c) 2016-2019 SIL International * This software is licensed under the MIT license (http://opensource.org/licenses/MIT) */ for (repo in ['icu-dotnet', 'icu4c', 'SIL.BuildTasks']) { multibranchPipelineJob(repo) { description """<p>Builds of ${repo}</p> <p>The job is created by the DSL plugin from <i>LibraryJobs.groovy</i> script.</p>""" branchSources { github { id(repo) repoOwner('sillsdev') repository(repo) scanCredentialsId('github-sillsdevgerrit') if (repo != 'icu4c') { includes('master PR-*') } buildOriginBranch(true) buildOriginBranchWithPR(false) buildOriginPRMerge(true) buildForkPRMerge(true) } orphanedItemStrategy { discardOldItems { daysToKeep(60) numToKeep(10) } } triggers { // check once a day if not otherwise run periodicFolderTrigger { interval('1d') } } } } }
/* * Copyright (c) 2016-2019 SIL International * This software is licensed under the MIT license (http://opensource.org/licenses/MIT) */ for (repo in ['icu-dotnet', 'icu4c', 'SIL.BuildTasks']) { multibranchPipelineJob(repo) { description """<p>Builds of ${repo}</p> <p>The job is created by the DSL plugin from <i>LibraryJobs.groovy</i> script.</p>""" branchSources { github { id(repo) repoOwner('sillsdev') repository(repo) scanCredentialsId('github-sillsdevgerrit') if (repo != 'icu4c') { includes('master PR-*') } buildOriginBranch(true) buildOriginBranchWithPR(false) buildOriginPRMerge(true) buildForkPRMerge(true) } orphanedItemStrategy { discardOldItems { daysToKeep(60) numToKeep(10) } } triggers { // check once a day if not otherwise run periodicFolderTrigger { interval('1d') } } } if (repo == 'icu4c') { factory { workflowBranchProjectFactory { scriptPath('icu4c/Jenkinsfile') } } } } }
Update to new location of Jenkinsfile
[icu4c] Update to new location of Jenkinsfile The ICU project restructured their repo. Since we re-applied our previous patches the location for Jenkinsfile now also moved. This change makes the necessary adjustment. Change-Id: I74749a3331df036609d81936fcffa246cdcd6772
Groovy
mit
sillsdev/ci-builder-scripts,sillsdev/ci-builder-scripts,sillsdev/ci-builder-scripts
groovy
## Code Before: /* * Copyright (c) 2016-2019 SIL International * This software is licensed under the MIT license (http://opensource.org/licenses/MIT) */ for (repo in ['icu-dotnet', 'icu4c', 'SIL.BuildTasks']) { multibranchPipelineJob(repo) { description """<p>Builds of ${repo}</p> <p>The job is created by the DSL plugin from <i>LibraryJobs.groovy</i> script.</p>""" branchSources { github { id(repo) repoOwner('sillsdev') repository(repo) scanCredentialsId('github-sillsdevgerrit') if (repo != 'icu4c') { includes('master PR-*') } buildOriginBranch(true) buildOriginBranchWithPR(false) buildOriginPRMerge(true) buildForkPRMerge(true) } orphanedItemStrategy { discardOldItems { daysToKeep(60) numToKeep(10) } } triggers { // check once a day if not otherwise run periodicFolderTrigger { interval('1d') } } } } } ## Instruction: [icu4c] Update to new location of Jenkinsfile The ICU project restructured their repo. Since we re-applied our previous patches the location for Jenkinsfile now also moved. This change makes the necessary adjustment. Change-Id: I74749a3331df036609d81936fcffa246cdcd6772 ## Code After: /* * Copyright (c) 2016-2019 SIL International * This software is licensed under the MIT license (http://opensource.org/licenses/MIT) */ for (repo in ['icu-dotnet', 'icu4c', 'SIL.BuildTasks']) { multibranchPipelineJob(repo) { description """<p>Builds of ${repo}</p> <p>The job is created by the DSL plugin from <i>LibraryJobs.groovy</i> script.</p>""" branchSources { github { id(repo) repoOwner('sillsdev') repository(repo) scanCredentialsId('github-sillsdevgerrit') if (repo != 'icu4c') { includes('master PR-*') } buildOriginBranch(true) buildOriginBranchWithPR(false) buildOriginPRMerge(true) buildForkPRMerge(true) } orphanedItemStrategy { discardOldItems { daysToKeep(60) numToKeep(10) } } triggers { // check once a day if not otherwise run periodicFolderTrigger { interval('1d') } } } if (repo == 'icu4c') { factory { workflowBranchProjectFactory { scriptPath('icu4c/Jenkinsfile') } } } } }
/* * Copyright (c) 2016-2019 SIL International * This software is licensed under the MIT license (http://opensource.org/licenses/MIT) */ for (repo in ['icu-dotnet', 'icu4c', 'SIL.BuildTasks']) { multibranchPipelineJob(repo) { description """<p>Builds of ${repo}</p> <p>The job is created by the DSL plugin from <i>LibraryJobs.groovy</i> script.</p>""" branchSources { github { id(repo) repoOwner('sillsdev') repository(repo) scanCredentialsId('github-sillsdevgerrit') if (repo != 'icu4c') { includes('master PR-*') } buildOriginBranch(true) buildOriginBranchWithPR(false) buildOriginPRMerge(true) buildForkPRMerge(true) } orphanedItemStrategy { discardOldItems { daysToKeep(60) numToKeep(10) } } triggers { // check once a day if not otherwise run periodicFolderTrigger { interval('1d') } } } + + if (repo == 'icu4c') { + factory { + workflowBranchProjectFactory { + scriptPath('icu4c/Jenkinsfile') + } + } + } } }
8
0.195122
8
0
c85a1cf83761db41ca2b0ebbbc4737e7dcc6a464
bin/build_el7_docker_image.sh
bin/build_el7_docker_image.sh
set -evx SCRIPT_NAME=$(basename "$0") TAG="$1" if [ -z "$TAG" ]; then echo "Usage: ${SCRIPT_NAME} TAG" >&2 exit 1 fi rm -rf tmp/docker mkdir -p tmp/docker/rpms cp dist/onearth-*.el7.*.rpm tmp/docker/rpms/ cp docker/el7/run-onearth.sh tmp/docker/run-onearth.sh echo "FROM $(cat docker/el7/gibs-gdal-image.txt)" > tmp/docker/Dockerfile grep -Ev '^FROM' docker/el7/Dockerfile >> tmp/docker/Dockerfile ( set -evx cd tmp/docker docker build -t "$TAG" . ) rm -rf tmp/docker
set -e if ! ls dist/gibs-gdal-*.el7.*.rpm >/dev/null 2>&1; then echo "No RPMs found in ./dist/" >&2 exit 1 fi SCRIPT_NAME=$(basename "$0") TAG="$1" if [ -z "$TAG" ]; then echo "Usage: ${SCRIPT_NAME} TAG" >&2 exit 1 fi rm -rf tmp/docker mkdir -p tmp/docker/rpms cp dist/onearth-*.el7.*.rpm tmp/docker/rpms/ cp docker/el7/run-onearth.sh tmp/docker/run-onearth.sh echo "FROM $(cat docker/el7/gibs-gdal-image.txt)" > tmp/docker/Dockerfile grep -Ev '^FROM' docker/el7/Dockerfile >> tmp/docker/Dockerfile ( set -e cd tmp/docker docker build -t "$TAG" .) rm -rf tmp/docker
Check for RPMs before building Docker image
Check for RPMs before building Docker image
Shell
apache-2.0
nasa-gibs/onearth,nasa-gibs/onearth,nasa-gibs/onearth,nasa-gibs/onearth,nasa-gibs/onearth,nasa-gibs/onearth
shell
## Code Before: set -evx SCRIPT_NAME=$(basename "$0") TAG="$1" if [ -z "$TAG" ]; then echo "Usage: ${SCRIPT_NAME} TAG" >&2 exit 1 fi rm -rf tmp/docker mkdir -p tmp/docker/rpms cp dist/onearth-*.el7.*.rpm tmp/docker/rpms/ cp docker/el7/run-onearth.sh tmp/docker/run-onearth.sh echo "FROM $(cat docker/el7/gibs-gdal-image.txt)" > tmp/docker/Dockerfile grep -Ev '^FROM' docker/el7/Dockerfile >> tmp/docker/Dockerfile ( set -evx cd tmp/docker docker build -t "$TAG" . ) rm -rf tmp/docker ## Instruction: Check for RPMs before building Docker image ## Code After: set -e if ! ls dist/gibs-gdal-*.el7.*.rpm >/dev/null 2>&1; then echo "No RPMs found in ./dist/" >&2 exit 1 fi SCRIPT_NAME=$(basename "$0") TAG="$1" if [ -z "$TAG" ]; then echo "Usage: ${SCRIPT_NAME} TAG" >&2 exit 1 fi rm -rf tmp/docker mkdir -p tmp/docker/rpms cp dist/onearth-*.el7.*.rpm tmp/docker/rpms/ cp docker/el7/run-onearth.sh tmp/docker/run-onearth.sh echo "FROM $(cat docker/el7/gibs-gdal-image.txt)" > tmp/docker/Dockerfile grep -Ev '^FROM' docker/el7/Dockerfile >> tmp/docker/Dockerfile ( set -e cd tmp/docker docker build -t "$TAG" .) rm -rf tmp/docker
- set -evx ? -- + set -e + + if ! ls dist/gibs-gdal-*.el7.*.rpm >/dev/null 2>&1; then + echo "No RPMs found in ./dist/" >&2 + exit 1 + fi SCRIPT_NAME=$(basename "$0") TAG="$1" if [ -z "$TAG" ]; then echo "Usage: ${SCRIPT_NAME} TAG" >&2 exit 1 fi rm -rf tmp/docker mkdir -p tmp/docker/rpms cp dist/onearth-*.el7.*.rpm tmp/docker/rpms/ cp docker/el7/run-onearth.sh tmp/docker/run-onearth.sh echo "FROM $(cat docker/el7/gibs-gdal-image.txt)" > tmp/docker/Dockerfile grep -Ev '^FROM' docker/el7/Dockerfile >> tmp/docker/Dockerfile ( - set -evx ? -- + set -e cd tmp/docker - docker build -t "$TAG" . + docker build -t "$TAG" .) ? + - ) + rm -rf tmp/docker
13
0.5
9
4
02951bd1b265db750afbbbfc9e8ae58a863cd0b7
lib/pagerduty/clt/config.rb
lib/pagerduty/clt/config.rb
require 'dotenv' require 'hashie' module Pagerduty module CLT class Config include PathHelper DEFAULT_CONFIG_FILE = File.join(ENV['HOME'], '.pagerduty_env') def initialize(config_file = DEFAULT_CONFIG_FILE) @config_file = config_file Dotenv.load(config_file) end def settings @settings ||= Hashie::Mash.new(hash) end def me @me ||= begin path = users_path(settings.user_id) User.new($connection.get(path).user) end end private attr_reader :config_file def hash { account: { name: ENV['PAGERDUTY_ACCOUNT_NAME'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_NAME'], add to #{config_file}"), token: ENV['PAGERDUTY_ACCOUNT_TOKEN'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_TOKEN'], add to #{config_file}") }, user_id: ENV['PAGERDUTY_USER_ID'] || fail("Missing ENV['PAGERDUTY_USER_ID'], add to #{config_file}") } end end end end
require 'dotenv' require 'hashie' module Pagerduty module CLT class Config include PathHelper DEFAULT_CONFIG_FILE = File.join(ENV['HOME'], '.pagerduty_env') def initialize(config_file = DEFAULT_CONFIG_FILE) @config_file = config_file Dotenv.load(config_file) end def settings @settings ||= Hashie::Mash.new(hash) end def me @me ||= User.find(settings.user_id) end private attr_reader :config_file def hash { account: { name: ENV['PAGERDUTY_ACCOUNT_NAME'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_NAME'], add to #{config_file}"), token: ENV['PAGERDUTY_ACCOUNT_TOKEN'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_TOKEN'], add to #{config_file}") }, user_id: ENV['PAGERDUTY_USER_ID'] || fail("Missing ENV['PAGERDUTY_USER_ID'], add to #{config_file}") } end end end end
Use new User.find in Config
Use new User.find in Config
Ruby
mit
ashmckenzie/pagerduty-clt
ruby
## Code Before: require 'dotenv' require 'hashie' module Pagerduty module CLT class Config include PathHelper DEFAULT_CONFIG_FILE = File.join(ENV['HOME'], '.pagerduty_env') def initialize(config_file = DEFAULT_CONFIG_FILE) @config_file = config_file Dotenv.load(config_file) end def settings @settings ||= Hashie::Mash.new(hash) end def me @me ||= begin path = users_path(settings.user_id) User.new($connection.get(path).user) end end private attr_reader :config_file def hash { account: { name: ENV['PAGERDUTY_ACCOUNT_NAME'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_NAME'], add to #{config_file}"), token: ENV['PAGERDUTY_ACCOUNT_TOKEN'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_TOKEN'], add to #{config_file}") }, user_id: ENV['PAGERDUTY_USER_ID'] || fail("Missing ENV['PAGERDUTY_USER_ID'], add to #{config_file}") } end end end end ## Instruction: Use new User.find in Config ## Code After: require 'dotenv' require 'hashie' module Pagerduty module CLT class Config include PathHelper DEFAULT_CONFIG_FILE = File.join(ENV['HOME'], '.pagerduty_env') def initialize(config_file = DEFAULT_CONFIG_FILE) @config_file = config_file Dotenv.load(config_file) end def settings @settings ||= Hashie::Mash.new(hash) end def me @me ||= User.find(settings.user_id) end private attr_reader :config_file def hash { account: { name: ENV['PAGERDUTY_ACCOUNT_NAME'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_NAME'], add to #{config_file}"), token: ENV['PAGERDUTY_ACCOUNT_TOKEN'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_TOKEN'], add to #{config_file}") }, user_id: ENV['PAGERDUTY_USER_ID'] || fail("Missing ENV['PAGERDUTY_USER_ID'], add to #{config_file}") } end end end end
require 'dotenv' require 'hashie' module Pagerduty module CLT class Config include PathHelper DEFAULT_CONFIG_FILE = File.join(ENV['HOME'], '.pagerduty_env') def initialize(config_file = DEFAULT_CONFIG_FILE) @config_file = config_file Dotenv.load(config_file) end def settings @settings ||= Hashie::Mash.new(hash) end def me + @me ||= User.find(settings.user_id) - @me ||= begin - path = users_path(settings.user_id) - User.new($connection.get(path).user) - end end private attr_reader :config_file def hash { account: { name: ENV['PAGERDUTY_ACCOUNT_NAME'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_NAME'], add to #{config_file}"), token: ENV['PAGERDUTY_ACCOUNT_TOKEN'] || fail("Missing ENV['PAGERDUTY_ACCOUNT_TOKEN'], add to #{config_file}") }, user_id: ENV['PAGERDUTY_USER_ID'] || fail("Missing ENV['PAGERDUTY_USER_ID'], add to #{config_file}") } end end end end
5
0.116279
1
4
58cef8ab8c056325f53ecf65c362a08a9400579b
source/templates/subject.html.haml
source/templates/subject.html.haml
- content_for(:subject_title, subject_title) %h1= subject_title %ol.video-list{ start: pages.values.first.metadata.number } - pages.each do |page, page_data| %li = link_to "/#{subject_url}/#{page_data.metadata.url}" do = image_tag "http://img.youtube.com/vi/#{page_data.video_url}/0.jpg", {alt: "#{page_data.metadata.page_title} video thumbnail" } %small.nice-number= page_data.metadata.number %strong= page_data.metadata.page_title
- content_for(:subject_title, subject_title) %h1= subject_title %ol.video-list{ start: pages.values.first.metadata.number } - pages.each do |page, page_data| %li = link_to "/#{subject_url}/#{page_data.metadata.url}" do = image_tag "http://img.youtube.com/vi/#{page_data.video_url}/0.jpg", {alt: "#{page_data.metadata.page_title} video thumbnail" } - if page_data.metadata.number %small.nice-number= page_data.metadata.number %strong= page_data.metadata.page_title
Make video number optional in subject pages
Make video number optional in subject pages
Haml
mit
DrummerHead/aprend.io,DrummerHead/aprend.io,DrummerHead/aprend.io
haml
## Code Before: - content_for(:subject_title, subject_title) %h1= subject_title %ol.video-list{ start: pages.values.first.metadata.number } - pages.each do |page, page_data| %li = link_to "/#{subject_url}/#{page_data.metadata.url}" do = image_tag "http://img.youtube.com/vi/#{page_data.video_url}/0.jpg", {alt: "#{page_data.metadata.page_title} video thumbnail" } %small.nice-number= page_data.metadata.number %strong= page_data.metadata.page_title ## Instruction: Make video number optional in subject pages ## Code After: - content_for(:subject_title, subject_title) %h1= subject_title %ol.video-list{ start: pages.values.first.metadata.number } - pages.each do |page, page_data| %li = link_to "/#{subject_url}/#{page_data.metadata.url}" do = image_tag "http://img.youtube.com/vi/#{page_data.video_url}/0.jpg", {alt: "#{page_data.metadata.page_title} video thumbnail" } - if page_data.metadata.number %small.nice-number= page_data.metadata.number %strong= page_data.metadata.page_title
- content_for(:subject_title, subject_title) %h1= subject_title %ol.video-list{ start: pages.values.first.metadata.number } - pages.each do |page, page_data| %li = link_to "/#{subject_url}/#{page_data.metadata.url}" do = image_tag "http://img.youtube.com/vi/#{page_data.video_url}/0.jpg", {alt: "#{page_data.metadata.page_title} video thumbnail" } + - if page_data.metadata.number - %small.nice-number= page_data.metadata.number + %small.nice-number= page_data.metadata.number ? ++ %strong= page_data.metadata.page_title
3
0.272727
2
1
8f993412a0110085fee10331daecfb3d36973518
__init__.py
__init__.py
import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = supybot.authors.unknown # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = '' # 'http://supybot.com/Members/yourname/Scores/download' import config import plugin reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = supybot.authors.unknown # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = '' # 'http://supybot.com/Members/yourname/Scores/download' import config import plugin reload(config) reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
Add reload to init for config
Add reload to init for config
Python
mit
reticulatingspline/Scores,cottongin/Scores
python
## Code Before: import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = supybot.authors.unknown # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = '' # 'http://supybot.com/Members/yourname/Scores/download' import config import plugin reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79: ## Instruction: Add reload to init for config ## Code After: import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = supybot.authors.unknown # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = '' # 'http://supybot.com/Members/yourname/Scores/download' import config import plugin reload(config) reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. __version__ = "" # XXX Replace this with an appropriate author or supybot.Author instance. __author__ = supybot.authors.unknown # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} # This is a url where the most recent plugin package can be downloaded. __url__ = '' # 'http://supybot.com/Members/yourname/Scores/download' import config import plugin + reload(config) reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
1
0.03125
1
0
7beaff19fd76c1c9b93f121c27d4ec5da54102b2
packages/re/releaser.yaml
packages/re/releaser.yaml
homepage: '' changelog-type: markdown hash: 5d13c74cc134ed6698b6a1602c27f417c100206a55a5c3dc95ad46c299fae2a3 test-bench-deps: {} maintainer: [email protected] synopsis: Automation of Haskell package release process changelog: | # Revision history for releaser ## 0.1.0.0 -- YYYY-mm-dd * First version. Released on an unsuspecting world. basic-deps: regex-pcre: -any Cabal: -any base: ! '>=4.7 && <5' releaser: -any process: -any pretty-terminal: -any all-versions: - 0.1.0.0 author: Domen Kozar latest: 0.1.0.0 description-type: haddock description: '' license-name: Apache-2.0
homepage: '' changelog-type: markdown hash: 7dcbbcdf6e86cf9fc2d41627fd029d6fa7e2b5706c95fc5f9b355d12d05dc3ce test-bench-deps: {} maintainer: [email protected] synopsis: Automation of Haskell package release process changelog: "# Revision history for releaser\n\n## 0.2.0.0 -- 2019-09-16\n\n* Write cabal versions using a regex\n \n Unfortunately, cabal api can't operate on Cabal AST,\n so we just resort to good old perl methods.\n\n* If any of the primitives fail, wait to retry.\n\n* Avoid checking out git branch since it's confusing.\n\n## 0.1.0.0 -- 2019-09-09\n\n* First version. Released on an unsuspecting world.\n" basic-deps: regex-tdfa-text: -any Cabal: -any base: ! '>=4.7 && <5' text: -any releaser: -any process: -any pretty-terminal: -any regex-tdfa: -any all-versions: - 0.1.0.0 - 0.2.0.0 author: Domen Kozar latest: 0.2.0.0 description-type: haddock description: '' license-name: Apache-2.0
Update from Hackage at 2019-09-16T15:16:50Z
Update from Hackage at 2019-09-16T15:16:50Z
YAML
mit
commercialhaskell/all-cabal-metadata
yaml
## Code Before: homepage: '' changelog-type: markdown hash: 5d13c74cc134ed6698b6a1602c27f417c100206a55a5c3dc95ad46c299fae2a3 test-bench-deps: {} maintainer: [email protected] synopsis: Automation of Haskell package release process changelog: | # Revision history for releaser ## 0.1.0.0 -- YYYY-mm-dd * First version. Released on an unsuspecting world. basic-deps: regex-pcre: -any Cabal: -any base: ! '>=4.7 && <5' releaser: -any process: -any pretty-terminal: -any all-versions: - 0.1.0.0 author: Domen Kozar latest: 0.1.0.0 description-type: haddock description: '' license-name: Apache-2.0 ## Instruction: Update from Hackage at 2019-09-16T15:16:50Z ## Code After: homepage: '' changelog-type: markdown hash: 7dcbbcdf6e86cf9fc2d41627fd029d6fa7e2b5706c95fc5f9b355d12d05dc3ce test-bench-deps: {} maintainer: [email protected] synopsis: Automation of Haskell package release process changelog: "# Revision history for releaser\n\n## 0.2.0.0 -- 2019-09-16\n\n* Write cabal versions using a regex\n \n Unfortunately, cabal api can't operate on Cabal AST,\n so we just resort to good old perl methods.\n\n* If any of the primitives fail, wait to retry.\n\n* Avoid checking out git branch since it's confusing.\n\n## 0.1.0.0 -- 2019-09-09\n\n* First version. Released on an unsuspecting world.\n" basic-deps: regex-tdfa-text: -any Cabal: -any base: ! '>=4.7 && <5' text: -any releaser: -any process: -any pretty-terminal: -any regex-tdfa: -any all-versions: - 0.1.0.0 - 0.2.0.0 author: Domen Kozar latest: 0.2.0.0 description-type: haddock description: '' license-name: Apache-2.0
homepage: '' changelog-type: markdown - hash: 5d13c74cc134ed6698b6a1602c27f417c100206a55a5c3dc95ad46c299fae2a3 + hash: 7dcbbcdf6e86cf9fc2d41627fd029d6fa7e2b5706c95fc5f9b355d12d05dc3ce test-bench-deps: {} maintainer: [email protected] synopsis: Automation of Haskell package release process + changelog: "# Revision history for releaser\n\n## 0.2.0.0 -- 2019-09-16\n\n* Write + cabal versions using a regex\n \n Unfortunately, cabal api can't operate on Cabal + AST,\n so we just resort to good old perl methods.\n\n* If any of the primitives + fail, wait to retry.\n\n* Avoid checking out git branch since it's confusing.\n\n## - changelog: | - # Revision history for releaser - - ## 0.1.0.0 -- YYYY-mm-dd - - * First version. Released on an unsuspecting world. + 0.1.0.0 -- 2019-09-09\n\n* First version. Released on an unsuspecting world.\n" ? +++++++++++++++++++++++++ +++ basic-deps: - regex-pcre: -any + regex-tdfa-text: -any Cabal: -any base: ! '>=4.7 && <5' + text: -any releaser: -any process: -any pretty-terminal: -any + regex-tdfa: -any all-versions: - 0.1.0.0 + - 0.2.0.0 author: Domen Kozar - latest: 0.1.0.0 ? ^ + latest: 0.2.0.0 ? ^ description-type: haddock description: '' license-name: Apache-2.0
20
0.769231
11
9
a6fd829669aa8c3c42afd0750e8b52b95d716dc8
scripts-available/CDB_UserTables.sql
scripts-available/CDB_UserTables.sql
-- Function returning list of cartodb user tables -- -- The optional argument restricts the result to tables -- of the specified access type. -- -- Currently accepted permissions are: 'public', 'private' or 'all' -- CREATE OR REPLACE FUNCTION CDB_UserTables(perm text DEFAULT 'all') RETURNS SETOF name AS $$ SELECT c.relname FROM pg_class c JOIN pg_roles r ON r.oid = c.relowner JOIN pg_namespace n ON n.oid = c.relnamespace WHERE r.rolname = current_user AND c.relkind = 'r' AND c.relname NOT IN ('cdb_tablemetadata', 'spatial_ref_sys') AND n.nspname NOT IN ('pg_catalog', 'information_schema') AND CASE WHEN perm = 'public' THEN has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'private' THEN has_table_privilege(c.oid, 'SELECT') AND NOT has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'all' THEN has_table_privilege(c.oid, 'SELECT') ELSE false END; $$ LANGUAGE 'sql'; -- This is to migrate from pre-0.2.0 version -- See http://github.com/CartoDB/cartodb-postgresql/issues/36 GRANT EXECUTE ON FUNCTION CDB_UserTables(text) TO public;
-- Function returning list of cartodb user tables -- -- The optional argument restricts the result to tables -- of the specified access type. -- -- Currently accepted permissions are: 'public', 'private' or 'all' -- DROP FUNCTION IF EXISTS cdb_usertables(text); CREATE OR REPLACE FUNCTION CDB_UserTables(perm text DEFAULT 'all') RETURNS SETOF name AS $$ SELECT c.relname FROM pg_class c JOIN pg_roles r ON r.oid = c.relowner JOIN pg_namespace n ON n.oid = c.relnamespace WHERE r.rolname = current_user AND c.relkind = 'r' AND c.relname NOT IN ('cdb_tablemetadata', 'spatial_ref_sys') AND n.nspname NOT IN ('pg_catalog', 'information_schema') AND CASE WHEN perm = 'public' THEN has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'private' THEN has_table_privilege(c.oid, 'SELECT') AND NOT has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'all' THEN has_table_privilege(c.oid, 'SELECT') ELSE false END; $$ LANGUAGE 'sql'; -- This is to migrate from pre-0.2.0 version -- See http://github.com/CartoDB/cartodb-postgresql/issues/36 GRANT EXECUTE ON FUNCTION CDB_UserTables(text) TO public;
Fix for upgrade issue from 0.7.4 to 0.8.0
Fix for upgrade issue from 0.7.4 to 0.8.0 This fixes the following problem found during testing: ``` ALTER EXTENSION cartodb UPDATE TO '0.8.0'; ERROR: cannot change return type of existing function HINT: Use DROP FUNCTION cdb_usertables(text) first. ```
SQL
bsd-3-clause
CartoDB/cartodb-postgresql,CartoDB/cartodb-postgresql
sql
## Code Before: -- Function returning list of cartodb user tables -- -- The optional argument restricts the result to tables -- of the specified access type. -- -- Currently accepted permissions are: 'public', 'private' or 'all' -- CREATE OR REPLACE FUNCTION CDB_UserTables(perm text DEFAULT 'all') RETURNS SETOF name AS $$ SELECT c.relname FROM pg_class c JOIN pg_roles r ON r.oid = c.relowner JOIN pg_namespace n ON n.oid = c.relnamespace WHERE r.rolname = current_user AND c.relkind = 'r' AND c.relname NOT IN ('cdb_tablemetadata', 'spatial_ref_sys') AND n.nspname NOT IN ('pg_catalog', 'information_schema') AND CASE WHEN perm = 'public' THEN has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'private' THEN has_table_privilege(c.oid, 'SELECT') AND NOT has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'all' THEN has_table_privilege(c.oid, 'SELECT') ELSE false END; $$ LANGUAGE 'sql'; -- This is to migrate from pre-0.2.0 version -- See http://github.com/CartoDB/cartodb-postgresql/issues/36 GRANT EXECUTE ON FUNCTION CDB_UserTables(text) TO public; ## Instruction: Fix for upgrade issue from 0.7.4 to 0.8.0 This fixes the following problem found during testing: ``` ALTER EXTENSION cartodb UPDATE TO '0.8.0'; ERROR: cannot change return type of existing function HINT: Use DROP FUNCTION cdb_usertables(text) first. ``` ## Code After: -- Function returning list of cartodb user tables -- -- The optional argument restricts the result to tables -- of the specified access type. -- -- Currently accepted permissions are: 'public', 'private' or 'all' -- DROP FUNCTION IF EXISTS cdb_usertables(text); CREATE OR REPLACE FUNCTION CDB_UserTables(perm text DEFAULT 'all') RETURNS SETOF name AS $$ SELECT c.relname FROM pg_class c JOIN pg_roles r ON r.oid = c.relowner JOIN pg_namespace n ON n.oid = c.relnamespace WHERE r.rolname = current_user AND c.relkind = 'r' AND c.relname NOT IN ('cdb_tablemetadata', 'spatial_ref_sys') AND n.nspname NOT IN ('pg_catalog', 'information_schema') AND CASE WHEN perm = 'public' THEN has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'private' THEN has_table_privilege(c.oid, 'SELECT') AND NOT has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'all' THEN has_table_privilege(c.oid, 'SELECT') ELSE false END; $$ LANGUAGE 'sql'; -- This is to migrate from pre-0.2.0 version -- See http://github.com/CartoDB/cartodb-postgresql/issues/36 GRANT EXECUTE ON FUNCTION CDB_UserTables(text) TO public;
-- Function returning list of cartodb user tables -- -- The optional argument restricts the result to tables -- of the specified access type. -- -- Currently accepted permissions are: 'public', 'private' or 'all' -- + DROP FUNCTION IF EXISTS cdb_usertables(text); CREATE OR REPLACE FUNCTION CDB_UserTables(perm text DEFAULT 'all') RETURNS SETOF name AS $$ SELECT c.relname FROM pg_class c JOIN pg_roles r ON r.oid = c.relowner JOIN pg_namespace n ON n.oid = c.relnamespace WHERE r.rolname = current_user AND c.relkind = 'r' AND c.relname NOT IN ('cdb_tablemetadata', 'spatial_ref_sys') AND n.nspname NOT IN ('pg_catalog', 'information_schema') AND CASE WHEN perm = 'public' THEN has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'private' THEN has_table_privilege(c.oid, 'SELECT') AND NOT has_table_privilege('public', c.oid, 'SELECT') WHEN perm = 'all' THEN has_table_privilege(c.oid, 'SELECT') ELSE false END; $$ LANGUAGE 'sql'; -- This is to migrate from pre-0.2.0 version -- See http://github.com/CartoDB/cartodb-postgresql/issues/36 GRANT EXECUTE ON FUNCTION CDB_UserTables(text) TO public;
1
0.033333
1
0
a5cc0d6467bf46563f1cf56586af1227273353b0
TTOpenInAppActivity.podspec
TTOpenInAppActivity.podspec
Pod::Spec.new do |s| s.name = 'TTOpenInAppActivity' s.version = '1.0' s.license = 'MIT' s.summary = 'TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController.' s.description = <<-DESC TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController. TTOpenInAppActivity uses an UIDocumentInteractionController to present all Apps than can handle the document specified by the activity item. Supported item types are NSURL instances that point to local files and UIImage instances. DESC s.homepage = 'https://github.com/honkmaster/TTOpenInAppActivity' s.authors = { 'Tobias Tiemerding' => 'http://www.tiemerding.com' } s.source = { :git => 'https://github.com/tomco/TTOpenInAppActivity.git', :commit => '65b8fb0cbafe92073a883f8706b0d725fcd58d01' } s.source_files = 'TTOpenInAppActivity/*.{h,m}' s.resources = 'TTOpenInAppActivity/*.png' s.frameworks = 'UIKit', 'MobileCoreServices' s.requires_arc = true s.platform = :ios, '6.0' end
Pod::Spec.new do |s| s.name = 'TTOpenInAppActivity' s.version = '1.0' s.license = { :type => 'MIT', :file => 'README.md' } s.summary = 'TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController.' s.description = <<-DESC TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController. TTOpenInAppActivity uses an UIDocumentInteractionController to present all Apps than can handle the document specified by the activity item. Supported item types are NSURL instances that point to local files and UIImage instances. DESC s.homepage = 'https://github.com/honkmaster/TTOpenInAppActivity' s.authors = { 'Tobias Tiemerding' => 'http://www.tiemerding.com' } s.source = { :git => 'https://github.com/honkmaster/TTOpenInAppActivity.git', :tag => '1.0' } s.source_files = 'TTOpenInAppActivity/*.{h,m}' s.resources = 'TTOpenInAppActivity/*.png' s.frameworks = 'UIKit', 'MobileCoreServices' s.requires_arc = true s.platform = :ios, '6.0' end
Fix podspec for version 1.0
Fix podspec for version 1.0
Ruby
mit
jum/TTOpenInAppActivity,honkmaster/TTOpenInAppActivity,WolfgangD/TTOpenInAppActivity
ruby
## Code Before: Pod::Spec.new do |s| s.name = 'TTOpenInAppActivity' s.version = '1.0' s.license = 'MIT' s.summary = 'TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController.' s.description = <<-DESC TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController. TTOpenInAppActivity uses an UIDocumentInteractionController to present all Apps than can handle the document specified by the activity item. Supported item types are NSURL instances that point to local files and UIImage instances. DESC s.homepage = 'https://github.com/honkmaster/TTOpenInAppActivity' s.authors = { 'Tobias Tiemerding' => 'http://www.tiemerding.com' } s.source = { :git => 'https://github.com/tomco/TTOpenInAppActivity.git', :commit => '65b8fb0cbafe92073a883f8706b0d725fcd58d01' } s.source_files = 'TTOpenInAppActivity/*.{h,m}' s.resources = 'TTOpenInAppActivity/*.png' s.frameworks = 'UIKit', 'MobileCoreServices' s.requires_arc = true s.platform = :ios, '6.0' end ## Instruction: Fix podspec for version 1.0 ## Code After: Pod::Spec.new do |s| s.name = 'TTOpenInAppActivity' s.version = '1.0' s.license = { :type => 'MIT', :file => 'README.md' } s.summary = 'TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController.' s.description = <<-DESC TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController. TTOpenInAppActivity uses an UIDocumentInteractionController to present all Apps than can handle the document specified by the activity item. Supported item types are NSURL instances that point to local files and UIImage instances. DESC s.homepage = 'https://github.com/honkmaster/TTOpenInAppActivity' s.authors = { 'Tobias Tiemerding' => 'http://www.tiemerding.com' } s.source = { :git => 'https://github.com/honkmaster/TTOpenInAppActivity.git', :tag => '1.0' } s.source_files = 'TTOpenInAppActivity/*.{h,m}' s.resources = 'TTOpenInAppActivity/*.png' s.frameworks = 'UIKit', 'MobileCoreServices' s.requires_arc = true s.platform = :ios, '6.0' end
Pod::Spec.new do |s| s.name = 'TTOpenInAppActivity' s.version = '1.0' - s.license = 'MIT' + s.license = { :type => 'MIT', :file => 'README.md' } s.summary = 'TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController.' s.description = <<-DESC TTOpenInAppActivity is a UIActivity subclass that provides an "Open In ..." action to a UIActivityViewController. TTOpenInAppActivity uses an UIDocumentInteractionController to present all Apps than can handle the document specified by the activity item. Supported item types are NSURL instances that point to local files and UIImage instances. DESC s.homepage = 'https://github.com/honkmaster/TTOpenInAppActivity' s.authors = { 'Tobias Tiemerding' => 'http://www.tiemerding.com' } - s.source = { :git => 'https://github.com/tomco/TTOpenInAppActivity.git', :commit => '65b8fb0cbafe92073a883f8706b0d725fcd58d01' } + s.source = { :git => 'https://github.com/honkmaster/TTOpenInAppActivity.git', :tag => '1.0' } s.source_files = 'TTOpenInAppActivity/*.{h,m}' s.resources = 'TTOpenInAppActivity/*.png' s.frameworks = 'UIKit', 'MobileCoreServices' s.requires_arc = true s.platform = :ios, '6.0' end
4
0.210526
2
2
b8ec971aac7b60fe701cae1daa09f6702890be4a
Sources/Compatibility.swift
Sources/Compatibility.swift
// // Compatibility.swift // BonMot // // Created by Brian King on 8/24/16. // Copyright © 2016 Raizlabs. All rights reserved. // #if os(OSX) import AppKit #else import UIKit #endif /// This file declares extensions to system types to provide a compatible API /// between Swift iOS, macOS, watchOS, and tvOS. #if os(OSX) #else public extension NSParagraphStyle { typealias LineBreakMode = NSLineBreakMode } public extension NSLayoutConstraint { typealias Attribute = NSLayoutAttribute typealias Relation = NSLayoutRelation } #endif
// // Compatibility.swift // BonMot // // Created by Brian King on 8/24/16. // Copyright © 2016 Raizlabs. All rights reserved. // #if os(OSX) import AppKit #else import UIKit #endif /// This file declares extensions to system types to provide a compatible API /// between Swift iOS, macOS, watchOS, and tvOS. #if os(OSX) #else public extension NSParagraphStyle { typealias LineBreakMode = NSLineBreakMode } #if os(iOS) || os(tvOS) public extension NSLayoutConstraint { typealias Attribute = NSLayoutAttribute typealias Relation = NSLayoutRelation } #endif #endif
Extend NSLayoutConstraint only on platforms where it exists.
Extend NSLayoutConstraint only on platforms where it exists.
Swift
mit
Raizlabs/BonMot,Raizlabs/BonMot,Raizlabs/BonMot
swift
## Code Before: // // Compatibility.swift // BonMot // // Created by Brian King on 8/24/16. // Copyright © 2016 Raizlabs. All rights reserved. // #if os(OSX) import AppKit #else import UIKit #endif /// This file declares extensions to system types to provide a compatible API /// between Swift iOS, macOS, watchOS, and tvOS. #if os(OSX) #else public extension NSParagraphStyle { typealias LineBreakMode = NSLineBreakMode } public extension NSLayoutConstraint { typealias Attribute = NSLayoutAttribute typealias Relation = NSLayoutRelation } #endif ## Instruction: Extend NSLayoutConstraint only on platforms where it exists. ## Code After: // // Compatibility.swift // BonMot // // Created by Brian King on 8/24/16. // Copyright © 2016 Raizlabs. All rights reserved. // #if os(OSX) import AppKit #else import UIKit #endif /// This file declares extensions to system types to provide a compatible API /// between Swift iOS, macOS, watchOS, and tvOS. #if os(OSX) #else public extension NSParagraphStyle { typealias LineBreakMode = NSLineBreakMode } #if os(iOS) || os(tvOS) public extension NSLayoutConstraint { typealias Attribute = NSLayoutAttribute typealias Relation = NSLayoutRelation } #endif #endif
// // Compatibility.swift // BonMot // // Created by Brian King on 8/24/16. // Copyright © 2016 Raizlabs. All rights reserved. // #if os(OSX) import AppKit #else import UIKit #endif /// This file declares extensions to system types to provide a compatible API /// between Swift iOS, macOS, watchOS, and tvOS. #if os(OSX) #else public extension NSParagraphStyle { typealias LineBreakMode = NSLineBreakMode } + #if os(iOS) || os(tvOS) - public extension NSLayoutConstraint { + public extension NSLayoutConstraint { ? ++++ - typealias Attribute = NSLayoutAttribute + typealias Attribute = NSLayoutAttribute ? ++++ - typealias Relation = NSLayoutRelation + typealias Relation = NSLayoutRelation ? ++++ - } + } + #endif #endif
10
0.322581
6
4
a61ba61a3fdd2db4555a1b524f8ea1fcf16623b7
overcloud-resource-registry-puppet.yaml
overcloud-resource-registry-puppet.yaml
resource_registry: OS::TripleO::BlockStorage: puppet/cinder-storage-puppet.yaml OS::TripleO::Compute: puppet/compute-puppet.yaml OS::TripleO::SoftwareDeployment: OS::Heat::StructuredDeployment OS::TripleO::Controller: puppet/controller-puppet.yaml OS::TripleO::ObjectStorage: puppet/swift-storage-puppet.yaml OS::TripleO::Net::SoftwareConfig: net-config-bridge.yaml OS::TripleO::CephStorage: puppet/ceph-storage-puppet.yaml OS::TripleO::ControllerPostDeployment: puppet/controller-post-puppet.yaml OS::TripleO::ComputePostDeployment: puppet/compute-post-puppet.yaml OS::TripleO::ObjectStoragePostDeployment: puppet/swift-storage-post.yaml OS::TripleO::BlockStoragePostDeployment: puppet/cinder-storage-post.yaml OS::TripleO::CephStoragePostDeployment: puppet/ceph-storage-post-puppet.yaml OS::TripleO::SwiftDevicesAndProxy::SoftwareConfig: puppet/swift-devices-and-proxy-config.yaml OS::TripleO::CephClusterConfig::SoftwareConfig: puppet/ceph-cluster-config.yaml OS::TripleO::AllNodes::SoftwareConfig: puppet/all-nodes-config.yaml OS::TripleO::BootstrapNode::SoftwareConfig: puppet/bootstrap-config.yaml # NOTE(dprince): requires a new release of python-heatclient #default_parameters: #EnablePackageInstall: false
resource_registry: OS::TripleO::BlockStorage: puppet/cinder-storage-puppet.yaml OS::TripleO::Compute: puppet/compute-puppet.yaml OS::TripleO::SoftwareDeployment: OS::Heat::StructuredDeployment OS::TripleO::Controller: puppet/controller-puppet.yaml OS::TripleO::ObjectStorage: puppet/swift-storage-puppet.yaml OS::TripleO::Net::SoftwareConfig: net-config-bridge.yaml OS::TripleO::CephStorage: puppet/ceph-storage-puppet.yaml OS::TripleO::ControllerPostDeployment: puppet/controller-post-puppet.yaml OS::TripleO::ComputePostDeployment: puppet/compute-post-puppet.yaml OS::TripleO::ObjectStoragePostDeployment: puppet/swift-storage-post.yaml OS::TripleO::BlockStoragePostDeployment: puppet/cinder-storage-post.yaml OS::TripleO::CephStoragePostDeployment: puppet/ceph-storage-post-puppet.yaml OS::TripleO::SwiftDevicesAndProxy::SoftwareConfig: puppet/swift-devices-and-proxy-config.yaml OS::TripleO::CephClusterConfig::SoftwareConfig: puppet/ceph-cluster-config.yaml OS::TripleO::AllNodes::SoftwareConfig: puppet/all-nodes-config.yaml OS::TripleO::BootstrapNode::SoftwareConfig: puppet/bootstrap-config.yaml parameter_defaults: EnablePackageInstall: false
Correct the parameter_defaults section name.
Correct the parameter_defaults section name. Also, we can actually uncomment this now that heatclient 0.3 has been released. Change-Id: I0b4ce13f1426c364ea7921596022e5165e025fdb
YAML
apache-2.0
trozet/opnfv-tht,trozet/opnfv-tht,wyg3958/tripleo-heat-templates,bcrochet/tripleo-heat-templates,rdo-management/tripleo-heat-templates,bcrochet/tripleo-heat-templates,trozet/opnfv-tht,dprince/tripleo-heat-templates,openstack/tripleo-heat-templates,bcrochet/tripleo-heat-templates,wyg3958/tripleo-heat-templates,openstack/tripleo-heat-templates,dprince/tripleo-heat-templates,rdo-management/tripleo-heat-templates,trozet/tripleo-heat-templates,trozet/tripleo-heat-templates
yaml
## Code Before: resource_registry: OS::TripleO::BlockStorage: puppet/cinder-storage-puppet.yaml OS::TripleO::Compute: puppet/compute-puppet.yaml OS::TripleO::SoftwareDeployment: OS::Heat::StructuredDeployment OS::TripleO::Controller: puppet/controller-puppet.yaml OS::TripleO::ObjectStorage: puppet/swift-storage-puppet.yaml OS::TripleO::Net::SoftwareConfig: net-config-bridge.yaml OS::TripleO::CephStorage: puppet/ceph-storage-puppet.yaml OS::TripleO::ControllerPostDeployment: puppet/controller-post-puppet.yaml OS::TripleO::ComputePostDeployment: puppet/compute-post-puppet.yaml OS::TripleO::ObjectStoragePostDeployment: puppet/swift-storage-post.yaml OS::TripleO::BlockStoragePostDeployment: puppet/cinder-storage-post.yaml OS::TripleO::CephStoragePostDeployment: puppet/ceph-storage-post-puppet.yaml OS::TripleO::SwiftDevicesAndProxy::SoftwareConfig: puppet/swift-devices-and-proxy-config.yaml OS::TripleO::CephClusterConfig::SoftwareConfig: puppet/ceph-cluster-config.yaml OS::TripleO::AllNodes::SoftwareConfig: puppet/all-nodes-config.yaml OS::TripleO::BootstrapNode::SoftwareConfig: puppet/bootstrap-config.yaml # NOTE(dprince): requires a new release of python-heatclient #default_parameters: #EnablePackageInstall: false ## Instruction: Correct the parameter_defaults section name. Also, we can actually uncomment this now that heatclient 0.3 has been released. Change-Id: I0b4ce13f1426c364ea7921596022e5165e025fdb ## Code After: resource_registry: OS::TripleO::BlockStorage: puppet/cinder-storage-puppet.yaml OS::TripleO::Compute: puppet/compute-puppet.yaml OS::TripleO::SoftwareDeployment: OS::Heat::StructuredDeployment OS::TripleO::Controller: puppet/controller-puppet.yaml OS::TripleO::ObjectStorage: puppet/swift-storage-puppet.yaml OS::TripleO::Net::SoftwareConfig: net-config-bridge.yaml OS::TripleO::CephStorage: puppet/ceph-storage-puppet.yaml OS::TripleO::ControllerPostDeployment: puppet/controller-post-puppet.yaml OS::TripleO::ComputePostDeployment: puppet/compute-post-puppet.yaml OS::TripleO::ObjectStoragePostDeployment: puppet/swift-storage-post.yaml OS::TripleO::BlockStoragePostDeployment: puppet/cinder-storage-post.yaml OS::TripleO::CephStoragePostDeployment: puppet/ceph-storage-post-puppet.yaml OS::TripleO::SwiftDevicesAndProxy::SoftwareConfig: puppet/swift-devices-and-proxy-config.yaml OS::TripleO::CephClusterConfig::SoftwareConfig: puppet/ceph-cluster-config.yaml OS::TripleO::AllNodes::SoftwareConfig: puppet/all-nodes-config.yaml OS::TripleO::BootstrapNode::SoftwareConfig: puppet/bootstrap-config.yaml parameter_defaults: EnablePackageInstall: false
resource_registry: OS::TripleO::BlockStorage: puppet/cinder-storage-puppet.yaml OS::TripleO::Compute: puppet/compute-puppet.yaml OS::TripleO::SoftwareDeployment: OS::Heat::StructuredDeployment OS::TripleO::Controller: puppet/controller-puppet.yaml OS::TripleO::ObjectStorage: puppet/swift-storage-puppet.yaml OS::TripleO::Net::SoftwareConfig: net-config-bridge.yaml OS::TripleO::CephStorage: puppet/ceph-storage-puppet.yaml OS::TripleO::ControllerPostDeployment: puppet/controller-post-puppet.yaml OS::TripleO::ComputePostDeployment: puppet/compute-post-puppet.yaml OS::TripleO::ObjectStoragePostDeployment: puppet/swift-storage-post.yaml OS::TripleO::BlockStoragePostDeployment: puppet/cinder-storage-post.yaml OS::TripleO::CephStoragePostDeployment: puppet/ceph-storage-post-puppet.yaml OS::TripleO::SwiftDevicesAndProxy::SoftwareConfig: puppet/swift-devices-and-proxy-config.yaml OS::TripleO::CephClusterConfig::SoftwareConfig: puppet/ceph-cluster-config.yaml OS::TripleO::AllNodes::SoftwareConfig: puppet/all-nodes-config.yaml OS::TripleO::BootstrapNode::SoftwareConfig: puppet/bootstrap-config.yaml + parameter_defaults: - # NOTE(dprince): requires a new release of python-heatclient - #default_parameters: - #EnablePackageInstall: false ? - + EnablePackageInstall: false
5
0.238095
2
3
0f5a632d625d65f4edf9e31efa75708a79eee16c
CaseStudies/glass/Implementations/Python_Simplified/Implementation/readTable.py
CaseStudies/glass/Implementations/Python_Simplified/Implementation/readTable.py
import numpy as np def read_num_col(filename): with open(filename, 'rb') as f: num_col = [f.readline()] num_col = np.genfromtxt(num_col, delimiter=',', dtype=str) num_col = num_col[1::2].astype(float) return num_col def read_array1(filename, length): array1 = np.loadtxt(filename, delimiter=',', usecols=range(0, 2*length, 2), skiprows=1) return array1 def read_array2(filename, length): array2 = np.loadtxt(filename, delimiter=',', usecols=range(1, 2*length, 2), skiprows=1) return array2
def read_num_col(filename): with open(filename, "r") as f: line = f.readline() z_array = line.split(",")[1::2] z_array = [float(i) for i in z_array] return z_array def read_array1(filename, length): with open(filename, "r") as f: lines = f.readlines() lines = lines[1:] x_array = [line.split(",")[0::2] for line in lines] for i in range(len(x_array)): x_array[i] = [float(j) for j in x_array[i]] return x_array def read_array2(filename, length): with open(filename, "r") as f: lines = f.readlines() lines = lines[1:] y_array = [line.split(",")[1::2] for line in lines] for i in range(len(y_array)): y_array[i] = [float(j) for j in y_array[i]] return y_array
Remove numpy dependency from glassbr python code
Remove numpy dependency from glassbr python code
Python
bsd-2-clause
JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software
python
## Code Before: import numpy as np def read_num_col(filename): with open(filename, 'rb') as f: num_col = [f.readline()] num_col = np.genfromtxt(num_col, delimiter=',', dtype=str) num_col = num_col[1::2].astype(float) return num_col def read_array1(filename, length): array1 = np.loadtxt(filename, delimiter=',', usecols=range(0, 2*length, 2), skiprows=1) return array1 def read_array2(filename, length): array2 = np.loadtxt(filename, delimiter=',', usecols=range(1, 2*length, 2), skiprows=1) return array2 ## Instruction: Remove numpy dependency from glassbr python code ## Code After: def read_num_col(filename): with open(filename, "r") as f: line = f.readline() z_array = line.split(",")[1::2] z_array = [float(i) for i in z_array] return z_array def read_array1(filename, length): with open(filename, "r") as f: lines = f.readlines() lines = lines[1:] x_array = [line.split(",")[0::2] for line in lines] for i in range(len(x_array)): x_array[i] = [float(j) for j in x_array[i]] return x_array def read_array2(filename, length): with open(filename, "r") as f: lines = f.readlines() lines = lines[1:] y_array = [line.split(",")[1::2] for line in lines] for i in range(len(y_array)): y_array[i] = [float(j) for j in y_array[i]] return y_array
- - import numpy as np def read_num_col(filename): - with open(filename, 'rb') as f: ? ^ ^^ + with open(filename, "r") as f: ? ^ ^ - num_col = [f.readline()] ? ^^^^^^ - - + line = f.readline() ? ++ ^ + z_array = line.split(",")[1::2] + z_array = [float(i) for i in z_array] + return z_array - - num_col = np.genfromtxt(num_col, delimiter=',', dtype=str) - num_col = num_col[1::2].astype(float) - - return num_col def read_array1(filename, length): - array1 = np.loadtxt(filename, delimiter=',', usecols=range(0, 2*length, 2), skiprows=1) + with open(filename, "r") as f: + lines = f.readlines() + lines = lines[1:] + x_array = [line.split(",")[0::2] for line in lines] + for i in range(len(x_array)): + x_array[i] = [float(j) for j in x_array[i]] - return array1 ? - + return x_array ? ++ def read_array2(filename, length): - array2 = np.loadtxt(filename, delimiter=',', usecols=range(1, 2*length, 2), skiprows=1) + with open(filename, "r") as f: + lines = f.readlines() + lines = lines[1:] + y_array = [line.split(",")[1::2] for line in lines] + for i in range(len(y_array)): + y_array[i] = [float(j) for j in y_array[i]] - return array2 ? - + return y_array ? ++
32
1.6
19
13
e2b86752f9b9718cc42aaf5fc8d8293c3c43181f
book/1-06-0-math.md
book/1-06-0-math.md
<ol class="breadcrumb"> <li><a href="/">Home</a></li> <li><a href="/book/">Book</a></li> <li><a href="/book/1-0-0-overview/">Part One: Grokking Lisp</a></li> <li class="active">Numbers and Math</li> </ol> # Numbers and Math > "Relevant quote..." > <footer>Source</footer> Chapter text. * Numeric Types * Built-in arithmetic and algebraic functions Moved to next Extra Credit section: * Arrays * Vectors <ul class="pager"> <li class="previous"><a href="/book/1-05-0-lookups-trees/">&laquo; Previous</a></li> <li><a href="/book/">Table of Contents</a></li> <li class="next"><a href="/book/1-07-0-arrays/">Next &raquo;</a><li> </ul>
<ol class="breadcrumb"> <li><a href="/">Home</a></li> <li><a href="/book/">Book</a></li> <li><a href="/book/1-0-0-overview/">Part One: Grokking Lisp</a></li> <li class="active">Numbers and Math</li> </ol> # Numbers and Math > "The 3-legged stool of understanding is held up by history, languages, and mathematics. Equipped with these three you can learn anything you want to learn." > <footer>Robert A. Heinlein, <em>The Happy Days Ahead, Expanded Universe</em></footer> This chapter will contain exercises on: * Integers * Hexadecimal Notation * Octal Notation * Binary Notation * Ratios * Floats * Numeric Constants * Complex Numbers * Arithmetic * Exponents * Logarithms * Trigonometry * Numeric Type Manipulation * Random Numbers <ul class="pager"> <li class="previous"><a href="/book/1-05-0-lookups-trees/">&laquo; Previous</a></li> <li><a href="/book/">Table of Contents</a></li> <li class="next"><a href="/book/1-07-0-arrays/">Next &raquo;</a><li> </ul>
Add list of exercise material to ch. 1.6
Add list of exercise material to ch. 1.6
Markdown
mit
tang-junjie/llthw,LispTO/llthw,LispTO/llthw,LispTO/llthw,tang-junjie/llthw,tang-junjie/llthw
markdown
## Code Before: <ol class="breadcrumb"> <li><a href="/">Home</a></li> <li><a href="/book/">Book</a></li> <li><a href="/book/1-0-0-overview/">Part One: Grokking Lisp</a></li> <li class="active">Numbers and Math</li> </ol> # Numbers and Math > "Relevant quote..." > <footer>Source</footer> Chapter text. * Numeric Types * Built-in arithmetic and algebraic functions Moved to next Extra Credit section: * Arrays * Vectors <ul class="pager"> <li class="previous"><a href="/book/1-05-0-lookups-trees/">&laquo; Previous</a></li> <li><a href="/book/">Table of Contents</a></li> <li class="next"><a href="/book/1-07-0-arrays/">Next &raquo;</a><li> </ul> ## Instruction: Add list of exercise material to ch. 1.6 ## Code After: <ol class="breadcrumb"> <li><a href="/">Home</a></li> <li><a href="/book/">Book</a></li> <li><a href="/book/1-0-0-overview/">Part One: Grokking Lisp</a></li> <li class="active">Numbers and Math</li> </ol> # Numbers and Math > "The 3-legged stool of understanding is held up by history, languages, and mathematics. Equipped with these three you can learn anything you want to learn." > <footer>Robert A. Heinlein, <em>The Happy Days Ahead, Expanded Universe</em></footer> This chapter will contain exercises on: * Integers * Hexadecimal Notation * Octal Notation * Binary Notation * Ratios * Floats * Numeric Constants * Complex Numbers * Arithmetic * Exponents * Logarithms * Trigonometry * Numeric Type Manipulation * Random Numbers <ul class="pager"> <li class="previous"><a href="/book/1-05-0-lookups-trees/">&laquo; Previous</a></li> <li><a href="/book/">Table of Contents</a></li> <li class="next"><a href="/book/1-07-0-arrays/">Next &raquo;</a><li> </ul>
<ol class="breadcrumb"> <li><a href="/">Home</a></li> <li><a href="/book/">Book</a></li> <li><a href="/book/1-0-0-overview/">Part One: Grokking Lisp</a></li> <li class="active">Numbers and Math</li> </ol> # Numbers and Math - > "Relevant quote..." - > <footer>Source</footer> + > "The 3-legged stool of understanding is held up by history, languages, and mathematics. Equipped with these three you can learn anything you want to learn." + > <footer>Robert A. Heinlein, <em>The Happy Days Ahead, Expanded Universe</em></footer> - Chapter text. + This chapter will contain exercises on: - * Numeric Types - * Built-in arithmetic and algebraic functions - - Moved to next Extra Credit section: - * Arrays - * Vectors + * Integers + * Hexadecimal Notation + * Octal Notation + * Binary Notation + * Ratios + * Floats + * Numeric Constants + * Complex Numbers + * Arithmetic + * Exponents + * Logarithms + * Trigonometry + * Numeric Type Manipulation + * Random Numbers <ul class="pager"> <li class="previous"><a href="/book/1-05-0-lookups-trees/">&laquo; Previous</a></li> <li><a href="/book/">Table of Contents</a></li> <li class="next"><a href="/book/1-07-0-arrays/">Next &raquo;</a><li> </ul>
26
1
17
9
98d6851b6ef72e24a26c49d26df49deb391223c9
source/assets/css/_base.scss
source/assets/css/_base.scss
/* ========================================================================== * $ Imports * ========================================================================== */ // Compass @import "compass/css3"; @import "compass/typography/vertical_rhythm"; // Susy grid system @import "susy"; // Fonts @import url(http://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700); /* ========================================================================== * $ Variables * ========================================================================== */ // == Font Sizes == // $base-font-size : 16px; $base-line-height : 24px; // == Colors == // $color__text : #353b3d; $color__header : #22262B; $color__link : #7E46E0; $color__link--hover : shade($color__link, 50%); $color__blockquote : tint($color__text, 30%); // == Typography == // // These values are picked up by _normalize.scss which then handles the vertical // rhythm for headers. $font_size__h1: 2.5; $font_size__h2: 2; $font_size__h3: 1.5; $font_size__h4: 1.17; $font_size__h5: 1; $font_size__h6: 0.83;
/* ========================================================================== * $ Imports * ========================================================================== */ // Compass @import "compass/css3"; @import "compass/typography/vertical_rhythm"; // Susy grid system @import "susy"; // Fonts @import url(http://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700); /* ========================================================================== * $ Variables * ========================================================================== */ // == Font Sizes == // $base-font-size : 16px; $base-line-height : 24px; // == Colors == // $color__text : #353b3d; $color__header : #3A3E42; $color__link : #9215AB; // 2995D9 $color__link--hover : shade($color__link, 40%); // EDAE00 $color__blockquote : tint($color__text, 30%); // == Typography == // // These values are picked up by _normalize.scss which then handles the vertical // rhythm for headers. $font_size__h1: 2.5; $font_size__h2: 2; $font_size__h3: 1.5; $font_size__h4: 1.17; $font_size__h5: 1; $font_size__h6: 0.83;
Change Header and Link colors
Change Header and Link colors
SCSS
mit
vocksel/my-website,VoxelDavid/voxeldavid-website,vocksel/my-website,VoxelDavid/voxeldavid-website
scss
## Code Before: /* ========================================================================== * $ Imports * ========================================================================== */ // Compass @import "compass/css3"; @import "compass/typography/vertical_rhythm"; // Susy grid system @import "susy"; // Fonts @import url(http://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700); /* ========================================================================== * $ Variables * ========================================================================== */ // == Font Sizes == // $base-font-size : 16px; $base-line-height : 24px; // == Colors == // $color__text : #353b3d; $color__header : #22262B; $color__link : #7E46E0; $color__link--hover : shade($color__link, 50%); $color__blockquote : tint($color__text, 30%); // == Typography == // // These values are picked up by _normalize.scss which then handles the vertical // rhythm for headers. $font_size__h1: 2.5; $font_size__h2: 2; $font_size__h3: 1.5; $font_size__h4: 1.17; $font_size__h5: 1; $font_size__h6: 0.83; ## Instruction: Change Header and Link colors ## Code After: /* ========================================================================== * $ Imports * ========================================================================== */ // Compass @import "compass/css3"; @import "compass/typography/vertical_rhythm"; // Susy grid system @import "susy"; // Fonts @import url(http://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700); /* ========================================================================== * $ Variables * ========================================================================== */ // == Font Sizes == // $base-font-size : 16px; $base-line-height : 24px; // == Colors == // $color__text : #353b3d; $color__header : #3A3E42; $color__link : #9215AB; // 2995D9 $color__link--hover : shade($color__link, 40%); // EDAE00 $color__blockquote : tint($color__text, 30%); // == Typography == // // These values are picked up by _normalize.scss which then handles the vertical // rhythm for headers. $font_size__h1: 2.5; $font_size__h2: 2; $font_size__h3: 1.5; $font_size__h4: 1.17; $font_size__h5: 1; $font_size__h6: 0.83;
/* ========================================================================== * $ Imports * ========================================================================== */ // Compass @import "compass/css3"; @import "compass/typography/vertical_rhythm"; // Susy grid system @import "susy"; // Fonts @import url(http://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700); /* ========================================================================== * $ Variables * ========================================================================== */ // == Font Sizes == // $base-font-size : 16px; $base-line-height : 24px; // == Colors == // $color__text : #353b3d; - $color__header : #22262B; ? ----- + $color__header : #3A3E42; ? +++++ - $color__link : #7E46E0; + $color__link : #9215AB; // 2995D9 - $color__link--hover : shade($color__link, 50%); ? ^ + $color__link--hover : shade($color__link, 40%); // EDAE00 ? ^ ++++++++++ $color__blockquote : tint($color__text, 30%); // == Typography == // // These values are picked up by _normalize.scss which then handles the vertical // rhythm for headers. $font_size__h1: 2.5; $font_size__h2: 2; $font_size__h3: 1.5; $font_size__h4: 1.17; $font_size__h5: 1; $font_size__h6: 0.83;
6
0.136364
3
3
a5dcda0ed258722b556442ca0bfd609847554f94
.travis.yml
.travis.yml
language: go go: - 1.7 - 1.8 notifications: email: false before_install: - go get -u github.com/golang/lint/golint - go get -u honnef.co/go/tools/cmd/gosimple - go get -u honnef.co/go/tools/cmd/staticcheck - go get -u honnef.co/go/tools/cmd/unused script: # $pkgs avoids testing anything in vendor/ - pkgs=$(go list ./... | grep -v /vendor/) - test -z $(gofmt -s -l .) - go test -v -race $pkgs - go vet $pkgs - staticcheck $pkgs - gosimple $pkgs - unused $pkgs - golint $pkgs
language: go go: - 1.7 - 1.8 notifications: email: false before_install: - go get -u github.com/golang/lint/golint - go get -u honnef.co/go/tools/cmd/gosimple - go get -u honnef.co/go/tools/cmd/staticcheck - go get -u honnef.co/go/tools/cmd/unused script: - go_files=$(find . -iname '*.go' | grep -v /vendor/) - pkgs=$(go list ./... | grep -v /vendor/) - test -z $(gofmt -s -l $go_files) - go test -v -race $pkgs - go vet $pkgs - staticcheck $pkgs - gosimple $pkgs - unused $pkgs - golint $pkgs
Exclude vendor/ from gofmt -s check
Exclude vendor/ from gofmt -s check
YAML
mit
y0ssar1an/qq,y0ssar1an/qq,y0ssar1an/q
yaml
## Code Before: language: go go: - 1.7 - 1.8 notifications: email: false before_install: - go get -u github.com/golang/lint/golint - go get -u honnef.co/go/tools/cmd/gosimple - go get -u honnef.co/go/tools/cmd/staticcheck - go get -u honnef.co/go/tools/cmd/unused script: # $pkgs avoids testing anything in vendor/ - pkgs=$(go list ./... | grep -v /vendor/) - test -z $(gofmt -s -l .) - go test -v -race $pkgs - go vet $pkgs - staticcheck $pkgs - gosimple $pkgs - unused $pkgs - golint $pkgs ## Instruction: Exclude vendor/ from gofmt -s check ## Code After: language: go go: - 1.7 - 1.8 notifications: email: false before_install: - go get -u github.com/golang/lint/golint - go get -u honnef.co/go/tools/cmd/gosimple - go get -u honnef.co/go/tools/cmd/staticcheck - go get -u honnef.co/go/tools/cmd/unused script: - go_files=$(find . -iname '*.go' | grep -v /vendor/) - pkgs=$(go list ./... | grep -v /vendor/) - test -z $(gofmt -s -l $go_files) - go test -v -race $pkgs - go vet $pkgs - staticcheck $pkgs - gosimple $pkgs - unused $pkgs - golint $pkgs
language: go go: - 1.7 - 1.8 notifications: email: false before_install: - go get -u github.com/golang/lint/golint - go get -u honnef.co/go/tools/cmd/gosimple - go get -u honnef.co/go/tools/cmd/staticcheck - go get -u honnef.co/go/tools/cmd/unused script: - # $pkgs avoids testing anything in vendor/ + - go_files=$(find . -iname '*.go' | grep -v /vendor/) - pkgs=$(go list ./... | grep -v /vendor/) - - test -z $(gofmt -s -l .) ? ^ + - test -z $(gofmt -s -l $go_files) ? ^^^^^^^^^ - go test -v -race $pkgs - go vet $pkgs - staticcheck $pkgs - gosimple $pkgs - unused $pkgs - golint $pkgs
4
0.16
2
2
d0757aa36e63dfbdacbde25fdeeb9cd47fcecfa8
lib/json_test_data/data_structures/number.rb
lib/json_test_data/data_structures/number.rb
require_relative "./helpers/number_helper" module JsonTestData class Number extend NumberHelper class << self def create(schema) factor, minimum, maximum = schema.fetch(:multipleOf, nil), schema.fetch(:minimum, -infinity), schema.fetch(:maximum, infinity) num = factor || 1 step_size = schema.fetch(:type) == "integer" ? 1 : 0.5 adjust_for_maximum(number: num, maximum: maximum, step_size: factor || step_size) adjust_for_minimum(number: num, minimum: minimum, step_size: factor || step_size) end end end end
require_relative "./helpers/number_helper" module JsonTestData class Number extend NumberHelper class << self def create(schema) factor = schema.fetch(:multipleOf, nil) minimum, maximum = schema.fetch(:minimum, -infinity), schema.fetch(:maximum, infinity) num = factor || 1 step_size = schema.fetch(:type) == "integer" ? 1 : 0.5 num = adjust_for_maximum(number: num, maximum: maximum, step_size: factor || step_size) adjust_for_minimum(number: num, minimum: minimum, step_size: factor || step_size) end end end end
Fix issue with adjustments for maximum and minimum
Fix issue with adjustments for maximum and minimum
Ruby
mit
danascheider/json_test_data
ruby
## Code Before: require_relative "./helpers/number_helper" module JsonTestData class Number extend NumberHelper class << self def create(schema) factor, minimum, maximum = schema.fetch(:multipleOf, nil), schema.fetch(:minimum, -infinity), schema.fetch(:maximum, infinity) num = factor || 1 step_size = schema.fetch(:type) == "integer" ? 1 : 0.5 adjust_for_maximum(number: num, maximum: maximum, step_size: factor || step_size) adjust_for_minimum(number: num, minimum: minimum, step_size: factor || step_size) end end end end ## Instruction: Fix issue with adjustments for maximum and minimum ## Code After: require_relative "./helpers/number_helper" module JsonTestData class Number extend NumberHelper class << self def create(schema) factor = schema.fetch(:multipleOf, nil) minimum, maximum = schema.fetch(:minimum, -infinity), schema.fetch(:maximum, infinity) num = factor || 1 step_size = schema.fetch(:type) == "integer" ? 1 : 0.5 num = adjust_for_maximum(number: num, maximum: maximum, step_size: factor || step_size) adjust_for_minimum(number: num, minimum: minimum, step_size: factor || step_size) end end end end
require_relative "./helpers/number_helper" module JsonTestData class Number extend NumberHelper class << self def create(schema) + factor = schema.fetch(:multipleOf, nil) - factor, minimum, maximum = schema.fetch(:multipleOf, nil), schema.fetch(:minimum, -infinity), schema.fetch(:maximum, infinity) ? -------- -------------------------------- + minimum, maximum = schema.fetch(:minimum, -infinity), schema.fetch(:maximum, infinity) num = factor || 1 step_size = schema.fetch(:type) == "integer" ? 1 : 0.5 - adjust_for_maximum(number: num, maximum: maximum, step_size: factor || step_size) + num = adjust_for_maximum(number: num, maximum: maximum, step_size: factor || step_size) ? ++++++ adjust_for_minimum(number: num, minimum: minimum, step_size: factor || step_size) end end end end
5
0.263158
3
2
6431033412c682c9684885370c0d4de1c0767667
spec/agharta/user_stream_spec.rb
spec/agharta/user_stream_spec.rb
require 'spec_helper' describe Agharta::UserStream do before do @context = DummyRecipe.new @stream = Agharta::UserStream.new(@context) @client = Twitter::Client.new Twitter::Client.stub(:new).and_return(@client) end describe '#log_path' do it 'should build from context name' do File.basename(@stream.log_path).should == 'dummyrecipe.log' end end describe '#logger' do it 'should be a MultiLogger' do @stream.logger.should be_a Agharta::MultiLogger end end describe '#current_user' do it 'should call Twitter::Client#verify_credentials' do @client.should_receive(:verify_credentials) @stream.current_user end end end
require 'spec_helper' describe Agharta::UserStream do before do @context = DummyRecipe.new @stream = Agharta::UserStream.new(@context) @client = Twitter::Client.new Twitter::Client.stub(:new).and_return(@client) end describe '#log_path' do it 'should build from context name' do File.basename(@stream.log_path).should == 'dummyrecipe.log' end end describe '#logger' do before do @logger = Logger.new($stdout) Logger.should_receive(:new).with($stdout).and_return(@logger) Logger.should_receive(:new).with(@stream.log_path).and_return(@logger) end it 'should be a MultiLogger' do @stream.logger.should be_a Agharta::MultiLogger end end describe '#current_user' do it 'should call Twitter::Client#verify_credentials' do @client.should_receive(:verify_credentials) @stream.current_user end end end
Fix MultiLogger spec to use dummy logger.
Fix MultiLogger spec to use dummy logger.
Ruby
mit
mitukiii/agharta
ruby
## Code Before: require 'spec_helper' describe Agharta::UserStream do before do @context = DummyRecipe.new @stream = Agharta::UserStream.new(@context) @client = Twitter::Client.new Twitter::Client.stub(:new).and_return(@client) end describe '#log_path' do it 'should build from context name' do File.basename(@stream.log_path).should == 'dummyrecipe.log' end end describe '#logger' do it 'should be a MultiLogger' do @stream.logger.should be_a Agharta::MultiLogger end end describe '#current_user' do it 'should call Twitter::Client#verify_credentials' do @client.should_receive(:verify_credentials) @stream.current_user end end end ## Instruction: Fix MultiLogger spec to use dummy logger. ## Code After: require 'spec_helper' describe Agharta::UserStream do before do @context = DummyRecipe.new @stream = Agharta::UserStream.new(@context) @client = Twitter::Client.new Twitter::Client.stub(:new).and_return(@client) end describe '#log_path' do it 'should build from context name' do File.basename(@stream.log_path).should == 'dummyrecipe.log' end end describe '#logger' do before do @logger = Logger.new($stdout) Logger.should_receive(:new).with($stdout).and_return(@logger) Logger.should_receive(:new).with(@stream.log_path).and_return(@logger) end it 'should be a MultiLogger' do @stream.logger.should be_a Agharta::MultiLogger end end describe '#current_user' do it 'should call Twitter::Client#verify_credentials' do @client.should_receive(:verify_credentials) @stream.current_user end end end
require 'spec_helper' describe Agharta::UserStream do before do @context = DummyRecipe.new @stream = Agharta::UserStream.new(@context) @client = Twitter::Client.new Twitter::Client.stub(:new).and_return(@client) end describe '#log_path' do it 'should build from context name' do File.basename(@stream.log_path).should == 'dummyrecipe.log' end end describe '#logger' do + before do + @logger = Logger.new($stdout) + Logger.should_receive(:new).with($stdout).and_return(@logger) + Logger.should_receive(:new).with(@stream.log_path).and_return(@logger) + end + it 'should be a MultiLogger' do @stream.logger.should be_a Agharta::MultiLogger end end describe '#current_user' do it 'should call Twitter::Client#verify_credentials' do @client.should_receive(:verify_credentials) @stream.current_user end end end
6
0.2
6
0
6c67cc46b828c35e02d763f7a40b8e56b9d9b3cc
app/assets/stylesheets/resources.css.scss
app/assets/stylesheets/resources.css.scss
// Place all the styles related to the resources controller here. // They will automatically be included in application.css. // You can use Sass (SCSS) here: http://sass-lang.com/ .main-container { padding-top: 70px; } .well { background:rgba(255,255,255, 0.85); } .navbar-default { background: rgba(255, 255, 255, 0.85); border: none; color:#777; display: inline-block; text-align: center; position:fixed; } .navbar-nav{ margin: 0 auto; padding: 0; list-style: none; display: inline-block; float:none; font-size: 1em; font-weight: bold; } .nav-pills { padding-bottom: 15px; padding-top: 15px; a { background-color: white; } } ul.pagination { margin-top: 5px; } .navbar-brand { font-size: 25px; }
// Place all the styles related to the resources controller here. // They will automatically be included in application.css. // You can use Sass (SCSS) here: http://sass-lang.com/ .main-container { padding-top: 70px; } .navbar-default { border: none; color:#777; display: inline-block; text-align: center; position:fixed; } .navbar-nav{ margin: 0 auto; padding: 0; list-style: none; display: inline-block; float:none; font-size: 1em; font-weight: bold; } .nav-pills { padding-bottom: 15px; padding-top: 15px; a { background-color: white; } } ul.pagination { margin-top: 5px; } .navbar-brand { font-size: 25px; }
Remove transparent background from header and wells
Remove transparent background from header and wells
SCSS
mit
amberbit/programming-resources
scss
## Code Before: // Place all the styles related to the resources controller here. // They will automatically be included in application.css. // You can use Sass (SCSS) here: http://sass-lang.com/ .main-container { padding-top: 70px; } .well { background:rgba(255,255,255, 0.85); } .navbar-default { background: rgba(255, 255, 255, 0.85); border: none; color:#777; display: inline-block; text-align: center; position:fixed; } .navbar-nav{ margin: 0 auto; padding: 0; list-style: none; display: inline-block; float:none; font-size: 1em; font-weight: bold; } .nav-pills { padding-bottom: 15px; padding-top: 15px; a { background-color: white; } } ul.pagination { margin-top: 5px; } .navbar-brand { font-size: 25px; } ## Instruction: Remove transparent background from header and wells ## Code After: // Place all the styles related to the resources controller here. // They will automatically be included in application.css. // You can use Sass (SCSS) here: http://sass-lang.com/ .main-container { padding-top: 70px; } .navbar-default { border: none; color:#777; display: inline-block; text-align: center; position:fixed; } .navbar-nav{ margin: 0 auto; padding: 0; list-style: none; display: inline-block; float:none; font-size: 1em; font-weight: bold; } .nav-pills { padding-bottom: 15px; padding-top: 15px; a { background-color: white; } } ul.pagination { margin-top: 5px; } .navbar-brand { font-size: 25px; }
// Place all the styles related to the resources controller here. // They will automatically be included in application.css. // You can use Sass (SCSS) here: http://sass-lang.com/ .main-container { padding-top: 70px; } - .well { - background:rgba(255,255,255, 0.85); - } - .navbar-default { - background: rgba(255, 255, 255, 0.85); border: none; color:#777; display: inline-block; text-align: center; position:fixed; } .navbar-nav{ margin: 0 auto; padding: 0; list-style: none; display: inline-block; float:none; font-size: 1em; font-weight: bold; } .nav-pills { padding-bottom: 15px; padding-top: 15px; a { background-color: white; } } ul.pagination { margin-top: 5px; } .navbar-brand { font-size: 25px; }
5
0.108696
0
5
989771478832213cc280e8ccc9b9611491f8151d
CHANGES.rst
CHANGES.rst
v4.0.0 ====== * First public release, based on CherryPy 3.2.2.
v4.0.0 ====== * First public release, based on CherryPy 3.2.2, plus... * #1: Replace the hard-coded state transitions with a generic transition Graph.
Update changelog to include recent changes.
Update changelog to include recent changes.
reStructuredText
bsd-3-clause
cherrypy/magicbus
restructuredtext
## Code Before: v4.0.0 ====== * First public release, based on CherryPy 3.2.2. ## Instruction: Update changelog to include recent changes. ## Code After: v4.0.0 ====== * First public release, based on CherryPy 3.2.2, plus... * #1: Replace the hard-coded state transitions with a generic transition Graph.
v4.0.0 ====== - * First public release, based on CherryPy 3.2.2. + * First public release, based on CherryPy 3.2.2, plus... ? ++++++ ++ + + * #1: Replace the hard-coded state transitions with a + generic transition Graph.
5
1.25
4
1
115a5ce93abd7c9769241ab0f0c3e62817021931
svq.java
svq.java
// Get JAFFE database from http://www.kasrl.org/jaffe_info.html // Extract pics in folder named "jaffe" // package image_test; import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import javax.imageio.ImageIO; public class svq { public static void main(String[] args) { // read image BufferedImage input = null; try { input = ImageIO.read(new File("jaffe/KA.AN1.39.tiff.bmp")); } catch (IOException e) { throw new RuntimeException(e); } System.out.println("Done!"); } }
// Get JAFFE database from http://www.kasrl.org/jaffe_info.html // Extract pics in folder named "jaffe" // package image_test; import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import javax.imageio.ImageIO; public class svq { public static void main(String[] args) { // read image BufferedImage input = null; try { input = ImageIO.read(new File("jaffe/KA.AN1.39.tiff.bmp")); } catch (IOException e) { throw new RuntimeException(e); } // to byte array ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { ImageIO.write( input, "bmp", baos ); baos.flush(); } catch (IOException e) { } byte[] bytearray = baos.toByteArray(); System.out.println("Done!"); } }
Convert bmp to byte array
Convert bmp to byte array
Java
mit
giuseppecuccu/jsvq,giuseppecuccu/jsvq
java
## Code Before: // Get JAFFE database from http://www.kasrl.org/jaffe_info.html // Extract pics in folder named "jaffe" // package image_test; import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import javax.imageio.ImageIO; public class svq { public static void main(String[] args) { // read image BufferedImage input = null; try { input = ImageIO.read(new File("jaffe/KA.AN1.39.tiff.bmp")); } catch (IOException e) { throw new RuntimeException(e); } System.out.println("Done!"); } } ## Instruction: Convert bmp to byte array ## Code After: // Get JAFFE database from http://www.kasrl.org/jaffe_info.html // Extract pics in folder named "jaffe" // package image_test; import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import javax.imageio.ImageIO; public class svq { public static void main(String[] args) { // read image BufferedImage input = null; try { input = ImageIO.read(new File("jaffe/KA.AN1.39.tiff.bmp")); } catch (IOException e) { throw new RuntimeException(e); } // to byte array ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { ImageIO.write( input, "bmp", baos ); baos.flush(); } catch (IOException e) { } byte[] bytearray = baos.toByteArray(); System.out.println("Done!"); } }
// Get JAFFE database from http://www.kasrl.org/jaffe_info.html // Extract pics in folder named "jaffe" // package image_test; import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import javax.imageio.ImageIO; public class svq { public static void main(String[] args) { // read image BufferedImage input = null; try { input = ImageIO.read(new File("jaffe/KA.AN1.39.tiff.bmp")); } catch (IOException e) { throw new RuntimeException(e); } + // to byte array + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + try { + ImageIO.write( input, "bmp", baos ); + baos.flush(); + } catch (IOException e) { + + } + byte[] bytearray = baos.toByteArray(); + System.out.println("Done!"); } }
10
0.4
10
0
205da5364cf2bfb5c216fd2c2b05d0f44109c451
src/main/pig/fhr_payload_errors.pig
src/main/pig/fhr_payload_errors.pig
register 'akela-0.5-SNAPSHOT.jar' register 'fhr-toolbox-0.1-SNAPSHOT.jar' register 'jackson-core-2.1.1.jar' register 'jackson-databind-2.1.1.jar' register 'jackson-annotations-2.1.1.jar' SET pig.logfile fhr_payload_errors.log; /* SET default_parallel 8; */ SET pig.tmpfilecompression true; SET pig.tmpfilecompression.codec lzo; define IsMap com.mozilla.pig.filter.map.IsMap(); define Size com.mozilla.pig.eval.Size(); raw = LOAD 'hbase://metrics' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('data:json','-loadKey=true -caching=100') AS (k:bytearray,json:chararray); genmap = FOREACH raw GENERATE k, com.mozilla.pig.eval.json.JsonMap(json) AS json_map:map[]; filtered_genmap = FILTER genmap BY json_map#'errors' IS NOT NULL; data = FOREACH filtered_genmap GENERATE json_map#'thisPingDate' AS submission_date:chararray, json_map#'errors' AS errs:chararray; STORE data INTO 'fhr_payload_errors_out';
register 'akela-0.5-SNAPSHOT.jar' register 'fhr-toolbox-0.1-SNAPSHOT.jar' register 'jackson-core-2.1.1.jar' register 'jackson-databind-2.1.1.jar' register 'jackson-annotations-2.1.1.jar' SET pig.logfile fhr_payload_errors.log; /* SET default_parallel 8; */ SET pig.tmpfilecompression true; SET pig.tmpfilecompression.codec lzo; define IsMap com.mozilla.pig.filter.map.IsMap(); define Size com.mozilla.pig.eval.Size(); raw = LOAD 'hbase://metrics' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('data:json','-loadKey=true -caching=100') AS (k:bytearray,json:chararray); genmap = FOREACH raw GENERATE k, com.mozilla.pig.eval.json.JsonMap(json) AS json_map:map[]; filtered_genmap = FILTER genmap BY json_map#'errors' IS NOT NULL; data = FOREACH filtered_genmap GENERATE json_map#'thisPingDate' AS submission_date:chararray, json_map#'data'#'last'#'org.mozilla.appInfo.appinfo'#'appBuildID' as build_id:chararray, json_map#'errors' AS errs:chararray; STORE data INTO 'fhr_payload_errors_out';
Add build_id to payload errors output.
Add build_id to payload errors output.
PigLatin
apache-2.0
SamPenrose/fhr-toolbox,SamPenrose/fhr-toolbox,SamPenrose/fhr-toolbox,mozilla-metrics/fhr-toolbox,SamPenrose/fhr-toolbox,SamPenrose/fhr-toolbox,mozilla-metrics/fhr-toolbox,mozilla-metrics/fhr-toolbox,mozilla-metrics/fhr-toolbox,mozilla-metrics/fhr-toolbox
piglatin
## Code Before: register 'akela-0.5-SNAPSHOT.jar' register 'fhr-toolbox-0.1-SNAPSHOT.jar' register 'jackson-core-2.1.1.jar' register 'jackson-databind-2.1.1.jar' register 'jackson-annotations-2.1.1.jar' SET pig.logfile fhr_payload_errors.log; /* SET default_parallel 8; */ SET pig.tmpfilecompression true; SET pig.tmpfilecompression.codec lzo; define IsMap com.mozilla.pig.filter.map.IsMap(); define Size com.mozilla.pig.eval.Size(); raw = LOAD 'hbase://metrics' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('data:json','-loadKey=true -caching=100') AS (k:bytearray,json:chararray); genmap = FOREACH raw GENERATE k, com.mozilla.pig.eval.json.JsonMap(json) AS json_map:map[]; filtered_genmap = FILTER genmap BY json_map#'errors' IS NOT NULL; data = FOREACH filtered_genmap GENERATE json_map#'thisPingDate' AS submission_date:chararray, json_map#'errors' AS errs:chararray; STORE data INTO 'fhr_payload_errors_out'; ## Instruction: Add build_id to payload errors output. ## Code After: register 'akela-0.5-SNAPSHOT.jar' register 'fhr-toolbox-0.1-SNAPSHOT.jar' register 'jackson-core-2.1.1.jar' register 'jackson-databind-2.1.1.jar' register 'jackson-annotations-2.1.1.jar' SET pig.logfile fhr_payload_errors.log; /* SET default_parallel 8; */ SET pig.tmpfilecompression true; SET pig.tmpfilecompression.codec lzo; define IsMap com.mozilla.pig.filter.map.IsMap(); define Size com.mozilla.pig.eval.Size(); raw = LOAD 'hbase://metrics' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('data:json','-loadKey=true -caching=100') AS (k:bytearray,json:chararray); genmap = FOREACH raw GENERATE k, com.mozilla.pig.eval.json.JsonMap(json) AS json_map:map[]; filtered_genmap = FILTER genmap BY json_map#'errors' IS NOT NULL; data = FOREACH filtered_genmap GENERATE json_map#'thisPingDate' AS submission_date:chararray, json_map#'data'#'last'#'org.mozilla.appInfo.appinfo'#'appBuildID' as build_id:chararray, json_map#'errors' AS errs:chararray; STORE data INTO 'fhr_payload_errors_out';
register 'akela-0.5-SNAPSHOT.jar' register 'fhr-toolbox-0.1-SNAPSHOT.jar' register 'jackson-core-2.1.1.jar' register 'jackson-databind-2.1.1.jar' register 'jackson-annotations-2.1.1.jar' SET pig.logfile fhr_payload_errors.log; /* SET default_parallel 8; */ SET pig.tmpfilecompression true; SET pig.tmpfilecompression.codec lzo; define IsMap com.mozilla.pig.filter.map.IsMap(); define Size com.mozilla.pig.eval.Size(); raw = LOAD 'hbase://metrics' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('data:json','-loadKey=true -caching=100') AS (k:bytearray,json:chararray); genmap = FOREACH raw GENERATE k, com.mozilla.pig.eval.json.JsonMap(json) AS json_map:map[]; filtered_genmap = FILTER genmap BY json_map#'errors' IS NOT NULL; - data = FOREACH filtered_genmap GENERATE json_map#'thisPingDate' AS submission_date:chararray, json_map#'errors' AS errs:chararray; + data = FOREACH filtered_genmap GENERATE json_map#'thisPingDate' AS submission_date:chararray, json_map#'data'#'last'#'org.mozilla.appInfo.appinfo'#'appBuildID' as build_id:chararray, json_map#'errors' AS errs:chararray; STORE data INTO 'fhr_payload_errors_out';
2
0.095238
1
1
8442aac98f6ef7a874506d9b35d628787a168b6b
src/config/device-manager/contrail-device-manager.ini
src/config/device-manager/contrail-device-manager.ini
[program:contrail-device-manager] command=/usr/bin/contrail-device-manager --conf_file /etc/contrail/device_manager.conf priority=450 autostart=true autorestart=true killasgroup=true stopsignal=KILL redirect_stderr=true stdout_logfile=/var/log/contrail/contrail-device-manager-stdout.log stderr_logfile=/dev/null exitcodes=0 ; 'expected' exit codes for process (default 0,2) user=contrail
[program:contrail-device-manager] command=/usr/bin/contrail-device-manager --conf_file /etc/contrail/contrail-device-manager.conf priority=450 autostart=true autorestart=true killasgroup=true stopsignal=KILL redirect_stderr=true stdout_logfile=/var/log/contrail/contrail-device-manager-stdout.log stderr_logfile=/dev/null exitcodes=0 ; 'expected' exit codes for process (default 0,2) user=contrail
Fix the config file name for device manager
Fix the config file name for device manager Change-Id: I84664a3c1e515d49317238239dcfda74441c5e74
INI
apache-2.0
sajuptpm/contrail-controller,cloudwatt/contrail-controller,tcpcloud/contrail-controller,srajag/contrail-controller,sajuptpm/contrail-controller,nischalsheth/contrail-controller,tcpcloud/contrail-controller,eonpatapon/contrail-controller,cloudwatt/contrail-controller,DreamLab/contrail-controller,codilime/contrail-controller,vpramo/contrail-controller,sajuptpm/contrail-controller,rombie/contrail-controller,rombie/contrail-controller,nischalsheth/contrail-controller,eonpatapon/contrail-controller,sajuptpm/contrail-controller,srajag/contrail-controller,hthompson6/contrail-controller,sajuptpm/contrail-controller,rombie/contrail-controller,srajag/contrail-controller,vmahuli/contrail-controller,reiaaoyama/contrail-controller,nischalsheth/contrail-controller,vpramo/contrail-controller,tcpcloud/contrail-controller,rombie/contrail-controller,vpramo/contrail-controller,eonpatapon/contrail-controller,codilime/contrail-controller,nischalsheth/contrail-controller,numansiddique/contrail-controller,sajuptpm/contrail-controller,DreamLab/contrail-controller,reiaaoyama/contrail-controller,eonpatapon/contrail-controller,codilime/contrail-controller,eonpatapon/contrail-controller,tcpcloud/contrail-controller,reiaaoyama/contrail-controller,facetothefate/contrail-controller,tcpcloud/contrail-controller,numansiddique/contrail-controller,eonpatapon/contrail-controller,cloudwatt/contrail-controller,vmahuli/contrail-controller,reiaaoyama/contrail-controller,cloudwatt/contrail-controller,rombie/contrail-controller,reiaaoyama/contrail-controller,rombie/contrail-controller,DreamLab/contrail-controller,codilime/contrail-controller,nischalsheth/contrail-controller,rombie/contrail-controller,facetothefate/contrail-controller,nischalsheth/contrail-controller,vmahuli/contrail-controller,DreamLab/contrail-controller,vmahuli/contrail-controller,facetothefate/contrail-controller,tcpcloud/contrail-controller,hthompson6/contrail-controller,codilime/contrail-controller,facetothefate/contrail-controller,srajag/contrail-controller,cloudwatt/contrail-controller,numansiddique/contrail-controller,srajag/contrail-controller,DreamLab/contrail-controller,facetothefate/contrail-controller,numansiddique/contrail-controller,vpramo/contrail-controller,numansiddique/contrail-controller,hthompson6/contrail-controller,codilime/contrail-controller,nischalsheth/contrail-controller,hthompson6/contrail-controller,hthompson6/contrail-controller,vmahuli/contrail-controller,eonpatapon/contrail-controller,nischalsheth/contrail-controller,vpramo/contrail-controller
ini
## Code Before: [program:contrail-device-manager] command=/usr/bin/contrail-device-manager --conf_file /etc/contrail/device_manager.conf priority=450 autostart=true autorestart=true killasgroup=true stopsignal=KILL redirect_stderr=true stdout_logfile=/var/log/contrail/contrail-device-manager-stdout.log stderr_logfile=/dev/null exitcodes=0 ; 'expected' exit codes for process (default 0,2) user=contrail ## Instruction: Fix the config file name for device manager Change-Id: I84664a3c1e515d49317238239dcfda74441c5e74 ## Code After: [program:contrail-device-manager] command=/usr/bin/contrail-device-manager --conf_file /etc/contrail/contrail-device-manager.conf priority=450 autostart=true autorestart=true killasgroup=true stopsignal=KILL redirect_stderr=true stdout_logfile=/var/log/contrail/contrail-device-manager-stdout.log stderr_logfile=/dev/null exitcodes=0 ; 'expected' exit codes for process (default 0,2) user=contrail
[program:contrail-device-manager] - command=/usr/bin/contrail-device-manager --conf_file /etc/contrail/device_manager.conf ? ^ + command=/usr/bin/contrail-device-manager --conf_file /etc/contrail/contrail-device-manager.conf ? +++++++++ ^ priority=450 autostart=true autorestart=true killasgroup=true stopsignal=KILL redirect_stderr=true stdout_logfile=/var/log/contrail/contrail-device-manager-stdout.log stderr_logfile=/dev/null exitcodes=0 ; 'expected' exit codes for process (default 0,2) user=contrail
2
0.166667
1
1
467a7beda11f83464971272a4da2202fbdc917f6
src/apps/search/transformers.js
src/apps/search/transformers.js
const { find } = require('lodash') const { transformInvestmentProjectToListItem } = require('../investment-projects/transformers') const { transformContactToListItem } = require('../contacts/transformers') const { buildPagination } = require('../../lib/pagination') const { buildSearchAggregation } = require('./builders') const { entities } = require('./services') function transformResultsToCollection (results, searchEntity, options = {}) { const resultsData = results[`${searchEntity}s`] if (!resultsData) { return null } const entity = find(entities, ['entity', searchEntity]) let items = resultsData.map(item => Object.assign({}, item, { type: searchEntity })) if (searchEntity === 'investment_project') { items = items.map(transformInvestmentProjectToListItem) } if (searchEntity === 'contact') { items = items.map(transformContactToListItem) } return Object.assign({}, { items, count: results.count, countLabel: entity.noun, highlightTerm: options.searchTerm, pagination: buildPagination(options.query, results), aggregations: buildSearchAggregation(results.aggregations), }) } module.exports = { transformResultsToCollection, }
const { find } = require('lodash') const { transformInvestmentProjectToListItem } = require('../investment-projects/transformers') const { transformContactToListItem } = require('../contacts/transformers') const { buildPagination } = require('../../lib/pagination') const { buildSearchAggregation } = require('./builders') const { entities } = require('./services') function transformResultsToCollection (results, searchEntity, options = {}) { const resultsItems = results[`${searchEntity}s`] || results.items || results.results if (!resultsItems) { return null } const entity = find(entities, ['entity', searchEntity]) let items = resultsItems.map(item => Object.assign({}, item, { type: searchEntity })) if (searchEntity === 'investment_project') { items = items.map(transformInvestmentProjectToListItem) } if (searchEntity === 'contact') { items = items.map(transformContactToListItem) } return Object.assign({}, { items, count: results.count, countLabel: entity.noun, highlightTerm: options.searchTerm, pagination: buildPagination(options.query, results), aggregations: buildSearchAggregation(results.aggregations), }) } module.exports = { transformResultsToCollection, }
Allow transformResultsToCollection to fallback to other props
Allow transformResultsToCollection to fallback to other props
JavaScript
mit
uktrade/data-hub-fe-beta2,uktrade/data-hub-frontend,uktrade/data-hub-fe-beta2,uktrade/data-hub-frontend,uktrade/data-hub-frontend
javascript
## Code Before: const { find } = require('lodash') const { transformInvestmentProjectToListItem } = require('../investment-projects/transformers') const { transformContactToListItem } = require('../contacts/transformers') const { buildPagination } = require('../../lib/pagination') const { buildSearchAggregation } = require('./builders') const { entities } = require('./services') function transformResultsToCollection (results, searchEntity, options = {}) { const resultsData = results[`${searchEntity}s`] if (!resultsData) { return null } const entity = find(entities, ['entity', searchEntity]) let items = resultsData.map(item => Object.assign({}, item, { type: searchEntity })) if (searchEntity === 'investment_project') { items = items.map(transformInvestmentProjectToListItem) } if (searchEntity === 'contact') { items = items.map(transformContactToListItem) } return Object.assign({}, { items, count: results.count, countLabel: entity.noun, highlightTerm: options.searchTerm, pagination: buildPagination(options.query, results), aggregations: buildSearchAggregation(results.aggregations), }) } module.exports = { transformResultsToCollection, } ## Instruction: Allow transformResultsToCollection to fallback to other props ## Code After: const { find } = require('lodash') const { transformInvestmentProjectToListItem } = require('../investment-projects/transformers') const { transformContactToListItem } = require('../contacts/transformers') const { buildPagination } = require('../../lib/pagination') const { buildSearchAggregation } = require('./builders') const { entities } = require('./services') function transformResultsToCollection (results, searchEntity, options = {}) { const resultsItems = results[`${searchEntity}s`] || results.items || results.results if (!resultsItems) { return null } const entity = find(entities, ['entity', searchEntity]) let items = resultsItems.map(item => Object.assign({}, item, { type: searchEntity })) if (searchEntity === 'investment_project') { items = items.map(transformInvestmentProjectToListItem) } if (searchEntity === 'contact') { items = items.map(transformContactToListItem) } return Object.assign({}, { items, count: results.count, countLabel: entity.noun, highlightTerm: options.searchTerm, pagination: buildPagination(options.query, results), aggregations: buildSearchAggregation(results.aggregations), }) } module.exports = { transformResultsToCollection, }
const { find } = require('lodash') const { transformInvestmentProjectToListItem } = require('../investment-projects/transformers') const { transformContactToListItem } = require('../contacts/transformers') const { buildPagination } = require('../../lib/pagination') const { buildSearchAggregation } = require('./builders') const { entities } = require('./services') function transformResultsToCollection (results, searchEntity, options = {}) { - const resultsData = results[`${searchEntity}s`] + const resultsItems = results[`${searchEntity}s`] || results.items || results.results - if (!resultsData) { return null } ? ^^ ^ + if (!resultsItems) { return null } ? ^ ^^^ const entity = find(entities, ['entity', searchEntity]) - let items = resultsData.map(item => Object.assign({}, item, { type: searchEntity })) ? ^^ ^ + let items = resultsItems.map(item => Object.assign({}, item, { type: searchEntity })) ? ^ ^^^ if (searchEntity === 'investment_project') { items = items.map(transformInvestmentProjectToListItem) } if (searchEntity === 'contact') { items = items.map(transformContactToListItem) } return Object.assign({}, { items, count: results.count, countLabel: entity.noun, highlightTerm: options.searchTerm, pagination: buildPagination(options.query, results), aggregations: buildSearchAggregation(results.aggregations), }) } module.exports = { transformResultsToCollection, }
6
0.162162
3
3
384b461e32ca276009667459ea2c4941f15b3dab
fetch-members.js
fetch-members.js
const { GraphQLClient } = require("graphql-request"); const { writeFileSync, existsSync, mkdirSync } = require("fs"); async function fetchMembers(organisation) { const token = process.env.GH_TOKEN; if (!token) { console.error("'GH_TOKEN' not set. Could not fetch nteract members."); return []; } const client = new GraphQLClient("https://api.github.com/graphql", { headers: { Authorization: `Bearer ${token}` } }); const query = `{ organization(login: ${organisation}) { members(first: 100) { totalCount nodes { name login websiteUrl avatarUrl url } } } }`; try { const data = await client.request(query); if (data.organization.members.totalCount > 100) { console.error( "100+ members in the organization. That's too much for one GraphQL call." ); } return data.organization.members.nodes; } catch (e) { console.error(e); return []; } } async function main() { const members = await fetchMembers("nteract"); if (!existsSync("generated")) mkdirSync("generated"); writeFileSync("./generated/nteract-members.json", JSON.stringify(members)); } main();
const { GraphQLClient } = require("graphql-request"); const { writeFileSync, existsSync, mkdirSync } = require("fs"); async function fetchMembers(organisation) { const token = process.env.GH_TOKEN; if (!token) { console.error("'GH_TOKEN' not set. Could not fetch nteract members."); return []; } const client = new GraphQLClient("https://api.github.com/graphql", { headers: { Authorization: `Bearer ${token}` } }); const query = `{ organization(login: ${organisation}) { membersWithRole(first: 100) { totalCount nodes { name login websiteUrl avatarUrl url } } } }`; try { const data = await client.request(query); return data.organization.membersWithRole.nodes; } catch (e) { console.error(e); return []; } } async function main() { const members = await fetchMembers("nteract"); if (!existsSync("generated")) mkdirSync("generated"); writeFileSync("./generated/nteract-members.json", JSON.stringify(members)); } main();
Update GitHub API query for latest version
Update GitHub API query for latest version
JavaScript
bsd-3-clause
nteract/nteract.io
javascript
## Code Before: const { GraphQLClient } = require("graphql-request"); const { writeFileSync, existsSync, mkdirSync } = require("fs"); async function fetchMembers(organisation) { const token = process.env.GH_TOKEN; if (!token) { console.error("'GH_TOKEN' not set. Could not fetch nteract members."); return []; } const client = new GraphQLClient("https://api.github.com/graphql", { headers: { Authorization: `Bearer ${token}` } }); const query = `{ organization(login: ${organisation}) { members(first: 100) { totalCount nodes { name login websiteUrl avatarUrl url } } } }`; try { const data = await client.request(query); if (data.organization.members.totalCount > 100) { console.error( "100+ members in the organization. That's too much for one GraphQL call." ); } return data.organization.members.nodes; } catch (e) { console.error(e); return []; } } async function main() { const members = await fetchMembers("nteract"); if (!existsSync("generated")) mkdirSync("generated"); writeFileSync("./generated/nteract-members.json", JSON.stringify(members)); } main(); ## Instruction: Update GitHub API query for latest version ## Code After: const { GraphQLClient } = require("graphql-request"); const { writeFileSync, existsSync, mkdirSync } = require("fs"); async function fetchMembers(organisation) { const token = process.env.GH_TOKEN; if (!token) { console.error("'GH_TOKEN' not set. Could not fetch nteract members."); return []; } const client = new GraphQLClient("https://api.github.com/graphql", { headers: { Authorization: `Bearer ${token}` } }); const query = `{ organization(login: ${organisation}) { membersWithRole(first: 100) { totalCount nodes { name login websiteUrl avatarUrl url } } } }`; try { const data = await client.request(query); return data.organization.membersWithRole.nodes; } catch (e) { console.error(e); return []; } } async function main() { const members = await fetchMembers("nteract"); if (!existsSync("generated")) mkdirSync("generated"); writeFileSync("./generated/nteract-members.json", JSON.stringify(members)); } main();
const { GraphQLClient } = require("graphql-request"); const { writeFileSync, existsSync, mkdirSync } = require("fs"); async function fetchMembers(organisation) { const token = process.env.GH_TOKEN; if (!token) { console.error("'GH_TOKEN' not set. Could not fetch nteract members."); return []; } const client = new GraphQLClient("https://api.github.com/graphql", { headers: { Authorization: `Bearer ${token}` } }); const query = `{ organization(login: ${organisation}) { - members(first: 100) { + membersWithRole(first: 100) { ? ++++++++ totalCount nodes { name login websiteUrl avatarUrl url } } } }`; try { const data = await client.request(query); - if (data.organization.members.totalCount > 100) { - console.error( - "100+ members in the organization. That's too much for one GraphQL call." - ); - } - return data.organization.members.nodes; + return data.organization.membersWithRole.nodes; ? ++++++++ } catch (e) { console.error(e); return []; } } async function main() { const members = await fetchMembers("nteract"); if (!existsSync("generated")) mkdirSync("generated"); writeFileSync("./generated/nteract-members.json", JSON.stringify(members)); } main();
9
0.169811
2
7
1e574597db55627cd94e95b022a534f4169bca8c
pom.xml
pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>us.kohi</groupId> <artifactId>PermissionWhitelist</artifactId> <version>1.0-SNAPSHOT</version> <packaging>jar</packaging> <name>PermissionWhitelist</name> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties> <dependencies> <dependency> <groupId>org.spigotmc</groupId> <artifactId>spigot-api</artifactId> <version>1.7.2-R0.4-SNAPSHOT</version> </dependency> </dependencies> </project>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>us.kohi</groupId> <artifactId>PermissionWhitelist</artifactId> <version>1.0-SNAPSHOT</version> <packaging>jar</packaging> <name>PermissionWhitelist</name> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties> <repositories> <repository> <id>md5-repo</id> <name>md_5.net snapshots</name> <url>http://repo.md-5.net/content/repositories/snapshots/</url> </repository> </repositories> <dependencies> <dependency> <groupId>org.spigotmc</groupId> <artifactId>spigot-api</artifactId> <version>(1.7,1.8]</version> </dependency> </dependencies> </project>
Add md-5's repository and fix latest Spigot-API
Add md-5's repository and fix latest Spigot-API
XML
mit
chris752w/PermissionWhitelist
xml
## Code Before: <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>us.kohi</groupId> <artifactId>PermissionWhitelist</artifactId> <version>1.0-SNAPSHOT</version> <packaging>jar</packaging> <name>PermissionWhitelist</name> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties> <dependencies> <dependency> <groupId>org.spigotmc</groupId> <artifactId>spigot-api</artifactId> <version>1.7.2-R0.4-SNAPSHOT</version> </dependency> </dependencies> </project> ## Instruction: Add md-5's repository and fix latest Spigot-API ## Code After: <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>us.kohi</groupId> <artifactId>PermissionWhitelist</artifactId> <version>1.0-SNAPSHOT</version> <packaging>jar</packaging> <name>PermissionWhitelist</name> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties> <repositories> <repository> <id>md5-repo</id> <name>md_5.net snapshots</name> <url>http://repo.md-5.net/content/repositories/snapshots/</url> </repository> </repositories> <dependencies> <dependency> <groupId>org.spigotmc</groupId> <artifactId>spigot-api</artifactId> <version>(1.7,1.8]</version> </dependency> </dependencies> </project>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>us.kohi</groupId> <artifactId>PermissionWhitelist</artifactId> <version>1.0-SNAPSHOT</version> <packaging>jar</packaging> <name>PermissionWhitelist</name> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> </properties> - + <repositories> + <repository> + <id>md5-repo</id> + <name>md_5.net snapshots</name> + <url>http://repo.md-5.net/content/repositories/snapshots/</url> + </repository> + </repositories> <dependencies> <dependency> <groupId>org.spigotmc</groupId> <artifactId>spigot-api</artifactId> - <version>1.7.2-R0.4-SNAPSHOT</version> ? ^^^^^^^^^^^^^^^ + <version>(1.7,1.8]</version> ? + ++ ^^ </dependency> </dependencies> </project>
10
0.434783
8
2
a1b8be5d60d3cae677cd8bf7f5d720e5868a11e4
src/structural_typing/validators.clj
src/structural_typing/validators.clj
(ns structural-typing.validators "Validators. These differ from Bouncer validators in that (1) they default to optional, and (2) the messages include the failing value." (:require [bouncer.validators :as v])) (defmacro defoptional [name doc message-format & body] `(do (v/defvalidator ~name {:optional true :default-message-format ~message-format} ~@body) (alter-meta! (var ~name) assoc :doc ~doc))) (v/defvalidator ^{:doc "Fails if key is missing or its value is `nil`."} required {:default-message-format "%s must be present and non-nil"} [v] (not (nil? v))) (defoptional number "Validates against optional `number?`" "%s is `%s`, which is not a number" [maybe-a-number] (number? maybe-a-number))
(ns structural-typing.validators "Validators. These differ from Bouncer validators in that (1) they default to optional, and (2) the messages include the failing value." (:require [bouncer.validators :as v])) (defmacro defoptional "Define a validator for an optional argument. (defoptional number \"Validates against optional `number?`\" ; doc string \"%s is `%s`, which is not a number\" [maybe-a-number] (number? maybe-a-number)) " [name doc message-format arglist & body] `(do (v/defvalidator ~name {:optional true :default-message-format ~message-format} ~arglist ~@body) (alter-meta! (var ~name) assoc :doc ~doc :arglists (list '~arglist)))) (v/defvalidator ^{:doc "Fails if key is missing or its value is `nil`."} required {:default-message-format "%s must be present and non-nil"} [v] (not (nil? v))) (defoptional number "Validates against optional `number?`" "%s is `%s`, which is not a number" [maybe-a-number] (number? maybe-a-number))
Make validator arglists available to codox
Make validator arglists available to codox
Clojure
mit
marick/structural-typing
clojure
## Code Before: (ns structural-typing.validators "Validators. These differ from Bouncer validators in that (1) they default to optional, and (2) the messages include the failing value." (:require [bouncer.validators :as v])) (defmacro defoptional [name doc message-format & body] `(do (v/defvalidator ~name {:optional true :default-message-format ~message-format} ~@body) (alter-meta! (var ~name) assoc :doc ~doc))) (v/defvalidator ^{:doc "Fails if key is missing or its value is `nil`."} required {:default-message-format "%s must be present and non-nil"} [v] (not (nil? v))) (defoptional number "Validates against optional `number?`" "%s is `%s`, which is not a number" [maybe-a-number] (number? maybe-a-number)) ## Instruction: Make validator arglists available to codox ## Code After: (ns structural-typing.validators "Validators. These differ from Bouncer validators in that (1) they default to optional, and (2) the messages include the failing value." (:require [bouncer.validators :as v])) (defmacro defoptional "Define a validator for an optional argument. (defoptional number \"Validates against optional `number?`\" ; doc string \"%s is `%s`, which is not a number\" [maybe-a-number] (number? maybe-a-number)) " [name doc message-format arglist & body] `(do (v/defvalidator ~name {:optional true :default-message-format ~message-format} ~arglist ~@body) (alter-meta! (var ~name) assoc :doc ~doc :arglists (list '~arglist)))) (v/defvalidator ^{:doc "Fails if key is missing or its value is `nil`."} required {:default-message-format "%s must be present and non-nil"} [v] (not (nil? v))) (defoptional number "Validates against optional `number?`" "%s is `%s`, which is not a number" [maybe-a-number] (number? maybe-a-number))
(ns structural-typing.validators "Validators. These differ from Bouncer validators in that (1) they default to optional, and (2) the messages include the failing value." (:require [bouncer.validators :as v])) - (defmacro defoptional [name doc message-format & body] + (defmacro defoptional + "Define a validator for an optional argument. + + (defoptional number + \"Validates against optional `number?`\" ; doc string + \"%s is `%s`, which is not a number\" + [maybe-a-number] + (number? maybe-a-number)) + " + [name doc message-format arglist & body] `(do (v/defvalidator ~name {:optional true - :default-message-format ~message-format} ~@body) + :default-message-format ~message-format} ~arglist ~@body) ? +++++++++ - (alter-meta! (var ~name) assoc :doc ~doc))) ? --- + (alter-meta! (var ~name) assoc :doc ~doc + :arglists (list '~arglist)))) (v/defvalidator ^{:doc "Fails if key is missing or its value is `nil`."} required {:default-message-format "%s must be present and non-nil"} [v] (not (nil? v))) (defoptional number "Validates against optional `number?`" "%s is `%s`, which is not a number" [maybe-a-number] (number? maybe-a-number))
16
0.761905
13
3
1dff7b90e06d6bb018fd838467271dfb17fcffc9
README.md
README.md
devise_header_token =================== Patches in support for token authentication via headers rather than basic auth or request params for Devise's `token_authenticatable` strategy. This is currently without tests. Usage ----- In your Gemfile: ```ruby gem 'devise' gem 'devise_header_token' ``` In your `config/initializers/devise.rb`, set the header key: ```ruby config.token_authentication_key = 'X-API-TOKEN' ``` And it all should Just Work™. *This gem is maintained by [Stovepipe Studios][stovepipe]* [stovepipe]: http://www.stovepipestudios.com
devise_header_token =================== Patches in support for token authentication via headers rather than basic auth or request params for Devise's `token_authenticatable` strategy. This is without tests. Orphaned! --------- This project isn't being maintained by us anymore! Would you like to give it a home? Usage ----- In your Gemfile: ```ruby gem 'devise' gem 'devise_header_token' ``` In your `config/initializers/devise.rb`, set the header key: ```ruby config.token_authentication_key = 'X-API-TOKEN' ``` And it all should Just Work™.
Add a note about the project's status.
Add a note about the project's status.
Markdown
mit
stvp/devise_header_token
markdown
## Code Before: devise_header_token =================== Patches in support for token authentication via headers rather than basic auth or request params for Devise's `token_authenticatable` strategy. This is currently without tests. Usage ----- In your Gemfile: ```ruby gem 'devise' gem 'devise_header_token' ``` In your `config/initializers/devise.rb`, set the header key: ```ruby config.token_authentication_key = 'X-API-TOKEN' ``` And it all should Just Work™. *This gem is maintained by [Stovepipe Studios][stovepipe]* [stovepipe]: http://www.stovepipestudios.com ## Instruction: Add a note about the project's status. ## Code After: devise_header_token =================== Patches in support for token authentication via headers rather than basic auth or request params for Devise's `token_authenticatable` strategy. This is without tests. Orphaned! --------- This project isn't being maintained by us anymore! Would you like to give it a home? Usage ----- In your Gemfile: ```ruby gem 'devise' gem 'devise_header_token' ``` In your `config/initializers/devise.rb`, set the header key: ```ruby config.token_authentication_key = 'X-API-TOKEN' ``` And it all should Just Work™.
devise_header_token =================== Patches in support for token authentication via headers rather than basic auth or request params for Devise's `token_authenticatable` strategy. - This is currently without tests. ? ---------- + This is without tests. + + Orphaned! + --------- + + This project isn't being maintained by us anymore! Would you like to give it a home? + Usage ----- In your Gemfile: ```ruby gem 'devise' gem 'devise_header_token' ``` In your `config/initializers/devise.rb`, set the header key: ```ruby config.token_authentication_key = 'X-API-TOKEN' ``` And it all should Just Work™. - *This gem is maintained by [Stovepipe Studios][stovepipe]* - - [stovepipe]: http://www.stovepipestudios.com -
12
0.413793
7
5
2e7fdd6a1db4a649f7e3e469729946d3978f83a1
test/test_buffer_small_read.c
test/test_buffer_small_read.c
int main() { unsigned char data[4]; bert_buffer_t buffer; bert_buffer_init(&buffer); memset(data,'A',DATA_SIZE); bert_buffer_write(&buffer,data,DATA_SIZE); bert_buffer_write(&buffer,data,DATA_SIZE); bert_buffer_write(&buffer,data,DATA_SIZE); unsigned char output[DATA_SIZE]; size_t result; if ((result = bert_buffer_read(output,&buffer,DATA_SIZE)) != DATA_SIZE) { test_fail("bert_buffer_read only read %u bytes, expected %u",result,DATA_SIZE); } if (memcmp(output,data,DATA_SIZE)) { test_fail("bert_buffer_read return %c%c%c%c, expected AAAA",output[0],output[1],output[2],output[3]); } return 0; }
int main() { unsigned char data[4]; bert_buffer_t buffer; bert_buffer_init(&buffer); memset(data,'A',DATA_SIZE); unsigned int i; for (i=0;i<((BERT_CHUNK_SIZE / DATA_SIZE) * 2);i++) { bert_buffer_write(&buffer,data,DATA_SIZE); } unsigned char output[DATA_SIZE]; size_t result; if ((result = bert_buffer_read(output,&buffer,DATA_SIZE)) != DATA_SIZE) { test_fail("bert_buffer_read only read %u bytes, expected %u",result,DATA_SIZE); } if (memcmp(output,data,DATA_SIZE)) { test_fail("bert_buffer_read return %c%c%c%c, expected AAAA",output[0],output[1],output[2],output[3]); } return 0; }
Make sure the buffer small read populates the buffer with multiple chunks.
Make sure the buffer small read populates the buffer with multiple chunks.
C
mit
postmodern/libBERT
c
## Code Before: int main() { unsigned char data[4]; bert_buffer_t buffer; bert_buffer_init(&buffer); memset(data,'A',DATA_SIZE); bert_buffer_write(&buffer,data,DATA_SIZE); bert_buffer_write(&buffer,data,DATA_SIZE); bert_buffer_write(&buffer,data,DATA_SIZE); unsigned char output[DATA_SIZE]; size_t result; if ((result = bert_buffer_read(output,&buffer,DATA_SIZE)) != DATA_SIZE) { test_fail("bert_buffer_read only read %u bytes, expected %u",result,DATA_SIZE); } if (memcmp(output,data,DATA_SIZE)) { test_fail("bert_buffer_read return %c%c%c%c, expected AAAA",output[0],output[1],output[2],output[3]); } return 0; } ## Instruction: Make sure the buffer small read populates the buffer with multiple chunks. ## Code After: int main() { unsigned char data[4]; bert_buffer_t buffer; bert_buffer_init(&buffer); memset(data,'A',DATA_SIZE); unsigned int i; for (i=0;i<((BERT_CHUNK_SIZE / DATA_SIZE) * 2);i++) { bert_buffer_write(&buffer,data,DATA_SIZE); } unsigned char output[DATA_SIZE]; size_t result; if ((result = bert_buffer_read(output,&buffer,DATA_SIZE)) != DATA_SIZE) { test_fail("bert_buffer_read only read %u bytes, expected %u",result,DATA_SIZE); } if (memcmp(output,data,DATA_SIZE)) { test_fail("bert_buffer_read return %c%c%c%c, expected AAAA",output[0],output[1],output[2],output[3]); } return 0; }
int main() { unsigned char data[4]; bert_buffer_t buffer; bert_buffer_init(&buffer); memset(data,'A',DATA_SIZE); + unsigned int i; + + for (i=0;i<((BERT_CHUNK_SIZE / DATA_SIZE) * 2);i++) + { - bert_buffer_write(&buffer,data,DATA_SIZE); + bert_buffer_write(&buffer,data,DATA_SIZE); ? + + } - bert_buffer_write(&buffer,data,DATA_SIZE); - bert_buffer_write(&buffer,data,DATA_SIZE); unsigned char output[DATA_SIZE]; size_t result; if ((result = bert_buffer_read(output,&buffer,DATA_SIZE)) != DATA_SIZE) { test_fail("bert_buffer_read only read %u bytes, expected %u",result,DATA_SIZE); } if (memcmp(output,data,DATA_SIZE)) { test_fail("bert_buffer_read return %c%c%c%c, expected AAAA",output[0],output[1],output[2],output[3]); } return 0; }
9
0.310345
6
3
33280ea228c49575767219910650aa0a16136931
lib/written/app/assets/stylesheets/written.scss
lib/written/app/assets/stylesheets/written.scss
[data-editor="written"] { white-space: pre-wrap; p { min-height: 1.2em; } h1 { font-size: 3em; } ul, li { list-style-type: none; } figure { background: rgba(230,230,230,1); & > div { display: flex; min-height: 200px; } img { margin: auto; } figcaption { padding: 4px; } } }
[data-editor="written"] { white-space: pre-wrap; p { min-height: 1.2em; } h1 { font-size: 3em; } ul, li { list-style-type: none; } figure { background: rgba(230,230,230,1); & > div { display: flex; min-height: 200px; padding: 4px; } img { margin: auto; } figcaption { padding: 4px; } } }
Add a small padding to images
Add a small padding to images
SCSS
mit
pothibo/written,pothibo/written,pothibo/written
scss
## Code Before: [data-editor="written"] { white-space: pre-wrap; p { min-height: 1.2em; } h1 { font-size: 3em; } ul, li { list-style-type: none; } figure { background: rgba(230,230,230,1); & > div { display: flex; min-height: 200px; } img { margin: auto; } figcaption { padding: 4px; } } } ## Instruction: Add a small padding to images ## Code After: [data-editor="written"] { white-space: pre-wrap; p { min-height: 1.2em; } h1 { font-size: 3em; } ul, li { list-style-type: none; } figure { background: rgba(230,230,230,1); & > div { display: flex; min-height: 200px; padding: 4px; } img { margin: auto; } figcaption { padding: 4px; } } }
[data-editor="written"] { white-space: pre-wrap; p { min-height: 1.2em; } h1 { font-size: 3em; } ul, li { list-style-type: none; } figure { background: rgba(230,230,230,1); & > div { display: flex; min-height: 200px; + padding: 4px; } img { margin: auto; } figcaption { padding: 4px; } } }
1
0.03125
1
0
e82d4a2af8cac1fbf8779cdae25788f4ca564ff7
website/templates/mobile/pages/index.html
website/templates/mobile/pages/index.html
{% extends "mobile/base.html" %}
{% extends "mobile/base.html" %} {% load mezzanine_tags staticfiles %} {% block meta_title %}Home{% endblock %} {% block body_class %}home{% endblock %} {% block logo %} <a href="/"><img src="{% static "images/logo-mobile.png" %}" alt="Jonge Democraten " /></a> {% endblock logo %} {% block main %} {% for item in column_left_items %} <section class="collapsable {{ item.title|slugify }}"> <div class="moduletable"> {% with template_name=item.get_template_name|stringformat:"s" %} {% include "elements/"|add:template_name %} {% endwith %} </div> </section> {% endfor %} {% for item in column_right_items %} <section class="collapsable {{ item.title|slugify }}"> <div class="moduletable"> {% with template_name=item.get_template_name|stringformat:"s" %} {% include "elements/"|add:template_name %} {% endwith %} </div> </section> {% endfor %} {{ block.super }} {% endblock main %}
Create home page in mobile template
Create home page in mobile template
HTML
mit
jonge-democraten/website,jonge-democraten/website,jonge-democraten/website,jonge-democraten/website
html
## Code Before: {% extends "mobile/base.html" %} ## Instruction: Create home page in mobile template ## Code After: {% extends "mobile/base.html" %} {% load mezzanine_tags staticfiles %} {% block meta_title %}Home{% endblock %} {% block body_class %}home{% endblock %} {% block logo %} <a href="/"><img src="{% static "images/logo-mobile.png" %}" alt="Jonge Democraten " /></a> {% endblock logo %} {% block main %} {% for item in column_left_items %} <section class="collapsable {{ item.title|slugify }}"> <div class="moduletable"> {% with template_name=item.get_template_name|stringformat:"s" %} {% include "elements/"|add:template_name %} {% endwith %} </div> </section> {% endfor %} {% for item in column_right_items %} <section class="collapsable {{ item.title|slugify }}"> <div class="moduletable"> {% with template_name=item.get_template_name|stringformat:"s" %} {% include "elements/"|add:template_name %} {% endwith %} </div> </section> {% endfor %} {{ block.super }} {% endblock main %}
{% extends "mobile/base.html" %} + {% load mezzanine_tags staticfiles %} + + {% block meta_title %}Home{% endblock %} + + {% block body_class %}home{% endblock %} + + {% block logo %} + <a href="/"><img src="{% static "images/logo-mobile.png" %}" alt="Jonge Democraten " /></a> + {% endblock logo %} + + {% block main %} + {% for item in column_left_items %} + <section class="collapsable {{ item.title|slugify }}"> + <div class="moduletable"> + {% with template_name=item.get_template_name|stringformat:"s" %} + {% include "elements/"|add:template_name %} + {% endwith %} + </div> + </section> + {% endfor %} + {% for item in column_right_items %} + <section class="collapsable {{ item.title|slugify }}"> + <div class="moduletable"> + {% with template_name=item.get_template_name|stringformat:"s" %} + {% include "elements/"|add:template_name %} + {% endwith %} + </div> + </section> + {% endfor %} + {{ block.super }} + {% endblock main %} +
32
16
32
0
02dcea03e43ae9ca23bf2fa17333983a870b2100
.travis.yml
.travis.yml
language: node_js node_js: - '0.10' script: gulp test before_install: - mkdir -p .npm - export DISPLAY=:99.0 - sh -e /etc/init.d/xvfb start notifications: slack: secure: q2z7sQIuc3sqGnRoHdmbOBcsvvffK3abJRL+qd/7Kh0ET/BtN6QJqM5O2IZwttFiDjVF5+lyIU0VEHBGyGoru5ovyRl7aOW6kq2uxJqdZrZcsQqz2Uug1M0RZG/1wSG7E2MbRdO0UGoJUCWxmXxAIRdg/cD0zEgAPxBjPc4m+aQ=
language: node_js node_js: - '0.10' script: gulp test before_install: - mkdir -p .npm - export DISPLAY=:99.0 - sh -e /etc/init.d/xvfb start notifications: slack: secure: CgXxNEQrQ82EatyF/wSdF0P5rXcWPL+fFZ1lb1aBb8RbVt5gwddJ6xWVD/nYSr6tIJvIYHYhoYsIDPENwezIPsesG7kWXerQhydsEcA34JKxzsStd/TmU6Moxuwy6KTN7yzmL6586nSvoAw9TNPgvRkJFkH07asjGIc9Rlaq7/Y=
Add new slack notification token
Add new slack notification token
YAML
apache-2.0
manran/js-stellar-base,manran/js-stellar-base,strllar/js-stellar-base,johansten/js-stellar-base,Payshare/js-stellar-base,stellar/js-stellar-base,johansten/js-stellar-base,johansten/js-stellar-base,manran/js-stellar-base,stellar/js-stellar-base,strllar/js-stellar-base,strllar/js-stellar-base,Payshare/js-stellar-base
yaml
## Code Before: language: node_js node_js: - '0.10' script: gulp test before_install: - mkdir -p .npm - export DISPLAY=:99.0 - sh -e /etc/init.d/xvfb start notifications: slack: secure: q2z7sQIuc3sqGnRoHdmbOBcsvvffK3abJRL+qd/7Kh0ET/BtN6QJqM5O2IZwttFiDjVF5+lyIU0VEHBGyGoru5ovyRl7aOW6kq2uxJqdZrZcsQqz2Uug1M0RZG/1wSG7E2MbRdO0UGoJUCWxmXxAIRdg/cD0zEgAPxBjPc4m+aQ= ## Instruction: Add new slack notification token ## Code After: language: node_js node_js: - '0.10' script: gulp test before_install: - mkdir -p .npm - export DISPLAY=:99.0 - sh -e /etc/init.d/xvfb start notifications: slack: secure: CgXxNEQrQ82EatyF/wSdF0P5rXcWPL+fFZ1lb1aBb8RbVt5gwddJ6xWVD/nYSr6tIJvIYHYhoYsIDPENwezIPsesG7kWXerQhydsEcA34JKxzsStd/TmU6Moxuwy6KTN7yzmL6586nSvoAw9TNPgvRkJFkH07asjGIc9Rlaq7/Y=
language: node_js node_js: - '0.10' script: gulp test before_install: - mkdir -p .npm - export DISPLAY=:99.0 - sh -e /etc/init.d/xvfb start notifications: slack: - secure: q2z7sQIuc3sqGnRoHdmbOBcsvvffK3abJRL+qd/7Kh0ET/BtN6QJqM5O2IZwttFiDjVF5+lyIU0VEHBGyGoru5ovyRl7aOW6kq2uxJqdZrZcsQqz2Uug1M0RZG/1wSG7E2MbRdO0UGoJUCWxmXxAIRdg/cD0zEgAPxBjPc4m+aQ= + secure: CgXxNEQrQ82EatyF/wSdF0P5rXcWPL+fFZ1lb1aBb8RbVt5gwddJ6xWVD/nYSr6tIJvIYHYhoYsIDPENwezIPsesG7kWXerQhydsEcA34JKxzsStd/TmU6Moxuwy6KTN7yzmL6586nSvoAw9TNPgvRkJFkH07asjGIc9Rlaq7/Y=
2
0.181818
1
1
3771da113d6cb8e585f76667aeafa02f772bd97e
phpunit.xml
phpunit.xml
<phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://schema.phpunit.de/5.2/phpunit.xsd" bootstrap="src/autoload.php" beStrictAboutTestsThatDoNotTestAnything="true" beStrictAboutOutputDuringTests="true" convertErrorsToExceptions="true" convertNoticesToExceptions="true" convertWarningsToExceptions="true" stopOnError="true" stopOnFailure="true" stopOnIncomplete="true" stopOnSkipped="true" stopOnRisky="true" strict="true" verbose="true" colors="true"> <testsuites> <testsuite name="All tests"> <directory suffix="Test.php" phpVersion="5.6" phpVersionOperator=">=">tests</directory> </testsuite> </testsuites> <filter> <blacklist> <directory>demo</directory> <directory>tests</directory> <file>src/autoload.php</file> </blacklist> </filter> </phpunit>
<phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://schema.phpunit.de/5.2/phpunit.xsd" bootstrap="src/autoload.php" beStrictAboutTestsThatDoNotTestAnything="true" beStrictAboutOutputDuringTests="true" convertErrorsToExceptions="true" convertNoticesToExceptions="true" convertWarningsToExceptions="true" stopOnError="true" stopOnFailure="true" stopOnIncomplete="true" stopOnSkipped="true" stopOnRisky="true" strict="true" verbose="true" colors="true"> <testsuites> <testsuite name="All tests"> <directory suffix="Test.php" phpVersion="5.6" phpVersionOperator=">=">tests</directory> </testsuite> </testsuites> <filter> <whitelist> <directory>src</directory> </whitelist> <blacklist> <file>src/autoload.php</file> </blacklist> </filter> </phpunit>
Set correct filter for PHPunit
Set correct filter for PHPunit
XML
mit
xicrow/php-debug,xicrow/debug
xml
## Code Before: <phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://schema.phpunit.de/5.2/phpunit.xsd" bootstrap="src/autoload.php" beStrictAboutTestsThatDoNotTestAnything="true" beStrictAboutOutputDuringTests="true" convertErrorsToExceptions="true" convertNoticesToExceptions="true" convertWarningsToExceptions="true" stopOnError="true" stopOnFailure="true" stopOnIncomplete="true" stopOnSkipped="true" stopOnRisky="true" strict="true" verbose="true" colors="true"> <testsuites> <testsuite name="All tests"> <directory suffix="Test.php" phpVersion="5.6" phpVersionOperator=">=">tests</directory> </testsuite> </testsuites> <filter> <blacklist> <directory>demo</directory> <directory>tests</directory> <file>src/autoload.php</file> </blacklist> </filter> </phpunit> ## Instruction: Set correct filter for PHPunit ## Code After: <phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://schema.phpunit.de/5.2/phpunit.xsd" bootstrap="src/autoload.php" beStrictAboutTestsThatDoNotTestAnything="true" beStrictAboutOutputDuringTests="true" convertErrorsToExceptions="true" convertNoticesToExceptions="true" convertWarningsToExceptions="true" stopOnError="true" stopOnFailure="true" stopOnIncomplete="true" stopOnSkipped="true" stopOnRisky="true" strict="true" verbose="true" colors="true"> <testsuites> <testsuite name="All tests"> <directory suffix="Test.php" phpVersion="5.6" phpVersionOperator=">=">tests</directory> </testsuite> </testsuites> <filter> <whitelist> <directory>src</directory> </whitelist> <blacklist> <file>src/autoload.php</file> </blacklist> </filter> </phpunit>
<phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://schema.phpunit.de/5.2/phpunit.xsd" bootstrap="src/autoload.php" beStrictAboutTestsThatDoNotTestAnything="true" beStrictAboutOutputDuringTests="true" convertErrorsToExceptions="true" convertNoticesToExceptions="true" convertWarningsToExceptions="true" stopOnError="true" stopOnFailure="true" stopOnIncomplete="true" stopOnSkipped="true" stopOnRisky="true" strict="true" verbose="true" colors="true"> <testsuites> <testsuite name="All tests"> <directory suffix="Test.php" phpVersion="5.6" phpVersionOperator=">=">tests</directory> </testsuite> </testsuites> <filter> + <whitelist> + <directory>src</directory> + </whitelist> <blacklist> - <directory>demo</directory> - <directory>tests</directory> <file>src/autoload.php</file> </blacklist> </filter> </phpunit>
5
0.166667
3
2
02a19361fb2e853ffa4c59abfb54ff159011dd3b
spec/system/support/cuprite_setup.rb
spec/system/support/cuprite_setup.rb
require "capybara/cuprite" Capybara.register_driver(:cuprite) do |app| Capybara::Cuprite::Driver.new( app, **{ window_size: [1200, 800], browser_options: {}, process_timeout: 20, timeout: 20, # Don't load scripts from external sources, like google maps or stripe url_whitelist: ["http://localhost", "http://0.0.0.0", "http://127.0.0.1"], inspector: true, headless: true } ) end # Configure Capybara to use :cuprite driver by default Capybara.default_driver = Capybara.javascript_driver = :cuprite RSpec.configure do |config| config.include CupriteHelpers, type: :system config.include Devise::Test::IntegrationHelpers, type: :system config.prepend_before(:each, type: :system) { driven_by :cuprite } # System tests use transactional fixtures instead of DatabaseCleaner config.use_transactional_fixtures = true # Make sure url helpers in mailers use the Capybara server host. config.around(:each, type: :system) do |example| original_host = Rails.application.default_url_options[:host] Rails.application.default_url_options[:host] = Capybara.server_host example.run Rails.application.default_url_options[:host] = original_host end end
require "capybara/cuprite" Capybara.register_driver(:cuprite) do |app| Capybara::Cuprite::Driver.new( app, **{ window_size: [1200, 800], browser_options: {}, process_timeout: 20, timeout: 20, # Don't load scripts from external sources, like google maps or stripe url_whitelist: ["http://localhost", "http://0.0.0.0", "http://127.0.0.1"], inspector: true, headless: true, js_errors: true, } ) end # Configure Capybara to use :cuprite driver by default Capybara.default_driver = Capybara.javascript_driver = :cuprite RSpec.configure do |config| config.include CupriteHelpers, type: :system config.include Devise::Test::IntegrationHelpers, type: :system config.prepend_before(:each, type: :system) { driven_by :cuprite } # System tests use transactional fixtures instead of DatabaseCleaner config.use_transactional_fixtures = true # Make sure url helpers in mailers use the Capybara server host. config.around(:each, type: :system) do |example| original_host = Rails.application.default_url_options[:host] Rails.application.default_url_options[:host] = Capybara.server_host example.run Rails.application.default_url_options[:host] = original_host end end
Raise JavaScript errors within system specs
Raise JavaScript errors within system specs Re-raising console errors helps us to find subtle bugs.
Ruby
agpl-3.0
mkllnk/openfoodnetwork,lin-d-hop/openfoodnetwork,lin-d-hop/openfoodnetwork,Matt-Yorkley/openfoodnetwork,Matt-Yorkley/openfoodnetwork,lin-d-hop/openfoodnetwork,mkllnk/openfoodnetwork,openfoodfoundation/openfoodnetwork,openfoodfoundation/openfoodnetwork,mkllnk/openfoodnetwork,openfoodfoundation/openfoodnetwork,mkllnk/openfoodnetwork,Matt-Yorkley/openfoodnetwork,Matt-Yorkley/openfoodnetwork,openfoodfoundation/openfoodnetwork,lin-d-hop/openfoodnetwork
ruby
## Code Before: require "capybara/cuprite" Capybara.register_driver(:cuprite) do |app| Capybara::Cuprite::Driver.new( app, **{ window_size: [1200, 800], browser_options: {}, process_timeout: 20, timeout: 20, # Don't load scripts from external sources, like google maps or stripe url_whitelist: ["http://localhost", "http://0.0.0.0", "http://127.0.0.1"], inspector: true, headless: true } ) end # Configure Capybara to use :cuprite driver by default Capybara.default_driver = Capybara.javascript_driver = :cuprite RSpec.configure do |config| config.include CupriteHelpers, type: :system config.include Devise::Test::IntegrationHelpers, type: :system config.prepend_before(:each, type: :system) { driven_by :cuprite } # System tests use transactional fixtures instead of DatabaseCleaner config.use_transactional_fixtures = true # Make sure url helpers in mailers use the Capybara server host. config.around(:each, type: :system) do |example| original_host = Rails.application.default_url_options[:host] Rails.application.default_url_options[:host] = Capybara.server_host example.run Rails.application.default_url_options[:host] = original_host end end ## Instruction: Raise JavaScript errors within system specs Re-raising console errors helps us to find subtle bugs. ## Code After: require "capybara/cuprite" Capybara.register_driver(:cuprite) do |app| Capybara::Cuprite::Driver.new( app, **{ window_size: [1200, 800], browser_options: {}, process_timeout: 20, timeout: 20, # Don't load scripts from external sources, like google maps or stripe url_whitelist: ["http://localhost", "http://0.0.0.0", "http://127.0.0.1"], inspector: true, headless: true, js_errors: true, } ) end # Configure Capybara to use :cuprite driver by default Capybara.default_driver = Capybara.javascript_driver = :cuprite RSpec.configure do |config| config.include CupriteHelpers, type: :system config.include Devise::Test::IntegrationHelpers, type: :system config.prepend_before(:each, type: :system) { driven_by :cuprite } # System tests use transactional fixtures instead of DatabaseCleaner config.use_transactional_fixtures = true # Make sure url helpers in mailers use the Capybara server host. config.around(:each, type: :system) do |example| original_host = Rails.application.default_url_options[:host] Rails.application.default_url_options[:host] = Capybara.server_host example.run Rails.application.default_url_options[:host] = original_host end end
require "capybara/cuprite" Capybara.register_driver(:cuprite) do |app| Capybara::Cuprite::Driver.new( app, **{ window_size: [1200, 800], browser_options: {}, process_timeout: 20, timeout: 20, # Don't load scripts from external sources, like google maps or stripe url_whitelist: ["http://localhost", "http://0.0.0.0", "http://127.0.0.1"], inspector: true, - headless: true + headless: true, ? + + js_errors: true, } ) end # Configure Capybara to use :cuprite driver by default Capybara.default_driver = Capybara.javascript_driver = :cuprite RSpec.configure do |config| config.include CupriteHelpers, type: :system config.include Devise::Test::IntegrationHelpers, type: :system config.prepend_before(:each, type: :system) { driven_by :cuprite } # System tests use transactional fixtures instead of DatabaseCleaner config.use_transactional_fixtures = true # Make sure url helpers in mailers use the Capybara server host. config.around(:each, type: :system) do |example| original_host = Rails.application.default_url_options[:host] Rails.application.default_url_options[:host] = Capybara.server_host example.run Rails.application.default_url_options[:host] = original_host end end
3
0.076923
2
1
ca9213e3ebfcc56d269c1434c549d833056a516c
_config.yml
_config.yml
name: Romain Berger markdown: redcarpet pygments: true
name: Romain Berger markdown: redcarpet exclude: - Rakefile - Makefile - Readme.md - config.rb - sprites - '*.scss'
Add excludes to jekyll config
Add excludes to jekyll config
YAML
mit
romainberger/romainberger.com,romainberger/romainberger.com,romainberger/romainberger.com,romainberger/romainberger.com
yaml
## Code Before: name: Romain Berger markdown: redcarpet pygments: true ## Instruction: Add excludes to jekyll config ## Code After: name: Romain Berger markdown: redcarpet exclude: - Rakefile - Makefile - Readme.md - config.rb - sprites - '*.scss'
name: Romain Berger markdown: redcarpet - pygments: true + exclude: + - Rakefile + - Makefile + - Readme.md + - config.rb + - sprites + - '*.scss'
8
2.666667
7
1
ca17534cc3686da483e56cf94fba4f01de20a509
admin/init.php
admin/init.php
<?php /** * Define Constants. */ define('ADMIN_DIR', get_template_directory() .'/admin'); define('ADMIN_URL', get_template_directory_uri() .'/admin'); define('AUTHOR', 'Vincent Klaiber'); define('AUTHOR_URL', 'http://vinkla.com'); define('LOGIN_IMAGE_PATH', ADMIN_URL.'/images/admin-login-logo.png'); define('LOGIN_HEADER_URL', 'http://vinkla.com'); /** * Load Admin Components. */ require_once(ADMIN_DIR .'/admin-remove.php'); require_once(ADMIN_DIR .'/admin-functions.php');
<?php /** * Define Constants. */ define('TEMPLATE_DIR', get_template_directory()); define('ADMIN_DIR', get_template_directory().'/admin'); define('ADMIN_URL', get_template_directory_uri().'/admin'); define('AUTHOR', 'Vincent Klaiber'); define('AUTHOR_URL', 'http://vinkla.com'); define('LOGIN_IMAGE_PATH', TEMPLATE_DIR.'/images/admin-login-logo.png'); define('LOGIN_HEADER_URL', 'http://vinkla.com'); /** * Load Admin Components. */ require_once(ADMIN_DIR .'/admin-remove.php'); require_once(ADMIN_DIR .'/admin-functions.php');
Add a template directory constant.
Add a template directory constant.
PHP
mit
fieleman/wordplate,fieleman/wordplate,mikaelmattsson/wordplate
php
## Code Before: <?php /** * Define Constants. */ define('ADMIN_DIR', get_template_directory() .'/admin'); define('ADMIN_URL', get_template_directory_uri() .'/admin'); define('AUTHOR', 'Vincent Klaiber'); define('AUTHOR_URL', 'http://vinkla.com'); define('LOGIN_IMAGE_PATH', ADMIN_URL.'/images/admin-login-logo.png'); define('LOGIN_HEADER_URL', 'http://vinkla.com'); /** * Load Admin Components. */ require_once(ADMIN_DIR .'/admin-remove.php'); require_once(ADMIN_DIR .'/admin-functions.php'); ## Instruction: Add a template directory constant. ## Code After: <?php /** * Define Constants. */ define('TEMPLATE_DIR', get_template_directory()); define('ADMIN_DIR', get_template_directory().'/admin'); define('ADMIN_URL', get_template_directory_uri().'/admin'); define('AUTHOR', 'Vincent Klaiber'); define('AUTHOR_URL', 'http://vinkla.com'); define('LOGIN_IMAGE_PATH', TEMPLATE_DIR.'/images/admin-login-logo.png'); define('LOGIN_HEADER_URL', 'http://vinkla.com'); /** * Load Admin Components. */ require_once(ADMIN_DIR .'/admin-remove.php'); require_once(ADMIN_DIR .'/admin-functions.php');
<?php /** * Define Constants. */ + define('TEMPLATE_DIR', get_template_directory()); - define('ADMIN_DIR', get_template_directory() .'/admin'); ? - + define('ADMIN_DIR', get_template_directory().'/admin'); - define('ADMIN_URL', get_template_directory_uri() .'/admin'); ? - + define('ADMIN_URL', get_template_directory_uri().'/admin'); define('AUTHOR', 'Vincent Klaiber'); define('AUTHOR_URL', 'http://vinkla.com'); - define('LOGIN_IMAGE_PATH', ADMIN_URL.'/images/admin-login-logo.png'); ? - --- - + define('LOGIN_IMAGE_PATH', TEMPLATE_DIR.'/images/admin-login-logo.png'); ? +++++ +++ define('LOGIN_HEADER_URL', 'http://vinkla.com'); /** * Load Admin Components. */ require_once(ADMIN_DIR .'/admin-remove.php'); require_once(ADMIN_DIR .'/admin-functions.php');
7
0.368421
4
3
504fab5de89bd7e0598cac7e409c6c61aa2a2b35
README.md
README.md
Neta ==== > A modern, decentralized, and customizable chatting client. Neta is a modern, decentralized, and customizable chatting client, which uses a developer-friendly ecosystem to enable use of plugin and theme APIs, so users can build a more personal chatting client, but not at the expense of security, usability, or price. ## Installation Using the npm installer: ```shell $ npm install -g neta ``` Building the source: ``` gulp build package ``` (Package will be in `dist`) ## Usage ```shell $ neta ``` ## Docs & Support - [GitHub Wiki][wiki] - [Repo Issues][issues] - [Contact](#Credits) ## Credits |![Jamen Marz][jamen-image]| |:--------:| | [@jamen] | ## License [GPL v3](LICENSE) &copy; Jamen Marzonie <!-- All http links must be "tagged" --> [@jamen]: https://github.com/jamen [jamen-image]: https://avatars2.githubusercontent.com/u/6251703?v=3&s=125 [wiki]: https://github.com/jamen/neta/wiki [issues]: https://github.com/jamen/neta/issues
Neta ==== > A modern, decentralized, and customizable chatting client. Neta is a modern, decentralized, and customizable chatting client, which uses a developer-friendly ecosystem to enable use of plugin and theme APIs, so users can build a more personal chatting client, but not at the expense of security, usability, or price. ## Installation Using the npm installer: ```shell $ npm install -g neta ``` Building the source: ``` gulp build package ``` (Package will be in `dist`) ## Usage ```shell $ neta ``` For developers, you can use the `npm start` script, so you don't have to rebuild constant: ``` $ npm start ``` ## Docs & Support - [GitHub Wiki][wiki] - [Repo Issues][issues] - [Contact](#Credits) ## Credits |![Jamen Marz][jamen-image]| |:--------:| | [@jamen] | ## License [GPL v3](LICENSE) &copy; Jamen Marzonie <!-- All http links must be "tagged" --> [@jamen]: https://github.com/jamen [jamen-image]: https://avatars2.githubusercontent.com/u/6251703?v=3&s=125 [wiki]: https://github.com/jamen/neta/wiki [issues]: https://github.com/jamen/neta/issues
Add alternative usage method for developers
Add alternative usage method for developers
Markdown
mit
JamenMarz/cluster,JamenMarz/vint
markdown
## Code Before: Neta ==== > A modern, decentralized, and customizable chatting client. Neta is a modern, decentralized, and customizable chatting client, which uses a developer-friendly ecosystem to enable use of plugin and theme APIs, so users can build a more personal chatting client, but not at the expense of security, usability, or price. ## Installation Using the npm installer: ```shell $ npm install -g neta ``` Building the source: ``` gulp build package ``` (Package will be in `dist`) ## Usage ```shell $ neta ``` ## Docs & Support - [GitHub Wiki][wiki] - [Repo Issues][issues] - [Contact](#Credits) ## Credits |![Jamen Marz][jamen-image]| |:--------:| | [@jamen] | ## License [GPL v3](LICENSE) &copy; Jamen Marzonie <!-- All http links must be "tagged" --> [@jamen]: https://github.com/jamen [jamen-image]: https://avatars2.githubusercontent.com/u/6251703?v=3&s=125 [wiki]: https://github.com/jamen/neta/wiki [issues]: https://github.com/jamen/neta/issues ## Instruction: Add alternative usage method for developers ## Code After: Neta ==== > A modern, decentralized, and customizable chatting client. Neta is a modern, decentralized, and customizable chatting client, which uses a developer-friendly ecosystem to enable use of plugin and theme APIs, so users can build a more personal chatting client, but not at the expense of security, usability, or price. ## Installation Using the npm installer: ```shell $ npm install -g neta ``` Building the source: ``` gulp build package ``` (Package will be in `dist`) ## Usage ```shell $ neta ``` For developers, you can use the `npm start` script, so you don't have to rebuild constant: ``` $ npm start ``` ## Docs & Support - [GitHub Wiki][wiki] - [Repo Issues][issues] - [Contact](#Credits) ## Credits |![Jamen Marz][jamen-image]| |:--------:| | [@jamen] | ## License [GPL v3](LICENSE) &copy; Jamen Marzonie <!-- All http links must be "tagged" --> [@jamen]: https://github.com/jamen [jamen-image]: https://avatars2.githubusercontent.com/u/6251703?v=3&s=125 [wiki]: https://github.com/jamen/neta/wiki [issues]: https://github.com/jamen/neta/issues
Neta ==== > A modern, decentralized, and customizable chatting client. Neta is a modern, decentralized, and customizable chatting client, which uses a developer-friendly ecosystem to enable use of plugin and theme APIs, so users can build a more personal chatting client, but not at the expense of security, usability, or price. ## Installation Using the npm installer: ```shell $ npm install -g neta ``` Building the source: ``` gulp build package ``` (Package will be in `dist`) ## Usage ```shell $ neta ``` + For developers, you can use the `npm start` script, so you don't have to rebuild constant: + ``` + $ npm start + ``` + ## Docs & Support - [GitHub Wiki][wiki] - [Repo Issues][issues] - [Contact](#Credits) ## Credits |![Jamen Marz][jamen-image]| |:--------:| | [@jamen] | ## License [GPL v3](LICENSE) &copy; Jamen Marzonie <!-- All http links must be "tagged" --> [@jamen]: https://github.com/jamen [jamen-image]: https://avatars2.githubusercontent.com/u/6251703?v=3&s=125 [wiki]: https://github.com/jamen/neta/wiki [issues]: https://github.com/jamen/neta/issues
5
0.119048
5
0
3b34f5af33f329291c759ca1a2143741288988f3
src/kibana4-dashboards/README.md
src/kibana4-dashboards/README.md
Import kibana dashboards: ``` curl https://raw.githubusercontent.com/logsearch/logsearch-for-cloudfoundry/master/src/kibana4-dashboards/kibana.json | curl --data-binary @- http://10.10.3.51:9200/_bulk ```
Import kibana dashboards: ``` cat kibana.json | curl --data-binary @- http://10.10.3.51:9200/_bulk ```
Revert to importing dashboards from local file; since trying to curl from github seems to fail to import the final line of the .json file
Revert to importing dashboards from local file; since trying to curl from github seems to fail to import the final line of the .json file
Markdown
apache-2.0
hannayurkevich/logsearch-for-cloudfoundry,cloudfoundry-community/logsearch-for-cloudfoundry,logsearch/logsearch-for-cloudfoundry,cloudfoundry-community/logsearch-for-cloudfoundry,axelaris/logsearch-for-cloudfoundry,simonjohansson/logsearch-for-cloudfoundry,logsearch/logsearch-for-cloudfoundry,axelaris/logsearch-for-cloudfoundry,simonjohansson/logsearch-for-cloudfoundry,logsearch/logsearch-for-cloudfoundry,x6j8x/logsearch-for-cloudfoundry,cloudfoundry-community/logsearch-for-cloudfoundry,Orange-OpenSource/logsearch-for-cloudfoundry,simonjohansson/logsearch-for-cloudfoundry,x6j8x/logsearch-for-cloudfoundry,x6j8x/logsearch-for-cloudfoundry,Orange-OpenSource/logsearch-for-cloudfoundry,hannayurkevich/logsearch-for-cloudfoundry,cloudfoundry-community/logsearch-for-cloudfoundry,axelaris/logsearch-for-cloudfoundry,simonjohansson/logsearch-for-cloudfoundry,x6j8x/logsearch-for-cloudfoundry,logsearch/logsearch-for-cloudfoundry,Orange-OpenSource/logsearch-for-cloudfoundry,Orange-OpenSource/logsearch-for-cloudfoundry,logsearch/logsearch-for-cloudfoundry,hannayurkevich/logsearch-for-cloudfoundry,hannayurkevich/logsearch-for-cloudfoundry,cloudfoundry-community/logsearch-for-cloudfoundry
markdown
## Code Before: Import kibana dashboards: ``` curl https://raw.githubusercontent.com/logsearch/logsearch-for-cloudfoundry/master/src/kibana4-dashboards/kibana.json | curl --data-binary @- http://10.10.3.51:9200/_bulk ``` ## Instruction: Revert to importing dashboards from local file; since trying to curl from github seems to fail to import the final line of the .json file ## Code After: Import kibana dashboards: ``` cat kibana.json | curl --data-binary @- http://10.10.3.51:9200/_bulk ```
Import kibana dashboards: ``` - curl https://raw.githubusercontent.com/logsearch/logsearch-for-cloudfoundry/master/src/kibana4-dashboards/kibana.json | curl --data-binary @- http://10.10.3.51:9200/_bulk + cat kibana.json | curl --data-binary @- http://10.10.3.51:9200/_bulk ```
2
0.4
1
1
ee7cef7c8ca6a66898bd5ea1467f00627f5271c4
blog/blogpost2/content.html
blog/blogpost2/content.html
<p> Idag träffades vi för en informationsträff. För dig som inte var här, så följer här en sammanfattning av de viktigaste punkterna: </p> <p> <ul> <li>Vi träffas på torsdagar kl. 16.00 - 17.00 på Malmö Borgarskola i sal B010, som ligger i källaren. Det är bara huvudingången som är öppen om man inte går på Borgarskolan. <a href="%al% staticlink: blog/borgarkarta.png %al%">Här finns en (otroligt vacker) karta som visar hur man hittar till B010.</a></li> <li>Till lektionerna vill vi att ni tar med er en <b>dator</b> och <b>papper och penna</b> (förslagsvis ett rutat block eller liknande).</li> </ul> </p> <p> Vi ses på torsdag! </p>
<p> Idag träffades vi för en informationsträff. För er som inte var här, så följer här en sammanfattning av de viktigaste punkterna: </p> <p> <ul> <li>Vi träffas på torsdagar kl. 16.00 - 17.00 på Malmö Borgarskola i sal B010, som ligger i källaren. Det är bara huvudingången som är öppen om man inte går på Borgarskolan. <a href="%al% staticlink: blog/borgarkarta.png %al%">Här finns en (otroligt vacker) karta som visar hur man hittar till B010.</a></li> <li>Till lektionerna vill vi att ni tar med er en <b>dator</b> och <b>papper och penna</b> (förslagsvis ett rutat block eller liknande).</li> <li>Om ni någon gång är sjuka eller av andra skäl inte kan komma, så uppskattar vi om ni mejlar oss på <a href="mailto:[email protected]">[email protected]</a> eller <a href="mailto:[email protected]">[email protected]</a>.</li> </ul> </p> <p> Vi ses på torsdag! </p>
Update wording in blogpost2 for clarity
Update wording in blogpost2 for clarity
HTML
mit
ArVID220u/judge,ArVID220u/judge,ArVID220u/judge
html
## Code Before: <p> Idag träffades vi för en informationsträff. För dig som inte var här, så följer här en sammanfattning av de viktigaste punkterna: </p> <p> <ul> <li>Vi träffas på torsdagar kl. 16.00 - 17.00 på Malmö Borgarskola i sal B010, som ligger i källaren. Det är bara huvudingången som är öppen om man inte går på Borgarskolan. <a href="%al% staticlink: blog/borgarkarta.png %al%">Här finns en (otroligt vacker) karta som visar hur man hittar till B010.</a></li> <li>Till lektionerna vill vi att ni tar med er en <b>dator</b> och <b>papper och penna</b> (förslagsvis ett rutat block eller liknande).</li> </ul> </p> <p> Vi ses på torsdag! </p> ## Instruction: Update wording in blogpost2 for clarity ## Code After: <p> Idag träffades vi för en informationsträff. För er som inte var här, så följer här en sammanfattning av de viktigaste punkterna: </p> <p> <ul> <li>Vi träffas på torsdagar kl. 16.00 - 17.00 på Malmö Borgarskola i sal B010, som ligger i källaren. Det är bara huvudingången som är öppen om man inte går på Borgarskolan. <a href="%al% staticlink: blog/borgarkarta.png %al%">Här finns en (otroligt vacker) karta som visar hur man hittar till B010.</a></li> <li>Till lektionerna vill vi att ni tar med er en <b>dator</b> och <b>papper och penna</b> (förslagsvis ett rutat block eller liknande).</li> <li>Om ni någon gång är sjuka eller av andra skäl inte kan komma, så uppskattar vi om ni mejlar oss på <a href="mailto:[email protected]">[email protected]</a> eller <a href="mailto:[email protected]">[email protected]</a>.</li> </ul> </p> <p> Vi ses på torsdag! </p>
<p> - Idag träffades vi för en informationsträff. För dig som inte var här, så följer här en sammanfattning av de viktigaste punkterna: ? ^^^ + Idag träffades vi för en informationsträff. För er som inte var här, så följer här en sammanfattning av de viktigaste punkterna: ? ^^ </p> <p> <ul> <li>Vi träffas på torsdagar kl. 16.00 - 17.00 på Malmö Borgarskola i sal B010, som ligger i källaren. Det är bara huvudingången som är öppen om man inte går på Borgarskolan. <a href="%al% staticlink: blog/borgarkarta.png %al%">Här finns en (otroligt vacker) karta som visar hur man hittar till B010.</a></li> <li>Till lektionerna vill vi att ni tar med er en <b>dator</b> och <b>papper och penna</b> (förslagsvis ett rutat block eller liknande).</li> + <li>Om ni någon gång är sjuka eller av andra skäl inte kan komma, så uppskattar vi om ni mejlar oss på <a href="mailto:[email protected]">[email protected]</a> eller <a href="mailto:[email protected]">[email protected]</a>.</li> </ul> </p> <p> Vi ses på torsdag! </p>
3
0.25
2
1
18c2b76c99120179b59824a795aa8f07cd04a6c5
README.md
README.md
(pronounced ts as 'tsee' sound AVARI**TS**IA) Credit simulator for personal banking Language Translations: + [Español](assets/docs/translations/es/README_es_CO.md)
(pronounced ts as 'tsee' sound AVARI**TS**IA) [![Build Status](https://travis-ci.org/AlejoJamC/avaritia.svg?branch=v0.1)](https://travis-ci.org/AlejoJamC/avaritia) Credit simulator for personal banking Language Translations: + [Español](assets/docs/translations/es/README_es_CO.md)
Add travis status Readme.md file
Add travis status Readme.md file
Markdown
bsd-3-clause
AlejoJamC/avaritia,AlejoJamC/avaritia
markdown
## Code Before: (pronounced ts as 'tsee' sound AVARI**TS**IA) Credit simulator for personal banking Language Translations: + [Español](assets/docs/translations/es/README_es_CO.md) ## Instruction: Add travis status Readme.md file ## Code After: (pronounced ts as 'tsee' sound AVARI**TS**IA) [![Build Status](https://travis-ci.org/AlejoJamC/avaritia.svg?branch=v0.1)](https://travis-ci.org/AlejoJamC/avaritia) Credit simulator for personal banking Language Translations: + [Español](assets/docs/translations/es/README_es_CO.md)
(pronounced ts as 'tsee' sound AVARI**TS**IA) + + [![Build Status](https://travis-ci.org/AlejoJamC/avaritia.svg?branch=v0.1)](https://travis-ci.org/AlejoJamC/avaritia) Credit simulator for personal banking Language Translations: + [Español](assets/docs/translations/es/README_es_CO.md)
2
0.285714
2
0
b77c75dd8eec0299c55825580f0d6b7017c8b1c5
test_scripts/SDL5_0/Handling_VR_help_requests/003_SetGlobalProp_without_deleted_commands.lua
test_scripts/SDL5_0/Handling_VR_help_requests/003_SetGlobalProp_without_deleted_commands.lua
--------------------------------------------------------------------------------------------------- -- Proposal: https://github.com/smartdevicelink/sdl_evolution/blob/master/proposals/0122-New_rules_for_providing_VRHelpItems_VRHelpTitle.md -- User story: TBD -- Use case: TBD -- -- Requirement summary: TBD -- -- Description: -- In case: -- 1. Command1, Command2, Command3 commands with vrCommands are added -- 2. Command1 is deleted by app -- SDL does: -- send SetGlobalProperties with constructed the vrHelp and helpPrompt parameters using added vrCommand -- without deleted Command1 --------------------------------------------------------------------------------------------------- --[[ Required Shared libraries ]] local runner = require('user_modules/script_runner') local common = require('test_scripts/SDL5_0/Handling_VR_help_requests/commonVRhelp') --[[ Test Configuration ]] runner.testSettings.isSelfIncluded = false --[[ Scenario ]] runner.Title("Preconditions") runner.Step("Clean environment", common.preconditions) runner.Step("Start SDL, HMI, connect Mobile, start Session", common.start) runner.Step("App registration", common.registerAppWOPTU) runner.Step("App activation", common.activateApp) for i = 1,3 do runner.Step("AddCommand" .. i, common.addCommand, { common.getAddCommandParams(i) }) end runner.Title("Test") runner.Step("SetGlobalProperties by deleting Command1", common.deleteCommandWithSetGP, { 1 }) runner.Title("Postconditions") runner.Step("Stop SDL", common.postconditions)
--------------------------------------------------------------------------------------------------- -- Proposal: https://github.com/smartdevicelink/sdl_evolution/blob/master/proposals/0122-New_rules_for_providing_VRHelpItems_VRHelpTitle.md -- User story: TBD -- Use case: TBD -- -- Requirement summary: TBD -- -- Description: -- In case: -- 1. Command1, Command2, Command3 commands with vrCommands are added -- 2. Command1 is deleted by app -- SDL does: -- send SetGlobalProperties with constructed the vrHelp and helpPrompt parameters using added vrCommand -- without deleted Command1 --------------------------------------------------------------------------------------------------- --[[ Required Shared libraries ]] local runner = require('user_modules/script_runner') local common = require('test_scripts/SDL5_0/Handling_VR_help_requests/commonVRhelp') --[[ Test Configuration ]] runner.testSettings.isSelfIncluded = false --[[ Scenario ]] runner.Title("Preconditions") runner.Step("Clean environment", common.preconditions) runner.Step("Start SDL, HMI, connect Mobile, start Session", common.start) runner.Step("App registration", common.registerAppWOPTU) runner.Step("App activation", common.activateApp) for i = 1,3 do runner.Step("AddCommand" .. i, common.addCommandWithSetGP, { i }) end runner.Title("Test") runner.Step("SetGlobalProperties by deleting Command1", common.deleteCommandWithSetGP, { 1 }) runner.Title("Postconditions") runner.Step("Stop SDL", common.postconditions)
Improve stability of VR Help scripts
Improve stability of VR Help scripts
Lua
bsd-3-clause
smartdevicelink/sdl_atf_test_scripts,smartdevicelink/sdl_atf_test_scripts,smartdevicelink/sdl_atf_test_scripts
lua
## Code Before: --------------------------------------------------------------------------------------------------- -- Proposal: https://github.com/smartdevicelink/sdl_evolution/blob/master/proposals/0122-New_rules_for_providing_VRHelpItems_VRHelpTitle.md -- User story: TBD -- Use case: TBD -- -- Requirement summary: TBD -- -- Description: -- In case: -- 1. Command1, Command2, Command3 commands with vrCommands are added -- 2. Command1 is deleted by app -- SDL does: -- send SetGlobalProperties with constructed the vrHelp and helpPrompt parameters using added vrCommand -- without deleted Command1 --------------------------------------------------------------------------------------------------- --[[ Required Shared libraries ]] local runner = require('user_modules/script_runner') local common = require('test_scripts/SDL5_0/Handling_VR_help_requests/commonVRhelp') --[[ Test Configuration ]] runner.testSettings.isSelfIncluded = false --[[ Scenario ]] runner.Title("Preconditions") runner.Step("Clean environment", common.preconditions) runner.Step("Start SDL, HMI, connect Mobile, start Session", common.start) runner.Step("App registration", common.registerAppWOPTU) runner.Step("App activation", common.activateApp) for i = 1,3 do runner.Step("AddCommand" .. i, common.addCommand, { common.getAddCommandParams(i) }) end runner.Title("Test") runner.Step("SetGlobalProperties by deleting Command1", common.deleteCommandWithSetGP, { 1 }) runner.Title("Postconditions") runner.Step("Stop SDL", common.postconditions) ## Instruction: Improve stability of VR Help scripts ## Code After: --------------------------------------------------------------------------------------------------- -- Proposal: https://github.com/smartdevicelink/sdl_evolution/blob/master/proposals/0122-New_rules_for_providing_VRHelpItems_VRHelpTitle.md -- User story: TBD -- Use case: TBD -- -- Requirement summary: TBD -- -- Description: -- In case: -- 1. Command1, Command2, Command3 commands with vrCommands are added -- 2. Command1 is deleted by app -- SDL does: -- send SetGlobalProperties with constructed the vrHelp and helpPrompt parameters using added vrCommand -- without deleted Command1 --------------------------------------------------------------------------------------------------- --[[ Required Shared libraries ]] local runner = require('user_modules/script_runner') local common = require('test_scripts/SDL5_0/Handling_VR_help_requests/commonVRhelp') --[[ Test Configuration ]] runner.testSettings.isSelfIncluded = false --[[ Scenario ]] runner.Title("Preconditions") runner.Step("Clean environment", common.preconditions) runner.Step("Start SDL, HMI, connect Mobile, start Session", common.start) runner.Step("App registration", common.registerAppWOPTU) runner.Step("App activation", common.activateApp) for i = 1,3 do runner.Step("AddCommand" .. i, common.addCommandWithSetGP, { i }) end runner.Title("Test") runner.Step("SetGlobalProperties by deleting Command1", common.deleteCommandWithSetGP, { 1 }) runner.Title("Postconditions") runner.Step("Stop SDL", common.postconditions)
--------------------------------------------------------------------------------------------------- -- Proposal: https://github.com/smartdevicelink/sdl_evolution/blob/master/proposals/0122-New_rules_for_providing_VRHelpItems_VRHelpTitle.md -- User story: TBD -- Use case: TBD -- -- Requirement summary: TBD -- -- Description: -- In case: -- 1. Command1, Command2, Command3 commands with vrCommands are added -- 2. Command1 is deleted by app -- SDL does: -- send SetGlobalProperties with constructed the vrHelp and helpPrompt parameters using added vrCommand -- without deleted Command1 --------------------------------------------------------------------------------------------------- --[[ Required Shared libraries ]] local runner = require('user_modules/script_runner') local common = require('test_scripts/SDL5_0/Handling_VR_help_requests/commonVRhelp') --[[ Test Configuration ]] runner.testSettings.isSelfIncluded = false --[[ Scenario ]] runner.Title("Preconditions") runner.Step("Clean environment", common.preconditions) runner.Step("Start SDL, HMI, connect Mobile, start Session", common.start) runner.Step("App registration", common.registerAppWOPTU) runner.Step("App activation", common.activateApp) for i = 1,3 do - runner.Step("AddCommand" .. i, common.addCommand, { common.getAddCommandParams(i) }) ? --------------------------- - + runner.Step("AddCommand" .. i, common.addCommandWithSetGP, { i }) ? +++++++++ end runner.Title("Test") runner.Step("SetGlobalProperties by deleting Command1", common.deleteCommandWithSetGP, { 1 }) runner.Title("Postconditions") runner.Step("Stop SDL", common.postconditions)
2
0.054054
1
1
b47280c5545ef001701f24643b34cd9d7f94037e
tests/acceptance.suite.yml
tests/acceptance.suite.yml
actor: AcceptanceTester modules: enabled: - WebDriver: url: http://localhost:8080 browser: chrome - Db: dsn: 'mysql:host=localhost;dbname=mysql' user: 'root' password: '' dump: 'tests/_data/mysql.sql' populate: true cleanup: true - \Helper\Acceptance
class_name: AcceptanceTester modules: enabled: - \BrowserStackWebDriver config: \BrowserStackWebDriver: host: 'hub-cloud.browserstack.com' port: 80 browser: chrome url: 'http://localhost:8080' capabilities: 'browserstack.user': 'olmok1' 'browserstack.key' : '9yxyYxJnfFFTgmob7EYZ' 'browserstack.local': true
Add Test case with delete Feature
Add Test case with delete Feature
YAML
mit
sampleOmont/quick,sampleOmont/quick,sampleOmont/quick
yaml
## Code Before: actor: AcceptanceTester modules: enabled: - WebDriver: url: http://localhost:8080 browser: chrome - Db: dsn: 'mysql:host=localhost;dbname=mysql' user: 'root' password: '' dump: 'tests/_data/mysql.sql' populate: true cleanup: true - \Helper\Acceptance ## Instruction: Add Test case with delete Feature ## Code After: class_name: AcceptanceTester modules: enabled: - \BrowserStackWebDriver config: \BrowserStackWebDriver: host: 'hub-cloud.browserstack.com' port: 80 browser: chrome url: 'http://localhost:8080' capabilities: 'browserstack.user': 'olmok1' 'browserstack.key' : '9yxyYxJnfFFTgmob7EYZ' 'browserstack.local': true
+ class_name: AcceptanceTester - actor: AcceptanceTester modules: - enabled: ? -- + enabled: - - WebDriver: + - \BrowserStackWebDriver + config: + \BrowserStackWebDriver: + host: 'hub-cloud.browserstack.com' + port: 80 + browser: chrome - url: http://localhost:8080 ? ------- + url: 'http://localhost:8080' ? + + + capabilities: + 'browserstack.user': 'olmok1' + 'browserstack.key' : '9yxyYxJnfFFTgmob7EYZ' + 'browserstack.local': true - browser: chrome - - Db: - dsn: 'mysql:host=localhost;dbname=mysql' - user: 'root' - password: '' - dump: 'tests/_data/mysql.sql' - populate: true - cleanup: true - - \Helper\Acceptance
26
1.733333
13
13
485297df5cdc18d399d3d327fdcb5da056352b1e
src/main/resources/logback.xml
src/main/resources/logback.xml
<?xml version="1.0" encoding="UTF-8"?> <configuration> <include resource="logback-batch-base.xml" /> <logger name="org.springframework" level="INFO"/> </configuration>
<?xml version="1.0" encoding="UTF-8"?> <configuration> <include resource="logback-batch-base.xml" /> <logger name="org.springframework" level="INFO"/> <logger name="org.apache.http" level="INFO"/> <logger name="org.apache.axis" level="INFO"/> </configuration>
Add logging for Axis and HttpClient
Add logging for Axis and HttpClient
XML
mit
jrrdev/mantisbt-sync-core,jrrdev/mantisbt-sync-core
xml
## Code Before: <?xml version="1.0" encoding="UTF-8"?> <configuration> <include resource="logback-batch-base.xml" /> <logger name="org.springframework" level="INFO"/> </configuration> ## Instruction: Add logging for Axis and HttpClient ## Code After: <?xml version="1.0" encoding="UTF-8"?> <configuration> <include resource="logback-batch-base.xml" /> <logger name="org.springframework" level="INFO"/> <logger name="org.apache.http" level="INFO"/> <logger name="org.apache.axis" level="INFO"/> </configuration>
<?xml version="1.0" encoding="UTF-8"?> <configuration> <include resource="logback-batch-base.xml" /> <logger name="org.springframework" level="INFO"/> + <logger name="org.apache.http" level="INFO"/> + <logger name="org.apache.axis" level="INFO"/> </configuration>
2
0.4
2
0
e6ce370530fa40a0c871e61aa4e129c9ea51175f
layouts/partials/article_header.html
layouts/partials/article_header.html
<header class="article-header article-banner editable"> {{ if and (isset .Params "banner") (not (eq .Params.banner "")) }} <img src="{{ .Params.banner | absURL }}" class="img-responsive" style="width: 100%;"> {{ else }} <img src="/images/press-releases-header-h1.png" class="img-responsive" style="width: 100%;"> {{ end }} <code>{{ if eq (index .Params.categories 0) "press" }} <h1 class="article-title">Press Release</h1> <!--<h2 class="article-title" itemprop="name" property="name">{{ .Title }}</h2>--> {{ else }} <h1 class="article-title" itemprop="name" property="name">{{ .Title }}</h1> {{ end }} </code> </header>
<header class="article-header article-banner editable"> {{ if and (isset .Params "banner") (not (eq .Params.banner "")) }} <img src="{{ .Params.banner | absURL }}" class="img-responsive" style="width: 100%;"> {{ else }} <img src="/images/press-releases-header-h1.png" class="img-responsive" style="width: 100%;"> {{ end }} {{ if eq (index .Params.categories 0) "press" }} <h1 class="article-title">Press Release</h1> <!--<h2 class="article-title" itemprop="name" property="name">{{ .Title }}</h2>--> {{ else }} {{ if lt (len .Title) 15 }} <h1 class="article-title" itemprop="name" property="name">{{ .Title }}</h1> {{ else }} <h1 class="article-title-long" itemprop="name" property="name">{{ .Title }}</h1> {{ end }} {{ end }} </header>
Make long titles use a different class
Make long titles use a different class
HTML
mit
awalker/bs3theme
html
## Code Before: <header class="article-header article-banner editable"> {{ if and (isset .Params "banner") (not (eq .Params.banner "")) }} <img src="{{ .Params.banner | absURL }}" class="img-responsive" style="width: 100%;"> {{ else }} <img src="/images/press-releases-header-h1.png" class="img-responsive" style="width: 100%;"> {{ end }} <code>{{ if eq (index .Params.categories 0) "press" }} <h1 class="article-title">Press Release</h1> <!--<h2 class="article-title" itemprop="name" property="name">{{ .Title }}</h2>--> {{ else }} <h1 class="article-title" itemprop="name" property="name">{{ .Title }}</h1> {{ end }} </code> </header> ## Instruction: Make long titles use a different class ## Code After: <header class="article-header article-banner editable"> {{ if and (isset .Params "banner") (not (eq .Params.banner "")) }} <img src="{{ .Params.banner | absURL }}" class="img-responsive" style="width: 100%;"> {{ else }} <img src="/images/press-releases-header-h1.png" class="img-responsive" style="width: 100%;"> {{ end }} {{ if eq (index .Params.categories 0) "press" }} <h1 class="article-title">Press Release</h1> <!--<h2 class="article-title" itemprop="name" property="name">{{ .Title }}</h2>--> {{ else }} {{ if lt (len .Title) 15 }} <h1 class="article-title" itemprop="name" property="name">{{ .Title }}</h1> {{ else }} <h1 class="article-title-long" itemprop="name" property="name">{{ .Title }}</h1> {{ end }} {{ end }} </header>
<header class="article-header article-banner editable"> - {{ if and (isset .Params "banner") (not (eq .Params.banner "")) }} + {{ if and (isset .Params "banner") (not (eq .Params.banner "")) }} ? ++++ - <img src="{{ .Params.banner | absURL }}" class="img-responsive" style="width: 100%;"> + <img src="{{ .Params.banner | absURL }}" class="img-responsive" style="width: 100%;"> {{ else }} ? +++++++++++ + <img src="/images/press-releases-header-h1.png" class="img-responsive" style="width: 100%;"> {{ end }} {{ if eq (index .Params.categories 0) "press" }} + <h1 class="article-title">Press Release</h1> + <!--<h2 class="article-title" itemprop="name" property="name">{{ .Title }}</h2>--> + {{ else }} {{ if lt (len .Title) 15 }} + <h1 class="article-title" itemprop="name" property="name">{{ .Title }}</h1> {{ else }} - <img src="/images/press-releases-header-h1.png" class="img-responsive" style="width: 100%;"> - {{ end }} - - <code>{{ if eq (index .Params.categories 0) "press" }} - <h1 class="article-title">Press Release</h1> - <!--<h2 class="article-title" itemprop="name" property="name">{{ .Title }}</h2>--> - {{ else }} - <h1 class="article-title" itemprop="name" property="name">{{ .Title }}</h1> ? ---- + <h1 class="article-title-long" itemprop="name" property="name">{{ .Title }}</h1> ? +++++ + {{ end }} {{ end }} - {{ end }} - </code> </header>
21
1.3125
9
12
05e68b98a7d38efb95cca834e95ae35387bba730
static/js/entity.js
static/js/entity.js
(function () { var $specContainer = $('#spec-container'); if (!isMobile) { $('.section-tabs a').click(function () { var href = $(this).attr('href'); gotoTab(href); return false; }); $specContainer.scrollspy({ 'data-spy': 'scroll', 'data-target': '.section-tabs', 'offset': 100 }) .on('activate.changehash', function () { var target = $('.section-tabs li.active a').attr('href'); selectTab(target); }); function gotoTab(target) { selectTab(target); var $elem = $(target); $specContainer.animate({ scrollTop: $specContainer.scrollTop() + $elem.position().top }, 300); } function selectTab(target) { var $list = $('.section-tabs li'); $list.removeClass('active'); $list.children('[href="'+target+'"]').parent('li').addClass('active'); if (location.hash != target) { if (history.pushState) { history.pushState({}, null, target); } else { location.hash = target; } } } } }());
(function () { var $body = $('body'); if (!isMobile) { $('.section-tabs a').click(function () { var href = $(this).attr('href'); gotoTab(href); return false; }); $body.scrollspy({ 'data-spy': 'scroll', 'data-target': '.section-tabs', 'offset': 100 }) .on('activate.changehash', function () { var target = $('.section-tabs li.active a').attr('href'); selectTab(target); }); function gotoTab(target) { selectTab(target); var $elem = $(target); $body.animate({ scrollTop: $elem.offset().top }, 300); } function selectTab(target) { var $list = $('.section-tabs li'); $list.removeClass('active'); $list.children('[href="'+target+'"]').parent('li').addClass('active'); if (location.hash != target) { if (history.pushState) { history.pushState({}, null, target); } else { location.hash = target; } } } } }());
Fix scrollspy bug caused by layout changes
Fix scrollspy bug caused by layout changes
JavaScript
apache-2.0
teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr
javascript
## Code Before: (function () { var $specContainer = $('#spec-container'); if (!isMobile) { $('.section-tabs a').click(function () { var href = $(this).attr('href'); gotoTab(href); return false; }); $specContainer.scrollspy({ 'data-spy': 'scroll', 'data-target': '.section-tabs', 'offset': 100 }) .on('activate.changehash', function () { var target = $('.section-tabs li.active a').attr('href'); selectTab(target); }); function gotoTab(target) { selectTab(target); var $elem = $(target); $specContainer.animate({ scrollTop: $specContainer.scrollTop() + $elem.position().top }, 300); } function selectTab(target) { var $list = $('.section-tabs li'); $list.removeClass('active'); $list.children('[href="'+target+'"]').parent('li').addClass('active'); if (location.hash != target) { if (history.pushState) { history.pushState({}, null, target); } else { location.hash = target; } } } } }()); ## Instruction: Fix scrollspy bug occured by layout changes ## Code After: (function () { var $body = $('body'); if (!isMobile) { $('.section-tabs a').click(function () { var href = $(this).attr('href'); gotoTab(href); return false; }); $body.scrollspy({ 'data-spy': 'scroll', 'data-target': '.section-tabs', 'offset': 100 }) .on('activate.changehash', function () { var target = $('.section-tabs li.active a').attr('href'); selectTab(target); }); function gotoTab(target) { selectTab(target); var $elem = $(target); $body.animate({ scrollTop: $elem.offset().top }, 300); } function selectTab(target) { var $list = $('.section-tabs li'); $list.removeClass('active'); $list.children('[href="'+target+'"]').parent('li').addClass('active'); if (location.hash != target) { if (history.pushState) { history.pushState({}, null, target); } else { location.hash = target; } } } } }());
(function () { - var $specContainer = $('#spec-container'); + var $body = $('body'); if (!isMobile) { $('.section-tabs a').click(function () { var href = $(this).attr('href'); gotoTab(href); return false; }); - $specContainer.scrollspy({ + $body.scrollspy({ 'data-spy': 'scroll', 'data-target': '.section-tabs', 'offset': 100 }) .on('activate.changehash', function () { var target = $('.section-tabs li.active a').attr('href'); selectTab(target); }); function gotoTab(target) { selectTab(target); var $elem = $(target); - $specContainer.animate({ - scrollTop: $specContainer.scrollTop() + $elem.position().top + $body.animate({ + scrollTop: $elem.offset().top }, 300); } function selectTab(target) { var $list = $('.section-tabs li'); $list.removeClass('active'); $list.children('[href="'+target+'"]').parent('li').addClass('active'); if (location.hash != target) { if (history.pushState) { history.pushState({}, null, target); } else { location.hash = target; } } } } }());
8
0.170213
4
4
33071c449bfec7cbf484473b2bdfd62bc4c199c3
lib/tasks/export_mainstream_taxons.rake
lib/tasks/export_mainstream_taxons.rake
namespace :govuk do # bundle exec rake govuk:export_content_by_organisations[uk-border-agency,border-force] desc "Export taxons of mainstream content as CSV" task export_mainstream_taxons: [:environment] do content_types = %w[ answer guide simple_smart_answer transaction completed_transaction travel_advice_index local_transaction travel_advice licence ] fields = %w[ link title ] content_items_enum = Services.search_api.search_enum( fields: fields, filter_content_store_document_type: content_types, ) print "- saving items to CSV" filename = "tmp/mainstream.csv" CSV.open(filename, "wb", headers: ["mainstream title", "mainstream path", "taxon title", "taxon link", "taxon title", "taxon link", "taxon title", "taxon link"], write_headers: true) do |csv| content_items_enum.each do |content_item| link = content_item["link"] title = content_item["title"] content_item = Services.live_content_store.content_item(link) taxons = content_item.dig("links", "taxons") || [] row = [title, link] taxons.each do |taxon| row << taxon["title"] << taxon["base_path"] end csv << row puts row end end end end
namespace :govuk do # bundle exec rake govuk:export_content_by_organisations[uk-border-agency,border-force] desc "Export taxons of mainstream content as CSV" task export_mainstream_taxons: [:environment] do content_types = %w[ answer guide simple_smart_answer smart_answer transaction completed_transaction travel_advice_index local_transaction travel_advice licence ] fields = %w[ link title ] content_items_enum = Services.search_api.search_enum( fields: fields, filter_content_store_document_type: content_types, ) print "- saving items to CSV" filename = "tmp/mainstream.csv" CSV.open(filename, "wb", headers: ["mainstream title", "mainstream path", "taxon title", "taxon link", "taxon title", "taxon link", "taxon title", "taxon link"], write_headers: true) do |csv| content_items_enum.each do |content_item| link = content_item["link"] title = content_item["title"] content_item = Services.live_content_store.content_item(link) taxons = content_item.dig("links", "taxons") || [] row = [title, link] taxons.each do |taxon| row << taxon["title"] << taxon["base_path"] end csv << row puts row end end end end
Add support for exporting smart answers content
Add support for exporting smart answers content This adds support for the new `smart_answer` schema when exporting taxons of mainstream content as CSV. Previously smart answers were represented by the transaction schema. This ensures the functionality doesn't break when Smart Answer flows are republished with the new schema.
Ruby
mit
alphagov/content-tagger,alphagov/content-tagger,alphagov/content-tagger
ruby
## Code Before: namespace :govuk do # bundle exec rake govuk:export_content_by_organisations[uk-border-agency,border-force] desc "Export taxons of mainstream content as CSV" task export_mainstream_taxons: [:environment] do content_types = %w[ answer guide simple_smart_answer transaction completed_transaction travel_advice_index local_transaction travel_advice licence ] fields = %w[ link title ] content_items_enum = Services.search_api.search_enum( fields: fields, filter_content_store_document_type: content_types, ) print "- saving items to CSV" filename = "tmp/mainstream.csv" CSV.open(filename, "wb", headers: ["mainstream title", "mainstream path", "taxon title", "taxon link", "taxon title", "taxon link", "taxon title", "taxon link"], write_headers: true) do |csv| content_items_enum.each do |content_item| link = content_item["link"] title = content_item["title"] content_item = Services.live_content_store.content_item(link) taxons = content_item.dig("links", "taxons") || [] row = [title, link] taxons.each do |taxon| row << taxon["title"] << taxon["base_path"] end csv << row puts row end end end end ## Instruction: Add support for exporting smart answers content This adds support for the new `smart_answer` schema when exporting taxons of mainstream content as CSV. Previously smart answers were represented by the transaction schema. This ensures the functionality doesn't break when Smart Answer flows are republished with the new schema. ## Code After: namespace :govuk do # bundle exec rake govuk:export_content_by_organisations[uk-border-agency,border-force] desc "Export taxons of mainstream content as CSV" task export_mainstream_taxons: [:environment] do content_types = %w[ answer guide simple_smart_answer smart_answer transaction completed_transaction travel_advice_index local_transaction travel_advice licence ] fields = %w[ link title ] content_items_enum = Services.search_api.search_enum( fields: fields, filter_content_store_document_type: content_types, ) print "- saving items to CSV" filename = "tmp/mainstream.csv" CSV.open(filename, "wb", headers: ["mainstream title", "mainstream path", "taxon title", "taxon link", "taxon title", "taxon link", "taxon title", "taxon link"], write_headers: true) do |csv| content_items_enum.each do |content_item| link = content_item["link"] title = content_item["title"] content_item = Services.live_content_store.content_item(link) taxons = content_item.dig("links", "taxons") || [] row = [title, link] taxons.each do |taxon| row << taxon["title"] << taxon["base_path"] end csv << row puts row end end end end
namespace :govuk do # bundle exec rake govuk:export_content_by_organisations[uk-border-agency,border-force] desc "Export taxons of mainstream content as CSV" task export_mainstream_taxons: [:environment] do content_types = %w[ answer guide simple_smart_answer + smart_answer transaction completed_transaction travel_advice_index local_transaction travel_advice licence ] fields = %w[ link title ] content_items_enum = Services.search_api.search_enum( fields: fields, filter_content_store_document_type: content_types, ) print "- saving items to CSV" filename = "tmp/mainstream.csv" CSV.open(filename, "wb", headers: ["mainstream title", "mainstream path", "taxon title", "taxon link", "taxon title", "taxon link", "taxon title", "taxon link"], write_headers: true) do |csv| content_items_enum.each do |content_item| link = content_item["link"] title = content_item["title"] content_item = Services.live_content_store.content_item(link) taxons = content_item.dig("links", "taxons") || [] row = [title, link] taxons.each do |taxon| row << taxon["title"] << taxon["base_path"] end csv << row puts row end end end end
1
0.021277
1
0
bfd22261ef554575dee9a857e518b7b6c603af2a
meta-oe/recipes-extended/iotop/iotop_1.20.bb
meta-oe/recipes-extended/iotop/iotop_1.20.bb
SUMMARY = "A top utility for I/O" LICENSE = "GPLv2+" LIC_FILES_CHKSUM = "file://LICENSE;md5=48e7be78bd2671d08c9c3bad71f1cfaa" DEPENDS = "ncurses" SRC_URI = "https://github.com/Tomas-M/iotop/releases/download/v1.20/iotop-1.20.tar.xz" SRC_URI[sha256sum] = "e0227dd4b71ce3ffe50225b85cf9abb38a99c1d2dff69e3f1db7d059d7490d51" UPSTREAM_CHECK_URI = "https://github.com/Tomas-M/iotop/releases" inherit pkgconfig EXTRA_OEMAKE = "V=1 STRIP=true" # Fixes llvm-bc70b5.o: can't link soft-float modules with double-float modules EXTRA_OEMAKE:append:toolchain-clang:riscv64 = " NO_FLTO=1" # Workaround BFD linker crash with clang on arm # revisit when upgrading binutils and see if its fixed LDFLAGS:append:toolchain-clang:arm = " -fuse-ld=lld" do_install() { oe_runmake install DESTDIR=${D} }
SUMMARY = "A top utility for I/O" LICENSE = "GPLv2+" LIC_FILES_CHKSUM = "file://LICENSE;md5=48e7be78bd2671d08c9c3bad71f1cfaa" DEPENDS = "ncurses" SRC_URI = "https://github.com/Tomas-M/iotop/releases/download/v1.20/iotop-1.20.tar.xz" SRC_URI[sha256sum] = "e0227dd4b71ce3ffe50225b85cf9abb38a99c1d2dff69e3f1db7d059d7490d51" UPSTREAM_CHECK_URI = "https://github.com/Tomas-M/iotop/releases" inherit pkgconfig EXTRA_OEMAKE = "V=1 STRIP=true" # Fixes llvm-bc70b5.o: can't link soft-float modules with double-float modules EXTRA_OEMAKE:append:toolchain-clang:riscv64 = " NO_FLTO=1" EXTRA_OEMAKE:append:toolchain-clang:riscv32 = " NO_FLTO=1" # Workaround BFD linker crash with clang on arm # revisit when upgrading binutils and see if its fixed LDFLAGS:append:toolchain-clang:arm = " -fuse-ld=lld" do_install() { oe_runmake install DESTDIR=${D} }
Disable lto with clang for rv32
iotop: Disable lto with clang for rv32 clang/rv32 is mixing ABIs because defaults are ilp32d for OE but when LTO is used it pulls in ilp32 internally so disable lto for now. Signed-off-by: Khem Raj <[email protected]>
BitBake
mit
openembedded/meta-openembedded,openembedded/meta-openembedded,openembedded/meta-openembedded,openembedded/meta-openembedded,openembedded/meta-openembedded,openembedded/meta-openembedded,openembedded/meta-openembedded,openembedded/meta-openembedded
bitbake
## Code Before: SUMMARY = "A top utility for I/O" LICENSE = "GPLv2+" LIC_FILES_CHKSUM = "file://LICENSE;md5=48e7be78bd2671d08c9c3bad71f1cfaa" DEPENDS = "ncurses" SRC_URI = "https://github.com/Tomas-M/iotop/releases/download/v1.20/iotop-1.20.tar.xz" SRC_URI[sha256sum] = "e0227dd4b71ce3ffe50225b85cf9abb38a99c1d2dff69e3f1db7d059d7490d51" UPSTREAM_CHECK_URI = "https://github.com/Tomas-M/iotop/releases" inherit pkgconfig EXTRA_OEMAKE = "V=1 STRIP=true" # Fixes llvm-bc70b5.o: can't link soft-float modules with double-float modules EXTRA_OEMAKE:append:toolchain-clang:riscv64 = " NO_FLTO=1" # Workaround BFD linker crash with clang on arm # revisit when upgrading binutils and see if its fixed LDFLAGS:append:toolchain-clang:arm = " -fuse-ld=lld" do_install() { oe_runmake install DESTDIR=${D} } ## Instruction: iotop: Disable lto with clang for rv32 clang/rv32 is mixing ABIs because defaults are ilp32d for OE but when LTO is used it pulls in ilp32 internally so disable lto for now. Signed-off-by: Khem Raj <[email protected]> ## Code After: SUMMARY = "A top utility for I/O" LICENSE = "GPLv2+" LIC_FILES_CHKSUM = "file://LICENSE;md5=48e7be78bd2671d08c9c3bad71f1cfaa" DEPENDS = "ncurses" SRC_URI = "https://github.com/Tomas-M/iotop/releases/download/v1.20/iotop-1.20.tar.xz" SRC_URI[sha256sum] = "e0227dd4b71ce3ffe50225b85cf9abb38a99c1d2dff69e3f1db7d059d7490d51" UPSTREAM_CHECK_URI = "https://github.com/Tomas-M/iotop/releases" inherit pkgconfig EXTRA_OEMAKE = "V=1 STRIP=true" # Fixes llvm-bc70b5.o: can't link soft-float modules with double-float modules EXTRA_OEMAKE:append:toolchain-clang:riscv64 = " NO_FLTO=1" EXTRA_OEMAKE:append:toolchain-clang:riscv32 = " NO_FLTO=1" # Workaround BFD linker crash with clang on arm # revisit when upgrading binutils and see if its fixed LDFLAGS:append:toolchain-clang:arm = " -fuse-ld=lld" do_install() { oe_runmake install DESTDIR=${D} }
SUMMARY = "A top utility for I/O" LICENSE = "GPLv2+" LIC_FILES_CHKSUM = "file://LICENSE;md5=48e7be78bd2671d08c9c3bad71f1cfaa" DEPENDS = "ncurses" SRC_URI = "https://github.com/Tomas-M/iotop/releases/download/v1.20/iotop-1.20.tar.xz" SRC_URI[sha256sum] = "e0227dd4b71ce3ffe50225b85cf9abb38a99c1d2dff69e3f1db7d059d7490d51" UPSTREAM_CHECK_URI = "https://github.com/Tomas-M/iotop/releases" inherit pkgconfig EXTRA_OEMAKE = "V=1 STRIP=true" # Fixes llvm-bc70b5.o: can't link soft-float modules with double-float modules EXTRA_OEMAKE:append:toolchain-clang:riscv64 = " NO_FLTO=1" + EXTRA_OEMAKE:append:toolchain-clang:riscv32 = " NO_FLTO=1" # Workaround BFD linker crash with clang on arm # revisit when upgrading binutils and see if its fixed LDFLAGS:append:toolchain-clang:arm = " -fuse-ld=lld" do_install() { oe_runmake install DESTDIR=${D} }
1
0.043478
1
0
ab1a2982b6a44bfcfcaff5a3469f2d85f56a86a4
src/cli/_dbus/_manager.py
src/cli/_dbus/_manager.py
class Manager(object): """ Manager interface. """ _INTERFACE_NAME = 'org.storage.stratis1.Manager' def __init__(self, dbus_object): """ Initializer. :param dbus_object: the dbus object """ self._dbus_object = dbus_object def CreatePool(self, pool_name, devices, num_devices): """ Create a pool. :param str pool_name: the pool name :param devices: the component devices :type devices: sequence of str """ return self._dbus_object.CreatePool( pool_name, devices, num_devices, dbus_interface=self._INTERFACE_NAME, ) def DestroyPool(self, pool_name): """ Destroy a pool. :param str pool_name: the name of the pool """ return self._dbus_object.DestroyPool( pool_name, dbus_interface=self._INTERFACE_NAME ) def ListPools(self): """ List all pools. """ return self._dbus_object.ListPools(dbus_interface=self._INTERFACE_NAME)
from ._properties import Properties class Manager(object): """ Manager interface. """ _INTERFACE_NAME = 'org.storage.stratis1.Manager' def __init__(self, dbus_object): """ Initializer. :param dbus_object: the dbus object """ self._dbus_object = dbus_object def CreatePool(self, pool_name, devices, num_devices): """ Create a pool. :param str pool_name: the pool name :param devices: the component devices :type devices: sequence of str """ return self._dbus_object.CreatePool( pool_name, devices, num_devices, dbus_interface=self._INTERFACE_NAME, ) def DestroyPool(self, pool_name): """ Destroy a pool. :param str pool_name: the name of the pool """ return self._dbus_object.DestroyPool( pool_name, dbus_interface=self._INTERFACE_NAME ) def ListPools(self): """ List all pools. """ return self._dbus_object.ListPools(dbus_interface=self._INTERFACE_NAME) @property def Version(self): """ Stratisd Version getter. :rtype: String """ return Properties(self._dbus_object).Get( self._INTERFACE_NAME, 'Version' ) @property def LogLevel(self): """ Stratisd LogLevel getter. :rtype: String """ return Properties(self._dbus_object).Get( self._INTERFACE_NAME, 'LogLevel' ) @LogLevel.setter def LogLevel(self, value): """ Stratisd LogLevel setter. :param str value: the value to set """ return Properties(self._dbus_object).Set( self._INTERFACE_NAME, 'LogLevel', value )
Use Properties interface to get Manager properties.
Use Properties interface to get Manager properties. Signed-off-by: mulhern <[email protected]>
Python
apache-2.0
stratis-storage/stratis-cli,stratis-storage/stratis-cli
python
## Code Before: class Manager(object): """ Manager interface. """ _INTERFACE_NAME = 'org.storage.stratis1.Manager' def __init__(self, dbus_object): """ Initializer. :param dbus_object: the dbus object """ self._dbus_object = dbus_object def CreatePool(self, pool_name, devices, num_devices): """ Create a pool. :param str pool_name: the pool name :param devices: the component devices :type devices: sequence of str """ return self._dbus_object.CreatePool( pool_name, devices, num_devices, dbus_interface=self._INTERFACE_NAME, ) def DestroyPool(self, pool_name): """ Destroy a pool. :param str pool_name: the name of the pool """ return self._dbus_object.DestroyPool( pool_name, dbus_interface=self._INTERFACE_NAME ) def ListPools(self): """ List all pools. """ return self._dbus_object.ListPools(dbus_interface=self._INTERFACE_NAME) ## Instruction: Use Properties interface to get Manager properties. Signed-off-by: mulhern <[email protected]> ## Code After: from ._properties import Properties class Manager(object): """ Manager interface. """ _INTERFACE_NAME = 'org.storage.stratis1.Manager' def __init__(self, dbus_object): """ Initializer. :param dbus_object: the dbus object """ self._dbus_object = dbus_object def CreatePool(self, pool_name, devices, num_devices): """ Create a pool. :param str pool_name: the pool name :param devices: the component devices :type devices: sequence of str """ return self._dbus_object.CreatePool( pool_name, devices, num_devices, dbus_interface=self._INTERFACE_NAME, ) def DestroyPool(self, pool_name): """ Destroy a pool. :param str pool_name: the name of the pool """ return self._dbus_object.DestroyPool( pool_name, dbus_interface=self._INTERFACE_NAME ) def ListPools(self): """ List all pools. """ return self._dbus_object.ListPools(dbus_interface=self._INTERFACE_NAME) @property def Version(self): """ Stratisd Version getter. :rtype: String """ return Properties(self._dbus_object).Get( self._INTERFACE_NAME, 'Version' ) @property def LogLevel(self): """ Stratisd LogLevel getter. :rtype: String """ return Properties(self._dbus_object).Get( self._INTERFACE_NAME, 'LogLevel' ) @LogLevel.setter def LogLevel(self, value): """ Stratisd LogLevel setter. :param str value: the value to set """ return Properties(self._dbus_object).Set( self._INTERFACE_NAME, 'LogLevel', value )
+ + from ._properties import Properties + class Manager(object): """ Manager interface. """ _INTERFACE_NAME = 'org.storage.stratis1.Manager' def __init__(self, dbus_object): """ Initializer. :param dbus_object: the dbus object """ self._dbus_object = dbus_object def CreatePool(self, pool_name, devices, num_devices): """ Create a pool. :param str pool_name: the pool name :param devices: the component devices :type devices: sequence of str """ return self._dbus_object.CreatePool( pool_name, devices, num_devices, dbus_interface=self._INTERFACE_NAME, ) def DestroyPool(self, pool_name): """ Destroy a pool. :param str pool_name: the name of the pool """ return self._dbus_object.DestroyPool( pool_name, dbus_interface=self._INTERFACE_NAME ) def ListPools(self): """ List all pools. """ return self._dbus_object.ListPools(dbus_interface=self._INTERFACE_NAME) + + @property + def Version(self): + """ + Stratisd Version getter. + + :rtype: String + """ + return Properties(self._dbus_object).Get( + self._INTERFACE_NAME, + 'Version' + ) + + @property + def LogLevel(self): + """ + Stratisd LogLevel getter. + + :rtype: String + """ + return Properties(self._dbus_object).Get( + self._INTERFACE_NAME, + 'LogLevel' + ) + + @LogLevel.setter + def LogLevel(self, value): + """ + Stratisd LogLevel setter. + + :param str value: the value to set + """ + return Properties(self._dbus_object).Set( + self._INTERFACE_NAME, + 'LogLevel', + value + )
40
0.851064
40
0
8b181d4aa2682db1d7362f5f63495f084a87221b
.travis.yml
.travis.yml
language: ruby cache: bundler rvm: - 2.1.1 - 2.1.0 - 2.0.0 - 1.9.3 - 1.9.2 script: bundle exec rspec spec
language: ruby cache: bundler rvm: - 2.1.1 - 2.1.0 - 2.0.0 - 1.9.3 - 1.9.2 script: bundle exec rspec spec env: global: secure: kBCnfwyeUmQhO9JyBBD4Su8S7Xc10MDyJKoUcExiPjHXsLOIuu9uj1glEbEHg26XEqPlitc3qULswiy0HrGVdr9CqE3Qs1Q5yX0F3/Us5UJFf5w7dqBFpub+NkQS7dyzm/U5qVMkPrdVCNm0fxPyZP0zVQ8YyKHlM6iSjI7Rf68=
Add codeclimate test coverage reporting
Add codeclimate test coverage reporting
YAML
mit
thekompanee/chamber,thekompanee/chamber
yaml
## Code Before: language: ruby cache: bundler rvm: - 2.1.1 - 2.1.0 - 2.0.0 - 1.9.3 - 1.9.2 script: bundle exec rspec spec ## Instruction: Add codeclimate test coverage reporting ## Code After: language: ruby cache: bundler rvm: - 2.1.1 - 2.1.0 - 2.0.0 - 1.9.3 - 1.9.2 script: bundle exec rspec spec env: global: secure: kBCnfwyeUmQhO9JyBBD4Su8S7Xc10MDyJKoUcExiPjHXsLOIuu9uj1glEbEHg26XEqPlitc3qULswiy0HrGVdr9CqE3Qs1Q5yX0F3/Us5UJFf5w7dqBFpub+NkQS7dyzm/U5qVMkPrdVCNm0fxPyZP0zVQ8YyKHlM6iSjI7Rf68=
language: ruby cache: bundler rvm: - - 2.1.1 ? -- + - 2.1.1 - - 2.1.0 ? -- + - 2.1.0 - - 2.0.0 ? -- + - 2.0.0 - - 1.9.3 ? -- + - 1.9.3 - - 1.9.2 ? -- + - 1.9.2 script: bundle exec rspec spec + env: + global: + secure: kBCnfwyeUmQhO9JyBBD4Su8S7Xc10MDyJKoUcExiPjHXsLOIuu9uj1glEbEHg26XEqPlitc3qULswiy0HrGVdr9CqE3Qs1Q5yX0F3/Us5UJFf5w7dqBFpub+NkQS7dyzm/U5qVMkPrdVCNm0fxPyZP0zVQ8YyKHlM6iSjI7Rf68=
13
1.444444
8
5
7eb3712a51ac537ab998bf2a5f60ac33a16b49e7
platform/projectModel-impl/src/com/intellij/openapi/components/service.kt
platform/projectModel-impl/src/com/intellij/openapi/components/service.kt
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.components import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.impl.stores.IComponentStore import com.intellij.openapi.project.Project import com.intellij.project.ProjectStoreOwner inline fun <reified T : Any> service(): T = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceOrNull(): T? = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceIfCreated(): T? = ApplicationManager.getApplication().getServiceIfCreated(T::class.java) inline fun <reified T : Any> Project.service(): T = getService(T::class.java) inline fun <reified T : Any> Project.serviceOrNull(): T? = getService(T::class.java) inline fun <reified T : Any> Project.serviceIfCreated(): T? = getServiceIfCreated(T::class.java) val ComponentManager.stateStore: IComponentStore get() { return when (this) { is ProjectStoreOwner -> this.getComponentStore() else -> { // module or application service getService(IComponentStore::class.java) } } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.components import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.impl.stores.IComponentStore import com.intellij.openapi.project.Project import com.intellij.project.ProjectStoreOwner inline fun <reified T : Any> service(): T = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceOrNull(): T? = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceIfCreated(): T? = ApplicationManager.getApplication().getServiceIfCreated(T::class.java) inline fun <reified T : Any> Project.service(): T = getService(T::class.java) inline fun <reified T : Any> Project.serviceIfCreated(): T? = getServiceIfCreated(T::class.java) val ComponentManager.stateStore: IComponentStore get() { return when (this) { is ProjectStoreOwner -> this.getComponentStore() else -> { // module or application service getService(IComponentStore::class.java) } } }
Revert "CPP-15416 Implement unified workspace lifecycle"
Revert "CPP-15416 Implement unified workspace lifecycle" This reverts commit 9cd5d1f4 GitOrigin-RevId: 908bbdae40a775576ff1abae4424cc430b4147bf
Kotlin
apache-2.0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
kotlin
## Code Before: // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.components import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.impl.stores.IComponentStore import com.intellij.openapi.project.Project import com.intellij.project.ProjectStoreOwner inline fun <reified T : Any> service(): T = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceOrNull(): T? = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceIfCreated(): T? = ApplicationManager.getApplication().getServiceIfCreated(T::class.java) inline fun <reified T : Any> Project.service(): T = getService(T::class.java) inline fun <reified T : Any> Project.serviceOrNull(): T? = getService(T::class.java) inline fun <reified T : Any> Project.serviceIfCreated(): T? = getServiceIfCreated(T::class.java) val ComponentManager.stateStore: IComponentStore get() { return when (this) { is ProjectStoreOwner -> this.getComponentStore() else -> { // module or application service getService(IComponentStore::class.java) } } } ## Instruction: Revert "CPP-15416 Implement unified workspace lifecycle" This reverts commit 9cd5d1f4 GitOrigin-RevId: 908bbdae40a775576ff1abae4424cc430b4147bf ## Code After: // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.components import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.impl.stores.IComponentStore import com.intellij.openapi.project.Project import com.intellij.project.ProjectStoreOwner inline fun <reified T : Any> service(): T = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceOrNull(): T? = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceIfCreated(): T? = ApplicationManager.getApplication().getServiceIfCreated(T::class.java) inline fun <reified T : Any> Project.service(): T = getService(T::class.java) inline fun <reified T : Any> Project.serviceIfCreated(): T? = getServiceIfCreated(T::class.java) val ComponentManager.stateStore: IComponentStore get() { return when (this) { is ProjectStoreOwner -> this.getComponentStore() else -> { // module or application service getService(IComponentStore::class.java) } } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.components import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.components.impl.stores.IComponentStore import com.intellij.openapi.project.Project import com.intellij.project.ProjectStoreOwner inline fun <reified T : Any> service(): T = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceOrNull(): T? = ApplicationManager.getApplication().getService(T::class.java) inline fun <reified T : Any> serviceIfCreated(): T? = ApplicationManager.getApplication().getServiceIfCreated(T::class.java) inline fun <reified T : Any> Project.service(): T = getService(T::class.java) - inline fun <reified T : Any> Project.serviceOrNull(): T? = getService(T::class.java) - inline fun <reified T : Any> Project.serviceIfCreated(): T? = getServiceIfCreated(T::class.java) val ComponentManager.stateStore: IComponentStore get() { return when (this) { is ProjectStoreOwner -> this.getComponentStore() else -> { // module or application service getService(IComponentStore::class.java) } } }
2
0.066667
0
2
880d409e8dfda9ec261c639b7aea6bcc10cdcde2
spec/dummy/app/controllers/application_controller.rb
spec/dummy/app/controllers/application_controller.rb
class ApplicationController < ActionController::Base protect_from_forgery end
class ApplicationController < ActionController::Base protect_from_forgery def sign_in_path '/sign_in' end end
Define sign_in_path in ApplicationController for dummy application
Define sign_in_path in ApplicationController for dummy application
Ruby
mit
frankel/forem,szymon-przybyl/forem,szymon-przybyl/forem,dmitry-ilyashevich/forem,dmitry-ilyashevich/forem,frankel/forem,caffo/forem,filiptepper/forem,STRd6/forem,isotope11/forem,isotope11/forem,STRd6/forem,substantial/forem,nruth/forem,filiptepper/forem,substantial/forem,nruth/forem,caffo/forem
ruby
## Code Before: class ApplicationController < ActionController::Base protect_from_forgery end ## Instruction: Define sign_in_path in ApplicationController for dummy application ## Code After: class ApplicationController < ActionController::Base protect_from_forgery def sign_in_path '/sign_in' end end
class ApplicationController < ActionController::Base protect_from_forgery + + def sign_in_path + '/sign_in' + end end
4
1.333333
4
0
cfe2789e388c2bb6951bee0ee5986932c0337c27
.travis.yml
.travis.yml
language: php php: - 5.3 - 5.4 script: phpunit --coverage-text before_script: - pyrus install -f http://download.pear.php.net/package/PEAR-1.9.4.tgz - git clone https://github.com/phpDocumentor/template.responsive.git lib/template.responsive - pyrus channel-discover pear.phpdoc.org - pyrus install -f lib/template.responsive/package.xml
language: php php: - 5.3 - 5.4 script: - phpunit --coverage-text - pyrus package before_script: - pyrus install -f http://download.pear.php.net/package/PEAR-1.9.4.tgz - git clone https://github.com/phpDocumentor/template.responsive.git lib/template.responsive - pyrus channel-discover pear.phpdoc.org - pyrus install -f lib/template.responsive/package.xml
Add building the package as part of CI
Add building the package as part of CI
YAML
mit
rscarson/phpDocumentor2,mbed67/phpDocumentor2,phpDocumentor/phpDocumentor2,rgeraads/phpDocumentor2,4414/phpDocumentor2,senuamedia/phpDocumentor2,revinate/phpDocumentor2,phpDocumentor/phpDocumentor2,PatidarWeb/phpDocumentor2,ddymko/phpDocumentor2,pierredup/phpDocumentor2,rgeraads/phpDocumentor2,webmozart/phpDocumentor2,pierredup/phpDocumentor2,Maxim-Mazurok/phpDocumentor2,4414/phpDocumentor2,pierredup/phpDocumentor2,potherca-contrib/phpDocumentor2,ddymko/phpDocumentor2,mvriel/phpDocumentor2,ddymko/phpDocumentor2,beealone/phpDocumentor2,peterkokot/phpDocumentor2,mvriel/phpDocumentor2,PatidarWeb/phpDocumentor2,revinate/phpDocumentor2,beealone/phpDocumentor2,mbed67/phpDocumentor2,jaapio/phpDocumentor2,fdewinne/phpDocumentor2,phpDocumentor/phpDocumentor2,PatidarWeb/phpDocumentor2,webmozart/phpDocumentor2,4414/phpDocumentor2,revinate/phpDocumentor2,potherca-contrib/phpDocumentor2,jaapio/phpDocumentor2,rgeraads/phpDocumentor2,potherca/phpDocumentor2,fdewinne/phpDocumentor2,potherca/phpDocumentor2,cesarmarinhorj/phpDocumentor2,rscarson/phpDocumentor2,ddymko/phpDocumentor2,Maxim-Mazurok/phpDocumentor2,fdewinne/phpDocumentor2,senuamedia/phpDocumentor2,rscarson/phpDocumentor2,fdewinne/phpDocumentor2,mbed67/phpDocumentor2,potherca-contrib/phpDocumentor2,revinate/phpDocumentor2,peterkokot/phpDocumentor2,mvriel/phpDocumentor2,4414/phpDocumentor2,Maxim-Mazurok/phpDocumentor2,cesarmarinhorj/phpDocumentor2,Maxim-Mazurok/phpDocumentor2,angi2/phpDocumentor2,angi2/phpDocumentor2,senuamedia/phpDocumentor2,angi2/phpDocumentor2,pierredup/phpDocumentor2,beealone/phpDocumentor2,webmozart/phpDocumentor2,webmozart/phpDocumentor2,potherca/phpDocumentor2,potherca/phpDocumentor2,peterkokot/phpDocumentor2,jaapio/phpDocumentor2,mvriel/phpDocumentor2,angi2/phpDocumentor2,peterkokot/phpDocumentor2,mbed67/phpDocumentor2,cesarmarinhorj/phpDocumentor2,phpDocumentor/phpDocumentor2,PatidarWeb/phpDocumentor2,potherca-contrib/phpDocumentor2,cesarmarinhorj/phpDocumentor2,beealone/phpDocumentor2,peterkokot/phpDocumentor2,jaapio/phpDocumentor2,rscarson/phpDocumentor2,senuamedia/phpDocumentor2,rgeraads/phpDocumentor2
yaml
## Code Before: language: php php: - 5.3 - 5.4 script: phpunit --coverage-text before_script: - pyrus install -f http://download.pear.php.net/package/PEAR-1.9.4.tgz - git clone https://github.com/phpDocumentor/template.responsive.git lib/template.responsive - pyrus channel-discover pear.phpdoc.org - pyrus install -f lib/template.responsive/package.xml ## Instruction: Add building the package as part of CI ## Code After: language: php php: - 5.3 - 5.4 script: - phpunit --coverage-text - pyrus package before_script: - pyrus install -f http://download.pear.php.net/package/PEAR-1.9.4.tgz - git clone https://github.com/phpDocumentor/template.responsive.git lib/template.responsive - pyrus channel-discover pear.phpdoc.org - pyrus install -f lib/template.responsive/package.xml
language: php php: - 5.3 - 5.4 + script: - script: phpunit --coverage-text ? ^^^^^^^ + - phpunit --coverage-text ? ^^ + - pyrus package before_script: - pyrus install -f http://download.pear.php.net/package/PEAR-1.9.4.tgz - git clone https://github.com/phpDocumentor/template.responsive.git lib/template.responsive - pyrus channel-discover pear.phpdoc.org - pyrus install -f lib/template.responsive/package.xml
4
0.333333
3
1