commit: string, lengths 40 to 40
old_file: string, lengths 4 to 118
new_file: string, lengths 4 to 118
old_contents: string, lengths 10 to 3.52k
new_contents: string, lengths 21 to 3.18k
subject: string, lengths 16 to 444
message: string, lengths 17 to 2.63k
lang: string, 1 distinct value
license: string, 13 distinct values
repos: string, lengths 7 to 43k
ndiff: string, lengths 52 to 3.32k
instruction: string, lengths 16 to 444
content: string, lengths 133 to 4.32k
fuzzy_diff: string, lengths 17 to 3.24k
6af918668cddf30c12a10fe46bc174e110bf04c3
red_api.py
red_api.py
old_contents:
import os
from pymongo import MongoClient

MONGO_USER = os.getenv('MONGO_USER')
MONGO_PASSWORD = os.getenv('MONGO_PASSWORD')
MONGO_URI = 'mongodb://{0}:{1}@paulo.mongohq.com:10039/redjohn'.format(MONGO_USER, MONGO_PASSWORD)

# Open a connection to Mongo once #
mongo_client = MongoClient(MONGO_URI)
red_john_tweets = mongo_client.redjohn.tweets

suspects = [
    'partridge',
    'kirkland',
    'bertram',
    'stiles',
    'haffner',
    'mcallister',
    'smith'
]

def get_suspect_mentions():
    suspect_mentions = {}
    for suspect in suspects:
        mentions = red_john_tweets.find({
            'suspect': suspect
        }).count()
        suspect_mentions[suspect] = mentions
    return suspect_mentions

def get_tweet_count():
    return red_john_tweets.count()

def get_suspect_tweets(suspect, limit=5):
    tweets = red_john_tweets.find({
        'suspect': suspect
    })[:limit]
    return list(tweets)
new_contents:
import os
from pymongo import DESCENDING
from pymongo import MongoClient
from bson.json_util import dumps

MONGO_USER = os.getenv('MONGO_USER')
MONGO_PASSWORD = os.getenv('MONGO_PASSWORD')
MONGO_URI = 'mongodb://{0}:{1}@paulo.mongohq.com:10039/redjohn'.format(MONGO_USER, MONGO_PASSWORD)

# Open a connection to Mongo once #
mongo_client = MongoClient(MONGO_URI)
red_john_tweets = mongo_client.redjohn.tweets

suspects = [
    'partridge',
    'kirkland',
    'bertram',
    'stiles',
    'haffner',
    'mcallister',
    'smith'
]

def get_suspect_mentions():
    suspect_mentions = {}
    for suspect in suspects:
        mentions = red_john_tweets.find({
            'suspect': suspect
        }).count()
        suspect_mentions[suspect] = mentions
    return suspect_mentions

def get_tweet_count():
    return red_john_tweets.count()

def get_suspect_tweets(suspect, limit=5):
    tweets = red_john_tweets.find({
        'suspect': suspect
    }).sort('entry_time', DESCENDING)[:limit]
    return dumps(tweets)
Use bson's JSON util to handle ObjectIds in a JSON context
Use bson's JSON util to handle ObjectIds in a JSON context
Python
mit
AnSavvides/redjohn,AnSavvides/redjohn
ndiff:
  import os
+ from pymongo import DESCENDING
  from pymongo import MongoClient
+ from bson.json_util import dumps

  MONGO_USER = os.getenv('MONGO_USER')
  MONGO_PASSWORD = os.getenv('MONGO_PASSWORD')
  MONGO_URI = 'mongodb://{0}:{1}@paulo.mongohq.com:10039/redjohn'.format(MONGO_USER, MONGO_PASSWORD)

  # Open a connection to Mongo once #
  mongo_client = MongoClient(MONGO_URI)
  red_john_tweets = mongo_client.redjohn.tweets

  suspects = [
      'partridge',
      'kirkland',
      'bertram',
      'stiles',
      'haffner',
      'mcallister',
      'smith'
  ]

  def get_suspect_mentions():
      suspect_mentions = {}
      for suspect in suspects:
          mentions = red_john_tweets.find({
              'suspect': suspect
          }).count()
          suspect_mentions[suspect] = mentions
      return suspect_mentions

  def get_tweet_count():
      return red_john_tweets.count()

  def get_suspect_tweets(suspect, limit=5):
      tweets = red_john_tweets.find({
          'suspect': suspect
-     })[:limit]
+     }).sort('entry_time', DESCENDING)[:limit]
-     return list(tweets)
+     return dumps(tweets)
instruction: Use bson's JSON util to handle ObjectIds in a JSON context
content:
## Code Before:
import os
from pymongo import MongoClient

MONGO_USER = os.getenv('MONGO_USER')
MONGO_PASSWORD = os.getenv('MONGO_PASSWORD')
MONGO_URI = 'mongodb://{0}:{1}@paulo.mongohq.com:10039/redjohn'.format(MONGO_USER, MONGO_PASSWORD)

# Open a connection to Mongo once #
mongo_client = MongoClient(MONGO_URI)
red_john_tweets = mongo_client.redjohn.tweets

suspects = [
    'partridge',
    'kirkland',
    'bertram',
    'stiles',
    'haffner',
    'mcallister',
    'smith'
]

def get_suspect_mentions():
    suspect_mentions = {}
    for suspect in suspects:
        mentions = red_john_tweets.find({
            'suspect': suspect
        }).count()
        suspect_mentions[suspect] = mentions
    return suspect_mentions

def get_tweet_count():
    return red_john_tweets.count()

def get_suspect_tweets(suspect, limit=5):
    tweets = red_john_tweets.find({
        'suspect': suspect
    })[:limit]
    return list(tweets)
## Instruction:
Use bson's JSON util to handle ObjectIds in a JSON context
## Code After:
import os
from pymongo import DESCENDING
from pymongo import MongoClient
from bson.json_util import dumps

MONGO_USER = os.getenv('MONGO_USER')
MONGO_PASSWORD = os.getenv('MONGO_PASSWORD')
MONGO_URI = 'mongodb://{0}:{1}@paulo.mongohq.com:10039/redjohn'.format(MONGO_USER, MONGO_PASSWORD)

# Open a connection to Mongo once #
mongo_client = MongoClient(MONGO_URI)
red_john_tweets = mongo_client.redjohn.tweets

suspects = [
    'partridge',
    'kirkland',
    'bertram',
    'stiles',
    'haffner',
    'mcallister',
    'smith'
]

def get_suspect_mentions():
    suspect_mentions = {}
    for suspect in suspects:
        mentions = red_john_tweets.find({
            'suspect': suspect
        }).count()
        suspect_mentions[suspect] = mentions
    return suspect_mentions

def get_tweet_count():
    return red_john_tweets.count()

def get_suspect_tweets(suspect, limit=5):
    tweets = red_john_tweets.find({
        'suspect': suspect
    }).sort('entry_time', DESCENDING)[:limit]
    return dumps(tweets)
fuzzy_diff:
// ... existing code ...
import os
from pymongo import DESCENDING
from pymongo import MongoClient
from bson.json_util import dumps

MONGO_USER = os.getenv('MONGO_USER')
// ... modified code ...
    tweets = red_john_tweets.find({
        'suspect': suspect
    }).sort('entry_time', DESCENDING)[:limit]
    return dumps(tweets)
// ... rest of the code ...
8eb0b7fcd6ffb81d6b0fc69cb31c7625550583d7
targetrupypy.py
targetrupypy.py
old_contents:
from pypy.jit.codewriter.policy import JitPolicy
from rupypy.main import entry_point

def target(driver, args):
    driver.exe_name = "rupypy-c"
    return entry_point, None

def jitpolicy(driver):
    return JitPolicy()
new_contents:
from pypy.jit.codewriter.policy import JitPolicy
from rupypy.main import entry_point

def target(driver, args):
    driver.exe_name = "./bin/topaz"
    return entry_point, None


def jitpolicy(driver):
    return JitPolicy()
Move towards a normal bin directory.
Move towards a normal bin directory.
Python
bsd-3-clause
babelsberg/babelsberg-r,topazproject/topaz,babelsberg/babelsberg-r,kachick/topaz,kachick/topaz,babelsberg/babelsberg-r,babelsberg/babelsberg-r,babelsberg/babelsberg-r,kachick/topaz,topazproject/topaz,topazproject/topaz,topazproject/topaz
ndiff:
  from pypy.jit.codewriter.policy import JitPolicy
  from rupypy.main import entry_point

  def target(driver, args):
-     driver.exe_name = "rupypy-c"
+     driver.exe_name = "./bin/topaz"
      return entry_point, None

+
  def jitpolicy(driver):
      return JitPolicy()
+
instruction: Move towards a normal bin directory.
content:
## Code Before:
from pypy.jit.codewriter.policy import JitPolicy
from rupypy.main import entry_point

def target(driver, args):
    driver.exe_name = "rupypy-c"
    return entry_point, None

def jitpolicy(driver):
    return JitPolicy()
## Instruction:
Move towards a normal bin directory.
## Code After:
from pypy.jit.codewriter.policy import JitPolicy
from rupypy.main import entry_point

def target(driver, args):
    driver.exe_name = "./bin/topaz"
    return entry_point, None


def jitpolicy(driver):
    return JitPolicy()
fuzzy_diff:
// ... existing code ...
def target(driver, args):
    driver.exe_name = "./bin/topaz"
    return entry_point, None


def jitpolicy(driver):
// ... rest of the code ...
2cc8a541814cc353e7b60767afd2128dce38918a
tests/test_plugins/test_plugin/server.py
tests/test_plugins/test_plugin/server.py
old_contents:
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource


class CustomAppRoot(object):
    """
    The webroot endpoint simply serves the main index HTML file.
    """
    exposed = True

    def GET(self):
        return "hello world"


class Other(Resource):
    def __init__(self):
        self.resourceName = 'other'

        self.route('GET', (), self.getResource)

    @access.public
    def getResource(self, params):
        return ['custom REST route']
    getResource.description = Description('Get something.')


def load(info):
    info['serverRoot'], info['serverRoot'].girder = CustomAppRoot(), info['serverRoot']
    info['serverRoot'].api = info['serverRoot'].girder.api
    del info['serverRoot'].girder.api

    info['apiRoot'].other = Other()
new_contents:
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource


class CustomAppRoot(object):
    """
    The webroot endpoint simply serves the main index HTML file.
    """
    exposed = True

    def GET(self):
        return "hello world"


class Other(Resource):
    def __init__(self):
        self.resourceName = 'other'

        self.route('GET', (), self.getResource)

    @access.public
    def getResource(self, params):
        return ['custom REST route']
    getResource.description = Description('Get something.')


def load(info):
    info['serverRoot'], info['serverRoot'].girder = (
        CustomAppRoot(), info['serverRoot'])
    info['serverRoot'].api = info['serverRoot'].girder.api
    del info['serverRoot'].girder.api

    info['apiRoot'].other = Other()
Fix failing python style test
Fix failing python style test
Python
apache-2.0
jbeezley/girder,jcfr/girder,RafaelPalomar/girder,opadron/girder,Kitware/girder,essamjoubori/girder,RafaelPalomar/girder,adsorensen/girder,Xarthisius/girder,adsorensen/girder,data-exp-lab/girder,jcfr/girder,girder/girder,opadron/girder,Xarthisius/girder,data-exp-lab/girder,jcfr/girder,kotfic/girder,manthey/girder,msmolens/girder,salamb/girder,sutartmelson/girder,adsorensen/girder,essamjoubori/girder,data-exp-lab/girder,essamjoubori/girder,chrismattmann/girder,kotfic/girder,opadron/girder,kotfic/girder,Xarthisius/girder,jcfr/girder,data-exp-lab/girder,girder/girder,opadron/girder,girder/girder,manthey/girder,salamb/girder,salamb/girder,adsorensen/girder,kotfic/girder,jbeezley/girder,data-exp-lab/girder,msmolens/girder,msmolens/girder,chrismattmann/girder,essamjoubori/girder,essamjoubori/girder,Kitware/girder,jcfr/girder,Xarthisius/girder,chrismattmann/girder,RafaelPalomar/girder,adsorensen/girder,jbeezley/girder,chrismattmann/girder,sutartmelson/girder,sutartmelson/girder,RafaelPalomar/girder,RafaelPalomar/girder,kotfic/girder,sutartmelson/girder,Xarthisius/girder,Kitware/girder,jbeezley/girder,salamb/girder,manthey/girder,msmolens/girder,chrismattmann/girder,girder/girder,salamb/girder,manthey/girder,sutartmelson/girder,Kitware/girder,opadron/girder,msmolens/girder
ndiff:
  from girder.api import access
  from girder.api.describe import Description
  from girder.api.rest import Resource


  class CustomAppRoot(object):
      """
      The webroot endpoint simply serves the main index HTML file.
      """
      exposed = True

      def GET(self):
          return "hello world"


  class Other(Resource):
      def __init__(self):
          self.resourceName = 'other'

          self.route('GET', (), self.getResource)

      @access.public
      def getResource(self, params):
          return ['custom REST route']
      getResource.description = Description('Get something.')


  def load(info):
-     info['serverRoot'], info['serverRoot'].girder = CustomAppRoot(), info['serverRoot']
+     info['serverRoot'], info['serverRoot'].girder = (
+         CustomAppRoot(), info['serverRoot'])
      info['serverRoot'].api = info['serverRoot'].girder.api
      del info['serverRoot'].girder.api

      info['apiRoot'].other = Other()
instruction: Fix failing python style test
content:
## Code Before:
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource


class CustomAppRoot(object):
    """
    The webroot endpoint simply serves the main index HTML file.
    """
    exposed = True

    def GET(self):
        return "hello world"


class Other(Resource):
    def __init__(self):
        self.resourceName = 'other'

        self.route('GET', (), self.getResource)

    @access.public
    def getResource(self, params):
        return ['custom REST route']
    getResource.description = Description('Get something.')


def load(info):
    info['serverRoot'], info['serverRoot'].girder = CustomAppRoot(), info['serverRoot']
    info['serverRoot'].api = info['serverRoot'].girder.api
    del info['serverRoot'].girder.api

    info['apiRoot'].other = Other()
## Instruction:
Fix failing python style test
## Code After:
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource


class CustomAppRoot(object):
    """
    The webroot endpoint simply serves the main index HTML file.
    """
    exposed = True

    def GET(self):
        return "hello world"


class Other(Resource):
    def __init__(self):
        self.resourceName = 'other'

        self.route('GET', (), self.getResource)

    @access.public
    def getResource(self, params):
        return ['custom REST route']
    getResource.description = Description('Get something.')


def load(info):
    info['serverRoot'], info['serverRoot'].girder = (
        CustomAppRoot(), info['serverRoot'])
    info['serverRoot'].api = info['serverRoot'].girder.api
    del info['serverRoot'].girder.api

    info['apiRoot'].other = Other()
fuzzy_diff:
...
def load(info):
    info['serverRoot'], info['serverRoot'].girder = (
        CustomAppRoot(), info['serverRoot'])
    info['serverRoot'].api = info['serverRoot'].girder.api
    del info['serverRoot'].girder.api
...
5cca245f84a87f503c8e16577b7dba635d689a26
opencc/__main__.py
opencc/__main__.py
old_contents:
from __future__ import print_function

import argparse
import sys

from opencc import OpenCC


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--input', metavar='<file>',
                        help='Read original text from <file>.')
    parser.add_argument('-o', '--output', metavar='<file>',
                        help='Write converted text to <file>.')
    parser.add_argument('-c', '--config', metavar='<file>',
                        help='Configuration file')
    parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for input')
    parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for output')
    args = parser.parse_args()

    if args.config is None:
        print("Please specify a configuration file.", file=sys.stderr)
        return 1

    cc = OpenCC(args.config)
    with open(args.input if args.input else 0, encoding=args.in_enc) as f:
        input_str = f.read()
    output_str = cc.convert(input_str)
    with open(args.output if args.output else 1, 'w',
              encoding=args.out_enc) as f:
        f.write(output_str)

    return 0


if __name__ == '__main__':
    sys.exit(main())
new_contents:
from __future__ import print_function

import argparse
import sys
import io

from opencc import OpenCC


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--input', metavar='<file>',
                        help='Read original text from <file>.')
    parser.add_argument('-o', '--output', metavar='<file>',
                        help='Write converted text to <file>.')
    parser.add_argument('-c', '--config', metavar='<conversion>',
                        help='Conversion')
    parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for input')
    parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for output')
    args = parser.parse_args()

    if args.config is None:
        print("Please specify a conversion.", file=sys.stderr)
        return 1

    cc = OpenCC(args.config)
    with io.open(args.input if args.input else 0, encoding=args.in_enc) as f:
        input_str = f.read()
    output_str = cc.convert(input_str)
    with io.open(args.output if args.output else 1, 'w',
              encoding=args.out_enc) as f:
        f.write(output_str)

    return 0


if __name__ == '__main__':
    sys.exit(main())
Add support for Python 2.6 and 2.7
message:
Add support for Python 2.6 and 2.7

Remove the following error when using Python 2.6 and 2.7.
TypeError: 'encoding' is an invalid keyword argument for this function

Python 3 operation is unchanged
Python
apache-2.0
yichen0831/opencc-python
ndiff:
  from __future__ import print_function

  import argparse
  import sys
+ import io

  from opencc import OpenCC


  def main():
      parser = argparse.ArgumentParser(
          formatter_class=argparse.ArgumentDefaultsHelpFormatter)
      parser.add_argument('-i', '--input', metavar='<file>',
                          help='Read original text from <file>.')
      parser.add_argument('-o', '--output', metavar='<file>',
                          help='Write converted text to <file>.')
-     parser.add_argument('-c', '--config', metavar='<file>',
+     parser.add_argument('-c', '--config', metavar='<conversion>',
-                         help='Configuration file')
+                         help='Conversion')
      parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
                          help='Encoding for input')
      parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8',
                          help='Encoding for output')
      args = parser.parse_args()

      if args.config is None:
-         print("Please specify a configuration file.", file=sys.stderr)
+         print("Please specify a conversion.", file=sys.stderr)
          return 1

      cc = OpenCC(args.config)
-     with open(args.input if args.input else 0, encoding=args.in_enc) as f:
+     with io.open(args.input if args.input else 0, encoding=args.in_enc) as f:
          input_str = f.read()
      output_str = cc.convert(input_str)
-     with open(args.output if args.output else 1, 'w',
+     with io.open(args.output if args.output else 1, 'w',
                encoding=args.out_enc) as f:
          f.write(output_str)

      return 0


  if __name__ == '__main__':
      sys.exit(main())
instruction: Add support for Python 2.6 and 2.7
content:
## Code Before:
from __future__ import print_function

import argparse
import sys

from opencc import OpenCC


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--input', metavar='<file>',
                        help='Read original text from <file>.')
    parser.add_argument('-o', '--output', metavar='<file>',
                        help='Write converted text to <file>.')
    parser.add_argument('-c', '--config', metavar='<file>',
                        help='Configuration file')
    parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for input')
    parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for output')
    args = parser.parse_args()

    if args.config is None:
        print("Please specify a configuration file.", file=sys.stderr)
        return 1

    cc = OpenCC(args.config)
    with open(args.input if args.input else 0, encoding=args.in_enc) as f:
        input_str = f.read()
    output_str = cc.convert(input_str)
    with open(args.output if args.output else 1, 'w',
              encoding=args.out_enc) as f:
        f.write(output_str)

    return 0


if __name__ == '__main__':
    sys.exit(main())
## Instruction:
Add support for Python 2.6 and 2.7
## Code After:
from __future__ import print_function

import argparse
import sys
import io

from opencc import OpenCC


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--input', metavar='<file>',
                        help='Read original text from <file>.')
    parser.add_argument('-o', '--output', metavar='<file>',
                        help='Write converted text to <file>.')
    parser.add_argument('-c', '--config', metavar='<conversion>',
                        help='Conversion')
    parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for input')
    parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for output')
    args = parser.parse_args()

    if args.config is None:
        print("Please specify a conversion.", file=sys.stderr)
        return 1

    cc = OpenCC(args.config)
    with io.open(args.input if args.input else 0, encoding=args.in_enc) as f:
        input_str = f.read()
    output_str = cc.convert(input_str)
    with io.open(args.output if args.output else 1, 'w',
              encoding=args.out_enc) as f:
        f.write(output_str)

    return 0


if __name__ == '__main__':
    sys.exit(main())
fuzzy_diff:
// ... existing code ...
import argparse
import sys
import io

from opencc import OpenCC
// ... modified code ...
    parser.add_argument('-o', '--output', metavar='<file>',
                        help='Write converted text to <file>.')
    parser.add_argument('-c', '--config', metavar='<conversion>',
                        help='Conversion')
    parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for input')
...
    if args.config is None:
        print("Please specify a conversion.", file=sys.stderr)
        return 1
...
    cc = OpenCC(args.config)
    with io.open(args.input if args.input else 0, encoding=args.in_enc) as f:
        input_str = f.read()
    output_str = cc.convert(input_str)
    with io.open(args.output if args.output else 1, 'w',
              encoding=args.out_enc) as f:
        f.write(output_str)
// ... rest of the code ...
6f0a35372d625f923b9093194540cf0b0e9f054d
platformio_api/__init__.py
platformio_api/__init__.py
old_contents:
import json
import logging.config
import os
from time import tzset

VERSION = (0, 3, 0)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio-api"
__description__ = ("An API for PlatformIO")
__url__ = "https://github.com/ivankravets/platformio-api"

__author__ = "Ivan Kravets"
__email__ = "[email protected]"

__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014-2015 Ivan Kravets"

config = dict(
    SQLALCHEMY_DATABASE_URI=None,
    GITHUB_LOGIN=None,
    GITHUB_PASSWORD=None,
    DL_PIO_DIR=None,
    DL_PIO_URL=None,
    MAX_DLFILE_SIZE=1024*1024*10,
    LOGGING=dict(version=1)
)

assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
    config.update(json.load(f))

# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])

# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
tzset()
new_contents:
import json
import logging.config
import os
from time import tzset

VERSION = (0, 3, 0)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio-api"
__description__ = ("An API for PlatformIO")
__url__ = "https://github.com/ivankravets/platformio-api"

__author__ = "Ivan Kravets"
__email__ = "[email protected]"

__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014-2015 Ivan Kravets"

config = dict(
    SQLALCHEMY_DATABASE_URI=None,
    GITHUB_LOGIN=None,
    GITHUB_PASSWORD=None,
    DL_PIO_DIR=None,
    DL_PIO_URL=None,
    MAX_DLFILE_SIZE=1024*1024*20,  # 20 Mb
    LOGGING=dict(version=1)
)

assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
    config.update(json.load(f))

# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])

# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
tzset()
Increase repo size to 20Mb
Increase repo size to 20Mb
Python
apache-2.0
orgkhnargh/platformio-api,platformio/platformio-api
ndiff:
  import json
  import logging.config
  import os
  from time import tzset

  VERSION = (0, 3, 0)
  __version__ = ".".join([str(s) for s in VERSION])

  __title__ = "platformio-api"
  __description__ = ("An API for PlatformIO")
  __url__ = "https://github.com/ivankravets/platformio-api"

  __author__ = "Ivan Kravets"
  __email__ = "[email protected]"

  __license__ = "MIT License"
  __copyright__ = "Copyright (C) 2014-2015 Ivan Kravets"

  config = dict(
      SQLALCHEMY_DATABASE_URI=None,
      GITHUB_LOGIN=None,
      GITHUB_PASSWORD=None,
      DL_PIO_DIR=None,
      DL_PIO_URL=None,
-     MAX_DLFILE_SIZE=1024*1024*10,
+     MAX_DLFILE_SIZE=1024*1024*20,  # 20 Mb
      LOGGING=dict(version=1)
  )

  assert "PIOAPI_CONFIG_PATH" in os.environ
  with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
      config.update(json.load(f))

  # configure logging for packages
  logging.basicConfig()
  logging.config.dictConfig(config['LOGGING'])

  # setup time zone to UTC globally
  os.environ['TZ'] = "+00:00"
  tzset()
instruction: Increase repo size to 20Mb
content:
## Code Before:
import json
import logging.config
import os
from time import tzset

VERSION = (0, 3, 0)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio-api"
__description__ = ("An API for PlatformIO")
__url__ = "https://github.com/ivankravets/platformio-api"

__author__ = "Ivan Kravets"
__email__ = "[email protected]"

__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014-2015 Ivan Kravets"

config = dict(
    SQLALCHEMY_DATABASE_URI=None,
    GITHUB_LOGIN=None,
    GITHUB_PASSWORD=None,
    DL_PIO_DIR=None,
    DL_PIO_URL=None,
    MAX_DLFILE_SIZE=1024*1024*10,
    LOGGING=dict(version=1)
)

assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
    config.update(json.load(f))

# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])

# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
tzset()
## Instruction:
Increase repo size to 20Mb
## Code After:
import json
import logging.config
import os
from time import tzset

VERSION = (0, 3, 0)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio-api"
__description__ = ("An API for PlatformIO")
__url__ = "https://github.com/ivankravets/platformio-api"

__author__ = "Ivan Kravets"
__email__ = "[email protected]"

__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014-2015 Ivan Kravets"

config = dict(
    SQLALCHEMY_DATABASE_URI=None,
    GITHUB_LOGIN=None,
    GITHUB_PASSWORD=None,
    DL_PIO_DIR=None,
    DL_PIO_URL=None,
    MAX_DLFILE_SIZE=1024*1024*20,  # 20 Mb
    LOGGING=dict(version=1)
)

assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
    config.update(json.load(f))

# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])

# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
tzset()
fuzzy_diff:
# ... existing code ...
    DL_PIO_DIR=None,
    DL_PIO_URL=None,
    MAX_DLFILE_SIZE=1024*1024*20,  # 20 Mb
    LOGGING=dict(version=1)
)
# ... rest of the code ...
7a057ba74a5914f8d7f8db3646feb5cb06a74cef
ml/pytorch/image_classification/image_classifier.py
ml/pytorch/image_classification/image_classifier.py
old_contents:
import torch
from torch import nn
from torch.autograd import Variable

def accuracy(preds, labels):
    return (preds==labels).mean()

def n_correct(preds, labels):
    return (preds==labels).sum()


class ImageClassifier(object):
    def __init__(self, net, n_classes):
        """
        Args:
            net: A pytorch network module that will computer a forward pass
            n_classes: number of output classes.
        """
        self.history = []
        self.n_classes = n_classes
        self.net = net

        # LOSS FUNCTION
        if n_classes <= 2:
            # Binary classification
            self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
        else:
            # multiclass classification
            self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
            #self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs

        # OPTIMIZER
        self.optimizer = None
new_contents:
import torch
from torch import nn
from torch.autograd import Variable

def accuracy(preds, labels):
    return (preds==labels).mean()

def n_correct(preds, labels):
    return (preds==labels).sum()


class ImageClassifier(object):
    def __init__(self, net, n_classes):
        """
        Args:
            net: A pytorch network module that will computer a forward pass
            n_classes: number of output classes.
        """
        self.history = []
        self.n_classes = n_classes
        self.net = net

        # LOSS FUNCTION
        if n_classes <= 2:
            # Binary classification
            self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
        else:
            # multiclass classification
            self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
            #self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs

        # OPTIMIZER
        self.optimizer = None

    def set_optimizer(self, opt_func=torch.optim.Adam, **kwargs):
        """
        Args:
            opt_func: (function class) the optimization function creator to use
            **kwargs: The keyword arguments to pass to opt_func
                eg: lr=1e-3, weight_decay=0
        """
        self.opt_func = opt_func
        self.opt_args = kwargs
        self.optimizer = opt_func(self.net.parameters(), **kwargs)
Add set_optimizer method to pytorch ImageClassifier class
FEAT: Add set_optimizer method to pytorch ImageClassifier class
Python
apache-2.0
ronrest/convenience_py,ronrest/convenience_py
ndiff:
  import torch
  from torch import nn
  from torch.autograd import Variable

  def accuracy(preds, labels):
      return (preds==labels).mean()

  def n_correct(preds, labels):
      return (preds==labels).sum()


  class ImageClassifier(object):
      def __init__(self, net, n_classes):
          """
          Args:
              net: A pytorch network module that will computer a forward pass
              n_classes: number of output classes.
          """
          self.history = []
          self.n_classes = n_classes
          self.net = net

          # LOSS FUNCTION
          if n_classes <= 2:
              # Binary classification
              self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
          else:
              # multiclass classification
              self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
              #self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs

          # OPTIMIZER
          self.optimizer = None
+
+     def set_optimizer(self, opt_func=torch.optim.Adam, **kwargs):
+         """
+         Args:
+             opt_func: (function class) the optimization function creator to use
+             **kwargs: The keyword arguments to pass to opt_func
+                 eg: lr=1e-3, weight_decay=0
+         """
+         self.opt_func = opt_func
+         self.opt_args = kwargs
+         self.optimizer = opt_func(self.net.parameters(), **kwargs)
+
instruction: Add set_optimizer method to pytorch ImageClassifier class
content:
## Code Before:
import torch
from torch import nn
from torch.autograd import Variable

def accuracy(preds, labels):
    return (preds==labels).mean()

def n_correct(preds, labels):
    return (preds==labels).sum()


class ImageClassifier(object):
    def __init__(self, net, n_classes):
        """
        Args:
            net: A pytorch network module that will computer a forward pass
            n_classes: number of output classes.
        """
        self.history = []
        self.n_classes = n_classes
        self.net = net

        # LOSS FUNCTION
        if n_classes <= 2:
            # Binary classification
            self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
        else:
            # multiclass classification
            self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
            #self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs

        # OPTIMIZER
        self.optimizer = None
## Instruction:
Add set_optimizer method to pytorch ImageClassifier class
## Code After:
import torch
from torch import nn
from torch.autograd import Variable

def accuracy(preds, labels):
    return (preds==labels).mean()

def n_correct(preds, labels):
    return (preds==labels).sum()


class ImageClassifier(object):
    def __init__(self, net, n_classes):
        """
        Args:
            net: A pytorch network module that will computer a forward pass
            n_classes: number of output classes.
        """
        self.history = []
        self.n_classes = n_classes
        self.net = net

        # LOSS FUNCTION
        if n_classes <= 2:
            # Binary classification
            self.loss_func = torch.nn.BCEWithLogitsLoss(weight=None)
        else:
            # multiclass classification
            self.loss_func = torch.nn.CrossEntropyLoss(weight=None) # on logits
            #self.loss_func = torch.nn.NLLLoss(weight=None) #on LogSoftmax() outputs

        # OPTIMIZER
        self.optimizer = None

    def set_optimizer(self, opt_func=torch.optim.Adam, **kwargs):
        """
        Args:
            opt_func: (function class) the optimization function creator to use
            **kwargs: The keyword arguments to pass to opt_func
                eg: lr=1e-3, weight_decay=0
        """
        self.opt_func = opt_func
        self.opt_args = kwargs
        self.optimizer = opt_func(self.net.parameters(), **kwargs)
fuzzy_diff:
# ... existing code ...
        self.optimizer = None

    def set_optimizer(self, opt_func=torch.optim.Adam, **kwargs):
        """
        Args:
            opt_func: (function class) the optimization function creator to use
            **kwargs: The keyword arguments to pass to opt_func
                eg: lr=1e-3, weight_decay=0
        """
        self.opt_func = opt_func
        self.opt_args = kwargs
        self.optimizer = opt_func(self.net.parameters(), **kwargs)
# ... rest of the code ...
6d72a1d3b4bd2e1a11e2fb9744353e5d2d9c8863
setup.py
setup.py
old_contents:
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

setup(cmdclass = {'build_ext': build_ext},
      ext_modules = [Extension("lulu_base", ["lulu_base.pyx"]),
                     Extension("ccomp", ["ccomp.pyx"])])
new_contents:
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy

def cext(name):
    return Extension(name, [name + ".pyx"],
                     include_dirs=[numpy.get_include()])

setup(cmdclass = {'build_ext': build_ext},
      ext_modules = [cext('lulu_base'), cext('ccomp')])
Add NumPy includes dir for Cython builds.
Add NumPy includes dir for Cython builds.
Python
bsd-3-clause
stefanv/lulu
from distutils.core import setup from distutils.extension import Extension from Cython.Distutils import build_ext + import numpy + + def cext(name): + return Extension(name, [name + ".pyx"], + include_dirs=[numpy.get_include()]) setup(cmdclass = {'build_ext': build_ext}, + ext_modules = [cext('lulu_base'), cext('ccomp')]) - ext_modules = [Extension("lulu_base", ["lulu_base.pyx"]), - Extension("ccomp", ["ccomp.pyx"])]) +
instruction: Add NumPy includes dir for Cython builds.
content:
## Code Before:
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

setup(cmdclass = {'build_ext': build_ext},
      ext_modules = [Extension("lulu_base", ["lulu_base.pyx"]),
                     Extension("ccomp", ["ccomp.pyx"])])
## Instruction:
Add NumPy includes dir for Cython builds.
## Code After:
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy

def cext(name):
    return Extension(name, [name + ".pyx"],
                     include_dirs=[numpy.get_include()])

setup(cmdclass = {'build_ext': build_ext},
      ext_modules = [cext('lulu_base'), cext('ccomp')])
fuzzy_diff:
// ... existing code ...
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy

def cext(name):
    return Extension(name, [name + ".pyx"],
                     include_dirs=[numpy.get_include()])

setup(cmdclass = {'build_ext': build_ext},
      ext_modules = [cext('lulu_base'), cext('ccomp')])
// ... rest of the code ...
bf41f23d71491050dc79a2975b26ffe210b45505
examples/test_contains_selector.py
examples/test_contains_selector.py
old_contents:
from seleniumbase import BaseCase


class ContainsSelectorTests(BaseCase):
    def test_contains_selector(self):
        self.open("https://xkcd.com/2207/")
        self.assert_text("Math Work", "#ctitle")
        self.click('a:contains("Next")')
        self.assert_text("Drone Fishing", "#ctitle")
new_contents:
from seleniumbase import BaseCase


class ContainsSelectorTests(BaseCase):
    def test_contains_selector(self):
        self.open("https://xkcd.com/2207/")
        self.assert_element('div.box div:contains("Math Work")')
        self.click('a:contains("Next")')
        self.assert_element('div div:contains("Drone Fishing")')
Update an example that uses the ":contains()" selector
Update an example that uses the ":contains()" selector
Python
mit
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
ndiff:
  from seleniumbase import BaseCase


  class ContainsSelectorTests(BaseCase):
      def test_contains_selector(self):
          self.open("https://xkcd.com/2207/")
-         self.assert_text("Math Work", "#ctitle")
+         self.assert_element('div.box div:contains("Math Work")')
          self.click('a:contains("Next")')
-         self.assert_text("Drone Fishing", "#ctitle")
+         self.assert_element('div div:contains("Drone Fishing")')
instruction: Update an example that uses the ":contains()" selector
content:
## Code Before:
from seleniumbase import BaseCase


class ContainsSelectorTests(BaseCase):
    def test_contains_selector(self):
        self.open("https://xkcd.com/2207/")
        self.assert_text("Math Work", "#ctitle")
        self.click('a:contains("Next")')
        self.assert_text("Drone Fishing", "#ctitle")
## Instruction:
Update an example that uses the ":contains()" selector
## Code After:
from seleniumbase import BaseCase


class ContainsSelectorTests(BaseCase):
    def test_contains_selector(self):
        self.open("https://xkcd.com/2207/")
        self.assert_element('div.box div:contains("Math Work")')
        self.click('a:contains("Next")')
        self.assert_element('div div:contains("Drone Fishing")')
fuzzy_diff:
// ... existing code ...
    def test_contains_selector(self):
        self.open("https://xkcd.com/2207/")
        self.assert_element('div.box div:contains("Math Work")')
        self.click('a:contains("Next")')
        self.assert_element('div div:contains("Drone Fishing")')
// ... rest of the code ...
047483d9897e75f8284c39e8477a285763da7b37
heufybot/modules/util/commandhandler.py
heufybot/modules/util/commandhandler.py
old_contents:
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements


class CommandHandler(BotModule):
    implements(IPlugin, IBotModule)

    name = "CommandHandler"

    def actions(self):
        return [
            ("message-channel", 1, self.handleChannelMessage),
            ("message-user", 1, self.handlePrivateMessage)
        ]

    def handleChannelMessage(self, server, channel, user, messageBody):
        message = {
            "server": server,
            "source": channel.name,
            "channel": channel,
            "user": user,
            "body": messageBody
        }
        self._handleCommand(message)

    def handlePrivateMessage(self, server, user, messageBody):
        message = {
            "server": server,
            "source": user.nick,
            "user": user,
            "body": messageBody
        }
        self._handleCommand(message)

    def _handleCommand(self, message):
        commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
        if not message["body"].startswith(commandPrefix):
            return # We don't need to be handling things that aren't bot commands
        params = message["body"].split()
        message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
        del params[0]
        message["params"] = params
        self.bot.moduleHandler.runProcessingAction("botmessage", message)


commandHandler = CommandHandler()
new_contents:
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements


class CommandHandler(BotModule):
    implements(IPlugin, IBotModule)

    name = "CommandHandler"

    def actions(self):
        return [
            ("message-channel", 1, self.handleChannelMessage),
            ("message-user", 1, self.handlePrivateMessage)
        ]

    def handleChannelMessage(self, server, channel, user, messageBody):
        message = {
            "server": server,
            "source": channel.name,
            "channel": channel,
            "user": user,
            "body": messageBody
        }
        self._handleCommand(message)

    def handlePrivateMessage(self, server, user, messageBody):
        message = {
            "server": server,
            "source": user.nick,
            "user": user,
            "body": messageBody
        }
        self._handleCommand(message)

    def _handleCommand(self, message):
        commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
        botNick = self.bot.servers[message["server"]].nick.lower()
        params = message["body"].split()

        if message["body"].startswith(commandPrefix):
            message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
            del params[0]
        elif message["body"].lower().startswith(botNick):
            message["command"] = params[1]
            del params[0:2]
        else:
            return # We don't need to be handling things that aren't bot commands
        message["params"] = params
        self.bot.moduleHandler.runProcessingAction("botmessage", message)


commandHandler = CommandHandler()
Make the bot respond to its name
message:
Make the bot respond to its name

Implements GH-7
Python
mit
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
ndiff:
  from twisted.plugin import IPlugin
  from heufybot.moduleinterface import BotModule, IBotModule
  from zope.interface import implements


  class CommandHandler(BotModule):
      implements(IPlugin, IBotModule)

      name = "CommandHandler"

      def actions(self):
          return [
              ("message-channel", 1, self.handleChannelMessage),
              ("message-user", 1, self.handlePrivateMessage)
          ]

      def handleChannelMessage(self, server, channel, user, messageBody):
          message = {
              "server": server,
              "source": channel.name,
              "channel": channel,
              "user": user,
              "body": messageBody
          }
          self._handleCommand(message)

      def handlePrivateMessage(self, server, user, messageBody):
          message = {
              "server": server,
              "source": user.nick,
              "user": user,
              "body": messageBody
          }
          self._handleCommand(message)

      def _handleCommand(self, message):
          commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
+         botNick = self.bot.servers[message["server"]].nick.lower()
+         params = message["body"].split()
+
-         if not message["body"].startswith(commandPrefix):
+         if message["body"].startswith(commandPrefix):
+             message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
+             del params[0]
+         elif message["body"].lower().startswith(botNick):
+             message["command"] = params[1]
+             del params[0:2]
+         else:
              return # We don't need to be handling things that aren't bot commands
-         params = message["body"].split()
-         message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
-         del params[0]
          message["params"] = params
          self.bot.moduleHandler.runProcessingAction("botmessage", message)


  commandHandler = CommandHandler()
instruction: Make the bot respond to its name
content:
## Code Before:
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements


class CommandHandler(BotModule):
    implements(IPlugin, IBotModule)

    name = "CommandHandler"

    def actions(self):
        return [
            ("message-channel", 1, self.handleChannelMessage),
            ("message-user", 1, self.handlePrivateMessage)
        ]

    def handleChannelMessage(self, server, channel, user, messageBody):
        message = {
            "server": server,
            "source": channel.name,
            "channel": channel,
            "user": user,
            "body": messageBody
        }
        self._handleCommand(message)

    def handlePrivateMessage(self, server, user, messageBody):
        message = {
            "server": server,
            "source": user.nick,
            "user": user,
            "body": messageBody
        }
        self._handleCommand(message)

    def _handleCommand(self, message):
        commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
        if not message["body"].startswith(commandPrefix):
            return # We don't need to be handling things that aren't bot commands
        params = message["body"].split()
        message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
        del params[0]
        message["params"] = params
        self.bot.moduleHandler.runProcessingAction("botmessage", message)


commandHandler = CommandHandler()
## Instruction:
Make the bot respond to its name
## Code After:
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements


class CommandHandler(BotModule):
    implements(IPlugin, IBotModule)

    name = "CommandHandler"

    def actions(self):
        return [
            ("message-channel", 1, self.handleChannelMessage),
            ("message-user", 1, self.handlePrivateMessage)
        ]

    def handleChannelMessage(self, server, channel, user, messageBody):
        message = {
            "server": server,
            "source": channel.name,
            "channel": channel,
            "user": user,
            "body": messageBody
        }
        self._handleCommand(message)

    def handlePrivateMessage(self, server, user, messageBody):
        message = {
            "server": server,
            "source": user.nick,
            "user": user,
            "body": messageBody
        }
        self._handleCommand(message)

    def _handleCommand(self, message):
        commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
        botNick = self.bot.servers[message["server"]].nick.lower()
        params = message["body"].split()

        if message["body"].startswith(commandPrefix):
            message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
            del params[0]
        elif message["body"].lower().startswith(botNick):
            message["command"] = params[1]
            del params[0:2]
        else:
            return # We don't need to be handling things that aren't bot commands
        message["params"] = params
        self.bot.moduleHandler.runProcessingAction("botmessage", message)


commandHandler = CommandHandler()
fuzzy_diff:
...
    def _handleCommand(self, message):
        commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
        botNick = self.bot.servers[message["server"]].nick.lower()
        params = message["body"].split()

        if message["body"].startswith(commandPrefix):
            message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
            del params[0]
        elif message["body"].lower().startswith(botNick):
            message["command"] = params[1]
            del params[0:2]
        else:
            return # We don't need to be handling things that aren't bot commands
        message["params"] = params
        self.bot.moduleHandler.runProcessingAction("botmessage", message)
...
262a8fe3651a4ad368fd6594cba0669267c2d225
run_deploy_job_wr.py
run_deploy_job_wr.py
old_contents:
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile


def main():
    revision_build = os.environ['revision_build']
    job_name = os.environ['JOB_NAME']
    build_number = os.environ['BUILD_NUMBER']
    prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
        revision_build, job_name, build_number)
    s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
    command = [
        '$HOME/juju-ci-tools/run-deploy-job-remote.bash',
        revision_build,
        job_name,
        ]
    command.extend(sys.argv[2:])
    with NamedTemporaryFile() as config_file:
        json.dump({
            'command': command,
            'install': {},
            'artifacts': {'artifacts': [
                'artifacts/machine*/*log*',
                'artifacts/*.jenv',
                ]},
            'bucket': 'juju-qa-data',
            }, config_file)
        config_file.flush()
        subprocess.check_call([
            'workspace-run', config_file.name, sys.argv[1], prefix,
            '--s3-config', s3_config, '-v',
            ])


if __name__ == '__main__':
    main()
new_contents:
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile


def main():
    revision_build = os.environ['revision_build']
    job_name = os.environ['JOB_NAME']
    build_number = os.environ['BUILD_NUMBER']
    prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
        revision_build, job_name, build_number)
    s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
    command = [
        '$HOME/juju-ci-tools/run-deploy-job-remote.bash',
        revision_build,
        job_name,
        ]
    command.extend(sys.argv[2:])
    with NamedTemporaryFile() as config_file:
        json.dump({
            'command': command,
            'install': {},
            'artifacts': {'artifacts': [
                'artifacts/machine*/*log*',
                'artifacts/*.jenv',
                'artifacts/*.json',
                ]},
            'bucket': 'juju-qa-data',
            }, config_file)
        config_file.flush()
        subprocess.check_call([
            'workspace-run', config_file.name, sys.argv[1], prefix,
            '--s3-config', s3_config, '-v',
            ])


if __name__ == '__main__':
    main()
Add *.json to the list of artifacts backed up by Workspace Runner.
Add *.json to the list of artifacts backed up by Workspace Runner.
Python
agpl-3.0
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
ndiff:
  import json
  import os
  from os.path import join
  import subprocess
  import sys
  from tempfile import NamedTemporaryFile


  def main():
      revision_build = os.environ['revision_build']
      job_name = os.environ['JOB_NAME']
      build_number = os.environ['BUILD_NUMBER']
      prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
          revision_build, job_name, build_number)
      s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
      command = [
          '$HOME/juju-ci-tools/run-deploy-job-remote.bash',
          revision_build,
          job_name,
          ]
      command.extend(sys.argv[2:])
      with NamedTemporaryFile() as config_file:
          json.dump({
              'command': command,
              'install': {},
              'artifacts': {'artifacts': [
                  'artifacts/machine*/*log*',
                  'artifacts/*.jenv',
+                 'artifacts/*.json',
                  ]},
              'bucket': 'juju-qa-data',
              }, config_file)
          config_file.flush()
          subprocess.check_call([
              'workspace-run', config_file.name, sys.argv[1], prefix,
              '--s3-config', s3_config, '-v',
              ])


  if __name__ == '__main__':
      main()
instruction: Add *.json to the list of artifacts backed up by Workspace Runner.
content:
## Code Before:
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile


def main():
    revision_build = os.environ['revision_build']
    job_name = os.environ['JOB_NAME']
    build_number = os.environ['BUILD_NUMBER']
    prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
        revision_build, job_name, build_number)
    s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
    command = [
        '$HOME/juju-ci-tools/run-deploy-job-remote.bash',
        revision_build,
        job_name,
        ]
    command.extend(sys.argv[2:])
    with NamedTemporaryFile() as config_file:
        json.dump({
            'command': command,
            'install': {},
            'artifacts': {'artifacts': [
                'artifacts/machine*/*log*',
                'artifacts/*.jenv',
                ]},
            'bucket': 'juju-qa-data',
            }, config_file)
        config_file.flush()
        subprocess.check_call([
            'workspace-run', config_file.name, sys.argv[1], prefix,
            '--s3-config', s3_config, '-v',
            ])


if __name__ == '__main__':
    main()
## Instruction:
Add *.json to the list of artifacts backed up by Workspace Runner.
## Code After:
import json
import os
from os.path import join
import subprocess
import sys
from tempfile import NamedTemporaryFile


def main():
    revision_build = os.environ['revision_build']
    job_name = os.environ['JOB_NAME']
    build_number = os.environ['BUILD_NUMBER']
    prefix = 'juju-ci/products/version-{}/{}/build-{}'.format(
        revision_build, job_name, build_number)
    s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
    command = [
        '$HOME/juju-ci-tools/run-deploy-job-remote.bash',
        revision_build,
        job_name,
        ]
    command.extend(sys.argv[2:])
    with NamedTemporaryFile() as config_file:
        json.dump({
            'command': command,
            'install': {},
            'artifacts': {'artifacts': [
                'artifacts/machine*/*log*',
                'artifacts/*.jenv',
                'artifacts/*.json',
                ]},
            'bucket': 'juju-qa-data',
            }, config_file)
        config_file.flush()
        subprocess.check_call([
            'workspace-run', config_file.name, sys.argv[1], prefix,
            '--s3-config', s3_config, '-v',
            ])


if __name__ == '__main__':
    main()
fuzzy_diff:
// ... existing code ...
                'artifacts/machine*/*log*',
                'artifacts/*.jenv',
                'artifacts/*.json',
                ]},
            'bucket': 'juju-qa-data',
// ... rest of the code ...
9c2951d794bb27952606cae77da1ebcd0d651e72
aiodownload/api.py
aiodownload/api.py
old_contents:
from aiodownload import AioDownloadBundle, AioDownload
import asyncio


def one(url, download=None):
    return [s for s in swarm([url], download=download)][0]


def swarm(urls, download=None):
    return [e for e in each(urls, download=download)]


def each(iterable, url_map=None, download=None):
    url_map = url_map or _url_map
    download = download or AioDownload()

    tasks = []
    for i in iterable:
        url = url_map(i)
        info = None if i == url else i
        tasks.append(
            download._loop.create_task(
                AioDownload(url, info=info)
            )
        )

    for task_set in download._loop.run_until_complete(asyncio.wait(tasks)):
        for task in task_set:
            yield task.result()


def _url_map(x):
    return str(x)
new_contents:
from aiodownload import AioDownloadBundle, AioDownload
import asyncio


def one(url, download=None):
    return [s for s in swarm([url], download=download)][0]


def swarm(urls, download=None):
    return [e for e in each(urls, download=download)]


def each(iterable, url_map=None, download=None):
    url_map = url_map or _url_map
    download = download or AioDownload()

    tasks = []
    for i in iterable:
        url = url_map(i)
        info = None if i == url else i
        tasks.append(
            download._loop.create_task(
                download.main(AioDownloadBundle(url, info=info))
            )
        )

    for task_set in download._loop.run_until_complete(asyncio.wait(tasks)):
        for task in task_set:
            yield task.result()


def _url_map(x):
    return str(x)
Fix - needed to provide create_task a function, not a class
Fix - needed to provide create_task a function, not a class
Python
mit
jelloslinger/aiodownload
ndiff:
  from aiodownload import AioDownloadBundle, AioDownload
  import asyncio


  def one(url, download=None):
      return [s for s in swarm([url], download=download)][0]


  def swarm(urls, download=None):
      return [e for e in each(urls, download=download)]


  def each(iterable, url_map=None, download=None):
      url_map = url_map or _url_map
      download = download or AioDownload()

      tasks = []
      for i in iterable:
          url = url_map(i)
          info = None if i == url else i
          tasks.append(
              download._loop.create_task(
-                 AioDownload(url, info=info)
+                 download.main(AioDownloadBundle(url, info=info))
              )
          )

      for task_set in download._loop.run_until_complete(asyncio.wait(tasks)):
          for task in task_set:
              yield task.result()


  def _url_map(x):
      return str(x)
instruction: Fix - needed to provide create_task a function, not a class
content:
## Code Before:
from aiodownload import AioDownloadBundle, AioDownload
import asyncio


def one(url, download=None):
    return [s for s in swarm([url], download=download)][0]


def swarm(urls, download=None):
    return [e for e in each(urls, download=download)]


def each(iterable, url_map=None, download=None):
    url_map = url_map or _url_map
    download = download or AioDownload()

    tasks = []
    for i in iterable:
        url = url_map(i)
        info = None if i == url else i
        tasks.append(
            download._loop.create_task(
                AioDownload(url, info=info)
            )
        )

    for task_set in download._loop.run_until_complete(asyncio.wait(tasks)):
        for task in task_set:
            yield task.result()


def _url_map(x):
    return str(x)
## Instruction:
Fix - needed to provide create_task a function, not a class
## Code After:
from aiodownload import AioDownloadBundle, AioDownload
import asyncio


def one(url, download=None):
    return [s for s in swarm([url], download=download)][0]


def swarm(urls, download=None):
    return [e for e in each(urls, download=download)]


def each(iterable, url_map=None, download=None):
    url_map = url_map or _url_map
    download = download or AioDownload()

    tasks = []
    for i in iterable:
        url = url_map(i)
        info = None if i == url else i
        tasks.append(
            download._loop.create_task(
                download.main(AioDownloadBundle(url, info=info))
            )
        )

    for task_set in download._loop.run_until_complete(asyncio.wait(tasks)):
        for task in task_set:
            yield task.result()


def _url_map(x):
    return str(x)
fuzzy_diff:
# ... existing code ...
        tasks.append(
            download._loop.create_task(
                download.main(AioDownloadBundle(url, info=info))
            )
        )
# ... rest of the code ...
b3b67fe0e68423fc2f85bccf1f20acdb779a38ba
pylxd/deprecated/tests/utils.py
pylxd/deprecated/tests/utils.py
old_contents:
from pylxd import api
from pylxd import exceptions as lxd_exceptions


def upload_image(image):
    alias = "{}/{}/{}/{}".format(
        image["os"], image["release"], image["arch"], image["variant"]
    )
    lxd = api.API()
    imgs = api.API(host="images.linuxcontainers.org")
    d = imgs.alias_show(alias)

    meta = d[1]["metadata"]
    tgt = meta["target"]

    try:
        lxd.alias_update(meta)
    except lxd_exceptions.APIError as ex:
        if ex.status_code == 404:
            lxd.alias_create(meta)

    return tgt


def delete_image(image):
    lxd = api.API()
    lxd.image_delete(image)
new_contents:
from pylxd import api


def delete_image(image):
    lxd = api.API()
    lxd.image_delete(image)
Remove unused testing utility function
message:
Remove unused testing utility function

Signed-off-by: Dougal Matthews <[email protected]>
Python
apache-2.0
lxc/pylxd,lxc/pylxd
ndiff:
  from pylxd import api
- from pylxd import exceptions as lxd_exceptions
-
-
- def upload_image(image):
-     alias = "{}/{}/{}/{}".format(
-         image["os"], image["release"], image["arch"], image["variant"]
-     )
-     lxd = api.API()
-     imgs = api.API(host="images.linuxcontainers.org")
-     d = imgs.alias_show(alias)
-
-     meta = d[1]["metadata"]
-     tgt = meta["target"]
-
-     try:
-         lxd.alias_update(meta)
-     except lxd_exceptions.APIError as ex:
-         if ex.status_code == 404:
-             lxd.alias_create(meta)
-
-     return tgt


  def delete_image(image):
      lxd = api.API()
      lxd.image_delete(image)
instruction: Remove unused testing utility function
content:
## Code Before:
from pylxd import api
from pylxd import exceptions as lxd_exceptions


def upload_image(image):
    alias = "{}/{}/{}/{}".format(
        image["os"], image["release"], image["arch"], image["variant"]
    )
    lxd = api.API()
    imgs = api.API(host="images.linuxcontainers.org")
    d = imgs.alias_show(alias)

    meta = d[1]["metadata"]
    tgt = meta["target"]

    try:
        lxd.alias_update(meta)
    except lxd_exceptions.APIError as ex:
        if ex.status_code == 404:
            lxd.alias_create(meta)

    return tgt


def delete_image(image):
    lxd = api.API()
    lxd.image_delete(image)
## Instruction:
Remove unused testing utility function
## Code After:
from pylxd import api


def delete_image(image):
    lxd = api.API()
    lxd.image_delete(image)
fuzzy_diff:
# ... existing code ...
from pylxd import api
# ... rest of the code ...
c24dc7db961b03c947a98454fc3e8655c5f938ff
functional_tests/test_all_users.py
functional_tests/test_all_users.py
old_contents:
from datetime import date

from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.utils import formats

from selenium import webdriver


class HomeNewVisitorTest(StaticLiveServerTestCase):

    def setUp(self):
        self.browser = webdriver.Firefox()
        self.browser.implicitly_wait(3)

    def tearDown(self):
        self.browser.quit()

    def get_full_url(self, namespace):
        return "{0}{1}".format(self.live_server_url, reverse(namespace))

    def test_home_title(self):
        self.browser.get(self.get_full_url("home"))
        self.assertIn("Alert", self.browser.title)

    def test_h1_css(self):
        self.browser.get(self.get_full_url("home"))
        h1 = self.browser.find_element_by_tag_name("h1")
        self.assertIn(h1.value_of_css_property(
            "color"), "rgba(200, 50, 255, 1)")

    def test_home_files(self):
        self.browser.get(self.live_server_url + "/robots.txt")
        self.assertNotIn("Not Found", self.browser.title)
        self.browser.get(self.live_server_url + "/humans.txt")
        self.assertNotIn("Not Found", self.browser.title)

    def test_localization(self):
        today = date.today()
        self.browser.get(self.get_full_url("home"))
        local_date = self.browser.find_element_by_id("local-date")
        self.assertEqual(formats.date_format(
            today, use_l10n=True), local_date.text)
new_contents:
from datetime import date

from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.utils import formats

from selenium import webdriver


class HomeNewVisitorTest(StaticLiveServerTestCase):

    def setUp(self):
        self.browser = webdriver.Firefox()
        self.browser.implicitly_wait(3)

    def tearDown(self):
        self.browser.quit()

    def get_full_url(self, namespace):
        return "{0}{1}".format(self.live_server_url, reverse(namespace))

    def test_home_title(self):
        self.browser.get(self.get_full_url("home"))
        self.assertIn("Alert", self.browser.title)

    def test_h2_css(self):
        self.browser.get(self.get_full_url("home"))
        h2 = self.browser.find_element_by_tag_name("h2")
        self.assertIn(h2.value_of_css_property(
            "color"), "rgba(0, 0, 0, 1)")

    def test_home_files(self):
        self.browser.get(self.live_server_url + "/robots.txt")
        self.assertNotIn("Not Found", self.browser.title)
        self.browser.get(self.live_server_url + "/humans.txt")
        self.assertNotIn("Not Found", self.browser.title)
Fix css and heading test also removed localization test as no longer required
Fix css and heading test also removed localization test as no longer required
Python
mit
iAmMrinal0/django_moviealert,iAmMrinal0/django_moviealert,iAmMrinal0/django_moviealert
from datetime import date from django.core.urlresolvers import reverse from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.utils import formats from selenium import webdriver class HomeNewVisitorTest(StaticLiveServerTestCase): def setUp(self): self.browser = webdriver.Firefox() self.browser.implicitly_wait(3) def tearDown(self): self.browser.quit() def get_full_url(self, namespace): return "{0}{1}".format(self.live_server_url, reverse(namespace)) def test_home_title(self): self.browser.get(self.get_full_url("home")) self.assertIn("Alert", self.browser.title) - def test_h1_css(self): + def test_h2_css(self): self.browser.get(self.get_full_url("home")) - h1 = self.browser.find_element_by_tag_name("h1") + h2 = self.browser.find_element_by_tag_name("h2") - self.assertIn(h1.value_of_css_property( + self.assertIn(h2.value_of_css_property( - "color"), "rgba(200, 50, 255, 1)") + "color"), "rgba(0, 0, 0, 1)") def test_home_files(self): self.browser.get(self.live_server_url + "/robots.txt") self.assertNotIn("Not Found", self.browser.title) self.browser.get(self.live_server_url + "/humans.txt") self.assertNotIn("Not Found", self.browser.title) - def test_localization(self): - today = date.today() - self.browser.get(self.get_full_url("home")) - local_date = self.browser.find_element_by_id("local-date") - self.assertEqual(formats.date_format( - today, use_l10n=True), local_date.text) -
Fix css and heading test also removed localization test as no longer required
## Code Before: from datetime import date from django.core.urlresolvers import reverse from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.utils import formats from selenium import webdriver class HomeNewVisitorTest(StaticLiveServerTestCase): def setUp(self): self.browser = webdriver.Firefox() self.browser.implicitly_wait(3) def tearDown(self): self.browser.quit() def get_full_url(self, namespace): return "{0}{1}".format(self.live_server_url, reverse(namespace)) def test_home_title(self): self.browser.get(self.get_full_url("home")) self.assertIn("Alert", self.browser.title) def test_h1_css(self): self.browser.get(self.get_full_url("home")) h1 = self.browser.find_element_by_tag_name("h1") self.assertIn(h1.value_of_css_property( "color"), "rgba(200, 50, 255, 1)") def test_home_files(self): self.browser.get(self.live_server_url + "/robots.txt") self.assertNotIn("Not Found", self.browser.title) self.browser.get(self.live_server_url + "/humans.txt") self.assertNotIn("Not Found", self.browser.title) def test_localization(self): today = date.today() self.browser.get(self.get_full_url("home")) local_date = self.browser.find_element_by_id("local-date") self.assertEqual(formats.date_format( today, use_l10n=True), local_date.text) ## Instruction: Fix css and heading test also removed localization test as no longer required ## Code After: from datetime import date from django.core.urlresolvers import reverse from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.utils import formats from selenium import webdriver class HomeNewVisitorTest(StaticLiveServerTestCase): def setUp(self): self.browser = webdriver.Firefox() self.browser.implicitly_wait(3) def tearDown(self): self.browser.quit() def get_full_url(self, namespace): return "{0}{1}".format(self.live_server_url, reverse(namespace)) def test_home_title(self): self.browser.get(self.get_full_url("home")) self.assertIn("Alert", self.browser.title) def test_h2_css(self): self.browser.get(self.get_full_url("home")) h2 = self.browser.find_element_by_tag_name("h2") self.assertIn(h2.value_of_css_property( "color"), "rgba(0, 0, 0, 1)") def test_home_files(self): self.browser.get(self.live_server_url + "/robots.txt") self.assertNotIn("Not Found", self.browser.title) self.browser.get(self.live_server_url + "/humans.txt") self.assertNotIn("Not Found", self.browser.title)
# ... existing code ... self.assertIn("Alert", self.browser.title) def test_h2_css(self): self.browser.get(self.get_full_url("home")) h2 = self.browser.find_element_by_tag_name("h2") self.assertIn(h2.value_of_css_property( "color"), "rgba(0, 0, 0, 1)") def test_home_files(self): # ... modified code ... self.browser.get(self.live_server_url + "/humans.txt") self.assertNotIn("Not Found", self.browser.title) # ... rest of the code ...
5748b1a7dc4a5be3b2b9da9959eabe586347078a
tensorflow_federated/python/program/value_reference.py
tensorflow_federated/python/program/value_reference.py
"""Defines the abstract interface for classes that reference values.""" import abc from typing import Any from tensorflow_federated.python.core.impl.types import typed_object class ValueReference(typed_object.TypedObject, metaclass=abc.ABCMeta): """An abstract interface for classes that reference values. This interfaces provides the capability to maniplutate values without requiring them to be materialized as Python objects. """ @abc.abstractmethod def get_value(self) -> Any: pass
import abc from typing import Union import numpy as np from tensorflow_federated.python.core.impl.types import typed_object class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta): """An abstract interface representing references to server placed values.""" @abc.abstractmethod def get_value(self) -> Union[np.generic, np.ndarray]: """Returns the referenced value as a numpy scalar or array.""" raise NotImplementedError
Update the Value Reference API to be more precise about the types of values being referenced.
Update the Value Reference API to be more precise about the types of values being referenced. PiperOrigin-RevId: 404647934
Python
apache-2.0
tensorflow/federated,tensorflow/federated,tensorflow/federated
- """Defines the abstract interface for classes that reference values.""" import abc - from typing import Any + from typing import Union + + import numpy as np from tensorflow_federated.python.core.impl.types import typed_object - class ValueReference(typed_object.TypedObject, metaclass=abc.ABCMeta): + class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta): + """An abstract interface representing references to server placed values.""" - """An abstract interface for classes that reference values. - - This interfaces provides the capability to maniplutate values without - requiring them to be materialized as Python objects. - """ @abc.abstractmethod - def get_value(self) -> Any: - pass + def get_value(self) -> Union[np.generic, np.ndarray]: + """Returns the referenced value as a numpy scalar or array.""" + raise NotImplementedError
Update the Value Reference API to be more precise about the types of values being referenced.
## Code Before: """Defines the abstract interface for classes that reference values.""" import abc from typing import Any from tensorflow_federated.python.core.impl.types import typed_object class ValueReference(typed_object.TypedObject, metaclass=abc.ABCMeta): """An abstract interface for classes that reference values. This interfaces provides the capability to maniplutate values without requiring them to be materialized as Python objects. """ @abc.abstractmethod def get_value(self) -> Any: pass ## Instruction: Update the Value Reference API to be more precise about the types of values being referenced. ## Code After: import abc from typing import Union import numpy as np from tensorflow_federated.python.core.impl.types import typed_object class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta): """An abstract interface representing references to server placed values.""" @abc.abstractmethod def get_value(self) -> Union[np.generic, np.ndarray]: """Returns the referenced value as a numpy scalar or array.""" raise NotImplementedError
... import abc from typing import Union import numpy as np from tensorflow_federated.python.core.impl.types import typed_object ... class ServerArrayReference(typed_object.TypedObject, metaclass=abc.ABCMeta): """An abstract interface representing references to server placed values.""" @abc.abstractmethod def get_value(self) -> Union[np.generic, np.ndarray]: """Returns the referenced value as a numpy scalar or array.""" raise NotImplementedError ...
b9b4089fcd7f26ebf339c568ba6454d538a1813e
zk_shell/cli.py
zk_shell/cli.py
from __future__ import print_function import argparse import logging import sys from . import __version__ from .shell import Shell try: raw_input except NameError: raw_input = input class CLI(object): def run(self): logging.basicConfig(level=logging.ERROR) params = self.get_params() s = Shell(params.hosts, params.connect_timeout, setup_readline=params.run_once == "") if params.run_once != "": sys.exit(0 if s.onecmd(params.run_once) == None else 1) intro = "Welcome to zk-shell (%s)" % (__version__) first = True while True: try: s.run(intro if first else None) except KeyboardInterrupt: done = raw_input("\nExit? (y|n) ") if done == "y": break first = False def get_params(self): parser = argparse.ArgumentParser() parser.add_argument("--connect-timeout", type=int, default=10, help="ZK connect timeout") parser.add_argument("--run-once", type=str, default="", help="Run a command non-interactively and exit") parser.add_argument("hosts", nargs="*", help="ZK hosts to connect") return parser.parse_args()
from __future__ import print_function import argparse import logging import sys from . import __version__ from .shell import Shell try: raw_input except NameError: raw_input = input class CLI(object): def run(self): logging.basicConfig(level=logging.ERROR) params = self.get_params() s = Shell(params.hosts, params.connect_timeout, setup_readline=params.run_once == "") if params.run_once != "": try: sys.exit(0 if s.onecmd(params.run_once) == None else 1) except IOError: sys.exit(1) intro = "Welcome to zk-shell (%s)" % (__version__) first = True while True: try: s.run(intro if first else None) except KeyboardInterrupt: done = raw_input("\nExit? (y|n) ") if done == "y": break first = False def get_params(self): parser = argparse.ArgumentParser() parser.add_argument("--connect-timeout", type=int, default=10, help="ZK connect timeout") parser.add_argument("--run-once", type=str, default="", help="Run a command non-interactively and exit") parser.add_argument("hosts", nargs="*", help="ZK hosts to connect") return parser.parse_args()
Handle IOError in run_once mode so paging works
Handle IOError in run_once mode so paging works Signed-off-by: Raul Gutierrez S <[email protected]>
Python
apache-2.0
harlowja/zk_shell,harlowja/zk_shell,rgs1/zk_shell,rgs1/zk_shell
from __future__ import print_function import argparse import logging import sys from . import __version__ from .shell import Shell try: raw_input except NameError: raw_input = input class CLI(object): def run(self): logging.basicConfig(level=logging.ERROR) params = self.get_params() s = Shell(params.hosts, params.connect_timeout, setup_readline=params.run_once == "") if params.run_once != "": + try: - sys.exit(0 if s.onecmd(params.run_once) == None else 1) + sys.exit(0 if s.onecmd(params.run_once) == None else 1) + except IOError: + sys.exit(1) intro = "Welcome to zk-shell (%s)" % (__version__) first = True while True: try: s.run(intro if first else None) except KeyboardInterrupt: done = raw_input("\nExit? (y|n) ") if done == "y": break first = False def get_params(self): parser = argparse.ArgumentParser() parser.add_argument("--connect-timeout", type=int, default=10, help="ZK connect timeout") parser.add_argument("--run-once", type=str, default="", help="Run a command non-interactively and exit") parser.add_argument("hosts", nargs="*", help="ZK hosts to connect") return parser.parse_args()
Handle IOError in run_once mode so paging works
## Code Before: from __future__ import print_function import argparse import logging import sys from . import __version__ from .shell import Shell try: raw_input except NameError: raw_input = input class CLI(object): def run(self): logging.basicConfig(level=logging.ERROR) params = self.get_params() s = Shell(params.hosts, params.connect_timeout, setup_readline=params.run_once == "") if params.run_once != "": sys.exit(0 if s.onecmd(params.run_once) == None else 1) intro = "Welcome to zk-shell (%s)" % (__version__) first = True while True: try: s.run(intro if first else None) except KeyboardInterrupt: done = raw_input("\nExit? (y|n) ") if done == "y": break first = False def get_params(self): parser = argparse.ArgumentParser() parser.add_argument("--connect-timeout", type=int, default=10, help="ZK connect timeout") parser.add_argument("--run-once", type=str, default="", help="Run a command non-interactively and exit") parser.add_argument("hosts", nargs="*", help="ZK hosts to connect") return parser.parse_args() ## Instruction: Handle IOError in run_once mode so paging works ## Code After: from __future__ import print_function import argparse import logging import sys from . import __version__ from .shell import Shell try: raw_input except NameError: raw_input = input class CLI(object): def run(self): logging.basicConfig(level=logging.ERROR) params = self.get_params() s = Shell(params.hosts, params.connect_timeout, setup_readline=params.run_once == "") if params.run_once != "": try: sys.exit(0 if s.onecmd(params.run_once) == None else 1) except IOError: sys.exit(1) intro = "Welcome to zk-shell (%s)" % (__version__) first = True while True: try: s.run(intro if first else None) except KeyboardInterrupt: done = raw_input("\nExit? (y|n) ") if done == "y": break first = False def get_params(self): parser = argparse.ArgumentParser() parser.add_argument("--connect-timeout", type=int, default=10, help="ZK connect timeout") parser.add_argument("--run-once", type=str, default="", help="Run a command non-interactively and exit") parser.add_argument("hosts", nargs="*", help="ZK hosts to connect") return parser.parse_args()
... if params.run_once != "": try: sys.exit(0 if s.onecmd(params.run_once) == None else 1) except IOError: sys.exit(1) intro = "Welcome to zk-shell (%s)" % (__version__) ...
8090fa9c072656497ff383e9b76d49af2955e420
examples/hopv/hopv_graph_conv.py
examples/hopv/hopv_graph_conv.py
from __future__ import print_function from __future__ import division from __future__ import unicode_literals import numpy as np from models import GraphConvTensorGraph np.random.seed(123) import tensorflow as tf tf.set_random_seed(123) import deepchem as dc from deepchem.molnet import load_hopv # Load HOPV dataset hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv') train_dataset, valid_dataset, test_dataset = hopv_datasets # Fit models metric = [ dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"), dc.metrics.Metric( dc.metrics.mean_absolute_error, np.mean, mode="regression") ] # Number of features on conv-mols n_feat = 75 # Batch size of models batch_size = 50 model = GraphConvTensorGraph( len(hopv_tasks), batch_size=batch_size, mode='regression') # Fit trained model model.fit(train_dataset, nb_epoch=25) print("Evaluating model") train_scores = model.evaluate(train_dataset, metric, transformers) valid_scores = model.evaluate(valid_dataset, metric, transformers) print("Train scores") print(train_scores) print("Validation scores") print(valid_scores)
from __future__ import print_function from __future__ import division from __future__ import unicode_literals import numpy as np from models import GraphConvModel np.random.seed(123) import tensorflow as tf tf.set_random_seed(123) import deepchem as dc from deepchem.molnet import load_hopv # Load HOPV dataset hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv') train_dataset, valid_dataset, test_dataset = hopv_datasets # Fit models metric = [ dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"), dc.metrics.Metric( dc.metrics.mean_absolute_error, np.mean, mode="regression") ] # Number of features on conv-mols n_feat = 75 # Batch size of models batch_size = 50 model = GraphConvModel( len(hopv_tasks), batch_size=batch_size, mode='regression') # Fit trained model model.fit(train_dataset, nb_epoch=25) print("Evaluating model") train_scores = model.evaluate(train_dataset, metric, transformers) valid_scores = model.evaluate(valid_dataset, metric, transformers) print("Train scores") print(train_scores) print("Validation scores") print(valid_scores)
Fix GraphConvTensorGraph to GraphConvModel in hopv example
Fix GraphConvTensorGraph to GraphConvModel in hopv example
Python
mit
Agent007/deepchem,lilleswing/deepchem,lilleswing/deepchem,Agent007/deepchem,peastman/deepchem,miaecle/deepchem,peastman/deepchem,ktaneishi/deepchem,miaecle/deepchem,Agent007/deepchem,deepchem/deepchem,ktaneishi/deepchem,deepchem/deepchem,ktaneishi/deepchem,miaecle/deepchem,lilleswing/deepchem
from __future__ import print_function from __future__ import division from __future__ import unicode_literals import numpy as np - from models import GraphConvTensorGraph + from models import GraphConvModel np.random.seed(123) import tensorflow as tf tf.set_random_seed(123) import deepchem as dc from deepchem.molnet import load_hopv # Load HOPV dataset hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv') train_dataset, valid_dataset, test_dataset = hopv_datasets # Fit models metric = [ dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"), dc.metrics.Metric( dc.metrics.mean_absolute_error, np.mean, mode="regression") ] # Number of features on conv-mols n_feat = 75 # Batch size of models batch_size = 50 - model = GraphConvTensorGraph( + model = GraphConvModel( len(hopv_tasks), batch_size=batch_size, mode='regression') # Fit trained model model.fit(train_dataset, nb_epoch=25) print("Evaluating model") train_scores = model.evaluate(train_dataset, metric, transformers) valid_scores = model.evaluate(valid_dataset, metric, transformers) print("Train scores") print(train_scores) print("Validation scores") print(valid_scores)
Fix GraphConvTensorGraph to GraphConvModel in hopv example
## Code Before: from __future__ import print_function from __future__ import division from __future__ import unicode_literals import numpy as np from models import GraphConvTensorGraph np.random.seed(123) import tensorflow as tf tf.set_random_seed(123) import deepchem as dc from deepchem.molnet import load_hopv # Load HOPV dataset hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv') train_dataset, valid_dataset, test_dataset = hopv_datasets # Fit models metric = [ dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"), dc.metrics.Metric( dc.metrics.mean_absolute_error, np.mean, mode="regression") ] # Number of features on conv-mols n_feat = 75 # Batch size of models batch_size = 50 model = GraphConvTensorGraph( len(hopv_tasks), batch_size=batch_size, mode='regression') # Fit trained model model.fit(train_dataset, nb_epoch=25) print("Evaluating model") train_scores = model.evaluate(train_dataset, metric, transformers) valid_scores = model.evaluate(valid_dataset, metric, transformers) print("Train scores") print(train_scores) print("Validation scores") print(valid_scores) ## Instruction: Fix GraphConvTensorGraph to GraphConvModel in hopv example ## Code After: from __future__ import print_function from __future__ import division from __future__ import unicode_literals import numpy as np from models import GraphConvModel np.random.seed(123) import tensorflow as tf tf.set_random_seed(123) import deepchem as dc from deepchem.molnet import load_hopv # Load HOPV dataset hopv_tasks, hopv_datasets, transformers = load_hopv(featurizer='GraphConv') train_dataset, valid_dataset, test_dataset = hopv_datasets # Fit models metric = [ dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"), dc.metrics.Metric( dc.metrics.mean_absolute_error, np.mean, mode="regression") ] # Number of features on conv-mols n_feat = 75 # Batch size of models batch_size = 50 model = GraphConvModel( len(hopv_tasks), batch_size=batch_size, mode='regression') # Fit trained model model.fit(train_dataset, nb_epoch=25) print("Evaluating model") train_scores = model.evaluate(train_dataset, metric, transformers) valid_scores = model.evaluate(valid_dataset, metric, transformers) print("Train scores") print(train_scores) print("Validation scores") print(valid_scores)
// ... existing code ... import numpy as np from models import GraphConvModel np.random.seed(123) // ... modified code ... # Batch size of models batch_size = 50 model = GraphConvModel( len(hopv_tasks), batch_size=batch_size, mode='regression') // ... rest of the code ...
c1b97bbc6fc0603c0f2a809175edf88cd1e4a207
setup.py
setup.py
from distutils.core import setup packages = [ 'upho', 'upho.phonon', 'upho.harmonic', 'upho.analysis', 'upho.structure', 'upho.irreps', 'upho.qpoints', 'group', ] scripts = [ 'scripts/upho_weights', 'scripts/upho_sf', 'scripts/qpoints', ] setup(name='upho', version='0.5.1', author="Yuji Ikeda", author_email="[email protected]", packages=packages, scripts=scripts, install_requires=['numpy'])
from distutils.core import setup packages = [ 'upho', 'upho.phonon', 'upho.harmonic', 'upho.analysis', 'upho.structure', 'upho.irreps', 'upho.qpoints', 'group', ] scripts = [ 'scripts/upho_weights', 'scripts/upho_sf', 'scripts/qpoints', ] setup(name='upho', version='0.5.1', author="Yuji Ikeda", author_email="[email protected]", packages=packages, scripts=scripts, install_requires=['numpy', 'h5py', 'phonopy'])
Add requirement of h5py and phonopy
Add requirement of h5py and phonopy
Python
mit
yuzie007/ph_unfolder,yuzie007/upho
from distutils.core import setup packages = [ 'upho', 'upho.phonon', 'upho.harmonic', 'upho.analysis', 'upho.structure', 'upho.irreps', 'upho.qpoints', 'group', ] scripts = [ 'scripts/upho_weights', 'scripts/upho_sf', 'scripts/qpoints', ] setup(name='upho', version='0.5.1', author="Yuji Ikeda", author_email="[email protected]", packages=packages, scripts=scripts, - install_requires=['numpy']) + install_requires=['numpy', 'h5py', 'phonopy'])
Add requirement of h5py and phonopy
## Code Before: from distutils.core import setup packages = [ 'upho', 'upho.phonon', 'upho.harmonic', 'upho.analysis', 'upho.structure', 'upho.irreps', 'upho.qpoints', 'group', ] scripts = [ 'scripts/upho_weights', 'scripts/upho_sf', 'scripts/qpoints', ] setup(name='upho', version='0.5.1', author="Yuji Ikeda", author_email="[email protected]", packages=packages, scripts=scripts, install_requires=['numpy']) ## Instruction: Add requirement of h5py and phonopy ## Code After: from distutils.core import setup packages = [ 'upho', 'upho.phonon', 'upho.harmonic', 'upho.analysis', 'upho.structure', 'upho.irreps', 'upho.qpoints', 'group', ] scripts = [ 'scripts/upho_weights', 'scripts/upho_sf', 'scripts/qpoints', ] setup(name='upho', version='0.5.1', author="Yuji Ikeda", author_email="[email protected]", packages=packages, scripts=scripts, install_requires=['numpy', 'h5py', 'phonopy'])
// ... existing code ... packages=packages, scripts=scripts, install_requires=['numpy', 'h5py', 'phonopy']) // ... rest of the code ...
9217bfc6bab0d152e33d9fda60218c404b61d064
cmd2/__init__.py
cmd2/__init__.py
from .cmd2 import __version__, Cmd, CmdResult, Statement, categorize from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category
from .cmd2 import __version__, Cmd, CmdResult, Statement, EmptyStatement, categorize from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category
Add EmptyStatement exception to default imports
Add EmptyStatement exception to default imports
Python
mit
python-cmd2/cmd2,python-cmd2/cmd2
- from .cmd2 import __version__, Cmd, CmdResult, Statement, categorize + from .cmd2 import __version__, Cmd, CmdResult, Statement, EmptyStatement, categorize from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category
Add EmptyStatement exception to default imports
## Code Before: from .cmd2 import __version__, Cmd, CmdResult, Statement, categorize from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category ## Instruction: Add EmptyStatement exception to default imports ## Code After: from .cmd2 import __version__, Cmd, CmdResult, Statement, EmptyStatement, categorize from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category
// ... existing code ... from .cmd2 import __version__, Cmd, CmdResult, Statement, EmptyStatement, categorize from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category // ... rest of the code ...
dd8c85a49a31693f43e6f6877a0657d63cbc1b01
auth0/v2/device_credentials.py
auth0/v2/device_credentials.py
from .rest import RestClient class DeviceCredentials(object): """Auth0 connection endpoints Args: domain (str): Your Auth0 domain, e.g: 'username.auth0.com' jwt_token (str): An API token created with your account's global keys. You can create one by using the token generator in the API Explorer: https://auth0.com/docs/api/v2 """ def __init__(self, domain, jwt_token): self.domain = domain self.client = RestClient(jwt=jwt_token) def _url(self, id=None): url = 'https://%s/api/v2/device-credentials' % self.domain if id is not None: return url + '/' + id return url def get(self, user_id=None, client_id=None, type=None, fields=[], include_fields=True): params = { 'fields': ','.join(fields) or None, 'include_fields': str(include_fields).lower(), 'user_id': user_id, 'client_id': client_id, 'type': type, } return self.client.get(self._url(), params=params) def create(self, body): return self.client.post(self._url(), data=body) def delete(self, id): return self.client.delete(self._url(id))
from .rest import RestClient class DeviceCredentials(object): """Auth0 connection endpoints Args: domain (str): Your Auth0 domain, e.g: 'username.auth0.com' jwt_token (str): An API token created with your account's global keys. You can create one by using the token generator in the API Explorer: https://auth0.com/docs/api/v2 """ def __init__(self, domain, jwt_token): self.domain = domain self.client = RestClient(jwt=jwt_token) def _url(self, id=None): url = 'https://%s/api/v2/device-credentials' % self.domain if id is not None: return url + '/' + id return url def get(self, user_id, client_id, type, fields=[], include_fields=True): params = { 'fields': ','.join(fields) or None, 'include_fields': str(include_fields).lower(), 'user_id': user_id, 'client_id': client_id, 'type': type, } return self.client.get(self._url(), params=params) def create(self, body): return self.client.post(self._url(), data=body) def delete(self, id): return self.client.delete(self._url(id))
Remove default arguments for user_id, client_id and type
Remove default arguments for user_id, client_id and type
Python
mit
auth0/auth0-python,auth0/auth0-python
from .rest import RestClient class DeviceCredentials(object): """Auth0 connection endpoints Args: domain (str): Your Auth0 domain, e.g: 'username.auth0.com' jwt_token (str): An API token created with your account's global keys. You can create one by using the token generator in the API Explorer: https://auth0.com/docs/api/v2 """ def __init__(self, domain, jwt_token): self.domain = domain self.client = RestClient(jwt=jwt_token) def _url(self, id=None): url = 'https://%s/api/v2/device-credentials' % self.domain if id is not None: return url + '/' + id return url + def get(self, user_id, client_id, type, fields=[], include_fields=True): - def get(self, user_id=None, client_id=None, type=None, - fields=[], include_fields=True): params = { 'fields': ','.join(fields) or None, 'include_fields': str(include_fields).lower(), 'user_id': user_id, 'client_id': client_id, 'type': type, } return self.client.get(self._url(), params=params) def create(self, body): return self.client.post(self._url(), data=body) def delete(self, id): return self.client.delete(self._url(id))
Remove default arguments for user_id, client_id and type
## Code Before: from .rest import RestClient class DeviceCredentials(object): """Auth0 connection endpoints Args: domain (str): Your Auth0 domain, e.g: 'username.auth0.com' jwt_token (str): An API token created with your account's global keys. You can create one by using the token generator in the API Explorer: https://auth0.com/docs/api/v2 """ def __init__(self, domain, jwt_token): self.domain = domain self.client = RestClient(jwt=jwt_token) def _url(self, id=None): url = 'https://%s/api/v2/device-credentials' % self.domain if id is not None: return url + '/' + id return url def get(self, user_id=None, client_id=None, type=None, fields=[], include_fields=True): params = { 'fields': ','.join(fields) or None, 'include_fields': str(include_fields).lower(), 'user_id': user_id, 'client_id': client_id, 'type': type, } return self.client.get(self._url(), params=params) def create(self, body): return self.client.post(self._url(), data=body) def delete(self, id): return self.client.delete(self._url(id)) ## Instruction: Remove default arguments for user_id, client_id and type ## Code After: from .rest import RestClient class DeviceCredentials(object): """Auth0 connection endpoints Args: domain (str): Your Auth0 domain, e.g: 'username.auth0.com' jwt_token (str): An API token created with your account's global keys. You can create one by using the token generator in the API Explorer: https://auth0.com/docs/api/v2 """ def __init__(self, domain, jwt_token): self.domain = domain self.client = RestClient(jwt=jwt_token) def _url(self, id=None): url = 'https://%s/api/v2/device-credentials' % self.domain if id is not None: return url + '/' + id return url def get(self, user_id, client_id, type, fields=[], include_fields=True): params = { 'fields': ','.join(fields) or None, 'include_fields': str(include_fields).lower(), 'user_id': user_id, 'client_id': client_id, 'type': type, } return self.client.get(self._url(), params=params) def create(self, body): return self.client.post(self._url(), data=body) def delete(self, id): return self.client.delete(self._url(id))
// ... existing code ... return url def get(self, user_id, client_id, type, fields=[], include_fields=True): params = { 'fields': ','.join(fields) or None, // ... rest of the code ...
3e98ed8801d380b6ab40156b1f20a1f9fe23a755
books/views.py
books/views.py
from rest_framework import viewsets from books.models import BookPage from books.serializers import BookPageSerializer class BookPageViewSet(viewsets.ModelViewSet): """ API endpoint that allows BookPages to be viewed or edited. """ queryset = BookPage.objects.all() serializer_class = BookPageSerializer
from rest_framework import viewsets from books.models import BookPage from books.serializers import BookPageSerializer class BookPageViewSet(viewsets.ModelViewSet): """ API endpoint that allows BookPages to be viewed or edited. """ queryset = BookPage.objects.order_by('page_number') serializer_class = BookPageSerializer
Order book pages by page number.
Order book pages by page number.
Python
mit
Pepedou/Famas
from rest_framework import viewsets from books.models import BookPage from books.serializers import BookPageSerializer class BookPageViewSet(viewsets.ModelViewSet): """ API endpoint that allows BookPages to be viewed or edited. """ - queryset = BookPage.objects.all() + queryset = BookPage.objects.order_by('page_number') serializer_class = BookPageSerializer
Order book pages by page number.
## Code Before: from rest_framework import viewsets from books.models import BookPage from books.serializers import BookPageSerializer class BookPageViewSet(viewsets.ModelViewSet): """ API endpoint that allows BookPages to be viewed or edited. """ queryset = BookPage.objects.all() serializer_class = BookPageSerializer ## Instruction: Order book pages by page number. ## Code After: from rest_framework import viewsets from books.models import BookPage from books.serializers import BookPageSerializer class BookPageViewSet(viewsets.ModelViewSet): """ API endpoint that allows BookPages to be viewed or edited. """ queryset = BookPage.objects.order_by('page_number') serializer_class = BookPageSerializer
// ... existing code ... API endpoint that allows BookPages to be viewed or edited. """ queryset = BookPage.objects.order_by('page_number') serializer_class = BookPageSerializer // ... rest of the code ...
d7ebf5c6db9b73133915aabb3dbd9c5b283f9982
ooni/tests/test_trueheaders.py
ooni/tests/test_trueheaders.py
from twisted.trial import unittest from ooni.utils.txagentwithsocks import TrueHeaders dummy_headers_dict = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'] } dummy_headers_dict2 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header3': ['ValueA', 'ValueB'], } dummy_headers_dict3 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header4': ['ValueA', 'ValueB'], } class TestTrueHeaders(unittest.TestCase): def test_names_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set()) def test_names_not_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3'])) th = TrueHeaders(dummy_headers_dict3) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4'])) def test_names_match_expect_ignore(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
from twisted.trial import unittest from ooni.utils.trueheaders import TrueHeaders dummy_headers_dict = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'] } dummy_headers_dict2 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header3': ['ValueA', 'ValueB'], } dummy_headers_dict3 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header4': ['ValueA', 'ValueB'], } class TestTrueHeaders(unittest.TestCase): def test_names_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set()) def test_names_not_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3'])) th = TrueHeaders(dummy_headers_dict3) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4'])) def test_names_match_expect_ignore(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
Fix unittest for true headers..
Fix unittest for true headers..
Python
bsd-2-clause
kdmurray91/ooni-probe,kdmurray91/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe
from twisted.trial import unittest - from ooni.utils.txagentwithsocks import TrueHeaders + from ooni.utils.trueheaders import TrueHeaders dummy_headers_dict = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'] } dummy_headers_dict2 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header3': ['ValueA', 'ValueB'], } dummy_headers_dict3 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header4': ['ValueA', 'ValueB'], } class TestTrueHeaders(unittest.TestCase): def test_names_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set()) def test_names_not_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3'])) th = TrueHeaders(dummy_headers_dict3) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4'])) def test_names_match_expect_ignore(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
Fix unittest for true headers..
## Code Before: from twisted.trial import unittest from ooni.utils.txagentwithsocks import TrueHeaders dummy_headers_dict = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'] } dummy_headers_dict2 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header3': ['ValueA', 'ValueB'], } dummy_headers_dict3 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header4': ['ValueA', 'ValueB'], } class TestTrueHeaders(unittest.TestCase): def test_names_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set()) def test_names_not_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3'])) th = TrueHeaders(dummy_headers_dict3) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4'])) def test_names_match_expect_ignore(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set()) ## Instruction: Fix unittest for true headers.. ## Code After: from twisted.trial import unittest from ooni.utils.trueheaders import TrueHeaders dummy_headers_dict = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'] } dummy_headers_dict2 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header3': ['ValueA', 'ValueB'], } dummy_headers_dict3 = { 'Header1': ['Value1', 'Value2'], 'Header2': ['ValueA', 'ValueB'], 'Header4': ['ValueA', 'ValueB'], } class TestTrueHeaders(unittest.TestCase): def test_names_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict)), set()) def test_names_not_match(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3'])) th = TrueHeaders(dummy_headers_dict3) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2)), set(['Header3', 'Header4'])) def test_names_match_expect_ignore(self): th = TrueHeaders(dummy_headers_dict) self.assertEqual(th.getDiff(TrueHeaders(dummy_headers_dict2), ignore=['Header3']), set())
# ... existing code ... from twisted.trial import unittest from ooni.utils.trueheaders import TrueHeaders dummy_headers_dict = { # ... rest of the code ...
5b7a1a40ea43834feb5563f566d07bd5b31c589d
tests/test-recipes/metadata/always_include_files_glob/run_test.py
tests/test-recipes/metadata/always_include_files_glob/run_test.py
import os import sys import json def main(): prefix = os.environ['PREFIX'] info_file = os.path.join(prefix, 'conda-meta', 'always_include_files_regex-0.1-0.json') with open(info_file, 'r') as fh: info = json.load(fh) if sys.platform == 'darwin': assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'} elif sys.platform.startswith('linux'): assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'} if __name__ == '__main__': main()
import os import sys import json def main(): prefix = os.environ['PREFIX'] info_file = os.path.join(prefix, 'conda-meta', 'always_include_files_regex-0.1-0.json') with open(info_file, 'r') as fh: info = json.load(fh) if sys.platform == 'darwin': assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files'] elif sys.platform.startswith('linux'): assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files'] if __name__ == '__main__': main()
Add error messages to the asserts
Add error messages to the asserts
Python
bsd-3-clause
ilastik/conda-build,shastings517/conda-build,frol/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,ilastik/conda-build,sandhujasmine/conda-build,rmcgibbo/conda-build,sandhujasmine/conda-build,shastings517/conda-build,rmcgibbo/conda-build,shastings517/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,ilastik/conda-build,rmcgibbo/conda-build,sandhujasmine/conda-build,frol/conda-build,frol/conda-build
import os import sys import json def main(): prefix = os.environ['PREFIX'] info_file = os.path.join(prefix, 'conda-meta', 'always_include_files_regex-0.1-0.json') with open(info_file, 'r') as fh: info = json.load(fh) if sys.platform == 'darwin': - assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'} + assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files'] elif sys.platform.startswith('linux'): - assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'} + assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files'] if __name__ == '__main__': main()
Add error messages to the asserts
## Code Before: import os import sys import json def main(): prefix = os.environ['PREFIX'] info_file = os.path.join(prefix, 'conda-meta', 'always_include_files_regex-0.1-0.json') with open(info_file, 'r') as fh: info = json.load(fh) if sys.platform == 'darwin': assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'} elif sys.platform.startswith('linux'): assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'} if __name__ == '__main__': main() ## Instruction: Add error messages to the asserts ## Code After: import os import sys import json def main(): prefix = os.environ['PREFIX'] info_file = os.path.join(prefix, 'conda-meta', 'always_include_files_regex-0.1-0.json') with open(info_file, 'r') as fh: info = json.load(fh) if sys.platform == 'darwin': assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files'] elif sys.platform.startswith('linux'): assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files'] if __name__ == '__main__': main()
# ... existing code ... if sys.platform == 'darwin': assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files'] elif sys.platform.startswith('linux'): assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files'] if __name__ == '__main__': # ... rest of the code ...
08812c8507fac2c57bd143dd7aad4c54d5c0aa75
panoptes_client/user.py
panoptes_client/user.py
from __future__ import absolute_import, division, print_function from panoptes_client.panoptes import PanoptesObject, LinkResolver from panoptes_client.utils import isiterable, split class User(PanoptesObject): _api_slug = 'users' _link_slug = 'users' _edit_attributes = ( 'valid_email', ) @classmethod def where(cls, **kwargs): email = kwargs.get('email') if not email: for user in super(User, cls).where(**kwargs): yield user return if not isiterable(email): email = [email] for batch in split(email, 50): kwargs['email'] = ",".join(batch) for user in super(User, cls).where(**kwargs): yield user @property def avatar(self): """ A dict containing metadata about the user's avatar. """ return User.http_get('{}/avatar'.format(self.id))[0] LinkResolver.register(User) LinkResolver.register(User, 'owner')
from __future__ import absolute_import, division, print_function from panoptes_client.panoptes import PanoptesObject, LinkResolver from panoptes_client.utils import isiterable, split class User(PanoptesObject): _api_slug = 'users' _link_slug = 'users' _edit_attributes = ( 'valid_email', ) @classmethod def where(cls, **kwargs): email = kwargs.get('email') login = kwargs.get('login') if email and login: raise ValueError( 'Queries are supported on at most ONE of email and login' ) if email: if not isiterable(email): email = [email] for batch in split(email, 50): kwargs['email'] = ",".join(batch) for user in super(User, cls).where(**kwargs): yield user elif login: if not isiterable(login): login = [login] for batch in split(login, 50): kwargs['login'] = ",".join(batch) for user in super(User, cls).where(**kwargs): yield user else: for user in super(User, cls).where(**kwargs): yield user @property def avatar(self): """ A dict containing metadata about the user's avatar. """ return User.http_get('{}/avatar'.format(self.id))[0] LinkResolver.register(User) LinkResolver.register(User, 'owner')
Allow batched User lookups by login name
Allow batched User lookups by login name
Python
apache-2.0
zooniverse/panoptes-python-client
from __future__ import absolute_import, division, print_function from panoptes_client.panoptes import PanoptesObject, LinkResolver from panoptes_client.utils import isiterable, split class User(PanoptesObject): _api_slug = 'users' _link_slug = 'users' _edit_attributes = ( 'valid_email', ) @classmethod def where(cls, **kwargs): email = kwargs.get('email') + login = kwargs.get('login') - if not email: - for user in super(User, cls).where(**kwargs): - yield user - return - if not isiterable(email): - email = [email] + if email and login: + raise ValueError( + 'Queries are supported on at most ONE of email and login' + ) + if email: + if not isiterable(email): + email = [email] + - for batch in split(email, 50): + for batch in split(email, 50): - kwargs['email'] = ",".join(batch) + kwargs['email'] = ",".join(batch) + for user in super(User, cls).where(**kwargs): + yield user + + elif login: + if not isiterable(login): + login = [login] + + for batch in split(login, 50): + kwargs['login'] = ",".join(batch) + for user in super(User, cls).where(**kwargs): + yield user + + else: for user in super(User, cls).where(**kwargs): yield user @property def avatar(self): """ A dict containing metadata about the user's avatar. """ return User.http_get('{}/avatar'.format(self.id))[0] LinkResolver.register(User) LinkResolver.register(User, 'owner')
Allow batched User lookups by login name
## Code Before: from __future__ import absolute_import, division, print_function from panoptes_client.panoptes import PanoptesObject, LinkResolver from panoptes_client.utils import isiterable, split class User(PanoptesObject): _api_slug = 'users' _link_slug = 'users' _edit_attributes = ( 'valid_email', ) @classmethod def where(cls, **kwargs): email = kwargs.get('email') if not email: for user in super(User, cls).where(**kwargs): yield user return if not isiterable(email): email = [email] for batch in split(email, 50): kwargs['email'] = ",".join(batch) for user in super(User, cls).where(**kwargs): yield user @property def avatar(self): """ A dict containing metadata about the user's avatar. """ return User.http_get('{}/avatar'.format(self.id))[0] LinkResolver.register(User) LinkResolver.register(User, 'owner') ## Instruction: Allow batched User lookups by login name ## Code After: from __future__ import absolute_import, division, print_function from panoptes_client.panoptes import PanoptesObject, LinkResolver from panoptes_client.utils import isiterable, split class User(PanoptesObject): _api_slug = 'users' _link_slug = 'users' _edit_attributes = ( 'valid_email', ) @classmethod def where(cls, **kwargs): email = kwargs.get('email') login = kwargs.get('login') if email and login: raise ValueError( 'Queries are supported on at most ONE of email and login' ) if email: if not isiterable(email): email = [email] for batch in split(email, 50): kwargs['email'] = ",".join(batch) for user in super(User, cls).where(**kwargs): yield user elif login: if not isiterable(login): login = [login] for batch in split(login, 50): kwargs['login'] = ",".join(batch) for user in super(User, cls).where(**kwargs): yield user else: for user in super(User, cls).where(**kwargs): yield user @property def avatar(self): """ A dict containing metadata about the user's avatar. """ return User.http_get('{}/avatar'.format(self.id))[0] LinkResolver.register(User) LinkResolver.register(User, 'owner')
// ... existing code ... def where(cls, **kwargs): email = kwargs.get('email') login = kwargs.get('login') if email and login: raise ValueError( 'Queries are supported on at most ONE of email and login' ) if email: if not isiterable(email): email = [email] for batch in split(email, 50): kwargs['email'] = ",".join(batch) for user in super(User, cls).where(**kwargs): yield user elif login: if not isiterable(login): login = [login] for batch in split(login, 50): kwargs['login'] = ",".join(batch) for user in super(User, cls).where(**kwargs): yield user else: for user in super(User, cls).where(**kwargs): yield user // ... rest of the code ...
6caca3259f4ec8f298b1d35f15e4492efbcff6b1
tests/basics/dict1.py
tests/basics/dict1.py
d = {} print(d) d[2] = 123 print(d) d = {1:2} d[3] = 3 print(len(d), d[1], d[3]) d[1] = 0 print(len(d), d[1], d[3]) print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}') x = 1 while x < 100: d[x] = x x += 1 print(d[50]) # equality operator on dicts of different size print({} == {1:1}) # equality operator on dicts of same size but with different keys print({1:1} == {2:1}) # value not found try: {}[0] except KeyError: print('KeyError') # unsupported unary op try: +{} except TypeError: print('TypeError') # unsupported binary op try: {} + {} except TypeError: print('TypeError')
d = {} print(d) d[2] = 123 print(d) d = {1:2} d[3] = 3 print(len(d), d[1], d[3]) d[1] = 0 print(len(d), d[1], d[3]) print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}') x = 1 while x < 100: d[x] = x x += 1 print(d[50]) # equality operator on dicts of different size print({} == {1:1}) # equality operator on dicts of same size but with different keys print({1:1} == {2:1}) # value not found try: {}[0] except KeyError as er: print('KeyError', er, repr(er), er.args) # unsupported unary op try: +{} except TypeError: print('TypeError') # unsupported binary op try: {} + {} except TypeError: print('TypeError')
Add test to print full KeyError exc from failed dict lookup.
tests: Add test to print full KeyError exc from failed dict lookup.
Python
mit
jmarcelino/pycom-micropython,alex-march/micropython,hiway/micropython,AriZuu/micropython,chrisdearman/micropython,kerneltask/micropython,jmarcelino/pycom-micropython,selste/micropython,tuc-osg/micropython,blazewicz/micropython,oopy/micropython,ryannathans/micropython,micropython/micropython-esp32,trezor/micropython,infinnovation/micropython,MrSurly/micropython,puuu/micropython,adafruit/micropython,torwag/micropython,pfalcon/micropython,micropython/micropython-esp32,AriZuu/micropython,ryannathans/micropython,pozetroninc/micropython,tuc-osg/micropython,pozetroninc/micropython,cwyark/micropython,dmazzella/micropython,pramasoul/micropython,tobbad/micropython,lowRISC/micropython,HenrikSolver/micropython,TDAbboud/micropython,pramasoul/micropython,infinnovation/micropython,puuu/micropython,blazewicz/micropython,SHA2017-badge/micropython-esp32,infinnovation/micropython,selste/micropython,AriZuu/micropython,adafruit/micropython,swegener/micropython,mhoffma/micropython,adafruit/circuitpython,mhoffma/micropython,oopy/micropython,deshipu/micropython,trezor/micropython,PappaPeppar/micropython,jmarcelino/pycom-micropython,pfalcon/micropython,tobbad/micropython,ryannathans/micropython,tobbad/micropython,bvernoux/micropython,chrisdearman/micropython,pozetroninc/micropython,blazewicz/micropython,HenrikSolver/micropython,hiway/micropython,torwag/micropython,ryannathans/micropython,AriZuu/micropython,henriknelson/micropython,henriknelson/micropython,mhoffma/micropython,dmazzella/micropython,PappaPeppar/micropython,Timmenem/micropython,mhoffma/micropython,blazewicz/micropython,infinnovation/micropython,oopy/micropython,tralamazza/micropython,dxxb/micropython,TDAbboud/micropython,puuu/micropython,chrisdearman/micropython,PappaPeppar/micropython,Timmenem/micropython,alex-march/micropython,pozetroninc/micropython,TDAbboud/micropython,Peetz0r/micropython-esp32,HenrikSolver/micropython,pramasoul/micropython,TDAbboud/micropython,HenrikSolver/micropython,AriZuu/micropython,oopy/micropython,alex-march/micropython,pramasoul/micropython,tobbad/micropython,alex-robbins/micropython,kerneltask/micropython,pfalcon/micropython,henriknelson/micropython,pfalcon/micropython,adafruit/circuitpython,torwag/micropython,Timmenem/micropython,cwyark/micropython,tuc-osg/micropython,tuc-osg/micropython,MrSurly/micropython,toolmacher/micropython,SHA2017-badge/micropython-esp32,henriknelson/micropython,adafruit/circuitpython,pozetroninc/micropython,micropython/micropython-esp32,alex-robbins/micropython,alex-robbins/micropython,adafruit/micropython,SHA2017-badge/micropython-esp32,dmazzella/micropython,Peetz0r/micropython-esp32,puuu/micropython,swegener/micropython,dxxb/micropython,tuc-osg/micropython,adafruit/micropython,lowRISC/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,MrSurly/micropython-esp32,hosaka/micropython,bvernoux/micropython,selste/micropython,PappaPeppar/micropython,matthewelse/micropython,matthewelse/micropython,trezor/micropython,MrSurly/micropython-esp32,hiway/micropython,SHA2017-badge/micropython-esp32,MrSurly/micropython,adafruit/circuitpython,hiway/micropython,blazewicz/micropython,kerneltask/micropython,henriknelson/micropython,Peetz0r/micropython-esp32,selste/micropython,kerneltask/micropython,MrSurly/micropython,micropython/micropython-esp32,alex-march/micropython,pfalcon/micropython,matthewelse/micropython,alex-robbins/micropython,toolmacher/micropython,puuu/micropython,toolmacher/micropython,tralamazza/micropython,torwag/micropython,hosaka/micropython,hosaka/micropython,alex-march/micropython
,trezor/micropython,Timmenem/micropython,hosaka/micropython,ryannathans/micropython,swegener/micropython,jmarcelino/pycom-micropython,mhoffma/micropython,Peetz0r/micropython-esp32,dxxb/micropython,Peetz0r/micropython-esp32,swegener/micropython,toolmacher/micropython,torwag/micropython,deshipu/micropython,deshipu/micropython,adafruit/circuitpython,dxxb/micropython,lowRISC/micropython,cwyark/micropython,Timmenem/micropython,matthewelse/micropython,MrSurly/micropython-esp32,tralamazza/micropython,oopy/micropython,MrSurly/micropython,chrisdearman/micropython,dxxb/micropython,tralamazza/micropython,bvernoux/micropython,hiway/micropython,deshipu/micropython,matthewelse/micropython,toolmacher/micropython,hosaka/micropython,HenrikSolver/micropython,TDAbboud/micropython,tobbad/micropython,swegener/micropython,adafruit/circuitpython,infinnovation/micropython,cwyark/micropython,bvernoux/micropython,adafruit/micropython,trezor/micropython,MrSurly/micropython-esp32,dmazzella/micropython,lowRISC/micropython,kerneltask/micropython,SHA2017-badge/micropython-esp32,lowRISC/micropython,deshipu/micropython,chrisdearman/micropython,matthewelse/micropython,cwyark/micropython,selste/micropython,alex-robbins/micropython,PappaPeppar/micropython,jmarcelino/pycom-micropython,pramasoul/micropython,bvernoux/micropython
d = {} print(d) d[2] = 123 print(d) d = {1:2} d[3] = 3 print(len(d), d[1], d[3]) d[1] = 0 print(len(d), d[1], d[3]) print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}') x = 1 while x < 100: d[x] = x x += 1 print(d[50]) # equality operator on dicts of different size print({} == {1:1}) # equality operator on dicts of same size but with different keys print({1:1} == {2:1}) # value not found try: {}[0] - except KeyError: + except KeyError as er: - print('KeyError') + print('KeyError', er, repr(er), er.args) # unsupported unary op try: +{} except TypeError: print('TypeError') # unsupported binary op try: {} + {} except TypeError: print('TypeError')
Add test to print full KeyError exc from failed dict lookup.
## Code Before: d = {} print(d) d[2] = 123 print(d) d = {1:2} d[3] = 3 print(len(d), d[1], d[3]) d[1] = 0 print(len(d), d[1], d[3]) print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}') x = 1 while x < 100: d[x] = x x += 1 print(d[50]) # equality operator on dicts of different size print({} == {1:1}) # equality operator on dicts of same size but with different keys print({1:1} == {2:1}) # value not found try: {}[0] except KeyError: print('KeyError') # unsupported unary op try: +{} except TypeError: print('TypeError') # unsupported binary op try: {} + {} except TypeError: print('TypeError') ## Instruction: Add test to print full KeyError exc from failed dict lookup. ## Code After: d = {} print(d) d[2] = 123 print(d) d = {1:2} d[3] = 3 print(len(d), d[1], d[3]) d[1] = 0 print(len(d), d[1], d[3]) print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}') x = 1 while x < 100: d[x] = x x += 1 print(d[50]) # equality operator on dicts of different size print({} == {1:1}) # equality operator on dicts of same size but with different keys print({1:1} == {2:1}) # value not found try: {}[0] except KeyError as er: print('KeyError', er, repr(er), er.args) # unsupported unary op try: +{} except TypeError: print('TypeError') # unsupported binary op try: {} + {} except TypeError: print('TypeError')
... try: {}[0] except KeyError as er: print('KeyError', er, repr(er), er.args) # unsupported unary op ...
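An aside on the behaviour this record tests: CPython exposes the missing key through the exception object itself, so the new print call shows the key three ways. A minimal standalone sketch, using only built-ins:

# Catch a failed dict lookup and inspect the exception object.
try:
    {}[0]
except KeyError as er:
    # str(er) is the missing key, repr(er) is KeyError(0),
    # and er.args holds the key as a 1-tuple.
    print('KeyError', er, repr(er), er.args)   # KeyError 0 KeyError(0) (0,)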
53d5f47c828bec78e7241cb9e3d4f614dd18e6f9
responder.py
responder.py
import random import yaml from flask import jsonify, Response, render_template class Which(object): def __init__(self, mime_type, args): self.mime_type = mime_type self.args = args @property def _excuse(self): stream = open("excuses.yaml", 'r') excuses = yaml.load(stream) return random.choice(excuses["excuses"]) def get_response(self): if self.mime_type == "application/json": return jsonify({ "excuse": self._excuse }), "/json/" elif self.mime_type == "application/xml": return Response( render_template('xml.xml', excuse=self._excuse), mimetype='text/xml' ), "/xml/" elif self.mime_type == "application/javascript" or "jsonp" in self.args: return Response( render_template('jsonp.js', excuse=self._excuse), mimetype='application/javascript' ), "/jsonp/" elif self.mime_type == "text/plain": return Response("Hello world", mimetype='text/plain'), "/text/" else: return render_template('html.html', excuse=self._excuse), "/html/"
import random import yaml from flask import jsonify, Response, render_template class Which(object): def __init__(self, mime_type, args): self.mime_type = mime_type self.args = args @property def _excuse(self): stream = open("excuses.yaml", 'r') excuses = yaml.load(stream) return random.choice(excuses["excuses"]) def get_response(self): if self.mime_type == "application/json": return jsonify({ "excuse": self._excuse }), "/json/" elif self.mime_type == "application/xml": return Response( render_template('xml.xml', excuse=self._excuse), mimetype='text/xml' ), "/xml/" elif self.mime_type == "application/javascript" or "jsonp" in self.args: return Response( render_template('jsonp.js', excuse=self._excuse), mimetype='application/javascript' ), "/jsonp/" elif self.mime_type == "text/plain": return Response(self._excuse, mimetype='text/plain'), "/text/" else: return render_template('html.html', excuse=self._excuse), "/html/"
Fix bug with text/plain response
Fix bug with text/plain response
Python
mit
aaronbassett/Bad-Tools,aaronbassett/Bad-Tools,aaronbassett/Bad-Tools,aaronbassett/Bad-Tools,aaronbassett/Bad-Tools
import random import yaml from flask import jsonify, Response, render_template class Which(object): def __init__(self, mime_type, args): self.mime_type = mime_type self.args = args @property def _excuse(self): stream = open("excuses.yaml", 'r') excuses = yaml.load(stream) return random.choice(excuses["excuses"]) def get_response(self): if self.mime_type == "application/json": return jsonify({ "excuse": self._excuse }), "/json/" elif self.mime_type == "application/xml": return Response( render_template('xml.xml', excuse=self._excuse), mimetype='text/xml' ), "/xml/" elif self.mime_type == "application/javascript" or "jsonp" in self.args: return Response( render_template('jsonp.js', excuse=self._excuse), mimetype='application/javascript' ), "/jsonp/" elif self.mime_type == "text/plain": - return Response("Hello world", mimetype='text/plain'), "/text/" + return Response(self._excuse, mimetype='text/plain'), "/text/" else: return render_template('html.html', excuse=self._excuse), "/html/"
Fix bug with text/plain response
## Code Before: import random import yaml from flask import jsonify, Response, render_template class Which(object): def __init__(self, mime_type, args): self.mime_type = mime_type self.args = args @property def _excuse(self): stream = open("excuses.yaml", 'r') excuses = yaml.load(stream) return random.choice(excuses["excuses"]) def get_response(self): if self.mime_type == "application/json": return jsonify({ "excuse": self._excuse }), "/json/" elif self.mime_type == "application/xml": return Response( render_template('xml.xml', excuse=self._excuse), mimetype='text/xml' ), "/xml/" elif self.mime_type == "application/javascript" or "jsonp" in self.args: return Response( render_template('jsonp.js', excuse=self._excuse), mimetype='application/javascript' ), "/jsonp/" elif self.mime_type == "text/plain": return Response("Hello world", mimetype='text/plain'), "/text/" else: return render_template('html.html', excuse=self._excuse), "/html/" ## Instruction: Fix bug with text/plain response ## Code After: import random import yaml from flask import jsonify, Response, render_template class Which(object): def __init__(self, mime_type, args): self.mime_type = mime_type self.args = args @property def _excuse(self): stream = open("excuses.yaml", 'r') excuses = yaml.load(stream) return random.choice(excuses["excuses"]) def get_response(self): if self.mime_type == "application/json": return jsonify({ "excuse": self._excuse }), "/json/" elif self.mime_type == "application/xml": return Response( render_template('xml.xml', excuse=self._excuse), mimetype='text/xml' ), "/xml/" elif self.mime_type == "application/javascript" or "jsonp" in self.args: return Response( render_template('jsonp.js', excuse=self._excuse), mimetype='application/javascript' ), "/jsonp/" elif self.mime_type == "text/plain": return Response(self._excuse, mimetype='text/plain'), "/text/" else: return render_template('html.html', excuse=self._excuse), "/html/"
... elif self.mime_type == "text/plain": return Response(self._excuse, mimetype='text/plain'), "/text/" else: ...
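The essence of this fix is that the text/plain branch must echo the computed excuse rather than a leftover hardcoded body. A minimal regression-test sketch, assuming Flask is installed; the route, excuse string, and layout here are illustrative stand-ins, not the project's real ones:

from flask import Flask, Response

app = Flask(__name__)

@app.route('/text/')
def text_excuse():
    excuse = 'the dog ate my commit'   # stand-in for self._excuse
    return Response(excuse, mimetype='text/plain')

with app.test_client() as client:
    resp = client.get('/text/')
    assert resp.data == b'the dog ate my commit'   # not b'Hello world'
    assert resp.mimetype == 'text/plain'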
47ddf999dd7ef8cd7600710ad6ad7611dd55a218
bin/testNetwork.py
bin/testNetwork.py
import subprocess import os from time import sleep env = {} HOME = os.environ.get("HOME", "/root") scannerConf = open(HOME+"/scanner.conf", "rt") while True: in_line = scannerConf.readline() if not in_line: break in_line = in_line[:-1] key, value = in_line.split("=") env[key] = value scannerConf.close() GATEWAY = '192.168.1.1' if env['GATEWAY']: GATEWAY = env['GATEWAY'] IFACE = 'wlan0' if env['IFACE']: IFACE = env['IFACE'] print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE)) while True: ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY]) if ret == 0: print("Network appears to be up") else: print("Network appears to be down, restarting...") ret = subprocess.call(['/sbin/ifdown', '--force', IFACE]) ret = subprocess.call(['/sbin/ifup', IFACE]) sleep(60)
import subprocess import os from time import sleep env = {} HOME = os.environ.get("HOME", "/root") scannerConf = open(HOME+"/scanner.conf", "rt") while True: in_line = scannerConf.readline() if not in_line: break in_line = in_line[:-1] key, value = in_line.split("=") env[key] = value scannerConf.close() GATEWAY = '192.168.1.1' if 'GATEWAY' in env: GATEWAY = env['GATEWAY'] IFACE = 'wlan0' if 'IFACE' in env: IFACE = env['IFACE'] print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE)) while True: ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY]) if ret == 0: print("Network appears to be up") else: print("Network appears to be down, restarting...") ret = subprocess.call(['/sbin/ifdown', '--force', IFACE]) ret = subprocess.call(['/sbin/ifup', IFACE]) sleep(60)
Change the config dictionary key validation
Change the config dictionary key validation
Python
apache-2.0
starksm64/NativeRaspberryPiBeaconParser,starksm64/NativeRaspberryPiBeaconParser,starksm64/NativeRaspberryPiBeaconParser,starksm64/NativeRaspberryPiBeaconParser,starksm64/NativeRaspberryPiBeaconParser
import subprocess import os from time import sleep env = {} HOME = os.environ.get("HOME", "/root") scannerConf = open(HOME+"/scanner.conf", "rt") while True: in_line = scannerConf.readline() if not in_line: break in_line = in_line[:-1] key, value = in_line.split("=") env[key] = value scannerConf.close() GATEWAY = '192.168.1.1' - if env['GATEWAY']: + if 'GATEWAY' in env: GATEWAY = env['GATEWAY'] IFACE = 'wlan0' - if env['IFACE']: + if 'IFACE' in env: IFACE = env['IFACE'] print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE)) while True: ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY]) if ret == 0: print("Network appears to be up") else: print("Network appears to be down, restarting...") ret = subprocess.call(['/sbin/ifdown', '--force', IFACE]) ret = subprocess.call(['/sbin/ifup', IFACE]) sleep(60)
Change the config dictionary key validation
## Code Before: import subprocess import os from time import sleep env = {} HOME = os.environ.get("HOME", "/root") scannerConf = open(HOME+"/scanner.conf", "rt") while True: in_line = scannerConf.readline() if not in_line: break in_line = in_line[:-1] key, value = in_line.split("=") env[key] = value scannerConf.close() GATEWAY = '192.168.1.1' if env['GATEWAY']: GATEWAY = env['GATEWAY'] IFACE = 'wlan0' if env['IFACE']: IFACE = env['IFACE'] print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE)) while True: ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY]) if ret == 0: print("Network appears to be up") else: print("Network appears to be down, restarting...") ret = subprocess.call(['/sbin/ifdown', '--force', IFACE]) ret = subprocess.call(['/sbin/ifup', IFACE]) sleep(60) ## Instruction: Change the config dictionary key validation ## Code After: import subprocess import os from time import sleep env = {} HOME = os.environ.get("HOME", "/root") scannerConf = open(HOME+"/scanner.conf", "rt") while True: in_line = scannerConf.readline() if not in_line: break in_line = in_line[:-1] key, value = in_line.split("=") env[key] = value scannerConf.close() GATEWAY = '192.168.1.1' if 'GATEWAY' in env: GATEWAY = env['GATEWAY'] IFACE = 'wlan0' if 'IFACE' in env: IFACE = env['IFACE'] print("Testing GATEWAY=%s, IFACE=%s" % (GATEWAY, IFACE)) while True: ret = subprocess.call(['/bin/ping','-I', IFACE, '-nc4', GATEWAY]) if ret == 0: print("Network appears to be up") else: print("Network appears to be down, restarting...") ret = subprocess.call(['/sbin/ifdown', '--force', IFACE]) ret = subprocess.call(['/sbin/ifup', IFACE]) sleep(60)
... GATEWAY = '192.168.1.1' if 'GATEWAY' in env: GATEWAY = env['GATEWAY'] IFACE = 'wlan0' if 'IFACE' in env: IFACE = env['IFACE'] ...
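The change swaps a direct lookup for a membership test: env['GATEWAY'] raises KeyError when scanner.conf omits the key, while 'GATEWAY' in env lets the hardcoded default survive. A short sketch of the difference, with dict.get shown as the usual one-line equivalent:

env = {}   # e.g. scanner.conf defined neither GATEWAY nor IFACE

try:
    gateway = env['GATEWAY']          # old style: raises on a missing key
except KeyError:
    print('direct lookup raised KeyError')

gateway = env['GATEWAY'] if 'GATEWAY' in env else '192.168.1.1'
assert gateway == env.get('GATEWAY', '192.168.1.1')   # idiomatic shorthand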
27d40996f0912a1b9b16afa0884f10b1504acce2
scoring_engine/web/__init__.py
scoring_engine/web/__init__.py
import os from flask import Flask app = Flask(__name__) app.config.from_pyfile('settings.cfg') app.secret_key = os.urandom(128) from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about app.register_blueprint(welcome.mod) app.register_blueprint(scoreboard.mod) app.register_blueprint(overview.mod) app.register_blueprint(services.mod) app.register_blueprint(admin.mod) app.register_blueprint(auth.mod) app.register_blueprint(profile.mod) app.register_blueprint(api.mod) app.register_blueprint(about.mod)
import os import logging from flask import Flask app = Flask(__name__) app.config.from_pyfile('settings.cfg') app.secret_key = os.urandom(128) log = logging.getLogger('werkzeug') log.setLevel(logging.ERROR) from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about app.register_blueprint(welcome.mod) app.register_blueprint(scoreboard.mod) app.register_blueprint(overview.mod) app.register_blueprint(services.mod) app.register_blueprint(admin.mod) app.register_blueprint(auth.mod) app.register_blueprint(profile.mod) app.register_blueprint(api.mod) app.register_blueprint(about.mod)
Use error severity for flask output
Use error severity for flask output
Python
mit
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
import os + import logging from flask import Flask + app = Flask(__name__) app.config.from_pyfile('settings.cfg') app.secret_key = os.urandom(128) + + + log = logging.getLogger('werkzeug') + log.setLevel(logging.ERROR) from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about app.register_blueprint(welcome.mod) app.register_blueprint(scoreboard.mod) app.register_blueprint(overview.mod) app.register_blueprint(services.mod) app.register_blueprint(admin.mod) app.register_blueprint(auth.mod) app.register_blueprint(profile.mod) app.register_blueprint(api.mod) app.register_blueprint(about.mod)
Use error severity for flask output
## Code Before: import os from flask import Flask app = Flask(__name__) app.config.from_pyfile('settings.cfg') app.secret_key = os.urandom(128) from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about app.register_blueprint(welcome.mod) app.register_blueprint(scoreboard.mod) app.register_blueprint(overview.mod) app.register_blueprint(services.mod) app.register_blueprint(admin.mod) app.register_blueprint(auth.mod) app.register_blueprint(profile.mod) app.register_blueprint(api.mod) app.register_blueprint(about.mod) ## Instruction: Use error severity for flask output ## Code After: import os import logging from flask import Flask app = Flask(__name__) app.config.from_pyfile('settings.cfg') app.secret_key = os.urandom(128) log = logging.getLogger('werkzeug') log.setLevel(logging.ERROR) from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about app.register_blueprint(welcome.mod) app.register_blueprint(scoreboard.mod) app.register_blueprint(overview.mod) app.register_blueprint(services.mod) app.register_blueprint(admin.mod) app.register_blueprint(auth.mod) app.register_blueprint(profile.mod) app.register_blueprint(api.mod) app.register_blueprint(about.mod)
... import os import logging from flask import Flask app = Flask(__name__) ... app.config.from_pyfile('settings.cfg') app.secret_key = os.urandom(128) log = logging.getLogger('werkzeug') log.setLevel(logging.ERROR) from scoring_engine.web.views import welcome, scoreboard, overview, services, admin, auth, profile, api, about ...
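The two added lines rely only on the standard logging module: raising the 'werkzeug' logger (the one Flask's development server logs requests under) to ERROR filters out its per-request INFO lines. A standard-library-only sketch of that effect:

import logging

log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)

# INFO records no longer pass the logger's level check; ERROR still does.
assert not log.isEnabledFor(logging.INFO)
assert log.isEnabledFor(logging.ERROR)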
56e3f571196bdc0ab8882f56ed66192d54ff8cad
gmt/clib/tests/test_functions.py
gmt/clib/tests/test_functions.py
import os from .. import create_session, call_module def test_create_session(): "Test that create_session is called without errors" session = create_session() assert session is not None def test_call_module(): "Run a psbasemap call to see if the module works" module = 'psbasemap' args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps' session = create_session() call_module(session, module, args) assert os.path.exists('tmp.ps') # Not the most ideal test. Just check if no segfaults or exceptions occur.
import os from .. import create_session, call_module def test_create_session(): "Test that create_session is called without errors" session = create_session() assert session is not None def test_call_module(): "Run a psbasemap call to see if the module works" module = 'psbasemap' args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps' session = create_session() call_module(session, module, args) assert os.path.exists('tmp.ps') os.remove('tmp.ps') # Not the most ideal test. Just check if no segfaults or exceptions occur.
Remove tmp file created by test
Remove tmp file created by test
Python
bsd-3-clause
GenericMappingTools/gmt-python,GenericMappingTools/gmt-python
import os from .. import create_session, call_module def test_create_session(): "Test that create_session is called without errors" session = create_session() assert session is not None def test_call_module(): "Run a psbasemap call to see if the module works" module = 'psbasemap' args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps' session = create_session() call_module(session, module, args) assert os.path.exists('tmp.ps') + os.remove('tmp.ps') # Not the most ideal test. Just check if no segfaults or exceptions occur.
Remove tmp file created by test
## Code Before: import os from .. import create_session, call_module def test_create_session(): "Test that create_session is called without errors" session = create_session() assert session is not None def test_call_module(): "Run a psbasemap call to see if the module works" module = 'psbasemap' args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps' session = create_session() call_module(session, module, args) assert os.path.exists('tmp.ps') # Not the most ideal test. Just check if no segfaults or exceptions occur. ## Instruction: Remove tmp file created by test ## Code After: import os from .. import create_session, call_module def test_create_session(): "Test that create_session is called without errors" session = create_session() assert session is not None def test_call_module(): "Run a psbasemap call to see if the module works" module = 'psbasemap' args = '-R10/70/-3/8 -JX4i/3i -Ba -P ->tmp.ps' session = create_session() call_module(session, module, args) assert os.path.exists('tmp.ps') os.remove('tmp.ps') # Not the most ideal test. Just check if no segfaults or exceptions occur.
... call_module(session, module, args) assert os.path.exists('tmp.ps') os.remove('tmp.ps') # Not the most ideal test. Just check if no segfaults or exceptions occur. ...
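One design note on the cleanup above: os.remove only runs if the assert before it passes, so a failing test still leaves tmp.ps behind. A hedged alternative sketch using pytest's tmp_path fixture, with the real GMT call replaced by a placeholder write since the session setup is not reproduced here:

import os

def test_call_module(tmp_path):
    out = tmp_path / 'tmp.ps'            # pytest deletes this directory later
    out.write_text('%!PS placeholder')   # stand-in for the real call_module run
    assert os.path.exists(out)           # no explicit os.remove needed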
b16474b4523e8e804f28188ba74c992896748efe
broctl/Napatech.py
broctl/Napatech.py
import BroControl.plugin import BroControl.config class Napatech(BroControl.plugin.Plugin): def __init__(self): super(Napatech, self).__init__(apiversion=1) def name(self): return 'napatech' def pluginVersion(self): return 1 def init(self): # Use this plugin only if there is a Napatech interface in use for nn in self.nodes(): if nn.type == 'worker' and nn.interface.startswith('napatech::'): return True return False def nodeKeys(self): return ['dedupe_lru_size', 'host_buffer_allowance'] def options(self): return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'), ('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')] def broctl_config(self): script += '# Settings for configuring Napatech interractions' script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size')) script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance')) return script
import BroControl.plugin import BroControl.config class Napatech(BroControl.plugin.Plugin): def __init__(self): super(Napatech, self).__init__(apiversion=1) def name(self): return 'napatech' def pluginVersion(self): return 1 def init(self): # Use this plugin only if there is a Napatech interface in use for nn in self.nodes(): if nn.type == 'worker' and nn.interface.startswith('napatech::'): return True return False def nodeKeys(self): return ['dedupe_lru_size', 'host_buffer_allowance'] def options(self): return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'), ('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')] def broctl_config(self): script = '' script += '# Settings for configuring Napatech interractions' script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size')) script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance')) return script
Fix minor bug in broctl plugin.
Fix minor bug in broctl plugin.
Python
bsd-3-clause
hosom/bro-napatech,hosom/bro-napatech
import BroControl.plugin import BroControl.config class Napatech(BroControl.plugin.Plugin): def __init__(self): super(Napatech, self).__init__(apiversion=1) def name(self): return 'napatech' def pluginVersion(self): return 1 def init(self): # Use this plugin only if there is a Napatech interface in use for nn in self.nodes(): if nn.type == 'worker' and nn.interface.startswith('napatech::'): return True return False def nodeKeys(self): return ['dedupe_lru_size', 'host_buffer_allowance'] def options(self): return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'), ('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')] def broctl_config(self): + script = '' script += '# Settings for configuring Napatech interractions' script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size')) script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance')) return script
Fix minor bug in broctl plugin.
## Code Before: import BroControl.plugin import BroControl.config class Napatech(BroControl.plugin.Plugin): def __init__(self): super(Napatech, self).__init__(apiversion=1) def name(self): return 'napatech' def pluginVersion(self): return 1 def init(self): # Use this plugin only if there is a Napatech interface in use for nn in self.nodes(): if nn.type == 'worker' and nn.interface.startswith('napatech::'): return True return False def nodeKeys(self): return ['dedupe_lru_size', 'host_buffer_allowance'] def options(self): return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'), ('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')] def broctl_config(self): script += '# Settings for configuring Napatech interractions' script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size')) script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance')) return script ## Instruction: Fix minor bug in broctl plugin. ## Code After: import BroControl.plugin import BroControl.config class Napatech(BroControl.plugin.Plugin): def __init__(self): super(Napatech, self).__init__(apiversion=1) def name(self): return 'napatech' def pluginVersion(self): return 1 def init(self): # Use this plugin only if there is a Napatech interface in use for nn in self.nodes(): if nn.type == 'worker' and nn.interface.startswith('napatech::'): return True return False def nodeKeys(self): return ['dedupe_lru_size', 'host_buffer_allowance'] def options(self): return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'), ('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')] def broctl_config(self): script = '' script += '# Settings for configuring Napatech interractions' script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size')) script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance')) return script
# ... existing code ... def broctl_config(self): script = '' script += '# Settings for configuring Napatech interractions' script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size')) # ... rest of the code ...
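The "minor bug" here is that script += ... reads script before assigning it, which fails when the name was never bound; initializing script = '' first is the fix. A minimal reproduction, kept version-neutral since the exact error wording differs across Python releases:

def broken():
    s += 'x'                    # augmented assignment reads s first
    return s

try:
    broken()
except UnboundLocalError as err:
    print(type(err).__name__)   # UnboundLocalError

def fixed():
    s = ''                      # bind the name before the first +=
    s += 'x'
    return s

assert fixed() == 'x'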
9dafef749aaf2fca9e865cf28b043ea22bafe3a5
backend/django/apps/accounts/tests.py
backend/django/apps/accounts/tests.py
from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data)
from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data) class CreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_create_user(self): self.user.email = '[email protected]' data = json.dumps(WholeAccountSerializer(self.user).data) response = self.client.post( reverse('_accounts:account-list'), data, content_type='application/json') self.assertEqual( first=response.status_code, second=status.HTTP_201_CREATED)
Create a test for Account creation
Create a test for Account creation
Python
mit
slavpetroff/sweetshop,slavpetroff/sweetshop
from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data) + + class CreateUserTest(APITestCase): + + def setUp(self): + self.user = UserFactory() + + def test_create_user(self): + self.user.email = '[email protected]' + data = json.dumps(WholeAccountSerializer(self.user).data) + response = self.client.post( + reverse('_accounts:account-list'), + data, + content_type='application/json') + self.assertEqual( + first=response.status_code, second=status.HTTP_201_CREATED) +
Create a test for Account creation
## Code Before: from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data) ## Instruction: Create a test for Account creation ## Code After: from django.core.urlresolvers import reverse from rest_framework.test import APITestCase from rest_framework import status import factory import json from .models import BaseAccount from .serializers import WholeAccountSerializer class UserFactory(factory.django.DjangoModelFactory): class Meta: model = BaseAccount first_name = 'John' last_name = 'Doe' email = '{}.{}@email.com'.format(first_name, last_name) password = 'passjohn1' class FactoryBoyCreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_can_create_user(self): response = self.client.get( reverse('_accounts:account-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertJSONEqual( raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data) class CreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_create_user(self): self.user.email = '[email protected]' data = json.dumps(WholeAccountSerializer(self.user).data) response = self.client.post( reverse('_accounts:account-list'), data, content_type='application/json') self.assertEqual( first=response.status_code, second=status.HTTP_201_CREATED)
... raw=json.dumps(response.data), expected_data=WholeAccountSerializer(self.user).data) class CreateUserTest(APITestCase): def setUp(self): self.user = UserFactory() def test_create_user(self): self.user.email = '[email protected]' data = json.dumps(WholeAccountSerializer(self.user).data) response = self.client.post( reverse('_accounts:account-list'), data, content_type='application/json') self.assertEqual( first=response.status_code, second=status.HTTP_201_CREATED) ...
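A small variant worth knowing: DRF's test client can serialize the payload itself via format='json', replacing the manual json.dumps plus content_type pair. Shown as a drop-in method body for the test case above, so it assumes that module's imports and URL names:

def test_create_user(self):
    self.user.email = '[email protected]'
    data = WholeAccountSerializer(self.user).data
    response = self.client.post(
        reverse('_accounts:account-list'), data, format='json')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)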
4d29aa24b39285c491182edd69ecb7c22a9d643d
ceph_medic/tests/test_main.py
ceph_medic/tests/test_main.py
import pytest import ceph_medic.main class TestMain(object): def test_main(self): assert ceph_medic.main def test_invalid_ssh_config(self, capsys): argv = ["ceph-medic", "--ssh-config", "/does/not/exist"] with pytest.raises(SystemExit): ceph_medic.main.Medic(argv) out = capsys.readouterr() assert 'the given ssh config path does not exist' in out.out def test_valid_ssh_config(self, capsys): ssh_config = '/etc/ssh/ssh_config' argv = ["ceph-medic", "--ssh-config", ssh_config] ceph_medic.main.Medic(argv) out = capsys.readouterr() assert out.out == '' assert ssh_config == ceph_medic.main.ceph_medic.config.ssh_config
import pytest import ceph_medic.main from mock import patch class TestMain(object): def test_main(self): assert ceph_medic.main def test_invalid_ssh_config(self, capsys): argv = ["ceph-medic", "--ssh-config", "/does/not/exist"] with pytest.raises(SystemExit): ceph_medic.main.Medic(argv) out = capsys.readouterr() assert 'the given ssh config path does not exist' in out.out def test_valid_ssh_config(self, capsys): ssh_config = '/etc/ssh/ssh_config' argv = ["ceph-medic", "--ssh-config", ssh_config] def fake_exists(path): if path == ssh_config: return True if path.endswith('cephmedic.conf'): return False return True with patch.object(ceph_medic.main.os.path, 'exists') as m_exists: m_exists.side_effect = fake_exists ceph_medic.main.Medic(argv) out = capsys.readouterr() assert 'tssh config path does not exist' not in out.out assert ssh_config == ceph_medic.main.ceph_medic.config.ssh_config
Fix test breakage when ssh_config missing
tests: Fix test breakage when ssh_config missing I assumed /etc/ssh/ssh_config would be present, but it turns out in a mock chroot environment it isn't. Signed-off-by: Zack Cerza <[email protected]>
Python
mit
alfredodeza/ceph-doctor
import pytest import ceph_medic.main + + from mock import patch class TestMain(object): def test_main(self): assert ceph_medic.main def test_invalid_ssh_config(self, capsys): argv = ["ceph-medic", "--ssh-config", "/does/not/exist"] with pytest.raises(SystemExit): ceph_medic.main.Medic(argv) out = capsys.readouterr() assert 'the given ssh config path does not exist' in out.out def test_valid_ssh_config(self, capsys): ssh_config = '/etc/ssh/ssh_config' argv = ["ceph-medic", "--ssh-config", ssh_config] + + def fake_exists(path): + if path == ssh_config: + return True + if path.endswith('cephmedic.conf'): + return False + return True + + with patch.object(ceph_medic.main.os.path, 'exists') as m_exists: + m_exists.side_effect = fake_exists - ceph_medic.main.Medic(argv) + ceph_medic.main.Medic(argv) out = capsys.readouterr() - assert out.out == '' + assert 'tssh config path does not exist' not in out.out assert ssh_config == ceph_medic.main.ceph_medic.config.ssh_config
Fix test breakage when ssh_config missing
## Code Before: import pytest import ceph_medic.main class TestMain(object): def test_main(self): assert ceph_medic.main def test_invalid_ssh_config(self, capsys): argv = ["ceph-medic", "--ssh-config", "/does/not/exist"] with pytest.raises(SystemExit): ceph_medic.main.Medic(argv) out = capsys.readouterr() assert 'the given ssh config path does not exist' in out.out def test_valid_ssh_config(self, capsys): ssh_config = '/etc/ssh/ssh_config' argv = ["ceph-medic", "--ssh-config", ssh_config] ceph_medic.main.Medic(argv) out = capsys.readouterr() assert out.out == '' assert ssh_config == ceph_medic.main.ceph_medic.config.ssh_config ## Instruction: Fix test breakage when ssh_config missing ## Code After: import pytest import ceph_medic.main from mock import patch class TestMain(object): def test_main(self): assert ceph_medic.main def test_invalid_ssh_config(self, capsys): argv = ["ceph-medic", "--ssh-config", "/does/not/exist"] with pytest.raises(SystemExit): ceph_medic.main.Medic(argv) out = capsys.readouterr() assert 'the given ssh config path does not exist' in out.out def test_valid_ssh_config(self, capsys): ssh_config = '/etc/ssh/ssh_config' argv = ["ceph-medic", "--ssh-config", ssh_config] def fake_exists(path): if path == ssh_config: return True if path.endswith('cephmedic.conf'): return False return True with patch.object(ceph_medic.main.os.path, 'exists') as m_exists: m_exists.side_effect = fake_exists ceph_medic.main.Medic(argv) out = capsys.readouterr() assert 'tssh config path does not exist' not in out.out assert ssh_config == ceph_medic.main.ceph_medic.config.ssh_config
... import pytest import ceph_medic.main from mock import patch ... ssh_config = '/etc/ssh/ssh_config' argv = ["ceph-medic", "--ssh-config", ssh_config] def fake_exists(path): if path == ssh_config: return True if path.endswith('cephmedic.conf'): return False return True with patch.object(ceph_medic.main.os.path, 'exists') as m_exists: m_exists.side_effect = fake_exists ceph_medic.main.Medic(argv) out = capsys.readouterr() assert 'tssh config path does not exist' not in out.out assert ssh_config == ceph_medic.main.ceph_medic.config.ssh_config ...
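The fix leans on mock's side_effect: when side_effect is a callable, every call to the patched function is routed through it and its return value is used. A standalone sketch with the standard-library unittest.mock (equivalent to the mock package imported above):

import os.path
from unittest.mock import patch

def fake_exists(path):
    return path == '/etc/ssh/ssh_config'   # only this path 'exists'

with patch.object(os.path, 'exists') as m_exists:
    m_exists.side_effect = fake_exists
    assert os.path.exists('/etc/ssh/ssh_config')
    assert not os.path.exists('.')

assert os.path.exists('.')   # the real implementation is restored on exit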
959897478bbda18f02aa6e38f2ebdd837581f1f0
tests/test_sct_verify_signature.py
tests/test_sct_verify_signature.py
from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() got_verified, got_output, got_cmd_res = \ verify_signature(signature_input, signature, pubkey) assert got_verified is True assert got_output == 'Verified OK\n' assert got_cmd_res.exitcode == 0 signature_input = b'some invalid signature input' got_verified, got_output, got_cmd_res = \ verify_signature(signature_input, signature, pubkey) assert got_verified is False assert got_output == 'Verification Failure\n' assert got_cmd_res.exitcode == 1
from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() assert verify_signature(signature_input, signature, pubkey) is True signature_input = b'some invalid signature input' assert verify_signature(signature_input, signature, pubkey) is False
Fix test for changed SctVerificationResult
Fix test for changed SctVerificationResult
Python
mit
theno/ctutlz,theno/ctutlz
from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() - got_verified, got_output, got_cmd_res = \ - verify_signature(signature_input, signature, pubkey) + assert verify_signature(signature_input, signature, pubkey) is True - - assert got_verified is True - assert got_output == 'Verified OK\n' - assert got_cmd_res.exitcode == 0 signature_input = b'some invalid signature input' - got_verified, got_output, got_cmd_res = \ - verify_signature(signature_input, signature, pubkey) + assert verify_signature(signature_input, signature, pubkey) is False - assert got_verified is False - assert got_output == 'Verification Failure\n' - assert got_cmd_res.exitcode == 1 -
Fix test for changed SctVerificationResult
## Code Before: from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() got_verified, got_output, got_cmd_res = \ verify_signature(signature_input, signature, pubkey) assert got_verified is True assert got_output == 'Verified OK\n' assert got_cmd_res.exitcode == 0 signature_input = b'some invalid signature input' got_verified, got_output, got_cmd_res = \ verify_signature(signature_input, signature, pubkey) assert got_verified is False assert got_output == 'Verification Failure\n' assert got_cmd_res.exitcode == 1 ## Instruction: Fix test for changed SctVerificationResult ## Code After: from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() assert verify_signature(signature_input, signature, pubkey) is True signature_input = b'some invalid signature input' assert verify_signature(signature_input, signature, pubkey) is False
// ... existing code ... pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() assert verify_signature(signature_input, signature, pubkey) is True signature_input = b'some invalid signature input' assert verify_signature(signature_input, signature, pubkey) is False // ... rest of the code ...
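The rewritten assertions use `is True` and `is False`, which only pass because verify_signature now returns a real bool instead of the old three-part result. A sketch of why identity checks are stricter than truthiness; the class name is a hypothetical stand-in for the pre-change return type:

class SctVerificationResult:      # hypothetical: truthy, but not True
    def __bool__(self):
        return True

result = SctVerificationResult()
assert result                     # passes a plain truthiness check...
assert (result is True) is False  # ...but fails an identity check
assert (1 == 1) is True           # an actual bool passes both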
2ec5f71d04ae17a1c0a457fba1b82f8c8e8891ab
sc2reader/listeners/utils.py
sc2reader/listeners/utils.py
from sc2reader import log_utils class ListenerBase(object): def __init__(self): self.logger = log_utils.get_logger(self.__class__) def accepts(self, event): return true
from sc2reader import log_utils class ListenerBase(object): def __init__(self): self.logger = log_utils.get_logger(self.__class__) def accepts(self, event): return true def setup(self, replay): pass
Add a default ListenerBase.setup implementation.
Add a default ListenerBase.setup implementation.
Python
mit
StoicLoofah/sc2reader,vlaufer/sc2reader,vlaufer/sc2reader,GraylinKim/sc2reader,ggtracker/sc2reader,ggtracker/sc2reader,GraylinKim/sc2reader,StoicLoofah/sc2reader
from sc2reader import log_utils class ListenerBase(object): def __init__(self): self.logger = log_utils.get_logger(self.__class__) def accepts(self, event): return true + + def setup(self, replay): + pass
Add a default ListenerBase.setup implementation.
## Code Before: from sc2reader import log_utils class ListenerBase(object): def __init__(self): self.logger = log_utils.get_logger(self.__class__) def accepts(self, event): return true ## Instruction: Add a default ListenerBase.setup implementation. ## Code After: from sc2reader import log_utils class ListenerBase(object): def __init__(self): self.logger = log_utils.get_logger(self.__class__) def accepts(self, event): return true def setup(self, replay): pass
// ... existing code ... def accepts(self, event): return true def setup(self, replay): pass // ... rest of the code ...
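The added no-op setup is the template-method pattern: the base class guarantees the hook exists, so a driver can call it on every listener without hasattr guards, and subclasses override it only when they need to. A minimal sketch with a hypothetical subclass:

class ListenerBase(object):
    def setup(self, replay):
        pass                       # default hook: do nothing

class CountingListener(ListenerBase):
    def __init__(self):
        self.events = 0            # no setup() override required

for listener in (ListenerBase(), CountingListener()):
    listener.setup(replay=None)    # uniformly safe to call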
2ef0ccfbf337d0ef1870c5a1191b2bcdcffd1f9e
dbaas/backup/admin/log_configuration.py
dbaas/backup/admin/log_configuration.py
from __future__ import absolute_import, unicode_literals from django.contrib import admin import logging LOG = logging.getLogger(__name__) class LogConfigurationAdmin(admin.ModelAdmin): list_filter = ("environment", "engine_type") list_display = ("environment", "engine_type", "retention_days", "filer_path", "mount_point_path", "log_path")
from __future__ import absolute_import, unicode_literals from django.contrib import admin import logging LOG = logging.getLogger(__name__) class LogConfigurationAdmin(admin.ModelAdmin): list_filter = ("environment", "engine_type") list_display = ("environment", "engine_type", "retention_days", "filer_path", "mount_point_path", "log_path", "cron_minute", "cron_hour")
Add new fields on LogConfiguration model
Add new fields on LogConfiguration model
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
from __future__ import absolute_import, unicode_literals from django.contrib import admin import logging LOG = logging.getLogger(__name__) class LogConfigurationAdmin(admin.ModelAdmin): list_filter = ("environment", "engine_type") list_display = ("environment", "engine_type", "retention_days", - "filer_path", "mount_point_path", "log_path") + "filer_path", "mount_point_path", "log_path", + "cron_minute", "cron_hour")
Add new fields on LogConfiguration model
## Code Before: from __future__ import absolute_import, unicode_literals from django.contrib import admin import logging LOG = logging.getLogger(__name__) class LogConfigurationAdmin(admin.ModelAdmin): list_filter = ("environment", "engine_type") list_display = ("environment", "engine_type", "retention_days", "filer_path", "mount_point_path", "log_path") ## Instruction: Add new fields on LogConfiguration model ## Code After: from __future__ import absolute_import, unicode_literals from django.contrib import admin import logging LOG = logging.getLogger(__name__) class LogConfigurationAdmin(admin.ModelAdmin): list_filter = ("environment", "engine_type") list_display = ("environment", "engine_type", "retention_days", "filer_path", "mount_point_path", "log_path", "cron_minute", "cron_hour")
# ... existing code ... list_display = ("environment", "engine_type", "retention_days", "filer_path", "mount_point_path", "log_path", "cron_minute", "cron_hour") # ... rest of the code ...
f4d5bafcf99d2117fe589d8c31f8aff8ed3467a5
RefreshScripts.py
RefreshScripts.py
import time from CheckAndPostForSeriesSubmissions import checkNewSubmissions # Refreshes all other scripts every couple of minutes def refreshScripts(): while True: checkNewSubmissions() timeToSleep = 900 print("Sleeping for " + str(timeToSleep / 60) + " minutes.") time.sleep(timeToSleep) print("") if __name__ == '__main__': refreshScripts()
import time from CheckAndPostForSeriesSubmissions import checkNewSubmissions # Refreshes all other scripts every couple of minutes def refreshScripts(): while True: try: checkNewSubmissions() except Exception as e: #traceback.print_exc() print("Found error, skipping this loop. ") print(str(e)) timeToSleep = 900 print("Sleeping for " + str(timeToSleep / 60) + " minutes.") time.sleep(timeToSleep) print("") if __name__ == '__main__': refreshScripts()
Refresh script now displays error message
Refresh script now displays error message
Python
mit
LiquidFun/Reddit-GeoGuessr-Tracking-Bot
import time from CheckAndPostForSeriesSubmissions import checkNewSubmissions # Refreshes all other scripts every couple of minutes def refreshScripts(): while True: + try: - checkNewSubmissions() + checkNewSubmissions() + except Exception as e: + #traceback.print_exc() + print("Found error, skipping this loop. ") + print(str(e)) timeToSleep = 900 print("Sleeping for " + str(timeToSleep / 60) + " minutes.") time.sleep(timeToSleep) print("") if __name__ == '__main__': refreshScripts()
Refresh script now displays error message
## Code Before: import time from CheckAndPostForSeriesSubmissions import checkNewSubmissions # Refreshes all other scripts every couple of minutes def refreshScripts(): while True: checkNewSubmissions() timeToSleep = 900 print("Sleeping for " + str(timeToSleep / 60) + " minutes.") time.sleep(timeToSleep) print("") if __name__ == '__main__': refreshScripts() ## Instruction: Refresh script now displays error message ## Code After: import time from CheckAndPostForSeriesSubmissions import checkNewSubmissions # Refreshes all other scripts every couple of minutes def refreshScripts(): while True: try: checkNewSubmissions() except Exception as e: #traceback.print_exc() print("Found error, skipping this loop. ") print(str(e)) timeToSleep = 900 print("Sleeping for " + str(timeToSleep / 60) + " minutes.") time.sleep(timeToSleep) print("") if __name__ == '__main__': refreshScripts()
// ... existing code ... while True: try: checkNewSubmissions() except Exception as e: #traceback.print_exc() print("Found error, skipping this loop. ") print(str(e)) timeToSleep = 900 print("Sleeping for " + str(timeToSleep / 60) + " minutes.") // ... rest of the code ...
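The commented-out traceback.print_exc() hints at the usual upgrade to this pattern: logging the full stack instead of just str(e), while still keeping the loop alive. A self-contained sketch of such a resilient polling loop, with a bounded loop count added so the example terminates:

import time
import traceback

def poll_forever(task, interval=900, max_loops=None):
    loops = 0
    while max_loops is None or loops < max_loops:
        try:
            task()
        except Exception:
            traceback.print_exc()   # full stack, then keep looping
        loops += 1
        time.sleep(interval)

calls = []
def flaky():
    calls.append(1)
    if len(calls) == 1:
        raise RuntimeError('transient failure')

poll_forever(flaky, interval=0, max_loops=2)
assert len(calls) == 2              # the loop survived the first failure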
de6a32e4b9a94103c923188894da6455ca14956c
TopTenView.py
TopTenView.py
import feedparser, requests, ui url = 'https://itunes.apple.com/us/rss/topsongs/limit=10/xml' def get_image_urls(itunes_url): for entry in feedparser.parse(itunes_url).entries: yield entry['summary'].partition('src="')[2].partition('"')[0] class TopTenView(ui.View): def __init__(self, image_urls): self.present() for i, url in enumerate(image_urls): button = ui.Button() button.background_image = ui.Image.from_data(requests.get(url).content) button.border_width = 2 button.x = (i % 5) * 128 + 10 button.y = (i / 5) * 128 + 10 button.width = button.height = 128 self.add_subview(button) TopTenView(list(get_image_urls(url)))
import feedparser, requests, ui url = 'https://itunes.apple.com/us/rss/topsongs/limit=10/xml' def get_image_urls(itunes_url): for entry in feedparser.parse(itunes_url).entries: yield entry['summary'].partition('src="')[2].partition('"')[0] class TopTenView(ui.View): def __init__(self, image_urls): self.present() for i, url in enumerate(image_urls): button = ui.Button() button.background_image = ui.Image.from_data(requests.get(url).content) w, h = button.background_image.size button.x = i % 5 * w button.y = i / 5 * h button.width, button.height = w, h button.border_width = 2 self.add_subview(button) TopTenView(list(get_image_urls(url)))
Move to buttons & remove hardcoding of image size
Move to buttons & remove hardcoding of image size
Python
apache-2.0
cclauss/Pythonista_ui
import feedparser, requests, ui url = 'https://itunes.apple.com/us/rss/topsongs/limit=10/xml' def get_image_urls(itunes_url): for entry in feedparser.parse(itunes_url).entries: yield entry['summary'].partition('src="')[2].partition('"')[0] class TopTenView(ui.View): def __init__(self, image_urls): self.present() for i, url in enumerate(image_urls): button = ui.Button() button.background_image = ui.Image.from_data(requests.get(url).content) + w, h = button.background_image.size + button.x = i % 5 * w + button.y = i / 5 * h + button.width, button.height = w, h button.border_width = 2 - button.x = (i % 5) * 128 + 10 - button.y = (i / 5) * 128 + 10 - button.width = button.height = 128 self.add_subview(button) TopTenView(list(get_image_urls(url)))
Move to buttons & remove hardcoding of image size
## Code Before: import feedparser, requests, ui url = 'https://itunes.apple.com/us/rss/topsongs/limit=10/xml' def get_image_urls(itunes_url): for entry in feedparser.parse(itunes_url).entries: yield entry['summary'].partition('src="')[2].partition('"')[0] class TopTenView(ui.View): def __init__(self, image_urls): self.present() for i, url in enumerate(image_urls): button = ui.Button() button.background_image = ui.Image.from_data(requests.get(url).content) button.border_width = 2 button.x = (i % 5) * 128 + 10 button.y = (i / 5) * 128 + 10 button.width = button.height = 128 self.add_subview(button) TopTenView(list(get_image_urls(url))) ## Instruction: Move to buttons & remove hardcoding of image size ## Code After: import feedparser, requests, ui url = 'https://itunes.apple.com/us/rss/topsongs/limit=10/xml' def get_image_urls(itunes_url): for entry in feedparser.parse(itunes_url).entries: yield entry['summary'].partition('src="')[2].partition('"')[0] class TopTenView(ui.View): def __init__(self, image_urls): self.present() for i, url in enumerate(image_urls): button = ui.Button() button.background_image = ui.Image.from_data(requests.get(url).content) w, h = button.background_image.size button.x = i % 5 * w button.y = i / 5 * h button.width, button.height = w, h button.border_width = 2 self.add_subview(button) TopTenView(list(get_image_urls(url)))
// ... existing code ... button = ui.Button() button.background_image = ui.Image.from_data(requests.get(url).content) w, h = button.background_image.size button.x = i % 5 * w button.y = i / 5 * h button.width, button.height = w, h button.border_width = 2 self.add_subview(button) // ... rest of the code ...
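The layout change derives each tile's frame from the image's own size instead of a hardcoded 128. A framework-free sketch of that grid arithmetic; note it uses integer division (i // columns), since under Python 3 the record's i / 5 would yield a float row index:

def grid_frames(sizes, columns=5):
    # (x, y, width, height) for each tile, sized from the image itself.
    return [(i % columns * w, i // columns * h, w, h)
            for i, (w, h) in enumerate(sizes)]

assert grid_frames([(100, 80)] * 6)[5] == (0, 80, 100, 80)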
5b6bd6cacc7032bc6a830707374b1448861e5d08
plugin.py
plugin.py
import sublime def plugin_loaded(): # Native package causes some conflicts. # Thanks https://github.com/SublimeText-Markdown/MarkdownEditing disable_native_php_package() def disable_native_php_package(): settings = sublime.load_settings('Preferences.sublime-settings') ignored_packages = settings.get('ignored_packages', []) if 'PHP' not in ignored_packages: ignored_packages.append('PHP') settings.set('ignored_packages', ignored_packages) sublime.save_settings('Preferences.sublime-settings')
import sublime def plugin_loaded(): # Native package causes some conflicts. # Thanks https://github.com/SublimeText-Markdown/MarkdownEditing disable_native_php_package() def disable_native_php_package(): settings = sublime.load_settings('Preferences.sublime-settings') ignored_packages = settings.get('ignored_packages', []) if 'PHP' not in ignored_packages: ignored_packages.append('PHP') ignored_packages.sort() settings.set('ignored_packages', ignored_packages) sublime.save_settings('Preferences.sublime-settings')
Sort ignored packages when adding native PHP package
Sort ignored packages when adding native PHP package
Python
bsd-3-clause
gerardroche/sublime-php-grammar,gerardroche/sublime-php-grammar
import sublime def plugin_loaded(): # Native package causes some conflicts. # Thanks https://github.com/SublimeText-Markdown/MarkdownEditing disable_native_php_package() def disable_native_php_package(): settings = sublime.load_settings('Preferences.sublime-settings') ignored_packages = settings.get('ignored_packages', []) if 'PHP' not in ignored_packages: ignored_packages.append('PHP') + ignored_packages.sort() settings.set('ignored_packages', ignored_packages) sublime.save_settings('Preferences.sublime-settings')
Sort ignored packages when adding native PHP package
## Code Before: import sublime def plugin_loaded(): # Native package causes some conflicts. # Thanks https://github.com/SublimeText-Markdown/MarkdownEditing disable_native_php_package() def disable_native_php_package(): settings = sublime.load_settings('Preferences.sublime-settings') ignored_packages = settings.get('ignored_packages', []) if 'PHP' not in ignored_packages: ignored_packages.append('PHP') settings.set('ignored_packages', ignored_packages) sublime.save_settings('Preferences.sublime-settings') ## Instruction: Sort ignored packages when adding native PHP package ## Code After: import sublime def plugin_loaded(): # Native package causes some conflicts. # Thanks https://github.com/SublimeText-Markdown/MarkdownEditing disable_native_php_package() def disable_native_php_package(): settings = sublime.load_settings('Preferences.sublime-settings') ignored_packages = settings.get('ignored_packages', []) if 'PHP' not in ignored_packages: ignored_packages.append('PHP') ignored_packages.sort() settings.set('ignored_packages', ignored_packages) sublime.save_settings('Preferences.sublime-settings')
# ... existing code ... if 'PHP' not in ignored_packages: ignored_packages.append('PHP') ignored_packages.sort() settings.set('ignored_packages', ignored_packages) sublime.save_settings('Preferences.sublime-settings') # ... rest of the code ...
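Append-then-sort keeps the whole list ordered after every insertion; for a list that is already sorted, bisect.insort does the same in a single step. A small sketch with hypothetical package names:

import bisect

ignored = ['ASP', 'Rails', 'Vintage']
ignored.append('PHP')              # the commit's pattern
ignored.sort()

ignored2 = ['ASP', 'Rails', 'Vintage']
bisect.insort(ignored2, 'PHP')     # in-place sorted insert

assert ignored == ignored2 == ['ASP', 'PHP', 'Rails', 'Vintage']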
8126a93ec002c3cbff8f9cd7bfe996de740f4bef
setup.py
setup.py
from distutils.core import setup setup( name='buqeyemodel', # packages=['buqeyemodel'], py_modules=['buqeyemodel'], version='0.1', description='A statistical model of EFT convergence.', author='Jordan Melendez', author_email='[email protected]', license='MIT', url='https://github.com/jordan-melendez/buqeyemodel', download_url='https://github.com/jordan-melendez/buqeyemodel/archive/v0.1.tar.gz', keywords='EFT nuclear model gaussian process uncertainty quantification buqeyemodel buqeye', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Physics' ] )
from distutils.core import setup setup( name='buqeyemodel', packages=['buqeyemodel'], # py_modules=['buqeyemodel', 'pymc3_additions'], version='0.1', description='A statistical model of EFT convergence.', author='Jordan Melendez', author_email='[email protected]', license='MIT', url='https://github.com/jordan-melendez/buqeyemodel', download_url='https://github.com/jordan-melendez/buqeyemodel/archive/v0.1.tar.gz', keywords='EFT nuclear model gaussian process uncertainty quantification buqeyemodel buqeye', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Physics' ] )
Put buqeyemodel back in folder for multi-file use
Put buqeyemodel back in folder for multi-file use
Python
mit
jordan-melendez/buqeyemodel,jordan-melendez/buqeyemodel
from distutils.core import setup setup( name='buqeyemodel', - # packages=['buqeyemodel'], + packages=['buqeyemodel'], - py_modules=['buqeyemodel'], + # py_modules=['buqeyemodel', 'pymc3_additions'], version='0.1', description='A statistical model of EFT convergence.', author='Jordan Melendez', author_email='[email protected]', license='MIT', url='https://github.com/jordan-melendez/buqeyemodel', download_url='https://github.com/jordan-melendez/buqeyemodel/archive/v0.1.tar.gz', keywords='EFT nuclear model gaussian process uncertainty quantification buqeyemodel buqeye', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Physics' ] )
Put buqeyemodel back in folder for multi-file use
## Code Before: from distutils.core import setup setup( name='buqeyemodel', # packages=['buqeyemodel'], py_modules=['buqeyemodel'], version='0.1', description='A statistical model of EFT convergence.', author='Jordan Melendez', author_email='[email protected]', license='MIT', url='https://github.com/jordan-melendez/buqeyemodel', download_url='https://github.com/jordan-melendez/buqeyemodel/archive/v0.1.tar.gz', keywords='EFT nuclear model gaussian process uncertainty quantification buqeyemodel buqeye', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Physics' ] ) ## Instruction: Put buqeyemodel back in folder for multi-file use ## Code After: from distutils.core import setup setup( name='buqeyemodel', packages=['buqeyemodel'], # py_modules=['buqeyemodel', 'pymc3_additions'], version='0.1', description='A statistical model of EFT convergence.', author='Jordan Melendez', author_email='[email protected]', license='MIT', url='https://github.com/jordan-melendez/buqeyemodel', download_url='https://github.com/jordan-melendez/buqeyemodel/archive/v0.1.tar.gz', keywords='EFT nuclear model gaussian process uncertainty quantification buqeyemodel buqeye', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Physics' ] )
// ... existing code ... setup( name='buqeyemodel', packages=['buqeyemodel'], # py_modules=['buqeyemodel', 'pymc3_additions'], version='0.1', description='A statistical model of EFT convergence.', // ... rest of the code ...
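The swap matters because the two keywords describe different layouts: py_modules=['buqeyemodel'] ships a single buqeyemodel.py file, while packages=['buqeyemodel'] ships a buqeyemodel/ directory with an __init__.py plus any sibling modules (the commented-out pymc3_additions hints at that second file). A stripped-down sketch of the package form, using distutils as the record does (setuptools is the modern equivalent); the paths are illustrative:

# expected layout:
#   setup.py
#   buqeyemodel/
#       __init__.py
#       pymc3_additions.py
from distutils.core import setup

setup(
    name='buqeyemodel',
    packages=['buqeyemodel'],   # picks up every module inside the directory
    version='0.1',
)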
54dc5c3a6ddf7fdc630547836058d017c778008f
python/recursive-digit-sum.py
python/recursive-digit-sum.py
def superDigit(n, k):
    p = create_p(n, k)
    return get_super_digit(p)

def get_super_digit(p):
    if len(p) == 1:
        return int(p)
    else:
        digits = map(int, list(p))
        return get_super_digit(str(sum(digits)))

def create_p(n, k):
    return n * k

if __name__ == '__main__':
    nk = input().split()
    n = nk[0]
    k = int(nk[1])
    result = superDigit(n, k)
    print(result)
def super_digit(n, k):
    digits = map(int, list(n))
    return get_super_digit(str(sum(digits) * k))

def get_super_digit(p):
    if len(p) == 1:
        return int(p)
    else:
        digits = map(int, list(p))
        return get_super_digit(str(sum(digits)))

if __name__ == '__main__':
    nk = input().split()
    n = nk[0]
    k = int(nk[1])
    result = super_digit(n, k)
    print(result)
Implement shortcut to compute initial p super digit
Implement shortcut to compute initial p super digit
Python
mit
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
- def superDigit(n, k):
+ def super_digit(n, k):
-     p = create_p(n, k)
-     return get_super_digit(p)
+     digits = map(int, list(n))
+     return get_super_digit(str(sum(digits) * k))

def get_super_digit(p):
    if len(p) == 1:
        return int(p)
    else:
        digits = map(int, list(p))
        return get_super_digit(str(sum(digits)))

- def create_p(n, k):
-     return n * k

if __name__ == '__main__':
    nk = input().split()
    n = nk[0]
    k = int(nk[1])
-     result = superDigit(n, k)
+     result = super_digit(n, k)
    print(result)
Implement shortcut to compute initial p super digit
## Code Before:
def superDigit(n, k):
    p = create_p(n, k)
    return get_super_digit(p)

def get_super_digit(p):
    if len(p) == 1:
        return int(p)
    else:
        digits = map(int, list(p))
        return get_super_digit(str(sum(digits)))

def create_p(n, k):
    return n * k

if __name__ == '__main__':
    nk = input().split()
    n = nk[0]
    k = int(nk[1])
    result = superDigit(n, k)
    print(result)

## Instruction:
Implement shortcut to compute initial p super digit

## Code After:
def super_digit(n, k):
    digits = map(int, list(n))
    return get_super_digit(str(sum(digits) * k))

def get_super_digit(p):
    if len(p) == 1:
        return int(p)
    else:
        digits = map(int, list(p))
        return get_super_digit(str(sum(digits)))

if __name__ == '__main__':
    nk = input().split()
    n = nk[0]
    k = int(nk[1])
    result = super_digit(n, k)
    print(result)
# ... existing code ...
def super_digit(n, k):
    digits = map(int, list(n))
    return get_super_digit(str(sum(digits) * k))

def get_super_digit(p):
# ... modified code ...
        return get_super_digit(str(sum(digits)))

if __name__ == '__main__':
    nk = input().split()
...
    n = nk[0]
    k = int(nk[1])
    result = super_digit(n, k)
    print(result)
# ... rest of the code ...
2377f500d4667623da9a2921c62862b00d7f404c
school/frontend/views.py
school/frontend/views.py
from flask import Blueprint, render_template, url_for, redirect, flash
from flask.ext.login import login_required, logout_user, current_user, login_user

from .forms import LoginForm
from school.config import FLASH_SUCCESS, FLASH_INFO, FLASH_WARNING

frontend = Blueprint('frontend', __name__)


@frontend.route('/login', methods=["GET", "POST"])
def login():
    if current_user.is_authenticated():  # user is already logged in
        flash("You are already logged in", FLASH_WARNING)
        return redirect(url_for('user.index'))

    form = LoginForm()
    if form.validate_on_submit():
        flash('Successfully logged in as %s' % form.user.username, FLASH_SUCCESS)
        login_user(form.user)

        return form.redirect("user.index")

    return render_template('frontend/index.html', form=form)


@frontend.route('/logout')
@login_required
def logout():
    logout_user()
    flash('You have been logged out.', FLASH_INFO)

    return redirect(url_for("frontend.login"))
from flask import Blueprint, render_template, url_for, redirect, flash
from flask.ext.login import login_required, logout_user, current_user, login_user

from .forms import LoginForm
from school.config import FLASH_SUCCESS, FLASH_INFO, FLASH_WARNING

frontend = Blueprint('frontend', __name__)


@frontend.route('/login', methods=["GET", "POST"])
def login():
    if current_user.is_authenticated():  # user is already logged in
        flash("You are already logged in", FLASH_WARNING)
        return redirect(url_for('user.index'))

    form = LoginForm()
    if form.validate_on_submit():
        login_user(form.user)

        return form.redirect("user.index")

    return render_template('frontend/index.html', form=form)


@frontend.route('/logout')
@login_required
def logout():
    logout_user()
    flash('You have been logged out.', FLASH_INFO)

    return redirect(url_for("frontend.login"))
Remove flash success message when logging in.
Remove flash success message when logging in.
Python
mit
leyyin/university-SE,leyyin/university-SE,leyyin/university-SE
from flask import Blueprint, render_template, url_for, redirect, flash
from flask.ext.login import login_required, logout_user, current_user, login_user

from .forms import LoginForm
from school.config import FLASH_SUCCESS, FLASH_INFO, FLASH_WARNING

frontend = Blueprint('frontend', __name__)


@frontend.route('/login', methods=["GET", "POST"])
def login():
    if current_user.is_authenticated():  # user is already logged in
        flash("You are already logged in", FLASH_WARNING)
        return redirect(url_for('user.index'))

    form = LoginForm()
    if form.validate_on_submit():
-         flash('Successfully logged in as %s' % form.user.username, FLASH_SUCCESS)
        login_user(form.user)

        return form.redirect("user.index")

    return render_template('frontend/index.html', form=form)


@frontend.route('/logout')
@login_required
def logout():
    logout_user()
    flash('You have been logged out.', FLASH_INFO)

    return redirect(url_for("frontend.login"))
Remove flash success message when logging in.
## Code Before:
from flask import Blueprint, render_template, url_for, redirect, flash
from flask.ext.login import login_required, logout_user, current_user, login_user

from .forms import LoginForm
from school.config import FLASH_SUCCESS, FLASH_INFO, FLASH_WARNING

frontend = Blueprint('frontend', __name__)


@frontend.route('/login', methods=["GET", "POST"])
def login():
    if current_user.is_authenticated():  # user is already logged in
        flash("You are already logged in", FLASH_WARNING)
        return redirect(url_for('user.index'))

    form = LoginForm()
    if form.validate_on_submit():
        flash('Successfully logged in as %s' % form.user.username, FLASH_SUCCESS)
        login_user(form.user)

        return form.redirect("user.index")

    return render_template('frontend/index.html', form=form)


@frontend.route('/logout')
@login_required
def logout():
    logout_user()
    flash('You have been logged out.', FLASH_INFO)

    return redirect(url_for("frontend.login"))

## Instruction:
Remove flash success message when logging in.

## Code After:
from flask import Blueprint, render_template, url_for, redirect, flash
from flask.ext.login import login_required, logout_user, current_user, login_user

from .forms import LoginForm
from school.config import FLASH_SUCCESS, FLASH_INFO, FLASH_WARNING

frontend = Blueprint('frontend', __name__)


@frontend.route('/login', methods=["GET", "POST"])
def login():
    if current_user.is_authenticated():  # user is already logged in
        flash("You are already logged in", FLASH_WARNING)
        return redirect(url_for('user.index'))

    form = LoginForm()
    if form.validate_on_submit():
        login_user(form.user)

        return form.redirect("user.index")

    return render_template('frontend/index.html', form=form)


@frontend.route('/logout')
@login_required
def logout():
    logout_user()
    flash('You have been logged out.', FLASH_INFO)

    return redirect(url_for("frontend.login"))
# ... existing code ...
    form = LoginForm()
    if form.validate_on_submit():
        login_user(form.user)
# ... rest of the code ...
fb5c2e5df4f700fb19663bbe96e7aa2710e627ca
osprey/execute_dump.py
osprey/execute_dump.py
from __future__ import print_function, absolute_import, division

import csv
import json
from six.moves import cStringIO

from .config import Config
from .trials import Trial


def execute(args, parser):
    config = Config(args.config, verbose=False)
    session = config.trials()
    columns = Trial.__mapper__.columns

    if args.output == 'json':
        items = [curr.to_dict() for curr in session.query(Trial).all()]
        value = json.dumps(items)

    elif args.output == 'csv':
        buf = cStringIO()
        outcsv = csv.writer(buf)
        outcsv.writerow([column.name for column in columns])
        for curr in session.query(Trial).all():
            row = [getattr(curr, column.name) for column in columns]
            outcsv.writerow(row)
        value = buf.getvalue()

    print(value)
    return value
from __future__ import print_function, absolute_import, division

import csv
import json
from six.moves import cStringIO

from .config import Config
from .trials import Trial


def execute(args, parser):
    config = Config(args.config, verbose=False)
    session = config.trials()
    columns = Trial.__mapper__.columns

    if args.output == 'json':
        items = [curr.to_dict() for curr in session.query(Trial).all()]
        new_items = []
        # Instead of saving the parameters on their own nested dict,
        # save them along the rest of elements
        for item in items:
            parameters = item.pop('parameters')  # remove dict
            item.update(parameters)  # update original dict with the parameters
            new_items.append(item)
        value = json.dumps(new_items)

    elif args.output == 'csv':
        buf = cStringIO()
        outcsv = csv.writer(buf)
        outcsv.writerow([column.name for column in columns])
        for curr in session.query(Trial).all():
            row = [getattr(curr, column.name) for column in columns]
            outcsv.writerow(row)
        value = buf.getvalue()

    print(value)
    return value
Store hyperparameters with the other settings
Store hyperparameters with the other settings

Instead of storing them in their own 'parameters' directory.
Python
apache-2.0
msmbuilder/osprey,msultan/osprey,pandegroup/osprey,msultan/osprey,msmbuilder/osprey,pandegroup/osprey
from __future__ import print_function, absolute_import, division

import csv
import json
from six.moves import cStringIO

from .config import Config
from .trials import Trial


def execute(args, parser):
    config = Config(args.config, verbose=False)
    session = config.trials()
    columns = Trial.__mapper__.columns

    if args.output == 'json':
        items = [curr.to_dict() for curr in session.query(Trial).all()]
+         new_items = []
+         # Instead of saving the parameters on their own nested dict,
+         # save them along the rest of elements
+         for item in items:
+             parameters = item.pop('parameters')  # remove dict
+             item.update(parameters)  # update original dict with the parameters
+             new_items.append(item)
-         value = json.dumps(items)
+         value = json.dumps(new_items)

    elif args.output == 'csv':
        buf = cStringIO()
        outcsv = csv.writer(buf)
        outcsv.writerow([column.name for column in columns])
        for curr in session.query(Trial).all():
            row = [getattr(curr, column.name) for column in columns]
            outcsv.writerow(row)
        value = buf.getvalue()

    print(value)
    return value
Store hyperparameters with the other settings
## Code Before:
from __future__ import print_function, absolute_import, division

import csv
import json
from six.moves import cStringIO

from .config import Config
from .trials import Trial


def execute(args, parser):
    config = Config(args.config, verbose=False)
    session = config.trials()
    columns = Trial.__mapper__.columns

    if args.output == 'json':
        items = [curr.to_dict() for curr in session.query(Trial).all()]
        value = json.dumps(items)

    elif args.output == 'csv':
        buf = cStringIO()
        outcsv = csv.writer(buf)
        outcsv.writerow([column.name for column in columns])
        for curr in session.query(Trial).all():
            row = [getattr(curr, column.name) for column in columns]
            outcsv.writerow(row)
        value = buf.getvalue()

    print(value)
    return value

## Instruction:
Store hyperparameters with the other settings

## Code After:
from __future__ import print_function, absolute_import, division

import csv
import json
from six.moves import cStringIO

from .config import Config
from .trials import Trial


def execute(args, parser):
    config = Config(args.config, verbose=False)
    session = config.trials()
    columns = Trial.__mapper__.columns

    if args.output == 'json':
        items = [curr.to_dict() for curr in session.query(Trial).all()]
        new_items = []
        # Instead of saving the parameters on their own nested dict,
        # save them along the rest of elements
        for item in items:
            parameters = item.pop('parameters')  # remove dict
            item.update(parameters)  # update original dict with the parameters
            new_items.append(item)
        value = json.dumps(new_items)

    elif args.output == 'csv':
        buf = cStringIO()
        outcsv = csv.writer(buf)
        outcsv.writerow([column.name for column in columns])
        for curr in session.query(Trial).all():
            row = [getattr(curr, column.name) for column in columns]
            outcsv.writerow(row)
        value = buf.getvalue()

    print(value)
    return value
# ... existing code ...
    if args.output == 'json':
        items = [curr.to_dict() for curr in session.query(Trial).all()]
        new_items = []
        # Instead of saving the parameters on their own nested dict,
        # save them along the rest of elements
        for item in items:
            parameters = item.pop('parameters')  # remove dict
            item.update(parameters)  # update original dict with the parameters
            new_items.append(item)
        value = json.dumps(new_items)

    elif args.output == 'csv':
# ... rest of the code ...
bfecf498c30c08d8ede18fd587e192f0961c334c
invoke/run.py
invoke/run.py
from subprocess import PIPE

from .monkey import Popen
from .exceptions import Failure


class Result(object):
    def __init__(self, stdout=None, stderr=None, exited=None):
        self.exited = self.return_code = exited
        self.stdout = stdout
        self.stderr = stderr

    def __nonzero__(self):
        # Holy mismatch between name and implementation, Batman!
        return self.exited == 0


def run(command, warn=False):
    """
    Execute ``command`` in a local subprocess.

    By default, raises an exception if the subprocess terminates with a
    nonzero return code. This may be disabled by setting ``warn=True``.
    """
    process = Popen(command,
        shell=True,
        stdout=PIPE,
        stderr=PIPE
    )
    stdout, stderr = process.communicate()
    result = Result(stdout=stdout, stderr=stderr, exited=process.returncode)
    if not (result or warn):
        raise Failure(result)
    return result
from subprocess import PIPE

from .monkey import Popen
from .exceptions import Failure


class Result(object):
    def __init__(self, stdout=None, stderr=None, exited=None):
        self.exited = self.return_code = exited
        self.stdout = stdout
        self.stderr = stderr

    def __nonzero__(self):
        # Holy mismatch between name and implementation, Batman!
        return self.exited == 0

    def __str__(self):
        ret = ["Command exited with status %s." % self.exited]
        for x in ('stdout', 'stderr'):
            val = getattr(self, x)
            ret.append("""=== %s ===
%s
""" % (x, val.rstrip()) if val else "(no %s)" % x)
        return "\n".join(ret)


def run(command, warn=False):
    """
    Execute ``command`` in a local subprocess.

    By default, raises an exception if the subprocess terminates with a
    nonzero return code. This may be disabled by setting ``warn=True``.
    """
    process = Popen(command,
        shell=True,
        stdout=PIPE,
        stderr=PIPE
    )
    stdout, stderr = process.communicate()
    result = Result(stdout=stdout, stderr=stderr, exited=process.returncode)
    if not (result or warn):
        raise Failure(result)
    return result
Add semi-useful `__str__` for Result
Add semi-useful `__str__` for Result
Python
bsd-2-clause
pyinvoke/invoke,mkusz/invoke,singingwolfboy/invoke,tyewang/invoke,mattrobenolt/invoke,kejbaly2/invoke,sophacles/invoke,pfmoore/invoke,pyinvoke/invoke,mkusz/invoke,pfmoore/invoke,kejbaly2/invoke,alex/invoke,frol/invoke,mattrobenolt/invoke,frol/invoke
from subprocess import PIPE

from .monkey import Popen
from .exceptions import Failure


class Result(object):
    def __init__(self, stdout=None, stderr=None, exited=None):
        self.exited = self.return_code = exited
        self.stdout = stdout
        self.stderr = stderr

    def __nonzero__(self):
        # Holy mismatch between name and implementation, Batman!
        return self.exited == 0

+     def __str__(self):
+         ret = ["Command exited with status %s." % self.exited]
+         for x in ('stdout', 'stderr'):
+             val = getattr(self, x)
+             ret.append("""=== %s ===
+ %s
+ """ % (x, val.rstrip()) if val else "(no %s)" % x)
+         return "\n".join(ret)

def run(command, warn=False):
    """
    Execute ``command`` in a local subprocess.

    By default, raises an exception if the subprocess terminates with a
    nonzero return code. This may be disabled by setting ``warn=True``.
    """
    process = Popen(command,
        shell=True,
        stdout=PIPE,
        stderr=PIPE
    )
    stdout, stderr = process.communicate()
    result = Result(stdout=stdout, stderr=stderr, exited=process.returncode)
    if not (result or warn):
        raise Failure(result)
    return result
Add semi-useful `__str__` for Result
## Code Before:
from subprocess import PIPE

from .monkey import Popen
from .exceptions import Failure


class Result(object):
    def __init__(self, stdout=None, stderr=None, exited=None):
        self.exited = self.return_code = exited
        self.stdout = stdout
        self.stderr = stderr

    def __nonzero__(self):
        # Holy mismatch between name and implementation, Batman!
        return self.exited == 0


def run(command, warn=False):
    """
    Execute ``command`` in a local subprocess.

    By default, raises an exception if the subprocess terminates with a
    nonzero return code. This may be disabled by setting ``warn=True``.
    """
    process = Popen(command,
        shell=True,
        stdout=PIPE,
        stderr=PIPE
    )
    stdout, stderr = process.communicate()
    result = Result(stdout=stdout, stderr=stderr, exited=process.returncode)
    if not (result or warn):
        raise Failure(result)
    return result

## Instruction:
Add semi-useful `__str__` for Result

## Code After:
from subprocess import PIPE

from .monkey import Popen
from .exceptions import Failure


class Result(object):
    def __init__(self, stdout=None, stderr=None, exited=None):
        self.exited = self.return_code = exited
        self.stdout = stdout
        self.stderr = stderr

    def __nonzero__(self):
        # Holy mismatch between name and implementation, Batman!
        return self.exited == 0

    def __str__(self):
        ret = ["Command exited with status %s." % self.exited]
        for x in ('stdout', 'stderr'):
            val = getattr(self, x)
            ret.append("""=== %s ===
%s
""" % (x, val.rstrip()) if val else "(no %s)" % x)
        return "\n".join(ret)


def run(command, warn=False):
    """
    Execute ``command`` in a local subprocess.

    By default, raises an exception if the subprocess terminates with a
    nonzero return code. This may be disabled by setting ``warn=True``.
    """
    process = Popen(command,
        shell=True,
        stdout=PIPE,
        stderr=PIPE
    )
    stdout, stderr = process.communicate()
    result = Result(stdout=stdout, stderr=stderr, exited=process.returncode)
    if not (result or warn):
        raise Failure(result)
    return result
// ... existing code ...
        return self.exited == 0

    def __str__(self):
        ret = ["Command exited with status %s." % self.exited]
        for x in ('stdout', 'stderr'):
            val = getattr(self, x)
            ret.append("""=== %s ===
%s
""" % (x, val.rstrip()) if val else "(no %s)" % x)
        return "\n".join(ret)

def run(command, warn=False):
// ... rest of the code ...
898e087d67ba5f6f8af3f280d46c59edc0bb665e
modules/module_spotify.py
modules/module_spotify.py
import re
import urllib


def handle_url(bot, user, channel, url, msg):
    """Handle IMDB urls"""

    m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", url)
    if not m:
        return

    dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
    f = urllib.urlopen(dataurl)
    songinfo = f.read()
    f.close()

    artist, album, song = songinfo.split("/", 2)
    bot.say(channel, "[Spotify] Artist: %s - Album: %s - Song: %s" % (artist.strip(), album.strip(), song.strip()))
import re
import urllib


def handle_url(bot, user, channel, url, msg):
    """Handle IMDB urls"""

    m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", url)
    if not m:
        return

    dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
    f = urllib.urlopen(dataurl)
    songinfo = f.read()
    f.close()

    artist, album, song = songinfo.split("/", 2)
    bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
Change output format to a more reasonable one
Change output format to a more reasonable one

git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@143 dda364a1-ef19-0410-af65-756c83048fb2
Python
bsd-3-clause
EArmour/pyfibot,nigeljonez/newpyfibot,huqa/pyfibot,lepinkainen/pyfibot,rnyberg/pyfibot,lepinkainen/pyfibot,aapa/pyfibot,aapa/pyfibot,EArmour/pyfibot,rnyberg/pyfibot,huqa/pyfibot
import re
import urllib


def handle_url(bot, user, channel, url, msg):
    """Handle IMDB urls"""

    m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", url)
    if not m:
        return

    dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
    f = urllib.urlopen(dataurl)
    songinfo = f.read()
    f.close()

    artist, album, song = songinfo.split("/", 2)
-     bot.say(channel, "[Spotify] Artist: %s - Album: %s - Song: %s" % (artist.strip(), album.strip(), song.strip()))
+     bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
Change output format to a more reasonable one
## Code Before:
import re
import urllib


def handle_url(bot, user, channel, url, msg):
    """Handle IMDB urls"""

    m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", url)
    if not m:
        return

    dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
    f = urllib.urlopen(dataurl)
    songinfo = f.read()
    f.close()

    artist, album, song = songinfo.split("/", 2)
    bot.say(channel, "[Spotify] Artist: %s - Album: %s - Song: %s" % (artist.strip(), album.strip(), song.strip()))

## Instruction:
Change output format to a more reasonable one

## Code After:
import re
import urllib


def handle_url(bot, user, channel, url, msg):
    """Handle IMDB urls"""

    m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", url)
    if not m:
        return

    dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
    f = urllib.urlopen(dataurl)
    songinfo = f.read()
    f.close()

    artist, album, song = songinfo.split("/", 2)
    bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
// ... existing code ...
    artist, album, song = songinfo.split("/", 2)
    bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
// ... rest of the code ...
00cbac852e83eb1f3ddc03ed70ad32494f16fdbf
caslogging.py
caslogging.py
from config import config
import logging as root_logging

# Set up the logger
logger = root_logging.getLogger()
logger.setLevel(root_logging.INFO)

logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s')

logging_file_handler = root_logging.FileHandler(config['logging']['filename'])
logging_file_handler.setLevel(root_logging.INFO)
logging_file_handler.setFormatter(logger_format)
logger.addHandler(logging_file_handler)

logging_stream_handler = root_logging.StreamHandler()
logging_stream_handler.setLevel(root_logging.INFO)
logging_stream_handler.setFormatter(logger_format)
logger.addHandler(logging_stream_handler)

logging = root_logging
from config import config
import logging as root_logging

# Set up the logger
logger = root_logging.getLogger()
logger.setLevel(root_logging.INFO)

logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s', '%Y-%m-%d %H:%M:%S')

logging_file_handler = root_logging.FileHandler(config['logging_system']['filename'])
logging_file_handler.setLevel(root_logging.INFO)
logging_file_handler.setFormatter(logger_format)
logger.addHandler(logging_file_handler)

logging_stream_handler = root_logging.StreamHandler()
logging_stream_handler.setLevel(root_logging.INFO)
logging_stream_handler.setFormatter(logger_format)
logger.addHandler(logging_stream_handler)

logging = root_logging
Fix of the logging system exception
Fix of the logging system exception

Added a format to the date for the logging system: '%Y-%m-%d %H:%M:%S'.
Fixed an exception opening the logging file because the variable name was not written correctly.
Python
mit
bumper-app/bumper-bianca,bumper-app/bumper-bianca
from config import config
import logging as root_logging

# Set up the logger
logger = root_logging.getLogger()
logger.setLevel(root_logging.INFO)

- logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
+ logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s', '%Y-%m-%d %H:%M:%S')

- logging_file_handler = root_logging.FileHandler(config['logging']['filename'])
+ logging_file_handler = root_logging.FileHandler(config['logging_system']['filename'])
logging_file_handler.setLevel(root_logging.INFO)
logging_file_handler.setFormatter(logger_format)
logger.addHandler(logging_file_handler)

logging_stream_handler = root_logging.StreamHandler()
logging_stream_handler.setLevel(root_logging.INFO)
logging_stream_handler.setFormatter(logger_format)
logger.addHandler(logging_stream_handler)

logging = root_logging
Fix of the logging system exception
## Code Before:
from config import config
import logging as root_logging

# Set up the logger
logger = root_logging.getLogger()
logger.setLevel(root_logging.INFO)

logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s')

logging_file_handler = root_logging.FileHandler(config['logging']['filename'])
logging_file_handler.setLevel(root_logging.INFO)
logging_file_handler.setFormatter(logger_format)
logger.addHandler(logging_file_handler)

logging_stream_handler = root_logging.StreamHandler()
logging_stream_handler.setLevel(root_logging.INFO)
logging_stream_handler.setFormatter(logger_format)
logger.addHandler(logging_stream_handler)

logging = root_logging

## Instruction:
Fix of the logging system exception

## Code After:
from config import config
import logging as root_logging

# Set up the logger
logger = root_logging.getLogger()
logger.setLevel(root_logging.INFO)

logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s', '%Y-%m-%d %H:%M:%S')

logging_file_handler = root_logging.FileHandler(config['logging_system']['filename'])
logging_file_handler.setLevel(root_logging.INFO)
logging_file_handler.setFormatter(logger_format)
logger.addHandler(logging_file_handler)

logging_stream_handler = root_logging.StreamHandler()
logging_stream_handler.setLevel(root_logging.INFO)
logging_stream_handler.setFormatter(logger_format)
logger.addHandler(logging_stream_handler)

logging = root_logging
# ... existing code ...
logger.setLevel(root_logging.INFO)

logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s', '%Y-%m-%d %H:%M:%S')

logging_file_handler = root_logging.FileHandler(config['logging_system']['filename'])
logging_file_handler.setLevel(root_logging.INFO)
logging_file_handler.setFormatter(logger_format)
# ... rest of the code ...
caaa59ca23d7405ff16726d509e3c0d4e659baec
djstripe/migrations/0023_auto_20170307_0937.py
djstripe/migrations/0023_auto_20170307_0937.py
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('djstripe', '0022_fix_subscriber_delete'),
    ]

    operations = [
        migrations.AlterField(
            model_name='customer',
            name='subscriber',
            field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterUniqueTogether(
            name='customer',
            unique_together=set([('subscriber', 'livemode')]),
        ),
    ]
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models


DJSTRIPE_SUBSCRIBER_MODEL = getattr(settings, "DJSTRIPE_SUBSCRIBER_MODEL", settings.AUTH_USER_MODEL)


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('djstripe', '0022_fix_subscriber_delete'),
    ]

    operations = [
        migrations.AlterField(
            model_name='customer',
            name='subscriber',
            field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=DJSTRIPE_SUBSCRIBER_MODEL),
        ),
        migrations.AlterUniqueTogether(
            name='customer',
            unique_together=set([('subscriber', 'livemode')]),
        ),
    ]
Fix migration 0023 subscriber model reference
Fix migration 0023 subscriber model reference
Python
mit
pydanny/dj-stripe,jameshiew/dj-stripe,tkwon/dj-stripe,pydanny/dj-stripe,jleclanche/dj-stripe,jleclanche/dj-stripe,jameshiew/dj-stripe,tkwon/dj-stripe,dj-stripe/dj-stripe,kavdev/dj-stripe,kavdev/dj-stripe,dj-stripe/dj-stripe
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models
+
+
+ DJSTRIPE_SUBSCRIBER_MODEL = getattr(settings, "DJSTRIPE_SUBSCRIBER_MODEL", settings.AUTH_USER_MODEL)


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('djstripe', '0022_fix_subscriber_delete'),
    ]

    operations = [
        migrations.AlterField(
            model_name='customer',
            name='subscriber',
-             field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=settings.AUTH_USER_MODEL),
+             field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=DJSTRIPE_SUBSCRIBER_MODEL),
        ),
        migrations.AlterUniqueTogether(
            name='customer',
            unique_together=set([('subscriber', 'livemode')]),
        ),
    ]
Fix migration 0023 subscriber model reference
## Code Before:
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('djstripe', '0022_fix_subscriber_delete'),
    ]

    operations = [
        migrations.AlterField(
            model_name='customer',
            name='subscriber',
            field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterUniqueTogether(
            name='customer',
            unique_together=set([('subscriber', 'livemode')]),
        ),
    ]

## Instruction:
Fix migration 0023 subscriber model reference

## Code After:
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models


DJSTRIPE_SUBSCRIBER_MODEL = getattr(settings, "DJSTRIPE_SUBSCRIBER_MODEL", settings.AUTH_USER_MODEL)


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('djstripe', '0022_fix_subscriber_delete'),
    ]

    operations = [
        migrations.AlterField(
            model_name='customer',
            name='subscriber',
            field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=DJSTRIPE_SUBSCRIBER_MODEL),
        ),
        migrations.AlterUniqueTogether(
            name='customer',
            unique_together=set([('subscriber', 'livemode')]),
        ),
    ]
// ... existing code ...
from django.conf import settings
from django.db import migrations, models


DJSTRIPE_SUBSCRIBER_MODEL = getattr(settings, "DJSTRIPE_SUBSCRIBER_MODEL", settings.AUTH_USER_MODEL)
// ... modified code ...
            model_name='customer',
            name='subscriber',
            field=models.ForeignKey(null=True, on_delete=models.SET_NULL, related_name='djstripe_customers', to=DJSTRIPE_SUBSCRIBER_MODEL),
        ),
        migrations.AlterUniqueTogether(
// ... rest of the code ...
2a8a564fbd48fba25c4876ff3d4317152a1d647c
tests/basics/builtin_range.py
tests/basics/builtin_range.py
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])
print(range(1,4)[:])
print(range(1,4)[0:])
print(range(1,4)[1:])
print(range(1,4)[:-1])
print(range(7,-2,-4)[:])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")
Test slicing a range that does not start at zero.
tests: Test slicing a range that does not start at zero.
Python
mit
torwag/micropython,TDAbboud/micropython,dinau/micropython,dmazzella/micropython,pramasoul/micropython,adafruit/micropython,danicampora/micropython,misterdanb/micropython,trezor/micropython,misterdanb/micropython,redbear/micropython,noahwilliamsson/micropython,adafruit/circuitpython,alex-robbins/micropython,torwag/micropython,alex-robbins/micropython,mpalomer/micropython,ernesto-g/micropython,drrk/micropython,trezor/micropython,dinau/micropython,adamkh/micropython,pfalcon/micropython,matthewelse/micropython,HenrikSolver/micropython,jmarcelino/pycom-micropython,pramasoul/micropython,turbinenreiter/micropython,bvernoux/micropython,ernesto-g/micropython,matthewelse/micropython,HenrikSolver/micropython,jmarcelino/pycom-micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,pramasoul/micropython,adafruit/micropython,danicampora/micropython,misterdanb/micropython,trezor/micropython,dinau/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,kerneltask/micropython,selste/micropython,HenrikSolver/micropython,jmarcelino/pycom-micropython,mpalomer/micropython,ernesto-g/micropython,turbinenreiter/micropython,alex-robbins/micropython,torwag/micropython,alex-robbins/micropython,dxxb/micropython,alex-march/micropython,deshipu/micropython,MrSurly/micropython,adafruit/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,mianos/micropython,ernesto-g/micropython,matthewelse/micropython,tuc-osg/micropython,xuxiaoxin/micropython,mpalomer/micropython,cloudformdesign/micropython,deshipu/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,misterdanb/micropython,cloudformdesign/micropython,mianos/micropython,alex-robbins/micropython,xuxiaoxin/micropython,infinnovation/micropython,bvernoux/micropython,Peetz0r/micropython-esp32,hosaka/micropython,lowRISC/micropython,tuc-osg/micropython,pfalcon/micropython,xhat/micropython,dhylands/micropython,ganshun666/micropython,supergis/micropython,tralamazza/micropython,dxxb/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,kerneltask/micropython,infinnovation/micropython,praemdonck/micropython,cloudformdesign/micropython,deshipu/micropython,Timmenem/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,pramasoul/micropython,redbear/micropython,xhat/micropython,dxxb/micropython,dinau/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,mianos/micropython,feilongfl/micropython,ruffy91/micropython,micropython/micropython-esp32,SHA2017-badge/micropython-esp32,kerneltask/micropython,ryannathans/micropython,Timmenem/micropython,vitiral/micropython,chrisdearman/micropython,supergis/micropython,adamkh/micropython,lowRISC/micropython,MrSurly/micropython,dhylands/micropython,blazewicz/micropython,adamkh/micropython,toolmacher/micropython,omtinez/micropython,cwyark/micropython,tuc-osg/micropython,swegener/micropython,danicampora/micropython,Peetz0r/micropython-esp32,dxxb/micropython,xhat/micropython,toolmacher/micropython,ruffy91/micropython,henriknelson/micropython,mpalomer/micropython,oopy/micropython,danicampora/micropython,ChuckM/micropython,deshipu/micropython,EcmaXp/micropython,PappaPeppar/micropython,feilongfl/micropython,ganshun666/micropython,alex-march/micropython,hiway/micropython,trezor/micropython,mhoffma/micropython,dhylands/micropython,AriZuu/micropython,chrisdearman/micropython,puuu/micropython,swegener/micropython,drrk/micropython,mhoffma/micropython,HenrikSolver/micropython,emfcamp/micropython,pfalcon/micropython,turbinenreiter/micropython,AriZuu/micropython,blazewicz/micropython,hiway/micropython,hosaka/micropython,neilh10/micropython,adafruit/circuitpython,EcmaXp/micropython,hosaka/micropython,martinribelotta/micropython,hiway/micropython,feilongfl/micropython,xhat/micropython,pozetroninc/micropython,pfalcon/micropython,supergis/micropython,emfcamp/micropython,HenrikSolver/micropython,praemdonck/micropython,hiway/micropython,vitiral/micropython,oopy/micropython,chrisdearman/micropython,pozetroninc/micropython,noahwilliamsson/micropython,PappaPeppar/micropython,pramasoul/micropython,ryannathans/micropython,xuxiaoxin/micropython,blmorris/micropython,adafruit/micropython,blmorris/micropython,tobbad/micropython,ChuckM/micropython,selste/micropython,matthewelse/micropython,Timmenem/micropython,jmarcelino/pycom-micropython,PappaPeppar/micropython,lowRISC/micropython,EcmaXp/micropython,galenhz/micropython,MrSurly/micropython-esp32,deshipu/micropython,TDAbboud/micropython,matthewelse/micropython,HenrikSolver/micropython,AriZuu/micropython,pozetroninc/micropython,feilongfl/micropython,blazewicz/micropython,henriknelson/micropython,vitiral/micropython,SHA2017-badge/micropython-esp32,omtinez/micropython,bvernoux/micropython,tralamazza/micropython,adafruit/circuitpython,galenhz/micropython,jmarcelino/pycom-micropython,turbinenreiter/micropython,AriZuu/micropython,xhat/micropython,tobbad/micropython,TDAbboud/micropython,danicampora/micropython,ChuckM/micropython,infinnovation/micropython,tralamazza/micropython,martinribelotta/micropython,blmorris/micropython,MrSurly/micropython,noahwilliamsson/micropython,henriknelson/micropython,martinribelotta/micropython,redbear/micropython,puuu/micropython,ryannathans/micropython,ernesto-g/micropython,omtinez/micropython,adamkh/micropython,dxxb/micropython,EcmaXp/micropython,dmazzella/micropython,mianos/micropython,torwag/micropython,cwyark/micropython,drrk/micropython,danicampora/micropython,ruffy91/micropython,ernesto-g/micropython,TDAbboud/micropython,MrSurly/micropython-esp32,drrk/micropython,ganshun666/micropython,neilh10/micropython,MrSurly/micropython,alex-march/micropython,neilh10/micropython,adafruit/circuitpython,TDAbboud/micropython,jmarcelino/pycom-micropython,cwyark/micropython,ruffy91/micropython,praemdonck/micropython,swegener/micropython,cloudformdesign/micropython,Peetz0r/micropython-esp32,pfalcon/micropython,infinnovation/micropython,supergis/micropython,Peetz0r/micropython-esp32,blazewicz/micropython,dxxb/micropython,Timmenem/micropython,matthewelse/micropython,oopy/micropython,PappaPeppar/micropython,tuc-osg/micropython,trezor/micropython,praemdonck/micropython,ganshun666/micropython,lowRISC/micropython,oopy/micropython,tobbad/micropython,dinau/micropython,mianos/micropython,tralamazza/micropython,mpalomer/micropython,tobbad/micropython,dmazzella/micropython,praemdonck/micropython,redbear/micropython,alex-march/micropython,neilh10/micropython,galenhz/micropython,vitiral/micropython,toolmacher/micropython,neilh10/micropython,oopy/micropython,blazewicz/micropython,micropython/micropython-esp32,kerneltask/micropython,adafruit/micropython,mpalomer/micropython,emfcamp/micropython,alex-robbins/micropython,martinribelotta/micropython,emfcamp/micropython,adafruit/circuitpython,ryannathans/micropython,torwag/micropython,HenrikSolver/micropython,dinau/micropython
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])
+ print(range(1,4)[:])
+ print(range(1,4)[0:])
+ print(range(1,4)[1:])
+ print(range(1,4)[:-1])
+ print(range(7,-2,-4)[:])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")
Test slicing a range that does not start at zero.
## Code Before:
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")

## Instruction:
Test slicing a range that does not start at zero.

## Code After:
print(range(4))

# bool
print(bool(range(0)))
print(bool(range(10)))

# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))

# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])

# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])
print(range(1,4)[:])
print(range(1,4)[0:])
print(range(1,4)[1:])
print(range(1,4)[:-1])
print(range(7,-2,-4)[:])

# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)

# bad unary op
try:
    -range(1)
except TypeError:
    print("TypeError")

# bad subscription (can't store)
try:
    range(1)[0] = 1
except TypeError:
    print("TypeError")
# ... existing code ...
print(range(4)[1::2])
print(range(4)[1:-2:2])
print(range(1,4)[:])
print(range(1,4)[0:])
print(range(1,4)[1:])
print(range(1,4)[:-1])
print(range(7,-2,-4)[:])

# attrs
# ... rest of the code ...
8b5337878172df95400a708b096e012436f8a706
dags/main_summary.py
dags/main_summary.py
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator

default_args = {
    'owner': '[email protected]',
    'depends_on_past': False,
    'start_date': datetime(2016, 6, 27),
    'email': ['[email protected]', '[email protected]'],
    'email_on_failure': True,
    'email_on_retry': True,
    'retries': 2,
    'retry_delay': timedelta(minutes=30),
}

dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')

# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
                  bash_command="sleep 1800",
                  dag=dag)

t1 = EMRSparkOperator(task_id="main_summary",
                      job_name="Main Summary View",
                      execution_timeout=timedelta(hours=10),
                      instance_count=10,
                      env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"},
                      uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
                      dag=dag)

# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator

default_args = {
    'owner': '[email protected]',
    'depends_on_past': False,
    'start_date': datetime(2016, 6, 25),
    'email': ['[email protected]', '[email protected]'],
    'email_on_failure': True,
    'email_on_retry': True,
    'retries': 2,
    'retry_delay': timedelta(minutes=30),
}

dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily', max_active_runs=10)

# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
                  bash_command="sleep 1800",
                  dag=dag)

t1 = EMRSparkOperator(task_id="main_summary",
                      job_name="Main Summary View",
                      execution_timeout=timedelta(hours=10),
                      instance_count=10,
                      env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"},
                      uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
                      dag=dag)

# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
Prepare "Main Summary" job for backfill
Prepare "Main Summary" job for backfill Set the max number of active runs so we don't overwhelm the system, and rewind the start date by a couple of days to test that the scheduler does the right thing.
Python
mpl-2.0
opentrials/opentrials-airflow,opentrials/opentrials-airflow
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator

default_args = {
    'owner': '[email protected]',
    'depends_on_past': False,
-     'start_date': datetime(2016, 6, 27),
+     'start_date': datetime(2016, 6, 25),
    'email': ['[email protected]', '[email protected]'],
    'email_on_failure': True,
    'email_on_retry': True,
    'retries': 2,
    'retry_delay': timedelta(minutes=30),
}

- dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')
+ dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily', max_active_runs=10)

# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
                  bash_command="sleep 1800",
                  dag=dag)

t1 = EMRSparkOperator(task_id="main_summary",
                      job_name="Main Summary View",
                      execution_timeout=timedelta(hours=10),
                      instance_count=10,
-                       env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"},
+                       env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"},
                      uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
                      dag=dag)

# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
Prepare "Main Summary" job for backfill
## Code Before:
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator

default_args = {
    'owner': '[email protected]',
    'depends_on_past': False,
    'start_date': datetime(2016, 6, 27),
    'email': ['[email protected]', '[email protected]'],
    'email_on_failure': True,
    'email_on_retry': True,
    'retries': 2,
    'retry_delay': timedelta(minutes=30),
}

dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily')

# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
                  bash_command="sleep 1800",
                  dag=dag)

t1 = EMRSparkOperator(task_id="main_summary",
                      job_name="Main Summary View",
                      execution_timeout=timedelta(hours=10),
                      instance_count=10,
                      env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"},
                      uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
                      dag=dag)

# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)

## Instruction:
Prepare "Main Summary" job for backfill

## Code After:
from airflow import DAG
from datetime import datetime, timedelta
from operators.emr_spark_operator import EMRSparkOperator
from airflow.operators import BashOperator

default_args = {
    'owner': '[email protected]',
    'depends_on_past': False,
    'start_date': datetime(2016, 6, 25),
    'email': ['[email protected]', '[email protected]'],
    'email_on_failure': True,
    'email_on_retry': True,
    'retries': 2,
    'retry_delay': timedelta(minutes=30),
}

dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily', max_active_runs=10)

# Make sure all the data for the given day has arrived before running.
t0 = BashOperator(task_id="delayed_start",
                  bash_command="sleep 1800",
                  dag=dag)

t1 = EMRSparkOperator(task_id="main_summary",
                      job_name="Main Summary View",
                      execution_timeout=timedelta(hours=10),
                      instance_count=10,
                      env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"},
                      uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
                      dag=dag)

# Wait a little while after midnight to start for a given day.
t1.set_upstream(t0)
# ... existing code ...
    'owner': '[email protected]',
    'depends_on_past': False,
    'start_date': datetime(2016, 6, 25),
    'email': ['[email protected]', '[email protected]'],
    'email_on_failure': True,
# ... modified code ...
}

dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily', max_active_runs=10)

# Make sure all the data for the given day has arrived before running.
...
                      execution_timeout=timedelta(hours=10),
                      instance_count=10,
                      env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"},
                      uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh",
                      dag=dag)

# ... rest of the code ...
2d4382ae1cec44875e7bec2f16b8406879a0bac9
opentreemap/api/models.py
opentreemap/api/models.py
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division

import uuid
import base64
import os

from django.contrib.gis.db import models

from treemap.models import User


class APIAccessCredential(models.Model):
    access_key = models.CharField(max_length=100, null=False, blank=False)
    secret_key = models.CharField(max_length=256, null=False, blank=False)

    # If a user is specified then this credential
    # is always authorized as the given user
    #
    # If user is None this credential can access
    # any user's data if that user's username
    # and password are also provided
    user = models.ForeignKey(User, null=True)

    enabled = models.BooleanField(default=True)

    @classmethod
    def create(clz, user=None):
        secret_key = base64.urlsafe_b64encode(os.urandom(64))
        access_key = base64.urlsafe_b64encode(uuid.uuid4().bytes)\
            .replace('=', '')

        return APIAccessCredential.objects.create(
            user=user, access_key=access_key, secret_key=secret_key)
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division

import uuid
import base64
import os

from django.contrib.gis.db import models

from treemap.models import User


class APIAccessCredential(models.Model):
    access_key = models.CharField(max_length=100, null=False, blank=False)
    secret_key = models.CharField(max_length=256, null=False, blank=False)

    # If a user is specified then this credential
    # is always authorized as the given user
    #
    # If user is None this credential can access
    # any user's data if that user's username
    # and password are also provided
    user = models.ForeignKey(User, null=True)

    enabled = models.BooleanField(default=True)

    def __unicode__(self):
        return self.access_key

    @classmethod
    def create(clz, user=None):
        secret_key = base64.urlsafe_b64encode(os.urandom(64))
        access_key = base64.urlsafe_b64encode(uuid.uuid4().bytes)\
            .replace('=', '')

        return APIAccessCredential.objects.create(
            user=user, access_key=access_key, secret_key=secret_key)
Print debug-friendly repr of APIAccessCredential
Print debug-friendly repr of APIAccessCredential
Python
agpl-3.0
maurizi/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,recklessromeo/otm-core,recklessromeo/otm-core,RickMohr/otm-core,recklessromeo/otm-core,clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division

import uuid
import base64
import os

from django.contrib.gis.db import models

from treemap.models import User


class APIAccessCredential(models.Model):
    access_key = models.CharField(max_length=100, null=False, blank=False)
    secret_key = models.CharField(max_length=256, null=False, blank=False)

    # If a user is specified then this credential
    # is always authorized as the given user
    #
    # If user is None this credential can access
    # any user's data if that user's username
    # and password are also provided
    user = models.ForeignKey(User, null=True)

    enabled = models.BooleanField(default=True)

+     def __unicode__(self):
+         return self.access_key
+
    @classmethod
    def create(clz, user=None):
        secret_key = base64.urlsafe_b64encode(os.urandom(64))
        access_key = base64.urlsafe_b64encode(uuid.uuid4().bytes)\
            .replace('=', '')

        return APIAccessCredential.objects.create(
            user=user, access_key=access_key, secret_key=secret_key)
Print debug-friendly repr of APIAccessCredential
## Code Before:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division

import uuid
import base64
import os

from django.contrib.gis.db import models

from treemap.models import User


class APIAccessCredential(models.Model):
    access_key = models.CharField(max_length=100, null=False, blank=False)
    secret_key = models.CharField(max_length=256, null=False, blank=False)

    # If a user is specified then this credential
    # is always authorized as the given user
    #
    # If user is None this credential can access
    # any user's data if that user's username
    # and password are also provided
    user = models.ForeignKey(User, null=True)

    enabled = models.BooleanField(default=True)

    @classmethod
    def create(clz, user=None):
        secret_key = base64.urlsafe_b64encode(os.urandom(64))
        access_key = base64.urlsafe_b64encode(uuid.uuid4().bytes)\
            .replace('=', '')

        return APIAccessCredential.objects.create(
            user=user, access_key=access_key, secret_key=secret_key)

## Instruction:
Print debug-friendly repr of APIAccessCredential

## Code After:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division

import uuid
import base64
import os

from django.contrib.gis.db import models

from treemap.models import User


class APIAccessCredential(models.Model):
    access_key = models.CharField(max_length=100, null=False, blank=False)
    secret_key = models.CharField(max_length=256, null=False, blank=False)

    # If a user is specified then this credential
    # is always authorized as the given user
    #
    # If user is None this credential can access
    # any user's data if that user's username
    # and password are also provided
    user = models.ForeignKey(User, null=True)

    enabled = models.BooleanField(default=True)

    def __unicode__(self):
        return self.access_key

    @classmethod
    def create(clz, user=None):
        secret_key = base64.urlsafe_b64encode(os.urandom(64))
        access_key = base64.urlsafe_b64encode(uuid.uuid4().bytes)\
            .replace('=', '')

        return APIAccessCredential.objects.create(
            user=user, access_key=access_key, secret_key=secret_key)
// ... existing code ...
    enabled = models.BooleanField(default=True)

    def __unicode__(self):
        return self.access_key

    @classmethod
    def create(clz, user=None):
// ... rest of the code ...
f44630714ce1c20c88919a1ce8d9e4ad49ec9fde
nodeconductor/cloud/perms.py
nodeconductor/cloud/perms.py
from __future__ import unicode_literals

from django.contrib.auth import get_user_model

from nodeconductor.core.permissions import FilteredCollaboratorsPermissionLogic
from nodeconductor.structure.models import CustomerRole

User = get_user_model()

PERMISSION_LOGICS = (
    ('cloud.Cloud', FilteredCollaboratorsPermissionLogic(
        collaborators_query='customer__roles__permission_group__user',
        collaborators_filter={
            'roles__role_type': CustomerRole.OWNER,
        },

        any_permission=True,
    )),
)
from __future__ import unicode_literals

from django.contrib.auth import get_user_model

from nodeconductor.core.permissions import FilteredCollaboratorsPermissionLogic
from nodeconductor.structure.models import CustomerRole

User = get_user_model()

PERMISSION_LOGICS = (
    ('cloud.Cloud', FilteredCollaboratorsPermissionLogic(
        collaborators_query='customer__roles__permission_group__user',
        collaborators_filter={
            'customer__roles__role_type': CustomerRole.OWNER,
        },

        any_permission=True,
    )),
)
Fix permission path for customer role lookup
Fix permission path for customer role lookup
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
from __future__ import unicode_literals

from django.contrib.auth import get_user_model

from nodeconductor.core.permissions import FilteredCollaboratorsPermissionLogic
from nodeconductor.structure.models import CustomerRole


User = get_user_model()


PERMISSION_LOGICS = (
    ('cloud.Cloud', FilteredCollaboratorsPermissionLogic(
        collaborators_query='customer__roles__permission_group__user',
        collaborators_filter={
-             'roles__role_type': CustomerRole.OWNER,
+             'customer__roles__role_type': CustomerRole.OWNER,
        },
        any_permission=True,
    )),
)
Fix permission path for customer role lookup
## Code Before:
from __future__ import unicode_literals

from django.contrib.auth import get_user_model

from nodeconductor.core.permissions import FilteredCollaboratorsPermissionLogic
from nodeconductor.structure.models import CustomerRole


User = get_user_model()


PERMISSION_LOGICS = (
    ('cloud.Cloud', FilteredCollaboratorsPermissionLogic(
        collaborators_query='customer__roles__permission_group__user',
        collaborators_filter={
            'roles__role_type': CustomerRole.OWNER,
        },
        any_permission=True,
    )),
)

## Instruction:
Fix permission path for customer role lookup

## Code After:
from __future__ import unicode_literals

from django.contrib.auth import get_user_model

from nodeconductor.core.permissions import FilteredCollaboratorsPermissionLogic
from nodeconductor.structure.models import CustomerRole


User = get_user_model()


PERMISSION_LOGICS = (
    ('cloud.Cloud', FilteredCollaboratorsPermissionLogic(
        collaborators_query='customer__roles__permission_group__user',
        collaborators_filter={
            'customer__roles__role_type': CustomerRole.OWNER,
        },
        any_permission=True,
    )),
)
# ... existing code ...
        collaborators_query='customer__roles__permission_group__user',
        collaborators_filter={
            'customer__roles__role_type': CustomerRole.OWNER,
        },
# ... rest of the code ...
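The filter keys here are resolved relative to the model the permission logic is registered for (`cloud.Cloud`), so the bare `roles__role_type` pointed at a relation that does not exist on `Cloud`; prefixing it with `customer__` walks Cloud → Customer → roles. Roughly the query the corrected logic performs for a given user — a sketch assuming standard Django ORM semantics, not code from the project:

```
# Sketch of the equivalent ORM lookup (illustrative, not project code):
Cloud.objects.filter(
    customer__roles__permission_group__user=user,
    customer__roles__role_type=CustomerRole.OWNER,
)
```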
bbcbcefedcbff4cfd7a16cbfa904b42462f1ee88
python/ql/test/query-tests/Variables/unused/type_annotation_fp.py
python/ql/test/query-tests/Variables/unused/type_annotation_fp.py
def type_annotation(x):
    foo = 5
    if x:
        foo : int
        do_stuff_with(foo)
    else:
        foo : float
        do_other_stuff_with(foo)
def type_annotation(x):
    foo = 5
    if x:
        foo : int
        do_stuff_with(foo)
    else:
        foo : float
        do_other_stuff_with(foo)

def type_annotation_fn():
    # False negative: the value of `bar` is never used, but this is masked by the presence of the type annotation.
    bar = 5
    bar : int
Add false negative test case.
Python: Add false negative test case.
Python
mit
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
def type_annotation(x):
    foo = 5
    if x:
        foo : int
        do_stuff_with(foo)
    else:
        foo : float
        do_other_stuff_with(foo)
+
+ def type_annotation_fn():
+     # False negative: the value of `bar` is never used, but this is masked by the presence of the type annotation.
+     bar = 5
+     bar : int
+
Add false negative test case.
## Code Before:
def type_annotation(x):
    foo = 5
    if x:
        foo : int
        do_stuff_with(foo)
    else:
        foo : float
        do_other_stuff_with(foo)

## Instruction:
Add false negative test case.

## Code After:
def type_annotation(x):
    foo = 5
    if x:
        foo : int
        do_stuff_with(foo)
    else:
        foo : float
        do_other_stuff_with(foo)

def type_annotation_fn():
    # False negative: the value of `bar` is never used, but this is masked by the presence of the type annotation.
    bar = 5
    bar : int
# ... existing code ...
        foo : float
        do_other_stuff_with(foo)

def type_annotation_fn():
    # False negative: the value of `bar` is never used, but this is masked by the presence of the type annotation.
    bar = 5
    bar : int
# ... rest of the code ...
e76ca364ab979e309d34ff458ef2629145a52ce2
magnum/db/sqlalchemy/alembic/versions/a1136d335540_add_docker_storage_driver_column.py
magnum/db/sqlalchemy/alembic/versions/a1136d335540_add_docker_storage_driver_column.py
# revision identifiers, used by Alembic.
revision = 'a1136d335540'
down_revision = 'd072f58ab240'

from alembic import op
import sqlalchemy as sa

docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay',
                                     name='docker_storage_driver')


def upgrade():
    op.add_column('baymodel', sa.Column('docker_storage_driver',
                                        docker_storage_driver_enum,
                                        nullable=True))
# revision identifiers, used by Alembic.
revision = 'a1136d335540'
down_revision = 'd072f58ab240'

from alembic import op
import sqlalchemy as sa

docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay',
                                     name='docker_storage_driver')


def upgrade():
    docker_storage_driver_enum.create(op.get_bind(), checkfirst=True)
    op.add_column('baymodel', sa.Column('docker_storage_driver',
                                        docker_storage_driver_enum,
                                        nullable=True))
Fix for enum type docker_storage_driver
Fix for enum type docker_storage_driver

Create enum type "docker_storage_driver" for migration

This is fixing oslo_db.exception.DBError: (psycopg2.ProgrammingError)
type "docker_storage_driver" does not exist

Closes-Bug: #1609776
Change-Id: I92d427e90bd73b4114d8688d3761cabac450fc9d
Python
apache-2.0
openstack/magnum,openstack/magnum,ArchiFleKs/magnum,ArchiFleKs/magnum
# revision identifiers, used by Alembic.
revision = 'a1136d335540'
down_revision = 'd072f58ab240'

from alembic import op
import sqlalchemy as sa

docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay',
                                     name='docker_storage_driver')


def upgrade():
+     docker_storage_driver_enum.create(op.get_bind(), checkfirst=True)
    op.add_column('baymodel', sa.Column('docker_storage_driver',
                                        docker_storage_driver_enum,
                                        nullable=True))
Fix for enum type docker_storage_driver
## Code Before:
# revision identifiers, used by Alembic.
revision = 'a1136d335540'
down_revision = 'd072f58ab240'

from alembic import op
import sqlalchemy as sa

docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay',
                                     name='docker_storage_driver')


def upgrade():
    op.add_column('baymodel', sa.Column('docker_storage_driver',
                                        docker_storage_driver_enum,
                                        nullable=True))

## Instruction:
Fix for enum type docker_storage_driver

## Code After:
# revision identifiers, used by Alembic.
revision = 'a1136d335540'
down_revision = 'd072f58ab240'

from alembic import op
import sqlalchemy as sa

docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay',
                                     name='docker_storage_driver')


def upgrade():
    docker_storage_driver_enum.create(op.get_bind(), checkfirst=True)
    op.add_column('baymodel', sa.Column('docker_storage_driver',
                                        docker_storage_driver_enum,
                                        nullable=True))
# ... existing code ...

def upgrade():
    docker_storage_driver_enum.create(op.get_bind(), checkfirst=True)
    op.add_column('baymodel', sa.Column('docker_storage_driver',
                                        docker_storage_driver_enum,
# ... rest of the code ...
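PostgreSQL requires the enum type to exist before a column can reference it, and `checkfirst=True` makes `create()` emit `CREATE TYPE` only when the type is missing, so the migration can be re-run safely. A symmetric downgrade — not part of the commit above, just a sketch of the usual pattern — would drop the column first and then the type:

```
# Sketch of a matching downgrade (assumed, not shown in this record):
def downgrade():
    op.drop_column('baymodel', 'docker_storage_driver')
    docker_storage_driver_enum.drop(op.get_bind(), checkfirst=True)
```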
260cd3b96df3a4746560db0032d7b6042c55d7fc
integration-test/976-fractional-pois.py
integration-test/976-fractional-pois.py
assert_has_feature(
    15, 5242, 12664, 'pois',
    { 'id': 147689077, 'min_zoom': 15.68 })
assert_has_feature(
    15, 5242, 12664, 'pois',
    { 'id': 147689077, 'min_zoom': 15.68 })

# Test that source and min_zoom are set properly for boundaries, roads, transit, and water
assert_has_feature(
    5, 9, 12, 'boundaries',
    { 'min_zoom': 0 , 'id': 8024,
      'source': 'naturalearthdata.com',
      'name': 'New Jersey - Pennsylvania' })

assert_has_feature(
    5, 9, 12, 'roads',
    { 'min_zoom': 5 , 'id': 90,
      'source': 'naturalearthdata.com' })

# There is no transit data from Natural Earth

assert_has_feature(
    5, 9, 12, 'water',
    { 'min_zoom': 0 , 'id': 1144,
      'source': 'naturalearthdata.com',
      'name': 'John H. Kerr Reservoir' })

# https://www.openstreetmap.org/relation/224951
# https://www.openstreetmap.org/relation/61320
assert_has_feature(
    9, 150, 192, 'boundaries',
    { 'min_zoom': 8, 'id': -224951,
      'source': 'openstretmap.org',
      'name': 'New Jersey - New York' })

assert_has_feature(
    9, 150, 192, 'roads',
    { 'min_zoom': 8, 'sort_key': 381,
      'source': 'openstretmap.org',
      'kind': 'Major Road',
      'network': 'US:NJ:Hudson' })

assert_has_feature(
    9, 150, 192, 'transit',
    { 'min_zoom': 5, 'ref': '54-57',
      'source': 'openstretmap.org',
      'name': 'Vermonter' })

assert_has_feature(
    9, 150, 192, 'water',
    { 'min_zoom': 0, 'id': 10613,
      'source': 'openstretmapdata.com',
      'kind': 'ocean',
      'name': '' })
Add tests for source and min_zoom
Add tests for source and min_zoom
Python
mit
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
assert_has_feature(
    15, 5242, 12664, 'pois',
    { 'id': 147689077, 'min_zoom': 15.68 })

+ # Test that source and min_zoom are set properly for boundaries, roads, transit, and water
+ assert_has_feature(
+     5, 9, 12, 'boundaries',
+     { 'min_zoom': 0 , 'id': 8024,
+       'source': 'naturalearthdata.com',
+       'name': 'New Jersey - Pennsylvania' })
+
+ assert_has_feature(
+     5, 9, 12, 'roads',
+     { 'min_zoom': 5 , 'id': 90,
+       'source': 'naturalearthdata.com' })
+
+ # There is no transit data from Natural Earth
+
+ assert_has_feature(
+     5, 9, 12, 'water',
+     { 'min_zoom': 0 , 'id': 1144,
+       'source': 'naturalearthdata.com',
+       'name': 'John H. Kerr Reservoir' })
+
+ # https://www.openstreetmap.org/relation/224951
+ # https://www.openstreetmap.org/relation/61320
+ assert_has_feature(
+     9, 150, 192, 'boundaries',
+     { 'min_zoom': 8, 'id': -224951,
+       'source': 'openstretmap.org',
+       'name': 'New Jersey - New York' })
+
+ assert_has_feature(
+     9, 150, 192, 'roads',
+     { 'min_zoom': 8, 'sort_key': 381,
+       'source': 'openstretmap.org',
+       'kind': 'Major Road',
+       'network': 'US:NJ:Hudson' })
+
+ assert_has_feature(
+     9, 150, 192, 'transit',
+     { 'min_zoom': 5, 'ref': '54-57',
+       'source': 'openstretmap.org',
+       'name': 'Vermonter' })
+
+ assert_has_feature(
+     9, 150, 192, 'water',
+     { 'min_zoom': 0, 'id': 10613,
+       'source': 'openstretmapdata.com',
+       'kind': 'ocean',
+       'name': '' })
+
Add tests for source and min_zoom
## Code Before:
assert_has_feature(
    15, 5242, 12664, 'pois',
    { 'id': 147689077, 'min_zoom': 15.68 })

## Instruction:
Add tests for source and min_zoom

## Code After:
assert_has_feature(
    15, 5242, 12664, 'pois',
    { 'id': 147689077, 'min_zoom': 15.68 })

# Test that source and min_zoom are set properly for boundaries, roads, transit, and water
assert_has_feature(
    5, 9, 12, 'boundaries',
    { 'min_zoom': 0 , 'id': 8024,
      'source': 'naturalearthdata.com',
      'name': 'New Jersey - Pennsylvania' })

assert_has_feature(
    5, 9, 12, 'roads',
    { 'min_zoom': 5 , 'id': 90,
      'source': 'naturalearthdata.com' })

# There is no transit data from Natural Earth

assert_has_feature(
    5, 9, 12, 'water',
    { 'min_zoom': 0 , 'id': 1144,
      'source': 'naturalearthdata.com',
      'name': 'John H. Kerr Reservoir' })

# https://www.openstreetmap.org/relation/224951
# https://www.openstreetmap.org/relation/61320
assert_has_feature(
    9, 150, 192, 'boundaries',
    { 'min_zoom': 8, 'id': -224951,
      'source': 'openstretmap.org',
      'name': 'New Jersey - New York' })

assert_has_feature(
    9, 150, 192, 'roads',
    { 'min_zoom': 8, 'sort_key': 381,
      'source': 'openstretmap.org',
      'kind': 'Major Road',
      'network': 'US:NJ:Hudson' })

assert_has_feature(
    9, 150, 192, 'transit',
    { 'min_zoom': 5, 'ref': '54-57',
      'source': 'openstretmap.org',
      'name': 'Vermonter' })

assert_has_feature(
    9, 150, 192, 'water',
    { 'min_zoom': 0, 'id': 10613,
      'source': 'openstretmapdata.com',
      'kind': 'ocean',
      'name': '' })
...
    15, 5242, 12664, 'pois',
    { 'id': 147689077, 'min_zoom': 15.68 })

# Test that source and min_zoom are set properly for boundaries, roads, transit, and water
assert_has_feature(
    5, 9, 12, 'boundaries',
    { 'min_zoom': 0 , 'id': 8024,
      'source': 'naturalearthdata.com',
      'name': 'New Jersey - Pennsylvania' })

assert_has_feature(
    5, 9, 12, 'roads',
    { 'min_zoom': 5 , 'id': 90,
      'source': 'naturalearthdata.com' })

# There is no transit data from Natural Earth

assert_has_feature(
    5, 9, 12, 'water',
    { 'min_zoom': 0 , 'id': 1144,
      'source': 'naturalearthdata.com',
      'name': 'John H. Kerr Reservoir' })

# https://www.openstreetmap.org/relation/224951
# https://www.openstreetmap.org/relation/61320
assert_has_feature(
    9, 150, 192, 'boundaries',
    { 'min_zoom': 8, 'id': -224951,
      'source': 'openstretmap.org',
      'name': 'New Jersey - New York' })

assert_has_feature(
    9, 150, 192, 'roads',
    { 'min_zoom': 8, 'sort_key': 381,
      'source': 'openstretmap.org',
      'kind': 'Major Road',
      'network': 'US:NJ:Hudson' })

assert_has_feature(
    9, 150, 192, 'transit',
    { 'min_zoom': 5, 'ref': '54-57',
      'source': 'openstretmap.org',
      'name': 'Vermonter' })

assert_has_feature(
    9, 150, 192, 'water',
    { 'min_zoom': 0, 'id': 10613,
      'source': 'openstretmapdata.com',
      'kind': 'ocean',
      'name': '' })
...
b0df06a29d4a235de86e51f4c6ff860fe5495d12
run-tests.py
run-tests.py
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)


if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print >> sys.stderr, "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print >> sys.stderr, "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)


if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
Test runner: fix line endings, print to stderr
Test runner: fix line endings, print to stderr
Python
mit
divtxt/binder
-
- import os, sys
-
- PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
- SRC_DIR = os.path.join(PROJECT_DIR, "src")
- TEST_DIR = os.path.join(PROJECT_DIR, "test")
-
- def runtestdir(subdir):
-     entries = os.listdir(subdir)
-     total = 0
-     errs = 0
-     for f in entries:
-         if not f.endswith(".py"):
-             continue
-         if not f.startswith("test_"):
-             continue
-         test_file = os.path.join(subdir, f)
-         print "FILE:", test_file
-         exit_code = os.system(sys.executable + " " + test_file)
-         total += 1
-         if exit_code != 0:
-             errs += 1
-     print "SUMMARY: %s -> %s total / %s error (%s)" \
-         % (subdir, total, errs, sys.executable)
-
-
- if __name__ == "__main__":
-     os.chdir(TEST_DIR)
-     os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
-     runtestdir("bindertest")
-
+ import os, sys
+
+ PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
+ SRC_DIR = os.path.join(PROJECT_DIR, "src")
+ TEST_DIR = os.path.join(PROJECT_DIR, "test")
+
+ def runtestdir(subdir):
+     entries = os.listdir(subdir)
+     total = 0
+     errs = 0
+     for f in entries:
+         if not f.endswith(".py"):
+             continue
+         if not f.startswith("test_"):
+             continue
+         test_file = os.path.join(subdir, f)
+         print >> sys.stderr, "FILE:", test_file
+         exit_code = os.system(sys.executable + " " + test_file)
+         total += 1
+         if exit_code != 0:
+             errs += 1
+     print >> sys.stderr, "SUMMARY: %s -> %s total / %s error (%s)" \
+         % (subdir, total, errs, sys.executable)
+
+
+ if __name__ == "__main__":
+     os.chdir(TEST_DIR)
+     os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
+     runtestdir("bindertest")
+
+
Test runner: fix line endings, print to stderr
## Code Before:
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)


if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")

## Instruction:
Test runner: fix line endings, print to stderr

## Code After:
import os, sys

PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")

def runtestdir(subdir):
    entries = os.listdir(subdir)
    total = 0
    errs = 0
    for f in entries:
        if not f.endswith(".py"):
            continue
        if not f.startswith("test_"):
            continue
        test_file = os.path.join(subdir, f)
        print >> sys.stderr, "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
        if exit_code != 0:
            errs += 1
    print >> sys.stderr, "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)


if __name__ == "__main__":
    os.chdir(TEST_DIR)
    os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
    runtestdir("bindertest")
# ... existing code ...
            continue
        test_file = os.path.join(subdir, f)
        print >> sys.stderr, "FILE:", test_file
        exit_code = os.system(sys.executable + " " + test_file)
        total += 1
# ... modified code ...
        if exit_code != 0:
            errs += 1
    print >> sys.stderr, "SUMMARY: %s -> %s total / %s error (%s)" \
        % (subdir, total, errs, sys.executable)
# ... rest of the code ...
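Routing the progress lines to stderr keeps stdout free for the tests' own output, while `os.system` still supplies the raw exit status used for the error count. On a modern Python the same loop body would typically use `subprocess` — an alternative sketch, not what the commit does:

```
# Alternative sketch using subprocess (assumed modernization, not the commit):
import subprocess

exit_code = subprocess.call([sys.executable, test_file])
```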
c6b51b82fa0503a2696161374160c47297211c7c
qubs_data_api/urls.py
qubs_data_api/urls.py
from django.conf.urls import url, include
from django.contrib import admin

urlpatterns = [
    url(r'^climate/', include('climate.urls')),
    url(r'^herbarium/', include('herbarium.urls')),
    url(r'^api-auth/', include('rest_framework.urls')),
    url(r'^admin/', admin.site.urls),
]
from django.conf.urls import url, include
from django.contrib import admin

urlpatterns = [
    url(r'^', include('core.urls')),
    url(r'^climate/', include('climate.urls')),
    url(r'^herbarium/', include('herbarium.urls')),
    url(r'^api-auth/', include('rest_framework.urls')),
    url(r'^admin/', admin.site.urls),
]
Include Core URLS at the base URL path.
Include Core URLS at the base URL path.
Python
apache-2.0
qubs/data-centre,qubs/data-centre,qubs/climate-data-api,qubs/climate-data-api
from django.conf.urls import url, include
from django.contrib import admin

urlpatterns = [
+     url(r'^', include('core.urls')),
    url(r'^climate/', include('climate.urls')),
    url(r'^herbarium/', include('herbarium.urls')),
    url(r'^api-auth/', include('rest_framework.urls')),
    url(r'^admin/', admin.site.urls),
]
Include Core URLS at the base URL path.
## Code Before:
from django.conf.urls import url, include
from django.contrib import admin

urlpatterns = [
    url(r'^climate/', include('climate.urls')),
    url(r'^herbarium/', include('herbarium.urls')),
    url(r'^api-auth/', include('rest_framework.urls')),
    url(r'^admin/', admin.site.urls),
]

## Instruction:
Include Core URLS at the base URL path.

## Code After:
from django.conf.urls import url, include
from django.contrib import admin

urlpatterns = [
    url(r'^', include('core.urls')),
    url(r'^climate/', include('climate.urls')),
    url(r'^herbarium/', include('herbarium.urls')),
    url(r'^api-auth/', include('rest_framework.urls')),
    url(r'^admin/', admin.site.urls),
]
# ... existing code ...

urlpatterns = [
    url(r'^', include('core.urls')),
    url(r'^climate/', include('climate.urls')),
    url(r'^herbarium/', include('herbarium.urls')),
# ... rest of the code ...
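Mounting `core.urls` at the empty prefix makes its patterns the first ones tried; requests whose paths match nothing inside the include still fall through to the later entries. The record does not show `core/urls.py` itself, so the following is a purely hypothetical sketch of a minimal module it could resolve to:

```
# Hypothetical core/urls.py -- illustrative names only, not from the project.
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^$', views.index, name='index'),
]
```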
322d8f90f86c40a756716a79e7e5719196687ece
saic/paste/search_indexes.py
saic/paste/search_indexes.py
import datetime
from haystack.indexes import *
from haystack import site

from models import Paste, Commit


class CommitIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    commit = CharField(model_attr='commit')
    created = DateField(model_attr='created')
    user = CharField(model_attr='owner', null=True)


class PasteIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    paste = CharField(model_attr='paste')
    filename = CharField(model_attr='filename')
    language = CharField(model_attr='language')
    commit = CharField(model_attr='revision__commit')


site.register(Paste, PasteIndex)
site.register(Commit, CommitIndex)
import datetime
from haystack.indexes import *
from haystack import site

from models import Paste, Commit


class CommitIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    commit = CharField(model_attr='commit')
    created = DateField(model_attr='created')
    user = CharField(model_attr='owner', null=True)

    def index_queryset(self):
        return Commit.objects.all()


class PasteIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    paste = CharField(model_attr='paste')
    filename = CharField(model_attr='filename')
    language = CharField(model_attr='language')
    commit = CharField(model_attr='revision__commit')

    def index_queryset(self):
        return Paste.objects.all()


site.register(Paste, PasteIndex)
site.register(Commit, CommitIndex)
Update search index to look for all objects.
Update search index to look for all objects.
Python
bsd-3-clause
justinvh/gitpaste,GarrettHeel/quark-paste,GarrettHeel/quark-paste,justinvh/gitpaste,justinvh/gitpaste,justinvh/gitpaste,GarrettHeel/quark-paste
import datetime
from haystack.indexes import *
from haystack import site

from models import Paste, Commit


class CommitIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    commit = CharField(model_attr='commit')
    created = DateField(model_attr='created')
    user = CharField(model_attr='owner', null=True)

+     def index_queryset(self):
+         return Commit.objects.all()
+

class PasteIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    paste = CharField(model_attr='paste')
    filename = CharField(model_attr='filename')
    language = CharField(model_attr='language')
    commit = CharField(model_attr='revision__commit')

+     def index_queryset(self):
+         return Paste.objects.all()
+

site.register(Paste, PasteIndex)
site.register(Commit, CommitIndex)
Update search index to look for all objects.
## Code Before:
import datetime
from haystack.indexes import *
from haystack import site

from models import Paste, Commit


class CommitIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    commit = CharField(model_attr='commit')
    created = DateField(model_attr='created')
    user = CharField(model_attr='owner', null=True)


class PasteIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    paste = CharField(model_attr='paste')
    filename = CharField(model_attr='filename')
    language = CharField(model_attr='language')
    commit = CharField(model_attr='revision__commit')


site.register(Paste, PasteIndex)
site.register(Commit, CommitIndex)

## Instruction:
Update search index to look for all objects.

## Code After:
import datetime
from haystack.indexes import *
from haystack import site

from models import Paste, Commit


class CommitIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    commit = CharField(model_attr='commit')
    created = DateField(model_attr='created')
    user = CharField(model_attr='owner', null=True)

    def index_queryset(self):
        return Commit.objects.all()


class PasteIndex(RealTimeSearchIndex):
    text = CharField(document=True, use_template=True)
    paste = CharField(model_attr='paste')
    filename = CharField(model_attr='filename')
    language = CharField(model_attr='language')
    commit = CharField(model_attr='revision__commit')

    def index_queryset(self):
        return Paste.objects.all()


site.register(Paste, PasteIndex)
site.register(Commit, CommitIndex)
# ... existing code ...
    user = CharField(model_attr='owner', null=True)

    def index_queryset(self):
        return Commit.objects.all()


class PasteIndex(RealTimeSearchIndex):
# ... modified code ...
    commit = CharField(model_attr='revision__commit')

    def index_queryset(self):
        return Paste.objects.all()


site.register(Paste, PasteIndex)
# ... rest of the code ...
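`index_queryset` is the hook Haystack calls when it needs the set of objects eligible for indexing, so returning `Model.objects.all()` opts every row in. A sketch of the contract — illustrative only, not Haystack internals:

```
# Conceptual sketch: the backend walks whatever the index advertises.
def rebuild(index):
    for obj in index.index_queryset():
        ...  # each object is turned into a search document here
```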
ba10a22c47ec2f6a27ddbc1cbddbfe8ec31e9955
netdumplings/__init__.py
netdumplings/__init__.py
from .dumpling import Dumpling, DumplingDriver
from .dumplingchef import DumplingChef
from .dumplingeater import DumplingEater
from .dumplinghub import DumplingHub
from .dumplingkitchen import DumplingKitchen

from ._version import __version__


# Workaround to avoid F401 "imported but unused" linter errors.
(
    Dumpling,
    DumplingDriver,
    DumplingChef,
    DumplingEater,
    DumplingHub,
    DumplingKitchen,
    __version__,
)
from .dumpling import Dumpling, DumplingDriver
from .dumplingchef import DumplingChef
from .dumplingeater import DumplingEater
from .exceptions import (
    InvalidDumpling, InvalidDumplingPayload, NetDumplingsError,
)
from .dumplinghub import DumplingHub
from .dumplingkitchen import DumplingKitchen

from ._version import __version__


# Workaround to avoid F401 "imported but unused" linter errors.
(
    Dumpling,
    DumplingDriver,
    DumplingChef,
    DumplingEater,
    DumplingHub,
    DumplingKitchen,
    InvalidDumpling,
    InvalidDumplingPayload,
    NetDumplingsError,
    __version__,
)
Make exceptions available at top-level netdumplings module
Make exceptions available at top-level netdumplings module
Python
mit
mjoblin/netdumplings,mjoblin/netdumplings,mjoblin/netdumplings
from .dumpling import Dumpling, DumplingDriver
from .dumplingchef import DumplingChef
from .dumplingeater import DumplingEater
+ from .exceptions import (
+     InvalidDumpling, InvalidDumplingPayload, NetDumplingsError,
+ )
from .dumplinghub import DumplingHub
from .dumplingkitchen import DumplingKitchen

from ._version import __version__


# Workaround to avoid F401 "imported but unused" linter errors.
(
    Dumpling,
    DumplingDriver,
    DumplingChef,
    DumplingEater,
    DumplingHub,
    DumplingKitchen,
+     InvalidDumpling,
+     InvalidDumplingPayload,
+     NetDumplingsError,
    __version__,
)
Make exceptions available at top-level netdumplings module
## Code Before:
from .dumpling import Dumpling, DumplingDriver
from .dumplingchef import DumplingChef
from .dumplingeater import DumplingEater
from .dumplinghub import DumplingHub
from .dumplingkitchen import DumplingKitchen

from ._version import __version__


# Workaround to avoid F401 "imported but unused" linter errors.
(
    Dumpling,
    DumplingDriver,
    DumplingChef,
    DumplingEater,
    DumplingHub,
    DumplingKitchen,
    __version__,
)

## Instruction:
Make exceptions available at top-level netdumplings module

## Code After:
from .dumpling import Dumpling, DumplingDriver
from .dumplingchef import DumplingChef
from .dumplingeater import DumplingEater
from .exceptions import (
    InvalidDumpling, InvalidDumplingPayload, NetDumplingsError,
)
from .dumplinghub import DumplingHub
from .dumplingkitchen import DumplingKitchen

from ._version import __version__


# Workaround to avoid F401 "imported but unused" linter errors.
(
    Dumpling,
    DumplingDriver,
    DumplingChef,
    DumplingEater,
    DumplingHub,
    DumplingKitchen,
    InvalidDumpling,
    InvalidDumplingPayload,
    NetDumplingsError,
    __version__,
)
# ... existing code ...
from .dumplingchef import DumplingChef
from .dumplingeater import DumplingEater
from .exceptions import (
    InvalidDumpling, InvalidDumplingPayload, NetDumplingsError,
)
from .dumplinghub import DumplingHub
from .dumplingkitchen import DumplingKitchen
# ... modified code ...
    DumplingHub,
    DumplingKitchen,
    InvalidDumpling,
    InvalidDumplingPayload,
    NetDumplingsError,
    __version__,
)
# ... rest of the code ...
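Re-exporting the exception classes lets callers catch them from the package root instead of reaching into a submodule. A small usage sketch — the raise is illustrative only:

```
import netdumplings

try:
    raise netdumplings.InvalidDumpling('malformed payload')  # illustrative
except netdumplings.InvalidDumpling:
    pass  # now catchable without importing netdumplings.exceptions
```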
ee9b869f2bb43e00da7c208cc2cfc9641d631b1a
examples/canvas/repeat_texture.py
examples/canvas/repeat_texture.py
'''
Demonstrate repeating textures
==============================

This was a test to fix an issue with repeating texture and window reloading.
'''

from kivy.app import App
from kivy.uix.image import Image
from kivy.properties import ObjectProperty
from kivy.lang import Builder

kv = '''
FloatLayout:
    canvas.before:
        Color:
            rgb: 1, 1, 1
        Rectangle:
            pos: self.pos
            size: self.size
            texture: app.texture

    Label:
        text: '{} (try to resize the window)'.format(root.size)
'''


class RepeatTexture(App):

    texture = ObjectProperty()

    def build(self):
        self.texture = Image(source='mtexture1.png').texture
        self.texture.wrap = 'repeat'
        self.texture.uvsize = (8, 8)
        return Builder.load_string(kv)

RepeatTexture().run()
'''
Demonstrate repeating textures
==============================

This was a test to fix an issue with repeating texture and window reloading.
'''

from kivy.app import App
from kivy.uix.image import Image
from kivy.uix.label import Label
from kivy.properties import ObjectProperty, ListProperty
from kivy.lang import Builder

kv = '''
<LabelOnBackground>:
    canvas.before:
        Color:
            rgb: self.background
        Rectangle:
            pos: self.pos
            size: self.size

FloatLayout:
    canvas.before:
        Color:
            rgb: 1, 1, 1
        Rectangle:
            pos: self.pos
            size: self.size
            texture: app.texture

    LabelOnBackground:
        text: '{} (try to resize the window)'.format(root.size)
        color: (0.4, 1, 1, 1)
        background: (.3, .3, .3)
        pos_hint: {'center_x': .5, 'center_y': .5 }
        size_hint: None, None
        height: 30
        width: 250
'''


class LabelOnBackground(Label):
    background = ListProperty((0.2, 0.2, 0.2))


class RepeatTexture(App):

    texture = ObjectProperty()

    def build(self):
        self.texture = Image(source='mtexture1.png').texture
        self.texture.wrap = 'repeat'
        self.texture.uvsize = (8, 8)
        return Builder.load_string(kv)

RepeatTexture().run()
Add background color to size message.
Add background color to size message.

Added a colored background to the label because it kept getting lost in
the white 'K' field. Also changed the label color to cyan for readability.
Python
mit
jkankiewicz/kivy,bob-the-hamster/kivy,KeyWeeUsr/kivy,LogicalDash/kivy,ernstp/kivy,yoelk/kivy,jegger/kivy,LogicalDash/kivy,darkopevec/kivy,akshayaurora/kivy,gonzafirewall/kivy,rafalo1333/kivy,thezawad/kivy,bhargav2408/kivy,aron-bordin/kivy,Cheaterman/kivy,andnovar/kivy,MiyamotoAkira/kivy,youprofit/kivy,VinGarcia/kivy,el-ethan/kivy,xpndlabs/kivy,viralpandey/kivy,manashmndl/kivy,Farkal/kivy,el-ethan/kivy,xpndlabs/kivy,jkankiewicz/kivy,arcticshores/kivy,jffernandez/kivy,bionoid/kivy,matham/kivy,denys-duchier/kivy,xiaoyanit/kivy,MiyamotoAkira/kivy,adamkh/kivy,bliz937/kivy,xiaoyanit/kivy,jehutting/kivy,jffernandez/kivy,angryrancor/kivy,ernstp/kivy,MiyamotoAkira/kivy,Farkal/kivy,kivy/kivy,bhargav2408/kivy,Shyam10/kivy,adamkh/kivy,bob-the-hamster/kivy,jffernandez/kivy,rnixx/kivy,habibmasuro/kivy,aron-bordin/kivy,gonzafirewall/kivy,cbenhagen/kivy,arcticshores/kivy,dirkjot/kivy,gonzafirewall/kivy,habibmasuro/kivy,Ramalus/kivy,Shyam10/kivy,janssen/kivy,xpndlabs/kivy,denys-duchier/kivy,MiyamotoAkira/kivy,ernstp/kivy,bionoid/kivy,inclement/kivy,CuriousLearner/kivy,vitorio/kivy,jegger/kivy,manthansharma/kivy,janssen/kivy,jehutting/kivy,yoelk/kivy,inclement/kivy,vipulroxx/kivy,arlowhite/kivy,inclement/kivy,matham/kivy,Cheaterman/kivy,yoelk/kivy,cbenhagen/kivy,darkopevec/kivy,bionoid/kivy,Shyam10/kivy,jffernandez/kivy,darkopevec/kivy,adamkh/kivy,cbenhagen/kivy,angryrancor/kivy,matham/kivy,rafalo1333/kivy,jegger/kivy,autosportlabs/kivy,xiaoyanit/kivy,yoelk/kivy,dirkjot/kivy,thezawad/kivy,ernstp/kivy,CuriousLearner/kivy,habibmasuro/kivy,akshayaurora/kivy,Cheaterman/kivy,vipulroxx/kivy,bionoid/kivy,Cheaterman/kivy,jegger/kivy,autosportlabs/kivy,mSenyor/kivy,kived/kivy,Shyam10/kivy,dirkjot/kivy,vipulroxx/kivy,viralpandey/kivy,andnovar/kivy,andnovar/kivy,bliz937/kivy,manashmndl/kivy,Ramalus/kivy,kived/kivy,manashmndl/kivy,arcticshores/kivy,manthansharma/kivy,youprofit/kivy,el-ethan/kivy,akshayaurora/kivy,autosportlabs/kivy,tony/kivy,jkankiewicz/kivy,arcticshores/kivy,janssen/kivy,Farkal/kivy,angryrancor/kivy,VinGarcia/kivy,kivy/kivy,janssen/kivy,bliz937/kivy,tony/kivy,denys-duchier/kivy,mSenyor/kivy,CuriousLearner/kivy,manthansharma/kivy,vitorio/kivy,arlowhite/kivy,kivy/kivy,vipulroxx/kivy,denys-duchier/kivy,edubrunaldi/kivy,bob-the-hamster/kivy,dirkjot/kivy,viralpandey/kivy,adamkh/kivy,edubrunaldi/kivy,LogicalDash/kivy,manthansharma/kivy,darkopevec/kivy,jehutting/kivy,rnixx/kivy,aron-bordin/kivy,VinGarcia/kivy,KeyWeeUsr/kivy,rnixx/kivy,rafalo1333/kivy,matham/kivy,tony/kivy,Farkal/kivy,iamutkarshtiwari/kivy,angryrancor/kivy,iamutkarshtiwari/kivy,vitorio/kivy,thezawad/kivy,iamutkarshtiwari/kivy,aron-bordin/kivy,Ramalus/kivy,youprofit/kivy,arlowhite/kivy,KeyWeeUsr/kivy,jkankiewicz/kivy,KeyWeeUsr/kivy,mSenyor/kivy,LogicalDash/kivy,bob-the-hamster/kivy,gonzafirewall/kivy,bhargav2408/kivy,kived/kivy,edubrunaldi/kivy
'''
Demonstrate repeating textures
==============================

This was a test to fix an issue with repeating texture and window reloading.
'''

from kivy.app import App
from kivy.uix.image import Image
+ from kivy.uix.label import Label
- from kivy.properties import ObjectProperty
+ from kivy.properties import ObjectProperty, ListProperty
from kivy.lang import Builder

kv = '''
+ <LabelOnBackground>:
+     canvas.before:
+         Color:
+             rgb: self.background
+         Rectangle:
+             pos: self.pos
+             size: self.size
+
FloatLayout:
    canvas.before:
        Color:
            rgb: 1, 1, 1
        Rectangle:
            pos: self.pos
            size: self.size
            texture: app.texture

-     Label:
+     LabelOnBackground:
        text: '{} (try to resize the window)'.format(root.size)
+         color: (0.4, 1, 1, 1)
+         background: (.3, .3, .3)
+         pos_hint: {'center_x': .5, 'center_y': .5 }
+         size_hint: None, None
+         height: 30
+         width: 250
+ '''
+
+
+ class LabelOnBackground(Label):
+     background = ListProperty((0.2, 0.2, 0.2))
'''


class RepeatTexture(App):

    texture = ObjectProperty()

    def build(self):
        self.texture = Image(source='mtexture1.png').texture
        self.texture.wrap = 'repeat'
        self.texture.uvsize = (8, 8)
        return Builder.load_string(kv)

RepeatTexture().run()
Add background color to size message.
## Code Before:
'''
Demonstrate repeating textures
==============================

This was a test to fix an issue with repeating texture and window reloading.
'''

from kivy.app import App
from kivy.uix.image import Image
from kivy.properties import ObjectProperty
from kivy.lang import Builder

kv = '''
FloatLayout:
    canvas.before:
        Color:
            rgb: 1, 1, 1
        Rectangle:
            pos: self.pos
            size: self.size
            texture: app.texture

    Label:
        text: '{} (try to resize the window)'.format(root.size)
'''


class RepeatTexture(App):

    texture = ObjectProperty()

    def build(self):
        self.texture = Image(source='mtexture1.png').texture
        self.texture.wrap = 'repeat'
        self.texture.uvsize = (8, 8)
        return Builder.load_string(kv)

RepeatTexture().run()

## Instruction:
Add background color to size message.

## Code After:
'''
Demonstrate repeating textures
==============================

This was a test to fix an issue with repeating texture and window reloading.
'''

from kivy.app import App
from kivy.uix.image import Image
from kivy.uix.label import Label
from kivy.properties import ObjectProperty, ListProperty
from kivy.lang import Builder

kv = '''
<LabelOnBackground>:
    canvas.before:
        Color:
            rgb: self.background
        Rectangle:
            pos: self.pos
            size: self.size

FloatLayout:
    canvas.before:
        Color:
            rgb: 1, 1, 1
        Rectangle:
            pos: self.pos
            size: self.size
            texture: app.texture

    LabelOnBackground:
        text: '{} (try to resize the window)'.format(root.size)
        color: (0.4, 1, 1, 1)
        background: (.3, .3, .3)
        pos_hint: {'center_x': .5, 'center_y': .5 }
        size_hint: None, None
        height: 30
        width: 250
'''


class LabelOnBackground(Label):
    background = ListProperty((0.2, 0.2, 0.2))


class RepeatTexture(App):

    texture = ObjectProperty()

    def build(self):
        self.texture = Image(source='mtexture1.png').texture
        self.texture.wrap = 'repeat'
        self.texture.uvsize = (8, 8)
        return Builder.load_string(kv)

RepeatTexture().run()
# ... existing code ...
from kivy.app import App
from kivy.uix.image import Image
from kivy.uix.label import Label
from kivy.properties import ObjectProperty, ListProperty
from kivy.lang import Builder

kv = '''
<LabelOnBackground>:
    canvas.before:
        Color:
            rgb: self.background
        Rectangle:
            pos: self.pos
            size: self.size

FloatLayout:
    canvas.before:
# ... modified code ...
            texture: app.texture

    LabelOnBackground:
        text: '{} (try to resize the window)'.format(root.size)
        color: (0.4, 1, 1, 1)
        background: (.3, .3, .3)
        pos_hint: {'center_x': .5, 'center_y': .5 }
        size_hint: None, None
        height: 30
        width: 250
'''


class LabelOnBackground(Label):
    background = ListProperty((0.2, 0.2, 0.2))
# ... rest of the code ...
2b3406a46625fd5350200dcb6d2dc32224d3c609
warehouse/defaults.py
warehouse/defaults.py
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals


# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"

# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"

# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"

# The amount of time (in seconds) that synchronizing each project can take
# before timing out.
SYNCHRONIZATION_TIMEOUT = 60 * 15

# The type of Storage to use.
STORAGE = "stockpile.filesystem:HashedFileSystem"

# Options to pass into the stockpile storage backend
STORAGE_OPTIONS = {
    "location": "data",
    "hash_algorithm": "md5",
}
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals


# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"

# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"

# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"

# The type of Storage to use.
STORAGE = "stockpile.filesystem:HashedFileSystem"

# Options to pass into the stockpile storage backend
STORAGE_OPTIONS = {
    "location": "data",
    "hash_algorithm": "md5",
}
Remove a no longer used setting
Remove a no longer used setting

With the removal of eventlet there is no longer a mechanism for timing
out a synchronization.
Python
bsd-2-clause
davidfischer/warehouse
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals


# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"

# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"

# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"

- # The amount of time (in seconds) that synchronizing each project can take
- # before timing out.
- SYNCHRONIZATION_TIMEOUT = 60 * 15
-
# The type of Storage to use.
STORAGE = "stockpile.filesystem:HashedFileSystem"

# Options to pass into the stockpile storage backend
STORAGE_OPTIONS = {
    "location": "data",
    "hash_algorithm": "md5",
}
Remove a no longer used setting
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals


# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"

# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"

# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"

# The amount of time (in seconds) that synchronizing each project can take
# before timing out.
SYNCHRONIZATION_TIMEOUT = 60 * 15

# The type of Storage to use.
STORAGE = "stockpile.filesystem:HashedFileSystem"

# Options to pass into the stockpile storage backend
STORAGE_OPTIONS = {
    "location": "data",
    "hash_algorithm": "md5",
}

## Instruction:
Remove a no longer used setting

## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals


# The base domain name for this installation. Used to control linking to
# sub-domains.
SERVER_NAME = "warehouse.local"

# The URI for our PostgreSQL database.
SQLALCHEMY_DATABASE_URI = "postgres:///warehouse"

# The URI for our Redis database.
REDIS_URI = "redis://localhost:6379/0"

# The type of Storage to use.
STORAGE = "stockpile.filesystem:HashedFileSystem"

# Options to pass into the stockpile storage backend
STORAGE_OPTIONS = {
    "location": "data",
    "hash_algorithm": "md5",
}
# ... existing code ...
REDIS_URI = "redis://localhost:6379/0"

# The type of Storage to use.
STORAGE = "stockpile.filesystem:HashedFileSystem"
# ... rest of the code ...
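How these module-level constants are consumed is not shown in this record; a common pattern for a Flask-style application — an assumption here, not something the diff establishes — is to load the module as the default config object:

```
# Assumed loading pattern (illustrative, not from the warehouse codebase):
from flask import Flask

app = Flask(__name__)
app.config.from_object('warehouse.defaults')
```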
ae600fdf602d12f1a2f8082df49693117fba2596
test/test_cxx_imports.py
test/test_cxx_imports.py
def test_cxx_import():
    from microscopes.mixture.model import \
        state, fixed_state, \
        bind, bind_fixed, \
        initialize, initialize_fixed, \
        deserialize, deserialize_fixed
    assert state and fixed_state
    assert bind and bind_fixed
    assert initialize and initialize_fixed
    assert deserialize and deserialize_fixed
def test_cxx_import():
    from microscopes.mixture.model import \
        state, \
        bind, \
        initialize, \
        deserialize
    assert state
    assert bind
    assert initialize
    assert deserialize
Remove fixed references from test_cxx.py
Remove fixed references from test_cxx.py
Python
bsd-3-clause
datamicroscopes/mixturemodel,datamicroscopes/mixturemodel,datamicroscopes/mixturemodel
def test_cxx_import():
    from microscopes.mixture.model import \
-         state, fixed_state, \
-         bind, bind_fixed, \
-         initialize, initialize_fixed, \
-         deserialize, deserialize_fixed
-     assert state and fixed_state
-     assert bind and bind_fixed
-     assert initialize and initialize_fixed
-     assert deserialize and deserialize_fixed
+         state, \
+         bind, \
+         initialize, \
+         deserialize
+     assert state
+     assert bind
+     assert initialize
+     assert deserialize
Remove fixed references from test_cxx.py
## Code Before:
def test_cxx_import():
    from microscopes.mixture.model import \
        state, fixed_state, \
        bind, bind_fixed, \
        initialize, initialize_fixed, \
        deserialize, deserialize_fixed
    assert state and fixed_state
    assert bind and bind_fixed
    assert initialize and initialize_fixed
    assert deserialize and deserialize_fixed

## Instruction:
Remove fixed references from test_cxx.py

## Code After:
def test_cxx_import():
    from microscopes.mixture.model import \
        state, \
        bind, \
        initialize, \
        deserialize
    assert state
    assert bind
    assert initialize
    assert deserialize
# ... existing code ...
def test_cxx_import():
    from microscopes.mixture.model import \
        state, \
        bind, \
        initialize, \
        deserialize
    assert state
    assert bind
    assert initialize
    assert deserialize
# ... rest of the code ...
ffc1b8c83e32f4c2b5454a0ae71b9c30cc8e7596
toolz/tests/test_serialization.py
toolz/tests/test_serialization.py
from toolz import *
import pickle


def test_compose():
    f = compose(str, sum)
    g = pickle.loads(pickle.dumps(f))
    assert f((1, 2)) == g((1, 2))


def test_curry():
    f = curry(map)(str)
    g = pickle.loads(pickle.dumps(f))
    assert list(f((1, 2, 3))) == list(g((1, 2, 3)))


def test_juxt():
    f = juxt(str, int, bool)
    g = pickle.loads(pickle.dumps(f))
    assert f(1) == g(1)
    assert f.funcs == g.funcs
from toolz import *
import pickle


def test_compose():
    f = compose(str, sum)
    g = pickle.loads(pickle.dumps(f))
    assert f((1, 2)) == g((1, 2))


def test_curry():
    f = curry(map)(str)
    g = pickle.loads(pickle.dumps(f))
    assert list(f((1, 2, 3))) == list(g((1, 2, 3)))


def test_juxt():
    f = juxt(str, int, bool)
    g = pickle.loads(pickle.dumps(f))
    assert f(1) == g(1)
    assert f.funcs == g.funcs


def test_complement():
    f = complement(bool)
    assert f(True) is False
    assert f(False) is True
    g = pickle.loads(pickle.dumps(f))
    assert f(True) == g(True)
    assert f(False) == g(False)
Add serialization test for `complement`
Add serialization test for `complement`
Python
bsd-3-clause
pombredanne/toolz,simudream/toolz,machinelearningdeveloper/toolz,quantopian/toolz,jdmcbr/toolz,bartvm/toolz,jcrist/toolz,cpcloud/toolz,pombredanne/toolz,quantopian/toolz,simudream/toolz,machinelearningdeveloper/toolz,bartvm/toolz,llllllllll/toolz,jdmcbr/toolz,llllllllll/toolz,cpcloud/toolz,jcrist/toolz
from toolz import *
import pickle


def test_compose():
    f = compose(str, sum)
    g = pickle.loads(pickle.dumps(f))
    assert f((1, 2)) == g((1, 2))


def test_curry():
    f = curry(map)(str)
    g = pickle.loads(pickle.dumps(f))
    assert list(f((1, 2, 3))) == list(g((1, 2, 3)))


def test_juxt():
    f = juxt(str, int, bool)
    g = pickle.loads(pickle.dumps(f))
    assert f(1) == g(1)
    assert f.funcs == g.funcs
+
+
+ def test_complement():
+     f = complement(bool)
+     assert f(True) is False
+     assert f(False) is True
+     g = pickle.loads(pickle.dumps(f))
+     assert f(True) == g(True)
+     assert f(False) == g(False)
+
Add serialization test for `complement`
## Code Before:
from toolz import *
import pickle


def test_compose():
    f = compose(str, sum)
    g = pickle.loads(pickle.dumps(f))
    assert f((1, 2)) == g((1, 2))


def test_curry():
    f = curry(map)(str)
    g = pickle.loads(pickle.dumps(f))
    assert list(f((1, 2, 3))) == list(g((1, 2, 3)))


def test_juxt():
    f = juxt(str, int, bool)
    g = pickle.loads(pickle.dumps(f))
    assert f(1) == g(1)
    assert f.funcs == g.funcs

## Instruction:
Add serialization test for `complement`

## Code After:
from toolz import *
import pickle


def test_compose():
    f = compose(str, sum)
    g = pickle.loads(pickle.dumps(f))
    assert f((1, 2)) == g((1, 2))


def test_curry():
    f = curry(map)(str)
    g = pickle.loads(pickle.dumps(f))
    assert list(f((1, 2, 3))) == list(g((1, 2, 3)))


def test_juxt():
    f = juxt(str, int, bool)
    g = pickle.loads(pickle.dumps(f))
    assert f(1) == g(1)
    assert f.funcs == g.funcs


def test_complement():
    f = complement(bool)
    assert f(True) is False
    assert f(False) is True
    g = pickle.loads(pickle.dumps(f))
    assert f(True) == g(True)
    assert f(False) == g(False)
...
    assert f(1) == g(1)
    assert f.funcs == g.funcs


def test_complement():
    f = complement(bool)
    assert f(True) is False
    assert f(False) is True
    g = pickle.loads(pickle.dumps(f))
    assert f(True) == g(True)
    assert f(False) == g(False)
...
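`complement` wraps a predicate and returns its boolean negation, and the test above checks that the wrapper survives a pickle round trip. A small usage sketch, independent of the pickling concern:

```
from toolz import complement

is_falsy = complement(bool)
assert list(filter(is_falsy, [0, 1, '', 'x'])) == [0, '']
```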
69e6db7a4a28ff1f50bd4f12f550a2b65f05eb38
utils/dusk/__init__.py
utils/dusk/__init__.py
from .context import Context  # NOQA
from .command import *  # NOQA
from .command_holder import CommandHolder  # NOQA
from .constants import *  # NOQA
from .context import Context  # NOQA
from .command import *  # NOQA
from .command_holder import CommandHolder  # NOQA
from .constants import *  # NOQA

__version__ = "1.0.0"
Remove obsolete TODO and add version
Remove obsolete TODO and add version
Python
mit
awau/Amethyst,HexadecimalPython/Xeili
from .context import Context  # NOQA
from .command import *  # NOQA
from .command_holder import CommandHolder  # NOQA
from .constants import *  # NOQA

+ __version__ = "1.0.0"
Remove obsolete TODO and add version
## Code Before:
from .context import Context  # NOQA
from .command import *  # NOQA
from .command_holder import CommandHolder  # NOQA
from .constants import *  # NOQA

## Instruction:
Remove obsolete TODO and add version

## Code After:
from .context import Context  # NOQA
from .command import *  # NOQA
from .command_holder import CommandHolder  # NOQA
from .constants import *  # NOQA

__version__ = "1.0.0"
# ... existing code ...
from .command_holder import CommandHolder  # NOQA
from .constants import *  # NOQA

__version__ = "1.0.0"
# ... rest of the code ...
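With the dunder in place the version is introspectable the usual way; a tiny sketch where the import path is assumed from the file location in the repo:

```
import utils.dusk as dusk  # import path assumed from the repo layout

print(dusk.__version__)  # -> "1.0.0"
```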
a268f6c74806d5996d469dd84ab365b0cf830f96
OpenSSL/_util.py
OpenSSL/_util.py
from six import PY3, binary_type, text_type

from cryptography.hazmat.bindings.openssl.binding import Binding
binding = Binding()
ffi = binding.ffi
lib = binding.lib


def exception_from_error_queue(exceptionType):
    def text(charp):
        return native(ffi.string(charp))

    errors = []
    while True:
        error = lib.ERR_get_error()
        if error == 0:
            break
        errors.append((
            text(lib.ERR_lib_error_string(error)),
            text(lib.ERR_func_error_string(error)),
            text(lib.ERR_reason_error_string(error))))

    raise exceptionType(errors)


def native(s):
    """
    Convert :py:class:`bytes` or :py:class:`unicode` to the native
    :py:class:`str` type, using UTF-8 encoding if conversion is necessary.

    :raise UnicodeError: The input string is not UTF-8 decodeable.

    :raise TypeError: The input is neither :py:class:`bytes` nor
        :py:class:`unicode`.
    """
    if not isinstance(s, (binary_type, text_type)):
        raise TypeError("%r is neither bytes nor unicode" % s)
    if PY3:
        if isinstance(s, binary_type):
            return s.decode("utf-8")
    else:
        if isinstance(s, text_type):
            return s.encode("utf-8")
    return s


if PY3:
    def byte_string(s):
        return s.encode("charmap")
else:
    def byte_string(s):
        return s
from six import PY3, binary_type, text_type

from cryptography.hazmat.bindings.openssl.binding import Binding
binding = Binding()
ffi = binding.ffi
lib = binding.lib


def exception_from_error_queue(exceptionType):
    def text(charp):
        if not charp:
            return ""
        return native(ffi.string(charp))

    errors = []
    while True:
        error = lib.ERR_get_error()
        if error == 0:
            break
        errors.append((
            text(lib.ERR_lib_error_string(error)),
            text(lib.ERR_func_error_string(error)),
            text(lib.ERR_reason_error_string(error))))

    raise exceptionType(errors)


def native(s):
    """
    Convert :py:class:`bytes` or :py:class:`unicode` to the native
    :py:class:`str` type, using UTF-8 encoding if conversion is necessary.

    :raise UnicodeError: The input string is not UTF-8 decodeable.

    :raise TypeError: The input is neither :py:class:`bytes` nor
        :py:class:`unicode`.
    """
    if not isinstance(s, (binary_type, text_type)):
        raise TypeError("%r is neither bytes nor unicode" % s)
    if PY3:
        if isinstance(s, binary_type):
            return s.decode("utf-8")
    else:
        if isinstance(s, text_type):
            return s.encode("utf-8")
    return s


if PY3:
    def byte_string(s):
        return s.encode("charmap")
else:
    def byte_string(s):
        return s
Handle when an OpenSSL error doesn't contain a reason
Handle when an OpenSSL error doesn't contain a reason

(Or any other field)

You can reproduce the error by running:

```
treq.get('https://nile.ghdonline.org')
```

from within a twisted program (and doing the appropriate deferred stuff).
I'm unsure how to craft a unit test for this
Python
apache-2.0
mschmo/pyopenssl,daodaoliang/pyopenssl,mhils/pyopenssl,mhils/pyopenssl,pyca/pyopenssl,sorenh/pyopenssl,adamwolf/pyopenssl,reaperhulk/pyopenssl,aalba6675/pyopenssl,mitghi/pyopenssl,samv/pyopenssl,elitest/pyopenssl,r0ro/pyopenssl,r0ro/pyopenssl,reaperhulk/pyopenssl,kjav/pyopenssl,mitghi/pyopenssl,aalba6675/pyopenssl,hynek/pyopenssl,sholsapp/pyopenssl,hynek/pyopenssl
from six import PY3, binary_type, text_type

from cryptography.hazmat.bindings.openssl.binding import Binding
binding = Binding()
ffi = binding.ffi
lib = binding.lib


def exception_from_error_queue(exceptionType):
    def text(charp):
+         if not charp:
+             return ""
        return native(ffi.string(charp))

    errors = []
    while True:
        error = lib.ERR_get_error()
        if error == 0:
            break
        errors.append((
            text(lib.ERR_lib_error_string(error)),
            text(lib.ERR_func_error_string(error)),
            text(lib.ERR_reason_error_string(error))))

    raise exceptionType(errors)


def native(s):
    """
    Convert :py:class:`bytes` or :py:class:`unicode` to the native
    :py:class:`str` type, using UTF-8 encoding if conversion is necessary.

    :raise UnicodeError: The input string is not UTF-8 decodeable.

    :raise TypeError: The input is neither :py:class:`bytes` nor
        :py:class:`unicode`.
    """
    if not isinstance(s, (binary_type, text_type)):
        raise TypeError("%r is neither bytes nor unicode" % s)
    if PY3:
        if isinstance(s, binary_type):
            return s.decode("utf-8")
    else:
        if isinstance(s, text_type):
            return s.encode("utf-8")
    return s


if PY3:
    def byte_string(s):
        return s.encode("charmap")
else:
    def byte_string(s):
        return s
Handle when an OpenSSL error doesn't contain a reason
## Code Before:
from six import PY3, binary_type, text_type

from cryptography.hazmat.bindings.openssl.binding import Binding
binding = Binding()
ffi = binding.ffi
lib = binding.lib


def exception_from_error_queue(exceptionType):
    def text(charp):
        return native(ffi.string(charp))

    errors = []
    while True:
        error = lib.ERR_get_error()
        if error == 0:
            break
        errors.append((
            text(lib.ERR_lib_error_string(error)),
            text(lib.ERR_func_error_string(error)),
            text(lib.ERR_reason_error_string(error))))

    raise exceptionType(errors)


def native(s):
    """
    Convert :py:class:`bytes` or :py:class:`unicode` to the native
    :py:class:`str` type, using UTF-8 encoding if conversion is necessary.

    :raise UnicodeError: The input string is not UTF-8 decodeable.

    :raise TypeError: The input is neither :py:class:`bytes` nor
        :py:class:`unicode`.
    """
    if not isinstance(s, (binary_type, text_type)):
        raise TypeError("%r is neither bytes nor unicode" % s)
    if PY3:
        if isinstance(s, binary_type):
            return s.decode("utf-8")
    else:
        if isinstance(s, text_type):
            return s.encode("utf-8")
    return s


if PY3:
    def byte_string(s):
        return s.encode("charmap")
else:
    def byte_string(s):
        return s

## Instruction:
Handle when an OpenSSL error doesn't contain a reason

## Code After:
from six import PY3, binary_type, text_type

from cryptography.hazmat.bindings.openssl.binding import Binding
binding = Binding()
ffi = binding.ffi
lib = binding.lib


def exception_from_error_queue(exceptionType):
    def text(charp):
        if not charp:
            return ""
        return native(ffi.string(charp))

    errors = []
    while True:
        error = lib.ERR_get_error()
        if error == 0:
            break
        errors.append((
            text(lib.ERR_lib_error_string(error)),
            text(lib.ERR_func_error_string(error)),
            text(lib.ERR_reason_error_string(error))))

    raise exceptionType(errors)


def native(s):
    """
    Convert :py:class:`bytes` or :py:class:`unicode` to the native
    :py:class:`str` type, using UTF-8 encoding if conversion is necessary.

    :raise UnicodeError: The input string is not UTF-8 decodeable.

    :raise TypeError: The input is neither :py:class:`bytes` nor
        :py:class:`unicode`.
    """
    if not isinstance(s, (binary_type, text_type)):
        raise TypeError("%r is neither bytes nor unicode" % s)
    if PY3:
        if isinstance(s, binary_type):
            return s.decode("utf-8")
    else:
        if isinstance(s, text_type):
            return s.encode("utf-8")
    return s


if PY3:
    def byte_string(s):
        return s.encode("charmap")
else:
    def byte_string(s):
        return s
// ... existing code ...
def exception_from_error_queue(exceptionType):
    def text(charp):
        if not charp:
            return ""
        return native(ffi.string(charp))
// ... rest of the code ...
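The `ERR_*_error_string` functions return NULL when OpenSSL has no text for that field, and in cffi a NULL pointer is falsy while `ffi.string` refuses to dereference it — hence the early `return ""`. A sketch of the failure mode being guarded against:

```
from cffi import FFI

ffi = FFI()
charp = ffi.NULL
assert not charp  # NULL cdata pointers are falsy in cffi
# ffi.string(charp) would raise here, which is what text() now avoids
```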
3364747195f0f3d2711169fb92c250fc10823d82
default_settings.py
default_settings.py
import logging
import os

UV4 = os.path.join("C:","Keil","UV4","UV4.exe")
IARBUILD = os.path.join('C:','Program Files (x86)','IAR Systems','Embedded Workbench 7.0','common','bin','IarBuild.exe')

# Be able to locate project generator anywhere in a project
# By default it's tools/project_generator (2 folders deep from root)
PROJECT_ROOT= os.path.join('..','..')

if os.name == "posix":
    # Expects either arm-none-eabi to be installed here, or
    # even better, a symlink from /usr/local/arm-none-eabi to the most recent
    # version.
    gcc_bin_path = "/usr/local/arm-none-eabi/bin/"
elif os.name == "nt":
    gcc_bin_path = ""

try:
    from user_settings import *
except:
    pass
import logging
import os

UV4 = os.path.join("C:","Keil","UV4","UV4.exe")
IARBUILD = os.path.join('C:','Program Files (x86)','IAR Systems','Embedded Workbench 7.0','common','bin','IarBuild.exe')

# Be able to locate project generator anywhere in a project
# By default it's tools/project_generator (2 folders deep from root)
PROJECT_ROOT= os.path.join('..','..')

if os.name == "posix":
    # Expects either arm-none-eabi to be installed here, or
    # even better, a symlink from /usr/local/arm-none-eabi to the most recent
    # version.
    gcc_bin_path = "/usr/local/arm-none-eabi/bin/"
elif os.name == "nt":
    gcc_bin_path = ""

try:
    from user_settings import *
except:
    logging.info("Using default settings.")
Add message if you're using default settings
Add message if you're using default settings
Python
apache-2.0
0xc0170/valinor,sarahmarshy/project_generator,autopulated/valinor,ARMmbed/valinor,sg-/project_generator,ohagendorf/project_generator,molejar/project_generator,aethaniel/project_generator,0xc0170/project_generator,sg-/project_generator,project-generator/project_generator,hwfwgrp/project_generator
import logging
import os

UV4 = os.path.join("C:","Keil","UV4","UV4.exe")
IARBUILD = os.path.join('C:','Program Files (x86)','IAR Systems','Embedded Workbench 7.0','common','bin','IarBuild.exe')

# Be able to locate project generator anywhere in a project
# By default it's tools/project_generator (2 folders deep from root)
PROJECT_ROOT= os.path.join('..','..')

if os.name == "posix":
    # Expects either arm-none-eabi to be installed here, or
    # even better, a symlink from /usr/local/arm-none-eabi to the most recent
    # version.
    gcc_bin_path = "/usr/local/arm-none-eabi/bin/"
elif os.name == "nt":
    gcc_bin_path = ""

try:
    from user_settings import *
except:
-     pass
+     logging.info("Using default settings.")
Add message if you're using default settings
## Code Before:
import logging
import os

UV4 = os.path.join("C:","Keil","UV4","UV4.exe")
IARBUILD = os.path.join('C:','Program Files (x86)','IAR Systems','Embedded Workbench 7.0','common','bin','IarBuild.exe')

# Be able to locate project generator anywhere in a project
# By default it's tools/project_generator (2 folders deep from root)
PROJECT_ROOT= os.path.join('..','..')

if os.name == "posix":
    # Expects either arm-none-eabi to be installed here, or
    # even better, a symlink from /usr/local/arm-none-eabi to the most recent
    # version.
    gcc_bin_path = "/usr/local/arm-none-eabi/bin/"
elif os.name == "nt":
    gcc_bin_path = ""

try:
    from user_settings import *
except:
    pass

## Instruction:
Add message if you're using default settings

## Code After:
import logging
import os

UV4 = os.path.join("C:","Keil","UV4","UV4.exe")
IARBUILD = os.path.join('C:','Program Files (x86)','IAR Systems','Embedded Workbench 7.0','common','bin','IarBuild.exe')

# Be able to locate project generator anywhere in a project
# By default it's tools/project_generator (2 folders deep from root)
PROJECT_ROOT= os.path.join('..','..')

if os.name == "posix":
    # Expects either arm-none-eabi to be installed here, or
    # even better, a symlink from /usr/local/arm-none-eabi to the most recent
    # version.
    gcc_bin_path = "/usr/local/arm-none-eabi/bin/"
elif os.name == "nt":
    gcc_bin_path = ""

try:
    from user_settings import *
except:
    logging.info("Using default settings.")
# ... existing code ... from user_settings import * except: logging.info("Using default settings.") # ... rest of the code ...
37903904cd0b1a8c4a04811b4a10a16606f9d7b0
doc/jsdoc_conf.py
doc/jsdoc_conf.py
from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference"
from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, }
Add interlinks urls for doc and tiliado
Add interlinks urls for doc and tiliado Signed-off-by: Jiří Janoušek <[email protected]>
Python
bsd-2-clause
tiliado/nuvolaruntime,tiliado/nuvolaruntime,tiliado/nuvolaruntime,tiliado/nuvolaruntime
from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" + INTERLINKS = { + "doc": "../", + "tiliado": TILIADOWEB, + } +
Add interlinks urls for doc and tiliado
## Code Before: from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" ## Instruction: Add interlinks urls for doc and tiliado ## Code After: from __future__ import unicode_literals from common_conf import * SITEURL = ".." TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, }
# ... existing code ... TEMPLATE = "doc/theme/templates/jsdoc.html" TITLE = "NuvolaKit 3.0 JavaScript API Reference" INTERLINKS = { "doc": "../", "tiliado": TILIADOWEB, } # ... rest of the code ...
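A minimal standalone sketch of the override-with-fallback import pattern this record captures; the logging setup and the example default are illustrative, and it catches ImportError rather than the record's bare except so real errors inside user_settings.py stay visible.

import logging

logging.basicConfig(level=logging.INFO)

# Default that a local user_settings.py may override.
gcc_bin_path = "/usr/local/arm-none-eabi/bin/"

try:
    from user_settings import *  # noqa: F401,F403 -- optional local overrides
except ImportError:
    logging.info("Using default settings.")

print(gcc_bin_path)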
5c0bee77329f68ed0b2e3b576747886492007b8c
neovim/tabpage.py
neovim/tabpage.py
from util import RemoteMap class Tabpage(object): @property def windows(self): if not hasattr(self, '_windows'): self._windows = RemoteSequence(self, self.Window, lambda: self.get_windows()) return self._windows @property def vars(self): if not hasattr(self, '_vars'): self._vars = RemoteMap(lambda k: self.get_var(k), lambda k, v: self.set_var(k, v)) return self._vars @property def number(self): return self._handle @property def window(self): return self.get_window() @property def valid(self): return self.is_valid()
from util import RemoteMap, RemoteSequence class Tabpage(object): @property def windows(self): if not hasattr(self, '_windows'): self._windows = RemoteSequence(self, self._vim.Window, lambda: self.get_windows()) return self._windows @property def vars(self): if not hasattr(self, '_vars'): self._vars = RemoteMap(lambda k: self.get_var(k), lambda k, v: self.set_var(k, v)) return self._vars @property def number(self): return self._handle @property def window(self): return self.get_window() @property def valid(self): return self.is_valid()
Fix 'windows' property of Tabpage objects
Fix 'windows' property of Tabpage objects
Python
apache-2.0
bfredl/python-client,fwalch/python-client,Shougo/python-client,neovim/python-client,meitham/python-client,brcolow/python-client,traverseda/python-client,neovim/python-client,Shougo/python-client,meitham/python-client,starcraftman/python-client,brcolow/python-client,0x90sled/python-client,fwalch/python-client,zchee/python-client,justinmk/python-client,bfredl/python-client,justinmk/python-client,zchee/python-client,0x90sled/python-client,timeyyy/python-client,starcraftman/python-client,timeyyy/python-client,traverseda/python-client
- from util import RemoteMap + from util import RemoteMap, RemoteSequence class Tabpage(object): @property def windows(self): if not hasattr(self, '_windows'): self._windows = RemoteSequence(self, - self.Window, + self._vim.Window, lambda: self.get_windows()) return self._windows @property def vars(self): if not hasattr(self, '_vars'): self._vars = RemoteMap(lambda k: self.get_var(k), lambda k, v: self.set_var(k, v)) return self._vars @property def number(self): return self._handle @property def window(self): return self.get_window() @property def valid(self): return self.is_valid()
Fix 'windows' property of Tabpage objects
## Code Before: from util import RemoteMap class Tabpage(object): @property def windows(self): if not hasattr(self, '_windows'): self._windows = RemoteSequence(self, self.Window, lambda: self.get_windows()) return self._windows @property def vars(self): if not hasattr(self, '_vars'): self._vars = RemoteMap(lambda k: self.get_var(k), lambda k, v: self.set_var(k, v)) return self._vars @property def number(self): return self._handle @property def window(self): return self.get_window() @property def valid(self): return self.is_valid() ## Instruction: Fix 'windows' property of Tabpage objects ## Code After: from util import RemoteMap, RemoteSequence class Tabpage(object): @property def windows(self): if not hasattr(self, '_windows'): self._windows = RemoteSequence(self, self._vim.Window, lambda: self.get_windows()) return self._windows @property def vars(self): if not hasattr(self, '_vars'): self._vars = RemoteMap(lambda k: self.get_var(k), lambda k, v: self.set_var(k, v)) return self._vars @property def number(self): return self._handle @property def window(self): return self.get_window() @property def valid(self): return self.is_valid()
// ... existing code ... from util import RemoteMap, RemoteSequence class Tabpage(object): // ... modified code ... if not hasattr(self, '_windows'): self._windows = RemoteSequence(self, self._vim.Window, lambda: self.get_windows()) return self._windows // ... rest of the code ...
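The fix simply points the RemoteSequence factory at the Vim session's Window class (self._vim.Window) instead of a nonexistent self.Window attribute. A self-contained sketch of the same lazily cached property, using stand-in classes rather than the real neovim client:

class Window:
    pass

class Vim:
    # The session object owns the Window type, which is what the fix uses.
    Window = Window

class RemoteSequence:
    def __init__(self, owner, item_type, fetch):
        self._owner = owner
        self._item_type = item_type
        self._fetch = fetch

class Tabpage:
    def __init__(self, vim):
        self._vim = vim

    @property
    def windows(self):
        # Built once, on first access; self.Window here would raise AttributeError.
        if not hasattr(self, '_windows'):
            self._windows = RemoteSequence(self, self._vim.Window,
                                           lambda: [])
        return self._windows

tab = Tabpage(Vim())
assert tab.windows is tab.windows  # cached after the first access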
616e9727397853e8d8f8de5b2c040c99c91e4a50
gen_settings.py
gen_settings.py
import os settings = os.path.join(os.path.dirname(__file__),'lib','mapnik_settings.js') # this goes into a mapnik_settings.js file beside the C++ _mapnik.node settings_template = """ module.exports.paths = { 'fonts': %s, 'input_plugins': %s }; """ def write_mapnik_settings(fonts='undefined',input_plugins='undefined'): global settings_template if '__dirname' in fonts or '__dirname' in input_plugins: settings_template = "var path = require('path');\n" + settings_template open(settings,'w').write(settings_template % (fonts,input_plugins)) if __name__ == '__main__': settings_dict = {} # settings for fonts and input plugins settings_dict['input_plugins'] = '\'%s\'' % os.popen("mapnik-config --input-plugins").readline().strip() settings_dict['fonts'] = '\'%s\'' % os.popen("mapnik-config --fonts").readline().strip() write_mapnik_settings(**settings_dict)
import os settings = os.path.join(os.path.dirname(__file__),'lib','mapnik_settings.js') # this goes into a mapnik_settings.js file beside the C++ _mapnik.node settings_template = """ module.exports.paths = { 'fonts': %s, 'input_plugins': %s }; """ def write_mapnik_settings(fonts='undefined',input_plugins='undefined'): global settings_template if '__dirname' in fonts or '__dirname' in input_plugins: settings_template = "var path = require('path');\n" + settings_template open(settings,'w').write(settings_template % (fonts,input_plugins)) if __name__ == '__main__': settings_dict = {} # settings for fonts and input plugins if os.environ.has_key('MAPNIK_INPUT_PLUGINS_DIRECTORY'): settings_dict['input_plugins'] = os.environ['MAPNIK_INPUT_PLUGINS_DIRECTORY'] else: settings_dict['input_plugins'] = '\'%s\'' % os.popen("mapnik-config --input-plugins").readline().strip() if os.environ.has_key('MAPNIK_FONT_DIRECTORY'): settings_dict['fonts'] = os.environ['MAPNIK_FONT_DIRECTORY'] else: settings_dict['fonts'] = '\'%s\'' % os.popen("mapnik-config --fonts").readline().strip() write_mapnik_settings(**settings_dict)
Revert "stop reading fonts/input plugins from environ as we now have a working mapnik-config.bat on windows"
Revert "stop reading fonts/input plugins from environ as we now have a working mapnik-config.bat on windows" This reverts commit d87c71142ba7bcc0d99d84886f3534dea7617b0c.
Python
bsd-3-clause
mapnik/node-mapnik,langateam/node-mapnik,mojodna/node-mapnik,CartoDB/node-mapnik,CartoDB/node-mapnik,MaxSem/node-mapnik,gravitystorm/node-mapnik,tomhughes/node-mapnik,mojodna/node-mapnik,tomhughes/node-mapnik,CartoDB/node-mapnik,Uli1/node-mapnik,mojodna/node-mapnik,stefanklug/node-mapnik,CartoDB/node-mapnik,langateam/node-mapnik,gravitystorm/node-mapnik,Uli1/node-mapnik,Uli1/node-mapnik,MaxSem/node-mapnik,CartoDB/node-mapnik,mapnik/node-mapnik,tomhughes/node-mapnik,stefanklug/node-mapnik,mapnik/node-mapnik,mojodna/node-mapnik,langateam/node-mapnik,MaxSem/node-mapnik,tomhughes/node-mapnik,gravitystorm/node-mapnik,langateam/node-mapnik,gravitystorm/node-mapnik,Uli1/node-mapnik,stefanklug/node-mapnik,tomhughes/node-mapnik,mapnik/node-mapnik,stefanklug/node-mapnik,MaxSem/node-mapnik,mapnik/node-mapnik,langateam/node-mapnik
import os settings = os.path.join(os.path.dirname(__file__),'lib','mapnik_settings.js') # this goes into a mapnik_settings.js file beside the C++ _mapnik.node settings_template = """ module.exports.paths = { 'fonts': %s, 'input_plugins': %s }; """ def write_mapnik_settings(fonts='undefined',input_plugins='undefined'): global settings_template if '__dirname' in fonts or '__dirname' in input_plugins: settings_template = "var path = require('path');\n" + settings_template open(settings,'w').write(settings_template % (fonts,input_plugins)) if __name__ == '__main__': settings_dict = {} # settings for fonts and input plugins + if os.environ.has_key('MAPNIK_INPUT_PLUGINS_DIRECTORY'): + settings_dict['input_plugins'] = os.environ['MAPNIK_INPUT_PLUGINS_DIRECTORY'] + else: - settings_dict['input_plugins'] = '\'%s\'' % os.popen("mapnik-config --input-plugins").readline().strip() + settings_dict['input_plugins'] = '\'%s\'' % os.popen("mapnik-config --input-plugins").readline().strip() + if os.environ.has_key('MAPNIK_FONT_DIRECTORY'): + settings_dict['fonts'] = os.environ['MAPNIK_FONT_DIRECTORY'] + else: - settings_dict['fonts'] = '\'%s\'' % os.popen("mapnik-config --fonts").readline().strip() + settings_dict['fonts'] = '\'%s\'' % os.popen("mapnik-config --fonts").readline().strip() write_mapnik_settings(**settings_dict)
Revert "stop reading fonts/input plugins from environ as we now have a working mapnik-config.bat on windows"
## Code Before: import os settings = os.path.join(os.path.dirname(__file__),'lib','mapnik_settings.js') # this goes into a mapnik_settings.js file beside the C++ _mapnik.node settings_template = """ module.exports.paths = { 'fonts': %s, 'input_plugins': %s }; """ def write_mapnik_settings(fonts='undefined',input_plugins='undefined'): global settings_template if '__dirname' in fonts or '__dirname' in input_plugins: settings_template = "var path = require('path');\n" + settings_template open(settings,'w').write(settings_template % (fonts,input_plugins)) if __name__ == '__main__': settings_dict = {} # settings for fonts and input plugins settings_dict['input_plugins'] = '\'%s\'' % os.popen("mapnik-config --input-plugins").readline().strip() settings_dict['fonts'] = '\'%s\'' % os.popen("mapnik-config --fonts").readline().strip() write_mapnik_settings(**settings_dict) ## Instruction: Revert "stop reading fonts/input plugins from environ as we now have a working mapnik-config.bat on windows" ## Code After: import os settings = os.path.join(os.path.dirname(__file__),'lib','mapnik_settings.js') # this goes into a mapnik_settings.js file beside the C++ _mapnik.node settings_template = """ module.exports.paths = { 'fonts': %s, 'input_plugins': %s }; """ def write_mapnik_settings(fonts='undefined',input_plugins='undefined'): global settings_template if '__dirname' in fonts or '__dirname' in input_plugins: settings_template = "var path = require('path');\n" + settings_template open(settings,'w').write(settings_template % (fonts,input_plugins)) if __name__ == '__main__': settings_dict = {} # settings for fonts and input plugins if os.environ.has_key('MAPNIK_INPUT_PLUGINS_DIRECTORY'): settings_dict['input_plugins'] = os.environ['MAPNIK_INPUT_PLUGINS_DIRECTORY'] else: settings_dict['input_plugins'] = '\'%s\'' % os.popen("mapnik-config --input-plugins").readline().strip() if os.environ.has_key('MAPNIK_FONT_DIRECTORY'): settings_dict['fonts'] = os.environ['MAPNIK_FONT_DIRECTORY'] else: settings_dict['fonts'] = '\'%s\'' % os.popen("mapnik-config --fonts").readline().strip() write_mapnik_settings(**settings_dict)
... # settings for fonts and input plugins if os.environ.has_key('MAPNIK_INPUT_PLUGINS_DIRECTORY'): settings_dict['input_plugins'] = os.environ['MAPNIK_INPUT_PLUGINS_DIRECTORY'] else: settings_dict['input_plugins'] = '\'%s\'' % os.popen("mapnik-config --input-plugins").readline().strip() if os.environ.has_key('MAPNIK_FONT_DIRECTORY'): settings_dict['fonts'] = os.environ['MAPNIK_FONT_DIRECTORY'] else: settings_dict['fonts'] = '\'%s\'' % os.popen("mapnik-config --fonts").readline().strip() write_mapnik_settings(**settings_dict) ...
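The restored branches use dict.has_key, which exists only on Python 2. A Python 3 sketch of the same environment-first lookup (the function name and quoting are illustrative):

import os
import subprocess

def mapnik_setting(env_var, config_flag):
    # Prefer an explicit environment override, as the reverted code does.
    value = os.environ.get(env_var)
    if value is not None:
        return value
    # Otherwise ask mapnik-config, quoting the result like the original.
    out = subprocess.run(["mapnik-config", config_flag],
                         capture_output=True, text=True, check=True)
    return "'%s'" % out.stdout.strip()

# Example (assumes mapnik-config is on PATH):
# fonts = mapnik_setting("MAPNIK_FONT_DIRECTORY", "--fonts")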
3fec4855d53a5077762892295582601cc193d068
tests/scoring_engine/models/test_kb.py
tests/scoring_engine/models/test_kb.py
from scoring_engine.models.kb import KB from tests.scoring_engine.unit_test import UnitTest class TestKB(UnitTest): def test_init_property(self): kb = KB(name="task_ids", value="1,2,3,4,5,6") assert kb.id is None assert kb.name == 'task_ids' assert kb.value == '1,2,3,4,5,6' def test_basic_kb(self): kb = KB(name="task_ids", value="1,2,3,4,5,6") self.db.save(kb) assert kb.id is not None
from scoring_engine.models.kb import KB from tests.scoring_engine.unit_test import UnitTest class TestKB(UnitTest): def test_init_property(self): kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=100) assert kb.id is None assert kb.name == 'task_ids' assert kb.value == '1,2,3,4,5,6' assert kb.round_num == 100 def test_basic_kb(self): kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=50) self.db.save(kb) assert kb.id is not None
Update kb test to check for round_num
Update kb test to check for round_num

Signed-off-by: Brandon Myers <[email protected]>
Python
mit
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
from scoring_engine.models.kb import KB from tests.scoring_engine.unit_test import UnitTest class TestKB(UnitTest): def test_init_property(self): - kb = KB(name="task_ids", value="1,2,3,4,5,6") + kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=100) assert kb.id is None assert kb.name == 'task_ids' assert kb.value == '1,2,3,4,5,6' + assert kb.round_num == 100 def test_basic_kb(self): - kb = KB(name="task_ids", value="1,2,3,4,5,6") + kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=50) self.db.save(kb) assert kb.id is not None
Update kb test to check for round_num
## Code Before: from scoring_engine.models.kb import KB from tests.scoring_engine.unit_test import UnitTest class TestKB(UnitTest): def test_init_property(self): kb = KB(name="task_ids", value="1,2,3,4,5,6") assert kb.id is None assert kb.name == 'task_ids' assert kb.value == '1,2,3,4,5,6' def test_basic_kb(self): kb = KB(name="task_ids", value="1,2,3,4,5,6") self.db.save(kb) assert kb.id is not None ## Instruction: Update kb test to check for round_num ## Code After: from scoring_engine.models.kb import KB from tests.scoring_engine.unit_test import UnitTest class TestKB(UnitTest): def test_init_property(self): kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=100) assert kb.id is None assert kb.name == 'task_ids' assert kb.value == '1,2,3,4,5,6' assert kb.round_num == 100 def test_basic_kb(self): kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=50) self.db.save(kb) assert kb.id is not None
# ... existing code ... def test_init_property(self): kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=100) assert kb.id is None assert kb.name == 'task_ids' assert kb.value == '1,2,3,4,5,6' assert kb.round_num == 100 def test_basic_kb(self): kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=50) self.db.save(kb) assert kb.id is not None # ... rest of the code ...
3977b36760afa2407c5e98926a6c3c1f926f5493
x64/expand.py
x64/expand.py
import sys def expand(filename): for dir in ('.', '../common', '../anstests/'): try: f = open(dir + "/" + filename) except IOError: continue for line in f: line = line.replace('\r', '') if line.strip().startswith('#bye'): sys.exit(0) if line.strip().startswith('include '): expand(line.split()[1]) else: sys.stdout.write(line) print return assert 0, filename + 'not found' if __name__ == '__main__': for a in sys.argv[1:]: expand(a)
import sys def expand(filename): for dir in ('.', '../common', '../anstests/'): try: f = open(dir + "/" + filename) except IOError: continue for line in f: line = line.replace('\r', '') if line.strip().startswith('#bye'): sys.exit(0) if line.strip().startswith('include '): expand(line.split()[1]) else: sys.stdout.write(line) sys.stdout.write('\n') return assert 0, filename + 'not found' if __name__ == '__main__': for a in sys.argv[1:]: expand(a)
Fix missing newlines with Python3
Fix missing newlines with Python3
Python
bsd-3-clause
jamesbowman/swapforth,zuloloxi/swapforth,jamesbowman/swapforth,zuloloxi/swapforth,zuloloxi/swapforth,zuloloxi/swapforth,RGD2/swapforth,jamesbowman/swapforth,RGD2/swapforth,jamesbowman/swapforth,RGD2/swapforth,RGD2/swapforth
import sys def expand(filename): for dir in ('.', '../common', '../anstests/'): try: f = open(dir + "/" + filename) except IOError: continue for line in f: line = line.replace('\r', '') if line.strip().startswith('#bye'): sys.exit(0) if line.strip().startswith('include '): expand(line.split()[1]) else: sys.stdout.write(line) - print + sys.stdout.write('\n') return assert 0, filename + 'not found' if __name__ == '__main__': for a in sys.argv[1:]: expand(a)
Fix missing newlines with Python3
## Code Before: import sys def expand(filename): for dir in ('.', '../common', '../anstests/'): try: f = open(dir + "/" + filename) except IOError: continue for line in f: line = line.replace('\r', '') if line.strip().startswith('#bye'): sys.exit(0) if line.strip().startswith('include '): expand(line.split()[1]) else: sys.stdout.write(line) print return assert 0, filename + 'not found' if __name__ == '__main__': for a in sys.argv[1:]: expand(a) ## Instruction: Fix missing newlines with Python3 ## Code After: import sys def expand(filename): for dir in ('.', '../common', '../anstests/'): try: f = open(dir + "/" + filename) except IOError: continue for line in f: line = line.replace('\r', '') if line.strip().startswith('#bye'): sys.exit(0) if line.strip().startswith('include '): expand(line.split()[1]) else: sys.stdout.write(line) sys.stdout.write('\n') return assert 0, filename + 'not found' if __name__ == '__main__': for a in sys.argv[1:]: expand(a)
// ... existing code ... else: sys.stdout.write(line) sys.stdout.write('\n') return assert 0, filename + 'not found' // ... rest of the code ...
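The patch swaps the Python 2 print statement for an explicit write so the script also runs under Python 3; for reference, the print function produces the same newline. (Incidentally, the assert message concatenates filename and 'not found' without a space, so it prints as one word.)

import sys

sys.stdout.write('\n')       # what the fix uses
print()                      # Python 3 print function, newline included
print('', file=sys.stdout)   # explicit stream, identical output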
fea2cbcbc80d76a75f41fb81ea6ded93312bd11b
imhotep_rubocop/plugin.py
imhotep_rubocop/plugin.py
from imhotep.tools import Tool from collections import defaultdict import json import os class RubyLintLinter(Tool): def invoke(self, dirname, filenames=set()): retval = defaultdict(lambda: defaultdict(list)) if len(filenames) == 0: cmd = "find %s -name '*.rb' | xargs rubocop -f j" % dirname else: ruby_files = [] for filename in filenames: if '.rb' in filename: ruby_files.append("%s/%s" % (dirname, filename)) cmd = "rubocop -f j %s" % (" ".join(ruby_files)) try: output = json.loads(self.executor(cmd)) for linted_file in output['files']: # The path should be relative to the repo, # without a leading slash # example db/file.rb file_name = os.path.abspath(linted_file['path']) file_name = file_name.replace(dirname, "")[1:] for offence in linted_file['offences']: line_number = str(offence['location']['line']) retval[str(file_name)][line_number].append( str(offence['message'])) retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number])) except: pass return retval
from imhotep.tools import Tool from collections import defaultdict import json import os class RubyLintLinter(Tool): def invoke(self, dirname, filenames=set(), linter_configs=set()): retval = defaultdict(lambda: defaultdict(list)) config = '' for config_file in linter_configs: if 'rubocop' in config_file: config = "-c %s " % config_file if len(filenames) == 0: cmd = "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config) else: ruby_files = [] for filename in filenames: if '.rb' in filename: ruby_files.append("%s/%s" % (dirname, filename)) cmd = "rubocop %s -f j %s" % (config, " ".join(ruby_files)) try: output = json.loads(self.executor(cmd)) for linted_file in output['files']: # The path should be relative to the repo, # without a leading slash # example db/file.rb file_name = os.path.abspath(linted_file['path']) file_name = file_name.replace(dirname, "")[1:] for offence in linted_file['offences']: line_number = str(offence['location']['line']) retval[str(file_name)][line_number].append( str(offence['message'])) retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number])) except: pass return retval
Update to support config files that are passed to it.
Update to support config files that are passed to it.
Python
mit
scottjab/imhotep_rubocop
from imhotep.tools import Tool from collections import defaultdict import json import os class RubyLintLinter(Tool): - def invoke(self, dirname, filenames=set()): + def invoke(self, dirname, filenames=set(), linter_configs=set()): retval = defaultdict(lambda: defaultdict(list)) - + config = '' + for config_file in linter_configs: + if 'rubocop' in config_file: + config = "-c %s " % config_file if len(filenames) == 0: - cmd = "find %s -name '*.rb' | xargs rubocop -f j" % dirname + cmd = "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config) else: ruby_files = [] for filename in filenames: if '.rb' in filename: ruby_files.append("%s/%s" % (dirname, filename)) - cmd = "rubocop -f j %s" % (" ".join(ruby_files)) + cmd = "rubocop %s -f j %s" % (config, " ".join(ruby_files)) try: output = json.loads(self.executor(cmd)) for linted_file in output['files']: # The path should be relative to the repo, # without a leading slash # example db/file.rb file_name = os.path.abspath(linted_file['path']) file_name = file_name.replace(dirname, "")[1:] for offence in linted_file['offences']: line_number = str(offence['location']['line']) retval[str(file_name)][line_number].append( str(offence['message'])) retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number])) except: pass return retval
Update to support config files that are passed to it.
## Code Before: from imhotep.tools import Tool from collections import defaultdict import json import os class RubyLintLinter(Tool): def invoke(self, dirname, filenames=set()): retval = defaultdict(lambda: defaultdict(list)) if len(filenames) == 0: cmd = "find %s -name '*.rb' | xargs rubocop -f j" % dirname else: ruby_files = [] for filename in filenames: if '.rb' in filename: ruby_files.append("%s/%s" % (dirname, filename)) cmd = "rubocop -f j %s" % (" ".join(ruby_files)) try: output = json.loads(self.executor(cmd)) for linted_file in output['files']: # The path should be relative to the repo, # without a leading slash # example db/file.rb file_name = os.path.abspath(linted_file['path']) file_name = file_name.replace(dirname, "")[1:] for offence in linted_file['offences']: line_number = str(offence['location']['line']) retval[str(file_name)][line_number].append( str(offence['message'])) retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number])) except: pass return retval ## Instruction: Update to support config files that are passed to it. ## Code After: from imhotep.tools import Tool from collections import defaultdict import json import os class RubyLintLinter(Tool): def invoke(self, dirname, filenames=set(), linter_configs=set()): retval = defaultdict(lambda: defaultdict(list)) config = '' for config_file in linter_configs: if 'rubocop' in config_file: config = "-c %s " % config_file if len(filenames) == 0: cmd = "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config) else: ruby_files = [] for filename in filenames: if '.rb' in filename: ruby_files.append("%s/%s" % (dirname, filename)) cmd = "rubocop %s -f j %s" % (config, " ".join(ruby_files)) try: output = json.loads(self.executor(cmd)) for linted_file in output['files']: # The path should be relative to the repo, # without a leading slash # example db/file.rb file_name = os.path.abspath(linted_file['path']) file_name = file_name.replace(dirname, "")[1:] for offence in linted_file['offences']: line_number = str(offence['location']['line']) retval[str(file_name)][line_number].append( str(offence['message'])) retval[str(file_name)][line_number] = list(set(retval[str(file_name)][line_number])) except: pass return retval
// ... existing code ... class RubyLintLinter(Tool): def invoke(self, dirname, filenames=set(), linter_configs=set()): retval = defaultdict(lambda: defaultdict(list)) config = '' for config_file in linter_configs: if 'rubocop' in config_file: config = "-c %s " % config_file if len(filenames) == 0: cmd = "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config) else: ruby_files = [] // ... modified code ... ruby_files.append("%s/%s" % (dirname, filename)) cmd = "rubocop %s -f j %s" % (config, " ".join(ruby_files)) try: output = json.loads(self.executor(cmd)) // ... rest of the code ...
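A dependency-free sketch of the command construction the patched invoke() performs, pulled out as a plain function so it can be exercised directly; the paths and file names are illustrative:

def build_rubocop_cmd(dirname, filenames, linter_configs):
    # Mirror of the patched logic: pick up a rubocop config if one was
    # passed in, then lint everything or just the named .rb files.
    config = ''
    for config_file in linter_configs:
        if 'rubocop' in config_file:
            config = "-c %s " % config_file
    if not filenames:
        return "find %s -name '*.rb' | xargs rubocop %s -f j" % (dirname, config)
    ruby_files = ["%s/%s" % (dirname, f) for f in filenames if '.rb' in f]
    return "rubocop %s -f j %s" % (config, " ".join(ruby_files))

print(build_rubocop_cmd('/repo', {'app/models/user.rb'}, {'.rubocop.yml'}))
# rubocop -c .rubocop.yml  -f j /repo/app/models/user.rb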
653ae451e204905b8295e424ca86c20d60ee686c
settings/__init__.py
settings/__init__.py
import yaml import utils with open('settings.yml') as settings_file: YML = yaml.safe_load(settings_file) IMAGE = utils.Settings(YML['image']) SNAP = utils.Settings(YML['snap']) DROPBOX_TOKEN_FILE = "./dropbox.txt" WORKING_DIRECTORY = "/home/pi/time-lapse" IMAGES_DIRECTORY = WORKING_DIRECTORY + "/images" JOBS_DIRECTORY = WORKING_DIRECTORY + "/jobs"
import glob import yaml import utils with open('settings.yml') as settings_file: YML = yaml.safe_load(settings_file) IMAGE = utils.Settings(YML['image']) SNAP = utils.Settings(YML['snap']) DROPBOX_TOKEN_FILE = "./dropbox.txt" WORKING_DIRECTORY = "/home/pi/time-lapse" IMAGES_DIRECTORY = WORKING_DIRECTORY + "/images" JOBS_DIRECTORY = WORKING_DIRECTORY + "/jobs" class Setting(object): def __init__(self, settings_dict): self.__dict__.update(settings_dict) class Job(object): def __init__(self): self.image = None self.snap = None self.__load_job_data() def __parse_data_from_file(self, job_file): with open('settings.yml') as file_data: parsed_yaml = yaml.safe_load(file_data) return parsed_yaml def __load_job_data(self): job_files = glob.glob(JOBS_DIRECTORY + "/job_*.yml") try: # Take the first file if there are many parsed_yaml = self.__parse_data_from_file(job_files[0]) self.image = Setting(parsed_yaml['image']) self.snap = Setting(parsed_yaml['snap']) except (IndexError, KeyError): pass def is_defined(self): if not self.image: return False if not self.snap: return False return True
Make a class to manage job settings
Make a class to manage job settings
Python
mit
projectweekend/Pi-Camera-Time-Lapse,projectweekend/Pi-Camera-Time-Lapse
+ import glob import yaml import utils with open('settings.yml') as settings_file: YML = yaml.safe_load(settings_file) IMAGE = utils.Settings(YML['image']) SNAP = utils.Settings(YML['snap']) DROPBOX_TOKEN_FILE = "./dropbox.txt" WORKING_DIRECTORY = "/home/pi/time-lapse" IMAGES_DIRECTORY = WORKING_DIRECTORY + "/images" JOBS_DIRECTORY = WORKING_DIRECTORY + "/jobs" + + class Setting(object): + + def __init__(self, settings_dict): + self.__dict__.update(settings_dict) + + + class Job(object): + + def __init__(self): + self.image = None + self.snap = None + self.__load_job_data() + + def __parse_data_from_file(self, job_file): + with open('settings.yml') as file_data: + parsed_yaml = yaml.safe_load(file_data) + return parsed_yaml + + def __load_job_data(self): + job_files = glob.glob(JOBS_DIRECTORY + "/job_*.yml") + try: + # Take the first file if there are many + parsed_yaml = self.__parse_data_from_file(job_files[0]) + self.image = Setting(parsed_yaml['image']) + self.snap = Setting(parsed_yaml['snap']) + except (IndexError, KeyError): + pass + + def is_defined(self): + if not self.image: + return False + if not self.snap: + return False + return True +
Make a class to manage job settings
## Code Before: import yaml import utils with open('settings.yml') as settings_file: YML = yaml.safe_load(settings_file) IMAGE = utils.Settings(YML['image']) SNAP = utils.Settings(YML['snap']) DROPBOX_TOKEN_FILE = "./dropbox.txt" WORKING_DIRECTORY = "/home/pi/time-lapse" IMAGES_DIRECTORY = WORKING_DIRECTORY + "/images" JOBS_DIRECTORY = WORKING_DIRECTORY + "/jobs" ## Instruction: Make a class to manage job settings ## Code After: import glob import yaml import utils with open('settings.yml') as settings_file: YML = yaml.safe_load(settings_file) IMAGE = utils.Settings(YML['image']) SNAP = utils.Settings(YML['snap']) DROPBOX_TOKEN_FILE = "./dropbox.txt" WORKING_DIRECTORY = "/home/pi/time-lapse" IMAGES_DIRECTORY = WORKING_DIRECTORY + "/images" JOBS_DIRECTORY = WORKING_DIRECTORY + "/jobs" class Setting(object): def __init__(self, settings_dict): self.__dict__.update(settings_dict) class Job(object): def __init__(self): self.image = None self.snap = None self.__load_job_data() def __parse_data_from_file(self, job_file): with open('settings.yml') as file_data: parsed_yaml = yaml.safe_load(file_data) return parsed_yaml def __load_job_data(self): job_files = glob.glob(JOBS_DIRECTORY + "/job_*.yml") try: # Take the first file if there are many parsed_yaml = self.__parse_data_from_file(job_files[0]) self.image = Setting(parsed_yaml['image']) self.snap = Setting(parsed_yaml['snap']) except (IndexError, KeyError): pass def is_defined(self): if not self.image: return False if not self.snap: return False return True
// ... existing code ... import glob import yaml import utils // ... modified code ... IMAGES_DIRECTORY = WORKING_DIRECTORY + "/images" JOBS_DIRECTORY = WORKING_DIRECTORY + "/jobs" class Setting(object): def __init__(self, settings_dict): self.__dict__.update(settings_dict) class Job(object): def __init__(self): self.image = None self.snap = None self.__load_job_data() def __parse_data_from_file(self, job_file): with open('settings.yml') as file_data: parsed_yaml = yaml.safe_load(file_data) return parsed_yaml def __load_job_data(self): job_files = glob.glob(JOBS_DIRECTORY + "/job_*.yml") try: # Take the first file if there are many parsed_yaml = self.__parse_data_from_file(job_files[0]) self.image = Setting(parsed_yaml['image']) self.snap = Setting(parsed_yaml['snap']) except (IndexError, KeyError): pass def is_defined(self): if not self.image: return False if not self.snap: return False return True // ... rest of the code ...
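One detail worth flagging in the new class: __parse_data_from_file ignores its job_file argument and re-opens settings.yml. Assuming the intent was to parse the matched job file, a corrected sketch of that helper would be:

import yaml  # PyYAML, the record's own dependency

def parse_data_from_file(job_file):
    # Open the job file that was actually passed in, not settings.yml.
    with open(job_file) as file_data:
        return yaml.safe_load(file_data)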
cbd9c312b857565bfebc2d9f8452453ca333ba92
giles.py
giles.py
import giles.server server = giles.server.Server() server.instantiate() server.loop()
import giles.server import sys server = giles.server.Server() if len(sys.argv) == 2: port = int(sys.argv[1]) else: port = 9435 server.instantiate(port) server.loop()
Support choosing a port on the command line.
Support choosing a port on the command line.

Just put a number as the only CLI argument for now. Useful for me when testing changes now that I'm actually keeping the server running.
Python
agpl-3.0
sunfall/giles
import giles.server + import sys server = giles.server.Server() + + if len(sys.argv) == 2: + port = int(sys.argv[1]) + else: + port = 9435 - server.instantiate() + server.instantiate(port) server.loop()
Support choosing a port on the command line.
## Code Before: import giles.server server = giles.server.Server() server.instantiate() server.loop() ## Instruction: Support choosing a port on the command line. ## Code After: import giles.server import sys server = giles.server.Server() if len(sys.argv) == 2: port = int(sys.argv[1]) else: port = 9435 server.instantiate(port) server.loop()
// ... existing code ... import giles.server import sys server = giles.server.Server() if len(sys.argv) == 2: port = int(sys.argv[1]) else: port = 9435 server.instantiate(port) server.loop() // ... rest of the code ...
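The same optional-positional-port handling, written with argparse instead of indexing sys.argv (a sketch; only the 9435 default comes from the record):

import argparse

parser = argparse.ArgumentParser(description="Run the server.")
parser.add_argument("port", nargs="?", type=int, default=9435,
                    help="TCP port to listen on (default: 9435)")
args = parser.parse_args([])  # use parser.parse_args() for real CLI input
print(args.port)  # 9435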
5d5098db8e5a3b60cbba77aa04035bc35e3f1726
db_logger.py
db_logger.py
import threading import time import accounts import args import config MAX_TEXT_LENGTH = 1024 enabled = bool(args.args['database']) if enabled: import MySQLdb connected = False conn = None cur = None db_lock = threading.RLock() def _connect(): global conn, cur, connected if not connected: conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database'), charset='utf8mb4') cur = conn.cursor() connected = True def log(message, kind, text_msg=None): global connected, enabled if enabled: if not config.get('db_logger.host') or not config.get('db_logger.database'): print('Incorrect database configuration!') enabled = False return with db_lock: try: _connect() if text_msg is None: text_msg = message text_msg = text_msg[:MAX_TEXT_LENGTH] cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account)) conn.commit() except MySQLdb.Error as e: print(e, flush=True) time.sleep(5) connected = False log(message, kind, text_msg)
import threading import time import accounts import args import config import log as _log MAX_TEXT_LENGTH = 1024 enabled = bool(args.args['database']) if enabled: import MySQLdb connected = False conn = None cur = None db_lock = threading.RLock() def _connect(): global conn, cur, connected if not connected: conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database'), charset='utf8mb4') cur = conn.cursor() connected = True def log(message, kind, text_msg=None): global connected, enabled if enabled: if not config.get('db_logger.host') or not config.get('db_logger.database'): print('Incorrect database configuration!') enabled = False return with db_lock: try: _connect() if text_msg is None: text_msg = message text_msg = text_msg[:MAX_TEXT_LENGTH] cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account)) conn.commit() except MySQLdb.Error as e: print(e, flush=True) _log.write('error', 'MySQL error: ' + str(e)) time.sleep(5) connected = False log(message, kind, text_msg)
Write db errors to error.log
Write db errors to error.log
Python
mit
kalinochkind/vkbot,kalinochkind/vkbot,kalinochkind/vkbot
import threading import time import accounts import args import config + import log as _log MAX_TEXT_LENGTH = 1024 enabled = bool(args.args['database']) if enabled: import MySQLdb connected = False conn = None cur = None db_lock = threading.RLock() def _connect(): global conn, cur, connected if not connected: conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database'), charset='utf8mb4') cur = conn.cursor() connected = True def log(message, kind, text_msg=None): global connected, enabled if enabled: if not config.get('db_logger.host') or not config.get('db_logger.database'): print('Incorrect database configuration!') enabled = False return with db_lock: try: _connect() if text_msg is None: text_msg = message text_msg = text_msg[:MAX_TEXT_LENGTH] cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account)) conn.commit() except MySQLdb.Error as e: print(e, flush=True) + _log.write('error', 'MySQL error: ' + str(e)) time.sleep(5) connected = False log(message, kind, text_msg)
Write db errors to error.log
## Code Before: import threading import time import accounts import args import config MAX_TEXT_LENGTH = 1024 enabled = bool(args.args['database']) if enabled: import MySQLdb connected = False conn = None cur = None db_lock = threading.RLock() def _connect(): global conn, cur, connected if not connected: conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database'), charset='utf8mb4') cur = conn.cursor() connected = True def log(message, kind, text_msg=None): global connected, enabled if enabled: if not config.get('db_logger.host') or not config.get('db_logger.database'): print('Incorrect database configuration!') enabled = False return with db_lock: try: _connect() if text_msg is None: text_msg = message text_msg = text_msg[:MAX_TEXT_LENGTH] cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account)) conn.commit() except MySQLdb.Error as e: print(e, flush=True) time.sleep(5) connected = False log(message, kind, text_msg) ## Instruction: Write db errors to error.log ## Code After: import threading import time import accounts import args import config import log as _log MAX_TEXT_LENGTH = 1024 enabled = bool(args.args['database']) if enabled: import MySQLdb connected = False conn = None cur = None db_lock = threading.RLock() def _connect(): global conn, cur, connected if not connected: conn = MySQLdb.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database'), charset='utf8mb4') cur = conn.cursor() connected = True def log(message, kind, text_msg=None): global connected, enabled if enabled: if not config.get('db_logger.host') or not config.get('db_logger.database'): print('Incorrect database configuration!') enabled = False return with db_lock: try: _connect() if text_msg is None: text_msg = message text_msg = text_msg[:MAX_TEXT_LENGTH] cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW(), %s, %s)', (message, kind, text_msg, accounts.current_account)) conn.commit() except MySQLdb.Error as e: print(e, flush=True) _log.write('error', 'MySQL error: ' + str(e)) time.sleep(5) connected = False log(message, kind, text_msg)
... import args import config import log as _log MAX_TEXT_LENGTH = 1024 ... except MySQLdb.Error as e: print(e, flush=True) _log.write('error', 'MySQL error: ' + str(e)) time.sleep(5) connected = False ...
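The patched log() retries by calling itself, so a database that stays down recurses without bound. An iterative loop with the same retry-and-report behavior (a sketch; write_row stands in for the cursor execute plus commit pair from this record):

import time

def log_with_retry(write_row, message, retries=3, delay=5):
    # write_row is any callable that raises on failure.
    for _ in range(retries):
        try:
            write_row(message)
            return True
        except Exception as exc:  # the record catches MySQLdb.Error
            print(exc, flush=True)
            time.sleep(delay)
    return False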
9d2d41f8450f6f3735b8ff9a0041f9bf5d80e5ec
config/template.py
config/template.py
DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = ''
DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' TWILIO_NUMBERS = ['']
Allow for representative view display with sample configuration
Allow for representative view display with sample configuration
Python
mit
PeterTheOne/ueberwachungspaket.at,PeterTheOne/ueberwachungspaket.at,PeterTheOne/ueberwachungspaket.at
DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' + TWILIO_NUMBERS = [''] +
Allow for representative view display with sample configuration
## Code Before: DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' ## Instruction: Allow for representative view display with sample configuration ## Code After: DB_USER = '' DB_HOST = '' DB_PASSWORD = '' DB_NAME = '' TWILIO_NUMBERS = ['']
// ... existing code ... DB_PASSWORD = '' DB_NAME = '' TWILIO_NUMBERS = [''] // ... rest of the code ...
8af3aef367135dbbc55e573c6a943a86ff3ccd9d
survey/tests/locale/test_locale_normalization.py
survey/tests/locale/test_locale_normalization.py
import os import platform import subprocess import unittest class TestLocaleNormalization(unittest.TestCase): LOCALE_PATH = "survey/locale/" def test_normalization(self): """ We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """ if platform.system() == "Windows": python_3 = ["py", "-3"] else: python_3 = ["python3"] makemessages_command = python_3 + [ "manage.py", "makemessages", "--no-obsolete", "--no-wrap", "--ignore", "venv", ] number_of_language = len(os.listdir(self.LOCALE_PATH)) subprocess.check_call(makemessages_command) git_diff_command = ["git", "diff", self.LOCALE_PATH] git_diff = subprocess.check_output(git_diff_command).decode("utf8") # In the diff we should have a change only for the date of the generation # So 2 * @@ * number of language number_of_change = git_diff.count("@@") / 2 msg = ( "You did not update the translation following your changes. Maybe you did not use the " "normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're " "working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH), ) self.assertEqual(number_of_change, number_of_language, msg)
import os import platform import subprocess import unittest from pathlib import Path class TestLocaleNormalization(unittest.TestCase): LOCALE_PATH = Path("survey", "locale").absolute() def test_normalization(self): """ We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """ if platform.system() == "Windows": python_3 = ["py", "-3"] else: python_3 = ["python3"] makemessages_command = python_3 + [ "manage.py", "makemessages", "--no-obsolete", "--no-wrap", "--ignore", "venv", ] number_of_language = len(os.listdir(self.LOCALE_PATH)) subprocess.check_call(makemessages_command) git_diff_command = ["git", "diff", self.LOCALE_PATH] git_diff = subprocess.check_output(git_diff_command).decode("utf8") # In the diff we should have a change only for the date of the generation # So 2 * @@ * number of language number_of_change = git_diff.count("@@") / 2 msg = ( "You did not update the translation following your changes. Maybe you did not use the " "normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're " "working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH), ) self.assertEqual(number_of_change, number_of_language, msg)
Use an absolute Path for localization tests
Use an absolute Path for localization tests
Python
agpl-3.0
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
import os import platform import subprocess import unittest + from pathlib import Path class TestLocaleNormalization(unittest.TestCase): - LOCALE_PATH = "survey/locale/" + LOCALE_PATH = Path("survey", "locale").absolute() def test_normalization(self): """ We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """ if platform.system() == "Windows": python_3 = ["py", "-3"] else: python_3 = ["python3"] makemessages_command = python_3 + [ "manage.py", "makemessages", "--no-obsolete", "--no-wrap", "--ignore", "venv", ] number_of_language = len(os.listdir(self.LOCALE_PATH)) subprocess.check_call(makemessages_command) git_diff_command = ["git", "diff", self.LOCALE_PATH] git_diff = subprocess.check_output(git_diff_command).decode("utf8") # In the diff we should have a change only for the date of the generation # So 2 * @@ * number of language number_of_change = git_diff.count("@@") / 2 msg = ( "You did not update the translation following your changes. Maybe you did not use the " "normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're " "working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH), ) self.assertEqual(number_of_change, number_of_language, msg)
Use an absolute Path for localization tests
## Code Before: import os import platform import subprocess import unittest class TestLocaleNormalization(unittest.TestCase): LOCALE_PATH = "survey/locale/" def test_normalization(self): """ We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """ if platform.system() == "Windows": python_3 = ["py", "-3"] else: python_3 = ["python3"] makemessages_command = python_3 + [ "manage.py", "makemessages", "--no-obsolete", "--no-wrap", "--ignore", "venv", ] number_of_language = len(os.listdir(self.LOCALE_PATH)) subprocess.check_call(makemessages_command) git_diff_command = ["git", "diff", self.LOCALE_PATH] git_diff = subprocess.check_output(git_diff_command).decode("utf8") # In the diff we should have a change only for the date of the generation # So 2 * @@ * number of language number_of_change = git_diff.count("@@") / 2 msg = ( "You did not update the translation following your changes. Maybe you did not use the " "normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're " "working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH), ) self.assertEqual(number_of_change, number_of_language, msg) ## Instruction: Use an absolute Path for localization tests ## Code After: import os import platform import subprocess import unittest from pathlib import Path class TestLocaleNormalization(unittest.TestCase): LOCALE_PATH = Path("survey", "locale").absolute() def test_normalization(self): """ We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """ if platform.system() == "Windows": python_3 = ["py", "-3"] else: python_3 = ["python3"] makemessages_command = python_3 + [ "manage.py", "makemessages", "--no-obsolete", "--no-wrap", "--ignore", "venv", ] number_of_language = len(os.listdir(self.LOCALE_PATH)) subprocess.check_call(makemessages_command) git_diff_command = ["git", "diff", self.LOCALE_PATH] git_diff = subprocess.check_output(git_diff_command).decode("utf8") # In the diff we should have a change only for the date of the generation # So 2 * @@ * number of language number_of_change = git_diff.count("@@") / 2 msg = ( "You did not update the translation following your changes. Maybe you did not use the " "normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're " "working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH), ) self.assertEqual(number_of_change, number_of_language, msg)
// ... existing code ... import subprocess import unittest from pathlib import Path // ... modified code ... class TestLocaleNormalization(unittest.TestCase): LOCALE_PATH = Path("survey", "locale").absolute() def test_normalization(self): // ... rest of the code ...
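For reference, the pathlib call the test now uses resolves against the current working directory; a quick check of what it yields:

from pathlib import Path

locale_path = Path("survey", "locale").absolute()
print(locale_path)                # e.g. /home/user/project/survey/locale
print(locale_path.is_absolute())  # True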
f946ca92b74bb945c3884fcfa3e515132ec56b06
virtool/processes.py
virtool/processes.py
STEP_COUNTS = { "import_reference": 0, "setup_remote_reference": 0, "update_remote_reference": 0, "update_software": 0, "install_hmms": 0 } FIRST_STEPS = { "import_reference": "load_file", "setup_remote_reference": "", "update_remote_reference": "", "update_software": "", "install_hmms": "" } UNIQUES = [ "update_software", "install_hmms" ]
STEP_COUNTS = { "import_reference": 0, "setup_remote_reference": 0, "update_remote_reference": 0, "update_software": 0, "install_hmms": 0 } FIRST_STEPS = { "import_reference": "load_file", "setup_remote_reference": "", "update_remote_reference": "", "update_software": "", "install_hmms": "" } UNIQUES = [ "update_software", "install_hmms" ] class ProgressTracker: def __init__(self, total, db=None, increment=0.05, factor=1): self.total = total self.db = db self.increment = increment self.factor = factor self.count = 0 self.last_reported = 0 def add(self, value): count = self.count + value if count > self.total: raise ValueError("Count cannot exceed total") self.count = count return self.progress async def reported(self): self.last_reported = self.progress @property def progress(self): return round(self.count / self.total, 2)
Add ProgressTracker class for tracking progress in long operations
Add ProgressTracker class for tracking progress in long operations
Python
mit
virtool/virtool,igboyes/virtool,igboyes/virtool,virtool/virtool
STEP_COUNTS = { "import_reference": 0, "setup_remote_reference": 0, "update_remote_reference": 0, "update_software": 0, "install_hmms": 0 } FIRST_STEPS = { "import_reference": "load_file", "setup_remote_reference": "", "update_remote_reference": "", "update_software": "", "install_hmms": "" } UNIQUES = [ "update_software", "install_hmms" ] + + class ProgressTracker: + + def __init__(self, total, db=None, increment=0.05, factor=1): + self.total = total + self.db = db + self.increment = increment + self.factor = factor + + self.count = 0 + self.last_reported = 0 + + def add(self, value): + count = self.count + value + + if count > self.total: + raise ValueError("Count cannot exceed total") + + self.count = count + + return self.progress + + async def reported(self): + self.last_reported = self.progress + + @property + def progress(self): + return round(self.count / self.total, 2) +
Add ProgressTracker class for tracking progress in long operations
## Code Before: STEP_COUNTS = { "import_reference": 0, "setup_remote_reference": 0, "update_remote_reference": 0, "update_software": 0, "install_hmms": 0 } FIRST_STEPS = { "import_reference": "load_file", "setup_remote_reference": "", "update_remote_reference": "", "update_software": "", "install_hmms": "" } UNIQUES = [ "update_software", "install_hmms" ] ## Instruction: Add ProgressTracker class for tracking progress in long operations ## Code After: STEP_COUNTS = { "import_reference": 0, "setup_remote_reference": 0, "update_remote_reference": 0, "update_software": 0, "install_hmms": 0 } FIRST_STEPS = { "import_reference": "load_file", "setup_remote_reference": "", "update_remote_reference": "", "update_software": "", "install_hmms": "" } UNIQUES = [ "update_software", "install_hmms" ] class ProgressTracker: def __init__(self, total, db=None, increment=0.05, factor=1): self.total = total self.db = db self.increment = increment self.factor = factor self.count = 0 self.last_reported = 0 def add(self, value): count = self.count + value if count > self.total: raise ValueError("Count cannot exceed total") self.count = count return self.progress async def reported(self): self.last_reported = self.progress @property def progress(self): return round(self.count / self.total, 2)
// ... existing code ... "install_hmms" ] class ProgressTracker: def __init__(self, total, db=None, increment=0.05, factor=1): self.total = total self.db = db self.increment = increment self.factor = factor self.count = 0 self.last_reported = 0 def add(self, value): count = self.count + value if count > self.total: raise ValueError("Count cannot exceed total") self.count = count return self.progress async def reported(self): self.last_reported = self.progress @property def progress(self): return round(self.count / self.total, 2) // ... rest of the code ...
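A short usage sketch for the new class; it assumes ProgressTracker is importable from this module, e.g. from virtool.processes:

# Assumes: from virtool.processes import ProgressTracker
tracker = ProgressTracker(total=200)

for _ in range(4):
    print(tracker.add(50))  # 0.25, 0.5, 0.75, 1.0 (add() returns progress)

try:
    tracker.add(1)  # pushing past the total raises
except ValueError as exc:
    print(exc)  # Count cannot exceed total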
a3ee55cf4d9182247dcc7a42b0336c467dce9e3e
linter.py
linter.py
from SublimeLinter.lint import Linter, util class Cppcheck(Linter): cmd = ( 'cppcheck', '--template={file}:{line}: {severity}: {message}', '--inline-suppr', '--quiet', '${args}', '${file}' ) regex = ( r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)?\s+' r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):\s+' r'(?P<message>.+)' ) error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout on_stderr = None # handle stderr via split_match tempfile_suffix = '-' defaults = { 'selector': 'source.c, source.c++', '--std=,+': [], # example ['c99', 'c89'] '--enable=,': 'style', } def split_match(self, match): """ Return the components of the match. We override this because included header files can cause linter errors, and we only want errors from the linted file. """ if match: if match.group('file') != self.filename: return None return super().split_match(match)
from SublimeLinter.lint import Linter, util class Cppcheck(Linter): cmd = ( 'cppcheck', '--template={file}:{line}:{column}:{severity}:{id}:{message}', '--inline-suppr', '--quiet', '${args}', '${file}' ) regex = ( r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)' r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):' r'(?P<code>\w+):(?P<message>.+)' ) error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout on_stderr = None # handle stderr via split_match tempfile_suffix = '-' defaults = { 'selector': 'source.c, source.c++', '--std=,+': [], # example ['c99', 'c89'] '--enable=,': 'style', } def split_match(self, match): """ Return the components of the match. We override this because included header files can cause linter errors, and we only want errors from the linted file. """ if match: if match.group('file') != self.filename: return None return super().split_match(match)
Add cppcheck issue id as code field
Add cppcheck issue id as code field
Python
mit
SublimeLinter/SublimeLinter-cppcheck
from SublimeLinter.lint import Linter, util class Cppcheck(Linter): cmd = ( 'cppcheck', - '--template={file}:{line}: {severity}: {message}', + '--template={file}:{line}:{column}:{severity}:{id}:{message}', '--inline-suppr', '--quiet', '${args}', '${file}' ) regex = ( - r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)?\s+' + r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)' - r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):\s+' + r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):' - r'(?P<message>.+)' + r'(?P<code>\w+):(?P<message>.+)' ) error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout on_stderr = None # handle stderr via split_match tempfile_suffix = '-' defaults = { 'selector': 'source.c, source.c++', '--std=,+': [], # example ['c99', 'c89'] '--enable=,': 'style', } def split_match(self, match): """ Return the components of the match. We override this because included header files can cause linter errors, and we only want errors from the linted file. """ if match: if match.group('file') != self.filename: return None return super().split_match(match)
Add cppcheck issue id as code field
## Code Before: from SublimeLinter.lint import Linter, util class Cppcheck(Linter): cmd = ( 'cppcheck', '--template={file}:{line}: {severity}: {message}', '--inline-suppr', '--quiet', '${args}', '${file}' ) regex = ( r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)?\s+' r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):\s+' r'(?P<message>.+)' ) error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout on_stderr = None # handle stderr via split_match tempfile_suffix = '-' defaults = { 'selector': 'source.c, source.c++', '--std=,+': [], # example ['c99', 'c89'] '--enable=,': 'style', } def split_match(self, match): """ Return the components of the match. We override this because included header files can cause linter errors, and we only want errors from the linted file. """ if match: if match.group('file') != self.filename: return None return super().split_match(match) ## Instruction: Add cppcheck issue id as code field ## Code After: from SublimeLinter.lint import Linter, util class Cppcheck(Linter): cmd = ( 'cppcheck', '--template={file}:{line}:{column}:{severity}:{id}:{message}', '--inline-suppr', '--quiet', '${args}', '${file}' ) regex = ( r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)' r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):' r'(?P<code>\w+):(?P<message>.+)' ) error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout on_stderr = None # handle stderr via split_match tempfile_suffix = '-' defaults = { 'selector': 'source.c, source.c++', '--std=,+': [], # example ['c99', 'c89'] '--enable=,': 'style', } def split_match(self, match): """ Return the components of the match. We override this because included header files can cause linter errors, and we only want errors from the linted file. """ if match: if match.group('file') != self.filename: return None return super().split_match(match)
// ... existing code ... cmd = ( 'cppcheck', '--template={file}:{line}:{column}:{severity}:{id}:{message}', '--inline-suppr', '--quiet', // ... modified code ... ) regex = ( r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)' r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):' r'(?P<code>\w+):(?P<message>.+)' ) error_stream = util.STREAM_BOTH # linting errors are on stderr, exceptions like "file not found" on stdout // ... rest of the code ...
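To confirm the new --template and the regex agree, here is a quick match against a sample line (the sample diagnostic itself is made up):

import re

regex = re.compile(
    r'^(?P<file>(:\\|[^:])+):(?P<line>\d+):((?P<col>\d+):)'
    r'((?P<error>error)|(?P<warning>warning|style|performance|portability|information)):'
    r'(?P<code>\w+):(?P<message>.+)'
)

sample = "src/main.cpp:42:7:style:unusedVariable:Unused variable: tmp"
m = regex.match(sample)
print(m.group('line'), m.group('code'), m.group('message'))
# 42 unusedVariable Unused variable: tmp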
7b1d520278b8fe33b68103d26f9aa7bb945f6791
cryptography/hazmat/backends/__init__.py
cryptography/hazmat/backends/__init__.py
from cryptography.hazmat.backends import openssl from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) def default_backend(): return openssl.backend
from cryptography.hazmat.backends import openssl from cryptography.hazmat.backends.multibackend import MultiBackend from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) _default_backend = MultiBackend(_ALL_BACKENDS) def default_backend(): return _default_backend
Make the default backend be a multi-backend
Make the default backend be a multi-backend
Python
bsd-3-clause
bwhmather/cryptography,Ayrx/cryptography,bwhmather/cryptography,Lukasa/cryptography,Ayrx/cryptography,bwhmather/cryptography,kimvais/cryptography,skeuomorf/cryptography,dstufft/cryptography,kimvais/cryptography,Lukasa/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,Lukasa/cryptography,sholsapp/cryptography,Hasimir/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,dstufft/cryptography,sholsapp/cryptography,Hasimir/cryptography,Hasimir/cryptography,skeuomorf/cryptography,kimvais/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,kimvais/cryptography,sholsapp/cryptography,bwhmather/cryptography
from cryptography.hazmat.backends import openssl + from cryptography.hazmat.backends.multibackend import MultiBackend from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) + _default_backend = MultiBackend(_ALL_BACKENDS) + def default_backend(): - return openssl.backend + return _default_backend
Make the default backend be a multi-backend
## Code Before: from cryptography.hazmat.backends import openssl from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) def default_backend(): return openssl.backend ## Instruction: Make the default backend be a multi-backend ## Code After: from cryptography.hazmat.backends import openssl from cryptography.hazmat.backends.multibackend import MultiBackend from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) _default_backend = MultiBackend(_ALL_BACKENDS) def default_backend(): return _default_backend
# ... existing code ... from cryptography.hazmat.backends import openssl from cryptography.hazmat.backends.multibackend import MultiBackend from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding # ... modified code ... _default_backend = MultiBackend(_ALL_BACKENDS) def default_backend(): return _default_backend # ... rest of the code ...
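From a caller's point of view, nothing changes except what default_backend() returns. A minimal sketch, using the era-appropriate hazmat API in which primitives took an explicit backend argument; the hashing example itself is illustrative and not part of the record.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

# default_backend() now yields a MultiBackend wrapping every available
# backend (OpenSSL, plus CommonCrypto where present) rather than OpenSSL alone.
backend = default_backend()

digest = hashes.Hash(hashes.SHA256(), backend=backend)
digest.update(b"multibackend")
print(digest.finalize().hex())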
6ee261309f4492994b52403d485bdfd08739a072
kolibri/utils/tests/test_handler.py
kolibri/utils/tests/test_handler.py
import os from time import sleep from django.conf import settings from django.test import TestCase from kolibri.utils import cli class KolibriTimedRotatingFileHandlerTestCase(TestCase): def test_do_rollover(self): archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive") orig_value = settings.LOGGING["handlers"]["file"]["when"] # Temporarily set the rotation time of the log file to be every second settings.LOGGING["handlers"]["file"]["when"] = "s" # make sure that kolibri will be running for more than one second try: cli.main(["--skipupdate", "manage", "help"]) except SystemExit: pass sleep(1) try: cli.main(["--skipupdate", "manage", "help"]) except SystemExit: pass # change back to the original rotation time settings.LOGGING["handlers"]["file"]["when"] = orig_value self.assertNotEqual(os.listdir(archive_dir), [])
import os from time import sleep from django.conf import settings from django.test import TestCase from kolibri.utils import cli class KolibriTimedRotatingFileHandlerTestCase(TestCase): def test_do_rollover(self): archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive") orig_value = settings.LOGGING["handlers"]["file"]["when"] # Temporarily set the rotation time of the log file to be every second settings.LOGGING["handlers"]["file"]["when"] = "s" # make sure that kolibri will be running for more than one second try: cli.main(["manage", "--skipupdate", "help"]) except SystemExit: pass sleep(1) try: cli.main(["manage", "--skipupdate", "help"]) except SystemExit: pass # change back to the original rotation time settings.LOGGING["handlers"]["file"]["when"] = orig_value self.assertNotEqual(os.listdir(archive_dir), [])
Fix argument ordering in log handler test.
Fix argument ordering in log handler test.
Python
mit
indirectlylit/kolibri,indirectlylit/kolibri,mrpau/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri,learningequality/kolibri,learningequality/kolibri,mrpau/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri
import os from time import sleep from django.conf import settings from django.test import TestCase from kolibri.utils import cli class KolibriTimedRotatingFileHandlerTestCase(TestCase): def test_do_rollover(self): archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive") orig_value = settings.LOGGING["handlers"]["file"]["when"] # Temporarily set the rotation time of the log file to be every second settings.LOGGING["handlers"]["file"]["when"] = "s" # make sure that kolibri will be running for more than one second try: - cli.main(["--skipupdate", "manage", "help"]) + cli.main(["manage", "--skipupdate", "help"]) except SystemExit: pass sleep(1) try: - cli.main(["--skipupdate", "manage", "help"]) + cli.main(["manage", "--skipupdate", "help"]) except SystemExit: pass # change back to the original rotation time settings.LOGGING["handlers"]["file"]["when"] = orig_value self.assertNotEqual(os.listdir(archive_dir), [])
Fix argument ordering in log handler test.
## Code Before: import os from time import sleep from django.conf import settings from django.test import TestCase from kolibri.utils import cli class KolibriTimedRotatingFileHandlerTestCase(TestCase): def test_do_rollover(self): archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive") orig_value = settings.LOGGING["handlers"]["file"]["when"] # Temporarily set the rotation time of the log file to be every second settings.LOGGING["handlers"]["file"]["when"] = "s" # make sure that kolibri will be running for more than one second try: cli.main(["--skipupdate", "manage", "help"]) except SystemExit: pass sleep(1) try: cli.main(["--skipupdate", "manage", "help"]) except SystemExit: pass # change back to the original rotation time settings.LOGGING["handlers"]["file"]["when"] = orig_value self.assertNotEqual(os.listdir(archive_dir), []) ## Instruction: Fix argument ordering in log handler test. ## Code After: import os from time import sleep from django.conf import settings from django.test import TestCase from kolibri.utils import cli class KolibriTimedRotatingFileHandlerTestCase(TestCase): def test_do_rollover(self): archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive") orig_value = settings.LOGGING["handlers"]["file"]["when"] # Temporarily set the rotation time of the log file to be every second settings.LOGGING["handlers"]["file"]["when"] = "s" # make sure that kolibri will be running for more than one second try: cli.main(["manage", "--skipupdate", "help"]) except SystemExit: pass sleep(1) try: cli.main(["manage", "--skipupdate", "help"]) except SystemExit: pass # change back to the original rotation time settings.LOGGING["handlers"]["file"]["when"] = orig_value self.assertNotEqual(os.listdir(archive_dir), [])
... # make sure that kolibri will be running for more than one second try: cli.main(["manage", "--skipupdate", "help"]) except SystemExit: pass ... sleep(1) try: cli.main(["manage", "--skipupdate", "help"]) except SystemExit: pass ...
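The reordering matters because in a click-style CLI, an option declared on a subcommand must come after that subcommand's name on the command line; the group itself rejects it as unknown. A self-contained sketch of that behaviour, with command names that are illustrative rather than Kolibri's real CLI definition:

import click
from click.testing import CliRunner

@click.group()
def cli():
    pass

@cli.command()
@click.option("--skipupdate", is_flag=True)
def manage(skipupdate):
    click.echo("skipupdate=%s" % skipupdate)

runner = CliRunner()
# Option after the subcommand: parsed by ``manage`` as intended.
print(runner.invoke(cli, ["manage", "--skipupdate"]).output)
# Option before the subcommand: the group does not know it; non-zero exit.
print(runner.invoke(cli, ["--skipupdate", "manage"]).exit_code)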
6da0aaf77fe221286981b94eaf7d304568f55957
examples/stories/movie_lister/movies/__init__.py
examples/stories/movie_lister/movies/__init__.py
from . import finders from . import listers from . import models from dependency_injector import catalogs from dependency_injector import providers class MoviesModule(catalogs.DeclarativeCatalog): """Catalog of movies module components.""" movie_model = providers.DelegatedFactory(models.Movie) movie_finder = providers.Factory(finders.MovieFinder, movie_model=movie_model) movie_lister = providers.Factory(listers.MovieLister, movie_finder=movie_finder)
from dependency_injector import catalogs from dependency_injector import providers from . import finders from . import listers from . import models class MoviesModule(catalogs.DeclarativeCatalog): """Catalog of movies module components.""" movie_model = providers.DelegatedFactory(models.Movie) movie_finder = providers.Factory(finders.MovieFinder, movie_model=movie_model) movie_lister = providers.Factory(listers.MovieLister, movie_finder=movie_finder)
Update imports for MovieLister standard module
Update imports for MovieLister standard module
Python
bsd-3-clause
rmk135/objects,ets-labs/python-dependency-injector,ets-labs/dependency_injector,rmk135/dependency_injector
+ + from dependency_injector import catalogs + from dependency_injector import providers from . import finders from . import listers from . import models - - from dependency_injector import catalogs - from dependency_injector import providers class MoviesModule(catalogs.DeclarativeCatalog): """Catalog of movies module components.""" movie_model = providers.DelegatedFactory(models.Movie) movie_finder = providers.Factory(finders.MovieFinder, movie_model=movie_model) movie_lister = providers.Factory(listers.MovieLister, movie_finder=movie_finder)
Update imports for MovieLister standard module
## Code Before:

from . import finders
from . import listers
from . import models

from dependency_injector import catalogs
from dependency_injector import providers


class MoviesModule(catalogs.DeclarativeCatalog):
    """Catalog of movies module components."""

    movie_model = providers.DelegatedFactory(models.Movie)

    movie_finder = providers.Factory(finders.MovieFinder,
                                     movie_model=movie_model)

    movie_lister = providers.Factory(listers.MovieLister,
                                     movie_finder=movie_finder)

## Instruction:
Update imports for MovieLister standard module

## Code After:

from dependency_injector import catalogs
from dependency_injector import providers

from . import finders
from . import listers
from . import models


class MoviesModule(catalogs.DeclarativeCatalog):
    """Catalog of movies module components."""

    movie_model = providers.DelegatedFactory(models.Movie)

    movie_finder = providers.Factory(finders.MovieFinder,
                                     movie_model=movie_model)

    movie_lister = providers.Factory(listers.MovieLister,
                                     movie_finder=movie_finder)
... from dependency_injector import catalogs from dependency_injector import providers from . import finders ... from . import listers from . import models ...
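Beyond the PEP 8 import regrouping (third-party packages before local relative imports), the record is a compact example of constructor injection. The library-free sketch below mirrors what the catalog's Factory providers do, with toy classes standing in for the record's models:

class Movie:                      # stand-in for models.Movie
    pass

class MovieFinder:                # stand-in for finders.MovieFinder
    def __init__(self, movie_model):
        self.movie_model = movie_model

class MovieLister:                # stand-in for listers.MovieLister
    def __init__(self, movie_finder):
        self.movie_finder = movie_finder

def movie_lister_factory():
    # Build the dependency chain and inject it through constructors,
    # the same wiring the Factory providers express declaratively.
    return MovieLister(movie_finder=MovieFinder(movie_model=Movie))

lister = movie_lister_factory()
print(type(lister.movie_finder).__name__)   # MovieFinder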
5d652eacf793dc3aa1873279708f88e16e1c0dfd
eloqua/endpoints_v2.py
eloqua/endpoints_v2.py
mapping_table = { 'content_type': 'application/json', 'path_prefix': '/API/REST/2.0', # Campaigns 'get_campaign': { 'method': 'GET', 'path': '/assets/campaign/{{campaign_id}}', 'valid_params': ['depth'] }, 'list_campaigns': { 'method': 'GET', 'path': '/assets/campaigns', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, 'create_campaign': { 'method': 'POST', 'path': '/assets/campaign', 'status': 201 }, # Campaign folders - UNDOCUMENTED 'get_campaign_folder': { 'method': 'GET', 'path': '/assets/campaign/folder/{{campaign_folder_id}}', 'valid_params': ['depth'] }, 'list_campaign_folders': { 'method': 'GET', 'path': '/assets/campaign/folders', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, }
mapping_table = { 'content_type': 'application/json', 'path_prefix': '/API/REST/2.0', # Campaigns 'get_campaign': { 'method': 'GET', 'path': '/assets/campaign/{{campaign_id}}', 'valid_params': ['depth'] }, 'list_campaigns': { 'method': 'GET', 'path': '/assets/campaigns', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, 'create_campaign': { 'method': 'POST', 'path': '/assets/campaign', 'status': 201 }, 'activate_campaign': { 'method': 'POST', 'path': '/assets/campaign/active/{{campaign_id}}', 'valid_params': ['activateNow','scheduledFor','runAsUserId'] }, # Campaign folders - UNDOCUMENTED 'get_campaign_folder': { 'method': 'GET', 'path': '/assets/campaign/folder/{{campaign_folder_id}}', 'valid_params': ['depth'] }, 'list_campaign_folders': { 'method': 'GET', 'path': '/assets/campaign/folders', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, }
Add operation to activate campaign.
Add operation to activate campaign.
Python
mit
alexcchan/eloqua
mapping_table = { 'content_type': 'application/json', 'path_prefix': '/API/REST/2.0', # Campaigns 'get_campaign': { 'method': 'GET', 'path': '/assets/campaign/{{campaign_id}}', 'valid_params': ['depth'] }, 'list_campaigns': { 'method': 'GET', 'path': '/assets/campaigns', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, 'create_campaign': { 'method': 'POST', 'path': '/assets/campaign', 'status': 201 }, + 'activate_campaign': { + 'method': 'POST', + 'path': '/assets/campaign/active/{{campaign_id}}', + 'valid_params': ['activateNow','scheduledFor','runAsUserId'] + }, # Campaign folders - UNDOCUMENTED 'get_campaign_folder': { 'method': 'GET', 'path': '/assets/campaign/folder/{{campaign_folder_id}}', 'valid_params': ['depth'] }, 'list_campaign_folders': { 'method': 'GET', 'path': '/assets/campaign/folders', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, }
Add operation to activate campaign.
## Code Before: mapping_table = { 'content_type': 'application/json', 'path_prefix': '/API/REST/2.0', # Campaigns 'get_campaign': { 'method': 'GET', 'path': '/assets/campaign/{{campaign_id}}', 'valid_params': ['depth'] }, 'list_campaigns': { 'method': 'GET', 'path': '/assets/campaigns', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, 'create_campaign': { 'method': 'POST', 'path': '/assets/campaign', 'status': 201 }, # Campaign folders - UNDOCUMENTED 'get_campaign_folder': { 'method': 'GET', 'path': '/assets/campaign/folder/{{campaign_folder_id}}', 'valid_params': ['depth'] }, 'list_campaign_folders': { 'method': 'GET', 'path': '/assets/campaign/folders', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, } ## Instruction: Add operation to activate campaign. ## Code After: mapping_table = { 'content_type': 'application/json', 'path_prefix': '/API/REST/2.0', # Campaigns 'get_campaign': { 'method': 'GET', 'path': '/assets/campaign/{{campaign_id}}', 'valid_params': ['depth'] }, 'list_campaigns': { 'method': 'GET', 'path': '/assets/campaigns', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, 'create_campaign': { 'method': 'POST', 'path': '/assets/campaign', 'status': 201 }, 'activate_campaign': { 'method': 'POST', 'path': '/assets/campaign/active/{{campaign_id}}', 'valid_params': ['activateNow','scheduledFor','runAsUserId'] }, # Campaign folders - UNDOCUMENTED 'get_campaign_folder': { 'method': 'GET', 'path': '/assets/campaign/folder/{{campaign_folder_id}}', 'valid_params': ['depth'] }, 'list_campaign_folders': { 'method': 'GET', 'path': '/assets/campaign/folders', 'valid_params': ['depth','count','page','search','sort','dir','orderBy','lastUpdatedAt'] }, }
# ... existing code ... 'status': 201 }, 'activate_campaign': { 'method': 'POST', 'path': '/assets/campaign/active/{{campaign_id}}', 'valid_params': ['activateNow','scheduledFor','runAsUserId'] }, # Campaign folders - UNDOCUMENTED # ... rest of the code ...
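The mapping table is purely declarative and the record does not show how it is consumed, so the helper below is an assumption: build_url and its substitution logic are invented to illustrate how an entry such as activate_campaign could become a request path.

mapping_table = {
    'path_prefix': '/API/REST/2.0',
    'activate_campaign': {
        'method': 'POST',
        'path': '/assets/campaign/active/{{campaign_id}}',
        'valid_params': ['activateNow', 'scheduledFor', 'runAsUserId'],
    },
}

def build_url(table, operation, **ids):
    # Substitute each {{placeholder}} in the path with a supplied identifier.
    entry = table[operation]
    path = entry['path']
    for key, value in ids.items():
        path = path.replace('{{%s}}' % key, str(value))
    return entry['method'], table['path_prefix'] + path

print(build_url(mapping_table, 'activate_campaign', campaign_id=42))
# Prints: ('POST', '/API/REST/2.0/assets/campaign/active/42')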
83d767f75534da4c225eca407ec5eff6ed5774a2
crmapp/contacts/views.py
crmapp/contacts/views.py
from django.shortcuts import render from django.contrib.auth.decorators import login_required from .models import Contact @login_required() def contact_detail(request, uuid): contact = Contact.objects.get(uuid=uuid) return render(request, 'contacts/contact_detail.html', {'contact': contact} )
from django.shortcuts import render from django.contrib.auth.decorators import login_required from django.http import HttpResponseRedirect from django.core.urlresolvers import reverse from django.http import HttpResponseForbidden from .models import Contact from .forms import ContactForm @login_required() def contact_detail(request, uuid): contact = Contact.objects.get(uuid=uuid) return render(request, 'contacts/contact_detail.html', {'contact': contact} ) @login_required() def contact_cru(request): if request.POST: form = ContactForm(request.POST) if form.is_valid(): # make sure the user owns the account account = form.cleaned_data['account'] if account.owner != request.user: return HttpResponseForbidden() # save the data contact = form.save(commit=False) contact.owner = request.user contact.save() # return the user to the account detail view reverse_url = reverse( 'crmapp.accounts.views.account_detail', args=(account.uuid,) ) return HttpResponseRedirect(reverse_url) else: form = ContactForm() variables = { 'form': form, } template = 'contacts/contact_cru.html' return render(request, template, variables)
Create the Contacts App - Part II > New Contact - Create View
Create the Contacts App - Part II > New Contact - Create View
Python
mit
deenaariff/Django,tabdon/crmeasyapp,tabdon/crmeasyapp
from django.shortcuts import render from django.contrib.auth.decorators import login_required + from django.http import HttpResponseRedirect + from django.core.urlresolvers import reverse + from django.http import HttpResponseForbidden from .models import Contact + from .forms import ContactForm @login_required() def contact_detail(request, uuid): contact = Contact.objects.get(uuid=uuid) return render(request, 'contacts/contact_detail.html', {'contact': contact} ) + @login_required() + def contact_cru(request): + + if request.POST: + form = ContactForm(request.POST) + if form.is_valid(): + # make sure the user owns the account + account = form.cleaned_data['account'] + if account.owner != request.user: + return HttpResponseForbidden() + # save the data + contact = form.save(commit=False) + contact.owner = request.user + contact.save() + # return the user to the account detail view + reverse_url = reverse( + 'crmapp.accounts.views.account_detail', + args=(account.uuid,) + ) + return HttpResponseRedirect(reverse_url) + else: + form = ContactForm() + + variables = { + 'form': form, + } + + template = 'contacts/contact_cru.html' + + return render(request, template, variables) +
Create the Contacts App - Part II > New Contact - Create View
## Code Before: from django.shortcuts import render from django.contrib.auth.decorators import login_required from .models import Contact @login_required() def contact_detail(request, uuid): contact = Contact.objects.get(uuid=uuid) return render(request, 'contacts/contact_detail.html', {'contact': contact} ) ## Instruction: Create the Contacts App - Part II > New Contact - Create View ## Code After: from django.shortcuts import render from django.contrib.auth.decorators import login_required from django.http import HttpResponseRedirect from django.core.urlresolvers import reverse from django.http import HttpResponseForbidden from .models import Contact from .forms import ContactForm @login_required() def contact_detail(request, uuid): contact = Contact.objects.get(uuid=uuid) return render(request, 'contacts/contact_detail.html', {'contact': contact} ) @login_required() def contact_cru(request): if request.POST: form = ContactForm(request.POST) if form.is_valid(): # make sure the user owns the account account = form.cleaned_data['account'] if account.owner != request.user: return HttpResponseForbidden() # save the data contact = form.save(commit=False) contact.owner = request.user contact.save() # return the user to the account detail view reverse_url = reverse( 'crmapp.accounts.views.account_detail', args=(account.uuid,) ) return HttpResponseRedirect(reverse_url) else: form = ContactForm() variables = { 'form': form, } template = 'contacts/contact_cru.html' return render(request, template, variables)
# ... existing code ... from django.shortcuts import render from django.contrib.auth.decorators import login_required from django.http import HttpResponseRedirect from django.core.urlresolvers import reverse from django.http import HttpResponseForbidden from .models import Contact from .forms import ContactForm # ... modified code ... {'contact': contact} ) @login_required() def contact_cru(request): if request.POST: form = ContactForm(request.POST) if form.is_valid(): # make sure the user owns the account account = form.cleaned_data['account'] if account.owner != request.user: return HttpResponseForbidden() # save the data contact = form.save(commit=False) contact.owner = request.user contact.save() # return the user to the account detail view reverse_url = reverse( 'crmapp.accounts.views.account_detail', args=(account.uuid,) ) return HttpResponseRedirect(reverse_url) else: form = ContactForm() variables = { 'form': form, } template = 'contacts/contact_cru.html' return render(request, template, variables) # ... rest of the code ...
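The view imports ContactForm without the record defining it. Purely as an assumption, a minimal ModelForm it could be is sketched below; the field names are guesses, since only account is actually referenced by the view above.

# Hypothetical forms.py for the contacts app; not part of the record.
from django import forms

from .models import Contact


class ContactForm(forms.ModelForm):
    class Meta:
        model = Contact
        # Invented field list; only 'account' is known from the view code.
        fields = ['account', 'first_name', 'last_name', 'email']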
ed2dc3478691592cb38a1f1923a39bed4bcf423c
tests/test_main.py
tests/test_main.py
from contextlib import redirect_stderr, redirect_stdout from io import StringIO from os.path import devnull from subprocess import check_call from sys import version from pytest import fixture, raises from csft import __main__ as main, __version__ @fixture def null(): with open(devnull, 'w') as fobj: yield fobj def test_call(null): check_call(['python', '-m', 'csft', 'csft'], stdout=null, stderr=null) def test_main(mocker): obj = object() csft2data = mocker.patch('csft.__main__.csft2data', return_value=obj) pr = mocker.patch('builtins.print') assert 0 == main.main(argv=['csft']) csft2data.assert_called_once_with(main._dir('csft')) pr.assert_called_once_with(obj) def test_wrong_path(capsys): with raises(SystemExit): main.main(argv=[]) assert capsys.readouterr() with raises(SystemExit): main.main(argv=['path/is/not/a/directory']) assert capsys.readouterr() def test_show_version(): def print_version(): try: main.main(argv=['-V']) except SystemExit as err: assert 0 == err.code buffer = StringIO() if version < '3.0': with redirect_stderr(buffer): print_version() else: with redirect_stdout(buffer): print_version() assert __version__ == buffer.getvalue().strip() buffer.close()
from os.path import devnull from subprocess import check_call from pytest import fixture, raises from csft import __main__ as main @fixture def null(): with open(devnull, 'w') as fobj: yield fobj def test_call(null): check_call(['python', '-m', 'csft', 'csft'], stdout=null, stderr=null) def test_main(mocker): obj = object() csft2data = mocker.patch('csft.__main__.csft2data', return_value=obj) pr = mocker.patch('builtins.print') assert 0 == main.main(argv=['csft']) csft2data.assert_called_once_with(main._dir('csft')) pr.assert_called_once_with(obj) def test_wrong_path(capsys): with raises(SystemExit): main.main(argv=[]) assert capsys.readouterr() with raises(SystemExit): main.main(argv=['path/is/not/a/directory']) assert capsys.readouterr() def test_show_version(capsys): try: main.main(argv=['-V']) except SystemExit as err: assert 0 == err.code from csft import __version__ assert __version__ == capsys.readouterr().out.strip()
Use capsys instead of redirect_stderr
Use capsys instead of redirect_stderr
Python
mit
yanqd0/csft
- from contextlib import redirect_stderr, redirect_stdout - from io import StringIO from os.path import devnull from subprocess import check_call - from sys import version from pytest import fixture, raises - from csft import __main__ as main, __version__ + from csft import __main__ as main @fixture def null(): with open(devnull, 'w') as fobj: yield fobj def test_call(null): check_call(['python', '-m', 'csft', 'csft'], stdout=null, stderr=null) def test_main(mocker): obj = object() csft2data = mocker.patch('csft.__main__.csft2data', return_value=obj) pr = mocker.patch('builtins.print') assert 0 == main.main(argv=['csft']) csft2data.assert_called_once_with(main._dir('csft')) pr.assert_called_once_with(obj) def test_wrong_path(capsys): with raises(SystemExit): main.main(argv=[]) assert capsys.readouterr() with raises(SystemExit): main.main(argv=['path/is/not/a/directory']) assert capsys.readouterr() - def test_show_version(): + def test_show_version(capsys): - def print_version(): - try: + try: - main.main(argv=['-V']) + main.main(argv=['-V']) - except SystemExit as err: + except SystemExit as err: - assert 0 == err.code + assert 0 == err.code + from csft import __version__ + assert __version__ == capsys.readouterr().out.strip() - buffer = StringIO() - if version < '3.0': - with redirect_stderr(buffer): - print_version() - else: - with redirect_stdout(buffer): - print_version() - assert __version__ == buffer.getvalue().strip() - buffer.close() -
Use capsys instead of redirect_stderr
## Code Before: from contextlib import redirect_stderr, redirect_stdout from io import StringIO from os.path import devnull from subprocess import check_call from sys import version from pytest import fixture, raises from csft import __main__ as main, __version__ @fixture def null(): with open(devnull, 'w') as fobj: yield fobj def test_call(null): check_call(['python', '-m', 'csft', 'csft'], stdout=null, stderr=null) def test_main(mocker): obj = object() csft2data = mocker.patch('csft.__main__.csft2data', return_value=obj) pr = mocker.patch('builtins.print') assert 0 == main.main(argv=['csft']) csft2data.assert_called_once_with(main._dir('csft')) pr.assert_called_once_with(obj) def test_wrong_path(capsys): with raises(SystemExit): main.main(argv=[]) assert capsys.readouterr() with raises(SystemExit): main.main(argv=['path/is/not/a/directory']) assert capsys.readouterr() def test_show_version(): def print_version(): try: main.main(argv=['-V']) except SystemExit as err: assert 0 == err.code buffer = StringIO() if version < '3.0': with redirect_stderr(buffer): print_version() else: with redirect_stdout(buffer): print_version() assert __version__ == buffer.getvalue().strip() buffer.close() ## Instruction: Use capsys instead of redirect_stderr ## Code After: from os.path import devnull from subprocess import check_call from pytest import fixture, raises from csft import __main__ as main @fixture def null(): with open(devnull, 'w') as fobj: yield fobj def test_call(null): check_call(['python', '-m', 'csft', 'csft'], stdout=null, stderr=null) def test_main(mocker): obj = object() csft2data = mocker.patch('csft.__main__.csft2data', return_value=obj) pr = mocker.patch('builtins.print') assert 0 == main.main(argv=['csft']) csft2data.assert_called_once_with(main._dir('csft')) pr.assert_called_once_with(obj) def test_wrong_path(capsys): with raises(SystemExit): main.main(argv=[]) assert capsys.readouterr() with raises(SystemExit): main.main(argv=['path/is/not/a/directory']) assert capsys.readouterr() def test_show_version(capsys): try: main.main(argv=['-V']) except SystemExit as err: assert 0 == err.code from csft import __version__ assert __version__ == capsys.readouterr().out.strip()
# ... existing code ... from os.path import devnull from subprocess import check_call from pytest import fixture, raises from csft import __main__ as main # ... modified code ... def test_show_version(capsys): try: main.main(argv=['-V']) except SystemExit as err: assert 0 == err.code from csft import __version__ assert __version__ == capsys.readouterr().out.strip() # ... rest of the code ...
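The rewrite leans on pytest's capsys fixture, which captures stdout and stderr per test; readouterr() returns a result whose .out and .err hold everything captured since the last call. A standalone illustration of the pattern, unrelated to csft itself:

# test_capture_demo.py  (run with: pytest test_capture_demo.py)
def test_prints_greeting(capsys):
    print("hello")
    captured = capsys.readouterr()
    assert captured.out.strip() == "hello"
    assert captured.err == ""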
5bc3e6a3fb112b529f738142850860dd98a9d428
tests/runtests.py
tests/runtests.py
import glob import os import unittest def build_test_suite(): suite = unittest.TestSuite() for test_case in glob.glob('tests/test_*.py'): modname = os.path.splitext(test_case)[0] modname = modname.replace('/', '.') module = __import__(modname, {}, {}, ['1']) suite.addTest(unittest.TestLoader().loadTestsFromModule(module)) return suite if __name__ == "__main__": suite = build_test_suite() runner = unittest.TextTestRunner() runner.run(suite)
import glob import os import unittest import sys def build_test_suite(): suite = unittest.TestSuite() for test_case in glob.glob('tests/test_*.py'): modname = os.path.splitext(test_case)[0] modname = modname.replace('/', '.') module = __import__(modname, {}, {}, ['1']) suite.addTest(unittest.TestLoader().loadTestsFromModule(module)) return suite if __name__ == "__main__": suite = build_test_suite() runner = unittest.TextTestRunner() result = runner.run(suite) sys.exit(not result.wasSuccessful())
Make unittest return exit code 1 on failure
Make unittest return exit code 1 on failure

This is to allow Travis to catch test failures.
Python
bsd-3-clause
jorgecarleitao/pyglet-gui
import glob import os import unittest - + import sys def build_test_suite(): suite = unittest.TestSuite() for test_case in glob.glob('tests/test_*.py'): modname = os.path.splitext(test_case)[0] modname = modname.replace('/', '.') module = __import__(modname, {}, {}, ['1']) suite.addTest(unittest.TestLoader().loadTestsFromModule(module)) return suite if __name__ == "__main__": suite = build_test_suite() runner = unittest.TextTestRunner() - runner.run(suite) + result = runner.run(suite) + sys.exit(not result.wasSuccessful())
Make unittest return exit code 1 on failure
## Code Before: import glob import os import unittest def build_test_suite(): suite = unittest.TestSuite() for test_case in glob.glob('tests/test_*.py'): modname = os.path.splitext(test_case)[0] modname = modname.replace('/', '.') module = __import__(modname, {}, {}, ['1']) suite.addTest(unittest.TestLoader().loadTestsFromModule(module)) return suite if __name__ == "__main__": suite = build_test_suite() runner = unittest.TextTestRunner() runner.run(suite) ## Instruction: Make unittest return exit code 1 on failure ## Code After: import glob import os import unittest import sys def build_test_suite(): suite = unittest.TestSuite() for test_case in glob.glob('tests/test_*.py'): modname = os.path.splitext(test_case)[0] modname = modname.replace('/', '.') module = __import__(modname, {}, {}, ['1']) suite.addTest(unittest.TestLoader().loadTestsFromModule(module)) return suite if __name__ == "__main__": suite = build_test_suite() runner = unittest.TextTestRunner() result = runner.run(suite) sys.exit(not result.wasSuccessful())
... import os import unittest import sys def build_test_suite(): ... runner = unittest.TextTestRunner() result = runner.run(suite) sys.exit(not result.wasSuccessful()) ...
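The fix works because CI systems such as Travis treat any non-zero process status as failure: wasSuccessful() returns a bool, not flips it, and sys.exit(True) exits with status 1. A compact demonstration with a deliberately failing suite:

import sys
import unittest

class Demo(unittest.TestCase):
    def test_fails(self):
        self.assertTrue(False)

suite = unittest.TestLoader().loadTestsFromTestCase(Demo)
result = unittest.TextTestRunner().run(suite)
print(result.wasSuccessful())          # False
sys.exit(not result.wasSuccessful())   # True == 1, so the process exits 1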
f6429a3c4b413231ad480f2768d47b78ec0c690b
great_expectations/cli/cli_logging.py
great_expectations/cli/cli_logging.py
import logging import warnings warnings.filterwarnings("ignore") ### # REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND. # PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR ### logger = logging.getLogger("great_expectations.cli") def _set_up_logger(): # Log to console with a simple formatter; used by CLI formatter = logging.Formatter("%(message)s") handler = logging.StreamHandler() handler.setLevel(level=logging.WARNING) handler.setFormatter(formatter) module_logger = logging.getLogger("great_expectations") module_logger.addHandler(handler) return module_logger
import logging import warnings warnings.filterwarnings("ignore") ### # REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND. # PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR ### logger = logging.getLogger("great_expectations.cli") def _set_up_logger(): # Log to console with a simple formatter; used by CLI formatter = logging.Formatter("%(message)s") handler = logging.StreamHandler() handler.setFormatter(formatter) module_logger = logging.getLogger("great_expectations") module_logger.addHandler(handler) module_logger.setLevel(level=logging.WARNING) return module_logger
Set level on module logger instead
Set level on module logger instead
Python
apache-2.0
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
import logging import warnings warnings.filterwarnings("ignore") ### # REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND. # PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR ### logger = logging.getLogger("great_expectations.cli") def _set_up_logger(): # Log to console with a simple formatter; used by CLI formatter = logging.Formatter("%(message)s") handler = logging.StreamHandler() - handler.setLevel(level=logging.WARNING) + handler.setFormatter(formatter) module_logger = logging.getLogger("great_expectations") module_logger.addHandler(handler) + module_logger.setLevel(level=logging.WARNING) return module_logger
Set level on module logger instead
## Code Before: import logging import warnings warnings.filterwarnings("ignore") ### # REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND. # PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR ### logger = logging.getLogger("great_expectations.cli") def _set_up_logger(): # Log to console with a simple formatter; used by CLI formatter = logging.Formatter("%(message)s") handler = logging.StreamHandler() handler.setLevel(level=logging.WARNING) handler.setFormatter(formatter) module_logger = logging.getLogger("great_expectations") module_logger.addHandler(handler) return module_logger ## Instruction: Set level on module logger instead ## Code After: import logging import warnings warnings.filterwarnings("ignore") ### # REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND. # PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR ### logger = logging.getLogger("great_expectations.cli") def _set_up_logger(): # Log to console with a simple formatter; used by CLI formatter = logging.Formatter("%(message)s") handler = logging.StreamHandler() handler.setFormatter(formatter) module_logger = logging.getLogger("great_expectations") module_logger.addHandler(handler) module_logger.setLevel(level=logging.WARNING) return module_logger
... formatter = logging.Formatter("%(message)s") handler = logging.StreamHandler() handler.setFormatter(formatter) module_logger = logging.getLogger("great_expectations") module_logger.addHandler(handler) module_logger.setLevel(level=logging.WARNING) return module_logger ...
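The distinction is that a level on a handler only filters records that reach that particular handler, whereas a level on the logger drops records before any handler runs and also sets the effective level for children such as great_expectations.cli. A minimal, library-free illustration:

import logging

logger = logging.getLogger("demo")
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(message)s"))
logger.addHandler(handler)
logger.setLevel(logging.WARNING)   # filter at the logger, as in the change above

child = logging.getLogger("demo.cli")
child.info("dropped: below the inherited WARNING level")
child.warning("printed: meets the logger's level")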
42f21057388361e50416197b25be9dfbdb2764b0
any_imagefield/forms/widgets.py
any_imagefield/forms/widgets.py
import mimetypes from django.contrib.admin.widgets import AdminFileWidget from django.template.loader import render_to_string class ImagePreviewWidget(AdminFileWidget): """ An :class:`~django.forms.FileInput` widget that also displays a preview of the image. """ template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s' def render(self, name, value, attrs=None): is_image = False if value: if hasattr(value, 'path'): (mime_type, encoding) = mimetypes.guess_type(value.path) else: # Try to guess mime_type from name alone, for remote FileSystems (S3, etc...) (mime_type, encoding) = mimetypes.guess_type(value.name) is_image = mime_type and mime_type.startswith('image/') # Render different field for replacing input_field = super(ImagePreviewWidget, self).render(name, value, attrs) if not value: return input_field else: return render_to_string("any_imagefield/imagepreviewwidget/update.html", { 'value': value, 'is_image': is_image, 'input_field': input_field, 'input_text': self.input_text, })
import mimetypes from django.contrib.admin.widgets import AdminFileWidget from django.template.loader import render_to_string class ImagePreviewWidget(AdminFileWidget): """ An :class:`~django.forms.FileInput` widget that also displays a preview of the image. """ template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s' def render(self, name, value, *args, **kwargs): is_image = False if value: if hasattr(value, 'path'): (mime_type, encoding) = mimetypes.guess_type(value.path) else: # Try to guess mime_type from name alone, for remote FileSystems (S3, etc...) (mime_type, encoding) = mimetypes.guess_type(value.name) is_image = mime_type and mime_type.startswith('image/') # Render different field for replacing input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs) if not value: return input_field else: return render_to_string("any_imagefield/imagepreviewwidget/update.html", { 'value': value, 'is_image': is_image, 'input_field': input_field, 'input_text': self.input_text, })
Fix render() kwargs for Django 2.1
Fix render() kwargs for Django 2.1
Python
apache-2.0
edoburu/django-any-imagefield,edoburu/django-any-imagefield
import mimetypes from django.contrib.admin.widgets import AdminFileWidget from django.template.loader import render_to_string class ImagePreviewWidget(AdminFileWidget): """ An :class:`~django.forms.FileInput` widget that also displays a preview of the image. """ template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s' - def render(self, name, value, attrs=None): + def render(self, name, value, *args, **kwargs): is_image = False if value: if hasattr(value, 'path'): (mime_type, encoding) = mimetypes.guess_type(value.path) else: # Try to guess mime_type from name alone, for remote FileSystems (S3, etc...) (mime_type, encoding) = mimetypes.guess_type(value.name) is_image = mime_type and mime_type.startswith('image/') # Render different field for replacing - input_field = super(ImagePreviewWidget, self).render(name, value, attrs) + input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs) if not value: return input_field else: return render_to_string("any_imagefield/imagepreviewwidget/update.html", { 'value': value, 'is_image': is_image, 'input_field': input_field, 'input_text': self.input_text, })
Fix render() kwargs for Django 2.1
## Code Before: import mimetypes from django.contrib.admin.widgets import AdminFileWidget from django.template.loader import render_to_string class ImagePreviewWidget(AdminFileWidget): """ An :class:`~django.forms.FileInput` widget that also displays a preview of the image. """ template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s' def render(self, name, value, attrs=None): is_image = False if value: if hasattr(value, 'path'): (mime_type, encoding) = mimetypes.guess_type(value.path) else: # Try to guess mime_type from name alone, for remote FileSystems (S3, etc...) (mime_type, encoding) = mimetypes.guess_type(value.name) is_image = mime_type and mime_type.startswith('image/') # Render different field for replacing input_field = super(ImagePreviewWidget, self).render(name, value, attrs) if not value: return input_field else: return render_to_string("any_imagefield/imagepreviewwidget/update.html", { 'value': value, 'is_image': is_image, 'input_field': input_field, 'input_text': self.input_text, }) ## Instruction: Fix render() kwargs for Django 2.1 ## Code After: import mimetypes from django.contrib.admin.widgets import AdminFileWidget from django.template.loader import render_to_string class ImagePreviewWidget(AdminFileWidget): """ An :class:`~django.forms.FileInput` widget that also displays a preview of the image. """ template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s' def render(self, name, value, *args, **kwargs): is_image = False if value: if hasattr(value, 'path'): (mime_type, encoding) = mimetypes.guess_type(value.path) else: # Try to guess mime_type from name alone, for remote FileSystems (S3, etc...) (mime_type, encoding) = mimetypes.guess_type(value.name) is_image = mime_type and mime_type.startswith('image/') # Render different field for replacing input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs) if not value: return input_field else: return render_to_string("any_imagefield/imagepreviewwidget/update.html", { 'value': value, 'is_image': is_image, 'input_field': input_field, 'input_text': self.input_text, })
... template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s' def render(self, name, value, *args, **kwargs): is_image = False if value: ... # Render different field for replacing input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs) if not value: return input_field ...
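Django 2.1 began passing an extra renderer argument to Widget.render(), so overrides with the old fixed signature raise a TypeError; accepting and forwarding *args/**kwargs keeps one override compatible with both old and new callers. A framework-free sketch of the pattern, with invented class names:

class BaseWidget:
    # Django 2.1's signature gained ``renderer``; earlier releases lacked it.
    def render(self, name, value, attrs=None, renderer=None):
        return '<input name="%s">' % name

class PreviewWidget(BaseWidget):
    def render(self, name, value, *args, **kwargs):
        # Forwarding args untouched tolerates parent-signature changes
        # across framework versions.
        return super().render(name, value, *args, **kwargs) + ' <span>preview</span>'

print(PreviewWidget().render("photo", None, renderer=None))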
0e1dd74c70a2fa682b3cd3b0027162ad50ee9998
social/app/views/friend.py
social/app/views/friend.py
from django.http import HttpResponseRedirect from django.urls import reverse from django.views import generic from social.app.models.author import Author class FriendRequestsListView(generic.ListView): context_object_name = "all_friend_requests" template_name = "app/friend_requests_list.html" def get_queryset(self): return self.request.user.profile.incoming_friend_requests.all() def post(self, request): logged_in_author = self.request.user.profile accepted_friend_requests = request.POST.getlist('accepted_friend_requests') for new_friend_id in accepted_friend_requests: new_friend = Author.objects.get(id=new_friend_id) if new_friend.node.local: logged_in_author.accept_friend_request(new_friend) else: r = new_friend.node.post_friend_request(request, logged_in_author, new_friend) if 200 <= r.status_code < 300: # Success! logged_in_author.accept_friend_request(new_friend) else: # This one didn't work. Oh well! No easy way to show an error without ruining other accepts pass logged_in_author.save() return HttpResponseRedirect(reverse("app:friend-requests-list"))
from django.http import HttpResponseRedirect from django.urls import reverse from django.views import generic from social.app.models.author import Author class FriendRequestsListView(generic.ListView): context_object_name = "all_friend_requests" template_name = "app/friend_requests_list.html" def get_queryset(self): return self.request.user.profile.incoming_friend_requests.all() def post(self, request): logged_in_author = self.request.user.profile accepted_friend_requests = request.POST.getlist('accepted_friend_requests') for new_friend_id in accepted_friend_requests: new_friend = Author.objects.get(id=new_friend_id) if new_friend.node.local: logged_in_author.accept_friend_request(new_friend) else: r = new_friend.node.post_friend_request(request, logged_in_author, new_friend) if 200 <= r.status_code < 300: # Success! logged_in_author.accept_friend_request(new_friend) else: r.raise_for_status() logged_in_author.save() return HttpResponseRedirect(reverse("app:friend-requests-list"))
Put in a raise for status for now
Put in a raise for status for now
Python
apache-2.0
TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution
from django.http import HttpResponseRedirect from django.urls import reverse from django.views import generic from social.app.models.author import Author class FriendRequestsListView(generic.ListView): context_object_name = "all_friend_requests" template_name = "app/friend_requests_list.html" def get_queryset(self): return self.request.user.profile.incoming_friend_requests.all() def post(self, request): logged_in_author = self.request.user.profile accepted_friend_requests = request.POST.getlist('accepted_friend_requests') for new_friend_id in accepted_friend_requests: new_friend = Author.objects.get(id=new_friend_id) if new_friend.node.local: logged_in_author.accept_friend_request(new_friend) else: r = new_friend.node.post_friend_request(request, logged_in_author, new_friend) if 200 <= r.status_code < 300: # Success! logged_in_author.accept_friend_request(new_friend) else: + r.raise_for_status() - # This one didn't work. Oh well! No easy way to show an error without ruining other accepts - pass logged_in_author.save() return HttpResponseRedirect(reverse("app:friend-requests-list"))
Put in a raise for status for now
## Code Before: from django.http import HttpResponseRedirect from django.urls import reverse from django.views import generic from social.app.models.author import Author class FriendRequestsListView(generic.ListView): context_object_name = "all_friend_requests" template_name = "app/friend_requests_list.html" def get_queryset(self): return self.request.user.profile.incoming_friend_requests.all() def post(self, request): logged_in_author = self.request.user.profile accepted_friend_requests = request.POST.getlist('accepted_friend_requests') for new_friend_id in accepted_friend_requests: new_friend = Author.objects.get(id=new_friend_id) if new_friend.node.local: logged_in_author.accept_friend_request(new_friend) else: r = new_friend.node.post_friend_request(request, logged_in_author, new_friend) if 200 <= r.status_code < 300: # Success! logged_in_author.accept_friend_request(new_friend) else: # This one didn't work. Oh well! No easy way to show an error without ruining other accepts pass logged_in_author.save() return HttpResponseRedirect(reverse("app:friend-requests-list")) ## Instruction: Put in a raise for status for now ## Code After: from django.http import HttpResponseRedirect from django.urls import reverse from django.views import generic from social.app.models.author import Author class FriendRequestsListView(generic.ListView): context_object_name = "all_friend_requests" template_name = "app/friend_requests_list.html" def get_queryset(self): return self.request.user.profile.incoming_friend_requests.all() def post(self, request): logged_in_author = self.request.user.profile accepted_friend_requests = request.POST.getlist('accepted_friend_requests') for new_friend_id in accepted_friend_requests: new_friend = Author.objects.get(id=new_friend_id) if new_friend.node.local: logged_in_author.accept_friend_request(new_friend) else: r = new_friend.node.post_friend_request(request, logged_in_author, new_friend) if 200 <= r.status_code < 300: # Success! logged_in_author.accept_friend_request(new_friend) else: r.raise_for_status() logged_in_author.save() return HttpResponseRedirect(reverse("app:friend-requests-list"))
... logged_in_author.accept_friend_request(new_friend) else: r.raise_for_status() logged_in_author.save() ...
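Assuming node.post_friend_request() hands back a requests-style Response (the record never shows its implementation), raise_for_status() raises requests.HTTPError for any 4xx/5xx status, so a rejected remote request now fails loudly instead of being swallowed. A standalone behaviour check:

import requests

response = requests.Response()
response.status_code = 403   # simulate a rejected remote friend request

try:
    response.raise_for_status()
except requests.HTTPError as exc:
    print("raised:", exc)    # e.g. "raised: 403 Client Error: ..."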
e14ceda6370b506b80f65d45abd36c9f728e5699
pitchfork/manage_globals/forms.py
pitchfork/manage_globals/forms.py
from flask.ext.wtf import Form from wtforms import TextField, SelectField, IntegerField, BooleanField,\ PasswordField, TextAreaField, SubmitField, HiddenField, RadioField from wtforms import validators class VerbSet(Form): name = TextField('Verb:', validators=[validators.required()]) active = BooleanField('Active:') submit = SubmitField('Submit') class DCSet(Form): name = TextField('Name:', validators=[validators.required()]) abbreviation = TextField( 'Abbreviation:', validators=[validators.required()] ) submit = SubmitField('Submit')
from flask.ext.wtf import Form from wtforms import fields, validators class VerbSet(Form): name = fields.TextField('Verb:', validators=[validators.required()]) active = fields.BooleanField('Active:') submit = fields.SubmitField('Submit') class DCSet(Form): name = fields.TextField('Name:', validators=[validators.required()]) abbreviation = fields.TextField( 'Abbreviation:', validators=[validators.required()] ) submit = fields.SubmitField('Submit')
Rework imports to avoid specifying every type of field. Alter field definitions to reflect the change
Rework imports to avoid specifying every type of field. Alter field definitions to reflect the change
Python
apache-2.0
rackerlabs/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork
from flask.ext.wtf import Form - from wtforms import TextField, SelectField, IntegerField, BooleanField,\ - PasswordField, TextAreaField, SubmitField, HiddenField, RadioField - from wtforms import validators + from wtforms import fields, validators class VerbSet(Form): - name = TextField('Verb:', validators=[validators.required()]) + name = fields.TextField('Verb:', validators=[validators.required()]) - active = BooleanField('Active:') + active = fields.BooleanField('Active:') - submit = SubmitField('Submit') + submit = fields.SubmitField('Submit') class DCSet(Form): - name = TextField('Name:', validators=[validators.required()]) + name = fields.TextField('Name:', validators=[validators.required()]) - abbreviation = TextField( + abbreviation = fields.TextField( 'Abbreviation:', validators=[validators.required()] ) - submit = SubmitField('Submit') + submit = fields.SubmitField('Submit')
Rework imports to avoid specifying every type of field. Alter field definitions to reflect the change
## Code Before:

from flask.ext.wtf import Form
from wtforms import TextField, SelectField, IntegerField, BooleanField,\
    PasswordField, TextAreaField, SubmitField, HiddenField, RadioField
from wtforms import validators


class VerbSet(Form):
    name = TextField('Verb:', validators=[validators.required()])
    active = BooleanField('Active:')
    submit = SubmitField('Submit')


class DCSet(Form):
    name = TextField('Name:', validators=[validators.required()])
    abbreviation = TextField(
        'Abbreviation:',
        validators=[validators.required()]
    )
    submit = SubmitField('Submit')

## Instruction:
Rework imports to avoid specifying every type of field. Alter field definitions to reflect the change

## Code After:

from flask.ext.wtf import Form
from wtforms import fields, validators


class VerbSet(Form):
    name = fields.TextField('Verb:', validators=[validators.required()])
    active = fields.BooleanField('Active:')
    submit = fields.SubmitField('Submit')


class DCSet(Form):
    name = fields.TextField('Name:', validators=[validators.required()])
    abbreviation = fields.TextField(
        'Abbreviation:',
        validators=[validators.required()]
    )
    submit = fields.SubmitField('Submit')
# ... existing code ... from flask.ext.wtf import Form from wtforms import fields, validators class VerbSet(Form): name = fields.TextField('Verb:', validators=[validators.required()]) active = fields.BooleanField('Active:') submit = fields.SubmitField('Submit') class DCSet(Form): name = fields.TextField('Name:', validators=[validators.required()]) abbreviation = fields.TextField( 'Abbreviation:', validators=[validators.required()] ) submit = fields.SubmitField('Submit') # ... rest of the code ...
3356cd0c5c85a09107a6ba48e028a54eb5ca076c
script.py
script.py
import ast import click from parsing.parser import FileVisitor @click.command() @click.argument('code', type=click.File('rb')) @click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file') @click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees') def cli(code, printed, remove_builtins): """ Parses a file. codegrapher [file_name] """ parsed_code = ast.parse(code.read(), filename='code.py') visitor = FileVisitor() visitor.visit(parsed_code) if printed: click.echo('Classes in file:') for class_object in visitor.classes: if remove_builtins: class_object.remove_builtins() click.echo('=' * 80) click.echo(class_object.name) click.echo(class_object.pprint()) click.echo('')
import ast import click from graphing.graph import FunctionGrapher from parsing.parser import FileVisitor @click.command() @click.argument('code', type=click.File('rb')) @click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file') @click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees') @click.option('--output', help='Graphviz output file name') def cli(code, printed, remove_builtins, output): """ Parses a file. codegrapher [file_name] """ parsed_code = ast.parse(code.read(), filename='code.py') visitor = FileVisitor() visitor.visit(parsed_code) if printed: click.echo('Classes in file:') for class_object in visitor.classes: if remove_builtins: class_object.remove_builtins() click.echo('=' * 80) click.echo(class_object.name) click.echo(class_object.pprint()) click.echo('') if output: graph = FunctionGrapher() class_names = set(cls.name for cls in visitor.classes) for cls in visitor.classes: graph.add_dict_to_graph(class_names, cls.call_tree) graph.add_classes_to_graph(visitor.classes) graph.name = output graph.render()
Add graphviz file output argument
Add graphviz file output argument
Python
mit
LaurEars/codegrapher
import ast import click + from graphing.graph import FunctionGrapher from parsing.parser import FileVisitor @click.command() @click.argument('code', type=click.File('rb')) @click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file') @click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees') + @click.option('--output', help='Graphviz output file name') - def cli(code, printed, remove_builtins): + def cli(code, printed, remove_builtins, output): """ Parses a file. codegrapher [file_name] """ parsed_code = ast.parse(code.read(), filename='code.py') visitor = FileVisitor() visitor.visit(parsed_code) if printed: click.echo('Classes in file:') for class_object in visitor.classes: if remove_builtins: class_object.remove_builtins() click.echo('=' * 80) click.echo(class_object.name) click.echo(class_object.pprint()) click.echo('') + if output: + graph = FunctionGrapher() + class_names = set(cls.name for cls in visitor.classes) + for cls in visitor.classes: + graph.add_dict_to_graph(class_names, cls.call_tree) + graph.add_classes_to_graph(visitor.classes) + graph.name = output + graph.render()
Add graphviz file output argument
## Code Before: import ast import click from parsing.parser import FileVisitor @click.command() @click.argument('code', type=click.File('rb')) @click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file') @click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees') def cli(code, printed, remove_builtins): """ Parses a file. codegrapher [file_name] """ parsed_code = ast.parse(code.read(), filename='code.py') visitor = FileVisitor() visitor.visit(parsed_code) if printed: click.echo('Classes in file:') for class_object in visitor.classes: if remove_builtins: class_object.remove_builtins() click.echo('=' * 80) click.echo(class_object.name) click.echo(class_object.pprint()) click.echo('') ## Instruction: Add graphviz file output argument ## Code After: import ast import click from graphing.graph import FunctionGrapher from parsing.parser import FileVisitor @click.command() @click.argument('code', type=click.File('rb')) @click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file') @click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees') @click.option('--output', help='Graphviz output file name') def cli(code, printed, remove_builtins, output): """ Parses a file. codegrapher [file_name] """ parsed_code = ast.parse(code.read(), filename='code.py') visitor = FileVisitor() visitor.visit(parsed_code) if printed: click.echo('Classes in file:') for class_object in visitor.classes: if remove_builtins: class_object.remove_builtins() click.echo('=' * 80) click.echo(class_object.name) click.echo(class_object.pprint()) click.echo('') if output: graph = FunctionGrapher() class_names = set(cls.name for cls in visitor.classes) for cls in visitor.classes: graph.add_dict_to_graph(class_names, cls.call_tree) graph.add_classes_to_graph(visitor.classes) graph.name = output graph.render()
// ... existing code ... import click from graphing.graph import FunctionGrapher from parsing.parser import FileVisitor // ... modified code ... @click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file') @click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees') @click.option('--output', help='Graphviz output file name') def cli(code, printed, remove_builtins, output): """ Parses a file. ... click.echo(class_object.pprint()) click.echo('') if output: graph = FunctionGrapher() class_names = set(cls.name for cls in visitor.classes) for cls in visitor.classes: graph.add_dict_to_graph(class_names, cls.call_tree) graph.add_classes_to_graph(visitor.classes) graph.name = output graph.render() // ... rest of the code ...
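Click commands are easiest to exercise via CliRunner. The sketch below assumes the file above is importable as script and that a code.py exists in the working directory; both names are assumptions, since the record shows only the command definition.

from click.testing import CliRunner

from script import cli   # assumed module name for the file shown above

runner = CliRunner()
result = runner.invoke(cli, ["code.py", "--printed", "--output", "callgraph"])
print(result.exit_code)
print(result.output)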
8f1b473e2dab982e989e9a041aa14e31050d2f4b
scripts/promote_orga.py
scripts/promote_orga.py
import click

from byceps.database import db

from bootstrap.helpers import promote_orga
from bootstrap.util import app_context, get_config_name_from_env
from bootstrap.validators import validate_brand, validate_user_screen_name


@click.command()
@click.argument('brand', callback=validate_brand)
@click.argument('user', callback=validate_user_screen_name)
def execute(brand, user):
    click.echo('Promoting user "{}" to orga for brand "{}" ... '
        .format(user.screen_name, brand.title), nl=False)

    promote_orga(brand, user)
    db.session.commit()

    click.secho('done.', fg='green')


if __name__ == '__main__':
    config_name = get_config_name_from_env()
    with app_context(config_name):
        execute()

import click

from byceps.services.orga import service as orga_service

from bootstrap.util import app_context, get_config_name_from_env
from bootstrap.validators import validate_brand, validate_user_screen_name


@click.command()
@click.argument('brand', callback=validate_brand)
@click.argument('user', callback=validate_user_screen_name)
def execute(brand, user):
    click.echo('Promoting user "{}" to orga for brand "{}" ... '
        .format(user.screen_name, brand.title), nl=False)

    orga_service.create_orga_flag(brand.id, user.id)

    click.secho('done.', fg='green')


if __name__ == '__main__':
    config_name = get_config_name_from_env()
    with app_context(config_name):
        execute()
Use service in script to promote a user to organizer
Use service in script to promote a user to organizer
Python
bsd-3-clause
m-ober/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps
import click

- from byceps.database import db
+ from byceps.services.orga import service as orga_service

- from bootstrap.helpers import promote_orga
from bootstrap.util import app_context, get_config_name_from_env
from bootstrap.validators import validate_brand, validate_user_screen_name


@click.command()
@click.argument('brand', callback=validate_brand)
@click.argument('user', callback=validate_user_screen_name)
def execute(brand, user):
    click.echo('Promoting user "{}" to orga for brand "{}" ... '
        .format(user.screen_name, brand.title), nl=False)

+     orga_service.create_orga_flag(brand.id, user.id)
-     promote_orga(brand, user)
-     db.session.commit()

    click.secho('done.', fg='green')


if __name__ == '__main__':
    config_name = get_config_name_from_env()
    with app_context(config_name):
        execute()
Use service in script to promote a user to organizer
## Code Before:
import click

from byceps.database import db

from bootstrap.helpers import promote_orga
from bootstrap.util import app_context, get_config_name_from_env
from bootstrap.validators import validate_brand, validate_user_screen_name


@click.command()
@click.argument('brand', callback=validate_brand)
@click.argument('user', callback=validate_user_screen_name)
def execute(brand, user):
    click.echo('Promoting user "{}" to orga for brand "{}" ... '
        .format(user.screen_name, brand.title), nl=False)

    promote_orga(brand, user)
    db.session.commit()

    click.secho('done.', fg='green')


if __name__ == '__main__':
    config_name = get_config_name_from_env()
    with app_context(config_name):
        execute()

## Instruction:
Use service in script to promote a user to organizer

## Code After:
import click

from byceps.services.orga import service as orga_service

from bootstrap.util import app_context, get_config_name_from_env
from bootstrap.validators import validate_brand, validate_user_screen_name


@click.command()
@click.argument('brand', callback=validate_brand)
@click.argument('user', callback=validate_user_screen_name)
def execute(brand, user):
    click.echo('Promoting user "{}" to orga for brand "{}" ... '
        .format(user.screen_name, brand.title), nl=False)

    orga_service.create_orga_flag(brand.id, user.id)

    click.secho('done.', fg='green')


if __name__ == '__main__':
    config_name = get_config_name_from_env()
    with app_context(config_name):
        execute()

...
import click

from byceps.services.orga import service as orga_service

from bootstrap.util import app_context, get_config_name_from_env
from bootstrap.validators import validate_brand, validate_user_screen_name
...
        .format(user.screen_name, brand.title), nl=False)

    orga_service.create_orga_flag(brand.id, user.id)

    click.secho('done.', fg='green')
...
c0169c5073e4a83120f4d6860258c3085b4c1cf5
setup.py
setup.py
import subprocess as sp

print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')

sp.check_call(['flit', 'install', '--deps', 'production'])

import subprocess as sp
import sys
import os

print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')

flit = os.path.join(os.path.dirname(sys.executable), 'flit')
cmd = [flit, 'install', '--deps', 'production']
print(" ".join(cmd))
sp.check_call(cmd)
Use flit that's been installed in the virtualenv
Use flit that's been installed in the virtualenv
Python
bsd-3-clause
jupyter/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,EdwardJKim/nbgrader,dementrock/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,modulexcite/nbgrader
import subprocess as sp
+ import sys
+ import os

print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')

+ flit = os.path.join(os.path.dirname(sys.executable), 'flit')
- sp.check_call(['flit', 'install', '--deps', 'production'])
+ cmd = [flit, 'install', '--deps', 'production']
+ print(" ".join(cmd))
+ sp.check_call(cmd)
Use flit that's been installed in the virtualenv
## Code Before:
import subprocess as sp

print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')

sp.check_call(['flit', 'install', '--deps', 'production'])

## Instruction:
Use flit that's been installed in the virtualenv

## Code After:
import subprocess as sp
import sys
import os

print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')

flit = os.path.join(os.path.dirname(sys.executable), 'flit')
cmd = [flit, 'install', '--deps', 'production']
print(" ".join(cmd))
sp.check_call(cmd)

...
import subprocess as sp
import sys
import os

print('Warning: this setup.py uses flit, not setuptools.')
...
print('Behavior may not be exactly what you expect. Use at your own risk!')

flit = os.path.join(os.path.dirname(sys.executable), 'flit')
cmd = [flit, 'install', '--deps', 'production']
print(" ".join(cmd))
sp.check_call(cmd)
...
98f0313e935db2491a615e868e3bd5da21769c03
gocd/api/pipeline.py
gocd/api/pipeline.py
from gocd.api.endpoint import Endpoint


class Pipeline(Endpoint):
    base_path = 'go/api/pipelines/{id}'
    id = 'name'

    def __init__(self, server, name):
        self.server = server
        self.name = name

    def history(self, offset=0):
        return self._get('/history/{offset:d}'.format(offset=offset or 0))

    def release(self):
        return self._post('/releaseLock')
    unlock = release

    def pause(self, reason=''):
        return self._post('/pause', pauseCause=reason)

    def unpause(self):
        return self._post('/unpause')

    def status(self):
        return self._get('/status')

    def instance(self, counter):
        return self._get('/instance/{counter:d}'.format(counter=counter))

    def schedule(self, **material_args):
        return self._post('/schedule', ok_status=202, **material_args)
    run = schedule

from gocd.api.endpoint import Endpoint


class Pipeline(Endpoint):
    base_path = 'go/api/pipelines/{id}'
    id = 'name'

    def __init__(self, server, name):
        self.server = server
        self.name = name

    def history(self, offset=0):
        return self._get('/history/{offset:d}'.format(offset=offset or 0))

    def release(self):
        return self._post('/releaseLock')
    unlock = release

    def pause(self, reason=''):
        return self._post('/pause', pauseCause=reason)

    def unpause(self):
        return self._post('/unpause')

    def status(self):
        return self._get('/status')

    def instance(self, counter):
        return self._get('/instance/{counter:d}'.format(counter=counter))

    def schedule(self, **material_args):
        return self._post('/schedule', ok_status=202, **material_args)
    run = schedule
    trigger = schedule
Add trigger as an alias for schedule
Add trigger as an alias for schedule

"Have you triggered that pipeline" is a fairly common thing to say.
Python
mit
henriquegemignani/py-gocd,gaqzi/py-gocd
from gocd.api.endpoint import Endpoint


class Pipeline(Endpoint):
    base_path = 'go/api/pipelines/{id}'
    id = 'name'

    def __init__(self, server, name):
        self.server = server
        self.name = name

    def history(self, offset=0):
        return self._get('/history/{offset:d}'.format(offset=offset or 0))

    def release(self):
        return self._post('/releaseLock')
    unlock = release

    def pause(self, reason=''):
        return self._post('/pause', pauseCause=reason)

    def unpause(self):
        return self._post('/unpause')

    def status(self):
        return self._get('/status')

    def instance(self, counter):
        return self._get('/instance/{counter:d}'.format(counter=counter))

    def schedule(self, **material_args):
        return self._post('/schedule', ok_status=202, **material_args)
    run = schedule
+     trigger = schedule
Add trigger as an alias for schedule
## Code Before:
from gocd.api.endpoint import Endpoint


class Pipeline(Endpoint):
    base_path = 'go/api/pipelines/{id}'
    id = 'name'

    def __init__(self, server, name):
        self.server = server
        self.name = name

    def history(self, offset=0):
        return self._get('/history/{offset:d}'.format(offset=offset or 0))

    def release(self):
        return self._post('/releaseLock')
    unlock = release

    def pause(self, reason=''):
        return self._post('/pause', pauseCause=reason)

    def unpause(self):
        return self._post('/unpause')

    def status(self):
        return self._get('/status')

    def instance(self, counter):
        return self._get('/instance/{counter:d}'.format(counter=counter))

    def schedule(self, **material_args):
        return self._post('/schedule', ok_status=202, **material_args)
    run = schedule

## Instruction:
Add trigger as an alias for schedule

## Code After:
from gocd.api.endpoint import Endpoint


class Pipeline(Endpoint):
    base_path = 'go/api/pipelines/{id}'
    id = 'name'

    def __init__(self, server, name):
        self.server = server
        self.name = name

    def history(self, offset=0):
        return self._get('/history/{offset:d}'.format(offset=offset or 0))

    def release(self):
        return self._post('/releaseLock')
    unlock = release

    def pause(self, reason=''):
        return self._post('/pause', pauseCause=reason)

    def unpause(self):
        return self._post('/unpause')

    def status(self):
        return self._get('/status')

    def instance(self, counter):
        return self._get('/instance/{counter:d}'.format(counter=counter))

    def schedule(self, **material_args):
        return self._post('/schedule', ok_status=202, **material_args)
    run = schedule
    trigger = schedule

...
        return self._post('/schedule', ok_status=202, **material_args)
    run = schedule
    trigger = schedule
...
f2a0c0c7329087421f6d3c237d2bb5f9633d180c
linear_math_tests/test_alignedobjectarray.py
linear_math_tests/test_alignedobjectarray.py
from __future__ import unicode_literals, print_function, absolute_import

import unittest
import math
import bullet


class ClassTestName(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

from __future__ import unicode_literals, print_function, absolute_import

import unittest
import math
import bullet


class ClassTestName(unittest.TestCase):
    def setUp(self):
        self.a = bullet.btVector3Array()
        for i in range(10):
            self.a.append(bullet.btVector3(i, i+1, i+2))
        self.b = bullet.btVector3Array()
        for i in range(10, 20):
            self.b.append(bullet.btVector3(i, i+1, i+2))

    def test_assignment(self):
        self.a[0] = bullet.btVector3(21, 22, 23)
        self.assertEqual(self.a[0],
                         bullet.btVector3(21, 22, 23))

        def _slice():
            self.a[0:3] = bullet.btVector3()

        self.assertRaises(RuntimeError, _slice)

    def tearDown(self):
        del self.a
        del self.b
Add some basic tests for assignment. Note that slicing is not supported
Add some basic tests for assignment. Note that slicing is not supported
Python
mit
Klumhru/boost-python-bullet,Klumhru/boost-python-bullet,Klumhru/boost-python-bullet
from __future__ import unicode_literals, print_function, absolute_import

import unittest
import math
import bullet


class ClassTestName(unittest.TestCase):
    def setUp(self):
-         pass
+         self.a = bullet.btVector3Array()
+         for i in range(10):
+             self.a.append(bullet.btVector3(i, i+1, i+2))
+         self.b = bullet.btVector3Array()
+         for i in range(10, 20):
+             self.b.append(bullet.btVector3(i, i+1, i+2))
+
+     def test_assignment(self):
+         self.a[0] = bullet.btVector3(21, 22, 23)
+         self.assertEqual(self.a[0],
+                          bullet.btVector3(21, 22, 23))
+
+         def _slice():
+             self.a[0:3] = bullet.btVector3()
+
+         self.assertRaises(RuntimeError, _slice)

    def tearDown(self):
-         pass
+         del self.a
+         del self.b
Add some basic tests for assignment. Note that slicing is not supported
## Code Before:
from __future__ import unicode_literals, print_function, absolute_import

import unittest
import math
import bullet


class ClassTestName(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

## Instruction:
Add some basic tests for assignment. Note that slicing is not supported

## Code After:
from __future__ import unicode_literals, print_function, absolute_import

import unittest
import math
import bullet


class ClassTestName(unittest.TestCase):
    def setUp(self):
        self.a = bullet.btVector3Array()
        for i in range(10):
            self.a.append(bullet.btVector3(i, i+1, i+2))
        self.b = bullet.btVector3Array()
        for i in range(10, 20):
            self.b.append(bullet.btVector3(i, i+1, i+2))

    def test_assignment(self):
        self.a[0] = bullet.btVector3(21, 22, 23)
        self.assertEqual(self.a[0],
                         bullet.btVector3(21, 22, 23))

        def _slice():
            self.a[0:3] = bullet.btVector3()

        self.assertRaises(RuntimeError, _slice)

    def tearDown(self):
        del self.a
        del self.b

# ... existing code ...
class ClassTestName(unittest.TestCase):
    def setUp(self):
        self.a = bullet.btVector3Array()
        for i in range(10):
            self.a.append(bullet.btVector3(i, i+1, i+2))
        self.b = bullet.btVector3Array()
        for i in range(10, 20):
            self.b.append(bullet.btVector3(i, i+1, i+2))

    def test_assignment(self):
        self.a[0] = bullet.btVector3(21, 22, 23)
        self.assertEqual(self.a[0],
                         bullet.btVector3(21, 22, 23))

        def _slice():
            self.a[0:3] = bullet.btVector3()

        self.assertRaises(RuntimeError, _slice)

    def tearDown(self):
        del self.a
        del self.b
# ... rest of the code ...
fbe6722fd74b5e260892f5664226bc66d5424d79
kindred/Token.py
kindred/Token.py
class Token:
    """
    Individual word with lemma, part-of-speech and location in text.

    :ivar word: Unprocessed word
    :ivar lemma: Lemmatized word
    :ivar partofspeech: Part-of-speech of word
    :ivar startPos: Start position of token in sentence
    :ivar endPos: End position of token in sentence
    """

    def __init__(self,word,lemma,partofspeech,startPos,endPos):
        """
        Constructor for Token class

        :param word: Unprocessed word
        :param lemma: Lemmatized word
        :param partofspeech: Part-of-speech of word
        :param startPos: Start position of token in sentence
        :param endPos: End position of token in sentence
        :type word: str
        :type lemma: str
        :type partofspeech: str
        :type startPos: int
        :type endPos: int
        """

        self.word = word
        self.lemma = lemma
        self.partofspeech = partofspeech
        self.startPos = startPos
        self.endPos = endPos

    def __str__(self):
        return self.word

    def __repr__(self):
        return self.__str__()

class Token:
    """
    Individual word with lemma, part-of-speech and location in text.

    :ivar word: Unprocessed word
    :ivar lemma: Lemmatized word
    :ivar partofspeech: Part-of-speech of word
    :ivar startPos: Start position of token in document text (note: not the sentence text)
    :ivar endPos: End position of token in document text (note: not the sentence text)
    """

    def __init__(self,word,lemma,partofspeech,startPos,endPos):
        """
        Constructor for Token class

        :param word: Unprocessed word
        :param lemma: Lemmatized word
        :param partofspeech: Part-of-speech of word
        :param startPos: Start position of token in document text (note: not the sentence text)
        :param endPos: End position of token in document text (note: not the sentence text)
        :type word: str
        :type lemma: str
        :type partofspeech: str
        :type startPos: int
        :type endPos: int
        """

        self.word = word
        self.lemma = lemma
        self.partofspeech = partofspeech
        self.startPos = startPos
        self.endPos = endPos

    def __str__(self):
        return self.word

    def __repr__(self):
        return self.__str__()
Fix mistaken document about token pos
Fix mistaken document about token pos
Python
mit
jakelever/kindred,jakelever/kindred
class Token:
    """
    Individual word with lemma, part-of-speech and location in text.

    :ivar word: Unprocessed word
    :ivar lemma: Lemmatized word
    :ivar partofspeech: Part-of-speech of word
-     :ivar startPos: Start position of token in sentence
-     :ivar endPos: End position of token in sentence
+     :ivar startPos: Start position of token in document text (note: not the sentence text)
+     :ivar endPos: End position of token in document text (note: not the sentence text)
    """

    def __init__(self,word,lemma,partofspeech,startPos,endPos):
        """
        Constructor for Token class

        :param word: Unprocessed word
        :param lemma: Lemmatized word
        :param partofspeech: Part-of-speech of word
-         :param startPos: Start position of token in sentence
+         :param startPos: Start position of token in document text (note: not the sentence text)
-         :param endPos: End position of token in sentence
+         :param endPos: End position of token in document text (note: not the sentence text)
        :type word: str
        :type lemma: str
        :type partofspeech: str
        :type startPos: int
        :type endPos: int
        """

        self.word = word
        self.lemma = lemma
        self.partofspeech = partofspeech
        self.startPos = startPos
        self.endPos = endPos

    def __str__(self):
        return self.word

    def __repr__(self):
        return self.__str__()
Fix mistaken document about token pos
## Code Before:
class Token:
    """
    Individual word with lemma, part-of-speech and location in text.

    :ivar word: Unprocessed word
    :ivar lemma: Lemmatized word
    :ivar partofspeech: Part-of-speech of word
    :ivar startPos: Start position of token in sentence
    :ivar endPos: End position of token in sentence
    """

    def __init__(self,word,lemma,partofspeech,startPos,endPos):
        """
        Constructor for Token class

        :param word: Unprocessed word
        :param lemma: Lemmatized word
        :param partofspeech: Part-of-speech of word
        :param startPos: Start position of token in sentence
        :param endPos: End position of token in sentence
        :type word: str
        :type lemma: str
        :type partofspeech: str
        :type startPos: int
        :type endPos: int
        """

        self.word = word
        self.lemma = lemma
        self.partofspeech = partofspeech
        self.startPos = startPos
        self.endPos = endPos

    def __str__(self):
        return self.word

    def __repr__(self):
        return self.__str__()

## Instruction:
Fix mistaken document about token pos

## Code After:
class Token:
    """
    Individual word with lemma, part-of-speech and location in text.

    :ivar word: Unprocessed word
    :ivar lemma: Lemmatized word
    :ivar partofspeech: Part-of-speech of word
    :ivar startPos: Start position of token in document text (note: not the sentence text)
    :ivar endPos: End position of token in document text (note: not the sentence text)
    """

    def __init__(self,word,lemma,partofspeech,startPos,endPos):
        """
        Constructor for Token class

        :param word: Unprocessed word
        :param lemma: Lemmatized word
        :param partofspeech: Part-of-speech of word
        :param startPos: Start position of token in document text (note: not the sentence text)
        :param endPos: End position of token in document text (note: not the sentence text)
        :type word: str
        :type lemma: str
        :type partofspeech: str
        :type startPos: int
        :type endPos: int
        """

        self.word = word
        self.lemma = lemma
        self.partofspeech = partofspeech
        self.startPos = startPos
        self.endPos = endPos

    def __str__(self):
        return self.word

    def __repr__(self):
        return self.__str__()

// ... existing code ...
    :ivar lemma: Lemmatized word
    :ivar partofspeech: Part-of-speech of word
    :ivar startPos: Start position of token in document text (note: not the sentence text)
    :ivar endPos: End position of token in document text (note: not the sentence text)
    """
// ... modified code ...
        :param lemma: Lemmatized word
        :param partofspeech: Part-of-speech of word
        :param startPos: Start position of token in document text (note: not the sentence text)
        :param endPos: End position of token in document text (note: not the sentence text)
        :type word: str
        :type lemma: str
// ... rest of the code ...
d70360601669f9e58072cd121de79896690471fd
buildlet/datastore/tests/test_inmemory.py
buildlet/datastore/tests/test_inmemory.py
import unittest

from ..inmemory import (
    DataValueInMemory, DataStreamInMemory, DataStoreNestableInMemory)
from .mixintestcase import (
    MixInValueTestCase, MixInStreamTestCase, MixInNestableAutoValueTestCase)


class TestDataValueInMemory(MixInValueTestCase, unittest.TestCase):
    dstype = DataValueInMemory

    def test_set_get_singleton(self):
        obj = object()
        self.ds.set(obj)
        self.assertTrue(self.ds.get() is obj)


class TestDataStreamInMemory(MixInStreamTestCase, unittest.TestCase):
    dstype = DataStreamInMemory


class TestDataStoreNestableInMemory(MixInNestableAutoValueTestCase,
                                    unittest.TestCase):
    dstype = DataStoreNestableInMemory

import unittest

from ..inmemory import (
    DataValueInMemory, DataStreamInMemory,
    DataStoreNestableInMemory, DataStoreNestableInMemoryAutoValue)
from .mixintestcase import (
    MixInValueTestCase, MixInStreamTestCase,
    MixInNestableTestCase, MixInNestableAutoValueTestCase)


class TestDataValueInMemory(MixInValueTestCase, unittest.TestCase):
    dstype = DataValueInMemory

    def test_set_get_singleton(self):
        obj = object()
        self.ds.set(obj)
        self.assertTrue(self.ds.get() is obj)


class TestDataStreamInMemory(MixInStreamTestCase, unittest.TestCase):
    dstype = DataStreamInMemory


class TestDataStoreNestableInMemory(MixInNestableTestCase,
                                    unittest.TestCase):
    dstype = DataStoreNestableInMemory


class TestDataStoreNestableInMemoryAutoValue(MixInNestableAutoValueTestCase,
                                             unittest.TestCase):
    dstype = DataStoreNestableInMemoryAutoValue
Fix and add tests for datastore.inmemory
Fix and add tests for datastore.inmemory
Python
bsd-3-clause
tkf/buildlet
import unittest

from ..inmemory import (
-     DataValueInMemory, DataStreamInMemory, DataStoreNestableInMemory)
+     DataValueInMemory, DataStreamInMemory,
+     DataStoreNestableInMemory, DataStoreNestableInMemoryAutoValue)
from .mixintestcase import (
+     MixInValueTestCase, MixInStreamTestCase,
-     MixInValueTestCase, MixInStreamTestCase, MixInNestableAutoValueTestCase)
+     MixInNestableTestCase, MixInNestableAutoValueTestCase)


class TestDataValueInMemory(MixInValueTestCase, unittest.TestCase):
    dstype = DataValueInMemory

    def test_set_get_singleton(self):
        obj = object()
        self.ds.set(obj)
        self.assertTrue(self.ds.get() is obj)


class TestDataStreamInMemory(MixInStreamTestCase, unittest.TestCase):
    dstype = DataStreamInMemory


- class TestDataStoreNestableInMemory(MixInNestableAutoValueTestCase,
+ class TestDataStoreNestableInMemory(MixInNestableTestCase,
                                    unittest.TestCase):
    dstype = DataStoreNestableInMemory
+
+
+ class TestDataStoreNestableInMemoryAutoValue(MixInNestableAutoValueTestCase,
+                                              unittest.TestCase):
+     dstype = DataStoreNestableInMemoryAutoValue
+
Fix and add tests for datastore.inmemory
## Code Before:
import unittest

from ..inmemory import (
    DataValueInMemory, DataStreamInMemory, DataStoreNestableInMemory)
from .mixintestcase import (
    MixInValueTestCase, MixInStreamTestCase, MixInNestableAutoValueTestCase)


class TestDataValueInMemory(MixInValueTestCase, unittest.TestCase):
    dstype = DataValueInMemory

    def test_set_get_singleton(self):
        obj = object()
        self.ds.set(obj)
        self.assertTrue(self.ds.get() is obj)


class TestDataStreamInMemory(MixInStreamTestCase, unittest.TestCase):
    dstype = DataStreamInMemory


class TestDataStoreNestableInMemory(MixInNestableAutoValueTestCase,
                                    unittest.TestCase):
    dstype = DataStoreNestableInMemory

## Instruction:
Fix and add tests for datastore.inmemory

## Code After:
import unittest

from ..inmemory import (
    DataValueInMemory, DataStreamInMemory,
    DataStoreNestableInMemory, DataStoreNestableInMemoryAutoValue)
from .mixintestcase import (
    MixInValueTestCase, MixInStreamTestCase,
    MixInNestableTestCase, MixInNestableAutoValueTestCase)


class TestDataValueInMemory(MixInValueTestCase, unittest.TestCase):
    dstype = DataValueInMemory

    def test_set_get_singleton(self):
        obj = object()
        self.ds.set(obj)
        self.assertTrue(self.ds.get() is obj)


class TestDataStreamInMemory(MixInStreamTestCase, unittest.TestCase):
    dstype = DataStreamInMemory


class TestDataStoreNestableInMemory(MixInNestableTestCase,
                                    unittest.TestCase):
    dstype = DataStoreNestableInMemory


class TestDataStoreNestableInMemoryAutoValue(MixInNestableAutoValueTestCase,
                                             unittest.TestCase):
    dstype = DataStoreNestableInMemoryAutoValue

...
from ..inmemory import (
    DataValueInMemory, DataStreamInMemory,
    DataStoreNestableInMemory, DataStoreNestableInMemoryAutoValue)
from .mixintestcase import (
    MixInValueTestCase, MixInStreamTestCase,
    MixInNestableTestCase, MixInNestableAutoValueTestCase)
...
class TestDataStoreNestableInMemory(MixInNestableTestCase,
                                    unittest.TestCase):
    dstype = DataStoreNestableInMemory


class TestDataStoreNestableInMemoryAutoValue(MixInNestableAutoValueTestCase,
                                             unittest.TestCase):
    dstype = DataStoreNestableInMemoryAutoValue
...
0d2816e4ea0bf5a04794456651e79f7db9b2571f
src/jupyter_notebook_gist/config.py
src/jupyter_notebook_gist/config.py
from traitlets.config import LoggingConfigurable
from traitlets.traitlets import Unicode


class NotebookGist(LoggingConfigurable):

    oauth_client_id = Unicode(
        '',
        help='The GitHub application OAUTH client ID',
    ).tag(config=True)

    oauth_client_secret = Unicode(
        '',
        help='The GitHub application OAUTH client secret',
    ).tag(config=True)

    def __init__(self, *args, **kwargs):
        self.config_manager = kwargs.pop('config_manager')
        super(NotebookGist, self).__init__(*args, **kwargs)
        # update the frontend settings with the currently passed
        # OAUTH client id
        client_id = self.config.NotebookGist.oauth_client_id
        if not isinstance(client_id, (str, bytes)):
            client_id = None
        self.config_manager.update('notebook', {
            'oauth_client_id': client_id,
        })

import six
from traitlets.config import LoggingConfigurable
from traitlets.traitlets import Unicode


class NotebookGist(LoggingConfigurable):

    oauth_client_id = Unicode(
        '',
        help='The GitHub application OAUTH client ID',
    ).tag(config=True)

    oauth_client_secret = Unicode(
        '',
        help='The GitHub application OAUTH client secret',
    ).tag(config=True)

    def __init__(self, *args, **kwargs):
        self.config_manager = kwargs.pop('config_manager')
        super(NotebookGist, self).__init__(*args, **kwargs)
        # update the frontend settings with the currently passed
        # OAUTH client id
        client_id = self.config.NotebookGist.oauth_client_id
        if not isinstance(client_id, six.string_types):
            client_id = None
        self.config_manager.update('notebook', {
            'oauth_client_id': client_id,
        })
Use six for correct Python2/3 compatibility
Use six for correct Python2/3 compatibility
Python
mpl-2.0
mreid-moz/jupyter-notebook-gist,mozilla/jupyter-notebook-gist,mozilla/jupyter-notebook-gist,mreid-moz/jupyter-notebook-gist
+ import six
from traitlets.config import LoggingConfigurable
from traitlets.traitlets import Unicode


class NotebookGist(LoggingConfigurable):

    oauth_client_id = Unicode(
        '',
        help='The GitHub application OAUTH client ID',
    ).tag(config=True)

    oauth_client_secret = Unicode(
        '',
        help='The GitHub application OAUTH client secret',
    ).tag(config=True)

    def __init__(self, *args, **kwargs):
        self.config_manager = kwargs.pop('config_manager')
        super(NotebookGist, self).__init__(*args, **kwargs)
        # update the frontend settings with the currently passed
        # OAUTH client id
        client_id = self.config.NotebookGist.oauth_client_id
-         if not isinstance(client_id, (str, bytes)):
+         if not isinstance(client_id, six.string_types):
            client_id = None
        self.config_manager.update('notebook', {
            'oauth_client_id': client_id,
        })
Use six for correct Python2/3 compatibility
## Code Before:
from traitlets.config import LoggingConfigurable
from traitlets.traitlets import Unicode


class NotebookGist(LoggingConfigurable):

    oauth_client_id = Unicode(
        '',
        help='The GitHub application OAUTH client ID',
    ).tag(config=True)

    oauth_client_secret = Unicode(
        '',
        help='The GitHub application OAUTH client secret',
    ).tag(config=True)

    def __init__(self, *args, **kwargs):
        self.config_manager = kwargs.pop('config_manager')
        super(NotebookGist, self).__init__(*args, **kwargs)
        # update the frontend settings with the currently passed
        # OAUTH client id
        client_id = self.config.NotebookGist.oauth_client_id
        if not isinstance(client_id, (str, bytes)):
            client_id = None
        self.config_manager.update('notebook', {
            'oauth_client_id': client_id,
        })

## Instruction:
Use six for correct Python2/3 compatibility

## Code After:
import six
from traitlets.config import LoggingConfigurable
from traitlets.traitlets import Unicode


class NotebookGist(LoggingConfigurable):

    oauth_client_id = Unicode(
        '',
        help='The GitHub application OAUTH client ID',
    ).tag(config=True)

    oauth_client_secret = Unicode(
        '',
        help='The GitHub application OAUTH client secret',
    ).tag(config=True)

    def __init__(self, *args, **kwargs):
        self.config_manager = kwargs.pop('config_manager')
        super(NotebookGist, self).__init__(*args, **kwargs)
        # update the frontend settings with the currently passed
        # OAUTH client id
        client_id = self.config.NotebookGist.oauth_client_id
        if not isinstance(client_id, six.string_types):
            client_id = None
        self.config_manager.update('notebook', {
            'oauth_client_id': client_id,
        })

// ... existing code ...
import six
from traitlets.config import LoggingConfigurable
from traitlets.traitlets import Unicode
// ... modified code ...
        # OAUTH client id
        client_id = self.config.NotebookGist.oauth_client_id
        if not isinstance(client_id, six.string_types):
            client_id = None
        self.config_manager.update('notebook', {
// ... rest of the code ...
90d8411412b79513338b014da63b18d0d29396d9
snmpy/log_processor.py
snmpy/log_processor.py
import re, snmpy_plugins

class log_processor:
    def __init__(self, conf):
        self.data = [{'value':0, 'label': conf['objects'][item]['label'], 'regex': re.compile(conf['objects'][item]['regex'])} for item in sorted(conf['objects'])]
        self.proc(conf['logfile'])

    def len(self):
        return len(self.data)

    def key(self, idx):
        return 'string', self.data[idx - 1]['label']

    def val(self, idx):
        return 'integer', self.data[idx - 1]['value']

    @snmpy_plugins.task
    def proc(self, file):
        for line in snmpy_plugins.tail(file):
            for item in xrange(len(self.data)):
                find = self.data[item]['regex'].search(line)
                if find:
                    self.data[item]['value'] += 1
                    break

import re
import snmpy

class log_processor(snmpy.plugin):
    def __init__(self, conf, script=False):
        snmpy.plugin.__init__(self, conf, script)

    def key(self, idx):
        return 'string', self.data[idx - 1]['label']

    def val(self, idx):
        return 'integer', self.data[idx - 1]['value']

    def worker(self):
        self.data = [{'value':0, 'label': self.conf['objects'][item]['label'], 'regex': re.compile(self.conf['objects'][item]['regex'])} for item in sorted(self.conf['objects'])]
        self.tail()

    @snmpy.task
    def tail(self):
        for line in snmpy.tail(self.conf['logfile']):
            for item in xrange(len(self.data)):
                find = self.data[item]['regex'].search(line)
                if find:
                    self.data[item]['value'] += 1
                    break
Convert to use the base class and update for new plugin path.
Convert to use the base class and update for new plugin path.
Python
mit
mk23/snmpy,mk23/snmpy
- import re, snmpy_plugins
+ import re
+ import snmpy

- class log_processor:
+ class log_processor(snmpy.plugin):
-     def __init__(self, conf):
+     def __init__(self, conf, script=False):
+         snmpy.plugin.__init__(self, conf, script)
-         self.data = [{'value':0, 'label': conf['objects'][item]['label'], 'regex': re.compile(conf['objects'][item]['regex'])} for item in sorted(conf['objects'])]
-         self.proc(conf['logfile'])
-
-     def len(self):
-         return len(self.data)

    def key(self, idx):
        return 'string', self.data[idx - 1]['label']

    def val(self, idx):
        return 'integer', self.data[idx - 1]['value']

+     def worker(self):
+         self.data = [{'value':0, 'label': self.conf['objects'][item]['label'], 'regex': re.compile(self.conf['objects'][item]['regex'])} for item in sorted(self.conf['objects'])]
+         self.tail()
+
-     @snmpy_plugins.task
+     @snmpy.task
-     def proc(self, file):
+     def tail(self):
-         for line in snmpy_plugins.tail(file):
+         for line in snmpy.tail(self.conf['logfile']):
            for item in xrange(len(self.data)):
                find = self.data[item]['regex'].search(line)
                if find:
                    self.data[item]['value'] += 1
                    break
Convert to use the base class and update for new plugin path.
## Code Before:
import re, snmpy_plugins

class log_processor:
    def __init__(self, conf):
        self.data = [{'value':0, 'label': conf['objects'][item]['label'], 'regex': re.compile(conf['objects'][item]['regex'])} for item in sorted(conf['objects'])]
        self.proc(conf['logfile'])

    def len(self):
        return len(self.data)

    def key(self, idx):
        return 'string', self.data[idx - 1]['label']

    def val(self, idx):
        return 'integer', self.data[idx - 1]['value']

    @snmpy_plugins.task
    def proc(self, file):
        for line in snmpy_plugins.tail(file):
            for item in xrange(len(self.data)):
                find = self.data[item]['regex'].search(line)
                if find:
                    self.data[item]['value'] += 1
                    break

## Instruction:
Convert to use the base class and update for new plugin path.

## Code After:
import re
import snmpy

class log_processor(snmpy.plugin):
    def __init__(self, conf, script=False):
        snmpy.plugin.__init__(self, conf, script)

    def key(self, idx):
        return 'string', self.data[idx - 1]['label']

    def val(self, idx):
        return 'integer', self.data[idx - 1]['value']

    def worker(self):
        self.data = [{'value':0, 'label': self.conf['objects'][item]['label'], 'regex': re.compile(self.conf['objects'][item]['regex'])} for item in sorted(self.conf['objects'])]
        self.tail()

    @snmpy.task
    def tail(self):
        for line in snmpy.tail(self.conf['logfile']):
            for item in xrange(len(self.data)):
                find = self.data[item]['regex'].search(line)
                if find:
                    self.data[item]['value'] += 1
                    break

...
import re
import snmpy

class log_processor(snmpy.plugin):
    def __init__(self, conf, script=False):
        snmpy.plugin.__init__(self, conf, script)

    def key(self, idx):
...
        return 'integer', self.data[idx - 1]['value']

    def worker(self):
        self.data = [{'value':0, 'label': self.conf['objects'][item]['label'], 'regex': re.compile(self.conf['objects'][item]['regex'])} for item in sorted(self.conf['objects'])]
        self.tail()

    @snmpy.task
    def tail(self):
        for line in snmpy.tail(self.conf['logfile']):
            for item in xrange(len(self.data)):
                find = self.data[item]['regex'].search(line)
...
b11ef81b180cc18acb44988f3e269af6b54f4c89
timewreport/interval.py
timewreport/interval.py
import dateutil.parser

from datetime import datetime
from dateutil.tz import tz


class TimeWarriorInterval(object):
    def __init__(self, start, end, tags):
        self.__start = self.__get_local_datetime(start)
        self.__end = self.__get_local_datetime(end) if end is not None else None
        self.__tags = tags

    def __eq__(self, other):
        return self.__start == other.get_start() \
               and self.__end == other.get_end() \
               and self.__tags == other.get_tags()

    def get_start(self):
        return self.__start

    def get_end(self):
        return self.__end

    def get_tags(self):
        return self.__tags

    def is_open(self):
        return self.__end is None

    def get_duration(self):
        if self.is_open():
            return datetime.now(tz=tz.tzlocal()) - self.__start
        else:
            return self.__end - self.__start

    def get_date(self):
        return datetime(self.__start.year, self.__start.month, self.__start.day)

    def __get_local_datetime(self, datetime_string):
        from_zone = tz.tzutc()
        to_zone = tz.tzlocal()
        date = dateutil.parser.parse(datetime_string)
        date.replace(tzinfo=from_zone)

        return date.astimezone(to_zone)

import dateutil.parser

from datetime import datetime, date
from dateutil.tz import tz


class TimeWarriorInterval(object):
    def __init__(self, start, end, tags):
        self.__start = self.__get_local_datetime(start)
        self.__end = self.__get_local_datetime(end) if end is not None else None
        self.__tags = tags

    def __eq__(self, other):
        return self.__start == other.get_start() \
               and self.__end == other.get_end() \
               and self.__tags == other.get_tags()

    def get_start(self):
        return self.__start

    def get_end(self):
        return self.__end

    def get_tags(self):
        return self.__tags

    def is_open(self):
        return self.__end is None

    def get_duration(self):
        if self.is_open():
            return datetime.now(tz=tz.tzlocal()) - self.__start
        else:
            return self.__end - self.__start

    def get_date(self):
        return date(self.__start.year, self.__start.month, self.__start.day)

    def __get_local_datetime(self, datetime_string):
        from_zone = tz.tzutc()
        to_zone = tz.tzlocal()
        date = dateutil.parser.parse(datetime_string)
        date.replace(tzinfo=from_zone)

        return date.astimezone(to_zone)
Make get_date() return date object instead of datetime
Make get_date() return date object instead of datetime
Python
mit
lauft/timew-report
import dateutil.parser

- from datetime import datetime
+ from datetime import datetime, date
from dateutil.tz import tz


class TimeWarriorInterval(object):
    def __init__(self, start, end, tags):
        self.__start = self.__get_local_datetime(start)
        self.__end = self.__get_local_datetime(end) if end is not None else None
        self.__tags = tags

    def __eq__(self, other):
        return self.__start == other.get_start() \
               and self.__end == other.get_end() \
               and self.__tags == other.get_tags()

    def get_start(self):
        return self.__start

    def get_end(self):
        return self.__end

    def get_tags(self):
        return self.__tags

    def is_open(self):
        return self.__end is None

    def get_duration(self):
        if self.is_open():
            return datetime.now(tz=tz.tzlocal()) - self.__start
        else:
            return self.__end - self.__start

    def get_date(self):
-         return datetime(self.__start.year, self.__start.month, self.__start.day)
+         return date(self.__start.year, self.__start.month, self.__start.day)

    def __get_local_datetime(self, datetime_string):
        from_zone = tz.tzutc()
        to_zone = tz.tzlocal()
        date = dateutil.parser.parse(datetime_string)
        date.replace(tzinfo=from_zone)

        return date.astimezone(to_zone)
Make get_date() return date object instead of datetime
## Code Before:
import dateutil.parser

from datetime import datetime
from dateutil.tz import tz


class TimeWarriorInterval(object):
    def __init__(self, start, end, tags):
        self.__start = self.__get_local_datetime(start)
        self.__end = self.__get_local_datetime(end) if end is not None else None
        self.__tags = tags

    def __eq__(self, other):
        return self.__start == other.get_start() \
               and self.__end == other.get_end() \
               and self.__tags == other.get_tags()

    def get_start(self):
        return self.__start

    def get_end(self):
        return self.__end

    def get_tags(self):
        return self.__tags

    def is_open(self):
        return self.__end is None

    def get_duration(self):
        if self.is_open():
            return datetime.now(tz=tz.tzlocal()) - self.__start
        else:
            return self.__end - self.__start

    def get_date(self):
        return datetime(self.__start.year, self.__start.month, self.__start.day)

    def __get_local_datetime(self, datetime_string):
        from_zone = tz.tzutc()
        to_zone = tz.tzlocal()
        date = dateutil.parser.parse(datetime_string)
        date.replace(tzinfo=from_zone)

        return date.astimezone(to_zone)

## Instruction:
Make get_date() return date object instead of datetime

## Code After:
import dateutil.parser

from datetime import datetime, date
from dateutil.tz import tz


class TimeWarriorInterval(object):
    def __init__(self, start, end, tags):
        self.__start = self.__get_local_datetime(start)
        self.__end = self.__get_local_datetime(end) if end is not None else None
        self.__tags = tags

    def __eq__(self, other):
        return self.__start == other.get_start() \
               and self.__end == other.get_end() \
               and self.__tags == other.get_tags()

    def get_start(self):
        return self.__start

    def get_end(self):
        return self.__end

    def get_tags(self):
        return self.__tags

    def is_open(self):
        return self.__end is None

    def get_duration(self):
        if self.is_open():
            return datetime.now(tz=tz.tzlocal()) - self.__start
        else:
            return self.__end - self.__start

    def get_date(self):
        return date(self.__start.year, self.__start.month, self.__start.day)

    def __get_local_datetime(self, datetime_string):
        from_zone = tz.tzutc()
        to_zone = tz.tzlocal()
        date = dateutil.parser.parse(datetime_string)
        date.replace(tzinfo=from_zone)

        return date.astimezone(to_zone)

// ... existing code ...
import dateutil.parser

from datetime import datetime, date
from dateutil.tz import tz
// ... modified code ...
    def get_date(self):
        return date(self.__start.year, self.__start.month, self.__start.day)

    def __get_local_datetime(self, datetime_string):
// ... rest of the code ...
f188f2eb81c1310b9862b435a492b4ce6d0fac2d
python3/aniso8601/resolution.py
python3/aniso8601/resolution.py
from enum import Enum

class DateResolution(Enum):
    Year, Month, Week, Weekday, Day, Ordinal = range(6)

class TimeResolution(Enum):
    Seconds, Minutes, Hours = range(3)

class DateResolution(object):
    Year, Month, Week, Weekday, Day, Ordinal = list(range(6))

class TimeResolution(object):
    Seconds, Minutes, Hours = list(range(3))
Remove use of enum in Python3
Remove use of enum in Python3
Python
bsd-3-clause
3stack-software/python-aniso8601-relativedelta
- from enum import Enum
+ class DateResolution(object):
+     Year, Month, Week, Weekday, Day, Ordinal = list(range(6))

- class DateResolution(Enum):
-     Year, Month, Week, Weekday, Day, Ordinal = range(6)
+ class TimeResolution(object):
+     Seconds, Minutes, Hours = list(range(3))

- class TimeResolution(Enum):
-     Seconds, Minutes, Hours = range(3)
-
Remove use of enum in Python3
## Code Before:
from enum import Enum

class DateResolution(Enum):
    Year, Month, Week, Weekday, Day, Ordinal = range(6)

class TimeResolution(Enum):
    Seconds, Minutes, Hours = range(3)

## Instruction:
Remove use of enum in Python3

## Code After:
class DateResolution(object):
    Year, Month, Week, Weekday, Day, Ordinal = list(range(6))

class TimeResolution(object):
    Seconds, Minutes, Hours = list(range(3))

// ... existing code ...
class DateResolution(object):
    Year, Month, Week, Weekday, Day, Ordinal = list(range(6))

class TimeResolution(object):
    Seconds, Minutes, Hours = list(range(3))
// ... rest of the code ...
e5f00a6a5e71d8f5fe98547732f4c9e15a3efc1e
src/nodeconductor_paas_oracle/apps.py
src/nodeconductor_paas_oracle/apps.py
from django.apps import AppConfig


class OracleConfig(AppConfig):
    name = 'nodeconductor_paas_oracle'
    verbose_name = 'Oracle'
    service_name = 'Oracle'

    def ready(self):
        from nodeconductor.structure import SupportedServices

        from .backend import OracleBackend
        SupportedServices.register_backend(OracleBackend)

from django.apps import AppConfig


class OracleConfig(AppConfig):
    name = 'nodeconductor_paas_oracle'
    verbose_name = 'Oracle'
    service_name = 'Oracle'

    def ready(self):
        from nodeconductor.structure import SupportedServices
        from nodeconductor.cost_tracking import CostTrackingRegister

        from .backend import OracleBackend
        SupportedServices.register_backend(OracleBackend)

        # cost tracking
        from .cost_tracking import OracleCostTrackingBackend
        CostTrackingRegister.register(self.label, OracleCostTrackingBackend)
Add registration to cost tracking
Add registration to cost tracking
Python
mit
opennode/nodeconductor-paas-oracle
from django.apps import AppConfig


class OracleConfig(AppConfig):
    name = 'nodeconductor_paas_oracle'
    verbose_name = 'Oracle'
    service_name = 'Oracle'

    def ready(self):
        from nodeconductor.structure import SupportedServices
+         from nodeconductor.cost_tracking import CostTrackingRegister
+
        from .backend import OracleBackend
        SupportedServices.register_backend(OracleBackend)
+
+         # cost tracking
+         from .cost_tracking import OracleCostTrackingBackend
+         CostTrackingRegister.register(self.label, OracleCostTrackingBackend)
+
Add registration to cost tracking
## Code Before:
from django.apps import AppConfig


class OracleConfig(AppConfig):
    name = 'nodeconductor_paas_oracle'
    verbose_name = 'Oracle'
    service_name = 'Oracle'

    def ready(self):
        from nodeconductor.structure import SupportedServices

        from .backend import OracleBackend
        SupportedServices.register_backend(OracleBackend)

## Instruction:
Add registration to cost tracking

## Code After:
from django.apps import AppConfig


class OracleConfig(AppConfig):
    name = 'nodeconductor_paas_oracle'
    verbose_name = 'Oracle'
    service_name = 'Oracle'

    def ready(self):
        from nodeconductor.structure import SupportedServices
        from nodeconductor.cost_tracking import CostTrackingRegister

        from .backend import OracleBackend
        SupportedServices.register_backend(OracleBackend)

        # cost tracking
        from .cost_tracking import OracleCostTrackingBackend
        CostTrackingRegister.register(self.label, OracleCostTrackingBackend)

// ... existing code ...
    def ready(self):
        from nodeconductor.structure import SupportedServices
        from nodeconductor.cost_tracking import CostTrackingRegister

        from .backend import OracleBackend
        SupportedServices.register_backend(OracleBackend)

        # cost tracking
        from .cost_tracking import OracleCostTrackingBackend
        CostTrackingRegister.register(self.label, OracleCostTrackingBackend)
// ... rest of the code ...
a9d3f47098bc7499d62d4866883fa45622f01b74
app/main/errors.py
app/main/errors.py
from flask import render_template, current_app, request

from . import main
from ..helpers.search_helpers import get_template_data


@main.app_errorhandler(404)
def page_not_found(e):
    template_data = get_template_data(main, {})
    return render_template("errors/404.html", **template_data), 404


@main.app_errorhandler(500)
def page_not_found(e):
    template_data = get_template_data(main, {})
    return render_template("errors/500.html", **template_data), 500

from flask import render_template, current_app, request

from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError


@main.app_errorhandler(APIError)
def api_error_handler(e):
    return _render_error_page(e.status_code)


@main.app_errorhandler(404)
def page_not_found(e):
    return _render_error_page(404)


@main.app_errorhandler(500)
def page_not_found(e):
    return _render_error_page(500)


def _render_error_page(status_code):
    templates = {
        404: "errors/404.html",
        500: "errors/500.html",
        503: "errors/500.html",
    }
    if status_code not in templates:
        status_code = 500
    template_data = get_template_data(main, {})
    return render_template(templates[status_code], **template_data), status_code
Add API error handling similar to supplier app
Add API error handling similar to supplier app

Currently 404s returned by the API are resulting in 500s on the buyer app for invalid supplier requests. This change takes the model used in the supplier frontend to automatically handle uncaught APIErrors.

It is not identical to the supplier app version because the default template data is generated in a different way.
Python
mit
alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend
from flask import render_template, current_app, request

from . import main
from ..helpers.search_helpers import get_template_data
+ from dmutils.apiclient import APIError
+
+
+ @main.app_errorhandler(APIError)
+ def api_error_handler(e):
+     return _render_error_page(e.status_code)


@main.app_errorhandler(404)
def page_not_found(e):
+     return _render_error_page(404)
-     template_data = get_template_data(main, {})
-     return render_template("errors/404.html", **template_data), 404


@main.app_errorhandler(500)
def page_not_found(e):
+     return _render_error_page(500)
+
+
+ def _render_error_page(status_code):
+     templates = {
+         404: "errors/404.html",
+         500: "errors/500.html",
+         503: "errors/500.html",
+     }
+     if status_code not in templates:
+         status_code = 500
    template_data = get_template_data(main, {})
-     return render_template("errors/500.html", **template_data), 500
+     return render_template(templates[status_code], **template_data), status_code
+
Add API error handling similar to supplier app
## Code Before:
from flask import render_template, current_app, request

from . import main
from ..helpers.search_helpers import get_template_data


@main.app_errorhandler(404)
def page_not_found(e):
    template_data = get_template_data(main, {})
    return render_template("errors/404.html", **template_data), 404


@main.app_errorhandler(500)
def page_not_found(e):
    template_data = get_template_data(main, {})
    return render_template("errors/500.html", **template_data), 500

## Instruction:
Add API error handling similar to supplier app

## Code After:
from flask import render_template, current_app, request

from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError


@main.app_errorhandler(APIError)
def api_error_handler(e):
    return _render_error_page(e.status_code)


@main.app_errorhandler(404)
def page_not_found(e):
    return _render_error_page(404)


@main.app_errorhandler(500)
def page_not_found(e):
    return _render_error_page(500)


def _render_error_page(status_code):
    templates = {
        404: "errors/404.html",
        500: "errors/500.html",
        503: "errors/500.html",
    }
    if status_code not in templates:
        status_code = 500
    template_data = get_template_data(main, {})
    return render_template(templates[status_code], **template_data), status_code

...
from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError


@main.app_errorhandler(APIError)
def api_error_handler(e):
    return _render_error_page(e.status_code)
...
@main.app_errorhandler(404)
def page_not_found(e):
    return _render_error_page(404)
...
@main.app_errorhandler(500)
def page_not_found(e):
    return _render_error_page(500)


def _render_error_page(status_code):
    templates = {
        404: "errors/404.html",
        500: "errors/500.html",
        503: "errors/500.html",
    }
    if status_code not in templates:
        status_code = 500
    template_data = get_template_data(main, {})
    return render_template(templates[status_code], **template_data), status_code
...