commit
stringlengths
40
40
old_file
stringlengths
4
118
new_file
stringlengths
4
118
old_contents
stringlengths
10
3.52k
new_contents
stringlengths
21
3.18k
subject
stringlengths
16
444
message
stringlengths
17
2.63k
lang
stringclasses
1 value
license
stringclasses
13 values
repos
stringlengths
7
43k
ndiff
stringlengths
52
3.32k
instruction
stringlengths
16
444
content
stringlengths
133
4.32k
fuzzy_diff
stringlengths
17
3.24k
d92c2dba7e549cee8059ecf4f1017956a630cd7a
web3/utils/validation.py
web3/utils/validation.py
from eth_utils import ( is_address, is_checksum_address, is_checksum_formatted_address, is_dict, is_list_like, ) def validate_abi(abi): """ Helper function for validating an ABI """ if not is_list_like(abi): raise ValueError("'abi' is not a list") for e in abi: if not is_dict(e): raise ValueError("The elements of 'abi' are not all dictionaries") def validate_address(value): """ """ validate_address_checksum(value) if not is_address(value): raise ValueError("'{0}' is not an address".format(value)) validate_address_checksum(value) def validate_address_checksum(value): """ Helper function for validating an address EIP55 checksum """ if is_checksum_formatted_address(value): if not is_checksum_address(value): raise ValueError("'{0}' has an invalid EIP55 checksum".format(value))
from eth_utils import ( is_address, is_checksum_address, is_checksum_formatted_address, is_dict, is_list_like, ) def validate_abi(abi): """ Helper function for validating an ABI """ if not is_list_like(abi): raise ValueError("'abi' is not a list") for e in abi: if not is_dict(e): raise ValueError("The elements of 'abi' are not all dictionaries") def validate_address(value): """ Helper function for validating an address """ validate_address_checksum(value) if not is_address(value): raise ValueError("'{0}' is not an address".format(value)) def validate_address_checksum(value): """ Helper function for validating an address EIP55 checksum """ if is_checksum_formatted_address(value): if not is_checksum_address(value): raise ValueError("'{0}' has an invalid EIP55 checksum".format(value))
Raise error specific to address checksum failure
Raise error specific to address checksum failure Because is_address() also checks for a valid checksum, the old code showed a generic "not an address" error if the checksum failed.
Python
mit
pipermerriam/web3.py
from eth_utils import ( is_address, is_checksum_address, is_checksum_formatted_address, is_dict, is_list_like, ) def validate_abi(abi): """ Helper function for validating an ABI """ if not is_list_like(abi): raise ValueError("'abi' is not a list") for e in abi: if not is_dict(e): raise ValueError("The elements of 'abi' are not all dictionaries") def validate_address(value): """ Helper function for validating an address """ + validate_address_checksum(value) if not is_address(value): raise ValueError("'{0}' is not an address".format(value)) - validate_address_checksum(value) def validate_address_checksum(value): """ Helper function for validating an address EIP55 checksum """ if is_checksum_formatted_address(value): if not is_checksum_address(value): raise ValueError("'{0}' has an invalid EIP55 checksum".format(value))
Raise error specific to address checksum failure
## Code Before: from eth_utils import ( is_address, is_checksum_address, is_checksum_formatted_address, is_dict, is_list_like, ) def validate_abi(abi): """ Helper function for validating an ABI """ if not is_list_like(abi): raise ValueError("'abi' is not a list") for e in abi: if not is_dict(e): raise ValueError("The elements of 'abi' are not all dictionaries") def validate_address(value): """ Helper function for validating an address """ if not is_address(value): raise ValueError("'{0}' is not an address".format(value)) validate_address_checksum(value) def validate_address_checksum(value): """ Helper function for validating an address EIP55 checksum """ if is_checksum_formatted_address(value): if not is_checksum_address(value): raise ValueError("'{0}' has an invalid EIP55 checksum".format(value)) ## Instruction: Raise error specific to address checksum failure ## Code After: from eth_utils import ( is_address, is_checksum_address, is_checksum_formatted_address, is_dict, is_list_like, ) def validate_abi(abi): """ Helper function for validating an ABI """ if not is_list_like(abi): raise ValueError("'abi' is not a list") for e in abi: if not is_dict(e): raise ValueError("The elements of 'abi' are not all dictionaries") def validate_address(value): """ Helper function for validating an address """ validate_address_checksum(value) if not is_address(value): raise ValueError("'{0}' is not an address".format(value)) def validate_address_checksum(value): """ Helper function for validating an address EIP55 checksum """ if is_checksum_formatted_address(value): if not is_checksum_address(value): raise ValueError("'{0}' has an invalid EIP55 checksum".format(value))
... Helper function for validating an address """ validate_address_checksum(value) if not is_address(value): raise ValueError("'{0}' is not an address".format(value)) ...
6f968a4aa4048163dd55f927a32da2477cd8c1ff
tx_salaries/search_indexes.py
tx_salaries/search_indexes.py
from haystack import indexes from tx_people.models import Organization from tx_salaries.models import Employee class EmployeeIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) content_auto = indexes.EdgeNgramField(model_attr='position__person__name') title = indexes.CharField(model_attr='title__name', faceted=True) title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True) department = indexes.CharField(model_attr='position__organization__name', faceted=True) department_slug = indexes.CharField(model_attr='position__organization__stats__slug') entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True) entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug') def get_model(self): return Employee
from haystack import indexes from tx_salaries.models import Employee class EmployeeIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) content_auto = indexes.EdgeNgramField(model_attr='position__person__name') compensation = indexes.FloatField(model_attr='compensation', null=True) title = indexes.CharField(model_attr='title__name', faceted=True) title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True) department = indexes.CharField(model_attr='position__organization__name', faceted=True) department_slug = indexes.CharField(model_attr='position__organization__stats__slug') entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True) entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug') def get_model(self): return Employee
Index slugs to reduce search page queries
Index slugs to reduce search page queries
Python
apache-2.0
texastribune/tx_salaries,texastribune/tx_salaries
from haystack import indexes - from tx_people.models import Organization from tx_salaries.models import Employee class EmployeeIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) content_auto = indexes.EdgeNgramField(model_attr='position__person__name') compensation = indexes.FloatField(model_attr='compensation', null=True) title = indexes.CharField(model_attr='title__name', faceted=True) + title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True) department = indexes.CharField(model_attr='position__organization__name', faceted=True) + department_slug = indexes.CharField(model_attr='position__organization__stats__slug') entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True) + entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug') def get_model(self): return Employee
Index slugs to reduce search page queries
## Code Before: from haystack import indexes from tx_people.models import Organization from tx_salaries.models import Employee class EmployeeIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) content_auto = indexes.EdgeNgramField(model_attr='position__person__name') compensation = indexes.FloatField(model_attr='compensation', null=True) title = indexes.CharField(model_attr='title__name', faceted=True) department = indexes.CharField(model_attr='position__organization__name', faceted=True) entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True) def get_model(self): return Employee ## Instruction: Index slugs to reduce search page queries ## Code After: from haystack import indexes from tx_salaries.models import Employee class EmployeeIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) content_auto = indexes.EdgeNgramField(model_attr='position__person__name') compensation = indexes.FloatField(model_attr='compensation', null=True) title = indexes.CharField(model_attr='title__name', faceted=True) title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True) department = indexes.CharField(model_attr='position__organization__name', faceted=True) department_slug = indexes.CharField(model_attr='position__organization__stats__slug') entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True) entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug') def get_model(self): return Employee
# ... existing code ... from haystack import indexes from tx_salaries.models import Employee # ... modified code ... compensation = indexes.FloatField(model_attr='compensation', null=True) title = indexes.CharField(model_attr='title__name', faceted=True) title_slug = indexes.CharField(model_attr='title__stats__slug', faceted=True) department = indexes.CharField(model_attr='position__organization__name', faceted=True) department_slug = indexes.CharField(model_attr='position__organization__stats__slug') entity = indexes.CharField(model_attr='position__organization__parent__name', faceted=True) entity_slug = indexes.CharField(model_attr='position__organization__parent__stats__slug') def get_model(self): # ... rest of the code ...
62f6e116306901aedaa738236075c4faa00db74d
tests/config_test.py
tests/config_test.py
import glob import json import os import unittest # Local imports import config_yaml class ConfigExpanderTest(unittest.TestCase): def testAllFiles(self): in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml')) for input_file in in_yaml: expected = input_file.replace('in.yaml', 'out.json') with open(expected) as expected_in: expected_json = json.loads(expected_in.read(), encoding='utf-8') expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone') actual_json = expander.ExpandFile(input_file) self.assertEqual(expected_json, actual_json) if __name__ == '__main__': unittest.main()
import glob import json import os import unittest # Local imports import config_yaml class ConfigExpanderTest(unittest.TestCase): def testAllFiles(self): in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml')) for input_file in in_yaml: expected = input_file.replace('in.yaml', 'out.json') with open(expected) as expected_in: expected_json = json.loads(expected_in.read(), encoding='utf-8') expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone') actual_json = expander.ExpandFile(input_file) self.assertEqual(expected_json, actual_json) if __name__ == '__main__': unittest.main()
Fix module path (config -> config_yaml) to unbreak test.
Fix module path (config -> config_yaml) to unbreak test.
Python
apache-2.0
mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher
import glob import json import os import unittest # Local imports - import config + import config_yaml class ConfigExpanderTest(unittest.TestCase): def testAllFiles(self): in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml')) for input_file in in_yaml: expected = input_file.replace('in.yaml', 'out.json') with open(expected) as expected_in: expected_json = json.loads(expected_in.read(), encoding='utf-8') - expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone') + expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone') actual_json = expander.ExpandFile(input_file) self.assertEqual(expected_json, actual_json) if __name__ == '__main__': unittest.main()
Fix module path (config -> config_yaml) to unbreak test.
## Code Before: import glob import json import os import unittest # Local imports import config class ConfigExpanderTest(unittest.TestCase): def testAllFiles(self): in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml')) for input_file in in_yaml: expected = input_file.replace('in.yaml', 'out.json') with open(expected) as expected_in: expected_json = json.loads(expected_in.read(), encoding='utf-8') expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone') actual_json = expander.ExpandFile(input_file) self.assertEqual(expected_json, actual_json) if __name__ == '__main__': unittest.main() ## Instruction: Fix module path (config -> config_yaml) to unbreak test. ## Code After: import glob import json import os import unittest # Local imports import config_yaml class ConfigExpanderTest(unittest.TestCase): def testAllFiles(self): in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml')) for input_file in in_yaml: expected = input_file.replace('in.yaml', 'out.json') with open(expected) as expected_in: expected_json = json.loads(expected_in.read(), encoding='utf-8') expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone') actual_json = expander.ExpandFile(input_file) self.assertEqual(expected_json, actual_json) if __name__ == '__main__': unittest.main()
// ... existing code ... # Local imports import config_yaml // ... modified code ... expected_json = json.loads(expected_in.read(), encoding='utf-8') expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone') actual_json = expander.ExpandFile(input_file) // ... rest of the code ...
ec96669641c9b753c3ce74ce432213a17b0403fe
tests/aggregate_tests.py
tests/aggregate_tests.py
# Help with Python 3 compatibility, where the print statement is a function, an # implicit relative import is invalid, and the '/' operator performs true # division. Example: print 'hello world' raises a 'SyntaxError' exception. from __future__ import print_function from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals import unittest import subprocess def check_usable_gpg(): """Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is available. """ os.environ["TEST_SKIP_GPG"] = "1" for gpg in ["gpg2", "gpg"]: try: subprocess.check_call([gpg, "--version"]) except OSError: pass else: # If one of the two exists, we can unset the skip envvar and ... os.environ.pop("TEST_SKIP_GPG", None) # ... abort the availability check.: break if __name__ == '__main__': suite = unittest.TestLoader().discover("tests", top_level_dir=".") all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful() if not all_tests_passed: sys.exit(1)
# Help with Python 3 compatibility, where the print statement is a function, an # implicit relative import is invalid, and the '/' operator performs true # division. Example: print 'hello world' raises a 'SyntaxError' exception. from __future__ import print_function from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals import os import sys import unittest import subprocess def check_usable_gpg(): """Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is available. """ os.environ["TEST_SKIP_GPG"] = "1" for gpg in ["gpg2", "gpg"]: try: subprocess.check_call([gpg, "--version"]) except OSError: pass else: # If one of the two exists, we can unset the skip envvar and ... os.environ.pop("TEST_SKIP_GPG", None) # ... abort the availability check.: break if __name__ == '__main__': check_usable_gpg() suite = unittest.TestLoader().discover("tests", top_level_dir=".") all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful() if not all_tests_passed: sys.exit(1)
Copy and call in-toto's check_usable_gpg function
Copy and call in-toto's check_usable_gpg function Set environment variable in test aggregate script that may be used to skip tests if gpg is not available on the test system.
Python
mit
secure-systems-lab/securesystemslib,secure-systems-lab/securesystemslib
# Help with Python 3 compatibility, where the print statement is a function, an # implicit relative import is invalid, and the '/' operator performs true # division. Example: print 'hello world' raises a 'SyntaxError' exception. from __future__ import print_function from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals + import os import sys import unittest + import subprocess + + def check_usable_gpg(): + """Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is + available. """ + os.environ["TEST_SKIP_GPG"] = "1" + for gpg in ["gpg2", "gpg"]: + try: + subprocess.check_call([gpg, "--version"]) + + except OSError: + pass + + else: + # If one of the two exists, we can unset the skip envvar and ... + os.environ.pop("TEST_SKIP_GPG", None) + # ... abort the availability check.: + break if __name__ == '__main__': + check_usable_gpg() suite = unittest.TestLoader().discover("tests", top_level_dir=".") all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful() if not all_tests_passed: sys.exit(1)
Copy and call in-toto's check_usable_gpg function
## Code Before: # Help with Python 3 compatibility, where the print statement is a function, an # implicit relative import is invalid, and the '/' operator performs true # division. Example: print 'hello world' raises a 'SyntaxError' exception. from __future__ import print_function from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals import sys import unittest if __name__ == '__main__': suite = unittest.TestLoader().discover("tests", top_level_dir=".") all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful() if not all_tests_passed: sys.exit(1) ## Instruction: Copy and call in-toto's check_usable_gpg function ## Code After: # Help with Python 3 compatibility, where the print statement is a function, an # implicit relative import is invalid, and the '/' operator performs true # division. Example: print 'hello world' raises a 'SyntaxError' exception. from __future__ import print_function from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals import os import sys import unittest import subprocess def check_usable_gpg(): """Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is available. """ os.environ["TEST_SKIP_GPG"] = "1" for gpg in ["gpg2", "gpg"]: try: subprocess.check_call([gpg, "--version"]) except OSError: pass else: # If one of the two exists, we can unset the skip envvar and ... os.environ.pop("TEST_SKIP_GPG", None) # ... abort the availability check.: break if __name__ == '__main__': check_usable_gpg() suite = unittest.TestLoader().discover("tests", top_level_dir=".") all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful() if not all_tests_passed: sys.exit(1)
... from __future__ import unicode_literals import os import sys import unittest import subprocess def check_usable_gpg(): """Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is available. """ os.environ["TEST_SKIP_GPG"] = "1" for gpg in ["gpg2", "gpg"]: try: subprocess.check_call([gpg, "--version"]) except OSError: pass else: # If one of the two exists, we can unset the skip envvar and ... os.environ.pop("TEST_SKIP_GPG", None) # ... abort the availability check.: break if __name__ == '__main__': check_usable_gpg() suite = unittest.TestLoader().discover("tests", top_level_dir=".") all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful() ...
0273fc0109d1ef4a4de0450998a6c420cb90217a
util_funcs.py
util_funcs.py
"""Collection of module netural utility functions""" from sys import stderr from ssl import SSLError try: from urllib.request import urlopen, HTTPError, URLError except ImportError: from urllib2 import urlopen, HTTPError, URLError class HTMLGetError(Exception): pass def get_html(url): try: html = urlopen(url) except (HTTPError, URLError, SSLError, timeout) as err: raise HTMLGetError(err) return html.read().decode('utf-8') def progress_msg(processed, total): """Update user on percent done""" if total > 1: percent = int((float(processed) / total) * 100) stderr.write( "\r[%d/%d] %d%%" % (processed, total, percent) ) stderr.flush()
"""Collection of module netural utility functions""" from sys import stderr from ssl import SSLError from socket import timeout try: from urllib.request import urlopen, HTTPError, URLError except ImportError: from urllib2 import urlopen, HTTPError, URLError class HTMLGetError(Exception): pass def get_html(url): try: html = urlopen(url) except (HTTPError, URLError, SSLError, timeout) as err: raise HTMLGetError(err) return html.read().decode('utf-8') def progress_msg(processed, total): """Update user on percent done""" if total > 1: percent = int((float(processed) / total) * 100) stderr.write( "\r[%d/%d] %d%%" % (processed, total, percent) ) stderr.flush()
Remove superfluous parens; catch timeout
Remove superfluous parens; catch timeout
Python
mit
jblakeman/apt-select,jblakeman/apt-select
"""Collection of module netural utility functions""" from sys import stderr from ssl import SSLError + from socket import timeout try: from urllib.request import urlopen, HTTPError, URLError except ImportError: from urllib2 import urlopen, HTTPError, URLError class HTMLGetError(Exception): pass def get_html(url): try: html = urlopen(url) - except (HTTPError, URLError, SSLError) as err: + except (HTTPError, URLError, SSLError, timeout) as err: - raise(HTMLGetError(err)) + raise HTMLGetError(err) return html.read().decode('utf-8') def progress_msg(processed, total): """Update user on percent done""" if total > 1: percent = int((float(processed) / total) * 100) stderr.write( "\r[%d/%d] %d%%" % (processed, total, percent) ) stderr.flush()
Remove superfluous parens; catch timeout
## Code Before: """Collection of module netural utility functions""" from sys import stderr from ssl import SSLError try: from urllib.request import urlopen, HTTPError, URLError except ImportError: from urllib2 import urlopen, HTTPError, URLError class HTMLGetError(Exception): pass def get_html(url): try: html = urlopen(url) except (HTTPError, URLError, SSLError) as err: raise(HTMLGetError(err)) return html.read().decode('utf-8') def progress_msg(processed, total): """Update user on percent done""" if total > 1: percent = int((float(processed) / total) * 100) stderr.write( "\r[%d/%d] %d%%" % (processed, total, percent) ) stderr.flush() ## Instruction: Remove superfluous parens; catch timeout ## Code After: """Collection of module netural utility functions""" from sys import stderr from ssl import SSLError from socket import timeout try: from urllib.request import urlopen, HTTPError, URLError except ImportError: from urllib2 import urlopen, HTTPError, URLError class HTMLGetError(Exception): pass def get_html(url): try: html = urlopen(url) except (HTTPError, URLError, SSLError, timeout) as err: raise HTMLGetError(err) return html.read().decode('utf-8') def progress_msg(processed, total): """Update user on percent done""" if total > 1: percent = int((float(processed) / total) * 100) stderr.write( "\r[%d/%d] %d%%" % (processed, total, percent) ) stderr.flush()
# ... existing code ... from sys import stderr from ssl import SSLError from socket import timeout try: from urllib.request import urlopen, HTTPError, URLError # ... modified code ... try: html = urlopen(url) except (HTTPError, URLError, SSLError, timeout) as err: raise HTMLGetError(err) return html.read().decode('utf-8') # ... rest of the code ...
5b215758adab39923399db98b5975fc76d389472
__init__.py
__init__.py
import configparser import optparse from blo import Blo if __name__ == '__main__': parser = optparse.OptionParser("usage: %prog [options] markdown_file.md") parser.add_option("-c", "--config", dest="config_file", default="./blo.cfg", type="string", help="specify configuration file path to run on") (options, args) = parser.parse_args() if len(args) != 1: parser.error("incorrect number of arguments") cfg_file = options.config_file B = Blo() # TODO: implement main routine of Blo. # blo [-c config_file] markdown_file.md # -- if no -c option then load config file from default path (current directory). # ---- if no configuration file on current directory blo said error. # 1. init database (database name from environment variable or configuration file) # 2. parse markdown file from command line argument. # -- if command line argument path is directory then it will do recursive in directory. # 3. generate html and commit to database pass
import optparse from blo import Blo if __name__ == '__main__': parser = optparse.OptionParser("usage: %prog [options] markdown_file.md") parser.add_option("-c", "--config", dest="config_file", default="./blo.cfg", type="string", help="specify configuration file path to run on") (options, args) = parser.parse_args() if len(args) != 1: parser.error("incorrect number of arguments") cfg_file = options.config_file blo_main = Blo(cfg_file) blo_main.insert_article(args[0]) print('%s complete process.'%('blo',))
Implement main section of blo package.
Implement main section of blo package.
Python
mit
10nin/blo,10nin/blo
- import configparser import optparse from blo import Blo if __name__ == '__main__': - parser = optparse.OptionParser("usage: %prog [option] markdown_file.md") + parser = optparse.OptionParser("usage: %prog [options] markdown_file.md") parser.add_option("-c", "--config", dest="config_file", default="./blo.cfg", type="string", help="specify configuration file path to run on") (options, args) = parser.parse_args() if len(args) != 1: parser.error("incorrect number of arguments") cfg_file = options.config_file - B = Blo() + blo_main = Blo(cfg_file) + blo_main.insert_article(args[0]) + print('%s complete process.'%('blo',)) - # TODO: implement main routine of Blo. - # blo [-c config_file] markdown_file.md - # -- if no -c option then load config file from default path (current directory). - # ---- if no configuration file on current directory blo said error. - # 1. init database (database name from environment variable or configuration file) - # 2. parse markdown file from command line argument. - # -- if command line argument path is directory then it will do recursive in directory. - # 3. generate html and commit to database - pass
Implement main section of blo package.
## Code Before: import configparser import optparse from blo import Blo if __name__ == '__main__': parser = optparse.OptionParser("usage: %prog [option] markdown_file.md") parser.add_option("-c", "--config", dest="config_file", default="./blo.cfg", type="string", help="specify configuration file path to run on") (options, args) = parser.parse_args() if len(args) != 1: parser.error("incorrect number of arguments") cfg_file = options.config_file B = Blo() # TODO: implement main routine of Blo. # blo [-c config_file] markdown_file.md # -- if no -c option then load config file from default path (current directory). # ---- if no configuration file on current directory blo said error. # 1. init database (database name from environment variable or configuration file) # 2. parse markdown file from command line argument. # -- if command line argument path is directory then it will do recursive in directory. # 3. generate html and commit to database pass ## Instruction: Implement main section of blo package. ## Code After: import optparse from blo import Blo if __name__ == '__main__': parser = optparse.OptionParser("usage: %prog [options] markdown_file.md") parser.add_option("-c", "--config", dest="config_file", default="./blo.cfg", type="string", help="specify configuration file path to run on") (options, args) = parser.parse_args() if len(args) != 1: parser.error("incorrect number of arguments") cfg_file = options.config_file blo_main = Blo(cfg_file) blo_main.insert_article(args[0]) print('%s complete process.'%('blo',))
// ... existing code ... import optparse from blo import Blo // ... modified code ... if __name__ == '__main__': parser = optparse.OptionParser("usage: %prog [options] markdown_file.md") parser.add_option("-c", "--config", dest="config_file", default="./blo.cfg", type="string", help="specify configuration file path to run on") ... cfg_file = options.config_file blo_main = Blo(cfg_file) blo_main.insert_article(args[0]) print('%s complete process.'%('blo',)) // ... rest of the code ...
a895661f7ce1a814f308dbe8b5836a4cdb472c8c
cla_public/apps/base/filters.py
cla_public/apps/base/filters.py
from cla_public.apps.base import base @base.app_template_filter() def matches(value, pattern): return bool(re.search(pattern, value))
from cla_public.apps.base import base @base.app_template_filter() def test(value): return value
Revert "BE: Update custom template filter"
Revert "BE: Update custom template filter" This reverts commit ea0c0beb1d2aa0d5970b629ac06e6f9b9708bfdd.
Python
mit
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
- import re from cla_public.apps.base import base @base.app_template_filter() - def matches(value, pattern): - return bool(re.search(pattern, value)) + def test(value): + return value
Revert "BE: Update custom template filter"
## Code Before: import re from cla_public.apps.base import base @base.app_template_filter() def matches(value, pattern): return bool(re.search(pattern, value)) ## Instruction: Revert "BE: Update custom template filter" ## Code After: from cla_public.apps.base import base @base.app_template_filter() def test(value): return value
... from cla_public.apps.base import base @base.app_template_filter() def test(value): return value ...
445bd6d2b5f68da6d51d9acb84b1e15e6b4af2d8
k8s/models/common.py
k8s/models/common.py
from __future__ import absolute_import import six from ..base import Model from ..fields import Field, ReadOnlyField, RequiredField class ObjectMeta(Model): name = Field(six.text_type) namespace = Field(six.text_type, "default") resourceVersion = ReadOnlyField(six.text_type) labels = Field(dict) annotations = Field(dict)
from __future__ import absolute_import import six from ..base import Model from ..fields import Field, ReadOnlyField, RequiredField class ObjectMeta(Model): name = Field(six.text_type) namespace = Field(six.text_type, "default") resourceVersion = ReadOnlyField(six.text_type) labels = Field(dict) annotations = Field(dict) generateName = Field(six.text_type)
Add support for auto-generated names in metadata
Add support for auto-generated names in metadata
Python
apache-2.0
fiaas/k8s
from __future__ import absolute_import import six from ..base import Model from ..fields import Field, ReadOnlyField, RequiredField class ObjectMeta(Model): - name = RequiredField(six.text_type) + name = Field(six.text_type) namespace = Field(six.text_type, "default") resourceVersion = ReadOnlyField(six.text_type) labels = Field(dict) annotations = Field(dict) + generateName = Field(six.text_type)
Add support for auto-generated names in metadata
## Code Before: from __future__ import absolute_import import six from ..base import Model from ..fields import Field, ReadOnlyField, RequiredField class ObjectMeta(Model): name = RequiredField(six.text_type) namespace = Field(six.text_type, "default") resourceVersion = ReadOnlyField(six.text_type) labels = Field(dict) annotations = Field(dict) ## Instruction: Add support for auto-generated names in metadata ## Code After: from __future__ import absolute_import import six from ..base import Model from ..fields import Field, ReadOnlyField, RequiredField class ObjectMeta(Model): name = Field(six.text_type) namespace = Field(six.text_type, "default") resourceVersion = ReadOnlyField(six.text_type) labels = Field(dict) annotations = Field(dict) generateName = Field(six.text_type)
... class ObjectMeta(Model): name = Field(six.text_type) namespace = Field(six.text_type, "default") resourceVersion = ReadOnlyField(six.text_type) ... labels = Field(dict) annotations = Field(dict) generateName = Field(six.text_type) ...
2d2fb47e321faa032c98e92d34e6215b6026f1f0
keras/applications/__init__.py
keras/applications/__init__.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function from .. import backend from .. import layers from .. import models from .. import utils import keras_applications if not hasattr(keras_applications, 'get_submodules_from_kwargs'): keras_applications.set_keras_submodules( backend=backend, layers=layers, models=models, utils=utils) def keras_modules_injection(base_fun): def wrapper(*args, **kwargs): kwargs['backend'] = backend kwargs['layers'] = layers kwargs['models'] = models kwargs['utils'] = utils return base_fun(*args, **kwargs) return wrapper from .vgg16 import VGG16 from .vgg19 import VGG19 from .resnet50 import ResNet50 from .inception_v3 import InceptionV3 from .inception_resnet_v2 import InceptionResNetV2 from .xception import Xception from .mobilenet import MobileNet from .mobilenet_v2 import MobileNetV2 from .densenet import DenseNet121, DenseNet169, DenseNet201 from .nasnet import NASNetMobile, NASNetLarge from .resnet import ResNet101, ResNet152 from .resnet_v2 import ResNet50V2, ResNet101V2, ResNet152V2 from .resnext import ResNeXt50, ResNeXt101
from __future__ import absolute_import from __future__ import division from __future__ import print_function from .. import backend from .. import layers from .. import models from .. import utils import keras_applications def keras_modules_injection(base_fun): def wrapper(*args, **kwargs): kwargs['backend'] = backend kwargs['layers'] = layers kwargs['models'] = models kwargs['utils'] = utils return base_fun(*args, **kwargs) return wrapper from .vgg16 import VGG16 from .vgg19 import VGG19 from .resnet50 import ResNet50 from .inception_v3 import InceptionV3 from .inception_resnet_v2 import InceptionResNetV2 from .xception import Xception from .mobilenet import MobileNet from .mobilenet_v2 import MobileNetV2 from .densenet import DenseNet121, DenseNet169, DenseNet201 from .nasnet import NASNetMobile, NASNetLarge from .resnet import ResNet101, ResNet152 from .resnet_v2 import ResNet50V2, ResNet101V2, ResNet152V2
Remove deprecated applications adapter code
Remove deprecated applications adapter code
Python
apache-2.0
keras-team/keras,keras-team/keras
from __future__ import absolute_import from __future__ import division from __future__ import print_function from .. import backend from .. import layers from .. import models from .. import utils import keras_applications - if not hasattr(keras_applications, 'get_submodules_from_kwargs'): - keras_applications.set_keras_submodules( - backend=backend, - layers=layers, - models=models, - utils=utils) - def keras_modules_injection(base_fun): def wrapper(*args, **kwargs): - if hasattr(keras_applications, 'get_submodules_from_kwargs'): - kwargs['backend'] = backend + kwargs['backend'] = backend - kwargs['layers'] = layers + kwargs['layers'] = layers - kwargs['models'] = models + kwargs['models'] = models - kwargs['utils'] = utils + kwargs['utils'] = utils return base_fun(*args, **kwargs) return wrapper from .vgg16 import VGG16 from .vgg19 import VGG19 from .resnet50 import ResNet50 from .inception_v3 import InceptionV3 from .inception_resnet_v2 import InceptionResNetV2 from .xception import Xception from .mobilenet import MobileNet from .mobilenet_v2 import MobileNetV2 from .densenet import DenseNet121, DenseNet169, DenseNet201 from .nasnet import NASNetMobile, NASNetLarge from .resnet import ResNet101, ResNet152 from .resnet_v2 import ResNet50V2, ResNet101V2, ResNet152V2 - from .resnext import ResNeXt50, ResNeXt101
Remove deprecated applications adapter code
## Code Before: from __future__ import absolute_import from __future__ import division from __future__ import print_function from .. import backend from .. import layers from .. import models from .. import utils import keras_applications if not hasattr(keras_applications, 'get_submodules_from_kwargs'): keras_applications.set_keras_submodules( backend=backend, layers=layers, models=models, utils=utils) def keras_modules_injection(base_fun): def wrapper(*args, **kwargs): if hasattr(keras_applications, 'get_submodules_from_kwargs'): kwargs['backend'] = backend kwargs['layers'] = layers kwargs['models'] = models kwargs['utils'] = utils return base_fun(*args, **kwargs) return wrapper from .vgg16 import VGG16 from .vgg19 import VGG19 from .resnet50 import ResNet50 from .inception_v3 import InceptionV3 from .inception_resnet_v2 import InceptionResNetV2 from .xception import Xception from .mobilenet import MobileNet from .mobilenet_v2 import MobileNetV2 from .densenet import DenseNet121, DenseNet169, DenseNet201 from .nasnet import NASNetMobile, NASNetLarge from .resnet import ResNet101, ResNet152 from .resnet_v2 import ResNet50V2, ResNet101V2, ResNet152V2 from .resnext import ResNeXt50, ResNeXt101 ## Instruction: Remove deprecated applications adapter code ## Code After: from __future__ import absolute_import from __future__ import division from __future__ import print_function from .. import backend from .. import layers from .. import models from .. import utils import keras_applications def keras_modules_injection(base_fun): def wrapper(*args, **kwargs): kwargs['backend'] = backend kwargs['layers'] = layers kwargs['models'] = models kwargs['utils'] = utils return base_fun(*args, **kwargs) return wrapper from .vgg16 import VGG16 from .vgg19 import VGG19 from .resnet50 import ResNet50 from .inception_v3 import InceptionV3 from .inception_resnet_v2 import InceptionResNetV2 from .xception import Xception from .mobilenet import MobileNet from .mobilenet_v2 import MobileNetV2 from .densenet import DenseNet121, DenseNet169, DenseNet201 from .nasnet import NASNetMobile, NASNetLarge from .resnet import ResNet101, ResNet152 from .resnet_v2 import ResNet50V2, ResNet101V2, ResNet152V2
// ... existing code ... import keras_applications def keras_modules_injection(base_fun): // ... modified code ... def wrapper(*args, **kwargs): kwargs['backend'] = backend kwargs['layers'] = layers kwargs['models'] = models kwargs['utils'] = utils return base_fun(*args, **kwargs) ... from .resnet import ResNet101, ResNet152 from .resnet_v2 import ResNet50V2, ResNet101V2, ResNet152V2 // ... rest of the code ...
43dce889a79b77445eebe0d0e15532b64e7728d5
tests/test_upbeatbot.py
tests/test_upbeatbot.py
import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() def test_get_animal_from_message_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') def test_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) def test_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal)
import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() def test_get_animal_from_message_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') def test__get_animal_from_message_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) def test__get_animal_from_message_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal)
Use more descriptive unit test names
Use more descriptive unit test names
Python
mit
nickdibari/UpBeatBot
import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() - def test_chosen_animal_returned(self): + def test_get_animal_from_message_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') - def test_random_animal_returned_with_text(self): + def test__get_animal_from_message_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) - def test_random_returned_no_text(self): + def test__get_animal_from_message_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal)
Use more descriptive unit test names
## Code Before: import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() def test_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') def test_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) def test_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal) ## Instruction: Use more descriptive unit test names ## Code After: import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() def test_get_animal_from_message_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') def test__get_animal_from_message_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) def test__get_animal_from_message_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal)
// ... existing code ... cls.upbeat_bot = UpBeatBot() def test_get_animal_from_message_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) // ... modified code ... self.assertEqual(animal, 'dog') def test__get_animal_from_message_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) ... self.assertTrue(animal) def test__get_animal_from_message_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) // ... rest of the code ...
697833caade1323ddb9a0b4e51031f1d494262cd
201705/migonzalvar/biggest_set.py
201705/migonzalvar/biggest_set.py
from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic duration.elapsed = elapsed def nosolution_case(N): return range(1, N + 1) def negative_worst_case(N): case = list(range(-N + 1, 0)) case += [abs(sum(case))] return case def positive_worst_case(N): case = list(range(1, N)) case.insert(0, - sum(case)) return case def do(): for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300): result = has_subset_sum_zero(source) print(f'Result: {result}') print('Continue...') print() if __name__ == '__main__': do()
from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic duration.elapsed = elapsed def nosolution_case(N): return range(1, N + 1) def negative_worst_case(N): case = list(range(-N + 1, 0)) case += [abs(sum(case))] return case def positive_worst_case(N): case = list(range(1, N)) case.insert(0, - sum(case)) return case def do(): strategies = [nosolution_case, negative_worst_case, positive_worst_case] for strategy in strategies: print(f'## Using {strategy.__name__}') print() for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300) as duration: result = has_subset_sum_zero(source) print(f'Result: {result}') print(f'Duration: {duration.elapsed} seconds') if duration.elapsed >= secs: print('Limit reached. Stopping.') break print('Continue searching...') print() if __name__ == '__main__': do()
Use several strategies for performance
Use several strategies for performance
Python
bsd-3-clause
VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,vigojug/reto,vigojug/reto,vigojug/reto,vigojug/reto,VigoTech/reto,vigojug/reto,vigojug/reto,vigojug/reto,vigojug/reto,VigoTech/reto,VigoTech/reto,vigojug/reto,vigojug/reto
from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic - print(f'Duration: {elapsed} seconds') - if elapsed >= secs: - print('Limit reached. Stopping.') - raise SystemExit(0) + duration.elapsed = elapsed + + + def nosolution_case(N): + return range(1, N + 1) + + + def negative_worst_case(N): + case = list(range(-N + 1, 0)) + case += [abs(sum(case))] + return case + + + def positive_worst_case(N): + case = list(range(1, N)) + case.insert(0, - sum(case)) + return case def do(): + strategies = [nosolution_case, negative_worst_case, positive_worst_case] + for strategy in strategies: + print(f'## Using {strategy.__name__}') - for n in range(1, 100, 10): - source = range(1, n) - print(f'Length: {n} items') - with less_than(300): - result = has_subset_sum_zero(source) - print(f'Result: {result}') - print('Continue...') print() + for n in range(1, 100, 10): + source = range(1, n) + print(f'Length: {n} items') + with less_than(300) as duration: + result = has_subset_sum_zero(source) + print(f'Result: {result}') + print(f'Duration: {duration.elapsed} seconds') + if duration.elapsed >= secs: + print('Limit reached. Stopping.') + break + print('Continue searching...') + print() if __name__ == '__main__': do()
Use several strategies for performance
## Code Before: from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic print(f'Duration: {elapsed} seconds') if elapsed >= secs: print('Limit reached. Stopping.') raise SystemExit(0) def do(): for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300): result = has_subset_sum_zero(source) print(f'Result: {result}') print('Continue...') print() if __name__ == '__main__': do() ## Instruction: Use several strategies for performance ## Code After: from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic duration.elapsed = elapsed def nosolution_case(N): return range(1, N + 1) def negative_worst_case(N): case = list(range(-N + 1, 0)) case += [abs(sum(case))] return case def positive_worst_case(N): case = list(range(1, N)) case.insert(0, - sum(case)) return case def do(): strategies = [nosolution_case, negative_worst_case, positive_worst_case] for strategy in strategies: print(f'## Using {strategy.__name__}') print() for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300) as duration: result = has_subset_sum_zero(source) print(f'Result: {result}') print(f'Duration: {duration.elapsed} seconds') if duration.elapsed >= secs: print('Limit reached. Stopping.') break print('Continue searching...') print() if __name__ == '__main__': do()
... yield duration elapsed = time.time() - tic duration.elapsed = elapsed def nosolution_case(N): return range(1, N + 1) def negative_worst_case(N): case = list(range(-N + 1, 0)) case += [abs(sum(case))] return case def positive_worst_case(N): case = list(range(1, N)) case.insert(0, - sum(case)) return case def do(): strategies = [nosolution_case, negative_worst_case, positive_worst_case] for strategy in strategies: print(f'## Using {strategy.__name__}') print() for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300) as duration: result = has_subset_sum_zero(source) print(f'Result: {result}') print(f'Duration: {duration.elapsed} seconds') if duration.elapsed >= secs: print('Limit reached. Stopping.') break print('Continue searching...') print() ...
edfd2edc5496cb412477b7409f43aa53acf7dea9
tests/test_loadproblem.py
tests/test_loadproblem.py
import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) problem = loadproblem.load_file_as_module(file) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem') self.assertRaises(Exception, loadproblem. \ load_file_as_module(problem_dir,file_name)) if __name__ == '__main__': unittest.main()
import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) problem = loadproblem.load_file_as_module(file) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) self.assertRaises(Exception, loadproblem.load_file_as_module(file)) if __name__ == '__main__': unittest.main()
Fix parameter values for load function
Fix parameter values for load function
Python
apache-2.0
patrickspencer/mathdeck,patrickspencer/mathdeck
import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' - problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), + file = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'fixtures','loadproblem') + 'fixtures','loadproblem', file_name) - problem = loadproblem.load_file_as_module(problem_dir,file_name) + problem = loadproblem.load_file_as_module(file) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' - problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), + file = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'fixtures','loadproblem') + 'fixtures','loadproblem', file_name) - self.assertRaises(Exception, loadproblem. \ + self.assertRaises(Exception, loadproblem.load_file_as_module(file)) - load_file_as_module(problem_dir,file_name)) if __name__ == '__main__': unittest.main()
Fix parameter values for load function
## Code Before: import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem') problem = loadproblem.load_file_as_module(problem_dir,file_name) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem') self.assertRaises(Exception, loadproblem. \ load_file_as_module(problem_dir,file_name)) if __name__ == '__main__': unittest.main() ## Instruction: Fix parameter values for load function ## Code After: import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) problem = loadproblem.load_file_as_module(file) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) self.assertRaises(Exception, loadproblem.load_file_as_module(file)) if __name__ == '__main__': unittest.main()
// ... existing code ... def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) problem = loadproblem.load_file_as_module(file) self.assertTrue(hasattr(problem,'answers')) // ... modified code ... def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) self.assertRaises(Exception, loadproblem.load_file_as_module(file)) if __name__ == '__main__': // ... rest of the code ...
806b19db6f50d63f5b0893e9d695f32830890dd2
crm/tests/test_contact_user.py
crm/tests/test_contact_user.py
from django.contrib.auth.models import User from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) from login.tests.model_maker import make_user class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" contact = make_contact( 'pkimber', 'Patrick Kimber', ) make_user_contact(make_user('fred'), contact) user = User.objects.get(username='fred') self.assertIn('Kimber', user_contacts[0].contact.name) def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" fred = make_user('fred') jsmith = make_contact('jsmith', 'John Smith') pkimber = make_contact('pkimber', 'Patrick Kimber') make_user_contact(fred, pkimber) self.assertRaises( IntegrityError, make_user_contact, fred, jsmith, )
from django.contrib.auth.models import User from django.db import IntegrityError from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) from login.tests.model_maker import make_user class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" contact = make_contact( 'pkimber', 'Patrick Kimber', ) make_user_contact(make_user('fred'), contact) user = User.objects.get(username='fred') user_contacts = user.usercontact_set.all() self.assertIn('Kimber', user_contacts[0].contact.name) def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" fred = make_user('fred') jsmith = make_contact('jsmith', 'John Smith') pkimber = make_contact('pkimber', 'Patrick Kimber') make_user_contact(fred, pkimber) self.assertRaises( IntegrityError, make_user_contact, fred, jsmith, )
Make sure a user can only link to one contact
Make sure a user can only link to one contact
Python
apache-2.0
pkimber/crm,pkimber/crm,pkimber/crm
from django.contrib.auth.models import User + from django.db import IntegrityError from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) from login.tests.model_maker import make_user class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" contact = make_contact( 'pkimber', 'Patrick Kimber', ) make_user_contact(make_user('fred'), contact) user = User.objects.get(username='fred') user_contacts = user.usercontact_set.all() self.assertIn('Kimber', user_contacts[0].contact.name) + def test_one_contact_per_user(self): + """Make sure a user can only link to one contact""" + fred = make_user('fred') + jsmith = make_contact('jsmith', 'John Smith') + pkimber = make_contact('pkimber', 'Patrick Kimber') + make_user_contact(fred, pkimber) + self.assertRaises( + IntegrityError, + make_user_contact, + fred, + jsmith, + ) +
Make sure a user can only link to one contact
## Code Before: from django.contrib.auth.models import User from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) from login.tests.model_maker import make_user class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" contact = make_contact( 'pkimber', 'Patrick Kimber', ) make_user_contact(make_user('fred'), contact) user = User.objects.get(username='fred') user_contacts = user.usercontact_set.all() self.assertIn('Kimber', user_contacts[0].contact.name) ## Instruction: Make sure a user can only link to one contact ## Code After: from django.contrib.auth.models import User from django.db import IntegrityError from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) from login.tests.model_maker import make_user class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" contact = make_contact( 'pkimber', 'Patrick Kimber', ) make_user_contact(make_user('fred'), contact) user = User.objects.get(username='fred') user_contacts = user.usercontact_set.all() self.assertIn('Kimber', user_contacts[0].contact.name) def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" fred = make_user('fred') jsmith = make_contact('jsmith', 'John Smith') pkimber = make_contact('pkimber', 'Patrick Kimber') make_user_contact(fred, pkimber) self.assertRaises( IntegrityError, make_user_contact, fred, jsmith, )
# ... existing code ... from django.contrib.auth.models import User from django.db import IntegrityError from django.test import TestCase # ... modified code ... user_contacts = user.usercontact_set.all() self.assertIn('Kimber', user_contacts[0].contact.name) def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" fred = make_user('fred') jsmith = make_contact('jsmith', 'John Smith') pkimber = make_contact('pkimber', 'Patrick Kimber') make_user_contact(fred, pkimber) self.assertRaises( IntegrityError, make_user_contact, fred, jsmith, ) # ... rest of the code ...
d5cd1eddf1ecf0c463a90d0e69413aadd311977a
lots/urls.py
lots/urls.py
from django.conf.urls import patterns, include, url from django.conf import settings from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), ) urlpatterns += patterns('', url(r'^media/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, }), url(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.STATIC_ROOT, }),)
from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), )
Revert "Picture access from admin console"
Revert "Picture access from admin console" This reverts commit 324fa160fb629f6c4537ca15212c0822e8ac436d.
Python
mit
opencleveland/large-lots,skorasaurus/large-lots,opencleveland/large-lots,skorasaurus/large-lots,skorasaurus/large-lots,skorasaurus/large-lots,opencleveland/large-lots,opencleveland/large-lots
from django.conf.urls import patterns, include, url - from django.conf import settings from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), + url(r'^django-admin/', include(admin.site.urls)), ) - urlpatterns += patterns('', - url(r'^media/(?P<path>.*)$', 'django.views.static.serve', { - 'document_root': settings.MEDIA_ROOT, - }), - url(r'^static/(?P<path>.*)$', 'django.views.static.serve', { - 'document_root': settings.STATIC_ROOT, - }),) -
Revert "Picture access from admin console"
## Code Before: from django.conf.urls import patterns, include, url from django.conf import settings from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), ) urlpatterns += patterns('', url(r'^media/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, }), url(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.STATIC_ROOT, }),) ## Instruction: Revert "Picture access from admin console" ## Code After: from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), )
# ... existing code ... from django.conf.urls import patterns, include, url from django.contrib import admin # ... modified code ... url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), ) # ... rest of the code ...
8e4e12b3c9d64a8c6771b9deb7613c3653f47656
rpihelper/transmission/tasks.py
rpihelper/transmission/tasks.py
from rpihelper.celery import current_app, celery from rpihelper.dropboxclient.logic import Client as DropBoxClient from rpihelper.transmission.logic import ( transmissionrpc_client, transmissionrpc_add_torrent, ) __all__ = ( 'check_torrent_files', ) @celery.task def check_torrent_files(): tc = transmissionrpc_client() if not tc: current_app.logger.info('No connetion to remote transmission, stop task.') return dbc = DropBoxClient() for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']): file_url = dbc.file_url(f) success = transmissionrpc_add_torrent(tc, file_url) if success: dbc.rm_file(f) current_app.logger.info('Successfully added torrent "%s".' % file_url) else: from tempfile import NamedTemporaryFile from rpihelper.celery import current_app, celery from rpihelper.dropboxclient.logic import Client as DropBoxClient from rpihelper.transmission.logic import ( transmissionrpc_client, transmissionrpc_add_torrent, ) __all__ = ( 'check_torrent_files', ) @celery.task def check_torrent_files(): tc = transmissionrpc_client() if not tc: current_app.logger.info('No connetion to remote transmission, stop task.') return dbc = DropBoxClient() for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']): file_url = dbc.file_url(f) success = transmissionrpc_add_torrent(tc, file_url) if success: dbc.rm_file(f) current_app.logger.info('Successfully added torrent "%s".' % file_url) else: current_app.logger.info('Torrent "%s" not added, skip it.' % file_url)
from tempfile import NamedTemporaryFile from rpihelper.celery import current_app, celery from rpihelper.dropboxclient.logic import Client as DropBoxClient from rpihelper.transmission.logic import ( transmissionrpc_client, transmissionrpc_add_torrent, ) __all__ = ( 'check_torrent_files', ) @celery.task def check_torrent_files(): tc = transmissionrpc_client() if not tc: current_app.logger.info('No connetion to remote transmission, stop task.') return dbc = DropBoxClient() for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']): with NamedTemporaryFile() as tf: tf.write(dbc.file(f)) success = transmissionrpc_add_torrent(tc, 'file://%s' % tf.name) if success: dbc.rm_file(f) current_app.logger.info('Successfully added torrent "%s".' % f) else: current_app.logger.info('Torrent "%s" not added, skip it.' % f)
Fix transmission task for torrent files
Fix transmission task for torrent files
Python
mit
Gr1N/rpihelper,Gr1N/rpihelper
+ + from tempfile import NamedTemporaryFile from rpihelper.celery import current_app, celery from rpihelper.dropboxclient.logic import Client as DropBoxClient from rpihelper.transmission.logic import ( transmissionrpc_client, transmissionrpc_add_torrent, ) __all__ = ( 'check_torrent_files', ) @celery.task def check_torrent_files(): tc = transmissionrpc_client() if not tc: current_app.logger.info('No connetion to remote transmission, stop task.') return dbc = DropBoxClient() for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']): - file_url = dbc.file_url(f) + with NamedTemporaryFile() as tf: + tf.write(dbc.file(f)) - success = transmissionrpc_add_torrent(tc, file_url) + success = transmissionrpc_add_torrent(tc, 'file://%s' % tf.name) + if success: dbc.rm_file(f) - current_app.logger.info('Successfully added torrent "%s".' % file_url) + current_app.logger.info('Successfully added torrent "%s".' % f) else: - current_app.logger.info('Torrent "%s" not added, skip it.' % file_url) + current_app.logger.info('Torrent "%s" not added, skip it.' % f)
Fix transmission task for torrent files
## Code Before: from rpihelper.celery import current_app, celery from rpihelper.dropboxclient.logic import Client as DropBoxClient from rpihelper.transmission.logic import ( transmissionrpc_client, transmissionrpc_add_torrent, ) __all__ = ( 'check_torrent_files', ) @celery.task def check_torrent_files(): tc = transmissionrpc_client() if not tc: current_app.logger.info('No connetion to remote transmission, stop task.') return dbc = DropBoxClient() for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']): file_url = dbc.file_url(f) success = transmissionrpc_add_torrent(tc, file_url) if success: dbc.rm_file(f) current_app.logger.info('Successfully added torrent "%s".' % file_url) else: current_app.logger.info('Torrent "%s" not added, skip it.' % file_url) ## Instruction: Fix transmission task for torrent files ## Code After: from tempfile import NamedTemporaryFile from rpihelper.celery import current_app, celery from rpihelper.dropboxclient.logic import Client as DropBoxClient from rpihelper.transmission.logic import ( transmissionrpc_client, transmissionrpc_add_torrent, ) __all__ = ( 'check_torrent_files', ) @celery.task def check_torrent_files(): tc = transmissionrpc_client() if not tc: current_app.logger.info('No connetion to remote transmission, stop task.') return dbc = DropBoxClient() for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']): with NamedTemporaryFile() as tf: tf.write(dbc.file(f)) success = transmissionrpc_add_torrent(tc, 'file://%s' % tf.name) if success: dbc.rm_file(f) current_app.logger.info('Successfully added torrent "%s".' % f) else: current_app.logger.info('Torrent "%s" not added, skip it.' % f)
// ... existing code ... from tempfile import NamedTemporaryFile from rpihelper.celery import current_app, celery // ... modified code ... dbc = DropBoxClient() for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']): with NamedTemporaryFile() as tf: tf.write(dbc.file(f)) success = transmissionrpc_add_torrent(tc, 'file://%s' % tf.name) if success: dbc.rm_file(f) current_app.logger.info('Successfully added torrent "%s".' % f) else: current_app.logger.info('Torrent "%s" not added, skip it.' % f) // ... rest of the code ...
c47468128ab831133a12f942d32dd73b4198458e
scent.py
scent.py
import os import time import subprocess from sniffer.api import select_runnable, file_validator, runnable try: from pync import Notifier except ImportError: notify = None else: notify = Notifier.notify watch_paths = ['demo/', 'tests/'] @select_runnable('python_tests') @file_validator def py_files(filename): return all((filename.endswith('.py'), not os.path.basename(filename).startswith('.'))) @runnable def python_tests(*args): group = int(time.time()) # unique per run for count, (command, title) in enumerate(( (('make', 'test-unit'), "Unit Tests"), (('make', 'test-all'), "Combined Tests"), (('make', 'check'), "Static Analysis"), (('make', 'doc'), None), ), start=1): failure = subprocess.call(command) if failure: if notify: mark = "❌" * count notify(mark + " [FAIL] " + mark, title=title, group=group) return False else: if notify: mark = "✅" * count notify(mark + " [PASS] " + mark, title=title, group=group) return True
import os import time import subprocess from sniffer.api import select_runnable, file_validator, runnable try: from pync import Notifier except ImportError: notify = None else: notify = Notifier.notify watch_paths = ['demo/', 'tests/'] @select_runnable('python_tests') @file_validator def py_files(filename): return all((filename.endswith('.py'), not os.path.basename(filename).startswith('.'))) @runnable def python_tests(*args): group = int(time.time()) # unique per run for count, (command, title) in enumerate(( (('make', 'test-unit'), "Unit Tests"), (('make', 'test-int'), "Integration Tests"), (('make', 'test-all'), "Combined Tests"), (('make', 'check'), "Static Analysis"), (('make', 'doc'), None), ), start=1): failure = subprocess.call(command) if failure: if notify and title: mark = "❌" * count notify(mark + " [FAIL] " + mark, title=title, group=group) return False else: if notify and title: mark = "✅" * count notify(mark + " [PASS] " + mark, title=title, group=group) return True
Deploy Travis CI build 478 to GitHub
Deploy Travis CI build 478 to GitHub
Python
mit
jacebrowning/template-python-demo
import os import time import subprocess from sniffer.api import select_runnable, file_validator, runnable try: from pync import Notifier except ImportError: notify = None else: notify = Notifier.notify watch_paths = ['demo/', 'tests/'] @select_runnable('python_tests') @file_validator def py_files(filename): return all((filename.endswith('.py'), not os.path.basename(filename).startswith('.'))) @runnable def python_tests(*args): group = int(time.time()) # unique per run for count, (command, title) in enumerate(( (('make', 'test-unit'), "Unit Tests"), (('make', 'test-int'), "Integration Tests"), (('make', 'test-all'), "Combined Tests"), + (('make', 'check'), "Static Analysis"), + (('make', 'doc'), None), ), start=1): failure = subprocess.call(command) if failure: - if notify: + if notify and title: mark = "❌" * count notify(mark + " [FAIL] " + mark, title=title, group=group) return False else: - if notify: + if notify and title: mark = "✅" * count notify(mark + " [PASS] " + mark, title=title, group=group) return True
Deploy Travis CI build 478 to GitHub
## Code Before: import os import time import subprocess from sniffer.api import select_runnable, file_validator, runnable try: from pync import Notifier except ImportError: notify = None else: notify = Notifier.notify watch_paths = ['demo/', 'tests/'] @select_runnable('python_tests') @file_validator def py_files(filename): return all((filename.endswith('.py'), not os.path.basename(filename).startswith('.'))) @runnable def python_tests(*args): group = int(time.time()) # unique per run for count, (command, title) in enumerate(( (('make', 'test-unit'), "Unit Tests"), (('make', 'test-int'), "Integration Tests"), (('make', 'test-all'), "Combined Tests"), ), start=1): failure = subprocess.call(command) if failure: if notify: mark = "❌" * count notify(mark + " [FAIL] " + mark, title=title, group=group) return False else: if notify: mark = "✅" * count notify(mark + " [PASS] " + mark, title=title, group=group) return True ## Instruction: Deploy Travis CI build 478 to GitHub ## Code After: import os import time import subprocess from sniffer.api import select_runnable, file_validator, runnable try: from pync import Notifier except ImportError: notify = None else: notify = Notifier.notify watch_paths = ['demo/', 'tests/'] @select_runnable('python_tests') @file_validator def py_files(filename): return all((filename.endswith('.py'), not os.path.basename(filename).startswith('.'))) @runnable def python_tests(*args): group = int(time.time()) # unique per run for count, (command, title) in enumerate(( (('make', 'test-unit'), "Unit Tests"), (('make', 'test-int'), "Integration Tests"), (('make', 'test-all'), "Combined Tests"), (('make', 'check'), "Static Analysis"), (('make', 'doc'), None), ), start=1): failure = subprocess.call(command) if failure: if notify and title: mark = "❌" * count notify(mark + " [FAIL] " + mark, title=title, group=group) return False else: if notify and title: mark = "✅" * count notify(mark + " [PASS] " + mark, title=title, group=group) return True
# ... existing code ... (('make', 'test-int'), "Integration Tests"), (('make', 'test-all'), "Combined Tests"), (('make', 'check'), "Static Analysis"), (('make', 'doc'), None), ), start=1): # ... modified code ... if failure: if notify and title: mark = "❌" * count notify(mark + " [FAIL] " + mark, title=title, group=group) ... return False else: if notify and title: mark = "✅" * count notify(mark + " [PASS] " + mark, title=title, group=group) # ... rest of the code ...
99be8919a0bc274dc311ebe3201dfc490a1d0d07
setup.py
setup.py
import os from distutils.core import setup, find_packages # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.tests"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], )
import os from distutils.core import setup # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.tests"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], )
Remove find_packages import, it's not in distutils
Remove find_packages import, it's not in distutils
Python
bsd-2-clause
blaze/datashape,cowlicks/datashape,ContinuumIO/datashape,cpcloud/datashape,aterrel/datashape,quantopian/datashape,FrancescAlted/datashape,quantopian/datashape,aterrel/datashape,cowlicks/datashape,markflorisson/datashape,ContinuumIO/datashape,cpcloud/datashape,blaze/datashape,llllllllll/datashape,markflorisson/datashape,FrancescAlted/datashape,llllllllll/datashape
import os - from distutils.core import setup, find_packages + from distutils.core import setup # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", - packages = ["datashape", "datashape.test"], + packages = ["datashape", "datashape.tests"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], )
Remove find_packages import, it's not in distutils
## Code Before: import os from distutils.core import setup, find_packages # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.test"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], ) ## Instruction: Remove find_packages import, it's not in distutils ## Code After: import os from distutils.core import setup # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "DataShape", version = "0.1.0", author = "Continuum Analytics", author_email = "[email protected]", description = ("A data description language."), license = "BSD", keywords = "data language", url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.tests"], long_description = read('README.md'), classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development", "License :: OSI Approved :: BSD License", ], )
// ... existing code ... import os from distutils.core import setup # Utility function to read the README file. // ... modified code ... keywords = "data language", url = "http://packages.python.org/datashape", packages = ["datashape", "datashape.tests"], long_description = read('README.md'), classifiers = [ // ... rest of the code ...
a5409ca51e95b4d6ca99a63e0422ca1fe8d344f8
tags/templatetags/tags_tags.py
tags/templatetags/tags_tags.py
from __future__ import unicode_literals from __future__ import absolute_import from django import template from django.db.models.loading import get_model from ..models import CustomTag register = template.Library() @register.assignment_tag def get_obj_list(app, model, obj): ''' Return list of all objects of type app.model tagged with a tag pointing to obj (an object in the db, e.g. Person, Family, ...). ''' try: return get_model(app, model).objects.filter( tags__slug='%s.%s-%d' % ( obj._meta.app_label, obj._meta.model_name, obj.id)) except: return [] @register.assignment_tag def get_tag_list(app, model, tag): ''' Return list of all objects of type app.model tagged with the tag "tag". ''' try: return get_model(app, model).objects.filter(tags__slug='%s' % tag) except: return [] def as_tag_text(slug): try: tag = CustomTag.objects.get(slug=slug) return tag.as_tag_text() except ObjectDoesNotExist: raise Http404
from __future__ import unicode_literals from __future__ import absolute_import from django import template from django.core.exceptions import ObjectDoesNotExist from django.db.models.loading import get_model from django.http import Http404 from ..models import CustomTag register = template.Library() @register.assignment_tag def get_obj_list(app, model, obj): ''' Return list of all objects of type app.model tagged with a tag pointing to obj (an object in the db, e.g. Person, Family, ...). ''' try: return get_model(app, model).objects.filter( tags__slug='%s.%s-%d' % ( obj._meta.app_label, obj._meta.model_name, obj.id)) except: return [] @register.assignment_tag def get_tag_list(app, model, tag): ''' Return list of all objects of type app.model tagged with the tag "tag". ''' try: return get_model(app, model).objects.filter(tags__slug='%s' % tag) except: return [] @register.filter def as_tag_text(slug): try: tag = CustomTag.objects.get(slug=slug) return tag.as_tag_text() except ObjectDoesNotExist: raise Http404
Fix server error in tag search for non-existing tag.
Fix server error in tag search for non-existing tag.
Python
bsd-3-clause
ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio
from __future__ import unicode_literals from __future__ import absolute_import from django import template + from django.core.exceptions import ObjectDoesNotExist from django.db.models.loading import get_model + from django.http import Http404 from ..models import CustomTag register = template.Library() @register.assignment_tag def get_obj_list(app, model, obj): ''' Return list of all objects of type app.model tagged with a tag pointing to obj (an object in the db, e.g. Person, Family, ...). ''' try: return get_model(app, model).objects.filter( tags__slug='%s.%s-%d' % ( obj._meta.app_label, obj._meta.model_name, obj.id)) except: return [] @register.assignment_tag def get_tag_list(app, model, tag): ''' Return list of all objects of type app.model tagged with the tag "tag". ''' try: return get_model(app, model).objects.filter(tags__slug='%s' % tag) except: return [] @register.filter def as_tag_text(slug): + try: - tag = CustomTag.objects.get(slug=slug) + tag = CustomTag.objects.get(slug=slug) - return tag.as_tag_text() + return tag.as_tag_text() + except ObjectDoesNotExist: + raise Http404 +
Fix server error in tag search for non-existing tag.
## Code Before: from __future__ import unicode_literals from __future__ import absolute_import from django import template from django.db.models.loading import get_model from ..models import CustomTag register = template.Library() @register.assignment_tag def get_obj_list(app, model, obj): ''' Return list of all objects of type app.model tagged with a tag pointing to obj (an object in the db, e.g. Person, Family, ...). ''' try: return get_model(app, model).objects.filter( tags__slug='%s.%s-%d' % ( obj._meta.app_label, obj._meta.model_name, obj.id)) except: return [] @register.assignment_tag def get_tag_list(app, model, tag): ''' Return list of all objects of type app.model tagged with the tag "tag". ''' try: return get_model(app, model).objects.filter(tags__slug='%s' % tag) except: return [] @register.filter def as_tag_text(slug): tag = CustomTag.objects.get(slug=slug) return tag.as_tag_text() ## Instruction: Fix server error in tag search for non-existing tag. ## Code After: from __future__ import unicode_literals from __future__ import absolute_import from django import template from django.core.exceptions import ObjectDoesNotExist from django.db.models.loading import get_model from django.http import Http404 from ..models import CustomTag register = template.Library() @register.assignment_tag def get_obj_list(app, model, obj): ''' Return list of all objects of type app.model tagged with a tag pointing to obj (an object in the db, e.g. Person, Family, ...). ''' try: return get_model(app, model).objects.filter( tags__slug='%s.%s-%d' % ( obj._meta.app_label, obj._meta.model_name, obj.id)) except: return [] @register.assignment_tag def get_tag_list(app, model, tag): ''' Return list of all objects of type app.model tagged with the tag "tag". ''' try: return get_model(app, model).objects.filter(tags__slug='%s' % tag) except: return [] @register.filter def as_tag_text(slug): try: tag = CustomTag.objects.get(slug=slug) return tag.as_tag_text() except ObjectDoesNotExist: raise Http404
... from django import template from django.core.exceptions import ObjectDoesNotExist from django.db.models.loading import get_model from django.http import Http404 from ..models import CustomTag ... @register.filter def as_tag_text(slug): try: tag = CustomTag.objects.get(slug=slug) return tag.as_tag_text() except ObjectDoesNotExist: raise Http404 ...
cd8fe432077bdd65122189dd9191d7a5b8788e48
reinforcement-learning/play.py
reinforcement-learning/play.py
import time start = time.time() import env import rl env.make("text") for episode in range(1000): env.reset() episode_reward = 0 for t in range(100): episode_reward += env.actual_reward if env.done: print( "Episode %d finished after %d timesteps, with reward %d" % ((episode + 1), (t + 1), episode_reward)) break max_action = -1 index = -1 for item in env.actions: print(item) print(env.reward(item)) if env.reward(item) > max_action: print("greater") max_action = env.reward(item) action = [item, index] else: index += 1 print(action[0]) episode_reward += env.reward(action[0]) env.action(action[0]) env.render()
"""This is the agent which currently takes the action with highest immediate reward.""" import time start = time.time() import env import rl env.make("text") for episode in range(1000): env.reset() episode_reward = 0 for t in range(100): episode_reward += env.actual_reward if env.done: print( "Episode %d finished after %d timesteps, with reward %d" % ((episode + 1), (t + 1), episode_reward)) break action = rl.choose_action(rl.table[env.object[0]]) rl.q(env.player, action) print(action) episode_reward += env.reward(action) env.action(action) env.update() print(rl.table[env.object[0]]) print("Finished after", str(time.time() - start), "seconds")
Use proper q learning for agent.
Use proper q learning for agent.
Python
mit
danieloconell/Louis
"""This is the agent which currently takes the action with highest immediate reward.""" + import time + start = time.time() import env - import time + import rl - env.make("pygame") + env.make("text") - for episode in range(10): + for episode in range(1000): env.reset() episode_reward = 0 for t in range(100): episode_reward += env.actual_reward if env.done: print( "Episode %d finished after %d timesteps, with reward %d" % ((episode + 1), (t + 1), episode_reward)) break + action = rl.choose_action(rl.table[env.object[0]]) + rl.q(env.player, action) - max_action = -1 - index = -1 - for item in env.actions: - print(item) - print(env.reward(item)) - if env.reward(item) > max_action: - print("greater") - max_action = env.reward(item) - action = [item, index] - else: - index += 1 - print(action[0]) + print(action) - episode_reward += env.reward(action[0]) + episode_reward += env.reward(action) - env.action(action[0]) + env.action(action) - env.render() + env.update() + print(rl.table[env.object[0]]) + print("Finished after", str(time.time() - start), "seconds")
Use proper q learning for agent.
## Code Before: """This is the agent which currently takes the action with highest immediate reward.""" import env import time env.make("pygame") for episode in range(10): env.reset() episode_reward = 0 for t in range(100): episode_reward += env.actual_reward if env.done: print( "Episode %d finished after %d timesteps, with reward %d" % ((episode + 1), (t + 1), episode_reward)) break max_action = -1 index = -1 for item in env.actions: print(item) print(env.reward(item)) if env.reward(item) > max_action: print("greater") max_action = env.reward(item) action = [item, index] else: index += 1 print(action[0]) episode_reward += env.reward(action[0]) env.action(action[0]) env.render() ## Instruction: Use proper q learning for agent. ## Code After: """This is the agent which currently takes the action with highest immediate reward.""" import time start = time.time() import env import rl env.make("text") for episode in range(1000): env.reset() episode_reward = 0 for t in range(100): episode_reward += env.actual_reward if env.done: print( "Episode %d finished after %d timesteps, with reward %d" % ((episode + 1), (t + 1), episode_reward)) break action = rl.choose_action(rl.table[env.object[0]]) rl.q(env.player, action) print(action) episode_reward += env.reward(action) env.action(action) env.update() print(rl.table[env.object[0]]) print("Finished after", str(time.time() - start), "seconds")
... """This is the agent which currently takes the action with highest immediate reward.""" import time start = time.time() import env import rl env.make("text") for episode in range(1000): env.reset() episode_reward = 0 ... % ((episode + 1), (t + 1), episode_reward)) break action = rl.choose_action(rl.table[env.object[0]]) rl.q(env.player, action) print(action) episode_reward += env.reward(action) env.action(action) env.update() print(rl.table[env.object[0]]) print("Finished after", str(time.time() - start), "seconds") ...
5516b125bb00b928d85a044d3df777e1b0004d03
ovp_organizations/migrations/0008_auto_20161207_1941.py
ovp_organizations/migrations/0008_auto_20161207_1941.py
from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): for organization in Organization.objects.all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ]
from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ]
Add ".only" restriction to query on migration 0008
Add ".only" restriction to query on migration 0008
Python
agpl-3.0
OpenVolunteeringPlatform/django-ovp-organizations,OpenVolunteeringPlatform/django-ovp-organizations
from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): - for organization in Organization.objects.all(): + for organization in Organization.objects.only('pk', 'members').all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): - for organization in Organization.objects.all(): + for organization in Organization.objects.only('pk', 'members').all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ]
Add ".only" restriction to query on migration 0008
## Code Before: from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): for organization in Organization.objects.all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): for organization in Organization.objects.all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ] ## Instruction: Add ".only" restriction to query on migration 0008 ## Code After: from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ]
# ... existing code ... def add_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.add(organization.owner) # ... modified code ... def remove_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.clear() # ... rest of the code ...
0dc72761a3b4b17098633df27fdbb70058afe311
geotrek/signage/migrations/0013_auto_20200423_1255.py
geotrek/signage/migrations/0013_auto_20200423_1255.py
import django.db.models.deletion def delete_force(apps, schema_editor): # We can't import Infrastructure models directly as it may be a newer # version than this migration expects. We use the historical version. Blade = apps.get_model('signage', 'Blade') for blade in Blade.objects.filter(deleted=True): blade.delete() class Migration(migrations.Migration): dependencies = [ ('signage', '0012_auto_20200406_1411'), ] operations = [ migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]), migrations.RemoveField( model_name='blade', name='deleted', ), migrations.RemoveField( model_name='blade', name='structure', ), migrations.RemoveField( model_name='line', name='structure', ), migrations.AlterField( model_name='line', name='blade', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', to='signage.Blade', verbose_name='Blade'), ), ]
from django.db import migrations, models import django.db.models.deletion def delete_force(apps, schema_editor): # We can't import Infrastructure models directly as it may be a newer # version than this migration expects. We use the historical version. Blade = apps.get_model('signage', 'Blade') for blade in Blade.objects.filter(deleted=True): blade.delete() class Migration(migrations.Migration): dependencies = [ ('signage', '0012_auto_20200406_1411'), ] operations = [ migrations.AlterField( model_name='line', name='blade', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', to='signage.Blade', verbose_name='Blade'), ), migrations.RunPython(delete_force), migrations.RemoveField( model_name='blade', name='deleted', ), migrations.RemoveField( model_name='blade', name='structure', ), migrations.RemoveField( model_name='line', name='structure', ), ]
Change order migration, user runpython instead
Change order migration, user runpython instead
Python
bsd-2-clause
makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin
from django.db import migrations, models import django.db.models.deletion + + + def delete_force(apps, schema_editor): + # We can't import Infrastructure models directly as it may be a newer + # version than this migration expects. We use the historical version. + Blade = apps.get_model('signage', 'Blade') + for blade in Blade.objects.filter(deleted=True): + blade.delete() class Migration(migrations.Migration): dependencies = [ ('signage', '0012_auto_20200406_1411'), ] operations = [ - migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]), + migrations.AlterField( + model_name='line', + name='blade', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', + to='signage.Blade', verbose_name='Blade'), + ), + migrations.RunPython(delete_force), migrations.RemoveField( model_name='blade', name='deleted', ), migrations.RemoveField( model_name='blade', name='structure', ), migrations.RemoveField( model_name='line', name='structure', ), - migrations.AlterField( - model_name='line', - name='blade', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', - to='signage.Blade', verbose_name='Blade'), - ), ]
Change order migration, user runpython instead
## Code Before: from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('signage', '0012_auto_20200406_1411'), ] operations = [ migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]), migrations.RemoveField( model_name='blade', name='deleted', ), migrations.RemoveField( model_name='blade', name='structure', ), migrations.RemoveField( model_name='line', name='structure', ), migrations.AlterField( model_name='line', name='blade', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', to='signage.Blade', verbose_name='Blade'), ), ] ## Instruction: Change order migration, user runpython instead ## Code After: from django.db import migrations, models import django.db.models.deletion def delete_force(apps, schema_editor): # We can't import Infrastructure models directly as it may be a newer # version than this migration expects. We use the historical version. Blade = apps.get_model('signage', 'Blade') for blade in Blade.objects.filter(deleted=True): blade.delete() class Migration(migrations.Migration): dependencies = [ ('signage', '0012_auto_20200406_1411'), ] operations = [ migrations.AlterField( model_name='line', name='blade', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', to='signage.Blade', verbose_name='Blade'), ), migrations.RunPython(delete_force), migrations.RemoveField( model_name='blade', name='deleted', ), migrations.RemoveField( model_name='blade', name='structure', ), migrations.RemoveField( model_name='line', name='structure', ), ]
# ... existing code ... from django.db import migrations, models import django.db.models.deletion def delete_force(apps, schema_editor): # We can't import Infrastructure models directly as it may be a newer # version than this migration expects. We use the historical version. Blade = apps.get_model('signage', 'Blade') for blade in Blade.objects.filter(deleted=True): blade.delete() # ... modified code ... operations = [ migrations.AlterField( model_name='line', name='blade', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines', to='signage.Blade', verbose_name='Blade'), ), migrations.RunPython(delete_force), migrations.RemoveField( model_name='blade', ... name='structure', ), ] # ... rest of the code ...
1d3bd1fe50806180c8fb6889b1bed28f602608d6
couchdb/tests/__main__.py
couchdb/tests/__main__.py
import unittest from couchdb.tests import client, couch_tests, design, couchhttp, \ multipart, mapping, view, package, tools, \ loader def suite(): suite = unittest.TestSuite() suite.addTest(client.suite()) suite.addTest(design.suite()) suite.addTest(couchhttp.suite()) suite.addTest(multipart.suite()) suite.addTest(mapping.suite()) suite.addTest(view.suite()) suite.addTest(couch_tests.suite()) suite.addTest(package.suite()) suite.addTest(tools.suite()) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
import unittest from couchdb.tests import client, couch_tests, design, couchhttp, \ multipart, mapping, view, package, tools, \ loader def suite(): suite = unittest.TestSuite() suite.addTest(client.suite()) suite.addTest(design.suite()) suite.addTest(couchhttp.suite()) suite.addTest(multipart.suite()) suite.addTest(mapping.suite()) suite.addTest(view.suite()) suite.addTest(couch_tests.suite()) suite.addTest(package.suite()) suite.addTest(tools.suite()) suite.addTest(loader.suite()) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
Include loader tests in test suite
Include loader tests in test suite
Python
bsd-3-clause
djc/couchdb-python,djc/couchdb-python
import unittest from couchdb.tests import client, couch_tests, design, couchhttp, \ - multipart, mapping, view, package, tools + multipart, mapping, view, package, tools, \ + loader def suite(): suite = unittest.TestSuite() suite.addTest(client.suite()) suite.addTest(design.suite()) suite.addTest(couchhttp.suite()) suite.addTest(multipart.suite()) suite.addTest(mapping.suite()) suite.addTest(view.suite()) suite.addTest(couch_tests.suite()) suite.addTest(package.suite()) suite.addTest(tools.suite()) + suite.addTest(loader.suite()) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
Include loader tests in test suite
## Code Before: import unittest from couchdb.tests import client, couch_tests, design, couchhttp, \ multipart, mapping, view, package, tools def suite(): suite = unittest.TestSuite() suite.addTest(client.suite()) suite.addTest(design.suite()) suite.addTest(couchhttp.suite()) suite.addTest(multipart.suite()) suite.addTest(mapping.suite()) suite.addTest(view.suite()) suite.addTest(couch_tests.suite()) suite.addTest(package.suite()) suite.addTest(tools.suite()) return suite if __name__ == '__main__': unittest.main(defaultTest='suite') ## Instruction: Include loader tests in test suite ## Code After: import unittest from couchdb.tests import client, couch_tests, design, couchhttp, \ multipart, mapping, view, package, tools, \ loader def suite(): suite = unittest.TestSuite() suite.addTest(client.suite()) suite.addTest(design.suite()) suite.addTest(couchhttp.suite()) suite.addTest(multipart.suite()) suite.addTest(mapping.suite()) suite.addTest(view.suite()) suite.addTest(couch_tests.suite()) suite.addTest(package.suite()) suite.addTest(tools.suite()) suite.addTest(loader.suite()) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
// ... existing code ... from couchdb.tests import client, couch_tests, design, couchhttp, \ multipart, mapping, view, package, tools, \ loader // ... modified code ... suite.addTest(package.suite()) suite.addTest(tools.suite()) suite.addTest(loader.suite()) return suite // ... rest of the code ...
5c97b9911a2dafde5fd1e4c40cda4e84974eb855
assembla/lib.py
assembla/lib.py
from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def keys(self): return self.data.keys() def values(self): return self.data.values() return self.data.get(*args, **kwargs) def __repr__(self): if 'name' in self.data: return "<%s: %s>" % (type(self).__name__, self.data['name']) if ('number' in self.data) and ('summary' in self.data): return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary']) return super(AssemblaObject, self).__repr__() def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper
from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __setitem__(self, key, value): self.data[key] = value def keys(self): return self.data.keys() def values(self): return self.data.values() def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) def __repr__(self): if 'name' in self.data: return "<%s: %s>" % (type(self).__name__, self.data['name']) if ('number' in self.data) and ('summary' in self.data): return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary']) return super(AssemblaObject, self).__repr__() def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper
Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets.
Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets.
Python
mit
markfinger/assembla
from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] + def __setitem__(self, key, value): + self.data[key] = value + def keys(self): return self.data.keys() def values(self): return self.data.values() def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) + + def __repr__(self): + if 'name' in self.data: + return "<%s: %s>" % (type(self).__name__, self.data['name']) + + if ('number' in self.data) and ('summary' in self.data): + return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary']) + + return super(AssemblaObject, self).__repr__() def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper
Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets.
## Code Before: from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def keys(self): return self.data.keys() def values(self): return self.data.values() def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper ## Instruction: Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets. ## Code After: from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __setitem__(self, key, value): self.data[key] = value def keys(self): return self.data.keys() def values(self): return self.data.values() def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) def __repr__(self): if 'name' in self.data: return "<%s: %s>" % (type(self).__name__, self.data['name']) if ('number' in self.data) and ('summary' in self.data): return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary']) return super(AssemblaObject, self).__repr__() def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper
... return self.data[key] def __setitem__(self, key, value): self.data[key] = value def keys(self): return self.data.keys() ... def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) def __repr__(self): if 'name' in self.data: return "<%s: %s>" % (type(self).__name__, self.data['name']) if ('number' in self.data) and ('summary' in self.data): return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary']) return super(AssemblaObject, self).__repr__() ...
3c64002217795e5d8d3eebb7b06f8ad72f342564
thinglang/parser/tokens/functions.py
thinglang/parser/tokens/functions.py
from thinglang.lexer.symbols.base import LexicalAccess from thinglang.parser.tokens import BaseToken, DefinitionPairToken from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization from thinglang.utils.type_descriptors import ValueType class Access(BaseToken): def __init__(self, slice): super(Access, self).__init__(slice) def evaluate(self, resolver): return resolver.resolve(self) def describe(self): return '.'.join(str(x) for x in self.target) def __getitem__(self, item): return self.target[item] class ArgumentListPartial(ListInitializationPartial): pass class ArgumentListDecelerationPartial(ArgumentListPartial): pass class ArgumentList(ListInitialization): pass class MethodCall(BaseToken, ValueType): def __init__(self, slice): super(MethodCall, self).__init__(slice) self.target, self.arguments = slice self.value = self if not self.arguments: self.arguments = ArgumentList() def describe(self): return 'target={}, args={}'.format(self.target, self.arguments) def replace(self, original, replacement): self.arguments.replace(original, replacement) class ReturnStatement(DefinitionPairToken): def __init__(self, slice): super().__init__(slice) self.value = slice[1]
from thinglang.lexer.symbols.base import LexicalAccess, LexicalIdentifier from thinglang.lexer.symbols.functions import LexicalClassInitialization from thinglang.parser.tokens import BaseToken, DefinitionPairToken from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization from thinglang.utils.type_descriptors import ValueType class Access(BaseToken): def __init__(self, slice): super(Access, self).__init__(slice) self.target = [x for x in slice if not isinstance(x, LexicalAccess)] def evaluate(self, resolver): return resolver.resolve(self) def describe(self): return '.'.join(str(x) for x in self.target) def __getitem__(self, item): return self.target[item] class ArgumentListPartial(ListInitializationPartial): pass class ArgumentListDecelerationPartial(ArgumentListPartial): pass class ArgumentList(ListInitialization): pass class MethodCall(BaseToken, ValueType): def __init__(self, slice): super(MethodCall, self).__init__(slice) self.value = self if isinstance(slice[0], LexicalClassInitialization): self.target = Access([slice[1], LexicalIdentifier.constructor().contextify(slice[0])]) self.arguments = slice[2] self.constructing_call = True else: self.target, self.arguments = slice self.constructing_call = False if not self.arguments: self.arguments = ArgumentList() def describe(self): return 'target={}, args={}'.format(self.target, self.arguments) def replace(self, original, replacement): self.arguments.replace(original, replacement) class ReturnStatement(DefinitionPairToken): def __init__(self, slice): super().__init__(slice) self.value = slice[1]
Add proper support for constructor calls to MethodCall
Add proper support for constructor calls to MethodCall
Python
mit
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
- from thinglang.lexer.symbols.base import LexicalAccess + from thinglang.lexer.symbols.base import LexicalAccess, LexicalIdentifier + from thinglang.lexer.symbols.functions import LexicalClassInitialization from thinglang.parser.tokens import BaseToken, DefinitionPairToken from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization from thinglang.utils.type_descriptors import ValueType class Access(BaseToken): def __init__(self, slice): super(Access, self).__init__(slice) self.target = [x for x in slice if not isinstance(x, LexicalAccess)] + def evaluate(self, resolver): + return resolver.resolve(self) + def describe(self): return '.'.join(str(x) for x in self.target) def __getitem__(self, item): return self.target[item] class ArgumentListPartial(ListInitializationPartial): pass - class ArgumentListDecelerationPartial(ArgumentListPartial): pass class ArgumentList(ListInitialization): pass class MethodCall(BaseToken, ValueType): def __init__(self, slice): super(MethodCall, self).__init__(slice) - self.target, self.arguments = slice self.value = self + + if isinstance(slice[0], LexicalClassInitialization): + self.target = Access([slice[1], LexicalIdentifier.constructor().contextify(slice[0])]) + self.arguments = slice[2] + self.constructing_call = True + else: + self.target, self.arguments = slice + self.constructing_call = False + if not self.arguments: self.arguments = ArgumentList() def describe(self): return 'target={}, args={}'.format(self.target, self.arguments) def replace(self, original, replacement): self.arguments.replace(original, replacement) class ReturnStatement(DefinitionPairToken): def __init__(self, slice): super().__init__(slice) self.value = slice[1]
Add proper support for constructor calls to MethodCall
## Code Before: from thinglang.lexer.symbols.base import LexicalAccess from thinglang.parser.tokens import BaseToken, DefinitionPairToken from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization from thinglang.utils.type_descriptors import ValueType class Access(BaseToken): def __init__(self, slice): super(Access, self).__init__(slice) self.target = [x for x in slice if not isinstance(x, LexicalAccess)] def describe(self): return '.'.join(str(x) for x in self.target) def __getitem__(self, item): return self.target[item] class ArgumentListPartial(ListInitializationPartial): pass class ArgumentListDecelerationPartial(ArgumentListPartial): pass class ArgumentList(ListInitialization): pass class MethodCall(BaseToken, ValueType): def __init__(self, slice): super(MethodCall, self).__init__(slice) self.target, self.arguments = slice self.value = self if not self.arguments: self.arguments = ArgumentList() def describe(self): return 'target={}, args={}'.format(self.target, self.arguments) def replace(self, original, replacement): self.arguments.replace(original, replacement) class ReturnStatement(DefinitionPairToken): def __init__(self, slice): super().__init__(slice) self.value = slice[1] ## Instruction: Add proper support for constructor calls to MethodCall ## Code After: from thinglang.lexer.symbols.base import LexicalAccess, LexicalIdentifier from thinglang.lexer.symbols.functions import LexicalClassInitialization from thinglang.parser.tokens import BaseToken, DefinitionPairToken from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization from thinglang.utils.type_descriptors import ValueType class Access(BaseToken): def __init__(self, slice): super(Access, self).__init__(slice) self.target = [x for x in slice if not isinstance(x, LexicalAccess)] def evaluate(self, resolver): return resolver.resolve(self) def describe(self): return '.'.join(str(x) for x in self.target) def __getitem__(self, item): return self.target[item] class ArgumentListPartial(ListInitializationPartial): pass class ArgumentListDecelerationPartial(ArgumentListPartial): pass class ArgumentList(ListInitialization): pass class MethodCall(BaseToken, ValueType): def __init__(self, slice): super(MethodCall, self).__init__(slice) self.value = self if isinstance(slice[0], LexicalClassInitialization): self.target = Access([slice[1], LexicalIdentifier.constructor().contextify(slice[0])]) self.arguments = slice[2] self.constructing_call = True else: self.target, self.arguments = slice self.constructing_call = False if not self.arguments: self.arguments = ArgumentList() def describe(self): return 'target={}, args={}'.format(self.target, self.arguments) def replace(self, original, replacement): self.arguments.replace(original, replacement) class ReturnStatement(DefinitionPairToken): def __init__(self, slice): super().__init__(slice) self.value = slice[1]
# ... existing code ... from thinglang.lexer.symbols.base import LexicalAccess, LexicalIdentifier from thinglang.lexer.symbols.functions import LexicalClassInitialization from thinglang.parser.tokens import BaseToken, DefinitionPairToken from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization # ... modified code ... self.target = [x for x in slice if not isinstance(x, LexicalAccess)] def evaluate(self, resolver): return resolver.resolve(self) def describe(self): return '.'.join(str(x) for x in self.target) ... class ArgumentListPartial(ListInitializationPartial): pass class ArgumentListDecelerationPartial(ArgumentListPartial): ... def __init__(self, slice): super(MethodCall, self).__init__(slice) self.value = self if isinstance(slice[0], LexicalClassInitialization): self.target = Access([slice[1], LexicalIdentifier.constructor().contextify(slice[0])]) self.arguments = slice[2] self.constructing_call = True else: self.target, self.arguments = slice self.constructing_call = False if not self.arguments: # ... rest of the code ...
8c0af29e7b6ec3a5e76fdb1efc56068bf276ad39
helenae/flask_app.py
helenae/flask_app.py
from flask import Flask from flask_sqlalchemy import SQLAlchemy from db import tables as dbTables app = Flask(__name__, template_folder='./web/templates/') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' # Initialize babel babel = Babel(app) @babel.localeselector def get_locale(): override = request.args.get('lang') if override: session['lang'] = override return session.get('lang', 'ru') import web.admin import web.views
from flask import Flask, request, session from flask_sqlalchemy import SQLAlchemy from flask.ext.babelex import Babel from db import tables as dbTables app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' db_connection = SQLAlchemy(app) # Initialize babel babel = Babel(app) @babel.localeselector def get_locale(): override = request.args.get('lang') if override: session['lang'] = override return session.get('lang', 'ru') import web.admin import web.views
Add babel plugin for Flask
Add babel plugin for Flask
Python
mit
Relrin/Helenae,Relrin/Helenae,Relrin/Helenae
- from flask import Flask + from flask import Flask, request, session from flask_sqlalchemy import SQLAlchemy + from flask.ext.babelex import Babel from db import tables as dbTables - app = Flask(__name__, template_folder='./web/templates/') + app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' db_connection = SQLAlchemy(app) + # Initialize babel + babel = Babel(app) + @babel.localeselector + def get_locale(): + override = request.args.get('lang') + if override: + session['lang'] = override + return session.get('lang', 'ru') + import web.admin import web.views
Add babel plugin for Flask
## Code Before: from flask import Flask from flask_sqlalchemy import SQLAlchemy from db import tables as dbTables app = Flask(__name__, template_folder='./web/templates/') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' db_connection = SQLAlchemy(app) import web.admin import web.views ## Instruction: Add babel plugin for Flask ## Code After: from flask import Flask, request, session from flask_sqlalchemy import SQLAlchemy from flask.ext.babelex import Babel from db import tables as dbTables app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' db_connection = SQLAlchemy(app) # Initialize babel babel = Babel(app) @babel.localeselector def get_locale(): override = request.args.get('lang') if override: session['lang'] = override return session.get('lang', 'ru') import web.admin import web.views
... from flask import Flask, request, session from flask_sqlalchemy import SQLAlchemy from flask.ext.babelex import Babel from db import tables as dbTables app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='') app.config['SECRET_KEY'] = 'some_secret_key' app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan' ... db_connection = SQLAlchemy(app) # Initialize babel babel = Babel(app) @babel.localeselector def get_locale(): override = request.args.get('lang') if override: session['lang'] = override return session.get('lang', 'ru') import web.admin import web.views ...
54add3fa95ab450e5afcbbf7fe8a3205bfc5889c
indra/tests/test_reading_scripts_aws.py
indra/tests/test_reading_scripts_aws.py
import boto3 from os import path, chdir from subprocess import check_call from nose.plugins.attrib import attr from indra.tools.reading import submit_reading_pipeline as srp from indra.sources import sparser s3 = boto3.client('s3') HERE = path.dirname(path.abspath(__file__)) @attr('nonpublic') def test_normal_pmid_reading_call(): chdir(path.expanduser('~')) # Put an id file on s3 basename = 'local_pmid_test_run' s3_prefix = 'reading_results/%s/' % basename s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids', Body='\n'.join(['PMID000test%d' % n for n in range(4)])) # Call the reading tool sub = srp.PmidSubmitter(basename, ['sparser']) job_name, cmd = sub._make_command(0, 2) check_call(cmd) # Remove garbage on s3 res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix) for entry in res['Contents']: print("Removing %s..." % entry['Key']) s3.delete_object(Bucket='bigmech', Key=entry['Key']) return
import boto3 from os import path, chdir from subprocess import check_call from nose.plugins.attrib import attr from indra.tools.reading import submit_reading_pipeline as srp from indra.sources import sparser s3 = boto3.client('s3') HERE = path.dirname(path.abspath(__file__)) @attr('nonpublic') def test_normal_pmid_reading_call(): chdir(path.expanduser('~')) # Put an id file on s3 basename = 'local_pmid_test_run' s3_prefix = 'reading_results/%s/' % basename s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids', Body='\n'.join(['PMID000test%d' % n for n in range(4)])) # Call the reading tool sub = srp.PmidSubmitter(basename, ['sparser']) job_name, cmd = sub._make_command(0, 2) check_call(cmd) # Remove garbage on s3 res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix) for entry in res['Contents']: print("Removing %s..." % entry['Key']) s3.delete_object(Bucket='bigmech', Key=entry['Key']) return @attr('nonpublic') def test_bad_sparser(): txt = ('Disruption of the AP-1 binding site reversed the transcriptional ' 'responses seen with Fos and Jun.') sp = sparser.process_text(txt, timeout=1) assert sp is None, "Reading succeeded unexpectedly."
Add test with currently known-stall sentance.
Add test with currently known-stall sentance.
Python
bsd-2-clause
bgyori/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,pvtodorov/indra,bgyori/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,bgyori/indra
import boto3 from os import path, chdir from subprocess import check_call from nose.plugins.attrib import attr from indra.tools.reading import submit_reading_pipeline as srp + from indra.sources import sparser s3 = boto3.client('s3') HERE = path.dirname(path.abspath(__file__)) @attr('nonpublic') def test_normal_pmid_reading_call(): chdir(path.expanduser('~')) # Put an id file on s3 basename = 'local_pmid_test_run' s3_prefix = 'reading_results/%s/' % basename s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids', Body='\n'.join(['PMID000test%d' % n for n in range(4)])) # Call the reading tool sub = srp.PmidSubmitter(basename, ['sparser']) job_name, cmd = sub._make_command(0, 2) check_call(cmd) # Remove garbage on s3 res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix) for entry in res['Contents']: print("Removing %s..." % entry['Key']) s3.delete_object(Bucket='bigmech', Key=entry['Key']) return + + @attr('nonpublic') + def test_bad_sparser(): + txt = ('Disruption of the AP-1 binding site reversed the transcriptional ' + 'responses seen with Fos and Jun.') + sp = sparser.process_text(txt, timeout=1) + assert sp is None, "Reading succeeded unexpectedly." +
Add test with currently known-stall sentance.
## Code Before: import boto3 from os import path, chdir from subprocess import check_call from nose.plugins.attrib import attr from indra.tools.reading import submit_reading_pipeline as srp s3 = boto3.client('s3') HERE = path.dirname(path.abspath(__file__)) @attr('nonpublic') def test_normal_pmid_reading_call(): chdir(path.expanduser('~')) # Put an id file on s3 basename = 'local_pmid_test_run' s3_prefix = 'reading_results/%s/' % basename s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids', Body='\n'.join(['PMID000test%d' % n for n in range(4)])) # Call the reading tool sub = srp.PmidSubmitter(basename, ['sparser']) job_name, cmd = sub._make_command(0, 2) check_call(cmd) # Remove garbage on s3 res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix) for entry in res['Contents']: print("Removing %s..." % entry['Key']) s3.delete_object(Bucket='bigmech', Key=entry['Key']) return ## Instruction: Add test with currently known-stall sentance. ## Code After: import boto3 from os import path, chdir from subprocess import check_call from nose.plugins.attrib import attr from indra.tools.reading import submit_reading_pipeline as srp from indra.sources import sparser s3 = boto3.client('s3') HERE = path.dirname(path.abspath(__file__)) @attr('nonpublic') def test_normal_pmid_reading_call(): chdir(path.expanduser('~')) # Put an id file on s3 basename = 'local_pmid_test_run' s3_prefix = 'reading_results/%s/' % basename s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids', Body='\n'.join(['PMID000test%d' % n for n in range(4)])) # Call the reading tool sub = srp.PmidSubmitter(basename, ['sparser']) job_name, cmd = sub._make_command(0, 2) check_call(cmd) # Remove garbage on s3 res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix) for entry in res['Contents']: print("Removing %s..." % entry['Key']) s3.delete_object(Bucket='bigmech', Key=entry['Key']) return @attr('nonpublic') def test_bad_sparser(): txt = ('Disruption of the AP-1 binding site reversed the transcriptional ' 'responses seen with Fos and Jun.') sp = sparser.process_text(txt, timeout=1) assert sp is None, "Reading succeeded unexpectedly."
// ... existing code ... from indra.tools.reading import submit_reading_pipeline as srp from indra.sources import sparser // ... modified code ... s3.delete_object(Bucket='bigmech', Key=entry['Key']) return @attr('nonpublic') def test_bad_sparser(): txt = ('Disruption of the AP-1 binding site reversed the transcriptional ' 'responses seen with Fos and Jun.') sp = sparser.process_text(txt, timeout=1) assert sp is None, "Reading succeeded unexpectedly." // ... rest of the code ...
ad2b0447afbee92684ab0b4f14dc0d45a28f3ba2
tests/foomodulegen-auto.py
tests/foomodulegen-auto.py
import sys import re import pybindgen from pybindgen.typehandlers import base as typehandlers from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink) from pybindgen import (CppMethod, CppConstructor, CppClass, Enum) from pybindgen.gccxmlparser import ModuleParser from pybindgen.function import CustomFunctionWrapper from pybindgen.cppmethod import CustomCppMethodWrapper import foomodulegen_common def my_module_gen(): out = FileCodeSink(sys.stdout) pygen_file = open(sys.argv[2], "wt") module_parser = ModuleParser('foo2', '::') module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file)) pygen_file.close() foomodulegen_common.customize_module(module) module.generate(out) if __name__ == '__main__': try: import cProfile as profile except ImportError: else: try: import cProfile as profile except ImportError: my_module_gen() else: print >> sys.stderr, "** running under profiler" profile.run('my_module_gen()', 'foomodulegen-auto.pstat') if __name__ == '__main__': main()
import sys import re import pybindgen from pybindgen.typehandlers import base as typehandlers from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink) from pybindgen import (CppMethod, CppConstructor, CppClass, Enum) from pybindgen.gccxmlparser import ModuleParser from pybindgen.function import CustomFunctionWrapper from pybindgen.cppmethod import CustomCppMethodWrapper import foomodulegen_common def my_module_gen(): out = FileCodeSink(sys.stdout) pygen_file = open(sys.argv[2], "wt") module_parser = ModuleParser('foo2', '::') module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file)) pygen_file.close() foomodulegen_common.customize_module(module) module.generate(out) def main(): if sys.argv[1] == '-d': del sys.argv[1] import pdb pdb.set_trace() my_module_gen() else: try: import cProfile as profile except ImportError: my_module_gen() else: print >> sys.stderr, "** running under profiler" profile.run('my_module_gen()', 'foomodulegen-auto.pstat') if __name__ == '__main__': main()
Add a debug switch (-d) to enable debugger
Add a debug switch (-d) to enable debugger
Python
lgpl-2.1
cawka/pybindgen,caramucho/pybindgen,caramucho/pybindgen,cawka/pybindgen,cawka/pybindgen,caramucho/pybindgen,cawka/pybindgen,caramucho/pybindgen
import sys import re import pybindgen from pybindgen.typehandlers import base as typehandlers from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink) from pybindgen import (CppMethod, CppConstructor, CppClass, Enum) from pybindgen.gccxmlparser import ModuleParser from pybindgen.function import CustomFunctionWrapper from pybindgen.cppmethod import CustomCppMethodWrapper import foomodulegen_common def my_module_gen(): out = FileCodeSink(sys.stdout) pygen_file = open(sys.argv[2], "wt") module_parser = ModuleParser('foo2', '::') module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file)) pygen_file.close() foomodulegen_common.customize_module(module) module.generate(out) - if __name__ == '__main__': - try: - import cProfile as profile - except ImportError: + def main(): + if sys.argv[1] == '-d': + del sys.argv[1] + import pdb + pdb.set_trace() my_module_gen() else: + try: + import cProfile as profile + except ImportError: + my_module_gen() + else: - print >> sys.stderr, "** running under profiler" + print >> sys.stderr, "** running under profiler" - profile.run('my_module_gen()', 'foomodulegen-auto.pstat') + profile.run('my_module_gen()', 'foomodulegen-auto.pstat') + + if __name__ == '__main__': + main()
Add a debug switch (-d) to enable debugger
## Code Before: import sys import re import pybindgen from pybindgen.typehandlers import base as typehandlers from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink) from pybindgen import (CppMethod, CppConstructor, CppClass, Enum) from pybindgen.gccxmlparser import ModuleParser from pybindgen.function import CustomFunctionWrapper from pybindgen.cppmethod import CustomCppMethodWrapper import foomodulegen_common def my_module_gen(): out = FileCodeSink(sys.stdout) pygen_file = open(sys.argv[2], "wt") module_parser = ModuleParser('foo2', '::') module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file)) pygen_file.close() foomodulegen_common.customize_module(module) module.generate(out) if __name__ == '__main__': try: import cProfile as profile except ImportError: my_module_gen() else: print >> sys.stderr, "** running under profiler" profile.run('my_module_gen()', 'foomodulegen-auto.pstat') ## Instruction: Add a debug switch (-d) to enable debugger ## Code After: import sys import re import pybindgen from pybindgen.typehandlers import base as typehandlers from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink) from pybindgen import (CppMethod, CppConstructor, CppClass, Enum) from pybindgen.gccxmlparser import ModuleParser from pybindgen.function import CustomFunctionWrapper from pybindgen.cppmethod import CustomCppMethodWrapper import foomodulegen_common def my_module_gen(): out = FileCodeSink(sys.stdout) pygen_file = open(sys.argv[2], "wt") module_parser = ModuleParser('foo2', '::') module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file)) pygen_file.close() foomodulegen_common.customize_module(module) module.generate(out) def main(): if sys.argv[1] == '-d': del sys.argv[1] import pdb pdb.set_trace() my_module_gen() else: try: import cProfile as profile except ImportError: my_module_gen() else: print >> sys.stderr, "** running under profiler" profile.run('my_module_gen()', 'foomodulegen-auto.pstat') if __name__ == '__main__': main()
# ... existing code ... def main(): if sys.argv[1] == '-d': del sys.argv[1] import pdb pdb.set_trace() my_module_gen() else: try: import cProfile as profile except ImportError: my_module_gen() else: print >> sys.stderr, "** running under profiler" profile.run('my_module_gen()', 'foomodulegen-auto.pstat') if __name__ == '__main__': main() # ... rest of the code ...
343c5eb47510f784588e425619c43df916a40fe7
delivery/services/external_program_service.py
delivery/services/external_program_service.py
import subprocess import atexit from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
import subprocess from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
Remove at exit handler it doesnt work...
Remove at exit handler it doesnt work...
Python
mit
arteria-project/arteria-delivery
import subprocess - import atexit from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) - # On exiting the main program, make sure that the subprocess - # gets killed. - atexit.register(p.terminate) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
Remove at exit handler it doesnt work...
## Code Before: import subprocess import atexit from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) # On exiting the main program, make sure that the subprocess # gets killed. atexit.register(p.terminate) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code) ## Instruction: Remove at exit handler it doesnt work... ## Code After: import subprocess from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
# ... existing code ... import subprocess from delivery.models.execution import ExecutionResult, Execution # ... modified code ... stderr=subprocess.PIPE, stdin=subprocess.PIPE) return Execution(pid=p.pid, process_obj=p) # ... rest of the code ...
4bc8d4016954e82fb566d7cf43ec21825a0e89de
indra/tests/test_tsv_assembler.py
indra/tests/test_tsv_assembler.py
import os from indra.sources import signor from indra.assemblers.tsv_assembler import TsvAssembler # Get some statements from Signor sp = SignorProcessor() stmts = sp.statements def test_tsv_init(): ta = TsvAssembler(stmts) ta.make_model('tsv_test') def test_tsv_add_stmts(): ta = TsvAssembler() ta.add_statements(stmts) assert len(ta.statements) == len(stmts) def test_make_model(): ta = TsvAssembler(stmts) ta.make_model('tsv_test.tsv') assert os.path.exists('tsv_test.tsv')
import os from indra.sources import signor from indra.assemblers.tsv_assembler import TsvAssembler # Get some statements from Signor from .test_signor import test_data_file, test_complexes_file sp = signor.process_from_file(test_data_file, test_complexes_file) stmts = sp.statements def test_tsv_init(): ta = TsvAssembler(stmts) ta.make_model('tsv_test') def test_tsv_add_stmts(): ta = TsvAssembler() ta.add_statements(stmts) assert len(ta.statements) == len(stmts) def test_make_model(): ta = TsvAssembler(stmts) ta.make_model('tsv_test.tsv') assert os.path.exists('tsv_test.tsv')
Fix TSV Assembler reference to Signor files
Fix TSV Assembler reference to Signor files
Python
bsd-2-clause
sorgerlab/belpy,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,bgyori/indra,bgyori/indra
import os + from indra.sources import signor from indra.assemblers.tsv_assembler import TsvAssembler - from indra.sources.signor import SignorProcessor # Get some statements from Signor - sp = SignorProcessor() + from .test_signor import test_data_file, test_complexes_file + sp = signor.process_from_file(test_data_file, test_complexes_file) stmts = sp.statements def test_tsv_init(): ta = TsvAssembler(stmts) ta.make_model('tsv_test') def test_tsv_add_stmts(): ta = TsvAssembler() ta.add_statements(stmts) assert len(ta.statements) == len(stmts) def test_make_model(): ta = TsvAssembler(stmts) ta.make_model('tsv_test.tsv') assert os.path.exists('tsv_test.tsv')
Fix TSV Assembler reference to Signor files
## Code Before: import os from indra.assemblers.tsv_assembler import TsvAssembler from indra.sources.signor import SignorProcessor # Get some statements from Signor sp = SignorProcessor() stmts = sp.statements def test_tsv_init(): ta = TsvAssembler(stmts) ta.make_model('tsv_test') def test_tsv_add_stmts(): ta = TsvAssembler() ta.add_statements(stmts) assert len(ta.statements) == len(stmts) def test_make_model(): ta = TsvAssembler(stmts) ta.make_model('tsv_test.tsv') assert os.path.exists('tsv_test.tsv') ## Instruction: Fix TSV Assembler reference to Signor files ## Code After: import os from indra.sources import signor from indra.assemblers.tsv_assembler import TsvAssembler # Get some statements from Signor from .test_signor import test_data_file, test_complexes_file sp = signor.process_from_file(test_data_file, test_complexes_file) stmts = sp.statements def test_tsv_init(): ta = TsvAssembler(stmts) ta.make_model('tsv_test') def test_tsv_add_stmts(): ta = TsvAssembler() ta.add_statements(stmts) assert len(ta.statements) == len(stmts) def test_make_model(): ta = TsvAssembler(stmts) ta.make_model('tsv_test.tsv') assert os.path.exists('tsv_test.tsv')
// ... existing code ... import os from indra.sources import signor from indra.assemblers.tsv_assembler import TsvAssembler # Get some statements from Signor from .test_signor import test_data_file, test_complexes_file sp = signor.process_from_file(test_data_file, test_complexes_file) stmts = sp.statements // ... rest of the code ...
e61e633e122953774ee4246ad61b23d9b7d264f3
semillas_backend/users/serializers.py
semillas_backend/users/serializers.py
from rest_framework import serializers from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ class Meta: model = User fields = ('id', 'name', 'picture')
from rest_framework import serializers from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ class Meta: model = User fields = ('id', 'name', 'picture', 'location', 'email', 'username', 'last_login')
Add location, email, username and last_login to user serializer
Add location, email, username and last_login to user serializer
Python
mit
Semillas/semillas_backend,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_backend
from rest_framework import serializers from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer - + JSONRenderer().render(UserSerializer(user_instance).data) """ class Meta: - model = User + model = User - fields = ('id', 'name', 'picture') + fields = ('id', 'name', 'picture', 'location', 'email', 'username', 'last_login')
Add location, email, username and last_login to user serializer
## Code Before: from rest_framework import serializers from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ class Meta: model = User fields = ('id', 'name', 'picture') ## Instruction: Add location, email, username and last_login to user serializer ## Code After: from rest_framework import serializers from .models import User class UserSerializer(serializers.ModelSerializer): """ Usage: from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ class Meta: model = User fields = ('id', 'name', 'picture', 'location', 'email', 'username', 'last_login')
... from rest_framework.renderers import JSONRenderer from semillas_backend.users.serializers import UserSerializer JSONRenderer().render(UserSerializer(user_instance).data) """ class Meta: model = User fields = ('id', 'name', 'picture', 'location', 'email', 'username', 'last_login') ...
81dc92b3c2875b6775d33321b1bcd9f994be8a10
txircd/modules/extra/snotice_remoteconnect.py
txircd/modules/extra/snotice_remoteconnect.py
from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoRemoteConnect(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeRemoteConnect" def __init__(self): self.burstingServer = None def actions(self): return [ ("remoteregister", 1, self.sendRemoteConnectNotice), ("servernoticetype", 1, self.checkSnoType), ("startburstcommand", 1, self.markStartBurst), ("endburstcommand", 1, self.markEndBurst) ] def sendRemoteConnectNotice(self, user, *params): server = self.ircd.servers[user.uuid[:3]].name return server = server.name message = "Client connected on {}: {} ({}) [{}]".format(server, user.hostmaskWithRealHost(), user.ip, user.gecos) snodata = { "mask": "connect", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "remoteconnect" def markStartBurst(self, server, command): self.burstingServer = server def markEndBurst(self, server, command): self.burstingServer = None snoRemoteConnect = SnoRemoteConnect()
from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoRemoteConnect(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeRemoteConnect" def __init__(self): self.burstingServer = None def actions(self): return [ ("remoteregister", 1, self.sendRemoteConnectNotice), ("servernoticetype", 1, self.checkSnoType), ("startburstcommand", 1, self.markStartBurst), ("endburstcommand", 1, self.markEndBurst) ] def sendRemoteConnectNotice(self, user, *params): server = self.ircd.servers[user.uuid[:3]] if server == self.burstingServer: return server = server.name message = "Client connected on {}: {} ({}) [{}]".format(server, user.hostmaskWithRealHost(), user.ip, user.gecos) snodata = { "mask": "connect", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "remoteconnect" def markStartBurst(self, server, command): self.burstingServer = server def markEndBurst(self, server, command): self.burstingServer = None snoRemoteConnect = SnoRemoteConnect()
Fix checking a name against an object
Fix checking a name against an object
Python
bsd-3-clause
Heufneutje/txircd
from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoRemoteConnect(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeRemoteConnect" def __init__(self): self.burstingServer = None def actions(self): return [ ("remoteregister", 1, self.sendRemoteConnectNotice), ("servernoticetype", 1, self.checkSnoType), ("startburstcommand", 1, self.markStartBurst), ("endburstcommand", 1, self.markEndBurst) ] def sendRemoteConnectNotice(self, user, *params): - server = self.ircd.servers[user.uuid[:3]].name + server = self.ircd.servers[user.uuid[:3]] if server == self.burstingServer: return + server = server.name message = "Client connected on {}: {} ({}) [{}]".format(server, user.hostmaskWithRealHost(), user.ip, user.gecos) snodata = { "mask": "connect", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "remoteconnect" def markStartBurst(self, server, command): self.burstingServer = server def markEndBurst(self, server, command): self.burstingServer = None snoRemoteConnect = SnoRemoteConnect()
Fix checking a name against an object
## Code Before: from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoRemoteConnect(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeRemoteConnect" def __init__(self): self.burstingServer = None def actions(self): return [ ("remoteregister", 1, self.sendRemoteConnectNotice), ("servernoticetype", 1, self.checkSnoType), ("startburstcommand", 1, self.markStartBurst), ("endburstcommand", 1, self.markEndBurst) ] def sendRemoteConnectNotice(self, user, *params): server = self.ircd.servers[user.uuid[:3]].name if server == self.burstingServer: return message = "Client connected on {}: {} ({}) [{}]".format(server, user.hostmaskWithRealHost(), user.ip, user.gecos) snodata = { "mask": "connect", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "remoteconnect" def markStartBurst(self, server, command): self.burstingServer = server def markEndBurst(self, server, command): self.burstingServer = None snoRemoteConnect = SnoRemoteConnect() ## Instruction: Fix checking a name against an object ## Code After: from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoRemoteConnect(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeRemoteConnect" def __init__(self): self.burstingServer = None def actions(self): return [ ("remoteregister", 1, self.sendRemoteConnectNotice), ("servernoticetype", 1, self.checkSnoType), ("startburstcommand", 1, self.markStartBurst), ("endburstcommand", 1, self.markEndBurst) ] def sendRemoteConnectNotice(self, user, *params): server = self.ircd.servers[user.uuid[:3]] if server == self.burstingServer: return server = server.name message = "Client connected on {}: {} ({}) [{}]".format(server, user.hostmaskWithRealHost(), user.ip, user.gecos) snodata = { "mask": "connect", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "remoteconnect" def markStartBurst(self, server, command): self.burstingServer = server def markEndBurst(self, server, command): self.burstingServer = None snoRemoteConnect = SnoRemoteConnect()
# ... existing code ... def sendRemoteConnectNotice(self, user, *params): server = self.ircd.servers[user.uuid[:3]] if server == self.burstingServer: return server = server.name message = "Client connected on {}: {} ({}) [{}]".format(server, user.hostmaskWithRealHost(), user.ip, user.gecos) snodata = { # ... rest of the code ...
3fcdb9e64ef955fd0a7e5b2fda481d351dfb4d18
spotify/__init__.py
spotify/__init__.py
import logging import os import weakref import cffi __version__ = '2.0.0a1' _header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h') _header = open(_header_file).read() _header += '#define SPOTIFY_API_VERSION ...\n' ffi = cffi.FFI() ffi.cdef(_header) lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')]) # Mapping between keys and objects that should be kept alive as long as the key # is alive. May be used to keep objects alive when there isn't a more # convenient place to keep a reference to it. The keys are weakrefs, so entries # disappear from the dict when the key is garbage collected, potentially # causing objects associated to the key to be garbage collected as well. For # further details, refer to the CFFI docs. global_weakrefs = weakref.WeakKeyDictionary() from spotify.error import * # noqa
from __future__ import unicode_literals import logging import os import weakref import cffi __version__ = '2.0.0a1' # Log to nowhere by default. For details, see: # http://docs.python.org/2/howto/logging.html#library-config logging.getLogger('spotify').addHandler(logging.NullHandler()) _header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h') _header = open(_header_file).read() _header += '#define SPOTIFY_API_VERSION ...\n' ffi = cffi.FFI() ffi.cdef(_header) lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')]) # Mapping between keys and objects that should be kept alive as long as the key # is alive. May be used to keep objects alive when there isn't a more # convenient place to keep a reference to it. The keys are weakrefs, so entries # disappear from the dict when the key is garbage collected, potentially # causing objects associated to the key to be garbage collected as well. For # further details, refer to the CFFI docs. global_weakrefs = weakref.WeakKeyDictionary() from spotify.error import * # noqa
Add NullHandler to the 'spotify' logger
Add NullHandler to the 'spotify' logger
Python
apache-2.0
jodal/pyspotify,mopidy/pyspotify,kotamat/pyspotify,jodal/pyspotify,kotamat/pyspotify,felix1m/pyspotify,mopidy/pyspotify,kotamat/pyspotify,felix1m/pyspotify,felix1m/pyspotify,jodal/pyspotify
from __future__ import unicode_literals + import logging import os import weakref import cffi __version__ = '2.0.0a1' + + + # Log to nowhere by default. For details, see: + # http://docs.python.org/2/howto/logging.html#library-config + logging.getLogger('spotify').addHandler(logging.NullHandler()) _header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h') _header = open(_header_file).read() _header += '#define SPOTIFY_API_VERSION ...\n' ffi = cffi.FFI() ffi.cdef(_header) lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')]) # Mapping between keys and objects that should be kept alive as long as the key # is alive. May be used to keep objects alive when there isn't a more # convenient place to keep a reference to it. The keys are weakrefs, so entries # disappear from the dict when the key is garbage collected, potentially # causing objects associated to the key to be garbage collected as well. For # further details, refer to the CFFI docs. global_weakrefs = weakref.WeakKeyDictionary() from spotify.error import * # noqa
Add NullHandler to the 'spotify' logger
## Code Before: from __future__ import unicode_literals import os import weakref import cffi __version__ = '2.0.0a1' _header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h') _header = open(_header_file).read() _header += '#define SPOTIFY_API_VERSION ...\n' ffi = cffi.FFI() ffi.cdef(_header) lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')]) # Mapping between keys and objects that should be kept alive as long as the key # is alive. May be used to keep objects alive when there isn't a more # convenient place to keep a reference to it. The keys are weakrefs, so entries # disappear from the dict when the key is garbage collected, potentially # causing objects associated to the key to be garbage collected as well. For # further details, refer to the CFFI docs. global_weakrefs = weakref.WeakKeyDictionary() from spotify.error import * # noqa ## Instruction: Add NullHandler to the 'spotify' logger ## Code After: from __future__ import unicode_literals import logging import os import weakref import cffi __version__ = '2.0.0a1' # Log to nowhere by default. For details, see: # http://docs.python.org/2/howto/logging.html#library-config logging.getLogger('spotify').addHandler(logging.NullHandler()) _header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h') _header = open(_header_file).read() _header += '#define SPOTIFY_API_VERSION ...\n' ffi = cffi.FFI() ffi.cdef(_header) lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')]) # Mapping between keys and objects that should be kept alive as long as the key # is alive. May be used to keep objects alive when there isn't a more # convenient place to keep a reference to it. The keys are weakrefs, so entries # disappear from the dict when the key is garbage collected, potentially # causing objects associated to the key to be garbage collected as well. For # further details, refer to the CFFI docs. global_weakrefs = weakref.WeakKeyDictionary() from spotify.error import * # noqa
# ... existing code ... from __future__ import unicode_literals import logging import os import weakref # ... modified code ... __version__ = '2.0.0a1' # Log to nowhere by default. For details, see: # http://docs.python.org/2/howto/logging.html#library-config logging.getLogger('spotify').addHandler(logging.NullHandler()) # ... rest of the code ...
539fae27f9911b9ad13edc5244ffbd12b1509006
utils.py
utils.py
__all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): try: #python3 from urllib.request import urlopen except: #python2 from urllib2 import urlopen from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: contents = urllib.request.urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents)
__all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): try: #python3 from urllib.request import urlopen except: #python2 from urllib2 import urlopen from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: contents = urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents)
Fix for python2/3 compatibility issue with urllib
Fix for python2/3 compatibility issue with urllib
Python
mit
mattloper/opendr,mattloper/opendr
__all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): - import urllib.request, urllib.error, urllib.parse + try: #python3 + from urllib.request import urlopen + except: #python2 + from urllib2 import urlopen + from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: - contents = urllib.request.urlopen(url).read() + contents = urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents)
Fix for python2/3 compatibility issue with urllib
## Code Before: __all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): import urllib.request, urllib.error, urllib.parse from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: contents = urllib.request.urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents) ## Instruction: Fix for python2/3 compatibility issue with urllib ## Code After: __all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): try: #python3 from urllib.request import urlopen except: #python2 from urllib2 import urlopen from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: contents = urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents)
... def wget(url, dest_fname=None): try: #python3 from urllib.request import urlopen except: #python2 from urllib2 import urlopen from os.path import split, join ... try: contents = urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) ...
98bb4305ccdd8a83763bedb1f09c261e9904487c
cla_backend/apps/legalaid/tests/test_views.py
cla_backend/apps/legalaid/tests/test_views.py
import unittest from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.contrib.auth.models import Permission from django.core.urlresolvers import reverse from legalaid.views import FullCaseViewSet from cla_backend.apps.call_centre.permissions import * from cla_backend.urls import * from rest_framework import routers class FullCaseViewSetTestCase(CLAProviderAuthBaseApiTestMixin, TestCase): def setUp(self): super(FullCaseViewSetTestCase, self).setUp() def test_filter_queryset_success_200(self): response = self.client.get('/call_centre/api/v1/case/?search=Mark%20O%E2%80%99Brien', HTTP_AUTHORIZATION='Bearer %s' % 'operator_manager_token') self.assertEqual(response.status_code, 200)
from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase, Client from django.contrib.auth.models import Permission from django.core.urlresolvers import reverse from legalaid.views import FullCaseViewSet from cla_backend.apps.call_centre.permissions import * from cla_backend.urls import * from rest_framework import routers from legalaid.tests.views.test_base import CLAOperatorAuthBaseApiTestMixin class FullCaseViewSetTestCase(CLAOperatorAuthBaseApiTestMixin,TestCase): def setUp(self): super(FullCaseViewSetTestCase, self).setUp() self.url = reverse('call_centre:case-list') def test_filter_queryset_for_unicode_characters_status_code_200(self): response = self.client.get(self.url+'?search=Mark%20O%E2%80%99Brien', HTTP_AUTHORIZATION='Bearer %s' % self.operator_manager_token) self.assertEqual(response.status_code, 200) def test_filter_queryset_for_only_ASCII_characters_status_code_200(self): response = self.client.get(self.url+'?search=John Smith', HTTP_AUTHORIZATION='Bearer %s' % self.operator_manager_token) self.assertEqual(response.status_code, 200)
Refactor the test for the method test_filter inside class FullCaseViewSet and create a new test that allows you to test for just ASCII characters
Refactor the test for the method test_filter inside class FullCaseViewSet and create a new test that allows you to test for just ASCII characters
Python
mit
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
- import unittest - from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase, Client from django.contrib.auth.models import Permission + from django.core.urlresolvers import reverse - from legalaid.tests.views.test_base import CLAProviderAuthBaseApiTestMixin from legalaid.views import FullCaseViewSet from cla_backend.apps.call_centre.permissions import * from cla_backend.urls import * - from rest_framework import routers + from legalaid.tests.views.test_base import CLAOperatorAuthBaseApiTestMixin - class FullCaseViewSetTestCase(CLAProviderAuthBaseApiTestMixin, TestCase): + class FullCaseViewSetTestCase(CLAOperatorAuthBaseApiTestMixin,TestCase): def setUp(self): super(FullCaseViewSetTestCase, self).setUp() + self.url = reverse('call_centre:case-list') - def test_filter_queryset_success_200(self): + def test_filter_queryset_for_unicode_characters_status_code_200(self): - response = self.client.get('/call_centre/api/v1/case/?search=Mark%20O%E2%80%99Brien', HTTP_AUTHORIZATION='Bearer %s' % 'operator_manager_token') + response = self.client.get(self.url+'?search=Mark%20O%E2%80%99Brien', HTTP_AUTHORIZATION='Bearer %s' % self.operator_manager_token) self.assertEqual(response.status_code, 200) + def test_filter_queryset_for_only_ASCII_characters_status_code_200(self): + response = self.client.get(self.url+'?search=John Smith', HTTP_AUTHORIZATION='Bearer %s' % self.operator_manager_token) + self.assertEqual(response.status_code, 200) + +
Refactor the test for the method test_filter inside class FullCaseViewSet and create a new test that allows you to test for just ASCII characters
## Code Before: import unittest from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase, Client from django.contrib.auth.models import Permission from legalaid.tests.views.test_base import CLAProviderAuthBaseApiTestMixin from legalaid.views import FullCaseViewSet from cla_backend.apps.call_centre.permissions import * from cla_backend.urls import * from rest_framework import routers class FullCaseViewSetTestCase(CLAProviderAuthBaseApiTestMixin, TestCase): def setUp(self): super(FullCaseViewSetTestCase, self).setUp() def test_filter_queryset_success_200(self): response = self.client.get('/call_centre/api/v1/case/?search=Mark%20O%E2%80%99Brien', HTTP_AUTHORIZATION='Bearer %s' % 'operator_manager_token') self.assertEqual(response.status_code, 200) ## Instruction: Refactor the test for the method test_filter inside class FullCaseViewSet and create a new test that allows you to test for just ASCII characters ## Code After: from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase, Client from django.contrib.auth.models import Permission from django.core.urlresolvers import reverse from legalaid.views import FullCaseViewSet from cla_backend.apps.call_centre.permissions import * from cla_backend.urls import * from rest_framework import routers from legalaid.tests.views.test_base import CLAOperatorAuthBaseApiTestMixin class FullCaseViewSetTestCase(CLAOperatorAuthBaseApiTestMixin,TestCase): def setUp(self): super(FullCaseViewSetTestCase, self).setUp() self.url = reverse('call_centre:case-list') def test_filter_queryset_for_unicode_characters_status_code_200(self): response = self.client.get(self.url+'?search=Mark%20O%E2%80%99Brien', HTTP_AUTHORIZATION='Bearer %s' % self.operator_manager_token) self.assertEqual(response.status_code, 200) def test_filter_queryset_for_only_ASCII_characters_status_code_200(self): response = self.client.get(self.url+'?search=John Smith', HTTP_AUTHORIZATION='Bearer %s' % self.operator_manager_token) self.assertEqual(response.status_code, 200)
// ... existing code ... from django.contrib.auth.models import User from django.core.urlresolvers import reverse // ... modified code ... from django.test import TestCase, Client from django.contrib.auth.models import Permission from django.core.urlresolvers import reverse from legalaid.views import FullCaseViewSet from cla_backend.apps.call_centre.permissions import * from cla_backend.urls import * from rest_framework import routers from legalaid.tests.views.test_base import CLAOperatorAuthBaseApiTestMixin class FullCaseViewSetTestCase(CLAOperatorAuthBaseApiTestMixin,TestCase): def setUp(self): super(FullCaseViewSetTestCase, self).setUp() self.url = reverse('call_centre:case-list') def test_filter_queryset_for_unicode_characters_status_code_200(self): response = self.client.get(self.url+'?search=Mark%20O%E2%80%99Brien', HTTP_AUTHORIZATION='Bearer %s' % self.operator_manager_token) self.assertEqual(response.status_code, 200) def test_filter_queryset_for_only_ASCII_characters_status_code_200(self): response = self.client.get(self.url+'?search=John Smith', HTTP_AUTHORIZATION='Bearer %s' % self.operator_manager_token) self.assertEqual(response.status_code, 200) // ... rest of the code ...
993bab40e4df323c671c99eec63d366028818a36
rosie/chamber_of_deputies/tests/test_election_expenses_classifier.py
rosie/chamber_of_deputies/tests/test_election_expenses_classifier.py
from unittest import TestCase import numpy as np import pandas as pd from rosie.chamber_of_deputies.classifiers import ElectionExpensesClassifier class TestElectionExpensesClassifier(TestCase): def setUp(self): self.dataset = pd.read_csv('rosie/chamber_of_deputies/tests/fixtures/election_expenses_classifier.csv', dtype={'name': np.str, 'legal_entity': np.str}) self.subject = ElectionExpensesClassifier() def test_is_election_company(self): self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[0], True) def test_is_not_election_company(self): self.assertEqual(self.subject.predict(self.dataset)[1], False) def test_fit(self): self.assertEqual(self.subject.fit(self.dataset), self.subject) def test_tranform(self): self.assertEqual(self.subject.transform(), self.subject)
from unittest import TestCase import numpy as np import pandas as pd from rosie.chamber_of_deputies.classifiers import ElectionExpensesClassifier class TestElectionExpensesClassifier(TestCase): def setUp(self): self.dataset = pd.read_csv('rosie/chamber_of_deputies/tests/fixtures/election_expenses_classifier.csv', dtype={'name': np.str, 'legal_entity': np.str}) self.election_expenser_classifier = ElectionExpensesClassifier() def test_is_election_company(self): self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[0], True) def test_is_not_election_company(self): self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[1], False) def test_fit(self): self.assertEqual(self.election_expenser_classifier.fit(self.dataset), self.election_expenser_classifier) def test_tranform(self): self.assertEqual(self.election_expenser_classifier.transform(), self.election_expenser_classifier)
Remove a Rails accent of use subject in favor of Zen of Python: explicit is better than implicit and readbility counts
Remove a Rails accent of use subject in favor of Zen of Python: explicit is better than implicit and readbility counts
Python
mit
marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,datasciencebr/rosie,datasciencebr/serenata-de-amor
from unittest import TestCase import numpy as np import pandas as pd from rosie.chamber_of_deputies.classifiers import ElectionExpensesClassifier class TestElectionExpensesClassifier(TestCase): def setUp(self): self.dataset = pd.read_csv('rosie/chamber_of_deputies/tests/fixtures/election_expenses_classifier.csv', dtype={'name': np.str, 'legal_entity': np.str}) - self.subject = ElectionExpensesClassifier() + self.election_expenser_classifier = ElectionExpensesClassifier() def test_is_election_company(self): - self.assertEqual(self.subject.predict(self.dataset)[0], True) + self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[0], True) def test_is_not_election_company(self): - self.assertEqual(self.subject.predict(self.dataset)[1], False) + self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[1], False) def test_fit(self): - self.assertEqual(self.subject.fit(self.dataset), self.subject) + self.assertEqual(self.election_expenser_classifier.fit(self.dataset), self.election_expenser_classifier) def test_tranform(self): - self.assertEqual(self.subject.transform(), self.subject) + self.assertEqual(self.election_expenser_classifier.transform(), self.election_expenser_classifier)
Remove a Rails accent of use subject in favor of Zen of Python: explicit is better than implicit and readbility counts
## Code Before: from unittest import TestCase import numpy as np import pandas as pd from rosie.chamber_of_deputies.classifiers import ElectionExpensesClassifier class TestElectionExpensesClassifier(TestCase): def setUp(self): self.dataset = pd.read_csv('rosie/chamber_of_deputies/tests/fixtures/election_expenses_classifier.csv', dtype={'name': np.str, 'legal_entity': np.str}) self.subject = ElectionExpensesClassifier() def test_is_election_company(self): self.assertEqual(self.subject.predict(self.dataset)[0], True) def test_is_not_election_company(self): self.assertEqual(self.subject.predict(self.dataset)[1], False) def test_fit(self): self.assertEqual(self.subject.fit(self.dataset), self.subject) def test_tranform(self): self.assertEqual(self.subject.transform(), self.subject) ## Instruction: Remove a Rails accent of use subject in favor of Zen of Python: explicit is better than implicit and readbility counts ## Code After: from unittest import TestCase import numpy as np import pandas as pd from rosie.chamber_of_deputies.classifiers import ElectionExpensesClassifier class TestElectionExpensesClassifier(TestCase): def setUp(self): self.dataset = pd.read_csv('rosie/chamber_of_deputies/tests/fixtures/election_expenses_classifier.csv', dtype={'name': np.str, 'legal_entity': np.str}) self.election_expenser_classifier = ElectionExpensesClassifier() def test_is_election_company(self): self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[0], True) def test_is_not_election_company(self): self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[1], False) def test_fit(self): self.assertEqual(self.election_expenser_classifier.fit(self.dataset), self.election_expenser_classifier) def test_tranform(self): self.assertEqual(self.election_expenser_classifier.transform(), self.election_expenser_classifier)
// ... existing code ... self.dataset = pd.read_csv('rosie/chamber_of_deputies/tests/fixtures/election_expenses_classifier.csv', dtype={'name': np.str, 'legal_entity': np.str}) self.election_expenser_classifier = ElectionExpensesClassifier() def test_is_election_company(self): self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[0], True) def test_is_not_election_company(self): self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[1], False) def test_fit(self): self.assertEqual(self.election_expenser_classifier.fit(self.dataset), self.election_expenser_classifier) def test_tranform(self): self.assertEqual(self.election_expenser_classifier.transform(), self.election_expenser_classifier) // ... rest of the code ...
cae7a57304e207f319e9bb2e52837ee207d0d96e
mcdowell/src/main/python/ch1/ch1.py
mcdowell/src/main/python/ch1/ch1.py
def unique(string): counter = {} for c in string: if c in counter: return False else: counter[c] = 1 print(counter) for k in counter: if counter[k] > 1: return False else: return True def reverse(string): result = [] for i in range(len(string)): result.append(string[-(i+1)]) return "".join(result)
def unique(string): counter = {} for c in string: if c in counter: return False else: counter[c] = 1 else: return True def reverse(string): result = [] for i in range(len(string)): result.append(string[-(i+1)]) return "".join(result) def is_permutation(str1, str2): if len(str1) != len(str2): return False counter = {} for i in range(len(str1)): if str1[i] in counter: counter[str1[i]] += 1 else: counter[str1[i]] = 1 if str2[i] in counter: counter[str2[i]] -= 1 else: counter[str2[i]] = -1 for k in counter: if counter[k] != 0: return False else: return True
Add is_permutation function. Simplifiy unique function.
Add is_permutation function. Simplifiy unique function.
Python
mit
jamesewoo/tigeruppercut,jamesewoo/tigeruppercut
def unique(string): counter = {} for c in string: if c in counter: - counter[c] += 1 + return False else: counter[c] = 1 - print(counter) - for k in counter: - if counter[k] > 1: - return False else: return True def reverse(string): result = [] for i in range(len(string)): result.append(string[-(i+1)]) return "".join(result) + def is_permutation(str1, str2): + if len(str1) != len(str2): + return False + counter = {} + for i in range(len(str1)): + if str1[i] in counter: + counter[str1[i]] += 1 + else: + counter[str1[i]] = 1 + if str2[i] in counter: + counter[str2[i]] -= 1 + else: + counter[str2[i]] = -1 + for k in counter: + if counter[k] != 0: + return False + else: + return True
Add is_permutation function. Simplifiy unique function.
## Code Before: def unique(string): counter = {} for c in string: if c in counter: counter[c] += 1 else: counter[c] = 1 print(counter) for k in counter: if counter[k] > 1: return False else: return True def reverse(string): result = [] for i in range(len(string)): result.append(string[-(i+1)]) return "".join(result) ## Instruction: Add is_permutation function. Simplifiy unique function. ## Code After: def unique(string): counter = {} for c in string: if c in counter: return False else: counter[c] = 1 else: return True def reverse(string): result = [] for i in range(len(string)): result.append(string[-(i+1)]) return "".join(result) def is_permutation(str1, str2): if len(str1) != len(str2): return False counter = {} for i in range(len(str1)): if str1[i] in counter: counter[str1[i]] += 1 else: counter[str1[i]] = 1 if str2[i] in counter: counter[str2[i]] -= 1 else: counter[str2[i]] = -1 for k in counter: if counter[k] != 0: return False else: return True
// ... existing code ... for c in string: if c in counter: return False else: counter[c] = 1 else: return True // ... modified code ... return "".join(result) def is_permutation(str1, str2): if len(str1) != len(str2): return False counter = {} for i in range(len(str1)): if str1[i] in counter: counter[str1[i]] += 1 else: counter[str1[i]] = 1 if str2[i] in counter: counter[str2[i]] -= 1 else: counter[str2[i]] = -1 for k in counter: if counter[k] != 0: return False else: return True // ... rest of the code ...
5b8482aa7851f11df81e8a457c85b53dbcbeeddf
f8a_jobs/graph_sync.py
f8a_jobs/graph_sync.py
"""Functions to retrieve pending list and invoke Graph Sync.""" import f8a_jobs.defaults as configuration import requests import logging from urllib.parse import urljoin logger = logging.getLogger(__name__) def _api_call(url, params={}): try: logger.info("API Call for url: %s, params: %s" % (url, params)) r = requests.get(url, params=params) if r is None: logger.error("Returned response is: %s" % r) raise Exception("Empty response found") result = {"data": r.json()} except Exception: logger.error(traceback.format_exc()) result = {"error": "Failed to retrieve data from Data Model Importer backend"} return result def fetch_pending(params={}): """Invoke Pending Graph Sync APIs for given parameters.""" url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending") return _api_call(url, params) def invoke_sync(params={}): """Invoke Graph Sync APIs to sync for given parameters.""" url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all") return _api_call(url, params)
"""Functions to retrieve pending list and invoke Graph Sync.""" import f8a_jobs.defaults as configuration import requests import traceback import logging from urllib.parse import urljoin logger = logging.getLogger(__name__) def _api_call(url, params=None): params = params or {} try: logger.info("API Call for url: %s, params: %s" % (url, params)) r = requests.get(url, params=params) r.raise_for_status() result = {"data": r.json()} except requests.exceptions.HTTPError: logger.error(traceback.format_exc()) result = {"error": "Failed to retrieve data from Data Model Importer backend"} return result def fetch_pending(params=None): params = params or {} """Invoke Pending Graph Sync APIs for given parameters.""" url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending") return _api_call(url, params) def invoke_sync(params=None): params = params or {} """Invoke Graph Sync APIs to sync for given parameters.""" url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all") return _api_call(url, params)
Fix code for review comments
Fix code for review comments
Python
apache-2.0
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
"""Functions to retrieve pending list and invoke Graph Sync.""" import f8a_jobs.defaults as configuration import requests import traceback import logging + from urllib.parse import urljoin logger = logging.getLogger(__name__) - def _api_call(url, params={}): + def _api_call(url, params=None): + params = params or {} try: logger.info("API Call for url: %s, params: %s" % (url, params)) r = requests.get(url, params=params) + r.raise_for_status() - if r is None: - logger.error("Returned response is: %s" % r) - raise Exception("Empty response found") - result = {"data": r.json()} - except Exception: + except requests.exceptions.HTTPError: logger.error(traceback.format_exc()) result = {"error": "Failed to retrieve data from Data Model Importer backend"} return result - def fetch_pending(params={}): + def fetch_pending(params=None): + params = params or {} """Invoke Pending Graph Sync APIs for given parameters.""" - url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending") + url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending") return _api_call(url, params) - def invoke_sync(params={}): + def invoke_sync(params=None): + params = params or {} """Invoke Graph Sync APIs to sync for given parameters.""" - url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all") + url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all") return _api_call(url, params)
Fix code for review comments
## Code Before: """Functions to retrieve pending list and invoke Graph Sync.""" import f8a_jobs.defaults as configuration import requests import traceback import logging logger = logging.getLogger(__name__) def _api_call(url, params={}): try: logger.info("API Call for url: %s, params: %s" % (url, params)) r = requests.get(url, params=params) if r is None: logger.error("Returned response is: %s" % r) raise Exception("Empty response found") result = {"data": r.json()} except Exception: logger.error(traceback.format_exc()) result = {"error": "Failed to retrieve data from Data Model Importer backend"} return result def fetch_pending(params={}): """Invoke Pending Graph Sync APIs for given parameters.""" url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending") return _api_call(url, params) def invoke_sync(params={}): """Invoke Graph Sync APIs to sync for given parameters.""" url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all") return _api_call(url, params) ## Instruction: Fix code for review comments ## Code After: """Functions to retrieve pending list and invoke Graph Sync.""" import f8a_jobs.defaults as configuration import requests import traceback import logging from urllib.parse import urljoin logger = logging.getLogger(__name__) def _api_call(url, params=None): params = params or {} try: logger.info("API Call for url: %s, params: %s" % (url, params)) r = requests.get(url, params=params) r.raise_for_status() result = {"data": r.json()} except requests.exceptions.HTTPError: logger.error(traceback.format_exc()) result = {"error": "Failed to retrieve data from Data Model Importer backend"} return result def fetch_pending(params=None): params = params or {} """Invoke Pending Graph Sync APIs for given parameters.""" url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending") return _api_call(url, params) def invoke_sync(params=None): params = params or {} """Invoke Graph Sync APIs to sync for given parameters.""" url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all") return _api_call(url, params)
# ... existing code ... import traceback import logging from urllib.parse import urljoin # ... modified code ... def _api_call(url, params=None): params = params or {} try: logger.info("API Call for url: %s, params: %s" % (url, params)) r = requests.get(url, params=params) r.raise_for_status() result = {"data": r.json()} except requests.exceptions.HTTPError: logger.error(traceback.format_exc()) result = {"error": "Failed to retrieve data from Data Model Importer backend"} ... def fetch_pending(params=None): params = params or {} """Invoke Pending Graph Sync APIs for given parameters.""" url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending") return _api_call(url, params) def invoke_sync(params=None): params = params or {} """Invoke Graph Sync APIs to sync for given parameters.""" url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all") return _api_call(url, params) # ... rest of the code ...
ba4a20ee94355464ec8b35750660f7b8fe0cc3db
tests/test_yaml2ncml.py
tests/test_yaml2ncml.py
from __future__ import (absolute_import, division, print_function) import tempfile import pytest import ruamel.yaml as yaml from yaml2ncml import build def test_call(): output = subprocess.check_output(['yaml2ncml', 'roms_0.yaml']) with open('base_roms_test.ncml') as f: expected = f.read() assert output.decode() == expected def test_save_file(): outfile = tempfile.mktemp(suffix='.ncml') subprocess.call(['yaml2ncml', 'roms_0.yaml', '--output={}'.format(outfile)]) with open('base_roms_test.ncml') as f: expected = f.read() with open(outfile) as f: output = f.read() assert output == expected
from __future__ import (absolute_import, division, print_function) import subprocess import tempfile import pytest import ruamel.yaml as yaml from yaml2ncml import build def test_call(): output = subprocess.check_output(['yaml2ncml', 'roms_0.yaml']) with open('base_roms_test.ncml') as f: expected = f.read() assert output.decode() == expected def test_save_file(): outfile = tempfile.mktemp(suffix='.ncml') subprocess.call(['yaml2ncml', 'roms_0.yaml', '--output={}'.format(outfile)]) with open('base_roms_test.ncml') as f: expected = f.read() with open(outfile) as f: output = f.read() assert output == expected @pytest.fixture def load_ymal(fname='roms_1.yaml'): with open(fname, 'r') as stream: yml = yaml.load(stream, Loader=yaml.RoundTripLoader) return yml def test_bad_yaml(): with pytest.raises(ValueError): yml = load_ymal(fname='roms_1.yaml') build(yml)
Test bad call/better error msg
Test bad call/better error msg
Python
mit
ocefpaf/yaml2ncml,USGS-CMG/yaml2ncml
from __future__ import (absolute_import, division, print_function) import subprocess import tempfile + + import pytest + import ruamel.yaml as yaml + + from yaml2ncml import build def test_call(): output = subprocess.check_output(['yaml2ncml', 'roms_0.yaml']) with open('base_roms_test.ncml') as f: expected = f.read() assert output.decode() == expected def test_save_file(): outfile = tempfile.mktemp(suffix='.ncml') subprocess.call(['yaml2ncml', 'roms_0.yaml', '--output={}'.format(outfile)]) with open('base_roms_test.ncml') as f: expected = f.read() with open(outfile) as f: output = f.read() assert output == expected + + @pytest.fixture + def load_ymal(fname='roms_1.yaml'): + with open(fname, 'r') as stream: + yml = yaml.load(stream, Loader=yaml.RoundTripLoader) + return yml + + + def test_bad_yaml(): + with pytest.raises(ValueError): + yml = load_ymal(fname='roms_1.yaml') + build(yml) +
Test bad call/better error msg
## Code Before: from __future__ import (absolute_import, division, print_function) import subprocess import tempfile def test_call(): output = subprocess.check_output(['yaml2ncml', 'roms_0.yaml']) with open('base_roms_test.ncml') as f: expected = f.read() assert output.decode() == expected def test_save_file(): outfile = tempfile.mktemp(suffix='.ncml') subprocess.call(['yaml2ncml', 'roms_0.yaml', '--output={}'.format(outfile)]) with open('base_roms_test.ncml') as f: expected = f.read() with open(outfile) as f: output = f.read() assert output == expected ## Instruction: Test bad call/better error msg ## Code After: from __future__ import (absolute_import, division, print_function) import subprocess import tempfile import pytest import ruamel.yaml as yaml from yaml2ncml import build def test_call(): output = subprocess.check_output(['yaml2ncml', 'roms_0.yaml']) with open('base_roms_test.ncml') as f: expected = f.read() assert output.decode() == expected def test_save_file(): outfile = tempfile.mktemp(suffix='.ncml') subprocess.call(['yaml2ncml', 'roms_0.yaml', '--output={}'.format(outfile)]) with open('base_roms_test.ncml') as f: expected = f.read() with open(outfile) as f: output = f.read() assert output == expected @pytest.fixture def load_ymal(fname='roms_1.yaml'): with open(fname, 'r') as stream: yml = yaml.load(stream, Loader=yaml.RoundTripLoader) return yml def test_bad_yaml(): with pytest.raises(ValueError): yml = load_ymal(fname='roms_1.yaml') build(yml)
# ... existing code ... import subprocess import tempfile import pytest import ruamel.yaml as yaml from yaml2ncml import build # ... modified code ... output = f.read() assert output == expected @pytest.fixture def load_ymal(fname='roms_1.yaml'): with open(fname, 'r') as stream: yml = yaml.load(stream, Loader=yaml.RoundTripLoader) return yml def test_bad_yaml(): with pytest.raises(ValueError): yml = load_ymal(fname='roms_1.yaml') build(yml) # ... rest of the code ...
fa7172a5e3231e738d85df3baba130fdec7497d1
derrida/outwork/views.py
derrida/outwork/views.py
from django.views.generic import ListView from haystack.query import SearchQuerySet from haystack.inputs import Clean, Raw from derrida.outwork.models import Outwork class OutworkListView(ListView): model = Outwork template_name = 'outwork/outwork_list.html' paginate_by = 16 def get_queryset(self): # restrict to published articles sqs = SearchQuerySet().models(self.model).filter(published=True) if self.request.GET.get('query', None): sqs = sqs.filter(content=Clean(self.request.GET['query'])) # default sort ? return sqs # return Outwork.objects.published(for_user=self.request.user)
from django.views.generic import ListView from haystack.query import SearchQuerySet from haystack.inputs import Clean, Raw from derrida.outwork.models import Outwork class OutworkListView(ListView): model = Outwork template_name = 'outwork/outwork_list.html' paginate_by = 16 def get_queryset(self): # restrict to published articles sqs = SearchQuerySet().models(self.model).filter(published=Raw(True)) if self.request.GET.get('query', None): sqs = sqs.filter(content=Clean(self.request.GET['query'])) # default sort ? return sqs # return Outwork.objects.published(for_user=self.request.user)
Fix outwork list view to properly filter on published=true in Solr
Fix outwork list view to properly filter on published=true in Solr
Python
apache-2.0
Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django
from django.views.generic import ListView from haystack.query import SearchQuerySet - from haystack.inputs import Clean + from haystack.inputs import Clean, Raw from derrida.outwork.models import Outwork class OutworkListView(ListView): model = Outwork template_name = 'outwork/outwork_list.html' paginate_by = 16 def get_queryset(self): # restrict to published articles - sqs = SearchQuerySet().models(self.model).filter(published=True) + sqs = SearchQuerySet().models(self.model).filter(published=Raw(True)) if self.request.GET.get('query', None): sqs = sqs.filter(content=Clean(self.request.GET['query'])) # default sort ? return sqs # return Outwork.objects.published(for_user=self.request.user)
Fix outwork list view to properly filter on published=true in Solr
## Code Before: from django.views.generic import ListView from haystack.query import SearchQuerySet from haystack.inputs import Clean from derrida.outwork.models import Outwork class OutworkListView(ListView): model = Outwork template_name = 'outwork/outwork_list.html' paginate_by = 16 def get_queryset(self): # restrict to published articles sqs = SearchQuerySet().models(self.model).filter(published=True) if self.request.GET.get('query', None): sqs = sqs.filter(content=Clean(self.request.GET['query'])) # default sort ? return sqs # return Outwork.objects.published(for_user=self.request.user) ## Instruction: Fix outwork list view to properly filter on published=true in Solr ## Code After: from django.views.generic import ListView from haystack.query import SearchQuerySet from haystack.inputs import Clean, Raw from derrida.outwork.models import Outwork class OutworkListView(ListView): model = Outwork template_name = 'outwork/outwork_list.html' paginate_by = 16 def get_queryset(self): # restrict to published articles sqs = SearchQuerySet().models(self.model).filter(published=Raw(True)) if self.request.GET.get('query', None): sqs = sqs.filter(content=Clean(self.request.GET['query'])) # default sort ? return sqs # return Outwork.objects.published(for_user=self.request.user)
# ... existing code ... from django.views.generic import ListView from haystack.query import SearchQuerySet from haystack.inputs import Clean, Raw from derrida.outwork.models import Outwork # ... modified code ... def get_queryset(self): # restrict to published articles sqs = SearchQuerySet().models(self.model).filter(published=Raw(True)) if self.request.GET.get('query', None): sqs = sqs.filter(content=Clean(self.request.GET['query'])) # ... rest of the code ...
a102731c88f496b557dedd4024fb9b82801d134a
oauthlib/__init__.py
oauthlib/__init__.py
__author__ = 'The OAuthlib Community' __version__ = '2.1.0' import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass import logging from logging import NullHandler __author__ = 'The OAuthlib Community' __version__ = '2.1.0' import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger('oauthlib').addHandler(NullHandler())
import logging from logging import NullHandler __author__ = 'The OAuthlib Community' __version__ = '2.1.0' logging.getLogger('oauthlib').addHandler(NullHandler())
Remove Python 2.6 compatibility code.
Remove Python 2.6 compatibility code.
Python
bsd-3-clause
idan/oauthlib,oauthlib/oauthlib
+ import logging + from logging import NullHandler __author__ = 'The OAuthlib Community' __version__ = '2.1.0' - - import logging - try: # Python 2.7+ - from logging import NullHandler - except ImportError: - class NullHandler(logging.Handler): - - def emit(self, record): - pass - logging.getLogger('oauthlib').addHandler(NullHandler())
Remove Python 2.6 compatibility code.
## Code Before: __author__ = 'The OAuthlib Community' __version__ = '2.1.0' import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger('oauthlib').addHandler(NullHandler()) ## Instruction: Remove Python 2.6 compatibility code. ## Code After: import logging from logging import NullHandler __author__ = 'The OAuthlib Community' __version__ = '2.1.0' logging.getLogger('oauthlib').addHandler(NullHandler())
// ... existing code ... import logging from logging import NullHandler __author__ = 'The OAuthlib Community' // ... modified code ... __version__ = '2.1.0' logging.getLogger('oauthlib').addHandler(NullHandler()) // ... rest of the code ...
5237cb7f1339eb13b4c01f1c3611448a8f865726
terms/templatetags/terms.py
terms/templatetags/terms.py
from django.template import Library from ..html import TermsHTMLReconstructor register = Library() @register.filter @stringfilter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out
from django.template import Library from django.template.defaultfilters import stringfilter from ..html import TermsHTMLReconstructor register = Library() @register.filter @stringfilter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out
Make sure the filter arg is a string.
Make sure the filter arg is a string.
Python
bsd-3-clause
BertrandBordage/django-terms,philippeowagner/django-terms,BertrandBordage/django-terms,philippeowagner/django-terms
from django.template import Library + from django.template.defaultfilters import stringfilter from ..html import TermsHTMLReconstructor register = Library() @register.filter + @stringfilter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out
Make sure the filter arg is a string.
## Code Before: from django.template import Library from ..html import TermsHTMLReconstructor register = Library() @register.filter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out ## Instruction: Make sure the filter arg is a string. ## Code After: from django.template import Library from django.template.defaultfilters import stringfilter from ..html import TermsHTMLReconstructor register = Library() @register.filter @stringfilter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out
# ... existing code ... from django.template import Library from django.template.defaultfilters import stringfilter from ..html import TermsHTMLReconstructor # ... modified code ... @register.filter @stringfilter def replace_terms(html): parser = TermsHTMLReconstructor() # ... rest of the code ...
51dcc5fddeb649ec582c435d6244ea4d2e4f8991
zproject/jinja2/__init__.py
zproject/jinja2/__init__.py
from typing import Any from django.contrib.staticfiles.storage import staticfiles_storage from django.template.defaultfilters import slugify, pluralize from django.utils import translation from django.utils.timesince import timesince from jinja2 import Environment from two_factor.templatetags.two_factor import device_action from .compressors import minified_js from zerver.templatetags.app_filters import display_list, render_markdown_path def environment(**options: Any) -> Environment: env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'url': reverse, 'render_markdown_path': render_markdown_path, 'minified_js': minified_js, }) env.install_gettext_translations(translation, True) env.filters['slugify'] = slugify env.filters['pluralize'] = pluralize env.filters['display_list'] = display_list env.filters['device_action'] = device_action return env
from typing import Any from django.contrib.staticfiles.storage import staticfiles_storage from django.template.defaultfilters import slugify, pluralize from django.urls import reverse from django.utils import translation from django.utils.timesince import timesince from jinja2 import Environment from two_factor.templatetags.two_factor import device_action from .compressors import minified_js from zerver.templatetags.app_filters import display_list, render_markdown_path def environment(**options: Any) -> Environment: env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'url': reverse, 'render_markdown_path': render_markdown_path, 'minified_js': minified_js, }) env.install_gettext_translations(translation, True) env.filters['slugify'] = slugify env.filters['pluralize'] = pluralize env.filters['display_list'] = display_list env.filters['device_action'] = device_action env.filters['timesince'] = timesince return env
Add django timesince filter to jinja2 filters.
templates: Add django timesince filter to jinja2 filters.
Python
apache-2.0
eeshangarg/zulip,brainwane/zulip,rht/zulip,brainwane/zulip,rishig/zulip,brainwane/zulip,brainwane/zulip,eeshangarg/zulip,synicalsyntax/zulip,punchagan/zulip,eeshangarg/zulip,synicalsyntax/zulip,zulip/zulip,punchagan/zulip,andersk/zulip,kou/zulip,zulip/zulip,punchagan/zulip,kou/zulip,rishig/zulip,showell/zulip,punchagan/zulip,synicalsyntax/zulip,timabbott/zulip,timabbott/zulip,rishig/zulip,shubhamdhama/zulip,shubhamdhama/zulip,kou/zulip,hackerkid/zulip,eeshangarg/zulip,rishig/zulip,rht/zulip,hackerkid/zulip,hackerkid/zulip,andersk/zulip,tommyip/zulip,kou/zulip,synicalsyntax/zulip,rht/zulip,punchagan/zulip,rishig/zulip,tommyip/zulip,hackerkid/zulip,showell/zulip,shubhamdhama/zulip,tommyip/zulip,showell/zulip,synicalsyntax/zulip,kou/zulip,shubhamdhama/zulip,andersk/zulip,punchagan/zulip,timabbott/zulip,shubhamdhama/zulip,rht/zulip,timabbott/zulip,rishig/zulip,timabbott/zulip,andersk/zulip,shubhamdhama/zulip,andersk/zulip,punchagan/zulip,zulip/zulip,zulip/zulip,andersk/zulip,tommyip/zulip,zulip/zulip,rht/zulip,brainwane/zulip,timabbott/zulip,showell/zulip,brainwane/zulip,shubhamdhama/zulip,tommyip/zulip,zulip/zulip,rishig/zulip,eeshangarg/zulip,hackerkid/zulip,kou/zulip,rht/zulip,hackerkid/zulip,synicalsyntax/zulip,showell/zulip,zulip/zulip,synicalsyntax/zulip,eeshangarg/zulip,kou/zulip,showell/zulip,eeshangarg/zulip,tommyip/zulip,rht/zulip,showell/zulip,timabbott/zulip,andersk/zulip,hackerkid/zulip,tommyip/zulip,brainwane/zulip
from typing import Any from django.contrib.staticfiles.storage import staticfiles_storage from django.template.defaultfilters import slugify, pluralize from django.urls import reverse from django.utils import translation + from django.utils.timesince import timesince from jinja2 import Environment from two_factor.templatetags.two_factor import device_action from .compressors import minified_js from zerver.templatetags.app_filters import display_list, render_markdown_path def environment(**options: Any) -> Environment: env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'url': reverse, 'render_markdown_path': render_markdown_path, 'minified_js': minified_js, }) env.install_gettext_translations(translation, True) env.filters['slugify'] = slugify env.filters['pluralize'] = pluralize env.filters['display_list'] = display_list env.filters['device_action'] = device_action + env.filters['timesince'] = timesince return env
Add django timesince filter to jinja2 filters.
## Code Before: from typing import Any from django.contrib.staticfiles.storage import staticfiles_storage from django.template.defaultfilters import slugify, pluralize from django.urls import reverse from django.utils import translation from jinja2 import Environment from two_factor.templatetags.two_factor import device_action from .compressors import minified_js from zerver.templatetags.app_filters import display_list, render_markdown_path def environment(**options: Any) -> Environment: env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'url': reverse, 'render_markdown_path': render_markdown_path, 'minified_js': minified_js, }) env.install_gettext_translations(translation, True) env.filters['slugify'] = slugify env.filters['pluralize'] = pluralize env.filters['display_list'] = display_list env.filters['device_action'] = device_action return env ## Instruction: Add django timesince filter to jinja2 filters. ## Code After: from typing import Any from django.contrib.staticfiles.storage import staticfiles_storage from django.template.defaultfilters import slugify, pluralize from django.urls import reverse from django.utils import translation from django.utils.timesince import timesince from jinja2 import Environment from two_factor.templatetags.two_factor import device_action from .compressors import minified_js from zerver.templatetags.app_filters import display_list, render_markdown_path def environment(**options: Any) -> Environment: env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'url': reverse, 'render_markdown_path': render_markdown_path, 'minified_js': minified_js, }) env.install_gettext_translations(translation, True) env.filters['slugify'] = slugify env.filters['pluralize'] = pluralize env.filters['display_list'] = display_list env.filters['device_action'] = device_action env.filters['timesince'] = timesince return env
... from django.urls import reverse from django.utils import translation from django.utils.timesince import timesince from jinja2 import Environment from two_factor.templatetags.two_factor import device_action ... env.filters['display_list'] = display_list env.filters['device_action'] = device_action env.filters['timesince'] = timesince return env ...
e946f239695f74d83fcb1b4929ed2281846add4c
avalon/fusion/pipeline.py
avalon/fusion/pipeline.py
def imprint_container(tool, name, namespace, context, loader=None): """Imprint a Loader with metadata Containerisation enables a tracking of version, author and origin for loaded assets. Arguments: tool (object): The node in Fusion to imprint as container, usually a Loader. name (str): Name of resulting assembly namespace (str): Namespace under which to host container context (dict): Asset information loader (str, optional): Name of loader used to produce this container. Returns: None """ data = [ ("schema", "avalon-core:container-2.0"), ("id", "pyblish.avalon.container"), ("name", str(name)), ("namespace", str(namespace)), ("loader", str(loader)), ("representation", str(context["representation"]["_id"])), ] for key, value in data: tool.SetData("avalon.{}".format(key), value) def parse_container(tool): """Returns imprinted container data of a tool This reads the imprinted data from `imprint_container`. """ container = {} for key in ['schema', 'id', 'name', 'namespace', 'loader', 'representation']: value = tool.GetData('avalon.{}'.format(key)) container[key] = value return container
def imprint_container(tool, name, namespace, context, loader=None): """Imprint a Loader with metadata Containerisation enables a tracking of version, author and origin for loaded assets. Arguments: tool (object): The node in Fusion to imprint as container, usually a Loader. name (str): Name of resulting assembly namespace (str): Namespace under which to host container context (dict): Asset information loader (str, optional): Name of loader used to produce this container. Returns: None """ data = [ ("schema", "avalon-core:container-2.0"), ("id", "pyblish.avalon.container"), ("name", str(name)), ("namespace", str(namespace)), ("loader", str(loader)), ("representation", str(context["representation"]["_id"])), ] for key, value in data: tool.SetData("avalon.{}".format(key), value) def parse_container(tool): """Returns imprinted container data of a tool This reads the imprinted data from `imprint_container`. """ container = {} for key in ['schema', 'id', 'name', 'namespace', 'loader', 'representation']: value = tool.GetData('avalon.{}'.format(key)) container[key] = value # Store the tool's name container["objectName"] = tool.Name return container
Store tool's name when parsing container
Store tool's name when parsing container
Python
mit
MoonShineVFX/core,MoonShineVFX/core,getavalon/core,getavalon/core,mindbender-studio/core,mindbender-studio/core
def imprint_container(tool, name, namespace, context, loader=None): """Imprint a Loader with metadata Containerisation enables a tracking of version, author and origin for loaded assets. Arguments: tool (object): The node in Fusion to imprint as container, usually a Loader. name (str): Name of resulting assembly namespace (str): Namespace under which to host container context (dict): Asset information loader (str, optional): Name of loader used to produce this container. Returns: None """ data = [ ("schema", "avalon-core:container-2.0"), ("id", "pyblish.avalon.container"), ("name", str(name)), ("namespace", str(namespace)), ("loader", str(loader)), ("representation", str(context["representation"]["_id"])), ] for key, value in data: tool.SetData("avalon.{}".format(key), value) def parse_container(tool): """Returns imprinted container data of a tool - + This reads the imprinted data from `imprint_container`. - + """ container = {} for key in ['schema', 'id', 'name', 'namespace', 'loader', 'representation']: value = tool.GetData('avalon.{}'.format(key)) container[key] = value + # Store the tool's name + container["objectName"] = tool.Name + return container
Store tool's name when parsing container
## Code Before: def imprint_container(tool, name, namespace, context, loader=None): """Imprint a Loader with metadata Containerisation enables a tracking of version, author and origin for loaded assets. Arguments: tool (object): The node in Fusion to imprint as container, usually a Loader. name (str): Name of resulting assembly namespace (str): Namespace under which to host container context (dict): Asset information loader (str, optional): Name of loader used to produce this container. Returns: None """ data = [ ("schema", "avalon-core:container-2.0"), ("id", "pyblish.avalon.container"), ("name", str(name)), ("namespace", str(namespace)), ("loader", str(loader)), ("representation", str(context["representation"]["_id"])), ] for key, value in data: tool.SetData("avalon.{}".format(key), value) def parse_container(tool): """Returns imprinted container data of a tool This reads the imprinted data from `imprint_container`. """ container = {} for key in ['schema', 'id', 'name', 'namespace', 'loader', 'representation']: value = tool.GetData('avalon.{}'.format(key)) container[key] = value return container ## Instruction: Store tool's name when parsing container ## Code After: def imprint_container(tool, name, namespace, context, loader=None): """Imprint a Loader with metadata Containerisation enables a tracking of version, author and origin for loaded assets. Arguments: tool (object): The node in Fusion to imprint as container, usually a Loader. name (str): Name of resulting assembly namespace (str): Namespace under which to host container context (dict): Asset information loader (str, optional): Name of loader used to produce this container. Returns: None """ data = [ ("schema", "avalon-core:container-2.0"), ("id", "pyblish.avalon.container"), ("name", str(name)), ("namespace", str(namespace)), ("loader", str(loader)), ("representation", str(context["representation"]["_id"])), ] for key, value in data: tool.SetData("avalon.{}".format(key), value) def parse_container(tool): """Returns imprinted container data of a tool This reads the imprinted data from `imprint_container`. """ container = {} for key in ['schema', 'id', 'name', 'namespace', 'loader', 'representation']: value = tool.GetData('avalon.{}'.format(key)) container[key] = value # Store the tool's name container["objectName"] = tool.Name return container
... def parse_container(tool): """Returns imprinted container data of a tool This reads the imprinted data from `imprint_container`. """ container = {} ... container[key] = value # Store the tool's name container["objectName"] = tool.Name return container ...
fc9cd61f97924a1e3daf053319e9b49a73b58c80
dploy/__init__.py
dploy/__init__.py
import sys assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater" import dploy.main as main def stow(sources, dest, is_silent=True, is_dry_run=False): """ sub command stow """ main.Stow(sources, dest) # pylint: disable=protected-access def unstow(sources, dest): """ sub command unstow """ main.UnStow(sources, dest) # pylint: disable=protected-access def link(source, dest): """ sub command link """ main.Link(source, dest) # pylint: disable=protected-access
import sys assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater" import dploy.main as main def stow(sources, dest, is_silent=True, is_dry_run=False): """ sub command stow """ main.Stow(sources, dest, is_silent, is_dry_run) def unstow(sources, dest, is_silent=True, is_dry_run=False): """ sub command unstow """ main.UnStow(sources, dest, is_silent, is_dry_run) def link(source, dest, is_silent=True, is_dry_run=False): """ sub command link """ main.Link(source, dest, is_silent, is_dry_run)
Add is_silent & is_dry_run arguments to module API
Add is_silent & is_dry_run arguments to module API This way all the features of the command line commands is also in the module API
Python
mit
arecarn/dploy
import sys assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater" import dploy.main as main - def stow(sources, dest): + def stow(sources, dest, is_silent=True, is_dry_run=False): """ sub command stow """ + main.Stow(sources, dest, is_silent, is_dry_run) + def unstow(sources, dest, is_silent=True, is_dry_run=False): - main.Stow(sources, dest) # pylint: disable=protected-access - - - def unstow(sources, dest): """ sub command unstow """ + main.UnStow(sources, dest, is_silent, is_dry_run) - - main.UnStow(sources, dest) # pylint: disable=protected-access - def link(source, dest): + def link(source, dest, is_silent=True, is_dry_run=False): """ sub command link """ - main.Link(source, dest) # pylint: disable=protected-access + main.Link(source, dest, is_silent, is_dry_run)
Add is_silent & is_dry_run arguments to module API
## Code Before: import sys assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater" import dploy.main as main def stow(sources, dest): """ sub command stow """ main.Stow(sources, dest) # pylint: disable=protected-access def unstow(sources, dest): """ sub command unstow """ main.UnStow(sources, dest) # pylint: disable=protected-access def link(source, dest): """ sub command link """ main.Link(source, dest) # pylint: disable=protected-access ## Instruction: Add is_silent & is_dry_run arguments to module API ## Code After: import sys assert sys.version_info >= (3, 3), "Requires Python 3.3 or Greater" import dploy.main as main def stow(sources, dest, is_silent=True, is_dry_run=False): """ sub command stow """ main.Stow(sources, dest, is_silent, is_dry_run) def unstow(sources, dest, is_silent=True, is_dry_run=False): """ sub command unstow """ main.UnStow(sources, dest, is_silent, is_dry_run) def link(source, dest, is_silent=True, is_dry_run=False): """ sub command link """ main.Link(source, dest, is_silent, is_dry_run)
... def stow(sources, dest, is_silent=True, is_dry_run=False): """ sub command stow """ main.Stow(sources, dest, is_silent, is_dry_run) def unstow(sources, dest, is_silent=True, is_dry_run=False): """ sub command unstow """ main.UnStow(sources, dest, is_silent, is_dry_run) def link(source, dest, is_silent=True, is_dry_run=False): """ sub command link """ main.Link(source, dest, is_silent, is_dry_run) ...
8a0c17f39fd63a90b24ed79bd5bde4d52622e41d
irc/message.py
irc/message.py
class Tag(object): """ An IRC message tag ircv3.net/specs/core/message-tags-3.2.html """ @staticmethod def parse(item): key, sep, value = item.partition('=') value = value.replace('\\:', ';') value = value.replace('\\s', ' ') value = value.replace('\\n', '\n') value = value.replace('\\r', '\r') value = value.replace('\\\\', '\\') value = value or None return { 'key': key, 'value': value, from __future__ import print_function class Tag(object): """ An IRC message tag ircv3.net/specs/core/message-tags-3.2.html """ @staticmethod def parse(item): key, sep, value = item.partition('=') value = value.replace('\\:', ';') value = value.replace('\\s', ' ') value = value.replace('\\n', '\n') value = value.replace('\\r', '\r') value = value.replace('\\\\', '\\') value = value or None return { 'key': key, 'value': value, }
from __future__ import print_function class Tag(object): """ An IRC message tag ircv3.net/specs/core/message-tags-3.2.html """ @staticmethod def parse(item): r""" >>> Tag.parse('x') == {'key': 'x', 'value': None} True >>> Tag.parse('x=yes') == {'key': 'x', 'value': 'yes'} True >>> Tag.parse('x=3')['value'] '3' >>> Tag.parse('x=red fox\\:green eggs')['value'] 'red fox;green eggs' >>> Tag.parse('x=red fox:green eggs')['value'] 'red fox:green eggs' >>> print(Tag.parse('x=a\\nb\\nc')['value']) a b c """ key, sep, value = item.partition('=') value = value.replace('\\:', ';') value = value.replace('\\s', ' ') value = value.replace('\\n', '\n') value = value.replace('\\r', '\r') value = value.replace('\\\\', '\\') value = value or None return { 'key': key, 'value': value, }
Add tests for tag parsing
Add tests for tag parsing
Python
mit
jaraco/irc
+ from __future__ import print_function class Tag(object): """ An IRC message tag ircv3.net/specs/core/message-tags-3.2.html """ @staticmethod def parse(item): + r""" + >>> Tag.parse('x') == {'key': 'x', 'value': None} + True + + >>> Tag.parse('x=yes') == {'key': 'x', 'value': 'yes'} + True + + >>> Tag.parse('x=3')['value'] + '3' + + >>> Tag.parse('x=red fox\\:green eggs')['value'] + 'red fox;green eggs' + + >>> Tag.parse('x=red fox:green eggs')['value'] + 'red fox:green eggs' + + >>> print(Tag.parse('x=a\\nb\\nc')['value']) + a + b + c + """ key, sep, value = item.partition('=') value = value.replace('\\:', ';') value = value.replace('\\s', ' ') value = value.replace('\\n', '\n') value = value.replace('\\r', '\r') value = value.replace('\\\\', '\\') value = value or None return { 'key': key, 'value': value, }
Add tests for tag parsing
## Code Before: class Tag(object): """ An IRC message tag ircv3.net/specs/core/message-tags-3.2.html """ @staticmethod def parse(item): key, sep, value = item.partition('=') value = value.replace('\\:', ';') value = value.replace('\\s', ' ') value = value.replace('\\n', '\n') value = value.replace('\\r', '\r') value = value.replace('\\\\', '\\') value = value or None return { 'key': key, 'value': value, } ## Instruction: Add tests for tag parsing ## Code After: from __future__ import print_function class Tag(object): """ An IRC message tag ircv3.net/specs/core/message-tags-3.2.html """ @staticmethod def parse(item): r""" >>> Tag.parse('x') == {'key': 'x', 'value': None} True >>> Tag.parse('x=yes') == {'key': 'x', 'value': 'yes'} True >>> Tag.parse('x=3')['value'] '3' >>> Tag.parse('x=red fox\\:green eggs')['value'] 'red fox;green eggs' >>> Tag.parse('x=red fox:green eggs')['value'] 'red fox:green eggs' >>> print(Tag.parse('x=a\\nb\\nc')['value']) a b c """ key, sep, value = item.partition('=') value = value.replace('\\:', ';') value = value.replace('\\s', ' ') value = value.replace('\\n', '\n') value = value.replace('\\r', '\r') value = value.replace('\\\\', '\\') value = value or None return { 'key': key, 'value': value, }
# ... existing code ... from __future__ import print_function class Tag(object): # ... modified code ... @staticmethod def parse(item): r""" >>> Tag.parse('x') == {'key': 'x', 'value': None} True >>> Tag.parse('x=yes') == {'key': 'x', 'value': 'yes'} True >>> Tag.parse('x=3')['value'] '3' >>> Tag.parse('x=red fox\\:green eggs')['value'] 'red fox;green eggs' >>> Tag.parse('x=red fox:green eggs')['value'] 'red fox:green eggs' >>> print(Tag.parse('x=a\\nb\\nc')['value']) a b c """ key, sep, value = item.partition('=') value = value.replace('\\:', ';') # ... rest of the code ...
d4db750d2ff2e18c9fced49fffe7a3073880078b
InvenTree/common/apps.py
InvenTree/common/apps.py
from django.apps import AppConfig class CommonConfig(AppConfig): name = 'common' def ready(self): import logging from django.apps import AppConfig class CommonConfig(AppConfig): name = 'common' def ready(self): pass
import logging from django.apps import AppConfig logger = logging.getLogger('inventree') class CommonConfig(AppConfig): name = 'common' def ready(self): self.clear_restart_flag() def clear_restart_flag(self): """ Clear the SERVER_RESTART_REQUIRED setting """ try: import common.models if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED'): logger.info("Clearing SERVER_RESTART_REQUIRED flag") common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None) except: pass
Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads
Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads
Python
mit
SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree
+ + import logging from django.apps import AppConfig + + + logger = logging.getLogger('inventree') class CommonConfig(AppConfig): name = 'common' def ready(self): - pass + + self.clear_restart_flag() + def clear_restart_flag(self): + """ + Clear the SERVER_RESTART_REQUIRED setting + """ + + try: + import common.models + + if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED'): + logger.info("Clearing SERVER_RESTART_REQUIRED flag") + common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None) + except: + pass +
Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads
## Code Before: from django.apps import AppConfig class CommonConfig(AppConfig): name = 'common' def ready(self): pass ## Instruction: Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads ## Code After: import logging from django.apps import AppConfig logger = logging.getLogger('inventree') class CommonConfig(AppConfig): name = 'common' def ready(self): self.clear_restart_flag() def clear_restart_flag(self): """ Clear the SERVER_RESTART_REQUIRED setting """ try: import common.models if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED'): logger.info("Clearing SERVER_RESTART_REQUIRED flag") common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None) except: pass
// ... existing code ... import logging from django.apps import AppConfig logger = logging.getLogger('inventree') // ... modified code ... def ready(self): self.clear_restart_flag() def clear_restart_flag(self): """ Clear the SERVER_RESTART_REQUIRED setting """ try: import common.models if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED'): logger.info("Clearing SERVER_RESTART_REQUIRED flag") common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None) except: pass // ... rest of the code ...
b2bc9a893a8b7fea59759e74be6235b890a1ff96
keybaseproofbot/models.py
keybaseproofbot/models.py
from sqlalchemy import Column, BigInteger, String from keybaseproofbot.database import Base class Proof(Base): __tablename__ = 'proofs' user_id = Column(Integer, primary_key=True) keybase_username = Column(String) telegram_username = Column(String) chat_id = Column(Integer) message_id = Column(Integer) proof_object = Column(String) signed_block = Column(String) def __init__(self, user_id, keybase_username, telegram_username, chat_id, message_id, proof_object, signed_block): self.user_id = user_id self.keybase_username = keybase_username self.telegram_username = telegram_username self.chat_id = chat_id self.message_id = message_id self.proof_object = proof_object self.signed_block = signed_block
from sqlalchemy import Column, BigInteger, String from keybaseproofbot.database import Base class Proof(Base): __tablename__ = 'proofs' user_id = Column(BigInteger, primary_key=True) keybase_username = Column(String) telegram_username = Column(String) chat_id = Column(BigInteger) message_id = Column(BigInteger) proof_object = Column(String) signed_block = Column(String) def __init__(self, user_id, keybase_username, telegram_username, chat_id, message_id, proof_object, signed_block): self.user_id = user_id self.keybase_username = keybase_username self.telegram_username = telegram_username self.chat_id = chat_id self.message_id = message_id self.proof_object = proof_object self.signed_block = signed_block
Make ids BigInteger for postgres
Make ids BigInteger for postgres
Python
mit
pingiun/keybaseproofbot
- from sqlalchemy import Column, Integer, String + from sqlalchemy import Column, BigInteger, String from keybaseproofbot.database import Base class Proof(Base): __tablename__ = 'proofs' - user_id = Column(Integer, primary_key=True) + user_id = Column(BigInteger, primary_key=True) keybase_username = Column(String) telegram_username = Column(String) - chat_id = Column(Integer) + chat_id = Column(BigInteger) - message_id = Column(Integer) + message_id = Column(BigInteger) proof_object = Column(String) signed_block = Column(String) def __init__(self, user_id, keybase_username, telegram_username, chat_id, message_id, proof_object, signed_block): self.user_id = user_id self.keybase_username = keybase_username self.telegram_username = telegram_username self.chat_id = chat_id self.message_id = message_id self.proof_object = proof_object self.signed_block = signed_block
Make ids BigInteger for postgres
## Code Before: from sqlalchemy import Column, Integer, String from keybaseproofbot.database import Base class Proof(Base): __tablename__ = 'proofs' user_id = Column(Integer, primary_key=True) keybase_username = Column(String) telegram_username = Column(String) chat_id = Column(Integer) message_id = Column(Integer) proof_object = Column(String) signed_block = Column(String) def __init__(self, user_id, keybase_username, telegram_username, chat_id, message_id, proof_object, signed_block): self.user_id = user_id self.keybase_username = keybase_username self.telegram_username = telegram_username self.chat_id = chat_id self.message_id = message_id self.proof_object = proof_object self.signed_block = signed_block ## Instruction: Make ids BigInteger for postgres ## Code After: from sqlalchemy import Column, BigInteger, String from keybaseproofbot.database import Base class Proof(Base): __tablename__ = 'proofs' user_id = Column(BigInteger, primary_key=True) keybase_username = Column(String) telegram_username = Column(String) chat_id = Column(BigInteger) message_id = Column(BigInteger) proof_object = Column(String) signed_block = Column(String) def __init__(self, user_id, keybase_username, telegram_username, chat_id, message_id, proof_object, signed_block): self.user_id = user_id self.keybase_username = keybase_username self.telegram_username = telegram_username self.chat_id = chat_id self.message_id = message_id self.proof_object = proof_object self.signed_block = signed_block
... from sqlalchemy import Column, BigInteger, String from keybaseproofbot.database import Base ... class Proof(Base): __tablename__ = 'proofs' user_id = Column(BigInteger, primary_key=True) keybase_username = Column(String) telegram_username = Column(String) chat_id = Column(BigInteger) message_id = Column(BigInteger) proof_object = Column(String) signed_block = Column(String) ...
af3515c8354dd525c2889eda75bfbc5cb7e2ecbf
massa/errors.py
massa/errors.py
from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): return jsonify({'message': e.message}), 404 def invalid_input_handler(e): return jsonify({'message': e.message, 'details': e.details}), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if details: self.details = details def as_dict(self): data = {} if self.message: data['message'] = self.message if self.details: data['details'] = self.details return data class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.'
from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): return jsonify(e.as_dict()), 404 def invalid_input_handler(e): return jsonify(e.as_dict()), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if message: self.message = message if details: self.details = details def as_dict(self): data = {} if self.message: data['message'] = self.message if self.details: data['details'] = self.details return data class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.'
Add method to retrieve the DomainError as a dict.
Add method to retrieve the DomainError as a dict.
Python
mit
jaapverloop/massa
from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): - return jsonify({'message': e.message}), 404 + return jsonify(e.as_dict()), 404 def invalid_input_handler(e): - return jsonify({'message': e.message, 'details': e.details}), 400 + return jsonify(e.as_dict()), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if message: self.message = message if details: self.details = details + + def as_dict(self): + data = {} + if self.message: data['message'] = self.message + if self.details: data['details'] = self.details + return data class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.'
Add method to retrieve the DomainError as a dict.
## Code Before: from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): return jsonify({'message': e.message}), 404 def invalid_input_handler(e): return jsonify({'message': e.message, 'details': e.details}), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if message: self.message = message if details: self.details = details class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.' ## Instruction: Add method to retrieve the DomainError as a dict. ## Code After: from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): return jsonify(e.as_dict()), 404 def invalid_input_handler(e): return jsonify(e.as_dict()), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if message: self.message = message if details: self.details = details def as_dict(self): data = {} if self.message: data['message'] = self.message if self.details: data['details'] = self.details return data class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.'
... def entity_not_found_handler(e): return jsonify(e.as_dict()), 404 def invalid_input_handler(e): return jsonify(e.as_dict()), 400 ... if message: self.message = message if details: self.details = details def as_dict(self): data = {} if self.message: data['message'] = self.message if self.details: data['details'] = self.details return data ...
cd944a2606159c8ea11ffe8075ce4ec186fd799c
tests/basic_test.py
tests/basic_test.py
import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy self.assertIsNone(nxppy.read_mifare(), "Card UID is not None")
import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None")
Update tests to use class-based interface
Update tests to use class-based interface
Python
mit
AlterCodex/nxppy,Schoberm/nxppy,AlterCodex/nxppy,tuvaergun/nxppy,Schoberm/nxppy,tuvaergun/nxppy,Schoberm/nxppy,tuvaergun/nxppy,AlterCodex/nxppy
import unittest - from either_or import either_or + from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy + reader = nxppy.Mifare() + self.assertIsInstance(reader, nxppy.Mifare) - self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") + self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy + reader = nxppy.Mifare() + self.assertIsInstance(reader, nxppy.Mifare) - self.assertIsNone(nxppy.read_mifare(), "Card UID is not None") + self.assertIsNone(reader.select(), "Card UID is not None")
Update tests to use class-based interface
## Code Before: import unittest from either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy self.assertIsNone(nxppy.read_mifare(), "Card UID is not None") ## Instruction: Update tests to use class-based interface ## Code After: import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None")
... import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): ... """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') ... """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None") ...
7ad1d9afdbf8db2960ac6b402f4da3f1675cc86f
fileupload/models.py
fileupload/models.py
from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ picture_file = models.ImageField(upload_to="pictures") def __unicode__(self): return self.file.name
from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ file = models.ImageField(upload_to="pictures") def __unicode__(self): return self.file.name
Use the same name for the field in frontend and backend
Use the same name for the field in frontend and backend
Python
mit
sigurdga/django-dropzone-upload,sigurdga/django-dropzone-upload
from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ - picture_file = models.ImageField(upload_to="pictures") + file = models.ImageField(upload_to="pictures") def __unicode__(self): - return self.picture_file.name + return self.file.name
Use the same name for the field in frontend and backend
## Code Before: from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ picture_file = models.ImageField(upload_to="pictures") def __unicode__(self): return self.picture_file.name ## Instruction: Use the same name for the field in frontend and backend ## Code After: from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ file = models.ImageField(upload_to="pictures") def __unicode__(self): return self.file.name
... """ file = models.ImageField(upload_to="pictures") def __unicode__(self): return self.file.name ...
916b86865acf0297293e4a13f1da6838f9b2711f
scripts/lib/errors.py
scripts/lib/errors.py
""" Оповещение администратора о возникших ошибках """ from traceback import format_exception, format_exc from lib.config import emergency_id from lib.commands import vk, api class ErrorManager: """ Упрощенное оповещение об ошибках str name: название скрипта (обычно укороченное) Использование: with ErrorManager(name): main() """ try: yield except Exception as e: sendErrorMessage(name) raise e def sendErrorMessage(name, exception=None): """ Использует либо полученную ошибку, либо ту, что возникла последней """ exception = format_error(exception) message = "{}:\n{}".format(name, exception) vk(api.messages.send, user_id=emergency_id, message=message) def format_error(error): if error is not None: error_info = format_exception(type(error), error, error.__traceback__) return "".join(error_info) else: return format_exc()
""" Оповещение администратора о возникших ошибках """ from traceback import format_exception, format_exc from contextlib import contextmanager from lib.config import emergency_id from lib.commands import vk, api @contextmanager def ErrorManager(name): """ Упрощенное оповещение об ошибках str name: название скрипта (обычно укороченное) Использование: with ErrorManager(name): main() """ try: yield except Exception as e: sendErrorMessage(name) raise e def sendErrorMessage(name, exception=None): """ Использует либо полученную ошибку, либо ту, что возникла последней """ exception = format_error(exception) message = "{}:\n{}".format(name, exception) vk(api.messages.send, user_id=emergency_id, message=message) def format_error(error): if error is not None: error_info = format_exception(type(error), error, error.__traceback__) return "".join(error_info) else: return format_exc()
Change error class to function
Change error class to function
Python
mit
Varabe/Guild-Manager
""" Оповещение администратора о возникших ошибках """ from traceback import format_exception, format_exc + from contextlib import contextmanager from lib.config import emergency_id from lib.commands import vk, api - class ErrorManager: + @contextmanager + def ErrorManager(name): """ Упрощенное оповещение об ошибках - str name: название скрипта (обычно укороченное) + str name: название скрипта (обычно укороченное) - Использование: with ErrorManager(name): main() + Использование: with ErrorManager(name): main() """ + try: + yield + except Exception as e: - def __init__(self, name): - self.name = name - - def __enter__(self): - pass - - def __exit__(self, *args): - if args[0] is not None: - sendErrorMessage(self.name) + sendErrorMessage(name) + raise e def sendErrorMessage(name, exception=None): """ Использует либо полученную ошибку, либо ту, что возникла последней """ exception = format_error(exception) message = "{}:\n{}".format(name, exception) vk(api.messages.send, user_id=emergency_id, message=message) def format_error(error): if error is not None: error_info = format_exception(type(error), error, error.__traceback__) return "".join(error_info) else: return format_exc()
Change error class to function
## Code Before: """ Оповещение администратора о возникших ошибках """ from traceback import format_exception, format_exc from lib.config import emergency_id from lib.commands import vk, api class ErrorManager: """ Упрощенное оповещение об ошибках str name: название скрипта (обычно укороченное) Использование: with ErrorManager(name): main() """ def __init__(self, name): self.name = name def __enter__(self): pass def __exit__(self, *args): if args[0] is not None: sendErrorMessage(self.name) def sendErrorMessage(name, exception=None): """ Использует либо полученную ошибку, либо ту, что возникла последней """ exception = format_error(exception) message = "{}:\n{}".format(name, exception) vk(api.messages.send, user_id=emergency_id, message=message) def format_error(error): if error is not None: error_info = format_exception(type(error), error, error.__traceback__) return "".join(error_info) else: return format_exc() ## Instruction: Change error class to function ## Code After: """ Оповещение администратора о возникших ошибках """ from traceback import format_exception, format_exc from contextlib import contextmanager from lib.config import emergency_id from lib.commands import vk, api @contextmanager def ErrorManager(name): """ Упрощенное оповещение об ошибках str name: название скрипта (обычно укороченное) Использование: with ErrorManager(name): main() """ try: yield except Exception as e: sendErrorMessage(name) raise e def sendErrorMessage(name, exception=None): """ Использует либо полученную ошибку, либо ту, что возникла последней """ exception = format_error(exception) message = "{}:\n{}".format(name, exception) vk(api.messages.send, user_id=emergency_id, message=message) def format_error(error): if error is not None: error_info = format_exception(type(error), error, error.__traceback__) return "".join(error_info) else: return format_exc()
// ... existing code ... from traceback import format_exception, format_exc from contextlib import contextmanager from lib.config import emergency_id from lib.commands import vk, api // ... modified code ... @contextmanager def ErrorManager(name): """ Упрощенное оповещение об ошибках str name: название скрипта (обычно укороченное) Использование: with ErrorManager(name): main() """ try: yield except Exception as e: sendErrorMessage(name) raise e // ... rest of the code ...
f1e5e2cc7fd35e0446f105d619dc01d3ba837865
byceps/blueprints/admin/party/forms.py
byceps/blueprints/admin/party/forms.py
from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class UpdateForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) archived = BooleanField('archiviert') class CreateForm(_BaseForm): id = StringField('ID', validators=[Length(min=1, max=40)]) class UpdateForm(_BaseForm): archived = BooleanField('archiviert')
from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class _BaseForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) class CreateForm(_BaseForm): id = StringField('ID', validators=[Length(min=1, max=40)]) class UpdateForm(_BaseForm): archived = BooleanField('archiviert')
Introduce base party form, limit `archived` flag to update form
Introduce base party form, limit `archived` flag to update form
Python
bsd-3-clause
m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps
from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm - class UpdateForm(LocalizedForm): + class _BaseForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) + + + class CreateForm(_BaseForm): + id = StringField('ID', validators=[Length(min=1, max=40)]) + + + class UpdateForm(_BaseForm): archived = BooleanField('archiviert') - - class CreateForm(UpdateForm): - id = StringField('ID', validators=[Length(min=1, max=40)]) -
Introduce base party form, limit `archived` flag to update form
## Code Before: from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class UpdateForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) archived = BooleanField('archiviert') class CreateForm(UpdateForm): id = StringField('ID', validators=[Length(min=1, max=40)]) ## Instruction: Introduce base party form, limit `archived` flag to update form ## Code After: from wtforms import BooleanField, DateTimeField, IntegerField, StringField from wtforms.validators import InputRequired, Length, Optional from ....util.l10n import LocalizedForm class _BaseForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) ends_at = DateTimeField('Ende', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) class CreateForm(_BaseForm): id = StringField('ID', validators=[Length(min=1, max=40)]) class UpdateForm(_BaseForm): archived = BooleanField('archiviert')
// ... existing code ... class _BaseForm(LocalizedForm): title = StringField('Titel', validators=[Length(min=1, max=40)]) starts_at = DateTimeField('Beginn', format='%d.%m.%Y %H:%M', validators=[InputRequired()]) // ... modified code ... max_ticket_quantity = IntegerField('Maximale Anzahl Tickets', validators=[Optional()]) shop_id = StringField('Shop-ID', validators=[Optional()]) class CreateForm(_BaseForm): id = StringField('ID', validators=[Length(min=1, max=40)]) class UpdateForm(_BaseForm): archived = BooleanField('archiviert') // ... rest of the code ...
abd0a6854c90c3647d17dfb3ea980fa49aa5372f
pwndbg/commands/segments.py
pwndbg/commands/segments.py
from __future__ import print_function import gdb import pwndbg.regs class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('gsbase') @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def fsbase(): """ Prints out the FS base address. See also $fsbase. """ print(hex(pwndbg.regs.fsbase)) @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def gsbase(): """ Prints out the GS base address. See also $gsbase. """ print(hex(pwndbg.regs.gsbase))
from __future__ import print_function import gdb import pwndbg.regs import pwndbg.commands class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('fsbase') segment('gsbase') @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def fsbase(): """ Prints out the FS base address. See also $fsbase. """ print(hex(pwndbg.regs.fsbase)) @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def gsbase(): """ Prints out the GS base address. See also $gsbase. """ print(hex(pwndbg.regs.gsbase))
Add fsbase and gsbase commands
Add fsbase and gsbase commands
Python
mit
cebrusfs/217gdb,anthraxx/pwndbg,chubbymaggie/pwndbg,anthraxx/pwndbg,disconnect3d/pwndbg,0xddaa/pwndbg,0xddaa/pwndbg,cebrusfs/217gdb,zachriggle/pwndbg,disconnect3d/pwndbg,pwndbg/pwndbg,disconnect3d/pwndbg,anthraxx/pwndbg,cebrusfs/217gdb,zachriggle/pwndbg,pwndbg/pwndbg,pwndbg/pwndbg,anthraxx/pwndbg,chubbymaggie/pwndbg,cebrusfs/217gdb,pwndbg/pwndbg,0xddaa/pwndbg
from __future__ import print_function import gdb import pwndbg.regs + import pwndbg.commands class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('fsbase') segment('gsbase') + @pwndbg.commands.OnlyWhenRunning + @pwndbg.commands.ParsedCommand + def fsbase(): + """ + Prints out the FS base address. See also $fsbase. + """ + print(hex(pwndbg.regs.fsbase)) + + + @pwndbg.commands.OnlyWhenRunning + @pwndbg.commands.ParsedCommand + def gsbase(): + """ + Prints out the GS base address. See also $gsbase. + """ + print(hex(pwndbg.regs.gsbase)) +
Add fsbase and gsbase commands
## Code Before: from __future__ import print_function import gdb import pwndbg.regs class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('fsbase') segment('gsbase') ## Instruction: Add fsbase and gsbase commands ## Code After: from __future__ import print_function import gdb import pwndbg.regs import pwndbg.commands class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('fsbase') segment('gsbase') @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def fsbase(): """ Prints out the FS base address. See also $fsbase. """ print(hex(pwndbg.regs.fsbase)) @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def gsbase(): """ Prints out the GS base address. See also $gsbase. """ print(hex(pwndbg.regs.gsbase))
# ... existing code ... import gdb import pwndbg.regs import pwndbg.commands class segment(gdb.Function): # ... modified code ... segment('fsbase') segment('gsbase') @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def fsbase(): """ Prints out the FS base address. See also $fsbase. """ print(hex(pwndbg.regs.fsbase)) @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def gsbase(): """ Prints out the GS base address. See also $gsbase. """ print(hex(pwndbg.regs.gsbase)) # ... rest of the code ...
e68cb906810a26d93e0d15e0357a75a2b49d8784
boundary/plugin_get_components.py
boundary/plugin_get_components.py
from boundary import ApiCli class PluginGetComponents (ApiCli): def __init__(self): ApiCli.__init__(self) self.method = "GET" self.path = "v1/plugins" self.pluginName = None def addArguments(self): ApiCli.addArguments(self) self.parser.add_argument('-n', '--plugin-Name', dest='pluginName',action='store',required=True,help='Plugin name') def getArguments(self): ''' Extracts the specific arguments of this CLI ''' ApiCli.getArguments(self) if self.args.pluginName != None: self.pluginName = self.args.pluginName self.path = "v1/plugins/{0}/components".format(self.pluginName) def getDescription(self): return "Get the components of a plugin in a Boundary account"
from boundary import ApiCli class PluginGetComponents(ApiCli): def __init__(self): ApiCli.__init__(self) self.method = "GET" self.path = "v1/plugins" self.pluginName = None def addArguments(self): ApiCli.addArguments(self) self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name', required=True, help='Plugin name') def getArguments(self): """ Extracts the specific arguments of this CLI """ ApiCli.getArguments(self) if self.args.pluginName is not None: self.pluginName = self.args.pluginName self.path = "v1/plugins/{0}/components".format(self.pluginName) def getDescription(self): return "Get the components of a plugin in a Boundary account"
Reformat code to PEP-8 standards
Reformat code to PEP-8 standards
Python
apache-2.0
jdgwartney/boundary-api-cli,boundary/pulse-api-cli,wcainboundary/boundary-api-cli,jdgwartney/pulse-api-cli,jdgwartney/boundary-api-cli,boundary/boundary-api-cli,boundary/pulse-api-cli,boundary/boundary-api-cli,wcainboundary/boundary-api-cli,jdgwartney/pulse-api-cli
from boundary import ApiCli + - class PluginGetComponents (ApiCli): + class PluginGetComponents(ApiCli): - def __init__(self): ApiCli.__init__(self) self.method = "GET" - self.path="v1/plugins" + self.path = "v1/plugins" self.pluginName = None - + def addArguments(self): ApiCli.addArguments(self) - self.parser.add_argument('-n', '--plugin-Name', dest='pluginName',action='store',required=True,help='Plugin name') + self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name', - + required=True, help='Plugin name') + def getArguments(self): - ''' + """ Extracts the specific arguments of this CLI - ''' + """ ApiCli.getArguments(self) - if self.args.pluginName != None: + if self.args.pluginName is not None: self.pluginName = self.args.pluginName - + self.path = "v1/plugins/{0}/components".format(self.pluginName) - + def getDescription(self): return "Get the components of a plugin in a Boundary account" - +
Reformat code to PEP-8 standards
## Code Before: from boundary import ApiCli class PluginGetComponents (ApiCli): def __init__(self): ApiCli.__init__(self) self.method = "GET" self.path="v1/plugins" self.pluginName = None def addArguments(self): ApiCli.addArguments(self) self.parser.add_argument('-n', '--plugin-Name', dest='pluginName',action='store',required=True,help='Plugin name') def getArguments(self): ''' Extracts the specific arguments of this CLI ''' ApiCli.getArguments(self) if self.args.pluginName != None: self.pluginName = self.args.pluginName self.path = "v1/plugins/{0}/components".format(self.pluginName) def getDescription(self): return "Get the components of a plugin in a Boundary account" ## Instruction: Reformat code to PEP-8 standards ## Code After: from boundary import ApiCli class PluginGetComponents(ApiCli): def __init__(self): ApiCli.__init__(self) self.method = "GET" self.path = "v1/plugins" self.pluginName = None def addArguments(self): ApiCli.addArguments(self) self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name', required=True, help='Plugin name') def getArguments(self): """ Extracts the specific arguments of this CLI """ ApiCli.getArguments(self) if self.args.pluginName is not None: self.pluginName = self.args.pluginName self.path = "v1/plugins/{0}/components".format(self.pluginName) def getDescription(self): return "Get the components of a plugin in a Boundary account"
# ... existing code ... from boundary import ApiCli class PluginGetComponents(ApiCli): def __init__(self): ApiCli.__init__(self) self.method = "GET" self.path = "v1/plugins" self.pluginName = None def addArguments(self): ApiCli.addArguments(self) self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name', required=True, help='Plugin name') def getArguments(self): """ Extracts the specific arguments of this CLI """ ApiCli.getArguments(self) if self.args.pluginName is not None: self.pluginName = self.args.pluginName self.path = "v1/plugins/{0}/components".format(self.pluginName) def getDescription(self): return "Get the components of a plugin in a Boundary account" # ... rest of the code ...
aa4a032016944f581ad7485ebdf8c39108511098
commandbased/commandbasedrobot.py
commandbased/commandbasedrobot.py
import hal from wpilib.timedrobot import TimedRobot from wpilib.command.scheduler import Scheduler from wpilib.livewindow import LiveWindow class CommandBasedRobot(TimedRobot): ''' The base class for a Command-Based Robot. To use, instantiate commands and trigger them. ''' def startCompetition(self): """Initalizes the scheduler before starting robotInit()""" self.scheduler = Scheduler.getInstance() super().startCompetition() def commandPeriodic(self): ''' Run the scheduler regularly. If an error occurs during a competition, prevent it from crashing the program. ''' try: self.scheduler.run() except Exception as error: if not self.ds.isFMSAttached(): raise '''Just to be safe, stop all running commands.''' self.scheduler.removeAll() self.handleCrash(error) autonomousPeriodic = commandPeriodic teleopPeriodic = commandPeriodic disabledPeriodic = commandPeriodic def testPeriodic(self): ''' Test mode will not run normal commands, but motors can be controlled and sensors viewed with the SmartDashboard. ''' LiveWindow.run() def handleCrash(self, error): ''' Called if an exception is raised in the Scheduler during a competition. Writes an error message to the driver station by default. If you want more complex behavior, override this method in your robot class. ''' self.ds.reportError(str(error), printTrace=True)
from wpilib import TimedRobot from wpilib.command import Scheduler class CommandBasedRobot(TimedRobot): ''' The base class for a Command-Based Robot. To use, instantiate commands and trigger them. ''' def startCompetition(self): """Initalizes the scheduler before starting robotInit()""" self.scheduler = Scheduler.getInstance() super().startCompetition() def commandPeriodic(self): ''' Run the scheduler regularly. If an error occurs during a competition, prevent it from crashing the program. ''' try: self.scheduler.run() except Exception as error: if not self.ds.isFMSAttached(): raise '''Just to be safe, stop all running commands.''' self.scheduler.removeAll() self.handleCrash(error) autonomousPeriodic = commandPeriodic teleopPeriodic = commandPeriodic disabledPeriodic = commandPeriodic # testPeriodic deliberately omitted def handleCrash(self, error): ''' Called if an exception is raised in the Scheduler during a competition. Writes an error message to the driver station by default. If you want more complex behavior, override this method in your robot class. ''' self.ds.reportError(str(error), printTrace=True)
Remove LiveWindow call from CommandBasedRobot
Remove LiveWindow call from CommandBasedRobot LiveWindow is automatically updated regardless of mode as part of 2018 WPILib IterativeRobot changes, so calling LiveWindow.run() manually is unnecessary.
Python
bsd-3-clause
robotpy/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities
- import hal - - from wpilib.timedrobot import TimedRobot + from wpilib import TimedRobot - from wpilib.command.scheduler import Scheduler + from wpilib.command import Scheduler - from wpilib.livewindow import LiveWindow class CommandBasedRobot(TimedRobot): ''' The base class for a Command-Based Robot. To use, instantiate commands and trigger them. ''' def startCompetition(self): """Initalizes the scheduler before starting robotInit()""" self.scheduler = Scheduler.getInstance() super().startCompetition() - def commandPeriodic(self): ''' Run the scheduler regularly. If an error occurs during a competition, prevent it from crashing the program. ''' try: self.scheduler.run() except Exception as error: if not self.ds.isFMSAttached(): raise '''Just to be safe, stop all running commands.''' self.scheduler.removeAll() self.handleCrash(error) - autonomousPeriodic = commandPeriodic teleopPeriodic = commandPeriodic disabledPeriodic = commandPeriodic + # testPeriodic deliberately omitted - - - def testPeriodic(self): - ''' - Test mode will not run normal commands, but motors can be controlled - and sensors viewed with the SmartDashboard. - ''' - - LiveWindow.run() - def handleCrash(self, error): ''' Called if an exception is raised in the Scheduler during a competition. Writes an error message to the driver station by default. If you want more complex behavior, override this method in your robot class. ''' self.ds.reportError(str(error), printTrace=True)
Remove LiveWindow call from CommandBasedRobot
## Code Before: import hal from wpilib.timedrobot import TimedRobot from wpilib.command.scheduler import Scheduler from wpilib.livewindow import LiveWindow class CommandBasedRobot(TimedRobot): ''' The base class for a Command-Based Robot. To use, instantiate commands and trigger them. ''' def startCompetition(self): """Initalizes the scheduler before starting robotInit()""" self.scheduler = Scheduler.getInstance() super().startCompetition() def commandPeriodic(self): ''' Run the scheduler regularly. If an error occurs during a competition, prevent it from crashing the program. ''' try: self.scheduler.run() except Exception as error: if not self.ds.isFMSAttached(): raise '''Just to be safe, stop all running commands.''' self.scheduler.removeAll() self.handleCrash(error) autonomousPeriodic = commandPeriodic teleopPeriodic = commandPeriodic disabledPeriodic = commandPeriodic def testPeriodic(self): ''' Test mode will not run normal commands, but motors can be controlled and sensors viewed with the SmartDashboard. ''' LiveWindow.run() def handleCrash(self, error): ''' Called if an exception is raised in the Scheduler during a competition. Writes an error message to the driver station by default. If you want more complex behavior, override this method in your robot class. ''' self.ds.reportError(str(error), printTrace=True) ## Instruction: Remove LiveWindow call from CommandBasedRobot ## Code After: from wpilib import TimedRobot from wpilib.command import Scheduler class CommandBasedRobot(TimedRobot): ''' The base class for a Command-Based Robot. To use, instantiate commands and trigger them. ''' def startCompetition(self): """Initalizes the scheduler before starting robotInit()""" self.scheduler = Scheduler.getInstance() super().startCompetition() def commandPeriodic(self): ''' Run the scheduler regularly. If an error occurs during a competition, prevent it from crashing the program. ''' try: self.scheduler.run() except Exception as error: if not self.ds.isFMSAttached(): raise '''Just to be safe, stop all running commands.''' self.scheduler.removeAll() self.handleCrash(error) autonomousPeriodic = commandPeriodic teleopPeriodic = commandPeriodic disabledPeriodic = commandPeriodic # testPeriodic deliberately omitted def handleCrash(self, error): ''' Called if an exception is raised in the Scheduler during a competition. Writes an error message to the driver station by default. If you want more complex behavior, override this method in your robot class. ''' self.ds.reportError(str(error), printTrace=True)
... from wpilib import TimedRobot from wpilib.command import Scheduler ... self.scheduler = Scheduler.getInstance() super().startCompetition() def commandPeriodic(self): ... self.handleCrash(error) autonomousPeriodic = commandPeriodic teleopPeriodic = commandPeriodic disabledPeriodic = commandPeriodic # testPeriodic deliberately omitted def handleCrash(self, error): ...
bcf4c5e632ae3ee678ac10e93887b14c63d4eb4a
examples/plain_actor.py
examples/plain_actor.py
import pykka GetMessages = object() class PlainActor(pykka.ThreadingActor): def __init__(self): super().__init__() self.stored_messages = [] def on_receive(self, message): if message.get('command') == 'get_messages': return self.stored_messages else: self.stored_messages.append(message) if __name__ == '__main__': actor = PlainActor.start() actor.tell({'no': 'Norway', 'se': 'Sweden'}) actor.tell({'a': 3, 'b': 4, 'c': 5}) print(actor.ask({'command': 'get_messages'})) actor.stop()
import pykka GetMessages = object() class PlainActor(pykka.ThreadingActor): def __init__(self): super().__init__() self.stored_messages = [] def on_receive(self, message): if message is GetMessages: return self.stored_messages else: self.stored_messages.append(message) if __name__ == '__main__': actor = PlainActor.start() actor.tell({'no': 'Norway', 'se': 'Sweden'}) actor.tell({'a': 3, 'b': 4, 'c': 5}) print(actor.ask(GetMessages)) actor.stop()
Use custom message instead of dict
examples: Use custom message instead of dict
Python
apache-2.0
jodal/pykka
import pykka + + + GetMessages = object() class PlainActor(pykka.ThreadingActor): def __init__(self): super().__init__() self.stored_messages = [] def on_receive(self, message): - if message.get('command') == 'get_messages': + if message is GetMessages: return self.stored_messages else: self.stored_messages.append(message) if __name__ == '__main__': actor = PlainActor.start() actor.tell({'no': 'Norway', 'se': 'Sweden'}) actor.tell({'a': 3, 'b': 4, 'c': 5}) - print(actor.ask({'command': 'get_messages'})) + print(actor.ask(GetMessages)) actor.stop()
Use custom message instead of dict
## Code Before: import pykka class PlainActor(pykka.ThreadingActor): def __init__(self): super().__init__() self.stored_messages = [] def on_receive(self, message): if message.get('command') == 'get_messages': return self.stored_messages else: self.stored_messages.append(message) if __name__ == '__main__': actor = PlainActor.start() actor.tell({'no': 'Norway', 'se': 'Sweden'}) actor.tell({'a': 3, 'b': 4, 'c': 5}) print(actor.ask({'command': 'get_messages'})) actor.stop() ## Instruction: Use custom message instead of dict ## Code After: import pykka GetMessages = object() class PlainActor(pykka.ThreadingActor): def __init__(self): super().__init__() self.stored_messages = [] def on_receive(self, message): if message is GetMessages: return self.stored_messages else: self.stored_messages.append(message) if __name__ == '__main__': actor = PlainActor.start() actor.tell({'no': 'Norway', 'se': 'Sweden'}) actor.tell({'a': 3, 'b': 4, 'c': 5}) print(actor.ask(GetMessages)) actor.stop()
... import pykka GetMessages = object() ... def on_receive(self, message): if message is GetMessages: return self.stored_messages else: ... actor.tell({'no': 'Norway', 'se': 'Sweden'}) actor.tell({'a': 3, 'b': 4, 'c': 5}) print(actor.ask(GetMessages)) actor.stop() ...
3a414d5d4763802bc4bc506a57c1f487655d470a
engineering_project/estimatedtime.py
engineering_project/estimatedtime.py
import statistics class estimatedtime: def __init__(self, numberofpoints): self.listoftimes = [] self.points = numberofpoints def append(self, timeinseconds, inferprogress=True): # print(timeinseconds) self.listoftimes.append(timeinseconds) if inferprogress is True: self.points -= 1 def ETC(self): return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points)))
import statistics class ETC: ''' Estimated Time to Completion ''' def __init__(self, numberofpoints): self.listoftimes = [] self.points = numberofpoints + 1 def append(self, timeinseconds, inferprogress=True): # print(timeinseconds) self.listoftimes.append(timeinseconds) if inferprogress is True: self.points -= 1 def ETC(self): return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points)))
Change estimated time class to ETC
Change estimated time class to ETC
Python
mit
DavidLutton/EngineeringProject
import statistics - class estimatedtime: + class ETC: + ''' Estimated Time to Completion ''' def __init__(self, numberofpoints): self.listoftimes = [] - self.points = numberofpoints + self.points = numberofpoints + 1 def append(self, timeinseconds, inferprogress=True): # print(timeinseconds) self.listoftimes.append(timeinseconds) if inferprogress is True: self.points -= 1 - def ETA(self): + def ETC(self): return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points)))
Change estimated time class to ETC
## Code Before: import statistics class estimatedtime: def __init__(self, numberofpoints): self.listoftimes = [] self.points = numberofpoints def append(self, timeinseconds, inferprogress=True): # print(timeinseconds) self.listoftimes.append(timeinseconds) if inferprogress is True: self.points -= 1 def ETA(self): return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points))) ## Instruction: Change estimated time class to ETC ## Code After: import statistics class ETC: ''' Estimated Time to Completion ''' def __init__(self, numberofpoints): self.listoftimes = [] self.points = numberofpoints + 1 def append(self, timeinseconds, inferprogress=True): # print(timeinseconds) self.listoftimes.append(timeinseconds) if inferprogress is True: self.points -= 1 def ETC(self): return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points)))
... class ETC: ''' Estimated Time to Completion ''' def __init__(self, numberofpoints): self.listoftimes = [] self.points = numberofpoints + 1 def append(self, timeinseconds, inferprogress=True): ... self.points -= 1 def ETC(self): return("{0:.5f}".format((statistics.mean(self.listoftimes) * self.points))) ...
bd4812a1ef93c51bedbc92e8064b3457b5d88992
tests/test_slice.py
tests/test_slice.py
import pytest import numpy as np from parameters import T_VALUES, KPT @pytest.mark.parametrize('slice_idx', [(0, 1), [1, 0], (0, ), (1, )]) def test_slice(t, get_model, slice_idx): """Check the slicing method.""" model = get_model(*t) model_sliced = model.slice_orbitals(slice_idx) assert np.isclose([model.pos[i] for i in slice_idx], model_sliced.pos).all() for k in KPT: assert np.isclose(model.hamilton(k)[np.ix_(slice_idx, slice_idx)], model_sliced.hamilton(k)).all()
"""Tests for the model slicing functionality.""" import pytest import numpy as np from parameters import T_VALUES, KPT @pytest.mark.parametrize('slice_idx', [(0, 1), [1, 0], (0, ), (1, )]) @pytest.mark.parametrize('t', T_VALUES) def test_slice(t, get_model, slice_idx): """Check the slicing method.""" model = get_model(*t) model_sliced = model.slice_orbitals(slice_idx) assert np.isclose([model.pos[i] for i in slice_idx], model_sliced.pos).all() for k in KPT: assert np.isclose(model.hamilton(k)[np.ix_(slice_idx, slice_idx)], model_sliced.hamilton(k)).all()
Fix pre-commit for slicing method.
Fix pre-commit for slicing method.
Python
apache-2.0
Z2PackDev/TBmodels,Z2PackDev/TBmodels
+ """Tests for the model slicing functionality.""" import pytest import numpy as np from parameters import T_VALUES, KPT @pytest.mark.parametrize('slice_idx', [(0, 1), [1, 0], (0, ), (1, )]) @pytest.mark.parametrize('t', T_VALUES) def test_slice(t, get_model, slice_idx): + """Check the slicing method.""" - m1 = get_model(*t) + model = get_model(*t) - m2 = m1.slice_orbitals(slice_idx) + model_sliced = model.slice_orbitals(slice_idx) - assert np.isclose([m1.pos[i] for i in slice_idx], m2.pos).all() + assert np.isclose([model.pos[i] for i in slice_idx], model_sliced.pos).all() for k in KPT: - assert np.isclose(m1.hamilton(k)[np.ix_(slice_idx, slice_idx)], m2.hamilton(k)).all() + assert np.isclose(model.hamilton(k)[np.ix_(slice_idx, slice_idx)], model_sliced.hamilton(k)).all()
Fix pre-commit for slicing method.
## Code Before: import pytest import numpy as np from parameters import T_VALUES, KPT @pytest.mark.parametrize('slice_idx', [(0, 1), [1, 0], (0, ), (1, )]) @pytest.mark.parametrize('t', T_VALUES) def test_slice(t, get_model, slice_idx): m1 = get_model(*t) m2 = m1.slice_orbitals(slice_idx) assert np.isclose([m1.pos[i] for i in slice_idx], m2.pos).all() for k in KPT: assert np.isclose(m1.hamilton(k)[np.ix_(slice_idx, slice_idx)], m2.hamilton(k)).all() ## Instruction: Fix pre-commit for slicing method. ## Code After: """Tests for the model slicing functionality.""" import pytest import numpy as np from parameters import T_VALUES, KPT @pytest.mark.parametrize('slice_idx', [(0, 1), [1, 0], (0, ), (1, )]) @pytest.mark.parametrize('t', T_VALUES) def test_slice(t, get_model, slice_idx): """Check the slicing method.""" model = get_model(*t) model_sliced = model.slice_orbitals(slice_idx) assert np.isclose([model.pos[i] for i in slice_idx], model_sliced.pos).all() for k in KPT: assert np.isclose(model.hamilton(k)[np.ix_(slice_idx, slice_idx)], model_sliced.hamilton(k)).all()
... """Tests for the model slicing functionality.""" import pytest ... @pytest.mark.parametrize('t', T_VALUES) def test_slice(t, get_model, slice_idx): """Check the slicing method.""" model = get_model(*t) model_sliced = model.slice_orbitals(slice_idx) assert np.isclose([model.pos[i] for i in slice_idx], model_sliced.pos).all() for k in KPT: assert np.isclose(model.hamilton(k)[np.ix_(slice_idx, slice_idx)], model_sliced.hamilton(k)).all() ...
97c26c367c2c4597842356e677064a012ea19cb6
events/forms.py
events/forms.py
from events.models import Event, City from django.forms.util import ErrorList from datetime import datetime class EventForm(forms.ModelForm): city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville") class Meta: model = Event exclude = ('submission_time', 'updated_time', 'decision_time', 'moderator', 'moderated', 'latitude', 'longitude')
from django import forms from events.models import Event, City from django.forms.util import ErrorList from datetime import datetime class EventForm(forms.ModelForm): city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville") class Meta: model = Event exclude = ('submission_time', 'updated_time', 'decision_time', 'moderator', 'moderated', 'latitude', 'longitude') def clean(self): cleaned_data = self.cleaned_data start_time = cleaned_data.get("start_time") end_time = cleaned_data.get("end_time") if start_time >= end_time: msg = u"L'évènement ne peut se terminer avant son début" self._errors["start_time"] = ErrorList([msg]) self._errors["end_time"] = ErrorList([msg]) del cleaned_data["start_time"] del cleaned_data["end_time"] elif start_time < datetime.today(): msg = u"Seul les évènements à venir sont acceptés" self._errors["start_time"] = ErrorList([msg]) del cleaned_data["start_time"] return cleaned_data
Validate entered dates in Event form
Validate entered dates in Event form
Python
agpl-3.0
vcorreze/agendaEteAccoord,mlhamel/agendadulibre,vcorreze/agendaEteAccoord,mlhamel/agendadulibre,mlhamel/agendadulibre,vcorreze/agendaEteAccoord
from django import forms from events.models import Event, City + from django.forms.util import ErrorList + from datetime import datetime class EventForm(forms.ModelForm): - city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville") + city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville") - class Meta: + class Meta: - model = Event + model = Event - exclude = ('submission_time', 'updated_time', 'decision_time', + exclude = ('submission_time', 'updated_time', 'decision_time', - 'moderator', 'moderated', 'latitude', 'longitude') + 'moderator', 'moderated', 'latitude', 'longitude') + def clean(self): + cleaned_data = self.cleaned_data + start_time = cleaned_data.get("start_time") + end_time = cleaned_data.get("end_time") + if start_time >= end_time: + msg = u"L'évènement ne peut se terminer avant son début" + self._errors["start_time"] = ErrorList([msg]) + self._errors["end_time"] = ErrorList([msg]) + + del cleaned_data["start_time"] + del cleaned_data["end_time"] + + elif start_time < datetime.today(): + msg = u"Seul les évènements à venir sont acceptés" + self._errors["start_time"] = ErrorList([msg]) + + del cleaned_data["start_time"] + + return cleaned_data +
Validate entered dates in Event form
## Code Before: from django import forms from events.models import Event, City class EventForm(forms.ModelForm): city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville") class Meta: model = Event exclude = ('submission_time', 'updated_time', 'decision_time', 'moderator', 'moderated', 'latitude', 'longitude') ## Instruction: Validate entered dates in Event form ## Code After: from django import forms from events.models import Event, City from django.forms.util import ErrorList from datetime import datetime class EventForm(forms.ModelForm): city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville") class Meta: model = Event exclude = ('submission_time', 'updated_time', 'decision_time', 'moderator', 'moderated', 'latitude', 'longitude') def clean(self): cleaned_data = self.cleaned_data start_time = cleaned_data.get("start_time") end_time = cleaned_data.get("end_time") if start_time >= end_time: msg = u"L'évènement ne peut se terminer avant son début" self._errors["start_time"] = ErrorList([msg]) self._errors["end_time"] = ErrorList([msg]) del cleaned_data["start_time"] del cleaned_data["end_time"] elif start_time < datetime.today(): msg = u"Seul les évènements à venir sont acceptés" self._errors["start_time"] = ErrorList([msg]) del cleaned_data["start_time"] return cleaned_data
# ... existing code ... from django import forms from events.models import Event, City from django.forms.util import ErrorList from datetime import datetime class EventForm(forms.ModelForm): city = forms.ModelChoiceField(City.objects.all(), empty_label=None, label="Ville") class Meta: model = Event exclude = ('submission_time', 'updated_time', 'decision_time', 'moderator', 'moderated', 'latitude', 'longitude') def clean(self): cleaned_data = self.cleaned_data start_time = cleaned_data.get("start_time") end_time = cleaned_data.get("end_time") if start_time >= end_time: msg = u"L'évènement ne peut se terminer avant son début" self._errors["start_time"] = ErrorList([msg]) self._errors["end_time"] = ErrorList([msg]) del cleaned_data["start_time"] del cleaned_data["end_time"] elif start_time < datetime.today(): msg = u"Seul les évènements à venir sont acceptés" self._errors["start_time"] = ErrorList([msg]) del cleaned_data["start_time"] return cleaned_data # ... rest of the code ...
3efd847f8569a30b018925b39d1552a4aead6e8f
destroyer/destroyer.py
destroyer/destroyer.py
import click from .services.twitter import TwitterDestroyer @click.group() def cli(): pass @click.command() @click.option('--unfollow_nonfollowers', default=False, type=click.BOOL) def twitter(unfollow_nonfollowers): twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers) twitter_destroyer.destroy() @click.command() def facebook(): facebook_destroyer = FacebookDestroyer() facebook_destroyer.destroy() def main(): cli.add_command(twitter) cli()
import click from .services.twitter import TwitterDestroyer from .services.facebook import FacebookDestroyer @click.group() def cli(): pass @click.command() @click.option('--unfollow_nonfollowers', default=False, type=click.BOOL) def twitter(unfollow_nonfollowers): twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers) twitter_destroyer.destroy() @click.command() def facebook(): facebook_destroyer = FacebookDestroyer() facebook_destroyer.destroy() def main(): cli.add_command(twitter) cli.add_command(facebook) cli()
Update main module with facebook integration
Update main module with facebook integration
Python
mit
jaredmichaelsmith/destroyer
import click from .services.twitter import TwitterDestroyer + from .services.facebook import FacebookDestroyer @click.group() def cli(): pass @click.command() @click.option('--unfollow_nonfollowers', default=False, type=click.BOOL) def twitter(unfollow_nonfollowers): twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers) twitter_destroyer.destroy() + @click.command() + def facebook(): + facebook_destroyer = FacebookDestroyer() + facebook_destroyer.destroy() + + def main(): cli.add_command(twitter) + cli.add_command(facebook) cli()
Update main module with facebook integration
## Code Before: import click from .services.twitter import TwitterDestroyer @click.group() def cli(): pass @click.command() @click.option('--unfollow_nonfollowers', default=False, type=click.BOOL) def twitter(unfollow_nonfollowers): twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers) twitter_destroyer.destroy() def main(): cli.add_command(twitter) cli() ## Instruction: Update main module with facebook integration ## Code After: import click from .services.twitter import TwitterDestroyer from .services.facebook import FacebookDestroyer @click.group() def cli(): pass @click.command() @click.option('--unfollow_nonfollowers', default=False, type=click.BOOL) def twitter(unfollow_nonfollowers): twitter_destroyer = TwitterDestroyer(unfollow_nonfollowers) twitter_destroyer.destroy() @click.command() def facebook(): facebook_destroyer = FacebookDestroyer() facebook_destroyer.destroy() def main(): cli.add_command(twitter) cli.add_command(facebook) cli()
# ... existing code ... from .services.twitter import TwitterDestroyer from .services.facebook import FacebookDestroyer # ... modified code ... @click.command() def facebook(): facebook_destroyer = FacebookDestroyer() facebook_destroyer.destroy() def main(): cli.add_command(twitter) cli.add_command(facebook) cli() # ... rest of the code ...
0a152c792e2ebf20056780b5a20765175d73108b
ipv6map/geodata/admin.py
ipv6map/geodata/admin.py
from django.contrib import admin from . import models class BaseReadOnlyAdmin(admin.ModelAdmin): list_display_links = None def has_change_permission(self, request, obj=None): return False if obj else True @admin.register(models.Version) class VersionAdmin(BaseReadOnlyAdmin): list_display = ['publish_date', 'location_count', 'is_active'] list_filter = ['is_active'] def location_count(self, obj): return obj.location_set.count() @admin.register(models.Location) class LocationAdmin(BaseReadOnlyAdmin): list_display = ['id', 'latitude', 'longitude', 'density', '_version'] list_filter = ['version'] return obj.version.publish_date def has_change_permission(self, request, obj=None): return False if obj else True
from django.contrib import admin from . import models @admin.register(models.Version) class VersionAdmin(admin.ModelAdmin): fieldsets = [ (None, { 'fields': ['publish_date', 'location_count'], }), ("Status", { 'fields': ['is_active'], }), ] list_display = ['publish_date', 'location_count', 'is_active'] list_filter = ['is_active'] readonly_fields = ['publish_date', 'location_count'] def location_count(self, obj): return obj.location_set.count() @admin.register(models.Location) class LocationAdmin(admin.ModelAdmin): list_display = ['id', 'latitude', 'longitude', 'density', '_version'] list_display_links = None list_filter = ['version'] def _version(self, obj): return obj.version.publish_date def has_change_permission(self, request, obj=None): return False if obj else True
Allow toggling active/inactive in VersionAdmin
Allow toggling active/inactive in VersionAdmin
Python
unlicense
rlmuraya/ipv6map,rlmuraya/ipv6map,rlmuraya/ipv6map,rlmuraya/ipv6map
from django.contrib import admin from . import models - class BaseReadOnlyAdmin(admin.ModelAdmin): - list_display_links = None - - def has_change_permission(self, request, obj=None): - return False if obj else True - - @admin.register(models.Version) - class VersionAdmin(BaseReadOnlyAdmin): + class VersionAdmin(admin.ModelAdmin): + fieldsets = [ + (None, { + 'fields': ['publish_date', 'location_count'], + }), + ("Status", { + 'fields': ['is_active'], + }), + ] list_display = ['publish_date', 'location_count', 'is_active'] list_filter = ['is_active'] + readonly_fields = ['publish_date', 'location_count'] def location_count(self, obj): return obj.location_set.count() @admin.register(models.Location) - class LocationAdmin(BaseReadOnlyAdmin): + class LocationAdmin(admin.ModelAdmin): list_display = ['id', 'latitude', 'longitude', 'density', '_version'] + list_display_links = None list_filter = ['version'] def _version(self, obj): return obj.version.publish_date + def has_change_permission(self, request, obj=None): + return False if obj else True +
Allow toggling active/inactive in VersionAdmin
## Code Before: from django.contrib import admin from . import models class BaseReadOnlyAdmin(admin.ModelAdmin): list_display_links = None def has_change_permission(self, request, obj=None): return False if obj else True @admin.register(models.Version) class VersionAdmin(BaseReadOnlyAdmin): list_display = ['publish_date', 'location_count', 'is_active'] list_filter = ['is_active'] def location_count(self, obj): return obj.location_set.count() @admin.register(models.Location) class LocationAdmin(BaseReadOnlyAdmin): list_display = ['id', 'latitude', 'longitude', 'density', '_version'] list_filter = ['version'] def _version(self, obj): return obj.version.publish_date ## Instruction: Allow toggling active/inactive in VersionAdmin ## Code After: from django.contrib import admin from . import models @admin.register(models.Version) class VersionAdmin(admin.ModelAdmin): fieldsets = [ (None, { 'fields': ['publish_date', 'location_count'], }), ("Status", { 'fields': ['is_active'], }), ] list_display = ['publish_date', 'location_count', 'is_active'] list_filter = ['is_active'] readonly_fields = ['publish_date', 'location_count'] def location_count(self, obj): return obj.location_set.count() @admin.register(models.Location) class LocationAdmin(admin.ModelAdmin): list_display = ['id', 'latitude', 'longitude', 'density', '_version'] list_display_links = None list_filter = ['version'] def _version(self, obj): return obj.version.publish_date def has_change_permission(self, request, obj=None): return False if obj else True
# ... existing code ... @admin.register(models.Version) class VersionAdmin(admin.ModelAdmin): fieldsets = [ (None, { 'fields': ['publish_date', 'location_count'], }), ("Status", { 'fields': ['is_active'], }), ] list_display = ['publish_date', 'location_count', 'is_active'] list_filter = ['is_active'] readonly_fields = ['publish_date', 'location_count'] def location_count(self, obj): # ... modified code ... @admin.register(models.Location) class LocationAdmin(admin.ModelAdmin): list_display = ['id', 'latitude', 'longitude', 'density', '_version'] list_display_links = None list_filter = ['version'] ... def _version(self, obj): return obj.version.publish_date def has_change_permission(self, request, obj=None): return False if obj else True # ... rest of the code ...
d1d7684edb6d687206deea75d2ba13194046e376
sixquiprend/models/chosen_card.py
sixquiprend/models/chosen_card.py
from sixquiprend.sixquiprend import app, db class ChosenCard(db.Model): id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE")) game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE")) card_id = db.Column(db.Integer, db.ForeignKey('card.id')) ################################################################################ ## Serializer ################################################################################ def serialize(self): return { 'id': self.id, 'user_id': self.user_id, 'game_id': self.game_id, 'card': Card.find(self.card_id) }
from sixquiprend.sixquiprend import app, db from sixquiprend.models.card import Card class ChosenCard(db.Model): id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE")) game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE")) card_id = db.Column(db.Integer, db.ForeignKey('card.id')) ################################################################################ ## Serializer ################################################################################ def serialize(self): return { 'id': self.id, 'user_id': self.user_id, 'game_id': self.game_id, 'card': Card.find(self.card_id) }
Move an import to top
Move an import to top
Python
mit
nyddogghr/SixQuiPrend,nyddogghr/SixQuiPrend,nyddogghr/SixQuiPrend,nyddogghr/SixQuiPrend
from sixquiprend.sixquiprend import app, db + from sixquiprend.models.card import Card class ChosenCard(db.Model): id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE")) game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE")) card_id = db.Column(db.Integer, db.ForeignKey('card.id')) ################################################################################ ## Serializer ################################################################################ def serialize(self): - from sixquiprend.models.card import Card return { 'id': self.id, 'user_id': self.user_id, 'game_id': self.game_id, 'card': Card.find(self.card_id) }
Move an import to top
## Code Before: from sixquiprend.sixquiprend import app, db class ChosenCard(db.Model): id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE")) game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE")) card_id = db.Column(db.Integer, db.ForeignKey('card.id')) ################################################################################ ## Serializer ################################################################################ def serialize(self): from sixquiprend.models.card import Card return { 'id': self.id, 'user_id': self.user_id, 'game_id': self.game_id, 'card': Card.find(self.card_id) } ## Instruction: Move an import to top ## Code After: from sixquiprend.sixquiprend import app, db from sixquiprend.models.card import Card class ChosenCard(db.Model): id = db.Column(db.Integer, primary_key=True) user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE")) game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE")) card_id = db.Column(db.Integer, db.ForeignKey('card.id')) ################################################################################ ## Serializer ################################################################################ def serialize(self): return { 'id': self.id, 'user_id': self.user_id, 'game_id': self.game_id, 'card': Card.find(self.card_id) }
// ... existing code ... from sixquiprend.sixquiprend import app, db from sixquiprend.models.card import Card class ChosenCard(db.Model): // ... modified code ... def serialize(self): return { 'id': self.id, // ... rest of the code ...
bddf5358b92d58549496de41ffeea724aeb2feb7
openmm/run_test.py
openmm/run_test.py
from simtk import openmm # Check major version number # If Z=0 for version X.Y.Z, out put is "X.Y" assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion() # Check git hash assert openmm.version.git_revision == 'c1a64aaa3b4b71f8dd9648fa724d2548a99d4ced', "openmm.version.git_revision = %s" % openmm.version.git_revision
from simtk import openmm # Check major version number # If Z=0 for version X.Y.Z, out put is "X.Y" assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion() # Check git hash assert openmm.version.git_revision == '07c1b86c905870afac97bd54dd776433c1b602c2', "openmm.version.git_revision = %s" % openmm.version.git_revision
Update test for 7.2 beta build
[openmm] Update test for 7.2 beta build
Python
mit
peastman/conda-recipes,omnia-md/conda-recipes,peastman/conda-recipes,omnia-md/conda-recipes,omnia-md/conda-recipes,peastman/conda-recipes
from simtk import openmm # Check major version number # If Z=0 for version X.Y.Z, out put is "X.Y" - assert openmm.Platform.getOpenMMVersion() == '7.1.1', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion() + assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion() # Check git hash - assert openmm.version.git_revision == 'c1a64aaa3b4b71f8dd9648fa724d2548a99d4ced', "openmm.version.git_revision = %s" % openmm.version.git_revision + assert openmm.version.git_revision == '07c1b86c905870afac97bd54dd776433c1b602c2', "openmm.version.git_revision = %s" % openmm.version.git_revision
Update test for 7.2 beta build
## Code Before: from simtk import openmm # Check major version number # If Z=0 for version X.Y.Z, out put is "X.Y" assert openmm.Platform.getOpenMMVersion() == '7.1.1', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion() # Check git hash assert openmm.version.git_revision == 'c1a64aaa3b4b71f8dd9648fa724d2548a99d4ced', "openmm.version.git_revision = %s" % openmm.version.git_revision ## Instruction: Update test for 7.2 beta build ## Code After: from simtk import openmm # Check major version number # If Z=0 for version X.Y.Z, out put is "X.Y" assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion() # Check git hash assert openmm.version.git_revision == '07c1b86c905870afac97bd54dd776433c1b602c2', "openmm.version.git_revision = %s" % openmm.version.git_revision
# ... existing code ... # Check major version number # If Z=0 for version X.Y.Z, out put is "X.Y" assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion() # Check git hash assert openmm.version.git_revision == '07c1b86c905870afac97bd54dd776433c1b602c2', "openmm.version.git_revision = %s" % openmm.version.git_revision # ... rest of the code ...
4eada6970d72b3863104790229286edf8d17720c
accelerator/tests/contexts/user_role_context.py
accelerator/tests/contexts/user_role_context.py
from builtins import object from accelerator.tests.factories import ( ExpertFactory, ProgramFactory, ProgramRoleFactory, ProgramRoleGrantFactory, UserRoleFactory, ) class UserRoleContext(object): def __init__(self, user_role_name, program=None, user=None): if user and not program: self.program = user.get_profile().current_program else: self.program = program or ProgramFactory() self.user = (user or ExpertFactory(profile__current_program=self.program)) self.user_role = _user_role_for_name(user_role_name) self.program_role = ProgramRoleFactory(user_role=self.user_role, program=self.program) self.program_role_grant = ProgramRoleGrantFactory( person=self.user, program_role=self.program_role)
from builtins import object from accelerator.tests.factories import ( ExpertFactory, ProgramFactory, ProgramRoleFactory, ProgramRoleGrantFactory, UserRoleFactory, ) from accelerator.models import UserRole class UserRoleContext(object): def __init__(self, user_role_name, program=None, user=None): if user and not program: self.program = user.get_profile().current_program else: self.program = program or ProgramFactory() self.user = (user or ExpertFactory(profile__current_program=self.program)) self.user_role = _user_role_for_name(user_role_name) self.program_role = ProgramRoleFactory(user_role=self.user_role, program=self.program) self.program_role_grant = ProgramRoleGrantFactory( person=self.user, program_role=self.program_role) def _user_role_for_name(user_role_name): return (UserRole.objects.filter(name=user_role_name).first() or UserRoleFactory(name=user_role_name))
Make UserRoleContext safe to use
[AC-7397] Make UserRoleContext safe to use
Python
mit
masschallenge/django-accelerator,masschallenge/django-accelerator
from builtins import object from accelerator.tests.factories import ( ExpertFactory, ProgramFactory, ProgramRoleFactory, ProgramRoleGrantFactory, UserRoleFactory, ) + from accelerator.models import UserRole class UserRoleContext(object): def __init__(self, user_role_name, program=None, user=None): if user and not program: self.program = user.get_profile().current_program else: self.program = program or ProgramFactory() self.user = (user or ExpertFactory(profile__current_program=self.program)) - self.user_role = UserRoleFactory(name=user_role_name) + self.user_role = _user_role_for_name(user_role_name) + self.program_role = ProgramRoleFactory(user_role=self.user_role, program=self.program) self.program_role_grant = ProgramRoleGrantFactory( person=self.user, program_role=self.program_role) + def _user_role_for_name(user_role_name): + + return (UserRole.objects.filter(name=user_role_name).first() or + UserRoleFactory(name=user_role_name)) +
Make UserRoleContext safe to use
## Code Before: from builtins import object from accelerator.tests.factories import ( ExpertFactory, ProgramFactory, ProgramRoleFactory, ProgramRoleGrantFactory, UserRoleFactory, ) class UserRoleContext(object): def __init__(self, user_role_name, program=None, user=None): if user and not program: self.program = user.get_profile().current_program else: self.program = program or ProgramFactory() self.user = (user or ExpertFactory(profile__current_program=self.program)) self.user_role = UserRoleFactory(name=user_role_name) self.program_role = ProgramRoleFactory(user_role=self.user_role, program=self.program) self.program_role_grant = ProgramRoleGrantFactory( person=self.user, program_role=self.program_role) ## Instruction: Make UserRoleContext safe to use ## Code After: from builtins import object from accelerator.tests.factories import ( ExpertFactory, ProgramFactory, ProgramRoleFactory, ProgramRoleGrantFactory, UserRoleFactory, ) from accelerator.models import UserRole class UserRoleContext(object): def __init__(self, user_role_name, program=None, user=None): if user and not program: self.program = user.get_profile().current_program else: self.program = program or ProgramFactory() self.user = (user or ExpertFactory(profile__current_program=self.program)) self.user_role = _user_role_for_name(user_role_name) self.program_role = ProgramRoleFactory(user_role=self.user_role, program=self.program) self.program_role_grant = ProgramRoleGrantFactory( person=self.user, program_role=self.program_role) def _user_role_for_name(user_role_name): return (UserRole.objects.filter(name=user_role_name).first() or UserRoleFactory(name=user_role_name))
// ... existing code ... UserRoleFactory, ) from accelerator.models import UserRole // ... modified code ... self.user = (user or ExpertFactory(profile__current_program=self.program)) self.user_role = _user_role_for_name(user_role_name) self.program_role = ProgramRoleFactory(user_role=self.user_role, program=self.program) ... person=self.user, program_role=self.program_role) def _user_role_for_name(user_role_name): return (UserRole.objects.filter(name=user_role_name).first() or UserRoleFactory(name=user_role_name)) // ... rest of the code ...
fbad1649e9939a3be4194e0d508ff5889f48bb6f
unleash/plugins/utils_assign.py
unleash/plugins/utils_assign.py
import re # regular expression for finding assignments _quotes = "['|\"|\"\"\"]" BASE_ASSIGN_PATTERN = r'({}\s*=\s*[ubr]?' + _quotes + r')(.*?)(' +\ _quotes + r')' def find_assign(data, varname): """Finds a substring that looks like an assignment. :param data: Source to search in. :param varname: Name of the variable for which an assignment should be found. """ ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) if len(ASSIGN_RE.findall(data)) > 1: raise ValueError('Found multiple {}-strings.'.format(varname)) if len(ASSIGN_RE.findall(data)) < 1: raise ValueError('No version assignment ("{}") found.'.format(varname)) return ASSIGN_RE.search(data).group(2) def replace_assign(data, varname, new_value): ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) def repl(m): return m.group(1) + new_value + m.group(3) from unleash.exc import PluginError import re # regular expression for finding assignments _quotes = "['|\"|\"\"\"]" BASE_ASSIGN_PATTERN = r'({}\s*=\s*[ubr]?' + _quotes + r')(.*?)(' +\ _quotes + r')' def find_assign(data, varname): """Finds a substring that looks like an assignment. :param data: Source to search in. :param varname: Name of the variable for which an assignment should be found. """ ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) if len(ASSIGN_RE.findall(data)) > 1: raise ValueError('Found multiple {}-strings.'.format(varname)) if len(ASSIGN_RE.findall(data)) < 1: raise ValueError('No version assignment ("{}") found.'.format(varname)) return ASSIGN_RE.search(data).group(2) def replace_assign(data, varname, new_value): ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) def repl(m): return m.group(1) + new_value + m.group(3) return ASSIGN_RE.sub(repl, data)
from unleash.exc import PluginError import re # regular expression for finding assignments _quotes = "['|\"|\"\"\"]" BASE_ASSIGN_PATTERN = r'({}\s*=\s*[ubr]?' + _quotes + r')(.*?)(' +\ _quotes + r')' def find_assign(data, varname): """Finds a substring that looks like an assignment. :param data: Source to search in. :param varname: Name of the variable for which an assignment should be found. """ ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) if len(ASSIGN_RE.findall(data)) > 1: raise PluginError('Found multiple {}-strings.'.format(varname)) if len(ASSIGN_RE.findall(data)) < 1: raise PluginError('No version assignment ("{}") found.' .format(varname)) return ASSIGN_RE.search(data).group(2) def replace_assign(data, varname, new_value): ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) def repl(m): return m.group(1) + new_value + m.group(3) return ASSIGN_RE.sub(repl, data)
Raise PluginErrors instead of ValueErrors in versions.
Raise PluginErrors instead of ValueErrors in versions.
Python
mit
mbr/unleash
+ from unleash.exc import PluginError + import re # regular expression for finding assignments _quotes = "['|\"|\"\"\"]" BASE_ASSIGN_PATTERN = r'({}\s*=\s*[ubr]?' + _quotes + r')(.*?)(' +\ _quotes + r')' def find_assign(data, varname): """Finds a substring that looks like an assignment. :param data: Source to search in. :param varname: Name of the variable for which an assignment should be found. """ ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) if len(ASSIGN_RE.findall(data)) > 1: - raise ValueError('Found multiple {}-strings.'.format(varname)) + raise PluginError('Found multiple {}-strings.'.format(varname)) if len(ASSIGN_RE.findall(data)) < 1: - raise ValueError('No version assignment ("{}") found.'.format(varname)) + raise PluginError('No version assignment ("{}") found.' + .format(varname)) return ASSIGN_RE.search(data).group(2) def replace_assign(data, varname, new_value): ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) def repl(m): return m.group(1) + new_value + m.group(3) return ASSIGN_RE.sub(repl, data)
Raise PluginErrors instead of ValueErrors in versions.
## Code Before: import re # regular expression for finding assignments _quotes = "['|\"|\"\"\"]" BASE_ASSIGN_PATTERN = r'({}\s*=\s*[ubr]?' + _quotes + r')(.*?)(' +\ _quotes + r')' def find_assign(data, varname): """Finds a substring that looks like an assignment. :param data: Source to search in. :param varname: Name of the variable for which an assignment should be found. """ ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) if len(ASSIGN_RE.findall(data)) > 1: raise ValueError('Found multiple {}-strings.'.format(varname)) if len(ASSIGN_RE.findall(data)) < 1: raise ValueError('No version assignment ("{}") found.'.format(varname)) return ASSIGN_RE.search(data).group(2) def replace_assign(data, varname, new_value): ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) def repl(m): return m.group(1) + new_value + m.group(3) return ASSIGN_RE.sub(repl, data) ## Instruction: Raise PluginErrors instead of ValueErrors in versions. ## Code After: from unleash.exc import PluginError import re # regular expression for finding assignments _quotes = "['|\"|\"\"\"]" BASE_ASSIGN_PATTERN = r'({}\s*=\s*[ubr]?' + _quotes + r')(.*?)(' +\ _quotes + r')' def find_assign(data, varname): """Finds a substring that looks like an assignment. :param data: Source to search in. :param varname: Name of the variable for which an assignment should be found. """ ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) if len(ASSIGN_RE.findall(data)) > 1: raise PluginError('Found multiple {}-strings.'.format(varname)) if len(ASSIGN_RE.findall(data)) < 1: raise PluginError('No version assignment ("{}") found.' .format(varname)) return ASSIGN_RE.search(data).group(2) def replace_assign(data, varname, new_value): ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname)) def repl(m): return m.group(1) + new_value + m.group(3) return ASSIGN_RE.sub(repl, data)
... from unleash.exc import PluginError import re ... if len(ASSIGN_RE.findall(data)) > 1: raise PluginError('Found multiple {}-strings.'.format(varname)) if len(ASSIGN_RE.findall(data)) < 1: raise PluginError('No version assignment ("{}") found.' .format(varname)) return ASSIGN_RE.search(data).group(2) ...
85c1a9e6dd9e4523d60638027da23fbfce7deff6
stack/cluster.py
stack/cluster.py
from troposphere import ( iam, Parameter, Ref, ) from troposphere.ecs import ( Cluster, ) from .template import template container_instance_type = Ref(template.add_parameter(Parameter( "ContainerInstanceType", Description="The container instance type", Type="String", Default="t2.micro", AllowedValues=["t2.micro", "t2.small", "t2.medium"] ))) template.add_mapping("ECSRegionMap", { "eu-west-1": {"AMI": "ami-4e6ffe3d"}, "us-east-1": {"AMI": "ami-8f7687e2"}, "us-west-2": {"AMI": "ami-84b44de4"}, }) # ECS cluster cluster = Cluster( "Cluster", template=template, )
from troposphere import ( iam, Parameter, Ref, ) from troposphere.ecs import ( Cluster, ) from .template import template container_instance_type = Ref(template.add_parameter(Parameter( "ContainerInstanceType", Description="The container instance type", Type="String", Default="t2.micro", AllowedValues=["t2.micro", "t2.small", "t2.medium"] ))) template.add_mapping("ECSRegionMap", { "eu-west-1": {"AMI": "ami-4e6ffe3d"}, "us-east-1": {"AMI": "ami-8f7687e2"}, "us-west-2": {"AMI": "ami-84b44de4"}, }) # ECS cluster cluster = Cluster( "Cluster", template=template, ) # ECS container role container_instance_role = iam.Role( "ContainerInstanceRole", template=template, AssumeRolePolicyDocument=dict(Statement=[dict( Effect="Allow", Principal=dict(Service=["ec2.amazonaws.com"]), Action=["sts:AssumeRole"], )]), Path="/", Policies=[ ] ) # ECS container instance profile container_instance_profile = iam.InstanceProfile( "ContainerInstanceProfile", template=template, Path="/", Roles=[Ref(container_instance_role)], )
Add an instance profile for container instances
Add an instance profile for container instances
Python
mit
caktus/aws-web-stacks,tobiasmcnulty/aws-container-basics
from troposphere import ( + iam, Parameter, Ref, ) from troposphere.ecs import ( Cluster, ) from .template import template container_instance_type = Ref(template.add_parameter(Parameter( "ContainerInstanceType", Description="The container instance type", Type="String", Default="t2.micro", AllowedValues=["t2.micro", "t2.small", "t2.medium"] ))) template.add_mapping("ECSRegionMap", { "eu-west-1": {"AMI": "ami-4e6ffe3d"}, "us-east-1": {"AMI": "ami-8f7687e2"}, "us-west-2": {"AMI": "ami-84b44de4"}, }) # ECS cluster cluster = Cluster( "Cluster", template=template, ) + + # ECS container role + container_instance_role = iam.Role( + "ContainerInstanceRole", + template=template, + AssumeRolePolicyDocument=dict(Statement=[dict( + Effect="Allow", + Principal=dict(Service=["ec2.amazonaws.com"]), + Action=["sts:AssumeRole"], + )]), + Path="/", + Policies=[ + ] + ) + + + # ECS container instance profile + container_instance_profile = iam.InstanceProfile( + "ContainerInstanceProfile", + template=template, + Path="/", + Roles=[Ref(container_instance_role)], + ) +
Add an instance profile for container instances
## Code Before: from troposphere import ( Parameter, Ref, ) from troposphere.ecs import ( Cluster, ) from .template import template container_instance_type = Ref(template.add_parameter(Parameter( "ContainerInstanceType", Description="The container instance type", Type="String", Default="t2.micro", AllowedValues=["t2.micro", "t2.small", "t2.medium"] ))) template.add_mapping("ECSRegionMap", { "eu-west-1": {"AMI": "ami-4e6ffe3d"}, "us-east-1": {"AMI": "ami-8f7687e2"}, "us-west-2": {"AMI": "ami-84b44de4"}, }) # ECS cluster cluster = Cluster( "Cluster", template=template, ) ## Instruction: Add an instance profile for container instances ## Code After: from troposphere import ( iam, Parameter, Ref, ) from troposphere.ecs import ( Cluster, ) from .template import template container_instance_type = Ref(template.add_parameter(Parameter( "ContainerInstanceType", Description="The container instance type", Type="String", Default="t2.micro", AllowedValues=["t2.micro", "t2.small", "t2.medium"] ))) template.add_mapping("ECSRegionMap", { "eu-west-1": {"AMI": "ami-4e6ffe3d"}, "us-east-1": {"AMI": "ami-8f7687e2"}, "us-west-2": {"AMI": "ami-84b44de4"}, }) # ECS cluster cluster = Cluster( "Cluster", template=template, ) # ECS container role container_instance_role = iam.Role( "ContainerInstanceRole", template=template, AssumeRolePolicyDocument=dict(Statement=[dict( Effect="Allow", Principal=dict(Service=["ec2.amazonaws.com"]), Action=["sts:AssumeRole"], )]), Path="/", Policies=[ ] ) # ECS container instance profile container_instance_profile = iam.InstanceProfile( "ContainerInstanceProfile", template=template, Path="/", Roles=[Ref(container_instance_role)], )
... from troposphere import ( iam, Parameter, Ref, ... template=template, ) # ECS container role container_instance_role = iam.Role( "ContainerInstanceRole", template=template, AssumeRolePolicyDocument=dict(Statement=[dict( Effect="Allow", Principal=dict(Service=["ec2.amazonaws.com"]), Action=["sts:AssumeRole"], )]), Path="/", Policies=[ ] ) # ECS container instance profile container_instance_profile = iam.InstanceProfile( "ContainerInstanceProfile", template=template, Path="/", Roles=[Ref(container_instance_role)], ) ...
788cc159e4d734b972e22ccf06dbcd8ed8f94885
distutils/_collections.py
distutils/_collections.py
import collections import itertools # from jaraco.collections 3.5 class DictStack(list, collections.abc.Mapping): """ A stack of dictionaries that behaves as a view on those dictionaries, giving preference to the last. >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)]) >>> stack['a'] 2 >>> stack['b'] 2 2 >>> len(stack) 3 >>> stack.push(dict(a=3)) >>> stack['a'] 3 >>> set(stack.keys()) == set(['a', 'b', 'c']) True >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)]) True >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2) True >>> d = stack.pop() >>> stack['a'] 2 >>> d = stack.pop() >>> stack['a'] 1 >>> stack.get('b', None) >>> 'c' in stack True """ def __iter__(self): dicts = list.__iter__(self) return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts))) def __getitem__(self, key): for scope in reversed(self): if key in scope: return scope[key] raise KeyError(key) push = list.append def __contains__(self, other): return collections.abc.Mapping.__contains__(self, other)
import collections import itertools # from jaraco.collections 3.5.1 class DictStack(list, collections.abc.Mapping): """ A stack of dictionaries that behaves as a view on those dictionaries, giving preference to the last. >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)]) >>> stack['a'] 2 >>> stack['b'] 2 >>> stack['c'] 2 >>> len(stack) 3 >>> stack.push(dict(a=3)) >>> stack['a'] 3 >>> set(stack.keys()) == set(['a', 'b', 'c']) True >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)]) True >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2) True >>> d = stack.pop() >>> stack['a'] 2 >>> d = stack.pop() >>> stack['a'] 1 >>> stack.get('b', None) >>> 'c' in stack True """ def __iter__(self): dicts = list.__iter__(self) return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts))) def __getitem__(self, key): for scope in reversed(tuple(list.__iter__(self))): if key in scope: return scope[key] raise KeyError(key) push = list.append def __contains__(self, other): return collections.abc.Mapping.__contains__(self, other) def __len__(self): return len(list(iter(self)))
Update DictStack implementation from jaraco.collections 3.5.1
Update DictStack implementation from jaraco.collections 3.5.1
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
import collections import itertools - # from jaraco.collections 3.5 + # from jaraco.collections 3.5.1 class DictStack(list, collections.abc.Mapping): """ A stack of dictionaries that behaves as a view on those dictionaries, giving preference to the last. >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)]) >>> stack['a'] 2 >>> stack['b'] 2 >>> stack['c'] 2 + >>> len(stack) + 3 >>> stack.push(dict(a=3)) >>> stack['a'] 3 >>> set(stack.keys()) == set(['a', 'b', 'c']) True >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)]) True >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2) True >>> d = stack.pop() >>> stack['a'] 2 >>> d = stack.pop() >>> stack['a'] 1 >>> stack.get('b', None) >>> 'c' in stack True """ def __iter__(self): dicts = list.__iter__(self) return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts))) def __getitem__(self, key): - for scope in reversed(self): + for scope in reversed(tuple(list.__iter__(self))): if key in scope: return scope[key] raise KeyError(key) push = list.append def __contains__(self, other): return collections.abc.Mapping.__contains__(self, other) + def __len__(self): + return len(list(iter(self))) +
Update DictStack implementation from jaraco.collections 3.5.1
## Code Before: import collections import itertools # from jaraco.collections 3.5 class DictStack(list, collections.abc.Mapping): """ A stack of dictionaries that behaves as a view on those dictionaries, giving preference to the last. >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)]) >>> stack['a'] 2 >>> stack['b'] 2 >>> stack['c'] 2 >>> stack.push(dict(a=3)) >>> stack['a'] 3 >>> set(stack.keys()) == set(['a', 'b', 'c']) True >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)]) True >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2) True >>> d = stack.pop() >>> stack['a'] 2 >>> d = stack.pop() >>> stack['a'] 1 >>> stack.get('b', None) >>> 'c' in stack True """ def __iter__(self): dicts = list.__iter__(self) return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts))) def __getitem__(self, key): for scope in reversed(self): if key in scope: return scope[key] raise KeyError(key) push = list.append def __contains__(self, other): return collections.abc.Mapping.__contains__(self, other) ## Instruction: Update DictStack implementation from jaraco.collections 3.5.1 ## Code After: import collections import itertools # from jaraco.collections 3.5.1 class DictStack(list, collections.abc.Mapping): """ A stack of dictionaries that behaves as a view on those dictionaries, giving preference to the last. >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)]) >>> stack['a'] 2 >>> stack['b'] 2 >>> stack['c'] 2 >>> len(stack) 3 >>> stack.push(dict(a=3)) >>> stack['a'] 3 >>> set(stack.keys()) == set(['a', 'b', 'c']) True >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)]) True >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2) True >>> d = stack.pop() >>> stack['a'] 2 >>> d = stack.pop() >>> stack['a'] 1 >>> stack.get('b', None) >>> 'c' in stack True """ def __iter__(self): dicts = list.__iter__(self) return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts))) def __getitem__(self, key): for scope in reversed(tuple(list.__iter__(self))): if key in scope: return scope[key] raise KeyError(key) push = list.append def __contains__(self, other): return collections.abc.Mapping.__contains__(self, other) def __len__(self): return len(list(iter(self)))
# ... existing code ... # from jaraco.collections 3.5.1 class DictStack(list, collections.abc.Mapping): """ # ... modified code ... >>> stack['c'] 2 >>> len(stack) 3 >>> stack.push(dict(a=3)) >>> stack['a'] ... def __getitem__(self, key): for scope in reversed(tuple(list.__iter__(self))): if key in scope: return scope[key] ... def __contains__(self, other): return collections.abc.Mapping.__contains__(self, other) def __len__(self): return len(list(iter(self))) # ... rest of the code ...
8efd4b8661f5be47c04130de6d47c8b80c39454c
selvbetjening/core/events/management/commands/recalculate_attend_columns.py
selvbetjening/core/events/management/commands/recalculate_attend_columns.py
from django.core.management.base import NoArgsCommand class Command(NoArgsCommand): def handle_noargs(self, **options): attendees = Attend.objects.select_related().prefetch_related('selection_set') for attendee in attendees: attendee.recalculate_price() Attend.objects.recalculate_aggregations_paid(attendees)
from django.core.management.base import NoArgsCommand class Command(NoArgsCommand): def handle_noargs(self, **options): from selvbetjening.core.events.models import Attend attendees = Attend.objects.select_related().prefetch_related('selection_set') for attendee in attendees: attendee.recalculate_price() Attend.objects.recalculate_aggregations_paid(attendees)
Fix import that crashed the system under certain conditions
Fix import that crashed the system under certain conditions
Python
mit
animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening
from django.core.management.base import NoArgsCommand - - from selvbetjening.core.events.models import Attend class Command(NoArgsCommand): def handle_noargs(self, **options): + from selvbetjening.core.events.models import Attend attendees = Attend.objects.select_related().prefetch_related('selection_set') for attendee in attendees: attendee.recalculate_price() Attend.objects.recalculate_aggregations_paid(attendees)
Fix import that crashed the system under certain conditions
## Code Before: from django.core.management.base import NoArgsCommand from selvbetjening.core.events.models import Attend class Command(NoArgsCommand): def handle_noargs(self, **options): attendees = Attend.objects.select_related().prefetch_related('selection_set') for attendee in attendees: attendee.recalculate_price() Attend.objects.recalculate_aggregations_paid(attendees) ## Instruction: Fix import that crashed the system under certain conditions ## Code After: from django.core.management.base import NoArgsCommand class Command(NoArgsCommand): def handle_noargs(self, **options): from selvbetjening.core.events.models import Attend attendees = Attend.objects.select_related().prefetch_related('selection_set') for attendee in attendees: attendee.recalculate_price() Attend.objects.recalculate_aggregations_paid(attendees)
# ... existing code ... from django.core.management.base import NoArgsCommand # ... modified code ... class Command(NoArgsCommand): def handle_noargs(self, **options): from selvbetjening.core.events.models import Attend attendees = Attend.objects.select_related().prefetch_related('selection_set') # ... rest of the code ...
164b860e4a44a22a1686cf6133fac6258fc97db6
nbgrader/tests/apps/test_nbgrader_fetch.py
nbgrader/tests/apps/test_nbgrader_fetch.py
from nbgrader.tests import run_command from nbgrader.tests.apps.base import BaseTestApp class TestNbGraderFetch(BaseTestApp): def test_help(self): """Does the help display without error?""" import os from nbgrader.tests import run_command from nbgrader.tests.apps.base import BaseTestApp class TestNbGraderFetch(BaseTestApp): def test_help(self): """Does the help display without error?""" run_command("nbgrader fetch --help-all")
import os from nbgrader.tests import run_command from nbgrader.tests.apps.base import BaseTestApp class TestNbGraderFetch(BaseTestApp): def _fetch(self, assignment, exchange, flags="", retcode=0): run_command( 'nbgrader fetch abc101 {} ' '--TransferApp.exchange_directory={} ' '{}'.format(assignment, exchange, flags), retcode=retcode) def test_help(self): """Does the help display without error?""" run_command("nbgrader fetch --help-all") def test_fetch(self, exchange): self._copy_file("files/test.ipynb", os.path.join(exchange, "abc101/outbound/ps1/p1.ipynb")) self._fetch("ps1", exchange) assert os.path.isfile("ps1/p1.ipynb") # make sure it fails if the assignment already exists self._fetch("ps1", exchange, retcode=1) # make sure it fails even if the assignment is incomplete os.remove("ps1/p1.ipynb") self._fetch("ps1", exchange, retcode=1)
Add some basic tests for nbgrader fetch
Add some basic tests for nbgrader fetch
Python
bsd-3-clause
modulexcite/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,alope107/nbgrader,modulexcite/nbgrader,dementrock/nbgrader,alope107/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,MatKallada/nbgrader,ellisonbg/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,ellisonbg/nbgrader
+ import os + from nbgrader.tests import run_command from nbgrader.tests.apps.base import BaseTestApp class TestNbGraderFetch(BaseTestApp): + def _fetch(self, assignment, exchange, flags="", retcode=0): + run_command( + 'nbgrader fetch abc101 {} ' + '--TransferApp.exchange_directory={} ' + '{}'.format(assignment, exchange, flags), + retcode=retcode) + def test_help(self): """Does the help display without error?""" run_command("nbgrader fetch --help-all") + def test_fetch(self, exchange): + self._copy_file("files/test.ipynb", os.path.join(exchange, "abc101/outbound/ps1/p1.ipynb")) + self._fetch("ps1", exchange) + assert os.path.isfile("ps1/p1.ipynb") + + # make sure it fails if the assignment already exists + self._fetch("ps1", exchange, retcode=1) + + # make sure it fails even if the assignment is incomplete + os.remove("ps1/p1.ipynb") + self._fetch("ps1", exchange, retcode=1) +
Add some basic tests for nbgrader fetch
## Code Before: from nbgrader.tests import run_command from nbgrader.tests.apps.base import BaseTestApp class TestNbGraderFetch(BaseTestApp): def test_help(self): """Does the help display without error?""" run_command("nbgrader fetch --help-all") ## Instruction: Add some basic tests for nbgrader fetch ## Code After: import os from nbgrader.tests import run_command from nbgrader.tests.apps.base import BaseTestApp class TestNbGraderFetch(BaseTestApp): def _fetch(self, assignment, exchange, flags="", retcode=0): run_command( 'nbgrader fetch abc101 {} ' '--TransferApp.exchange_directory={} ' '{}'.format(assignment, exchange, flags), retcode=retcode) def test_help(self): """Does the help display without error?""" run_command("nbgrader fetch --help-all") def test_fetch(self, exchange): self._copy_file("files/test.ipynb", os.path.join(exchange, "abc101/outbound/ps1/p1.ipynb")) self._fetch("ps1", exchange) assert os.path.isfile("ps1/p1.ipynb") # make sure it fails if the assignment already exists self._fetch("ps1", exchange, retcode=1) # make sure it fails even if the assignment is incomplete os.remove("ps1/p1.ipynb") self._fetch("ps1", exchange, retcode=1)
// ... existing code ... import os from nbgrader.tests import run_command from nbgrader.tests.apps.base import BaseTestApp // ... modified code ... class TestNbGraderFetch(BaseTestApp): def _fetch(self, assignment, exchange, flags="", retcode=0): run_command( 'nbgrader fetch abc101 {} ' '--TransferApp.exchange_directory={} ' '{}'.format(assignment, exchange, flags), retcode=retcode) def test_help(self): """Does the help display without error?""" run_command("nbgrader fetch --help-all") def test_fetch(self, exchange): self._copy_file("files/test.ipynb", os.path.join(exchange, "abc101/outbound/ps1/p1.ipynb")) self._fetch("ps1", exchange) assert os.path.isfile("ps1/p1.ipynb") # make sure it fails if the assignment already exists self._fetch("ps1", exchange, retcode=1) # make sure it fails even if the assignment is incomplete os.remove("ps1/p1.ipynb") self._fetch("ps1", exchange, retcode=1) // ... rest of the code ...
600ec67b175ca78c4dd72b4468368920ce390316
flask_controllers/GameModes.py
flask_controllers/GameModes.py
from flask.views import MethodView from flask_helpers.build_response import build_response from flask_helpers.ErrorHandler import ErrorHandler from python_cowbull_game.GameObject import GameObject class GameModes(MethodView): def get(self): digits = GameObject.digits_used guesses = GameObject.guesses_allowed game_modes = [mode for mode in GameObject.digits_used] return_list = [] for mode in game_modes: return_list.append( { "mode": mode, "digits": digits[mode], "guesses": guesses[mode] } ) return build_response( html_status=200, response_data=return_list, response_mimetype="application/json" from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response from flask_helpers.ErrorHandler import ErrorHandler from python_cowbull_game.GameObject import GameObject class GameModes(MethodView): def get(self): digits = GameObject.digits_used guesses = GameObject.guesses_allowed game_modes = [mode for mode in GameObject.digits_used] return_list = [] for mode in game_modes: return_list.append( { "mode": mode, "digits": digits[mode], "guesses": guesses[mode] } ) return build_response( html_status=200, response_data=return_list, response_mimetype="application/json" )
from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response from flask_helpers.ErrorHandler import ErrorHandler from python_cowbull_game.GameObject import GameObject class GameModes(MethodView): def get(self): textonly = request.args.get('textmode', None) if textonly: return build_response( html_status=200, response_data=GameObject.game_modes, response_mimetype="application/json" ) digits = GameObject.digits_used guesses = GameObject.guesses_allowed game_modes = GameObject.game_modes # game_modes = [mode for mode in GameObject.digits_used] return_list = [] for mode in game_modes: return_list.append( { "mode": mode, "digits": digits[mode], "guesses": guesses[mode] } ) return build_response( html_status=200, response_data=return_list, response_mimetype="application/json" )
Add text only mode to get game modes
Add text only mode to get game modes
Python
apache-2.0
dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server
+ from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response from flask_helpers.ErrorHandler import ErrorHandler from python_cowbull_game.GameObject import GameObject class GameModes(MethodView): def get(self): + textonly = request.args.get('textmode', None) + if textonly: + return build_response( + html_status=200, + response_data=GameObject.game_modes, + response_mimetype="application/json" + ) + digits = GameObject.digits_used guesses = GameObject.guesses_allowed + game_modes = GameObject.game_modes - game_modes = [mode for mode in GameObject.digits_used] + # game_modes = [mode for mode in GameObject.digits_used] return_list = [] for mode in game_modes: return_list.append( { "mode": mode, "digits": digits[mode], "guesses": guesses[mode] } ) return build_response( html_status=200, response_data=return_list, response_mimetype="application/json" )
Add text only mode to get game modes
## Code Before: from flask.views import MethodView from flask_helpers.build_response import build_response from flask_helpers.ErrorHandler import ErrorHandler from python_cowbull_game.GameObject import GameObject class GameModes(MethodView): def get(self): digits = GameObject.digits_used guesses = GameObject.guesses_allowed game_modes = [mode for mode in GameObject.digits_used] return_list = [] for mode in game_modes: return_list.append( { "mode": mode, "digits": digits[mode], "guesses": guesses[mode] } ) return build_response( html_status=200, response_data=return_list, response_mimetype="application/json" ) ## Instruction: Add text only mode to get game modes ## Code After: from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response from flask_helpers.ErrorHandler import ErrorHandler from python_cowbull_game.GameObject import GameObject class GameModes(MethodView): def get(self): textonly = request.args.get('textmode', None) if textonly: return build_response( html_status=200, response_data=GameObject.game_modes, response_mimetype="application/json" ) digits = GameObject.digits_used guesses = GameObject.guesses_allowed game_modes = GameObject.game_modes # game_modes = [mode for mode in GameObject.digits_used] return_list = [] for mode in game_modes: return_list.append( { "mode": mode, "digits": digits[mode], "guesses": guesses[mode] } ) return build_response( html_status=200, response_data=return_list, response_mimetype="application/json" )
# ... existing code ... from flask import request from flask.views import MethodView from flask_helpers.build_response import build_response # ... modified code ... class GameModes(MethodView): def get(self): textonly = request.args.get('textmode', None) if textonly: return build_response( html_status=200, response_data=GameObject.game_modes, response_mimetype="application/json" ) digits = GameObject.digits_used guesses = GameObject.guesses_allowed game_modes = GameObject.game_modes # game_modes = [mode for mode in GameObject.digits_used] return_list = [] # ... rest of the code ...
703a423f4a0aeda7cbeaa542e2f4e0581eee3bda
slot/utils.py
slot/utils.py
import datetime def to_ticks(dt): """Converts a timestamp to ticks""" return (dt - datetime.datetime(1970, 1, 1)).total_seconds() def ticks_to_timestamp(ticks): """Converts ticks to a timestamp""" converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=3700) return converted def ticks_utc_now(): """Returns the current timestamp in ticks""" time_now = datetime.datetime.utcnow() ticks = int(timestamp_to_ticks(time_now)) return ticks def ticks_local_now(): time_now = datetime.datetime.now(tz=this_timezone) ticks = int(timestamp_to_ticks(time_now)) return ticks def mobile_number_string_to_int(mobile_string): """Converts mobile numbers from a string to an integer""" return int(mobile_string) def redact_mobile_number(mobile_string): """Takes a mobile number as a string, and redacts all but the last 3 digits""" return str.format('XXXXX XXX{0}', mobile_string[-3:])
import datetime import pytz this_timezone = pytz.timezone('Europe/London') def timestamp_to_ticks(dt): """Converts a datetime to ticks (seconds since Epoch)""" delta = (dt - datetime.datetime(1970, 1, 1)) ticks = int(delta.total_seconds()) return ticks def ticks_to_timestamp(ticks): """Converts ticks (seconds since Epoch) to a datetime""" delta = datetime.timedelta(seconds=ticks) new_timestamp = datetime.datetime(1970, 1, 1) + delta return new_timestamp def ticks_utc_now(): """Returns the current timestamp in ticks""" time_now = datetime.datetime.utcnow() ticks = int(timestamp_to_ticks(time_now)) return ticks def ticks_local_now(): time_now = datetime.datetime.now(tz=this_timezone) ticks = int(timestamp_to_ticks(time_now)) return ticks def mobile_number_string_to_int(mobile_string): """Converts mobile numbers from a string to an integer""" return int(mobile_string) def redact_mobile_number(mobile_string): """Takes a mobile number as a string, and redacts all but the last 3 digits""" return str.format('XXXXX XXX{0}', mobile_string[-3:])
Add timezone support to timestamp helper methods
Add timezone support to timestamp helper methods
Python
mit
nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT
import datetime + import pytz + + this_timezone = pytz.timezone('Europe/London') - def to_ticks(dt): + def timestamp_to_ticks(dt): - """Converts a timestamp to ticks""" + """Converts a datetime to ticks (seconds since Epoch)""" - return (dt - datetime.datetime(1970, 1, 1)).total_seconds() + delta = (dt - datetime.datetime(1970, 1, 1)) + ticks = int(delta.total_seconds()) + return ticks def ticks_to_timestamp(ticks): - """Converts ticks to a timestamp""" - converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=3700) - return converted + """Converts ticks (seconds since Epoch) to a datetime""" + delta = datetime.timedelta(seconds=ticks) + new_timestamp = datetime.datetime(1970, 1, 1) + delta + return new_timestamp - def ticks_now(): + def ticks_utc_now(): """Returns the current timestamp in ticks""" - return int(to_ticks(datetime.datetime.utcnow())) + time_now = datetime.datetime.utcnow() + ticks = int(timestamp_to_ticks(time_now)) + return ticks + + + def ticks_local_now(): + time_now = datetime.datetime.now(tz=this_timezone) + ticks = int(timestamp_to_ticks(time_now)) + return ticks def mobile_number_string_to_int(mobile_string): """Converts mobile numbers from a string to an integer""" return int(mobile_string) def redact_mobile_number(mobile_string): """Takes a mobile number as a string, and redacts all but the last 3 digits""" return str.format('XXXXX XXX{0}', mobile_string[-3:])
Add timezone support to timestamp helper methods
## Code Before: import datetime def to_ticks(dt): """Converts a timestamp to ticks""" return (dt - datetime.datetime(1970, 1, 1)).total_seconds() def ticks_to_timestamp(ticks): """Converts ticks to a timestamp""" converted = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=3700) return converted def ticks_now(): """Returns the current timestamp in ticks""" return int(to_ticks(datetime.datetime.utcnow())) def mobile_number_string_to_int(mobile_string): """Converts mobile numbers from a string to an integer""" return int(mobile_string) def redact_mobile_number(mobile_string): """Takes a mobile number as a string, and redacts all but the last 3 digits""" return str.format('XXXXX XXX{0}', mobile_string[-3:]) ## Instruction: Add timezone support to timestamp helper methods ## Code After: import datetime import pytz this_timezone = pytz.timezone('Europe/London') def timestamp_to_ticks(dt): """Converts a datetime to ticks (seconds since Epoch)""" delta = (dt - datetime.datetime(1970, 1, 1)) ticks = int(delta.total_seconds()) return ticks def ticks_to_timestamp(ticks): """Converts ticks (seconds since Epoch) to a datetime""" delta = datetime.timedelta(seconds=ticks) new_timestamp = datetime.datetime(1970, 1, 1) + delta return new_timestamp def ticks_utc_now(): """Returns the current timestamp in ticks""" time_now = datetime.datetime.utcnow() ticks = int(timestamp_to_ticks(time_now)) return ticks def ticks_local_now(): time_now = datetime.datetime.now(tz=this_timezone) ticks = int(timestamp_to_ticks(time_now)) return ticks def mobile_number_string_to_int(mobile_string): """Converts mobile numbers from a string to an integer""" return int(mobile_string) def redact_mobile_number(mobile_string): """Takes a mobile number as a string, and redacts all but the last 3 digits""" return str.format('XXXXX XXX{0}', mobile_string[-3:])
// ... existing code ... import datetime import pytz this_timezone = pytz.timezone('Europe/London') def timestamp_to_ticks(dt): """Converts a datetime to ticks (seconds since Epoch)""" delta = (dt - datetime.datetime(1970, 1, 1)) ticks = int(delta.total_seconds()) return ticks def ticks_to_timestamp(ticks): """Converts ticks (seconds since Epoch) to a datetime""" delta = datetime.timedelta(seconds=ticks) new_timestamp = datetime.datetime(1970, 1, 1) + delta return new_timestamp def ticks_utc_now(): """Returns the current timestamp in ticks""" time_now = datetime.datetime.utcnow() ticks = int(timestamp_to_ticks(time_now)) return ticks def ticks_local_now(): time_now = datetime.datetime.now(tz=this_timezone) ticks = int(timestamp_to_ticks(time_now)) return ticks // ... rest of the code ...
13e86e405a3b7e2933a5f7fca14d7903f30201ee
Largest_Palindrome_Product.py
Largest_Palindrome_Product.py
def largestPalindrome(n): """ :type n: int :rtype: int """ number = "" for x in range(n): number += "9" number = int(number) palindrome = 0 upper = number + 1 lower = 0 for x in range(upper, lower, -1): for i in range(upper, lower, -1): product = x * i if product < palindrome: break elif isPalindrome(product): palindrome = product upper = x lower = i break return palindrome % 1337 n = 2 print(largestPalindrome(n))
def largestPalindrome(n): """ :type n: int :rtype: int """ number = "" for x in range(n): number += "9" number = int(number) palindrome = 0 upper = number + 1 lower = 0 for x in range(upper, lower, -1): for i in range(upper, lower, -1): product = x * i if product < palindrome: break elif isPalindrome(product): palindrome = product upper = x lower = i break return palindrome % 1337 def isPalindrome(num): """ Return True is number is Palindrome, else return False """ if str(num) == str(num)[::-1]: return True return False n = 5 print(largestPalindrome(n))
Solve Largest Palindrome Product for range of n is
Solve Largest Palindrome Product for range of n is [1,6]
Python
mit
Kunal57/Python_Algorithms
def largestPalindrome(n): """ :type n: int :rtype: int """ - number1 = "" + number = "" - number2 = "" for x in range(n): - number1 += "9" + number += "9" - number2 += "9" - number1 = int(number1) + number = int(number) - number2 = int(number2) palindrome = 0 - for x in range(number1 + 1): - for i in range(number2 + 1): + upper = number + 1 + lower = 0 + for x in range(upper, lower, -1): + for i in range(upper, lower, -1): product = x * i - if (str(product) == str(product)[::-1]) and product > palindrome: + if product < palindrome: + break + elif isPalindrome(product): palindrome = product + upper = x + lower = i + break return palindrome % 1337 + def isPalindrome(num): + """ Return True is number is Palindrome, else return False """ + if str(num) == str(num)[::-1]: + return True + return False + n = 5 + print(largestPalindrome(n)) - n = 2 - print(largestPalindrome(n))
Solve Largest Palindrome Product for range of n is
## Code Before: def largestPalindrome(n): """ :type n: int :rtype: int """ number1 = "" number2 = "" for x in range(n): number1 += "9" number2 += "9" number1 = int(number1) number2 = int(number2) palindrome = 0 for x in range(number1 + 1): for i in range(number2 + 1): product = x * i if (str(product) == str(product)[::-1]) and product > palindrome: palindrome = product return palindrome % 1337 n = 2 print(largestPalindrome(n)) ## Instruction: Solve Largest Palindrome Product for range of n is ## Code After: def largestPalindrome(n): """ :type n: int :rtype: int """ number = "" for x in range(n): number += "9" number = int(number) palindrome = 0 upper = number + 1 lower = 0 for x in range(upper, lower, -1): for i in range(upper, lower, -1): product = x * i if product < palindrome: break elif isPalindrome(product): palindrome = product upper = x lower = i break return palindrome % 1337 def isPalindrome(num): """ Return True is number is Palindrome, else return False """ if str(num) == str(num)[::-1]: return True return False n = 5 print(largestPalindrome(n))
// ... existing code ... :rtype: int """ number = "" for x in range(n): number += "9" number = int(number) palindrome = 0 upper = number + 1 lower = 0 for x in range(upper, lower, -1): for i in range(upper, lower, -1): product = x * i if product < palindrome: break elif isPalindrome(product): palindrome = product upper = x lower = i break return palindrome % 1337 def isPalindrome(num): """ Return True is number is Palindrome, else return False """ if str(num) == str(num)[::-1]: return True return False n = 5 print(largestPalindrome(n)) // ... rest of the code ...
462656f9653ae43ea69080414735927b18e0debf
stats/random_walk.py
stats/random_walk.py
import neo4j import random DEFAULT_DEPTH = 5 NUM_WALKS = 100 # Passed sorted list (desc order), return top nodes TO_RETURN = lambda x: x[:10] random.seed() def random_walk(graph, node, depth=DEFAULT_DEPTH): # Pick random neighbor neighbors = {} i = 0 for r in node.relationships().outgoing: #TODO replace with i + r['count'] neighbors[(i, i + 1)] = r.getOtherNode(node) i += 1 choice = random.range(i) for x,y in neighbors: if x <= i and i < y: return [node].extend(random_walk(graph, neighbors[(x,y)], depth-1)) def run(graph, index, node): nodes = {} for i in range(NUM_WALKS): with graph.transaction: walked_nodes = random_walk(graph, node) # Loop through nodes (that aren't the start node), count for n in filter(lambda m: m.id != node.id, walked_nodes): if nodes.has_key(n): nodes[n]++ else nodes[n] = 1 return TO_RETURN(sorted(nodes, key=nodes.__getitem__))
import neo4j import random DEFAULT_DEPTH = 5 NUM_WALKS = 100 # Passed sorted list (desc order), return top nodes TO_RETURN = lambda x: x[:10] random.seed() def random_walk(graph, node, depth=DEFAULT_DEPTH): if depth == 0: return [node] # Pick random neighbor neighbors = {} i = 0 for r in node.relationships().outgoing: neighbors[(i, i + int(r['count']))] = r.getOtherNode(node) i += int(r['count']) if i == 0: # No neighbors return [node] r = random.randrange(i) for x,y in neighbors: if x <= r and r < y: return [node] + random_walk(graph, neighbors[(x,y)], depth-1) def run(graph, index, node): nodes = {} for i in range(NUM_WALKS): with graph.transaction: walked_nodes = random_walk(graph, node) # Loop through nodes (that aren't the start node), count for n in filter(lambda m: m.id != node.id, walked_nodes): if nodes.has_key(n): nodes[n] += 1 else: nodes[n] = 1 return TO_RETURN([{'name': n['name'], 'count': nodes[n]} for n in sorted(nodes, key=nodes.__getitem__)])
Modify random walk so that it works.
Modify random walk so that it works.
Python
mit
peplin/trinity
import neo4j import random - - from logbook import Logger - log = Logger('trinity.topics') DEFAULT_DEPTH = 5 NUM_WALKS = 100 # Passed sorted list (desc order), return top nodes TO_RETURN = lambda x: x[:10] random.seed() def random_walk(graph, node, depth=DEFAULT_DEPTH): + if depth == 0: + return [node] + # Pick random neighbor neighbors = {} i = 0 for r in node.relationships().outgoing: - #TODO replace with i + r['count'] - neighbors[(i, i + 1)] = r.getOtherNode(node) + neighbors[(i, i + int(r['count']))] = r.getOtherNode(node) - i += 1 + i += int(r['count']) + if i == 0: + # No neighbors + return [node] - choice = random.range(i) + r = random.randrange(i) for x,y in neighbors: - if x <= i and i < y: + if x <= r and r < y: - return [node].extend(random_walk(graph, neighbors[(x,y)], depth-1)) + return [node] + random_walk(graph, neighbors[(x,y)], depth-1) def run(graph, index, node): nodes = {} for i in range(NUM_WALKS): with graph.transaction: walked_nodes = random_walk(graph, node) # Loop through nodes (that aren't the start node), count for n in filter(lambda m: m.id != node.id, walked_nodes): if nodes.has_key(n): - nodes[n]++ + nodes[n] += 1 - else + else: nodes[n] = 1 - return TO_RETURN(sorted(nodes, key=nodes.__getitem__)) + return TO_RETURN([{'name': n['name'], 'count': nodes[n]} + for n in sorted(nodes, key=nodes.__getitem__)])
Modify random walk so that it works.
## Code Before: import neo4j import random from logbook import Logger log = Logger('trinity.topics') DEFAULT_DEPTH = 5 NUM_WALKS = 100 # Passed sorted list (desc order), return top nodes TO_RETURN = lambda x: x[:10] random.seed() def random_walk(graph, node, depth=DEFAULT_DEPTH): # Pick random neighbor neighbors = {} i = 0 for r in node.relationships().outgoing: #TODO replace with i + r['count'] neighbors[(i, i + 1)] = r.getOtherNode(node) i += 1 choice = random.range(i) for x,y in neighbors: if x <= i and i < y: return [node].extend(random_walk(graph, neighbors[(x,y)], depth-1)) def run(graph, index, node): nodes = {} for i in range(NUM_WALKS): with graph.transaction: walked_nodes = random_walk(graph, node) # Loop through nodes (that aren't the start node), count for n in filter(lambda m: m.id != node.id, walked_nodes): if nodes.has_key(n): nodes[n]++ else nodes[n] = 1 return TO_RETURN(sorted(nodes, key=nodes.__getitem__)) ## Instruction: Modify random walk so that it works. ## Code After: import neo4j import random DEFAULT_DEPTH = 5 NUM_WALKS = 100 # Passed sorted list (desc order), return top nodes TO_RETURN = lambda x: x[:10] random.seed() def random_walk(graph, node, depth=DEFAULT_DEPTH): if depth == 0: return [node] # Pick random neighbor neighbors = {} i = 0 for r in node.relationships().outgoing: neighbors[(i, i + int(r['count']))] = r.getOtherNode(node) i += int(r['count']) if i == 0: # No neighbors return [node] r = random.randrange(i) for x,y in neighbors: if x <= r and r < y: return [node] + random_walk(graph, neighbors[(x,y)], depth-1) def run(graph, index, node): nodes = {} for i in range(NUM_WALKS): with graph.transaction: walked_nodes = random_walk(graph, node) # Loop through nodes (that aren't the start node), count for n in filter(lambda m: m.id != node.id, walked_nodes): if nodes.has_key(n): nodes[n] += 1 else: nodes[n] = 1 return TO_RETURN([{'name': n['name'], 'count': nodes[n]} for n in sorted(nodes, key=nodes.__getitem__)])
# ... existing code ... import neo4j import random # ... modified code ... def random_walk(graph, node, depth=DEFAULT_DEPTH): if depth == 0: return [node] # Pick random neighbor neighbors = {} ... i = 0 for r in node.relationships().outgoing: neighbors[(i, i + int(r['count']))] = r.getOtherNode(node) i += int(r['count']) if i == 0: # No neighbors return [node] r = random.randrange(i) for x,y in neighbors: if x <= r and r < y: return [node] + random_walk(graph, neighbors[(x,y)], depth-1) def run(graph, index, node): ... for n in filter(lambda m: m.id != node.id, walked_nodes): if nodes.has_key(n): nodes[n] += 1 else: nodes[n] = 1 return TO_RETURN([{'name': n['name'], 'count': nodes[n]} for n in sorted(nodes, key=nodes.__getitem__)]) # ... rest of the code ...
513560a051d9388cd39384860ddce6a938501080
bad.py
bad.py
from selenium import webdriver from selenium.webdriver.common.keys import Keys import time driver = webdriver.Chrome() driver.get("http://clickingbad.nullism.com/") num_cooks = 100 num_sells = 50 cook = driver.find_element_by_id('make_btn') driver.execute_script("gm.add_widgets(" + str(init_drugs) + ")") driver.execute_script("gm.add_cash(" + str(init_cash) + ")") while True: try: counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_cooks: cook.click() counter+=1 time.sleep( 1 ) counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_sells: sell.click() counter+=1 time.sleep( 1 ) except: time.sleep( 5 ) pass
from selenium import webdriver from selenium.webdriver.common.keys import Keys import time driver = webdriver.Chrome() driver.get("http://clickingbad.nullism.com/") # Amount you'd like to have in terms of cash and # drugs to start the game init_drugs = 10000 init_cash = 10000 # Number of cooks and sells to do in a row num_cooks = 500 num_sells = 500 cook = driver.find_element_by_id('make_btn') sell = driver.find_element_by_id('sell_btn') driver.execute_script("gm.add_widgets(" + str(init_drugs) + ")") driver.execute_script("gm.add_cash(" + str(init_cash) + ")") while True: try: counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_cooks: cook.click() counter+=1 counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_sells: sell.click() counter+=1 time.sleep( 1 ) except: time.sleep( 5 ) pass
Allow user to set their initial amount of cash and drugs
Allow user to set their initial amount of cash and drugs
Python
apache-2.0
brint/cheating_bad
from selenium import webdriver from selenium.webdriver.common.keys import Keys import time driver = webdriver.Chrome() driver.get("http://clickingbad.nullism.com/") + # Amount you'd like to have in terms of cash and + # drugs to start the game + init_drugs = 10000 + init_cash = 10000 + + # Number of cooks and sells to do in a row - num_cooks = 100 + num_cooks = 500 - num_sells = 50 + num_sells = 500 cook = driver.find_element_by_id('make_btn') sell = driver.find_element_by_id('sell_btn') + driver.execute_script("gm.add_widgets(" + str(init_drugs) + ")") + driver.execute_script("gm.add_cash(" + str(init_cash) + ")") while True: try: counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_cooks: cook.click() counter+=1 - time.sleep( 1 ) counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_sells: sell.click() counter+=1 time.sleep( 1 ) except: time.sleep( 5 ) pass
Allow user to set their initial amount of cash and drugs
## Code Before: from selenium import webdriver from selenium.webdriver.common.keys import Keys import time driver = webdriver.Chrome() driver.get("http://clickingbad.nullism.com/") num_cooks = 100 num_sells = 50 cook = driver.find_element_by_id('make_btn') sell = driver.find_element_by_id('sell_btn') while True: try: counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_cooks: cook.click() counter+=1 time.sleep( 1 ) counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_sells: sell.click() counter+=1 time.sleep( 1 ) except: time.sleep( 5 ) pass ## Instruction: Allow user to set their initial amount of cash and drugs ## Code After: from selenium import webdriver from selenium.webdriver.common.keys import Keys import time driver = webdriver.Chrome() driver.get("http://clickingbad.nullism.com/") # Amount you'd like to have in terms of cash and # drugs to start the game init_drugs = 10000 init_cash = 10000 # Number of cooks and sells to do in a row num_cooks = 500 num_sells = 500 cook = driver.find_element_by_id('make_btn') sell = driver.find_element_by_id('sell_btn') driver.execute_script("gm.add_widgets(" + str(init_drugs) + ")") driver.execute_script("gm.add_cash(" + str(init_cash) + ")") while True: try: counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_cooks: cook.click() counter+=1 counter = 0 driver.execute_script("window.scrollTo(0,0);") while counter < num_sells: sell.click() counter+=1 time.sleep( 1 ) except: time.sleep( 5 ) pass
// ... existing code ... driver.get("http://clickingbad.nullism.com/") # Amount you'd like to have in terms of cash and # drugs to start the game init_drugs = 10000 init_cash = 10000 # Number of cooks and sells to do in a row num_cooks = 500 num_sells = 500 cook = driver.find_element_by_id('make_btn') // ... modified code ... sell = driver.find_element_by_id('sell_btn') driver.execute_script("gm.add_widgets(" + str(init_drugs) + ")") driver.execute_script("gm.add_cash(" + str(init_cash) + ")") while True: try: ... cook.click() counter+=1 counter = 0 driver.execute_script("window.scrollTo(0,0);") // ... rest of the code ...
cd621061773b7eafcea9358c9b762663a070ccc5
cc/license/jurisdiction.py
cc/license/jurisdiction.py
import RDF import zope.interface import interfaces import rdf_helper class Jurisdiction(object): zope.interface.implements(interfaces.IJurisdiction) def __init__(self, short_name): '''@param short_name can be e.g. mx''' model = rdf_helper.init_model( rdf_helper.JURI_RDF_PATH) self.code = short_name id_uri = RDF.Uri(self.id) try: self.local_url = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'jurisdictionSite'), None) except rdf_helper.NoValuesFoundException: self.local_url = None try: self.launched = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'launched'), None) except rdf_helper.NoValuesFoundException: self.launched = None
import RDF import zope.interface import interfaces import rdf_helper class Jurisdiction(object): zope.interface.implements(interfaces.IJurisdiction) def __init__(self, short_name): """Creates an object representing a jurisdiction. short_name is a (usually) two-letter code representing the same jurisdiction; for a complete list, see cc.license.jurisdiction_codes()""" model = rdf_helper.init_model( rdf_helper.JURI_RDF_PATH) self.code = short_name self.id = 'http://creativecommons.org/international/%s/' % short_name id_uri = RDF.Uri(self.id) try: self.local_url = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'jurisdictionSite'), None) except rdf_helper.NoValuesFoundException: self.local_url = None try: self.launched = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'launched'), None) except rdf_helper.NoValuesFoundException: self.launched = None
Add documentation and make Jurisdiction calls not fail when some of the values aren't found.
Add documentation and make Jurisdiction calls not fail when some of the values aren't found.
Python
mit
creativecommons/cc.license,creativecommons/cc.license
import RDF import zope.interface import interfaces import rdf_helper class Jurisdiction(object): zope.interface.implements(interfaces.IJurisdiction) def __init__(self, short_name): - '''@param short_name can be e.g. mx''' + """Creates an object representing a jurisdiction. + short_name is a (usually) two-letter code representing + the same jurisdiction; for a complete list, see + cc.license.jurisdiction_codes()""" model = rdf_helper.init_model( rdf_helper.JURI_RDF_PATH) self.code = short_name self.id = 'http://creativecommons.org/international/%s/' % short_name id_uri = RDF.Uri(self.id) + try: - self.local_url = rdf_helper.query_to_single_value(model, + self.local_url = rdf_helper.query_to_single_value(model, - id_uri, RDF.Uri(rdf_helper.NS_CC + 'jurisdictionSite'), None) + id_uri, RDF.Uri(rdf_helper.NS_CC + 'jurisdictionSite'), None) + except rdf_helper.NoValuesFoundException: + self.local_url = None + try: - self.launched = rdf_helper.query_to_single_value(model, + self.launched = rdf_helper.query_to_single_value(model, - id_uri, RDF.Uri(rdf_helper.NS_CC + 'launched'), None) + id_uri, RDF.Uri(rdf_helper.NS_CC + 'launched'), None) + except rdf_helper.NoValuesFoundException: + self.launched = None
Add documentation and make Jurisdiction calls not fail when some of the values aren't found.
## Code Before: import RDF import zope.interface import interfaces import rdf_helper class Jurisdiction(object): zope.interface.implements(interfaces.IJurisdiction) def __init__(self, short_name): '''@param short_name can be e.g. mx''' model = rdf_helper.init_model( rdf_helper.JURI_RDF_PATH) self.code = short_name self.id = 'http://creativecommons.org/international/%s/' % short_name id_uri = RDF.Uri(self.id) self.local_url = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'jurisdictionSite'), None) self.launched = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'launched'), None) ## Instruction: Add documentation and make Jurisdiction calls not fail when some of the values aren't found. ## Code After: import RDF import zope.interface import interfaces import rdf_helper class Jurisdiction(object): zope.interface.implements(interfaces.IJurisdiction) def __init__(self, short_name): """Creates an object representing a jurisdiction. short_name is a (usually) two-letter code representing the same jurisdiction; for a complete list, see cc.license.jurisdiction_codes()""" model = rdf_helper.init_model( rdf_helper.JURI_RDF_PATH) self.code = short_name self.id = 'http://creativecommons.org/international/%s/' % short_name id_uri = RDF.Uri(self.id) try: self.local_url = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'jurisdictionSite'), None) except rdf_helper.NoValuesFoundException: self.local_url = None try: self.launched = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'launched'), None) except rdf_helper.NoValuesFoundException: self.launched = None
... zope.interface.implements(interfaces.IJurisdiction) def __init__(self, short_name): """Creates an object representing a jurisdiction. short_name is a (usually) two-letter code representing the same jurisdiction; for a complete list, see cc.license.jurisdiction_codes()""" model = rdf_helper.init_model( rdf_helper.JURI_RDF_PATH) ... self.id = 'http://creativecommons.org/international/%s/' % short_name id_uri = RDF.Uri(self.id) try: self.local_url = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'jurisdictionSite'), None) except rdf_helper.NoValuesFoundException: self.local_url = None try: self.launched = rdf_helper.query_to_single_value(model, id_uri, RDF.Uri(rdf_helper.NS_CC + 'launched'), None) except rdf_helper.NoValuesFoundException: self.launched = None ...
80326d96a8137c1d285d3c24eda15039e03dedfe
opps/contrib/logging/models.py
opps/contrib/logging/models.py
from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from opps.core.models import NotUserPublishable class Logging(NotUserPublishable): user = models.ForeignKey( settings.AUTH_USER_MODEL, null=True, blank=True) application = models.CharField( _(u"Application"), max_length=75, null=True, blank=True, db_index=True) action = models.CharField( _(u"Action"), max_length=50, null=True, blank=True, db_index=True) text = models.TextField( _(u"Text"), null=True, blank=True, db_index=True) def save(self, *args, **kwargs): self.published = True super(Logging, self).save(*args, **kwargs)
from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from opps.core.models import NotUserPublishable class Logging(NotUserPublishable): user = models.ForeignKey( settings.AUTH_USER_MODEL, null=True, blank=True, ) application = models.CharField( _(u"Application"), max_length=75, null=True, blank=True, db_index=True) action = models.CharField( _(u"Action"), max_length=50, null=True, blank=True, db_index=True) text = models.TextField( _(u"Text"), null=True, blank=True, db_index=True) def save(self, *args, **kwargs): self.published = True super(Logging, self).save(*args, **kwargs)
Add index in text field on Logging
Add index in text field on Logging
Python
mit
opps/opps,williamroot/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,williamroot/opps,opps/opps
from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from opps.core.models import NotUserPublishable class Logging(NotUserPublishable): user = models.ForeignKey( settings.AUTH_USER_MODEL, - null=True, blank=True) + null=True, blank=True, + ) application = models.CharField( _(u"Application"), max_length=75, null=True, blank=True, db_index=True) action = models.CharField( _(u"Action"), max_length=50, null=True, blank=True, db_index=True) text = models.TextField( _(u"Text"), null=True, blank=True, - ) + db_index=True) def save(self, *args, **kwargs): self.published = True super(Logging, self).save(*args, **kwargs)
Add index in text field on Logging
## Code Before: from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from opps.core.models import NotUserPublishable class Logging(NotUserPublishable): user = models.ForeignKey( settings.AUTH_USER_MODEL, null=True, blank=True) application = models.CharField( _(u"Application"), max_length=75, null=True, blank=True, db_index=True) action = models.CharField( _(u"Action"), max_length=50, null=True, blank=True, db_index=True) text = models.TextField( _(u"Text"), null=True, blank=True, ) def save(self, *args, **kwargs): self.published = True super(Logging, self).save(*args, **kwargs) ## Instruction: Add index in text field on Logging ## Code After: from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from opps.core.models import NotUserPublishable class Logging(NotUserPublishable): user = models.ForeignKey( settings.AUTH_USER_MODEL, null=True, blank=True, ) application = models.CharField( _(u"Application"), max_length=75, null=True, blank=True, db_index=True) action = models.CharField( _(u"Action"), max_length=50, null=True, blank=True, db_index=True) text = models.TextField( _(u"Text"), null=True, blank=True, db_index=True) def save(self, *args, **kwargs): self.published = True super(Logging, self).save(*args, **kwargs)
// ... existing code ... user = models.ForeignKey( settings.AUTH_USER_MODEL, null=True, blank=True, ) application = models.CharField( _(u"Application"), // ... modified code ... _(u"Text"), null=True, blank=True, db_index=True) def save(self, *args, **kwargs): // ... rest of the code ...
39f327bb9e37d6d290eb3f3179f7e79d60b5ab6d
model.py
model.py
from sqlalchemy import create_engine engine = create_engine('postgresql://wn:wn@localhost:5432/wndb') Base = declarative_base() from sqlalchemy import Column, Integer, Float, DateTime, Boolean, String class Observation(Base): __tablename__ = 'obs' id = Column(Integer, primary_key=True) station_name = Column(String) x = Column(Float) y = Column(Float) z = Column(Float) jam_indicator = Column(Boolean) jam_intensity = Column(Float) date_time = Column(DateTime) def __repr__(self): return "<Observation(station_name='%r', x='%f', y='%f', z='%f', jam_indicator='%r', jam_intensity='%f', date_time='%r')>" % ( self.station_name, self.x, self.y, self.z, self.jam_indicator, self.jam_intensity, self.date_time) Base.metadata.create_all(engine)
from sqlalchemy import create_engine engine = create_engine('postgresql://wn:wn@localhost:5432/wndb') from sqlalchemy import Column, Integer, Float, DateTime, Boolean, String, MetaData metadata = MetaData() table = Table('obs', metadata, Column(Integer, primary_key=True), Column('station_name',String), Column('x',Float), Column('y',Float), Column('z',Float), Column('jam_indicator',Boolean), Column('jam_intensity',Float), Column('date_time',DateTime) )
Switch from ORM to Core
Switch from ORM to Core
Python
mit
labhack/whiskeynovember,labhack/whiskeynovember,labhack/whiskeynovember
- from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import create_engine engine = create_engine('postgresql://wn:wn@localhost:5432/wndb') - Base = declarative_base() + from sqlalchemy import Column, Integer, Float, DateTime, Boolean, String, MetaData - from sqlalchemy import Column, Integer, Float, DateTime, Boolean, String - class Observation(Base): - __tablename__ = 'obs' + metadata = MetaData() + table = Table('obs', metadata, Column(Integer, primary_key=True), + Column('station_name',String), + Column('x',Float), + Column('y',Float), + Column('z',Float), + Column('jam_indicator',Boolean), + Column('jam_intensity',Float), + Column('date_time',DateTime) + ) - id = Column(Integer, primary_key=True) - station_name = Column(String) - x = Column(Float) - y = Column(Float) - z = Column(Float) - jam_indicator = Column(Boolean) - jam_intensity = Column(Float) - date_time = Column(DateTime) - - def __repr__(self): - return "<Observation(station_name='%r', x='%f', y='%f', z='%f', jam_indicator='%r', jam_intensity='%f', date_time='%r')>" % ( - self.station_name, self.x, self.y, self.z, self.jam_indicator, self.jam_intensity, self.date_time) - - Base.metadata.create_all(engine) - -
Switch from ORM to Core
## Code Before: from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import create_engine engine = create_engine('postgresql://wn:wn@localhost:5432/wndb') Base = declarative_base() from sqlalchemy import Column, Integer, Float, DateTime, Boolean, String class Observation(Base): __tablename__ = 'obs' id = Column(Integer, primary_key=True) station_name = Column(String) x = Column(Float) y = Column(Float) z = Column(Float) jam_indicator = Column(Boolean) jam_intensity = Column(Float) date_time = Column(DateTime) def __repr__(self): return "<Observation(station_name='%r', x='%f', y='%f', z='%f', jam_indicator='%r', jam_intensity='%f', date_time='%r')>" % ( self.station_name, self.x, self.y, self.z, self.jam_indicator, self.jam_intensity, self.date_time) Base.metadata.create_all(engine) ## Instruction: Switch from ORM to Core ## Code After: from sqlalchemy import create_engine engine = create_engine('postgresql://wn:wn@localhost:5432/wndb') from sqlalchemy import Column, Integer, Float, DateTime, Boolean, String, MetaData metadata = MetaData() table = Table('obs', metadata, Column(Integer, primary_key=True), Column('station_name',String), Column('x',Float), Column('y',Float), Column('z',Float), Column('jam_indicator',Boolean), Column('jam_intensity',Float), Column('date_time',DateTime) )
# ... existing code ... from sqlalchemy import create_engine # ... modified code ... engine = create_engine('postgresql://wn:wn@localhost:5432/wndb') from sqlalchemy import Column, Integer, Float, DateTime, Boolean, String, MetaData metadata = MetaData() table = Table('obs', metadata, Column(Integer, primary_key=True), Column('station_name',String), Column('x',Float), Column('y',Float), Column('z',Float), Column('jam_indicator',Boolean), Column('jam_intensity',Float), Column('date_time',DateTime) ) # ... rest of the code ...
abb1d2db9052391c78fb09952b58a5331046aae5
pylinks/links/tests.py
pylinks/links/tests.py
from django.test import TestCase from .models import Category, Link class CategoryModelTests(TestCase): def test_category_sort(self): Category(title='Test 2', slug='test2').save() Category(title='Test 1', slug='test1').save() self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all())) class LinkModelTests(TestCase): def setUp(self): self.url = 'https://github.com/' self.link = Link(title='GitHub', url=self.url) def test_track_link(self): self.assertEqual(self.link.get_absolute_url(), self.url) self.link.save() self.assertEqual(self.link.visits, 0) self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id) self.assertEqual(str(self.link), 'GitHub') def test_increment_visits(self): self.link.save() client = Client() response = client.get('/links/go/%d/' % self.link.id) self.assertEqual(response.status_code, 302) self.assertEqual(response['Location'], self.link.url) self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1)
from django.test import Client, TestCase from .models import Category, Link class CategoryModelTests(TestCase): def test_category_sort(self): Category(title='Test 2', slug='test2').save() Category(title='Test 1', slug='test1').save() self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all())) class LinkModelTests(TestCase): def setUp(self): self.url = 'https://github.com/' self.link = Link(title='GitHub', url=self.url) def test_track_link(self): self.assertEqual(self.link.get_absolute_url(), self.url) self.link.save() self.assertEqual(self.link.visits, 0) self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id) def test_link_title(self): self.assertEqual(str(self.link), 'GitHub') def test_increment_visits(self): self.link.save() client = Client() response = client.get('/links/go/%d/' % self.link.id) self.assertEqual(response.status_code, 302) self.assertEqual(response['Location'], self.link.url) self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1)
Add test for link redirect
Add test for link redirect
Python
mit
michaelmior/pylinks,michaelmior/pylinks,michaelmior/pylinks
- from django.test import TestCase + from django.test import Client, TestCase from .models import Category, Link class CategoryModelTests(TestCase): def test_category_sort(self): Category(title='Test 2', slug='test2').save() Category(title='Test 1', slug='test1').save() self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all())) class LinkModelTests(TestCase): def setUp(self): self.url = 'https://github.com/' self.link = Link(title='GitHub', url=self.url) def test_track_link(self): self.assertEqual(self.link.get_absolute_url(), self.url) self.link.save() self.assertEqual(self.link.visits, 0) self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id) def test_link_title(self): self.assertEqual(str(self.link), 'GitHub') + def test_increment_visits(self): + self.link.save() + client = Client() + response = client.get('/links/go/%d/' % self.link.id) + self.assertEqual(response.status_code, 302) + self.assertEqual(response['Location'], self.link.url) + self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1) +
Add test for link redirect
## Code Before: from django.test import TestCase from .models import Category, Link class CategoryModelTests(TestCase): def test_category_sort(self): Category(title='Test 2', slug='test2').save() Category(title='Test 1', slug='test1').save() self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all())) class LinkModelTests(TestCase): def setUp(self): self.url = 'https://github.com/' self.link = Link(title='GitHub', url=self.url) def test_track_link(self): self.assertEqual(self.link.get_absolute_url(), self.url) self.link.save() self.assertEqual(self.link.visits, 0) self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id) def test_link_title(self): self.assertEqual(str(self.link), 'GitHub') ## Instruction: Add test for link redirect ## Code After: from django.test import Client, TestCase from .models import Category, Link class CategoryModelTests(TestCase): def test_category_sort(self): Category(title='Test 2', slug='test2').save() Category(title='Test 1', slug='test1').save() self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all())) class LinkModelTests(TestCase): def setUp(self): self.url = 'https://github.com/' self.link = Link(title='GitHub', url=self.url) def test_track_link(self): self.assertEqual(self.link.get_absolute_url(), self.url) self.link.save() self.assertEqual(self.link.visits, 0) self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id) def test_link_title(self): self.assertEqual(str(self.link), 'GitHub') def test_increment_visits(self): self.link.save() client = Client() response = client.get('/links/go/%d/' % self.link.id) self.assertEqual(response.status_code, 302) self.assertEqual(response['Location'], self.link.url) self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1)
# ... existing code ... from django.test import Client, TestCase from .models import Category, Link # ... modified code ... def test_link_title(self): self.assertEqual(str(self.link), 'GitHub') def test_increment_visits(self): self.link.save() client = Client() response = client.get('/links/go/%d/' % self.link.id) self.assertEqual(response.status_code, 302) self.assertEqual(response['Location'], self.link.url) self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1) # ... rest of the code ...
40fc5c555e471f28959cbe3ad7d929636384595a
casexml/apps/stock/utils.py
casexml/apps/stock/utils.py
UNDERSTOCK_THRESHOLD = 0.5 # months OVERSTOCK_THRESHOLD = 2. # months def months_of_stock_remaining(stock, daily_consumption): if daily_consumption: return stock / Decimal((daily_consumption * 30)) else: return None def stock_category(stock, daily_consumption): if stock is None: return 'nodata' elif stock == 0: return 'stockout' elif daily_consumption is None: return 'nodata' elif daily_consumption == 0: return 'overstock' months_left = months_of_stock_remaining(stock, daily_consumption) if months_left is None: return 'nodata' elif months_left < UNDERSTOCK_THRESHOLD: return 'understock' elif months_left > OVERSTOCK_THRESHOLD: return 'overstock' else: return 'adequate'
from decimal import Decimal UNDERSTOCK_THRESHOLD = 0.5 # months OVERSTOCK_THRESHOLD = 2. # months def months_of_stock_remaining(stock, daily_consumption): if daily_consumption: return stock / Decimal((daily_consumption * 30)) else: return None def stock_category(stock, daily_consumption): if stock is None: return 'nodata' elif stock == 0: return 'stockout' elif daily_consumption is None: return 'nodata' elif daily_consumption == 0: return 'overstock' months_left = months_of_stock_remaining(stock, daily_consumption) if months_left is None: return 'nodata' elif months_left < UNDERSTOCK_THRESHOLD: return 'understock' elif months_left > OVERSTOCK_THRESHOLD: return 'overstock' else: return 'adequate'
Fix error handling on aggregate status report
Fix error handling on aggregate status report Previously the catch block was a little too aggressive. It was swallowing a real error (since aggregate reports pass a float, not a decimal). Now we prevent the original possible errors by converting no matter what the type is and checking for zero/null values first.
Python
bsd-3-clause
dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq
+ from decimal import Decimal + UNDERSTOCK_THRESHOLD = 0.5 # months OVERSTOCK_THRESHOLD = 2. # months def months_of_stock_remaining(stock, daily_consumption): - try: + if daily_consumption: - return stock / (daily_consumption * 30) + return stock / Decimal((daily_consumption * 30)) - except (TypeError, ZeroDivisionError): + else: return None def stock_category(stock, daily_consumption): if stock is None: return 'nodata' elif stock == 0: return 'stockout' elif daily_consumption is None: return 'nodata' elif daily_consumption == 0: return 'overstock' months_left = months_of_stock_remaining(stock, daily_consumption) if months_left is None: return 'nodata' elif months_left < UNDERSTOCK_THRESHOLD: return 'understock' elif months_left > OVERSTOCK_THRESHOLD: return 'overstock' else: return 'adequate'
Fix error handling on aggregate status report
## Code Before: UNDERSTOCK_THRESHOLD = 0.5 # months OVERSTOCK_THRESHOLD = 2. # months def months_of_stock_remaining(stock, daily_consumption): try: return stock / (daily_consumption * 30) except (TypeError, ZeroDivisionError): return None def stock_category(stock, daily_consumption): if stock is None: return 'nodata' elif stock == 0: return 'stockout' elif daily_consumption is None: return 'nodata' elif daily_consumption == 0: return 'overstock' months_left = months_of_stock_remaining(stock, daily_consumption) if months_left is None: return 'nodata' elif months_left < UNDERSTOCK_THRESHOLD: return 'understock' elif months_left > OVERSTOCK_THRESHOLD: return 'overstock' else: return 'adequate' ## Instruction: Fix error handling on aggregate status report ## Code After: from decimal import Decimal UNDERSTOCK_THRESHOLD = 0.5 # months OVERSTOCK_THRESHOLD = 2. # months def months_of_stock_remaining(stock, daily_consumption): if daily_consumption: return stock / Decimal((daily_consumption * 30)) else: return None def stock_category(stock, daily_consumption): if stock is None: return 'nodata' elif stock == 0: return 'stockout' elif daily_consumption is None: return 'nodata' elif daily_consumption == 0: return 'overstock' months_left = months_of_stock_remaining(stock, daily_consumption) if months_left is None: return 'nodata' elif months_left < UNDERSTOCK_THRESHOLD: return 'understock' elif months_left > OVERSTOCK_THRESHOLD: return 'overstock' else: return 'adequate'
# ... existing code ... from decimal import Decimal UNDERSTOCK_THRESHOLD = 0.5 # months OVERSTOCK_THRESHOLD = 2. # months # ... modified code ... def months_of_stock_remaining(stock, daily_consumption): if daily_consumption: return stock / Decimal((daily_consumption * 30)) else: return None # ... rest of the code ...
7d3ffe4582a5b4032f9a59a3ea8edfded57a7a1f
src/nodeconductor_openstack/openstack/migrations/0031_tenant_backup_storage.py
src/nodeconductor_openstack/openstack/migrations/0031_tenant_backup_storage.py
from __future__ import unicode_literals from django.db import migrations from .. import models def cleanup_tenant_quotas(apps, schema_editor): for obj in models.Tenant.objects.all(): quotas_names = models.Tenant.QUOTAS_NAMES + [f.name for f in models.Tenant.get_quotas_fields()] obj.quotas.exclude(name__in=quotas_names).delete() class Migration(migrations.Migration): dependencies = [ ('openstack', '0030_subnet_dns_nameservers'), ] operations = [ migrations.RunPython(cleanup_tenant_quotas), ]
from __future__ import unicode_literals from django.db import migrations from .. import models def cleanup_tenant_quotas(apps, schema_editor): quota_names = models.Tenant.get_quotas_names() for obj in models.Tenant.objects.all(): obj.quotas.exclude(name__in=quota_names).delete() class Migration(migrations.Migration): dependencies = [ ('openstack', '0030_subnet_dns_nameservers'), ] operations = [ migrations.RunPython(cleanup_tenant_quotas), ]
Clean up quota cleanup migration
Clean up quota cleanup migration [WAL-433]
Python
mit
opennode/nodeconductor-openstack
from __future__ import unicode_literals - from django.contrib.contenttypes.models import ContentType from django.db import migrations - - from nodeconductor.quotas import models as quotas_models from .. import models def cleanup_tenant_quotas(apps, schema_editor): + quota_names = models.Tenant.get_quotas_names() for obj in models.Tenant.objects.all(): - quotas_names = models.Tenant.QUOTAS_NAMES + [f.name for f in models.Tenant.get_quotas_fields()] - obj.quotas.exclude(name__in=quotas_names).delete() + obj.quotas.exclude(name__in=quota_names).delete() class Migration(migrations.Migration): dependencies = [ ('openstack', '0030_subnet_dns_nameservers'), ] operations = [ migrations.RunPython(cleanup_tenant_quotas), ]
Clean up quota cleanup migration
## Code Before: from __future__ import unicode_literals from django.contrib.contenttypes.models import ContentType from django.db import migrations from nodeconductor.quotas import models as quotas_models from .. import models def cleanup_tenant_quotas(apps, schema_editor): for obj in models.Tenant.objects.all(): quotas_names = models.Tenant.QUOTAS_NAMES + [f.name for f in models.Tenant.get_quotas_fields()] obj.quotas.exclude(name__in=quotas_names).delete() class Migration(migrations.Migration): dependencies = [ ('openstack', '0030_subnet_dns_nameservers'), ] operations = [ migrations.RunPython(cleanup_tenant_quotas), ] ## Instruction: Clean up quota cleanup migration ## Code After: from __future__ import unicode_literals from django.db import migrations from .. import models def cleanup_tenant_quotas(apps, schema_editor): quota_names = models.Tenant.get_quotas_names() for obj in models.Tenant.objects.all(): obj.quotas.exclude(name__in=quota_names).delete() class Migration(migrations.Migration): dependencies = [ ('openstack', '0030_subnet_dns_nameservers'), ] operations = [ migrations.RunPython(cleanup_tenant_quotas), ]
# ... existing code ... from __future__ import unicode_literals from django.db import migrations from .. import models # ... modified code ... def cleanup_tenant_quotas(apps, schema_editor): quota_names = models.Tenant.get_quotas_names() for obj in models.Tenant.objects.all(): obj.quotas.exclude(name__in=quota_names).delete() # ... rest of the code ...
dc786699618e6ebc1206080d9c0fdb697d519668
pydy/viz/server.py
pydy/viz/server.py
import os import webbrowser import BaseHTTPServer from SimpleHTTPServer import SimpleHTTPRequestHandler __all__ = ['run_server'] def run_server(port=8000,scene_file="Null"): #change dir to static first. os.chdir("static/") HandlerClass = SimpleHTTPRequestHandler ServerClass = HTTPServer Protocol = "HTTP/1.0" server_address = ('127.0.0.1', port) HandlerClass.protocol_version = Protocol httpd = ServerClass(server_address, HandlerClass) sa = httpd.socket.getsockname() print("Serving HTTP on", sa[0], "port", sa[1], "...") print("hit ctrl+c to stop the server..") print("To view visualization, open:\n") url = "http://localhost:"+ str(sa[1]) + "/index.html?load=" + scene_file print(url) webbrowser.open(url) httpd.serve_forever() if __name__ == "__main__": run_server()
import os import sys import webbrowser if sys.version_info < (3, 0): from SimpleHTTPServer import SimpleHTTPRequestHandler from BaseHTTPServer import HTTPServer else: from http.server import SimpleHTTPRequestHandler from http.server import HTTPServer __all__ = ['run_server'] def run_server(port=8000,scene_file="Null"): #change dir to static first. os.chdir("static/") HandlerClass = SimpleHTTPRequestHandler ServerClass = HTTPServer Protocol = "HTTP/1.0" server_address = ('127.0.0.1', port) HandlerClass.protocol_version = Protocol httpd = ServerClass(server_address, HandlerClass) sa = httpd.socket.getsockname() print("Serving HTTP on", sa[0], "port", sa[1], "...") print("hit ctrl+c to stop the server..") print("To view visualization, open:\n") url = "http://localhost:"+ str(sa[1]) + "/index.html?load=" + scene_file print(url) webbrowser.open(url) httpd.serve_forever() if __name__ == "__main__": run_server()
Fix HTTPServer imports with Python 3
Fix HTTPServer imports with Python 3
Python
bsd-3-clause
Shekharrajak/pydy,Shekharrajak/pydy,skidzo/pydy,skidzo/pydy,oliverlee/pydy,Shekharrajak/pydy,oliverlee/pydy,skidzo/pydy,skidzo/pydy,Shekharrajak/pydy,oliverlee/pydy
import os + import sys import webbrowser - import BaseHTTPServer + if sys.version_info < (3, 0): - from SimpleHTTPServer import SimpleHTTPRequestHandler + from SimpleHTTPServer import SimpleHTTPRequestHandler + from BaseHTTPServer import HTTPServer + else: + from http.server import SimpleHTTPRequestHandler + from http.server import HTTPServer + __all__ = ['run_server'] def run_server(port=8000,scene_file="Null"): #change dir to static first. os.chdir("static/") HandlerClass = SimpleHTTPRequestHandler - ServerClass = BaseHTTPServer.HTTPServer + ServerClass = HTTPServer Protocol = "HTTP/1.0" server_address = ('127.0.0.1', port) HandlerClass.protocol_version = Protocol httpd = ServerClass(server_address, HandlerClass) sa = httpd.socket.getsockname() print("Serving HTTP on", sa[0], "port", sa[1], "...") print("hit ctrl+c to stop the server..") print("To view visualization, open:\n") url = "http://localhost:"+ str(sa[1]) + "/index.html?load=" + scene_file print(url) webbrowser.open(url) httpd.serve_forever() if __name__ == "__main__": run_server()
Fix HTTPServer imports with Python 3
## Code Before: import os import webbrowser import BaseHTTPServer from SimpleHTTPServer import SimpleHTTPRequestHandler __all__ = ['run_server'] def run_server(port=8000,scene_file="Null"): #change dir to static first. os.chdir("static/") HandlerClass = SimpleHTTPRequestHandler ServerClass = BaseHTTPServer.HTTPServer Protocol = "HTTP/1.0" server_address = ('127.0.0.1', port) HandlerClass.protocol_version = Protocol httpd = ServerClass(server_address, HandlerClass) sa = httpd.socket.getsockname() print("Serving HTTP on", sa[0], "port", sa[1], "...") print("hit ctrl+c to stop the server..") print("To view visualization, open:\n") url = "http://localhost:"+ str(sa[1]) + "/index.html?load=" + scene_file print(url) webbrowser.open(url) httpd.serve_forever() if __name__ == "__main__": run_server() ## Instruction: Fix HTTPServer imports with Python 3 ## Code After: import os import sys import webbrowser if sys.version_info < (3, 0): from SimpleHTTPServer import SimpleHTTPRequestHandler from BaseHTTPServer import HTTPServer else: from http.server import SimpleHTTPRequestHandler from http.server import HTTPServer __all__ = ['run_server'] def run_server(port=8000,scene_file="Null"): #change dir to static first. os.chdir("static/") HandlerClass = SimpleHTTPRequestHandler ServerClass = HTTPServer Protocol = "HTTP/1.0" server_address = ('127.0.0.1', port) HandlerClass.protocol_version = Protocol httpd = ServerClass(server_address, HandlerClass) sa = httpd.socket.getsockname() print("Serving HTTP on", sa[0], "port", sa[1], "...") print("hit ctrl+c to stop the server..") print("To view visualization, open:\n") url = "http://localhost:"+ str(sa[1]) + "/index.html?load=" + scene_file print(url) webbrowser.open(url) httpd.serve_forever() if __name__ == "__main__": run_server()
# ... existing code ... import os import sys import webbrowser if sys.version_info < (3, 0): from SimpleHTTPServer import SimpleHTTPRequestHandler from BaseHTTPServer import HTTPServer else: from http.server import SimpleHTTPRequestHandler from http.server import HTTPServer __all__ = ['run_server'] # ... modified code ... os.chdir("static/") HandlerClass = SimpleHTTPRequestHandler ServerClass = HTTPServer Protocol = "HTTP/1.0" server_address = ('127.0.0.1', port) # ... rest of the code ...
8cac0c660eee774c32b87d2511e4d2eeddf0ffe8
scripts/slave/chromium/dart_buildbot_run.py
scripts/slave/chromium/dart_buildbot_run.py
import sys def main(): builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='') is_release_bot = builder_name.startswith('release') script = '' if is_release_bot: script = 'src/dartium_tools/buildbot_release_annotated_steps.py' else: script = 'src/dartium_tools/buildbot_annotated_steps.py' return chromium_utils.RunCommand([sys.executable, script]) if __name__ == '__main__': sys.exit(main())
import os import sys from common import chromium_utils def main(): builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='') is_release_bot = builder_name.startswith('release') script = '' if is_release_bot: script = 'src/dartium_tools/buildbot_release_annotated_steps.py' else: script = 'src/dartium_tools/buildbot_annotated_steps.py' return chromium_utils.RunCommand([sys.executable, script]) if __name__ == '__main__': sys.exit(main())
Call dartium_tools/buildbot_annotated_steps.py directly, there is no need for moving this as part of the dartium patching process.
Call dartium_tools/buildbot_annotated_steps.py directly, there is no need for moving this as part of the dartium patching process. Additionally, start calling a new script for release builds (there are none yet, but this is what will be used to build the sdk and editor) TBR=foo Review URL: https://chromiumcodereview.appspot.com/11466003 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@171512 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
+ import os import sys from common import chromium_utils + def main(): + builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='') + is_release_bot = builder_name.startswith('release') + script = '' + if is_release_bot: + script = 'src/dartium_tools/buildbot_release_annotated_steps.py' + else: + script = 'src/dartium_tools/buildbot_annotated_steps.py' + return chromium_utils.RunCommand([sys.executable, script]) - def main(): - return chromium_utils.RunCommand( - [sys.executable, - 'src/build/buildbot_annotated_steps.py', - ]) if __name__ == '__main__': sys.exit(main())
Call dartium_tools/buildbot_annotated_steps.py directly, there is no need for moving this as part of the dartium patching process.
## Code Before: import sys from common import chromium_utils def main(): return chromium_utils.RunCommand( [sys.executable, 'src/build/buildbot_annotated_steps.py', ]) if __name__ == '__main__': sys.exit(main()) ## Instruction: Call dartium_tools/buildbot_annotated_steps.py directly, there is no need for moving this as part of the dartium patching process. ## Code After: import os import sys from common import chromium_utils def main(): builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='') is_release_bot = builder_name.startswith('release') script = '' if is_release_bot: script = 'src/dartium_tools/buildbot_release_annotated_steps.py' else: script = 'src/dartium_tools/buildbot_annotated_steps.py' return chromium_utils.RunCommand([sys.executable, script]) if __name__ == '__main__': sys.exit(main())
... import os import sys ... from common import chromium_utils def main(): builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='') is_release_bot = builder_name.startswith('release') script = '' if is_release_bot: script = 'src/dartium_tools/buildbot_release_annotated_steps.py' else: script = 'src/dartium_tools/buildbot_annotated_steps.py' return chromium_utils.RunCommand([sys.executable, script]) if __name__ == '__main__': ...
50224b985a2215b8598f274efd33fc5c20054417
tests/test_str.py
tests/test_str.py
import pytest from hypothesis import given from hypothesis.strategies import lists, text from datatyping.datatyping import validate @given(ss=lists(text())) def test_simple(ss): assert validate([str], ss) is None @given(not_string=integers()) def test_simple_error(not_string): with pytest.raises(TypeError): validate(str, not_string)
import pytest from hypothesis import given from hypothesis.strategies import integers, text from datatyping.datatyping import validate @given(string=text()) def test_simple(string): assert validate(str, string) is None @given(not_string=integers()) def test_simple_error(not_string): with pytest.raises(TypeError): validate(str, not_string)
Rewrite str tests with hypothesis Remove lists from testing
Rewrite str tests with hypothesis Remove lists from testing
Python
mit
Zaab1t/datatyping
import pytest from hypothesis import given - from hypothesis.strategies import lists, text + from hypothesis.strategies import integers, text + from datatyping.datatyping import validate - @given(ss=lists(text())) + @given(string=text()) - def test_simple(ss): + def test_simple(string): - assert validate([str], ss) is None + assert validate(str, string) is None - @given(s=text()) + @given(not_string=integers()) - def test_simple_error(s): + def test_simple_error(not_string): with pytest.raises(TypeError): - validate([str], s) + validate(str, not_string) - - @given(ss=lists(lists(text()))) - def test_nested(ss): - assert validate([[str]], ss) is None -
Rewrite str tests with hypothesis Remove lists from testing
## Code Before: import pytest from hypothesis import given from hypothesis.strategies import lists, text from datatyping.datatyping import validate @given(ss=lists(text())) def test_simple(ss): assert validate([str], ss) is None @given(s=text()) def test_simple_error(s): with pytest.raises(TypeError): validate([str], s) @given(ss=lists(lists(text()))) def test_nested(ss): assert validate([[str]], ss) is None ## Instruction: Rewrite str tests with hypothesis Remove lists from testing ## Code After: import pytest from hypothesis import given from hypothesis.strategies import integers, text from datatyping.datatyping import validate @given(string=text()) def test_simple(string): assert validate(str, string) is None @given(not_string=integers()) def test_simple_error(not_string): with pytest.raises(TypeError): validate(str, not_string)
... import pytest from hypothesis import given from hypothesis.strategies import integers, text from datatyping.datatyping import validate @given(string=text()) def test_simple(string): assert validate(str, string) is None @given(not_string=integers()) def test_simple_error(not_string): with pytest.raises(TypeError): validate(str, not_string) ...