Dataset columns:

| Column        | Type          | Stats              |
|---------------|---------------|--------------------|
| commit        | stringlengths | min 40, max 40     |
| old_file      | stringlengths | min 4, max 118     |
| new_file      | stringlengths | min 4, max 118     |
| old_contents  | stringlengths | min 10, max 3.52k  |
| new_contents  | stringlengths | min 21, max 3.18k  |
| subject       | stringlengths | min 16, max 444    |
| message       | stringlengths | min 17, max 2.63k  |
| lang          | stringclasses | 1 value            |
| license       | stringclasses | 13 values          |
| repos         | stringlengths | min 7, max 43k     |
| ndiff         | stringlengths | min 52, max 3.32k  |
| instruction   | stringlengths | min 16, max 444    |
| content       | stringlengths | min 133, max 4.32k |
| fuzzy_diff    | stringlengths | min 17, max 3.24k  |
b8688879de84b405d8c54add3ca793df54e2f39a
bin/finnpos-restore-lemma.py
bin/finnpos-restore-lemma.py
from sys import stdin for line in stdin: line = line.strip() if line == '': print('') else: wf, feats, lemma, label, ann = line.split('\t') lemmas = ann if ann.find(' ') != -1: lemmas = ann[:ann.find(' ')] ann = [ann.find(' '):] lemma_dict = dict(eval(ann)) if label in lemma_dict: lemma = lemma_dict[label] lemma = lemma.lower() lemma = lemma.replace('#','') print('%s\t%s\t%s\t%s\t%s' % (wf, feats, lemma, label, ann))
from sys import stdin def part_count(lemma): return lemma.count('#') def compile_dict(label_lemma_pairs): res = {} for label, lemma in label_lemma_pairs: if label in res: old_lemma = res[label] if part_count(old_lemma) > part_count(lemma): res[label] = lemma else: res[label] = lemma return res for line in stdin: line = line.strip() if line == '': print('') else: wf, feats, lemma, label, ann = line.split('\t') lemmas = ann if ann.find(' ') != -1: lemmas = ann[:ann.find(' ')] ann = ann[ann.find(' ') + 1:] else: ann = '_' lemma_dict = {} if lemmas != '_': lemma_dict = compile_dict(eval(lemmas)) if label in lemma_dict: lemma = lemma_dict[label] lemma = lemma.lower() lemma = lemma.replace('#','') print('%s\t%s\t%s\t%s\t%s' % (wf, feats, lemma, label, ann))
Choose lemma with fewest parts.
Choose lemma with fewest parts.
Python
apache-2.0
mpsilfve/FinnPos,mpsilfve/FinnPos,mpsilfve/FinnPos,mpsilfve/FinnPos
from sys import stdin + + def part_count(lemma): + return lemma.count('#') + + def compile_dict(label_lemma_pairs): + res = {} + + for label, lemma in label_lemma_pairs: + if label in res: + old_lemma = res[label] + + if part_count(old_lemma) > part_count(lemma): + res[label] = lemma + else: + res[label] = lemma + + return res for line in stdin: line = line.strip() if line == '': print('') else: wf, feats, lemma, label, ann = line.split('\t') lemmas = ann if ann.find(' ') != -1: lemmas = ann[:ann.find(' ')] - ann = [ann.find(' '):] + ann = ann[ann.find(' ') + 1:] - + else: + ann = '_' + + lemma_dict = {} + if lemmas != '_': - lemma_dict = dict(eval(ann)) + lemma_dict = compile_dict(eval(lemmas)) if label in lemma_dict: lemma = lemma_dict[label] lemma = lemma.lower() lemma = lemma.replace('#','') print('%s\t%s\t%s\t%s\t%s' % (wf, feats, lemma, label, ann))
Choose lemma with fewest parts.
## Code Before: from sys import stdin for line in stdin: line = line.strip() if line == '': print('') else: wf, feats, lemma, label, ann = line.split('\t') lemmas = ann if ann.find(' ') != -1: lemmas = ann[:ann.find(' ')] ann = [ann.find(' '):] lemma_dict = dict(eval(ann)) if label in lemma_dict: lemma = lemma_dict[label] lemma = lemma.lower() lemma = lemma.replace('#','') print('%s\t%s\t%s\t%s\t%s' % (wf, feats, lemma, label, ann)) ## Instruction: Choose lemma with fewest parts. ## Code After: from sys import stdin def part_count(lemma): return lemma.count('#') def compile_dict(label_lemma_pairs): res = {} for label, lemma in label_lemma_pairs: if label in res: old_lemma = res[label] if part_count(old_lemma) > part_count(lemma): res[label] = lemma else: res[label] = lemma return res for line in stdin: line = line.strip() if line == '': print('') else: wf, feats, lemma, label, ann = line.split('\t') lemmas = ann if ann.find(' ') != -1: lemmas = ann[:ann.find(' ')] ann = ann[ann.find(' ') + 1:] else: ann = '_' lemma_dict = {} if lemmas != '_': lemma_dict = compile_dict(eval(lemmas)) if label in lemma_dict: lemma = lemma_dict[label] lemma = lemma.lower() lemma = lemma.replace('#','') print('%s\t%s\t%s\t%s\t%s' % (wf, feats, lemma, label, ann))
# ... existing code ... from sys import stdin def part_count(lemma): return lemma.count('#') def compile_dict(label_lemma_pairs): res = {} for label, lemma in label_lemma_pairs: if label in res: old_lemma = res[label] if part_count(old_lemma) > part_count(lemma): res[label] = lemma else: res[label] = lemma return res for line in stdin: # ... modified code ... if ann.find(' ') != -1: lemmas = ann[:ann.find(' ')] ann = ann[ann.find(' ') + 1:] else: ann = '_' lemma_dict = {} if lemmas != '_': lemma_dict = compile_dict(eval(lemmas)) if label in lemma_dict: # ... rest of the code ...
375513808e3fa83ff23de942aeedbd0d9cc4d1c2
tests/test_h5py.py
tests/test_h5py.py
import h5py import bitshuffle.h5 import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), compression=bitshuffle.h5.H5FILTER, compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4), dtype='float32', ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close()
import h5py import hdf5plugin import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), dtype='float32', **hdf5plugin.Bitshuffle(nelems=0, lz4=True) ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close()
Change bitshuffle call to hdf5plugin
Change bitshuffle call to hdf5plugin
Python
bsd-3-clause
UCBerkeleySETI/blimpy,UCBerkeleySETI/blimpy
import h5py - import bitshuffle.h5 + import hdf5plugin import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), - compression=bitshuffle.h5.H5FILTER, - compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4), dtype='float32', + **hdf5plugin.Bitshuffle(nelems=0, lz4=True) ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close()
Change bitshuffle call to hdf5plugin
## Code Before: import h5py import bitshuffle.h5 import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), compression=bitshuffle.h5.H5FILTER, compression_opts=(block_size, bitshuffle.h5.H5_COMPRESS_LZ4), dtype='float32', ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close() ## Instruction: Change bitshuffle call to hdf5plugin ## Code After: import h5py import hdf5plugin import numpy import tempfile def test_is_h5py_correctly_installed(): """ If this test fails you probably need to install h5py from source manually: $ pip install --no-binary=h5py h5py """ f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w") block_size = 0 dataset = f.create_dataset( "data", (100, 100, 100), dtype='float32', **hdf5plugin.Bitshuffle(nelems=0, lz4=True) ) array = numpy.random.rand(100, 100, 100) array = array.astype('float32') dataset[:] = array f.close()
# ... existing code ... import h5py import hdf5plugin import numpy import tempfile # ... modified code ... "data", (100, 100, 100), dtype='float32', **hdf5plugin.Bitshuffle(nelems=0, lz4=True) ) # ... rest of the code ...
ce3c7daff5eaaf8eefecf3f4e5bd9fbca40a7a2a
cob/subsystems/tasks_subsystem.py
cob/subsystems/tasks_subsystem.py
import os import logbook from .base import SubsystemBase _logger = logbook.Logger(__name__) class TasksSubsystem(SubsystemBase): NAME = 'tasks' def activate(self, flask_app): from ..celery.app import celery_app self._config = self.project.config.get('celery', {}) # ensure critical celery config exists self._config.setdefault('broker_url', 'amqp://guest:guest@localhost/') override_broker_url = os.environ.get('COB_CELERY_BROKER_URL') if override_broker_url is not None: self._config['broker_url'] = override_broker_url celery_app.conf.broker_url = self._config['broker_url'] self.queues = set() def get_queue_names(self): names = {queue_name for grain in self.grains for queue_name in grain.config.get('queue_names', [])} names.add('celery') return sorted(names) def configure_grain(self, grain, flask_app): _ = grain.load() def configure_app(self, flask_app): super().configure_app(flask_app) from ..celery.app import celery_app for task in celery_app.tasks.values(): queue_name = getattr(task, 'queue', None) if queue_name is not None: self.queues.add(queue_name) def iter_locations(self): return None
import os import logbook from .base import SubsystemBase _logger = logbook.Logger(__name__) class TasksSubsystem(SubsystemBase): NAME = 'tasks' def activate(self, flask_app): from ..celery.app import celery_app self._config = self.project.config.get('celery', {}) # ensure critical celery config exists self._config.setdefault('broker_url', 'amqp://guest:guest@localhost/') override_broker_url = os.environ.get('COB_CELERY_BROKER_URL') if override_broker_url is not None: self._config['broker_url'] = override_broker_url celery_app.conf.update(self._config) self.queues = set() def get_queue_names(self): names = {queue_name for grain in self.grains for queue_name in grain.config.get('queue_names', [])} names.add('celery') return sorted(names) def configure_grain(self, grain, flask_app): _ = grain.load() def configure_app(self, flask_app): super().configure_app(flask_app) from ..celery.app import celery_app for task in celery_app.tasks.values(): queue_name = getattr(task, 'queue', None) if queue_name is not None: self.queues.add(queue_name) def iter_locations(self): return None
Allow passing Celery configuration under the project's config
Allow passing Celery configuration under the project's config
Python
bsd-3-clause
getweber/weber-cli
import os import logbook from .base import SubsystemBase _logger = logbook.Logger(__name__) class TasksSubsystem(SubsystemBase): NAME = 'tasks' def activate(self, flask_app): from ..celery.app import celery_app self._config = self.project.config.get('celery', {}) # ensure critical celery config exists self._config.setdefault('broker_url', 'amqp://guest:guest@localhost/') override_broker_url = os.environ.get('COB_CELERY_BROKER_URL') if override_broker_url is not None: self._config['broker_url'] = override_broker_url - celery_app.conf.broker_url = self._config['broker_url'] + celery_app.conf.update(self._config) self.queues = set() def get_queue_names(self): names = {queue_name for grain in self.grains for queue_name in grain.config.get('queue_names', [])} names.add('celery') return sorted(names) def configure_grain(self, grain, flask_app): _ = grain.load() def configure_app(self, flask_app): super().configure_app(flask_app) from ..celery.app import celery_app for task in celery_app.tasks.values(): queue_name = getattr(task, 'queue', None) if queue_name is not None: self.queues.add(queue_name) def iter_locations(self): return None
Allow passing Celery configuration under the project's config
## Code Before: import os import logbook from .base import SubsystemBase _logger = logbook.Logger(__name__) class TasksSubsystem(SubsystemBase): NAME = 'tasks' def activate(self, flask_app): from ..celery.app import celery_app self._config = self.project.config.get('celery', {}) # ensure critical celery config exists self._config.setdefault('broker_url', 'amqp://guest:guest@localhost/') override_broker_url = os.environ.get('COB_CELERY_BROKER_URL') if override_broker_url is not None: self._config['broker_url'] = override_broker_url celery_app.conf.broker_url = self._config['broker_url'] self.queues = set() def get_queue_names(self): names = {queue_name for grain in self.grains for queue_name in grain.config.get('queue_names', [])} names.add('celery') return sorted(names) def configure_grain(self, grain, flask_app): _ = grain.load() def configure_app(self, flask_app): super().configure_app(flask_app) from ..celery.app import celery_app for task in celery_app.tasks.values(): queue_name = getattr(task, 'queue', None) if queue_name is not None: self.queues.add(queue_name) def iter_locations(self): return None ## Instruction: Allow passing Celery configuration under the project's config ## Code After: import os import logbook from .base import SubsystemBase _logger = logbook.Logger(__name__) class TasksSubsystem(SubsystemBase): NAME = 'tasks' def activate(self, flask_app): from ..celery.app import celery_app self._config = self.project.config.get('celery', {}) # ensure critical celery config exists self._config.setdefault('broker_url', 'amqp://guest:guest@localhost/') override_broker_url = os.environ.get('COB_CELERY_BROKER_URL') if override_broker_url is not None: self._config['broker_url'] = override_broker_url celery_app.conf.update(self._config) self.queues = set() def get_queue_names(self): names = {queue_name for grain in self.grains for queue_name in grain.config.get('queue_names', [])} names.add('celery') return sorted(names) def configure_grain(self, grain, flask_app): _ = grain.load() def configure_app(self, flask_app): super().configure_app(flask_app) from ..celery.app import celery_app for task in celery_app.tasks.values(): queue_name = getattr(task, 'queue', None) if queue_name is not None: self.queues.add(queue_name) def iter_locations(self): return None
... self._config['broker_url'] = override_broker_url celery_app.conf.update(self._config) self.queues = set() ...
666fc19e2949a30cbe40bf6020c141e84dfcae1e
app/soc/models/project_survey.py
app/soc/models/project_survey.py
__authors__ = [ '"Daniel Diniz" <[email protected]>', '"Lennard de Rijk" <[email protected]>', ] from soc.models.survey import Survey class ProjectSurvey(Survey): """Survey for Students that have a StudentProject. """ def __init__(self, *args, **kwargs): super(ProjectSurvey, self).__init__(*args, **kwargs) # TODO: prefix has to be set to gsoc_program once data has been transferred self.prefix = 'program' self.taking_access = 'student'
__authors__ = [ '"Daniel Diniz" <[email protected]>', '"Lennard de Rijk" <[email protected]>', ] from soc.models.survey import Survey class ProjectSurvey(Survey): """Survey for Students that have a StudentProject. """ def __init__(self, *args, **kwargs): super(ProjectSurvey, self).__init__(*args, **kwargs) self.prefix = 'gsoc_program' self.taking_access = 'student'
Set the default prefix for ProjectSurveys to gsoc_program.
Set the default prefix for ProjectSurveys to gsoc_program.
Python
apache-2.0
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
__authors__ = [ '"Daniel Diniz" <[email protected]>', '"Lennard de Rijk" <[email protected]>', ] from soc.models.survey import Survey class ProjectSurvey(Survey): """Survey for Students that have a StudentProject. """ def __init__(self, *args, **kwargs): super(ProjectSurvey, self).__init__(*args, **kwargs) - # TODO: prefix has to be set to gsoc_program once data has been transferred - self.prefix = 'program' + self.prefix = 'gsoc_program' self.taking_access = 'student'
Set the default prefix for ProjectSurveys to gsoc_program.
## Code Before: __authors__ = [ '"Daniel Diniz" <[email protected]>', '"Lennard de Rijk" <[email protected]>', ] from soc.models.survey import Survey class ProjectSurvey(Survey): """Survey for Students that have a StudentProject. """ def __init__(self, *args, **kwargs): super(ProjectSurvey, self).__init__(*args, **kwargs) # TODO: prefix has to be set to gsoc_program once data has been transferred self.prefix = 'program' self.taking_access = 'student' ## Instruction: Set the default prefix for ProjectSurveys to gsoc_program. ## Code After: __authors__ = [ '"Daniel Diniz" <[email protected]>', '"Lennard de Rijk" <[email protected]>', ] from soc.models.survey import Survey class ProjectSurvey(Survey): """Survey for Students that have a StudentProject. """ def __init__(self, *args, **kwargs): super(ProjectSurvey, self).__init__(*args, **kwargs) self.prefix = 'gsoc_program' self.taking_access = 'student'
# ... existing code ... def __init__(self, *args, **kwargs): super(ProjectSurvey, self).__init__(*args, **kwargs) self.prefix = 'gsoc_program' self.taking_access = 'student' # ... rest of the code ...
64f9ef6fcc71ef09e113161711369fe4d9781a18
shorpypaper.py
shorpypaper.py
from pyquery import PyQuery as pq import requests import subprocess APPLESCRIPT = """/usr/bin/osascript<<END tell application "Finder" set desktop picture to POSIX file "%s" end tell END""" def main(): # Load main site. root = 'http://www.shorpy.com' r = requests.get(root) j = pq(r.content) # Load first photo. first_photo = root + j('div.node div.content a').eq(1).attr('href') r = requests.get(first_photo) j = pq(r.content) image = j('img').eq(0).attr('src') with open('/tmp/dailyshorpy.jpg', 'wb') as handle: # To reset the cached dailyshorpy.jpg. subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Frog.jpg', shell=True) request = requests.get(image, stream=True) for block in request.iter_content(1024): if not block: break handle.write(block) subprocess.Popen(APPLESCRIPT % '/tmp/dailyshorpy.jpg', shell=True) if __name__ == '__main__': main()
from pyquery import PyQuery as pq import requests import subprocess APPLESCRIPT = """/usr/bin/osascript<<END tell application "Finder" set desktop picture to POSIX file "%s" end tell END""" def main(): # Load main site. root = 'http://www.shorpy.com' r = requests.get(root) j = pq(r.content) # Load first photo. first_photo = root + j('div.node div.content a').eq(1).attr('href') r = requests.get(first_photo) j = pq(r.content) image = j('img').eq(0).attr('src') with open('/tmp/dailyshorpy.jpg', 'wb') as handle: # To reset the cached dailyshorpy.jpg. subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Solid Colors/Solid Gray Light.png', shell=True) request = requests.get(image, stream=True) for block in request.iter_content(1024): if not block: break handle.write(block) subprocess.Popen(APPLESCRIPT % '/tmp/dailyshorpy.jpg', shell=True) if __name__ == '__main__': main()
Use a grey solid instead of the damn frog.
Use a grey solid instead of the damn frog.
Python
mit
nicksergeant/shorpypaper
from pyquery import PyQuery as pq import requests import subprocess APPLESCRIPT = """/usr/bin/osascript<<END tell application "Finder" set desktop picture to POSIX file "%s" end tell END""" def main(): # Load main site. root = 'http://www.shorpy.com' r = requests.get(root) j = pq(r.content) # Load first photo. first_photo = root + j('div.node div.content a').eq(1).attr('href') r = requests.get(first_photo) j = pq(r.content) image = j('img').eq(0).attr('src') with open('/tmp/dailyshorpy.jpg', 'wb') as handle: # To reset the cached dailyshorpy.jpg. - subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Frog.jpg', shell=True) + subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Solid Colors/Solid Gray Light.png', shell=True) request = requests.get(image, stream=True) for block in request.iter_content(1024): if not block: break handle.write(block) subprocess.Popen(APPLESCRIPT % '/tmp/dailyshorpy.jpg', shell=True) if __name__ == '__main__': main()
Use a grey solid instead of the damn frog.
## Code Before: from pyquery import PyQuery as pq import requests import subprocess APPLESCRIPT = """/usr/bin/osascript<<END tell application "Finder" set desktop picture to POSIX file "%s" end tell END""" def main(): # Load main site. root = 'http://www.shorpy.com' r = requests.get(root) j = pq(r.content) # Load first photo. first_photo = root + j('div.node div.content a').eq(1).attr('href') r = requests.get(first_photo) j = pq(r.content) image = j('img').eq(0).attr('src') with open('/tmp/dailyshorpy.jpg', 'wb') as handle: # To reset the cached dailyshorpy.jpg. subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Frog.jpg', shell=True) request = requests.get(image, stream=True) for block in request.iter_content(1024): if not block: break handle.write(block) subprocess.Popen(APPLESCRIPT % '/tmp/dailyshorpy.jpg', shell=True) if __name__ == '__main__': main() ## Instruction: Use a grey solid instead of the damn frog. ## Code After: from pyquery import PyQuery as pq import requests import subprocess APPLESCRIPT = """/usr/bin/osascript<<END tell application "Finder" set desktop picture to POSIX file "%s" end tell END""" def main(): # Load main site. root = 'http://www.shorpy.com' r = requests.get(root) j = pq(r.content) # Load first photo. first_photo = root + j('div.node div.content a').eq(1).attr('href') r = requests.get(first_photo) j = pq(r.content) image = j('img').eq(0).attr('src') with open('/tmp/dailyshorpy.jpg', 'wb') as handle: # To reset the cached dailyshorpy.jpg. subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Solid Colors/Solid Gray Light.png', shell=True) request = requests.get(image, stream=True) for block in request.iter_content(1024): if not block: break handle.write(block) subprocess.Popen(APPLESCRIPT % '/tmp/dailyshorpy.jpg', shell=True) if __name__ == '__main__': main()
// ... existing code ... # To reset the cached dailyshorpy.jpg. subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Solid Colors/Solid Gray Light.png', shell=True) request = requests.get(image, stream=True) // ... rest of the code ...
3a27568211c07cf614aa9865a2f08d2a9b9bfb71
dinosaurs/views.py
dinosaurs/views.py
import os import json import httplib as http import tornado.web import tornado.ioloop from dinosaurs import api from dinosaurs import settings class SingleStatic(tornado.web.StaticFileHandler): def initialize(self, path): self.dirname, self.filename = os.path.split(path) super(SingleStatic, self).initialize(self.dirname) def get(self, path=None, include_body=True): super(SingleStatic, self).get(self.filename, include_body) class DomainAPIHandler(tornado.web.RequestHandler): def get(self): self.write({ 'availableDomains': settings.DOMAINS.keys() }) class EmailAPIHandler(tornado.web.RequestHandler): def post(self): try: req_json = json.loads(self.request.body) except ValueError: raise tornado.web.HTTPError(http.BAD_REQUEST) email = req_json.get('email') domain = req_json.get('domain') connection = api.get_connection(domain) if not email or not domain or not connection: raise tornado.web.HTTPError(http.BAD_REQUEST) ret, passwd = api.create_email(connection, email) self.write({ 'password': passwd, 'email': ret['login'], 'domain': ret['domain'] }) self.set_status(http.CREATED)
import os import json import httplib as http import tornado.web import tornado.ioloop from dinosaurs import api from dinosaurs import settings class SingleStatic(tornado.web.StaticFileHandler): def initialize(self, path): self.dirname, self.filename = os.path.split(path) super(SingleStatic, self).initialize(self.dirname) def get(self, path=None, include_body=True): super(SingleStatic, self).get(self.filename, include_body) class DomainAPIHandler(tornado.web.RequestHandler): def get(self): self.write({ 'availableDomains': settings.DOMAINS.keys() }) class EmailAPIHandler(tornado.web.RequestHandler): def write_error(self, status_code, **kwargs): self.finish({ "code": status_code, "message": self._reason, }) def post(self): try: req_json = json.loads(self.request.body) except ValueError: raise tornado.web.HTTPError(http.BAD_REQUEST) email = req_json.get('email') domain = req_json.get('domain') connection = api.get_connection(domain) if not email or not domain or not connection: raise tornado.web.HTTPError(http.BAD_REQUEST) try: ret, passwd = api.create_email(connection, email) except api.YandexException as e: if e.message != 'occupied': raise self.write({}) raise tornado.web.HTTPError(http.FORBIDDEN) self.write({ 'password': passwd, 'email': ret['login'], 'domain': ret['domain'] }) self.set_status(http.CREATED)
Return errors in json only
Return errors in json only
Python
mit
chrisseto/dinosaurs.sexy,chrisseto/dinosaurs.sexy
import os import json import httplib as http import tornado.web import tornado.ioloop from dinosaurs import api from dinosaurs import settings class SingleStatic(tornado.web.StaticFileHandler): def initialize(self, path): self.dirname, self.filename = os.path.split(path) super(SingleStatic, self).initialize(self.dirname) def get(self, path=None, include_body=True): super(SingleStatic, self).get(self.filename, include_body) class DomainAPIHandler(tornado.web.RequestHandler): def get(self): self.write({ 'availableDomains': settings.DOMAINS.keys() }) class EmailAPIHandler(tornado.web.RequestHandler): + def write_error(self, status_code, **kwargs): + self.finish({ + "code": status_code, + "message": self._reason, + }) + def post(self): try: req_json = json.loads(self.request.body) except ValueError: raise tornado.web.HTTPError(http.BAD_REQUEST) email = req_json.get('email') domain = req_json.get('domain') connection = api.get_connection(domain) if not email or not domain or not connection: raise tornado.web.HTTPError(http.BAD_REQUEST) + try: - ret, passwd = api.create_email(connection, email) + ret, passwd = api.create_email(connection, email) + except api.YandexException as e: + if e.message != 'occupied': + raise + self.write({}) + raise tornado.web.HTTPError(http.FORBIDDEN) self.write({ 'password': passwd, 'email': ret['login'], 'domain': ret['domain'] }) self.set_status(http.CREATED)
Return errors in json only
## Code Before: import os import json import httplib as http import tornado.web import tornado.ioloop from dinosaurs import api from dinosaurs import settings class SingleStatic(tornado.web.StaticFileHandler): def initialize(self, path): self.dirname, self.filename = os.path.split(path) super(SingleStatic, self).initialize(self.dirname) def get(self, path=None, include_body=True): super(SingleStatic, self).get(self.filename, include_body) class DomainAPIHandler(tornado.web.RequestHandler): def get(self): self.write({ 'availableDomains': settings.DOMAINS.keys() }) class EmailAPIHandler(tornado.web.RequestHandler): def post(self): try: req_json = json.loads(self.request.body) except ValueError: raise tornado.web.HTTPError(http.BAD_REQUEST) email = req_json.get('email') domain = req_json.get('domain') connection = api.get_connection(domain) if not email or not domain or not connection: raise tornado.web.HTTPError(http.BAD_REQUEST) ret, passwd = api.create_email(connection, email) self.write({ 'password': passwd, 'email': ret['login'], 'domain': ret['domain'] }) self.set_status(http.CREATED) ## Instruction: Return errors in json only ## Code After: import os import json import httplib as http import tornado.web import tornado.ioloop from dinosaurs import api from dinosaurs import settings class SingleStatic(tornado.web.StaticFileHandler): def initialize(self, path): self.dirname, self.filename = os.path.split(path) super(SingleStatic, self).initialize(self.dirname) def get(self, path=None, include_body=True): super(SingleStatic, self).get(self.filename, include_body) class DomainAPIHandler(tornado.web.RequestHandler): def get(self): self.write({ 'availableDomains': settings.DOMAINS.keys() }) class EmailAPIHandler(tornado.web.RequestHandler): def write_error(self, status_code, **kwargs): self.finish({ "code": status_code, "message": self._reason, }) def post(self): try: req_json = json.loads(self.request.body) except ValueError: raise tornado.web.HTTPError(http.BAD_REQUEST) email = req_json.get('email') domain = req_json.get('domain') connection = api.get_connection(domain) if not email or not domain or not connection: raise tornado.web.HTTPError(http.BAD_REQUEST) try: ret, passwd = api.create_email(connection, email) except api.YandexException as e: if e.message != 'occupied': raise self.write({}) raise tornado.web.HTTPError(http.FORBIDDEN) self.write({ 'password': passwd, 'email': ret['login'], 'domain': ret['domain'] }) self.set_status(http.CREATED)
... class EmailAPIHandler(tornado.web.RequestHandler): def write_error(self, status_code, **kwargs): self.finish({ "code": status_code, "message": self._reason, }) def post(self): try: ... raise tornado.web.HTTPError(http.BAD_REQUEST) try: ret, passwd = api.create_email(connection, email) except api.YandexException as e: if e.message != 'occupied': raise self.write({}) raise tornado.web.HTTPError(http.FORBIDDEN) self.write({ ...
989601aef4d8a1eeb7cf873ebd2f93ad89b67e54
tests/install_tests/test_build.py
tests/install_tests/test_build.py
from distutils import ccompiler from distutils import sysconfig import unittest import pytest from install import build class TestCheckVersion(unittest.TestCase): def setUp(self): self.compiler = ccompiler.new_compiler() sysconfig.customize_compiler(self.compiler) self.settings = build.get_compiler_setting(False) @pytest.mark.gpu def test_check_cuda_version(self): with self.assertRaises(RuntimeError): build.get_cuda_version() assert build.check_cuda_version( self.compiler, self.settings) assert isinstance(build.get_cuda_version(), int) assert isinstance(build.get_cuda_version(True), str) @pytest.mark.gpu @pytest.mark.cudnn @pytest.mark.xfail(build.use_hip, reason='ROCm/HIP DNN support is not ready') def test_check_cudnn_version(self): with self.assertRaises(RuntimeError): build.get_cudnn_version() assert build.check_cudnn_version( self.compiler, self.settings) assert isinstance(build.get_cudnn_version(), int) assert isinstance(build.get_cudnn_version(True), str)
from distutils import ccompiler from distutils import sysconfig import unittest import pytest from install import build class TestCheckVersion(unittest.TestCase): def setUp(self): self.compiler = ccompiler.new_compiler() sysconfig.customize_compiler(self.compiler) self.settings = build.get_compiler_setting(False) @pytest.mark.gpu @pytest.mark.skipIf(build.use_hip, reason='For CUDA environment') def test_check_cuda_version(self): with self.assertRaises(RuntimeError): build.get_cuda_version() assert build.check_cuda_version( self.compiler, self.settings) assert isinstance(build.get_cuda_version(), int) assert isinstance(build.get_cuda_version(True), str) @pytest.mark.gpu @pytest.mark.skipIf(not build.use_hip, reason='For ROCm/HIP environment') def test_check_hip_version(self): with self.assertRaises(RuntimeError): build.get_hip_version() assert build.check_hip_version( self.compiler, self.settings) assert isinstance(build.get_hip_version(), int) assert isinstance(build.get_hip_version(True), str) @pytest.mark.gpu @pytest.mark.cudnn @pytest.mark.xfail(build.use_hip, reason='ROCm/HIP DNN support is not ready') def test_check_cudnn_version(self): with self.assertRaises(RuntimeError): build.get_cudnn_version() assert build.check_cudnn_version( self.compiler, self.settings) assert isinstance(build.get_cudnn_version(), int) assert isinstance(build.get_cudnn_version(True), str)
Fix to check HIP version
Fix to check HIP version
Python
mit
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
from distutils import ccompiler from distutils import sysconfig import unittest import pytest from install import build class TestCheckVersion(unittest.TestCase): def setUp(self): self.compiler = ccompiler.new_compiler() sysconfig.customize_compiler(self.compiler) self.settings = build.get_compiler_setting(False) @pytest.mark.gpu + @pytest.mark.skipIf(build.use_hip, reason='For CUDA environment') def test_check_cuda_version(self): with self.assertRaises(RuntimeError): build.get_cuda_version() assert build.check_cuda_version( self.compiler, self.settings) assert isinstance(build.get_cuda_version(), int) assert isinstance(build.get_cuda_version(True), str) + + @pytest.mark.gpu + @pytest.mark.skipIf(not build.use_hip, reason='For ROCm/HIP environment') + def test_check_hip_version(self): + with self.assertRaises(RuntimeError): + build.get_hip_version() + assert build.check_hip_version( + self.compiler, self.settings) + assert isinstance(build.get_hip_version(), int) + assert isinstance(build.get_hip_version(True), str) @pytest.mark.gpu @pytest.mark.cudnn @pytest.mark.xfail(build.use_hip, reason='ROCm/HIP DNN support is not ready') def test_check_cudnn_version(self): with self.assertRaises(RuntimeError): build.get_cudnn_version() assert build.check_cudnn_version( self.compiler, self.settings) assert isinstance(build.get_cudnn_version(), int) assert isinstance(build.get_cudnn_version(True), str)
Fix to check HIP version
## Code Before: from distutils import ccompiler from distutils import sysconfig import unittest import pytest from install import build class TestCheckVersion(unittest.TestCase): def setUp(self): self.compiler = ccompiler.new_compiler() sysconfig.customize_compiler(self.compiler) self.settings = build.get_compiler_setting(False) @pytest.mark.gpu def test_check_cuda_version(self): with self.assertRaises(RuntimeError): build.get_cuda_version() assert build.check_cuda_version( self.compiler, self.settings) assert isinstance(build.get_cuda_version(), int) assert isinstance(build.get_cuda_version(True), str) @pytest.mark.gpu @pytest.mark.cudnn @pytest.mark.xfail(build.use_hip, reason='ROCm/HIP DNN support is not ready') def test_check_cudnn_version(self): with self.assertRaises(RuntimeError): build.get_cudnn_version() assert build.check_cudnn_version( self.compiler, self.settings) assert isinstance(build.get_cudnn_version(), int) assert isinstance(build.get_cudnn_version(True), str) ## Instruction: Fix to check HIP version ## Code After: from distutils import ccompiler from distutils import sysconfig import unittest import pytest from install import build class TestCheckVersion(unittest.TestCase): def setUp(self): self.compiler = ccompiler.new_compiler() sysconfig.customize_compiler(self.compiler) self.settings = build.get_compiler_setting(False) @pytest.mark.gpu @pytest.mark.skipIf(build.use_hip, reason='For CUDA environment') def test_check_cuda_version(self): with self.assertRaises(RuntimeError): build.get_cuda_version() assert build.check_cuda_version( self.compiler, self.settings) assert isinstance(build.get_cuda_version(), int) assert isinstance(build.get_cuda_version(True), str) @pytest.mark.gpu @pytest.mark.skipIf(not build.use_hip, reason='For ROCm/HIP environment') def test_check_hip_version(self): with self.assertRaises(RuntimeError): build.get_hip_version() assert build.check_hip_version( self.compiler, self.settings) assert isinstance(build.get_hip_version(), int) assert isinstance(build.get_hip_version(True), str) @pytest.mark.gpu @pytest.mark.cudnn @pytest.mark.xfail(build.use_hip, reason='ROCm/HIP DNN support is not ready') def test_check_cudnn_version(self): with self.assertRaises(RuntimeError): build.get_cudnn_version() assert build.check_cudnn_version( self.compiler, self.settings) assert isinstance(build.get_cudnn_version(), int) assert isinstance(build.get_cudnn_version(True), str)
... @pytest.mark.gpu @pytest.mark.skipIf(build.use_hip, reason='For CUDA environment') def test_check_cuda_version(self): with self.assertRaises(RuntimeError): ... assert isinstance(build.get_cuda_version(), int) assert isinstance(build.get_cuda_version(True), str) @pytest.mark.gpu @pytest.mark.skipIf(not build.use_hip, reason='For ROCm/HIP environment') def test_check_hip_version(self): with self.assertRaises(RuntimeError): build.get_hip_version() assert build.check_hip_version( self.compiler, self.settings) assert isinstance(build.get_hip_version(), int) assert isinstance(build.get_hip_version(True), str) @pytest.mark.gpu ...
dc2f8342bc9b9c921086948ed10f99de9bcbc76d
client/python/setup.py
client/python/setup.py
from distutils.core import setup setup(name='Spiff', version='0.1', description="API to Spaceman Spiff", author='Trever Fischer', author_email='[email protected]', url='http://github.com/synhak/spiff', py_modules=['spiff'] )
from distutils.core import setup setup(name='Spiff', version='0.1', description="API to Spaceman Spiff", author='Trever Fischer', author_email='[email protected]', url='http://github.com/synhak/spiff', py_modules=['spiff'], requires=['requests'], )
Add deps for python lib
Add deps for python lib
Python
agpl-3.0
SYNHAK/spiff,SYNHAK/spiff,SYNHAK/spiff
from distutils.core import setup setup(name='Spiff', version='0.1', description="API to Spaceman Spiff", author='Trever Fischer', author_email='[email protected]', url='http://github.com/synhak/spiff', - py_modules=['spiff'] + py_modules=['spiff'], + requires=['requests'], )
Add deps for python lib
## Code Before: from distutils.core import setup setup(name='Spiff', version='0.1', description="API to Spaceman Spiff", author='Trever Fischer', author_email='[email protected]', url='http://github.com/synhak/spiff', py_modules=['spiff'] ) ## Instruction: Add deps for python lib ## Code After: from distutils.core import setup setup(name='Spiff', version='0.1', description="API to Spaceman Spiff", author='Trever Fischer', author_email='[email protected]', url='http://github.com/synhak/spiff', py_modules=['spiff'], requires=['requests'], )
// ... existing code ... author_email='[email protected]', url='http://github.com/synhak/spiff', py_modules=['spiff'], requires=['requests'], ) // ... rest of the code ...
a03fe14d4dba7b9a54efdebeb768551bda53e3c1
admin/common_auth/models.py
admin/common_auth/models.py
from django.db import models class AdminProfile(models.Model): user = models.OneToOneField('osf.OSFUser', related_name='admin_profile') desk_token = models.CharField(max_length=45, blank=True) desk_token_secret = models.CharField(max_length=45, blank=True) class Meta: # custom permissions for use in the OSF Admin App permissions = ( ('mark_spam', 'Can mark comments, projects and registrations as spam'), ('view_spam', 'Can view nodes, comments, and projects marked as spam'), ('view_metrics', 'Can view metrics on the OSF Admin app'), ('view_prereg', 'Can view entries for the preregistration chellenge on the admin'), ('administer_prereg', 'Can update, comment on, and approve entries to the prereg challenge'), ('view_desk', 'Can view details about Desk users'), )
from django.db import models class AdminProfile(models.Model): user = models.OneToOneField('osf.OSFUser', related_name='admin_profile') desk_token = models.CharField(max_length=45, blank=True) desk_token_secret = models.CharField(max_length=45, blank=True) def __unicode__(self): return self.user.username class Meta: # custom permissions for use in the OSF Admin App permissions = ( ('mark_spam', 'Can mark comments, projects and registrations as spam'), ('view_spam', 'Can view nodes, comments, and projects marked as spam'), ('view_metrics', 'Can view metrics on the OSF Admin app'), ('view_prereg', 'Can view entries for the preregistration chellenge on the admin'), ('administer_prereg', 'Can update, comment on, and approve entries to the prereg challenge'), ('view_desk', 'Can view details about Desk users'), )
Fix the display name of admin profile in the admin admin
Fix the display name of admin profile in the admin admin
Python
apache-2.0
HalcyonChimera/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,sloria/osf.io,chrisseto/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,icereval/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,cslzchen/osf.io,felliott/osf.io,chennan47/osf.io,binoculars/osf.io,monikagrabowska/osf.io,binoculars/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,leb2dg/osf.io,icereval/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,adlius/osf.io,Nesiehr/osf.io,leb2dg/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,pattisdr/osf.io,saradbowman/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,adlius/osf.io,leb2dg/osf.io,chennan47/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,acshi/osf.io,aaxelb/osf.io,cslzchen/osf.io,caneruguz/osf.io,sloria/osf.io,laurenrevere/osf.io,adlius/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,chrisseto/osf.io,pattisdr/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,acshi/osf.io,erinspace/osf.io,mattclark/osf.io,baylee-d/osf.io,chennan47/osf.io,acshi/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,hmoco/osf.io,hmoco/osf.io,aaxelb/osf.io,cwisecarver/osf.io,aaxelb/osf.io,TomBaxter/osf.io,binoculars/osf.io,adlius/osf.io,felliott/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,felliott/osf.io,saradbowman/osf.io,felliott/osf.io,laurenrevere/osf.io,caneruguz/osf.io,cwisecarver/osf.io,chrisseto/osf.io,acshi/osf.io,chrisseto/osf.io,acshi/osf.io,baylee-d/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,mfraezz/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,hmoco/osf.io,caseyrollins/osf.io,mattclark/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,crcresearch/osf.io,mattclark/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,erinspace/osf.io,Johnetordoff/osf.io
from django.db import models class AdminProfile(models.Model): user = models.OneToOneField('osf.OSFUser', related_name='admin_profile') desk_token = models.CharField(max_length=45, blank=True) desk_token_secret = models.CharField(max_length=45, blank=True) + + def __unicode__(self): + return self.user.username class Meta: # custom permissions for use in the OSF Admin App permissions = ( ('mark_spam', 'Can mark comments, projects and registrations as spam'), ('view_spam', 'Can view nodes, comments, and projects marked as spam'), ('view_metrics', 'Can view metrics on the OSF Admin app'), ('view_prereg', 'Can view entries for the preregistration chellenge on the admin'), ('administer_prereg', 'Can update, comment on, and approve entries to the prereg challenge'), ('view_desk', 'Can view details about Desk users'), )
Fix the display name of admin profile in the admin admin
## Code Before: from django.db import models class AdminProfile(models.Model): user = models.OneToOneField('osf.OSFUser', related_name='admin_profile') desk_token = models.CharField(max_length=45, blank=True) desk_token_secret = models.CharField(max_length=45, blank=True) class Meta: # custom permissions for use in the OSF Admin App permissions = ( ('mark_spam', 'Can mark comments, projects and registrations as spam'), ('view_spam', 'Can view nodes, comments, and projects marked as spam'), ('view_metrics', 'Can view metrics on the OSF Admin app'), ('view_prereg', 'Can view entries for the preregistration chellenge on the admin'), ('administer_prereg', 'Can update, comment on, and approve entries to the prereg challenge'), ('view_desk', 'Can view details about Desk users'), ) ## Instruction: Fix the display name of admin profile in the admin admin ## Code After: from django.db import models class AdminProfile(models.Model): user = models.OneToOneField('osf.OSFUser', related_name='admin_profile') desk_token = models.CharField(max_length=45, blank=True) desk_token_secret = models.CharField(max_length=45, blank=True) def __unicode__(self): return self.user.username class Meta: # custom permissions for use in the OSF Admin App permissions = ( ('mark_spam', 'Can mark comments, projects and registrations as spam'), ('view_spam', 'Can view nodes, comments, and projects marked as spam'), ('view_metrics', 'Can view metrics on the OSF Admin app'), ('view_prereg', 'Can view entries for the preregistration chellenge on the admin'), ('administer_prereg', 'Can update, comment on, and approve entries to the prereg challenge'), ('view_desk', 'Can view details about Desk users'), )
// ... existing code ... desk_token = models.CharField(max_length=45, blank=True) desk_token_secret = models.CharField(max_length=45, blank=True) def __unicode__(self): return self.user.username class Meta: // ... rest of the code ...
21193559b063e85f26971d5ae6181a0bd097cda3
tests/utilities_test.py
tests/utilities_test.py
import pytest import pyop import numpy as np ####################################################################### # Tests # ####################################################################### def testEnsure2dColumn(capsys): @pyop.ensure2dColumn def printShape(x): print(x.shape) return x input_vec = np.random.rand(10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 1)\n" input_vec = np.random.rand(10, 10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n"
import pyop import numpy as np ####################################################################### # Tests # ####################################################################### def testEnsure2dColumn(capsys): @pyop.ensure2dColumn def printShape(x): print(x.shape) return x input_vec = np.random.rand(10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 1)\n" input_vec = np.random.rand(10, 10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n" ############ # Vector # ############ @pyop.vector def multFirstColumn(column): img = column.reshape((2, 2), order = 'C') img[:, 0] *= 2 return img.flatten(0) def testVectorOnMatrix(): np.testing.assert_allclose( multFirstColumn(np.array([[1, 1, 1, 1], [2, 1, 2, 1]]).T), np.array([[2, 4], [1, 1], [2, 4], [1, 1]])) def testVectorOnVector(): np.testing.assert_allclose( multFirstColumn(np.array([1, 1, 1, 1])), np.array(np.array([2, 1, 2, 1])))
Test vector, passes matrix and vector input.
Test vector, passes matrix and vector input.
Python
bsd-3-clause
ryanorendorff/pyop
- import pytest import pyop import numpy as np ####################################################################### # Tests # ####################################################################### def testEnsure2dColumn(capsys): @pyop.ensure2dColumn def printShape(x): print(x.shape) return x input_vec = np.random.rand(10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 1)\n" input_vec = np.random.rand(10, 10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n" + + ############ + # Vector # + ############ + @pyop.vector + def multFirstColumn(column): + img = column.reshape((2, 2), order = 'C') + img[:, 0] *= 2 + return img.flatten(0) + + + def testVectorOnMatrix(): + np.testing.assert_allclose( + multFirstColumn(np.array([[1, 1, 1, 1], [2, 1, 2, 1]]).T), + np.array([[2, 4], [1, 1], [2, 4], [1, 1]])) + + + def testVectorOnVector(): + np.testing.assert_allclose( + multFirstColumn(np.array([1, 1, 1, 1])), + np.array(np.array([2, 1, 2, 1]))) +
Test vector, passes matrix and vector input.
## Code Before: import pytest import pyop import numpy as np ####################################################################### # Tests # ####################################################################### def testEnsure2dColumn(capsys): @pyop.ensure2dColumn def printShape(x): print(x.shape) return x input_vec = np.random.rand(10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 1)\n" input_vec = np.random.rand(10, 10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n" ## Instruction: Test vector, passes matrix and vector input. ## Code After: import pyop import numpy as np ####################################################################### # Tests # ####################################################################### def testEnsure2dColumn(capsys): @pyop.ensure2dColumn def printShape(x): print(x.shape) return x input_vec = np.random.rand(10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 1)\n" input_vec = np.random.rand(10, 10) output = printShape(input_vec) print_out, _ = capsys.readouterr() np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n" ############ # Vector # ############ @pyop.vector def multFirstColumn(column): img = column.reshape((2, 2), order = 'C') img[:, 0] *= 2 return img.flatten(0) def testVectorOnMatrix(): np.testing.assert_allclose( multFirstColumn(np.array([[1, 1, 1, 1], [2, 1, 2, 1]]).T), np.array([[2, 4], [1, 1], [2, 4], [1, 1]])) def testVectorOnVector(): np.testing.assert_allclose( multFirstColumn(np.array([1, 1, 1, 1])), np.array(np.array([2, 1, 2, 1])))
# ... existing code ... import pyop # ... modified code ... np.testing.assert_allclose(input_vec, output) assert print_out == "(10, 10)\n" ############ # Vector # ############ @pyop.vector def multFirstColumn(column): img = column.reshape((2, 2), order = 'C') img[:, 0] *= 2 return img.flatten(0) def testVectorOnMatrix(): np.testing.assert_allclose( multFirstColumn(np.array([[1, 1, 1, 1], [2, 1, 2, 1]]).T), np.array([[2, 4], [1, 1], [2, 4], [1, 1]])) def testVectorOnVector(): np.testing.assert_allclose( multFirstColumn(np.array([1, 1, 1, 1])), np.array(np.array([2, 1, 2, 1]))) # ... rest of the code ...
5d2858d740eebfe180ceef22ae5cc80b902a5ccf
books/views.py
books/views.py
from django.core.urlresolvers import reverse from django.http.response import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.utils.translation import ugettext as _ from books.forms import BookForm from shared.models import BookType def index(request): book_list = BookType.objects.all() args = {'book_list': book_list} if request.session['success_msg']: args['success_msg'] = { 'book_added': _("The book was added successfully!") }[request.session['success_msg']] return render(request, 'books/index.html', args) def add_book(request): if request.method == 'POST': form = BookForm(request.POST) if form.is_valid(): book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'], issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'], price=form.cleaned_data['price'] * 100) book_type.save() request.session['success_msg'] = 'book_added' return HttpResponseRedirect(reverse('index')) else: form = BookForm() return render(request, 'books/add.html', {'form': form}) def edit_book(request, book_id): return HttpResponse("Hello world!") def remove_book(request, book_id): return HttpResponse("Hello world!")
from django.core.urlresolvers import reverse from django.http.response import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.utils.translation import ugettext as _ from books.forms import BookForm from shared.models import BookType def index(request): book_list = BookType.objects.all() args = {'book_list': book_list} if 'success_msg' in request.session: args['success_msg'] = { 'book_added': _("The book was added successfully!") }[request.session['success_msg']] del request.session['success_msg'] return render(request, 'books/index.html', args) def add_book(request): if request.method == 'POST': form = BookForm(request.POST) if form.is_valid(): book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'], issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'], price=form.cleaned_data['price'] * 100) book_type.save() request.session['success_msg'] = 'book_added' return HttpResponseRedirect(reverse('index')) else: form = BookForm() return render(request, 'books/add.html', {'form': form}) def edit_book(request, book_id): return HttpResponse("Hello world!") def remove_book(request, book_id): return HttpResponse("Hello world!")
Fix KeyError in alerts implementation
Fix KeyError in alerts implementation - Fix for alert that wasn't dismissing after refreshing the page
Python
agpl-3.0
m4tx/egielda,m4tx/egielda,m4tx/egielda
from django.core.urlresolvers import reverse from django.http.response import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.utils.translation import ugettext as _ from books.forms import BookForm from shared.models import BookType def index(request): book_list = BookType.objects.all() args = {'book_list': book_list} - if request.session['success_msg']: + if 'success_msg' in request.session: args['success_msg'] = { 'book_added': _("The book was added successfully!") }[request.session['success_msg']] + del request.session['success_msg'] return render(request, 'books/index.html', args) def add_book(request): if request.method == 'POST': form = BookForm(request.POST) if form.is_valid(): book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'], issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'], price=form.cleaned_data['price'] * 100) book_type.save() request.session['success_msg'] = 'book_added' return HttpResponseRedirect(reverse('index')) else: form = BookForm() return render(request, 'books/add.html', {'form': form}) def edit_book(request, book_id): return HttpResponse("Hello world!") def remove_book(request, book_id): return HttpResponse("Hello world!")
Fix KeyError in alerts implementation
## Code Before: from django.core.urlresolvers import reverse from django.http.response import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.utils.translation import ugettext as _ from books.forms import BookForm from shared.models import BookType def index(request): book_list = BookType.objects.all() args = {'book_list': book_list} if request.session['success_msg']: args['success_msg'] = { 'book_added': _("The book was added successfully!") }[request.session['success_msg']] return render(request, 'books/index.html', args) def add_book(request): if request.method == 'POST': form = BookForm(request.POST) if form.is_valid(): book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'], issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'], price=form.cleaned_data['price'] * 100) book_type.save() request.session['success_msg'] = 'book_added' return HttpResponseRedirect(reverse('index')) else: form = BookForm() return render(request, 'books/add.html', {'form': form}) def edit_book(request, book_id): return HttpResponse("Hello world!") def remove_book(request, book_id): return HttpResponse("Hello world!") ## Instruction: Fix KeyError in alerts implementation ## Code After: from django.core.urlresolvers import reverse from django.http.response import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.utils.translation import ugettext as _ from books.forms import BookForm from shared.models import BookType def index(request): book_list = BookType.objects.all() args = {'book_list': book_list} if 'success_msg' in request.session: args['success_msg'] = { 'book_added': _("The book was added successfully!") }[request.session['success_msg']] del request.session['success_msg'] return render(request, 'books/index.html', args) def add_book(request): if request.method == 'POST': form = BookForm(request.POST) if form.is_valid(): book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'], issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'], price=form.cleaned_data['price'] * 100) book_type.save() request.session['success_msg'] = 'book_added' return HttpResponseRedirect(reverse('index')) else: form = BookForm() return render(request, 'books/add.html', {'form': form}) def edit_book(request, book_id): return HttpResponse("Hello world!") def remove_book(request, book_id): return HttpResponse("Hello world!")
// ... existing code ... book_list = BookType.objects.all() args = {'book_list': book_list} if 'success_msg' in request.session: args['success_msg'] = { 'book_added': _("The book was added successfully!") }[request.session['success_msg']] del request.session['success_msg'] return render(request, 'books/index.html', args) // ... rest of the code ...
108a05b050383bca218cd02be499f1fad58065dc
test/test_refmanage.py
test/test_refmanage.py
import unittest import pathlib2 as pathlib import refmanage class NoSpecifiedFunctionality(unittest.TestCase): """ Tests when no functionality has been specified on cli """ def test_no_args(self): """ `ref` without arguments should print the help text """ pass def test_version(self): """ `ref --version` should return version string """ pass class TestFunctionality(unittest.TestCase): """ Test "test" functionality """ def test_no_args(self): """ `ref test` without additonal arguments should print the help text """ pass def test_default(self): """ `ref test *.bib` without flags should default to --unparseable and print list of unparseable files """ pass def test_unparseable(self): """ `ref test -u *.bib` should print list of unparseable files """ pass def test_unparseable_verbose(self): """ `ref test -uv *.bib` should print list of unparseable files with information about corresponding parsing message """ pass def test_parseable(self): """ `ref test -p *.bib` should print list of parseable files """ pass def test_parseable_verbose(self): """ `ref test -pv *.bib` should print list of parseable files and nothing more """ pass def test_parseable_unparseable(self): """ `ref test -up *.bib` should exit with an error """ pass
import unittest import pathlib2 as pathlib import refmanage class NoSpecifiedFunctionality(unittest.TestCase): """ Tests when no functionality has been specified on cli """ def test_no_args(self): """ `ref` without arguments should print the help text """ self.fail() def test_version(self): """ `ref --version` should return version string """ self.fail() class TestFunctionality(unittest.TestCase): """ Test "test" functionality """ def test_no_args(self): """ `ref test` without additonal arguments should print the help text """ self.fail() def test_default(self): """ `ref test *.bib` without flags should default to --unparseable and print list of unparseable files """ self.fail() def test_unparseable(self): """ `ref test -u *.bib` should print list of unparseable files """ self.fail() def test_unparseable_verbose(self): """ `ref test -uv *.bib` should print list of unparseable files with information about corresponding parsing message """ self.fail() def test_parseable(self): """ `ref test -p *.bib` should print list of parseable files """ self.fail() def test_parseable_verbose(self): """ `ref test -pv *.bib` should print list of parseable files and nothing more """ self.fail() def test_parseable_unparseable(self): """ `ref test -up *.bib` should exit with an error """ self.fail()
Replace "pass" with "self.fail()" in tests
Replace "pass" with "self.fail()" in tests In this way, tests that haven't been written will run noisily instead of silently, encouraging completion of writing tests.
Python
mit
jrsmith3/refmanage
import unittest import pathlib2 as pathlib import refmanage class NoSpecifiedFunctionality(unittest.TestCase): """ Tests when no functionality has been specified on cli """ def test_no_args(self): """ `ref` without arguments should print the help text """ - pass + self.fail() def test_version(self): """ `ref --version` should return version string """ - pass + self.fail() class TestFunctionality(unittest.TestCase): """ Test "test" functionality """ def test_no_args(self): """ `ref test` without additonal arguments should print the help text """ - pass + self.fail() def test_default(self): """ `ref test *.bib` without flags should default to --unparseable and print list of unparseable files """ - pass + self.fail() def test_unparseable(self): """ `ref test -u *.bib` should print list of unparseable files """ - pass + self.fail() def test_unparseable_verbose(self): """ `ref test -uv *.bib` should print list of unparseable files with information about corresponding parsing message """ - pass + self.fail() def test_parseable(self): """ `ref test -p *.bib` should print list of parseable files """ - pass + self.fail() def test_parseable_verbose(self): """ `ref test -pv *.bib` should print list of parseable files and nothing more """ - pass + self.fail() def test_parseable_unparseable(self): """ `ref test -up *.bib` should exit with an error """ - pass + self.fail()
Replace "pass" with "self.fail()" in tests
## Code Before: import unittest import pathlib2 as pathlib import refmanage class NoSpecifiedFunctionality(unittest.TestCase): """ Tests when no functionality has been specified on cli """ def test_no_args(self): """ `ref` without arguments should print the help text """ pass def test_version(self): """ `ref --version` should return version string """ pass class TestFunctionality(unittest.TestCase): """ Test "test" functionality """ def test_no_args(self): """ `ref test` without additonal arguments should print the help text """ pass def test_default(self): """ `ref test *.bib` without flags should default to --unparseable and print list of unparseable files """ pass def test_unparseable(self): """ `ref test -u *.bib` should print list of unparseable files """ pass def test_unparseable_verbose(self): """ `ref test -uv *.bib` should print list of unparseable files with information about corresponding parsing message """ pass def test_parseable(self): """ `ref test -p *.bib` should print list of parseable files """ pass def test_parseable_verbose(self): """ `ref test -pv *.bib` should print list of parseable files and nothing more """ pass def test_parseable_unparseable(self): """ `ref test -up *.bib` should exit with an error """ pass ## Instruction: Replace "pass" with "self.fail()" in tests ## Code After: import unittest import pathlib2 as pathlib import refmanage class NoSpecifiedFunctionality(unittest.TestCase): """ Tests when no functionality has been specified on cli """ def test_no_args(self): """ `ref` without arguments should print the help text """ self.fail() def test_version(self): """ `ref --version` should return version string """ self.fail() class TestFunctionality(unittest.TestCase): """ Test "test" functionality """ def test_no_args(self): """ `ref test` without additonal arguments should print the help text """ self.fail() def test_default(self): """ `ref test *.bib` without flags should default to --unparseable and print list of unparseable files """ self.fail() def test_unparseable(self): """ `ref test -u *.bib` should print list of unparseable files """ self.fail() def test_unparseable_verbose(self): """ `ref test -uv *.bib` should print list of unparseable files with information about corresponding parsing message """ self.fail() def test_parseable(self): """ `ref test -p *.bib` should print list of parseable files """ self.fail() def test_parseable_verbose(self): """ `ref test -pv *.bib` should print list of parseable files and nothing more """ self.fail() def test_parseable_unparseable(self): """ `ref test -up *.bib` should exit with an error """ self.fail()
... `ref` without arguments should print the help text """ self.fail() def test_version(self): ... `ref --version` should return version string """ self.fail() class TestFunctionality(unittest.TestCase): ... `ref test` without additonal arguments should print the help text """ self.fail() def test_default(self): ... `ref test *.bib` without flags should default to --unparseable and print list of unparseable files """ self.fail() def test_unparseable(self): ... `ref test -u *.bib` should print list of unparseable files """ self.fail() def test_unparseable_verbose(self): ... `ref test -uv *.bib` should print list of unparseable files with information about corresponding parsing message """ self.fail() def test_parseable(self): ... `ref test -p *.bib` should print list of parseable files """ self.fail() def test_parseable_verbose(self): ... `ref test -pv *.bib` should print list of parseable files and nothing more """ self.fail() def test_parseable_unparseable(self): ... `ref test -up *.bib` should exit with an error """ self.fail() ...
cad627a986f0d2ee897e9889e78473976cbeb69d
corehq/apps/app_manager/tests/test_suite.py
corehq/apps/app_manager/tests/test_suite.py
from django.utils.unittest.case import TestCase from corehq.apps.app_manager.models import Application from corehq.apps.app_manager.tests.util import TestFileMixin # snippet from http://stackoverflow.com/questions/321795/comparing-xml-in-a-unit-test-in-python/7060342#7060342 from doctest import Example from lxml.doctestcompare import LXMLOutputChecker class XmlTest(TestCase): def assertXmlEqual(self, want, got): checker = LXMLOutputChecker() if not checker.check_output(want, got, 0): message = checker.output_difference(Example("", want), got, 0) raise AssertionError(message) # end snippet class SuiteTest(XmlTest, TestFileMixin): file_path = ('data', 'suite') def setUp(self): self.app = Application.wrap(self.get_json('app')) def test_normal_suite(self): self.assertXmlEqual(self.app.create_suite(), self.get_xml('normal-suite'))
from django.utils.unittest.case import TestCase from casexml.apps.case.tests import check_xml_line_by_line from corehq.apps.app_manager.models import Application from corehq.apps.app_manager.tests.util import TestFileMixin # snippet from http://stackoverflow.com/questions/321795/comparing-xml-in-a-unit-test-in-python/7060342#7060342 from doctest import Example from lxml.doctestcompare import LXMLOutputChecker class XmlTest(TestCase): def assertXmlEqual(self, want, got): checker = LXMLOutputChecker() if not checker.check_output(want, got, 0): message = checker.output_difference(Example("", want), got, 0) raise AssertionError(message) # end snippet class SuiteTest(XmlTest, TestFileMixin): file_path = ('data', 'suite') def setUp(self): self.app = Application.wrap(self.get_json('app')) def test_normal_suite(self): self.assertXmlEqual(self.get_xml('normal-suite'), self.app.create_suite())
Revert "make test output a litte more intuitive"
Revert "make test output a litte more intuitive" This reverts commit e09fa453b1bb72f08053d13cc3050012a20ba724.
Python
bsd-3-clause
qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,SEL-Columbia/commcare-hq,gmimano/commcaretest,qedsoftware/commcare-hq,gmimano/commcaretest,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq
from django.utils.unittest.case import TestCase + from casexml.apps.case.tests import check_xml_line_by_line from corehq.apps.app_manager.models import Application from corehq.apps.app_manager.tests.util import TestFileMixin # snippet from http://stackoverflow.com/questions/321795/comparing-xml-in-a-unit-test-in-python/7060342#7060342 from doctest import Example from lxml.doctestcompare import LXMLOutputChecker - class XmlTest(TestCase): def assertXmlEqual(self, want, got): checker = LXMLOutputChecker() if not checker.check_output(want, got, 0): message = checker.output_difference(Example("", want), got, 0) raise AssertionError(message) # end snippet - class SuiteTest(XmlTest, TestFileMixin): file_path = ('data', 'suite') - def setUp(self): self.app = Application.wrap(self.get_json('app')) def test_normal_suite(self): - self.assertXmlEqual(self.app.create_suite(), self.get_xml('normal-suite')) + self.assertXmlEqual(self.get_xml('normal-suite'), self.app.create_suite())
Revert "make test output a litte more intuitive"
## Code Before: from django.utils.unittest.case import TestCase from corehq.apps.app_manager.models import Application from corehq.apps.app_manager.tests.util import TestFileMixin # snippet from http://stackoverflow.com/questions/321795/comparing-xml-in-a-unit-test-in-python/7060342#7060342 from doctest import Example from lxml.doctestcompare import LXMLOutputChecker class XmlTest(TestCase): def assertXmlEqual(self, want, got): checker = LXMLOutputChecker() if not checker.check_output(want, got, 0): message = checker.output_difference(Example("", want), got, 0) raise AssertionError(message) # end snippet class SuiteTest(XmlTest, TestFileMixin): file_path = ('data', 'suite') def setUp(self): self.app = Application.wrap(self.get_json('app')) def test_normal_suite(self): self.assertXmlEqual(self.app.create_suite(), self.get_xml('normal-suite')) ## Instruction: Revert "make test output a litte more intuitive" ## Code After: from django.utils.unittest.case import TestCase from casexml.apps.case.tests import check_xml_line_by_line from corehq.apps.app_manager.models import Application from corehq.apps.app_manager.tests.util import TestFileMixin # snippet from http://stackoverflow.com/questions/321795/comparing-xml-in-a-unit-test-in-python/7060342#7060342 from doctest import Example from lxml.doctestcompare import LXMLOutputChecker class XmlTest(TestCase): def assertXmlEqual(self, want, got): checker = LXMLOutputChecker() if not checker.check_output(want, got, 0): message = checker.output_difference(Example("", want), got, 0) raise AssertionError(message) # end snippet class SuiteTest(XmlTest, TestFileMixin): file_path = ('data', 'suite') def setUp(self): self.app = Application.wrap(self.get_json('app')) def test_normal_suite(self): self.assertXmlEqual(self.get_xml('normal-suite'), self.app.create_suite())
// ... existing code ... from django.utils.unittest.case import TestCase from casexml.apps.case.tests import check_xml_line_by_line from corehq.apps.app_manager.models import Application from corehq.apps.app_manager.tests.util import TestFileMixin // ... modified code ... from doctest import Example from lxml.doctestcompare import LXMLOutputChecker class XmlTest(TestCase): ... # end snippet class SuiteTest(XmlTest, TestFileMixin): file_path = ('data', 'suite') def setUp(self): self.app = Application.wrap(self.get_json('app')) ... def test_normal_suite(self): self.assertXmlEqual(self.get_xml('normal-suite'), self.app.create_suite()) // ... rest of the code ...
9660fb734ecf2ad2c181eba790cdd2ddc9ed423e
cyder/core/system/forms.py
cyder/core/system/forms.py
from django import forms from cyder.base.eav.forms import get_eav_form from cyder.base.mixins import UsabilityFormMixin from cyder.core.system.models import System, SystemAV class SystemForm(forms.ModelForm): class Meta: model = System class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin): interface_type = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Static', 'Static Interface'), ('Dynamic', 'Dynamic Interface'))) class Meta: model = System SystemAVForm = get_eav_form(SystemAV, System)
from django import forms from cyder.base.eav.forms import get_eav_form from cyder.base.mixins import UsabilityFormMixin from cyder.core.system.models import System, SystemAV class SystemForm(forms.ModelForm): class Meta: model = System class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin): interface_type = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('static_interface', 'Static Interface'), ('dynamic_interface', 'Dynamic Interface'))) class Meta: model = System SystemAVForm = get_eav_form(SystemAV, System)
Fix system form interface_type choices
Fix system form interface_type choices
Python
bsd-3-clause
murrown/cyder,drkitty/cyder,OSU-Net/cyder,akeym/cyder,murrown/cyder,OSU-Net/cyder,murrown/cyder,akeym/cyder,murrown/cyder,drkitty/cyder,zeeman/cyder,zeeman/cyder,OSU-Net/cyder,akeym/cyder,zeeman/cyder,OSU-Net/cyder,drkitty/cyder,akeym/cyder,drkitty/cyder,zeeman/cyder
from django import forms from cyder.base.eav.forms import get_eav_form from cyder.base.mixins import UsabilityFormMixin from cyder.core.system.models import System, SystemAV class SystemForm(forms.ModelForm): class Meta: model = System class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin): interface_type = forms.ChoiceField( widget=forms.RadioSelect, choices=( - ('Static', 'Static Interface'), - ('Dynamic', 'Dynamic Interface'))) + ('static_interface', 'Static Interface'), + ('dynamic_interface', 'Dynamic Interface'))) class Meta: model = System SystemAVForm = get_eav_form(SystemAV, System)
Fix system form interface_type choices
## Code Before: from django import forms from cyder.base.eav.forms import get_eav_form from cyder.base.mixins import UsabilityFormMixin from cyder.core.system.models import System, SystemAV class SystemForm(forms.ModelForm): class Meta: model = System class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin): interface_type = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('Static', 'Static Interface'), ('Dynamic', 'Dynamic Interface'))) class Meta: model = System SystemAVForm = get_eav_form(SystemAV, System) ## Instruction: Fix system form interface_type choices ## Code After: from django import forms from cyder.base.eav.forms import get_eav_form from cyder.base.mixins import UsabilityFormMixin from cyder.core.system.models import System, SystemAV class SystemForm(forms.ModelForm): class Meta: model = System class ExtendedSystemForm(forms.ModelForm, UsabilityFormMixin): interface_type = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('static_interface', 'Static Interface'), ('dynamic_interface', 'Dynamic Interface'))) class Meta: model = System SystemAVForm = get_eav_form(SystemAV, System)
... interface_type = forms.ChoiceField( widget=forms.RadioSelect, choices=( ('static_interface', 'Static Interface'), ('dynamic_interface', 'Dynamic Interface'))) class Meta: ...
e67c57128f88b61eac08e488e54343d48f1454c7
ddcz/forms/authentication.py
ddcz/forms/authentication.py
import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): nick = forms.CharField(label="Nick", max_length=20) password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users
import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): nick = forms.CharField(label="Nick", max_length=25) password = forms.CharField( label="Heslo", max_length=100, widget=forms.PasswordInput ) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users
Update LoginForm to match reality
Update LoginForm to match reality
Python
mit
dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard
import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): - nick = forms.CharField(label="Nick", max_length=20) + nick = forms.CharField(label="Nick", max_length=25) + password = forms.CharField( - password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput) + label="Heslo", max_length=100, widget=forms.PasswordInput + ) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users
Update LoginForm to match reality
## Code Before: import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): nick = forms.CharField(label="Nick", max_length=20) password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users ## Instruction: Update LoginForm to match reality ## Code After: import logging from django import forms from django.contrib.auth import forms as authforms from ..models import UserProfile logger = logging.getLogger(__name__) class LoginForm(forms.Form): nick = forms.CharField(label="Nick", max_length=25) password = forms.CharField( label="Heslo", max_length=100, widget=forms.PasswordInput ) class PasswordResetForm(authforms.PasswordResetForm): def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This is overridem from original form to use UserProfile instead of standard user model since that is normative for email storage. """ user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email) users = tuple( list( up.user for up in user_profiles if up.user.has_usable_password() and up.user.is_active ) ) logger.info( "Selected users for password reset: %s" % ", ".join([str(u.pk) for u in users]) ) return users
// ... existing code ... class LoginForm(forms.Form): nick = forms.CharField(label="Nick", max_length=25) password = forms.CharField( label="Heslo", max_length=100, widget=forms.PasswordInput ) // ... rest of the code ...
105ac0020dbc60fe57da7db75fb82cf872a0834d
crm_switzerland/models/res_partner.py
crm_switzerland/models/res_partner.py
from odoo import api, models class ResPartner(models.Model): _inherit = 'res.partner' @api.multi def schedule_meeting(self): old_action = super(ResPartner, self).schedule_meeting() new_action = self.env.ref( 'crm_switzerland.action_calendar_event_partner').read()[0] new_action['domain'] = [('partner_ids', 'in', self.ids)] new_action['context'] = { 'default_partner_ids': old_action['context'][ 'default_partner_ids'] } return new_action @api.model def _notify_prepare_template_context(self, message): # modification of context for lang message = message.with_context(lang=self.lang) return super(ResPartner, self).\ _notify_prepare_template_context(message)
from odoo import api, models class ResPartner(models.Model): _inherit = 'res.partner' @api.multi def schedule_meeting(self): old_action = super(ResPartner, self).schedule_meeting() new_action = self.env.ref( 'crm_switzerland.action_calendar_event_partner').read()[0] new_action['domain'] = [('partner_ids', 'in', self.ids)] new_action['context'] = { 'default_partner_ids': old_action['context'][ 'default_partner_ids'] } return new_action @api.model def _notify_prepare_template_context(self, message): # modification of context for lang message = message.with_context(lang=self[:1].lang or self.env.lang) return super(ResPartner, self).\ _notify_prepare_template_context(message)
FIX bug when sending notification to multiple partners
FIX bug when sending notification to multiple partners
Python
agpl-3.0
ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland
from odoo import api, models class ResPartner(models.Model): _inherit = 'res.partner' @api.multi def schedule_meeting(self): old_action = super(ResPartner, self).schedule_meeting() new_action = self.env.ref( 'crm_switzerland.action_calendar_event_partner').read()[0] new_action['domain'] = [('partner_ids', 'in', self.ids)] new_action['context'] = { 'default_partner_ids': old_action['context'][ 'default_partner_ids'] } return new_action @api.model def _notify_prepare_template_context(self, message): # modification of context for lang - message = message.with_context(lang=self.lang) + message = message.with_context(lang=self[:1].lang or self.env.lang) return super(ResPartner, self).\ _notify_prepare_template_context(message)
FIX bug when sending notification to multiple partners
## Code Before: from odoo import api, models class ResPartner(models.Model): _inherit = 'res.partner' @api.multi def schedule_meeting(self): old_action = super(ResPartner, self).schedule_meeting() new_action = self.env.ref( 'crm_switzerland.action_calendar_event_partner').read()[0] new_action['domain'] = [('partner_ids', 'in', self.ids)] new_action['context'] = { 'default_partner_ids': old_action['context'][ 'default_partner_ids'] } return new_action @api.model def _notify_prepare_template_context(self, message): # modification of context for lang message = message.with_context(lang=self.lang) return super(ResPartner, self).\ _notify_prepare_template_context(message) ## Instruction: FIX bug when sending notification to multiple partners ## Code After: from odoo import api, models class ResPartner(models.Model): _inherit = 'res.partner' @api.multi def schedule_meeting(self): old_action = super(ResPartner, self).schedule_meeting() new_action = self.env.ref( 'crm_switzerland.action_calendar_event_partner').read()[0] new_action['domain'] = [('partner_ids', 'in', self.ids)] new_action['context'] = { 'default_partner_ids': old_action['context'][ 'default_partner_ids'] } return new_action @api.model def _notify_prepare_template_context(self, message): # modification of context for lang message = message.with_context(lang=self[:1].lang or self.env.lang) return super(ResPartner, self).\ _notify_prepare_template_context(message)
// ... existing code ... def _notify_prepare_template_context(self, message): # modification of context for lang message = message.with_context(lang=self[:1].lang or self.env.lang) return super(ResPartner, self).\ _notify_prepare_template_context(message) // ... rest of the code ...
e0a6ea3d48691bedfb39a0a92d569ea4aaf61810
pavement.py
pavement.py
import paver.doctools import paver.setuputils from schevo.release import setup_meta options( setup=setup_meta, sphinx=Bunch( docroot='doc', builddir='build', sourcedir='source', ), ) @task @needs('paver.doctools.html') def openhtml(): index_file = path('doc/build/html/index.html') sh('open ' + index_file)
from schevo.release import setup_meta options( setup=setup_meta, sphinx=Bunch( docroot='doc', builddir='build', sourcedir='source', ), ) try: import paver.doctools except ImportError: pass else: @task @needs('paver.doctools.html') def openhtml(): index_file = path('doc/build/html/index.html') sh('open ' + index_file)
Make paver.doctools optional, to allow for downloading of ==dev eggs
Make paver.doctools optional, to allow for downloading of ==dev eggs Signed-off-by: Matthew R. Scott <[email protected]>
Python
mit
Schevo/schevo,Schevo/schevo
- import paver.doctools - import paver.setuputils - from schevo.release import setup_meta options( setup=setup_meta, sphinx=Bunch( docroot='doc', builddir='build', sourcedir='source', ), ) - @task + try: + import paver.doctools + except ImportError: + pass + else: + @task - @needs('paver.doctools.html') + @needs('paver.doctools.html') - def openhtml(): + def openhtml(): - index_file = path('doc/build/html/index.html') + index_file = path('doc/build/html/index.html') - sh('open ' + index_file) + sh('open ' + index_file)
Make paver.doctools optional, to allow for downloading of ==dev eggs
## Code Before: import paver.doctools import paver.setuputils from schevo.release import setup_meta options( setup=setup_meta, sphinx=Bunch( docroot='doc', builddir='build', sourcedir='source', ), ) @task @needs('paver.doctools.html') def openhtml(): index_file = path('doc/build/html/index.html') sh('open ' + index_file) ## Instruction: Make paver.doctools optional, to allow for downloading of ==dev eggs ## Code After: from schevo.release import setup_meta options( setup=setup_meta, sphinx=Bunch( docroot='doc', builddir='build', sourcedir='source', ), ) try: import paver.doctools except ImportError: pass else: @task @needs('paver.doctools.html') def openhtml(): index_file = path('doc/build/html/index.html') sh('open ' + index_file)
# ... existing code ... from schevo.release import setup_meta # ... modified code ... try: import paver.doctools except ImportError: pass else: @task @needs('paver.doctools.html') def openhtml(): index_file = path('doc/build/html/index.html') sh('open ' + index_file) # ... rest of the code ...
245a6b190c1ad3c5e380c5957d6b98593fdb4006
logserver/__main__.py
logserver/__main__.py
from argparse import ArgumentParser import logging from . import run_server from .handlers import SQLiteHandler parser = ArgumentParser( description="Run a standalone log server using a SQLite database.") parser.add_argument("-p", "--port", default=9123, help="Port to listen on") parser.add_argument("-t", "--table", default="logs", help="Name of table to store logs in") parser.add_argument("-f", "--filename", default="logs.sqlite", help="SQLite filename") args = parser.parse_args() handlers = [ logging.StreamHandler(), SQLiteHandler(args.filename, args.table) ] print("Listening for logs to handle on port", args.port) server = run_server(handlers, port=args.port)
from argparse import ArgumentParser import logging from . import run_server from .handlers import SQLiteHandler parser = ArgumentParser( description="Run a standalone log server using a SQLite database.") parser.add_argument("-p", "--port", default=9123, help="Port to listen on") parser.add_argument("-t", "--table", default="logs", help="Name of table to store logs in") parser.add_argument("-f", "--filename", default="logs.sqlite", help="SQLite filename") args = parser.parse_args() stream_handler = logging.StreamHandler() stream_handler.setFormatter(logging.Formatter( "[%(levelname)1.1s %(name)s %(asctime)s] %(msg)s")) handlers = [ stream_handler, SQLiteHandler(args.filename, args.table) ] print("Listening for logs to handle on port", args.port) server = run_server(handlers, port=args.port)
Format messages in stream handler
Format messages in stream handler
Python
mit
mivade/logserver
from argparse import ArgumentParser import logging from . import run_server from .handlers import SQLiteHandler parser = ArgumentParser( description="Run a standalone log server using a SQLite database.") parser.add_argument("-p", "--port", default=9123, help="Port to listen on") parser.add_argument("-t", "--table", default="logs", help="Name of table to store logs in") parser.add_argument("-f", "--filename", default="logs.sqlite", help="SQLite filename") args = parser.parse_args() + stream_handler = logging.StreamHandler() + stream_handler.setFormatter(logging.Formatter( + "[%(levelname)1.1s %(name)s %(asctime)s] %(msg)s")) + handlers = [ - logging.StreamHandler(), + stream_handler, SQLiteHandler(args.filename, args.table) ] print("Listening for logs to handle on port", args.port) server = run_server(handlers, port=args.port)
Format messages in stream handler
## Code Before: from argparse import ArgumentParser import logging from . import run_server from .handlers import SQLiteHandler parser = ArgumentParser( description="Run a standalone log server using a SQLite database.") parser.add_argument("-p", "--port", default=9123, help="Port to listen on") parser.add_argument("-t", "--table", default="logs", help="Name of table to store logs in") parser.add_argument("-f", "--filename", default="logs.sqlite", help="SQLite filename") args = parser.parse_args() handlers = [ logging.StreamHandler(), SQLiteHandler(args.filename, args.table) ] print("Listening for logs to handle on port", args.port) server = run_server(handlers, port=args.port) ## Instruction: Format messages in stream handler ## Code After: from argparse import ArgumentParser import logging from . import run_server from .handlers import SQLiteHandler parser = ArgumentParser( description="Run a standalone log server using a SQLite database.") parser.add_argument("-p", "--port", default=9123, help="Port to listen on") parser.add_argument("-t", "--table", default="logs", help="Name of table to store logs in") parser.add_argument("-f", "--filename", default="logs.sqlite", help="SQLite filename") args = parser.parse_args() stream_handler = logging.StreamHandler() stream_handler.setFormatter(logging.Formatter( "[%(levelname)1.1s %(name)s %(asctime)s] %(msg)s")) handlers = [ stream_handler, SQLiteHandler(args.filename, args.table) ] print("Listening for logs to handle on port", args.port) server = run_server(handlers, port=args.port)
// ... existing code ... args = parser.parse_args() stream_handler = logging.StreamHandler() stream_handler.setFormatter(logging.Formatter( "[%(levelname)1.1s %(name)s %(asctime)s] %(msg)s")) handlers = [ stream_handler, SQLiteHandler(args.filename, args.table) ] // ... rest of the code ...
33f7e94385a8d4fbba797fc81b2565906604c9a4
src/zeit/content/cp/browser/area.py
src/zeit/content/cp/browser/area.py
import zeit.content.cp.browser.blocks.teaser import zeit.content.cp.interfaces import zeit.edit.browser.block import zeit.edit.browser.view import zope.formlib.form class ViewletManager(zeit.edit.browser.block.BlockViewletManager): @property def css_class(self): classes = super(ViewletManager, self).css_class return ' '.join(['editable-area', classes]) class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout): interface = zeit.content.cp.interfaces.IArea layout_prefix = 'teaserbar' # XXX should be area layouts = () form_fields = zope.formlib.form.Fields() # XXX implement me class EditCommon(zeit.edit.browser.view.EditBox): form_fields = zope.formlib.form.Fields( zeit.content.cp.interfaces.IArea).select( 'supertitle', 'teaserText', 'background_color') form_fields['background_color'].custom_widget = ( zeit.cms.browser.widget.ColorpickerWidget)
import zeit.content.cp.browser.blocks.teaser import zeit.content.cp.interfaces import zeit.edit.browser.block import zeit.edit.browser.view import zope.formlib.form class ViewletManager(zeit.edit.browser.block.BlockViewletManager): @property def css_class(self): classes = super(ViewletManager, self).css_class return ' '.join(['editable-area', classes]) class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout): interface = zeit.content.cp.interfaces.IArea layout_prefix = 'teaserbar' # XXX should be area layouts = () class EditCommon(zeit.edit.browser.view.EditBox): form_fields = zope.formlib.form.Fields( zeit.content.cp.interfaces.IArea).select( 'supertitle', 'teaserText', 'background_color') form_fields['background_color'].custom_widget = ( zeit.cms.browser.widget.ColorpickerWidget)
Remove field that now has the same default implementation on its super class.
Remove field that now has the same default implementation on its super class.
Python
bsd-3-clause
ZeitOnline/zeit.content.cp,ZeitOnline/zeit.content.cp
import zeit.content.cp.browser.blocks.teaser import zeit.content.cp.interfaces import zeit.edit.browser.block import zeit.edit.browser.view import zope.formlib.form class ViewletManager(zeit.edit.browser.block.BlockViewletManager): @property def css_class(self): classes = super(ViewletManager, self).css_class return ' '.join(['editable-area', classes]) class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout): interface = zeit.content.cp.interfaces.IArea layout_prefix = 'teaserbar' # XXX should be area layouts = () - form_fields = zope.formlib.form.Fields() # XXX implement me - class EditCommon(zeit.edit.browser.view.EditBox): form_fields = zope.formlib.form.Fields( zeit.content.cp.interfaces.IArea).select( 'supertitle', 'teaserText', 'background_color') form_fields['background_color'].custom_widget = ( zeit.cms.browser.widget.ColorpickerWidget)
Remove field that now has the same default implementation on its super class.
## Code Before: import zeit.content.cp.browser.blocks.teaser import zeit.content.cp.interfaces import zeit.edit.browser.block import zeit.edit.browser.view import zope.formlib.form class ViewletManager(zeit.edit.browser.block.BlockViewletManager): @property def css_class(self): classes = super(ViewletManager, self).css_class return ' '.join(['editable-area', classes]) class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout): interface = zeit.content.cp.interfaces.IArea layout_prefix = 'teaserbar' # XXX should be area layouts = () form_fields = zope.formlib.form.Fields() # XXX implement me class EditCommon(zeit.edit.browser.view.EditBox): form_fields = zope.formlib.form.Fields( zeit.content.cp.interfaces.IArea).select( 'supertitle', 'teaserText', 'background_color') form_fields['background_color'].custom_widget = ( zeit.cms.browser.widget.ColorpickerWidget) ## Instruction: Remove field that now has the same default implementation on its super class. ## Code After: import zeit.content.cp.browser.blocks.teaser import zeit.content.cp.interfaces import zeit.edit.browser.block import zeit.edit.browser.view import zope.formlib.form class ViewletManager(zeit.edit.browser.block.BlockViewletManager): @property def css_class(self): classes = super(ViewletManager, self).css_class return ' '.join(['editable-area', classes]) class EditProperties(zeit.content.cp.browser.blocks.teaser.EditLayout): interface = zeit.content.cp.interfaces.IArea layout_prefix = 'teaserbar' # XXX should be area layouts = () class EditCommon(zeit.edit.browser.view.EditBox): form_fields = zope.formlib.form.Fields( zeit.content.cp.interfaces.IArea).select( 'supertitle', 'teaserText', 'background_color') form_fields['background_color'].custom_widget = ( zeit.cms.browser.widget.ColorpickerWidget)
... layouts = () class EditCommon(zeit.edit.browser.view.EditBox): ...
5553481f8cc8537febbf24fbfea4315a3b61548f
corehq/apps/commtrack/management/commands/check_multiple_parentage.py
corehq/apps/commtrack/management/commands/check_multiple_parentage.py
from django.core.management.base import BaseCommand from corehq.apps.domain.models import Domain class Command(BaseCommand): def handle(self, *args, **options): self.stdout.write("Populating site codes...\n") domains = Domain.get_all() for d in domains: if d.commtrack_enabled: for loc_type in d.commtrack_settings.location_types: if len(loc_type.allowed_parents) > 1: self.stdout.write( "Found multiple parent options in domain: " + d.name )
from django.core.management.base import BaseCommand from corehq.apps.domain.models import Domain from corehq.apps.locations.models import Location import csv class Command(BaseCommand): def handle(self, *args, **options): with open('parentage_results.csv', 'wb+') as csvfile: csv_writer = csv.writer( csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL ) csv_writer.writerow([ 'id', 'name', 'is_test', 'location_type', 'number_of_offending_locations', ]) domains = Domain.get_all() for d in domains: if d.commtrack_enabled: for loc_type in d.commtrack_settings.location_types: if len(loc_type.allowed_parents) > 1: count = len(list( Location.filter_by_type( d.name, loc_type.name, ) )) csv_writer.writerow([ d._id, d.name, d.is_test, loc_type.name, count ])
Switch to CSV and add important info
Switch to CSV and add important info
Python
bsd-3-clause
puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
from django.core.management.base import BaseCommand from corehq.apps.domain.models import Domain + from corehq.apps.locations.models import Location + import csv class Command(BaseCommand): def handle(self, *args, **options): - self.stdout.write("Populating site codes...\n") + with open('parentage_results.csv', 'wb+') as csvfile: + csv_writer = csv.writer( + csvfile, + delimiter=',', + quotechar='|', + quoting=csv.QUOTE_MINIMAL + ) - domains = Domain.get_all() + csv_writer.writerow([ + 'id', + 'name', + 'is_test', + 'location_type', + 'number_of_offending_locations', + ]) + domains = Domain.get_all() - for d in domains: - if d.commtrack_enabled: - for loc_type in d.commtrack_settings.location_types: - if len(loc_type.allowed_parents) > 1: - self.stdout.write( - "Found multiple parent options in domain: " + - d.name - ) + for d in domains: + if d.commtrack_enabled: + for loc_type in d.commtrack_settings.location_types: + if len(loc_type.allowed_parents) > 1: + count = len(list( + Location.filter_by_type( + d.name, + loc_type.name, + ) + )) + + csv_writer.writerow([ + d._id, + d.name, + d.is_test, + loc_type.name, + count + ]) +
Switch to CSV and add important info
## Code Before: from django.core.management.base import BaseCommand from corehq.apps.domain.models import Domain class Command(BaseCommand): def handle(self, *args, **options): self.stdout.write("Populating site codes...\n") domains = Domain.get_all() for d in domains: if d.commtrack_enabled: for loc_type in d.commtrack_settings.location_types: if len(loc_type.allowed_parents) > 1: self.stdout.write( "Found multiple parent options in domain: " + d.name ) ## Instruction: Switch to CSV and add important info ## Code After: from django.core.management.base import BaseCommand from corehq.apps.domain.models import Domain from corehq.apps.locations.models import Location import csv class Command(BaseCommand): def handle(self, *args, **options): with open('parentage_results.csv', 'wb+') as csvfile: csv_writer = csv.writer( csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL ) csv_writer.writerow([ 'id', 'name', 'is_test', 'location_type', 'number_of_offending_locations', ]) domains = Domain.get_all() for d in domains: if d.commtrack_enabled: for loc_type in d.commtrack_settings.location_types: if len(loc_type.allowed_parents) > 1: count = len(list( Location.filter_by_type( d.name, loc_type.name, ) )) csv_writer.writerow([ d._id, d.name, d.is_test, loc_type.name, count ])
... from django.core.management.base import BaseCommand from corehq.apps.domain.models import Domain from corehq.apps.locations.models import Location import csv ... class Command(BaseCommand): def handle(self, *args, **options): with open('parentage_results.csv', 'wb+') as csvfile: csv_writer = csv.writer( csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL ) csv_writer.writerow([ 'id', 'name', 'is_test', 'location_type', 'number_of_offending_locations', ]) domains = Domain.get_all() for d in domains: if d.commtrack_enabled: for loc_type in d.commtrack_settings.location_types: if len(loc_type.allowed_parents) > 1: count = len(list( Location.filter_by_type( d.name, loc_type.name, ) )) csv_writer.writerow([ d._id, d.name, d.is_test, loc_type.name, count ]) ...
ccda4d9c3e737161e0477c569e074ffb884a541c
src/sentry/api/authentication.py
src/sentry/api/authentication.py
from __future__ import absolute_import from django.contrib.auth.models import AnonymousUser from django.utils.crypto import constant_time_compare from rest_framework.authentication import BasicAuthentication from rest_framework.exceptions import AuthenticationFailed from sentry.app import raven from sentry.models import ApiKey, ProjectKey class QuietBasicAuthentication(BasicAuthentication): def authenticate_header(self, request): return 'xBasic realm="%s"' % self.www_authenticate_realm class ApiKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): if password: return try: key = ApiKey.objects.get_from_cache(key=userid) except ApiKey.DoesNotExist: return None if not key.is_active: raise AuthenticationFailed('Key is disabled') raven.tags_context({ 'api_key': userid, }) return (AnonymousUser(), key) class ProjectKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): try: pk = ProjectKey.objects.get_from_cache(public_key=userid) except ProjectKey.DoesNotExist: return None if not constant_time_compare(pk.secret_key, password): return None if not pk.is_active: raise AuthenticationFailed('Key is disabled') if not pk.roles.api: raise AuthenticationFailed('Key does not allow API access') return (AnonymousUser(), pk)
from __future__ import absolute_import from django.contrib.auth.models import AnonymousUser from django.utils.crypto import constant_time_compare from rest_framework.authentication import BasicAuthentication from rest_framework.exceptions import AuthenticationFailed from sentry.app import raven from sentry.models import ApiKey, ProjectKey class QuietBasicAuthentication(BasicAuthentication): def authenticate_header(self, request): return 'xBasic realm="%s"' % self.www_authenticate_realm class ApiKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): if password: return try: key = ApiKey.objects.get_from_cache(key=userid) except ApiKey.DoesNotExist: raise AuthenticationFailed('API key is not valid') if not key.is_active: raise AuthenticationFailed('Key is disabled') raven.tags_context({ 'api_key': userid, }) return (AnonymousUser(), key) class ProjectKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): try: pk = ProjectKey.objects.get_from_cache(public_key=userid) except ProjectKey.DoesNotExist: return None if not constant_time_compare(pk.secret_key, password): return None if not pk.is_active: raise AuthenticationFailed('Key is disabled') if not pk.roles.api: raise AuthenticationFailed('Key does not allow API access') return (AnonymousUser(), pk)
Raise hard error when API key is invalid
Raise hard error when API key is invalid
Python
bsd-3-clause
fotinakis/sentry,ifduyue/sentry,JamesMura/sentry,daevaorn/sentry,looker/sentry,gencer/sentry,JamesMura/sentry,fotinakis/sentry,fotinakis/sentry,gencer/sentry,zenefits/sentry,looker/sentry,jean/sentry,BuildingLink/sentry,mvaled/sentry,mvaled/sentry,mvaled/sentry,ifduyue/sentry,jean/sentry,nicholasserra/sentry,gencer/sentry,JamesMura/sentry,jean/sentry,beeftornado/sentry,looker/sentry,JackDanger/sentry,daevaorn/sentry,nicholasserra/sentry,looker/sentry,JackDanger/sentry,alexm92/sentry,alexm92/sentry,gencer/sentry,BuildingLink/sentry,zenefits/sentry,mitsuhiko/sentry,daevaorn/sentry,zenefits/sentry,JackDanger/sentry,nicholasserra/sentry,beeftornado/sentry,looker/sentry,gencer/sentry,JamesMura/sentry,jean/sentry,jean/sentry,ifduyue/sentry,mvaled/sentry,zenefits/sentry,daevaorn/sentry,BuildingLink/sentry,JamesMura/sentry,zenefits/sentry,fotinakis/sentry,BuildingLink/sentry,beeftornado/sentry,mvaled/sentry,alexm92/sentry,ifduyue/sentry,ifduyue/sentry,BuildingLink/sentry,mvaled/sentry,mitsuhiko/sentry
from __future__ import absolute_import from django.contrib.auth.models import AnonymousUser from django.utils.crypto import constant_time_compare from rest_framework.authentication import BasicAuthentication from rest_framework.exceptions import AuthenticationFailed from sentry.app import raven from sentry.models import ApiKey, ProjectKey class QuietBasicAuthentication(BasicAuthentication): def authenticate_header(self, request): return 'xBasic realm="%s"' % self.www_authenticate_realm class ApiKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): if password: return try: key = ApiKey.objects.get_from_cache(key=userid) except ApiKey.DoesNotExist: - return None + raise AuthenticationFailed('API key is not valid') if not key.is_active: raise AuthenticationFailed('Key is disabled') raven.tags_context({ 'api_key': userid, }) return (AnonymousUser(), key) class ProjectKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): try: pk = ProjectKey.objects.get_from_cache(public_key=userid) except ProjectKey.DoesNotExist: return None if not constant_time_compare(pk.secret_key, password): return None if not pk.is_active: raise AuthenticationFailed('Key is disabled') if not pk.roles.api: raise AuthenticationFailed('Key does not allow API access') return (AnonymousUser(), pk)
Raise hard error when API key is invalid
## Code Before: from __future__ import absolute_import from django.contrib.auth.models import AnonymousUser from django.utils.crypto import constant_time_compare from rest_framework.authentication import BasicAuthentication from rest_framework.exceptions import AuthenticationFailed from sentry.app import raven from sentry.models import ApiKey, ProjectKey class QuietBasicAuthentication(BasicAuthentication): def authenticate_header(self, request): return 'xBasic realm="%s"' % self.www_authenticate_realm class ApiKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): if password: return try: key = ApiKey.objects.get_from_cache(key=userid) except ApiKey.DoesNotExist: return None if not key.is_active: raise AuthenticationFailed('Key is disabled') raven.tags_context({ 'api_key': userid, }) return (AnonymousUser(), key) class ProjectKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): try: pk = ProjectKey.objects.get_from_cache(public_key=userid) except ProjectKey.DoesNotExist: return None if not constant_time_compare(pk.secret_key, password): return None if not pk.is_active: raise AuthenticationFailed('Key is disabled') if not pk.roles.api: raise AuthenticationFailed('Key does not allow API access') return (AnonymousUser(), pk) ## Instruction: Raise hard error when API key is invalid ## Code After: from __future__ import absolute_import from django.contrib.auth.models import AnonymousUser from django.utils.crypto import constant_time_compare from rest_framework.authentication import BasicAuthentication from rest_framework.exceptions import AuthenticationFailed from sentry.app import raven from sentry.models import ApiKey, ProjectKey class QuietBasicAuthentication(BasicAuthentication): def authenticate_header(self, request): return 'xBasic realm="%s"' % self.www_authenticate_realm class ApiKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): if password: return try: key = ApiKey.objects.get_from_cache(key=userid) except ApiKey.DoesNotExist: raise AuthenticationFailed('API key is not valid') if not key.is_active: raise AuthenticationFailed('Key is disabled') raven.tags_context({ 'api_key': userid, }) return (AnonymousUser(), key) class ProjectKeyAuthentication(QuietBasicAuthentication): def authenticate_credentials(self, userid, password): try: pk = ProjectKey.objects.get_from_cache(public_key=userid) except ProjectKey.DoesNotExist: return None if not constant_time_compare(pk.secret_key, password): return None if not pk.is_active: raise AuthenticationFailed('Key is disabled') if not pk.roles.api: raise AuthenticationFailed('Key does not allow API access') return (AnonymousUser(), pk)
// ... existing code ... key = ApiKey.objects.get_from_cache(key=userid) except ApiKey.DoesNotExist: raise AuthenticationFailed('API key is not valid') if not key.is_active: // ... rest of the code ...
3dda5003b3ce345a08369b15fc3447d2a4c7d1ad
examples/plotting_2d.py
examples/plotting_2d.py
from bluesky.examples import * from bluesky.standard_config import RE from matplotlib import pyplot as plt from xray_vision.backend.mpl.cross_section_2d import CrossSection import numpy as np import filestore.api as fsapi import time as ttime from filestore.handlers import NpyHandler fsapi.register_handler('npy', NpyHandler) def stepscan(motor, det): for i in np.linspace(-5, 5, 75): yield Msg('create') yield Msg('set', motor, i) yield Msg('trigger', det) yield Msg('read', motor) yield Msg('read', det) yield Msg('save') ic = LiveImage('det_2d') table_callback = LiveTable(fields=[motor._name, det_2d._name]) RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i')
from bluesky.examples import * from bluesky.tests.utils import setup_test_run_engine from matplotlib import pyplot as plt from xray_vision.backend.mpl.cross_section_2d import CrossSection import numpy as np import filestore.api as fsapi import time as ttime from filestore.handlers import NpyHandler fsapi.register_handler('npy', NpyHandler) def stepscan(motor, det): for i in np.linspace(-5, 5, 75): yield Msg('create') yield Msg('set', motor, i) yield Msg('trigger', det) yield Msg('read', motor) yield Msg('read', det) yield Msg('save') ic = LiveImage('det_2d') table_callback = LiveTable(fields=[motor._name, det_2d._name]) RE = setup_test_run_engine() RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i')
Set up RunEngine with required metadata.
FIX: Set up RunEngine with required metadata.
Python
bsd-3-clause
ericdill/bluesky,sameera2004/bluesky,sameera2004/bluesky,klauer/bluesky,klauer/bluesky,dchabot/bluesky,ericdill/bluesky,dchabot/bluesky
from bluesky.examples import * - from bluesky.standard_config import RE + from bluesky.tests.utils import setup_test_run_engine from matplotlib import pyplot as plt from xray_vision.backend.mpl.cross_section_2d import CrossSection import numpy as np import filestore.api as fsapi import time as ttime from filestore.handlers import NpyHandler fsapi.register_handler('npy', NpyHandler) def stepscan(motor, det): for i in np.linspace(-5, 5, 75): yield Msg('create') yield Msg('set', motor, i) yield Msg('trigger', det) yield Msg('read', motor) yield Msg('read', det) yield Msg('save') ic = LiveImage('det_2d') table_callback = LiveTable(fields=[motor._name, det_2d._name]) + RE = setup_test_run_engine() RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i')
Set up RunEngine with required metadata.
## Code Before: from bluesky.examples import * from bluesky.standard_config import RE from matplotlib import pyplot as plt from xray_vision.backend.mpl.cross_section_2d import CrossSection import numpy as np import filestore.api as fsapi import time as ttime from filestore.handlers import NpyHandler fsapi.register_handler('npy', NpyHandler) def stepscan(motor, det): for i in np.linspace(-5, 5, 75): yield Msg('create') yield Msg('set', motor, i) yield Msg('trigger', det) yield Msg('read', motor) yield Msg('read', det) yield Msg('save') ic = LiveImage('det_2d') table_callback = LiveTable(fields=[motor._name, det_2d._name]) RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i') ## Instruction: Set up RunEngine with required metadata. ## Code After: from bluesky.examples import * from bluesky.tests.utils import setup_test_run_engine from matplotlib import pyplot as plt from xray_vision.backend.mpl.cross_section_2d import CrossSection import numpy as np import filestore.api as fsapi import time as ttime from filestore.handlers import NpyHandler fsapi.register_handler('npy', NpyHandler) def stepscan(motor, det): for i in np.linspace(-5, 5, 75): yield Msg('create') yield Msg('set', motor, i) yield Msg('trigger', det) yield Msg('read', motor) yield Msg('read', det) yield Msg('save') ic = LiveImage('det_2d') table_callback = LiveTable(fields=[motor._name, det_2d._name]) RE = setup_test_run_engine() RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i')
# ... existing code ... from bluesky.examples import * from bluesky.tests.utils import setup_test_run_engine from matplotlib import pyplot as plt from xray_vision.backend.mpl.cross_section_2d import CrossSection # ... modified code ... ic = LiveImage('det_2d') table_callback = LiveTable(fields=[motor._name, det_2d._name]) RE = setup_test_run_engine() RE(stepscan(motor, det_2d), subs={'event': ic, 'all': table_callback}, beamline_id='c08i') # ... rest of the code ...
761b2675471dfee97943e4123e45fc058d8f8153
qsdl/simulator/defaultCostCallbacks.py
qsdl/simulator/defaultCostCallbacks.py
''' Created on 3.10.2012 @author: Teemu Pääkkönen ''' def get_callback_map(): AVG_AUTOCOMPLETE_INPUT_LENGTH = 5 def get_current_query_cost( simulation, key_cost, interaction_type ): if "basic" == interaction_type: return float(key_cost) * len( simulation.get_current_query_text() ) elif "autocomplete" == interaction_type: return float(key_cost) * AVG_AUTOCOMPLETE_INPUT_LENGTH return { 'get_default_current_query_cost': get_current_query_cost }
''' Created on 3.10.2012 @author: Teemu Pääkkönen ''' def get_callback_map(): AVG_AUTOCOMPLETE_INPUT_LENGTH = 5 def get_current_query_cost( simulation, key_cost, interaction_type = "basic" ): if "basic" == interaction_type: return float(key_cost) * len( simulation.get_current_query_text() ) elif "autocomplete" == interaction_type: return float(key_cost) * AVG_AUTOCOMPLETE_INPUT_LENGTH return { 'get_default_current_query_cost': get_current_query_cost }
Change default query cost calculation interaction type to "basic"
Change default query cost calculation interaction type to "basic"
Python
mit
fire-uta/ir-simulation,fire-uta/ir-simulation
''' Created on 3.10.2012 @author: Teemu Pääkkönen ''' def get_callback_map(): AVG_AUTOCOMPLETE_INPUT_LENGTH = 5 - def get_current_query_cost( simulation, key_cost, interaction_type ): + def get_current_query_cost( simulation, key_cost, interaction_type = "basic" ): if "basic" == interaction_type: return float(key_cost) * len( simulation.get_current_query_text() ) elif "autocomplete" == interaction_type: return float(key_cost) * AVG_AUTOCOMPLETE_INPUT_LENGTH return { 'get_default_current_query_cost': get_current_query_cost }
Change default query cost calculation interaction type to "basic"
## Code Before: ''' Created on 3.10.2012 @author: Teemu Pkknen ''' def get_callback_map(): AVG_AUTOCOMPLETE_INPUT_LENGTH = 5 def get_current_query_cost( simulation, key_cost, interaction_type ): if "basic" == interaction_type: return float(key_cost) * len( simulation.get_current_query_text() ) elif "autocomplete" == interaction_type: return float(key_cost) * AVG_AUTOCOMPLETE_INPUT_LENGTH return { 'get_default_current_query_cost': get_current_query_cost } ## Instruction: Change default query cost calculation interaction type to "basic" ## Code After: ''' Created on 3.10.2012 @author: Teemu Pkknen ''' def get_callback_map(): AVG_AUTOCOMPLETE_INPUT_LENGTH = 5 def get_current_query_cost( simulation, key_cost, interaction_type = "basic" ): if "basic" == interaction_type: return float(key_cost) * len( simulation.get_current_query_text() ) elif "autocomplete" == interaction_type: return float(key_cost) * AVG_AUTOCOMPLETE_INPUT_LENGTH return { 'get_default_current_query_cost': get_current_query_cost }
# ... existing code ... AVG_AUTOCOMPLETE_INPUT_LENGTH = 5 def get_current_query_cost( simulation, key_cost, interaction_type = "basic" ): if "basic" == interaction_type: return float(key_cost) * len( simulation.get_current_query_text() ) # ... rest of the code ...
c33aa32b868a33422f79103474cece38131a93c3
src/oscar/apps/customer/migrations/0005_auto_20170413_1857.py
src/oscar/apps/customer/migrations/0005_auto_20170413_1857.py
from __future__ import unicode_literals from django.db import migrations def forwards_func(apps, schema_editor): User = apps.get_model("auth", "User") for user in User.objects.all(): user.emails.update(email=user.email) class Migration(migrations.Migration): dependencies = [ ('customer', '0004_auto_20170413_1853'), ] operations = [ migrations.RunPython(forwards_func) ]
from __future__ import unicode_literals from django.db import migrations from oscar.core.compat import get_user_model User = get_user_model() def forwards_func(apps, schema_editor): for user in User.objects.all(): user.emails.update(email=user.email) class Migration(migrations.Migration): dependencies = [ ('customer', '0004_auto_20170413_1853'), ] operations = [ migrations.RunPython(forwards_func) ]
Load current User model for customer email migration.
Load current User model for customer email migration.
Python
bsd-3-clause
solarissmoke/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,sasha0/django-oscar,sasha0/django-oscar,django-oscar/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar
from __future__ import unicode_literals from django.db import migrations + from oscar.core.compat import get_user_model + + User = get_user_model() + def forwards_func(apps, schema_editor): - User = apps.get_model("auth", "User") - for user in User.objects.all(): user.emails.update(email=user.email) class Migration(migrations.Migration): dependencies = [ ('customer', '0004_auto_20170413_1853'), ] operations = [ migrations.RunPython(forwards_func) ]
Load current User model for customer email migration.
## Code Before: from __future__ import unicode_literals from django.db import migrations def forwards_func(apps, schema_editor): User = apps.get_model("auth", "User") for user in User.objects.all(): user.emails.update(email=user.email) class Migration(migrations.Migration): dependencies = [ ('customer', '0004_auto_20170413_1853'), ] operations = [ migrations.RunPython(forwards_func) ] ## Instruction: Load current User model for customer email migration. ## Code After: from __future__ import unicode_literals from django.db import migrations from oscar.core.compat import get_user_model User = get_user_model() def forwards_func(apps, schema_editor): for user in User.objects.all(): user.emails.update(email=user.email) class Migration(migrations.Migration): dependencies = [ ('customer', '0004_auto_20170413_1853'), ] operations = [ migrations.RunPython(forwards_func) ]
# ... existing code ... from django.db import migrations from oscar.core.compat import get_user_model User = get_user_model() def forwards_func(apps, schema_editor): for user in User.objects.all(): user.emails.update(email=user.email) # ... rest of the code ...
ef2b13ec19d28b56647c0a11044cba6d400f9175
vimiv/image_enhance.py
vimiv/image_enhance.py
"""Wrapper functions for the _image_enhance C extension.""" from gi.repository import GdkPixbuf, GLib from vimiv import _image_enhance def enhance_bc(pixbuf, brightness, contrast): """Enhance brightness and contrast of a GdkPixbuf.Pixbuf. Args: pixbuf: Original GdkPixbuf.Pixbuf to work with. brightness: Float between -1.0 and 1.0 to change brightness. contrast: Float between -1.0 and 1.0 to change contrast. Return: The enhanced GdkPixbuf.Pixbuf """ width = pixbuf.get_width() height = pixbuf.get_height() data = pixbuf.get_pixels() has_alpha = pixbuf.get_has_alpha() c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C rowstride = 4 * width if has_alpha else 3 * width # Update plain bytes using C extension # Pylint does not read this properly # pylint: disable=no-member data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast) gdata = GLib.Bytes.new(data) return GdkPixbuf.Pixbuf.new_from_bytes( gdata, GdkPixbuf.Colorspace.RGB, has_alpha, 8, width, height, rowstride)
"""Wrapper functions for the _image_enhance C extension.""" from gi.repository import GdkPixbuf, GLib from vimiv import _image_enhance def enhance_bc(pixbuf, brightness, contrast): """Enhance brightness and contrast of a GdkPixbuf.Pixbuf. Args: pixbuf: Original GdkPixbuf.Pixbuf to work with. brightness: Float between -1.0 and 1.0 to change brightness. contrast: Float between -1.0 and 1.0 to change contrast. Return: The enhanced GdkPixbuf.Pixbuf """ data = pixbuf.get_pixels() has_alpha = pixbuf.get_has_alpha() c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C # Update plain bytes using C extension # Pylint does not read this properly # pylint: disable=no-member data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast) gdata = GLib.Bytes.new(data) return GdkPixbuf.Pixbuf.new_from_bytes(gdata, pixbuf.get_colorspace(), has_alpha, pixbuf.get_bits_per_sample(), pixbuf.get_width(), pixbuf.get_height(), pixbuf.get_rowstride())
Use rowstride directly from GdkPixbuf in enhance
Use rowstride directly from GdkPixbuf in enhance The custom calculation of rowstride failed for images with weird dimensions and completely broke enhance. fixes #51
Python
mit
karlch/vimiv,karlch/vimiv,karlch/vimiv
"""Wrapper functions for the _image_enhance C extension.""" from gi.repository import GdkPixbuf, GLib from vimiv import _image_enhance def enhance_bc(pixbuf, brightness, contrast): """Enhance brightness and contrast of a GdkPixbuf.Pixbuf. Args: pixbuf: Original GdkPixbuf.Pixbuf to work with. brightness: Float between -1.0 and 1.0 to change brightness. contrast: Float between -1.0 and 1.0 to change contrast. Return: The enhanced GdkPixbuf.Pixbuf """ - width = pixbuf.get_width() - height = pixbuf.get_height() data = pixbuf.get_pixels() has_alpha = pixbuf.get_has_alpha() c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C - rowstride = 4 * width if has_alpha else 3 * width # Update plain bytes using C extension # Pylint does not read this properly # pylint: disable=no-member data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast) gdata = GLib.Bytes.new(data) - return GdkPixbuf.Pixbuf.new_from_bytes( + return GdkPixbuf.Pixbuf.new_from_bytes(gdata, - gdata, GdkPixbuf.Colorspace.RGB, has_alpha, 8, width, height, rowstride) + pixbuf.get_colorspace(), + has_alpha, + pixbuf.get_bits_per_sample(), + pixbuf.get_width(), + pixbuf.get_height(), + pixbuf.get_rowstride())
Use rowstride directly from GdkPixbuf in enhance
## Code Before: """Wrapper functions for the _image_enhance C extension.""" from gi.repository import GdkPixbuf, GLib from vimiv import _image_enhance def enhance_bc(pixbuf, brightness, contrast): """Enhance brightness and contrast of a GdkPixbuf.Pixbuf. Args: pixbuf: Original GdkPixbuf.Pixbuf to work with. brightness: Float between -1.0 and 1.0 to change brightness. contrast: Float between -1.0 and 1.0 to change contrast. Return: The enhanced GdkPixbuf.Pixbuf """ width = pixbuf.get_width() height = pixbuf.get_height() data = pixbuf.get_pixels() has_alpha = pixbuf.get_has_alpha() c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C rowstride = 4 * width if has_alpha else 3 * width # Update plain bytes using C extension # Pylint does not read this properly # pylint: disable=no-member data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast) gdata = GLib.Bytes.new(data) return GdkPixbuf.Pixbuf.new_from_bytes( gdata, GdkPixbuf.Colorspace.RGB, has_alpha, 8, width, height, rowstride) ## Instruction: Use rowstride directly from GdkPixbuf in enhance ## Code After: """Wrapper functions for the _image_enhance C extension.""" from gi.repository import GdkPixbuf, GLib from vimiv import _image_enhance def enhance_bc(pixbuf, brightness, contrast): """Enhance brightness and contrast of a GdkPixbuf.Pixbuf. Args: pixbuf: Original GdkPixbuf.Pixbuf to work with. brightness: Float between -1.0 and 1.0 to change brightness. contrast: Float between -1.0 and 1.0 to change contrast. Return: The enhanced GdkPixbuf.Pixbuf """ data = pixbuf.get_pixels() has_alpha = pixbuf.get_has_alpha() c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C # Update plain bytes using C extension # Pylint does not read this properly # pylint: disable=no-member data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast) gdata = GLib.Bytes.new(data) return GdkPixbuf.Pixbuf.new_from_bytes(gdata, pixbuf.get_colorspace(), has_alpha, pixbuf.get_bits_per_sample(), pixbuf.get_width(), pixbuf.get_height(), pixbuf.get_rowstride())
// ... existing code ... The enhanced GdkPixbuf.Pixbuf """ data = pixbuf.get_pixels() has_alpha = pixbuf.get_has_alpha() c_has_alpha = 1 if has_alpha else 0 # Numbers are easier for C # Update plain bytes using C extension # Pylint does not read this properly // ... modified code ... data = _image_enhance.enhance_bc(data, c_has_alpha, brightness, contrast) gdata = GLib.Bytes.new(data) return GdkPixbuf.Pixbuf.new_from_bytes(gdata, pixbuf.get_colorspace(), has_alpha, pixbuf.get_bits_per_sample(), pixbuf.get_width(), pixbuf.get_height(), pixbuf.get_rowstride()) // ... rest of the code ...
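An aside on the rowstride change in the record above: GdkPixbuf reports rowstride as the byte distance between the starts of consecutive rows, and that distance may include alignment padding, so hand-computing it as channels times width is not guaranteed to match. The short Python sketch below is illustrative only, assumes PyGObject with GdkPixbuf available, and uses a placeholder file name; it simply compares the hand-computed value with the one the pixbuf reports.

import gi
gi.require_version("GdkPixbuf", "2.0")
from gi.repository import GdkPixbuf

# Placeholder path; any real image file would do for the comparison.
pixbuf = GdkPixbuf.Pixbuf.new_from_file("example.png")
computed = pixbuf.get_n_channels() * pixbuf.get_width()
actual = pixbuf.get_rowstride()
# The two values can differ when rows are padded, which is why the fix above
# reads rowstride (and the other image properties) straight from the pixbuf.
print(computed, actual)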
d677f3762e0daf16e1be05a88b058194ddf43e15
comics/comics/perrybiblefellowship.py
comics/comics/perrybiblefellowship.py
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "The Perry Bible Fellowship" language = "en" url = "http://www.pbfcomics.com/" start_date = "2001-01-01" rights = "Nicholas Gurewitch" class Crawler(CrawlerBase): history_capable_date = "2001-01-01" time_zone = "US/Eastern" def crawl(self, pub_date): feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml") for entry in feed.for_date(pub_date): url = entry.summary.src('img[src*="/archive_b/"]') title = entry.title return CrawlerImage(url, title)
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "The Perry Bible Fellowship" language = "en" url = "http://www.pbfcomics.com/" start_date = "2001-01-01" rights = "Nicholas Gurewitch" class Crawler(CrawlerBase): history_capable_date = "2019-06-12" time_zone = "US/Eastern" def crawl(self, pub_date): feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml") for entry in feed.for_date(pub_date): page = self.parse_page(entry.link) images = page.root.xpath("//div[@id='comic']/img") crawler_images = [] for image in images: title = entry.title crawler_images.append(CrawlerImage(image.get("src"), title))
Rewrite "The Perry Bible Fellowship" after feed change
Rewrite "The Perry Bible Fellowship" after feed change
Python
agpl-3.0
datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "The Perry Bible Fellowship" language = "en" url = "http://www.pbfcomics.com/" start_date = "2001-01-01" rights = "Nicholas Gurewitch" class Crawler(CrawlerBase): - history_capable_date = "2001-01-01" + history_capable_date = "2019-06-12" time_zone = "US/Eastern" def crawl(self, pub_date): feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml") for entry in feed.for_date(pub_date): - url = entry.summary.src('img[src*="/archive_b/"]') + page = self.parse_page(entry.link) + images = page.root.xpath("//div[@id='comic']/img") + crawler_images = [] + for image in images: - title = entry.title + title = entry.title - return CrawlerImage(url, title) + crawler_images.append(CrawlerImage(image.get("src"), title))
Rewrite "The Perry Bible Fellowship" after feed change
## Code Before: from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "The Perry Bible Fellowship" language = "en" url = "http://www.pbfcomics.com/" start_date = "2001-01-01" rights = "Nicholas Gurewitch" class Crawler(CrawlerBase): history_capable_date = "2001-01-01" time_zone = "US/Eastern" def crawl(self, pub_date): feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml") for entry in feed.for_date(pub_date): url = entry.summary.src('img[src*="/archive_b/"]') title = entry.title return CrawlerImage(url, title) ## Instruction: Rewrite "The Perry Bible Fellowship" after feed change ## Code After: from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "The Perry Bible Fellowship" language = "en" url = "http://www.pbfcomics.com/" start_date = "2001-01-01" rights = "Nicholas Gurewitch" class Crawler(CrawlerBase): history_capable_date = "2019-06-12" time_zone = "US/Eastern" def crawl(self, pub_date): feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml") for entry in feed.for_date(pub_date): page = self.parse_page(entry.link) images = page.root.xpath("//div[@id='comic']/img") crawler_images = [] for image in images: title = entry.title crawler_images.append(CrawlerImage(image.get("src"), title))
... class Crawler(CrawlerBase): history_capable_date = "2019-06-12" time_zone = "US/Eastern" ... feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml") for entry in feed.for_date(pub_date): page = self.parse_page(entry.link) images = page.root.xpath("//div[@id='comic']/img") crawler_images = [] for image in images: title = entry.title crawler_images.append(CrawlerImage(image.get("src"), title)) ...
ad70a7ec6543d64ec185eb2d52ccfa291a1dfad6
servicerating/views.py
servicerating/views.py
import csv from django.http import HttpResponse from servicerating.models import Response def report_responses(request): qs = Response.objects.raw("SELECT servicerating_response.*, servicerating_extra.value AS clinic_code from servicerating_response INNER JOIN servicerating_extra ON servicerating_response.contact_id = servicerating_extra.contact_id WHERE servicerating_extra.key = 'clinic_code'") # Create the HttpResponse object with the appropriate CSV header. response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'attachment; filename="servicerating_incl_clinic_code.csv"' writer = csv.writer(response) writer.writerow(["Contact", "Key", "Value", "Created At", "Updated At", "Clinic Code"]) for obj in qs: writer.writerow([obj.contact, obj.key, obj.value, obj.created_at, obj.updated_at, obj.clinic_code]) return response
import csv from django.http import HttpResponse from servicerating.models import Response def report_responses(request): qs = Response.objects.raw("SELECT servicerating_response.*, servicerating_extra.value AS clinic_code from servicerating_response INNER JOIN servicerating_extra ON servicerating_response.contact_id = servicerating_extra.contact_id WHERE servicerating_extra.key = 'clinic_code'") # Create the HttpResponse object with the appropriate CSV header. response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'attachment; filename="servicerating_incl_clinic_code.csv"' writer = csv.writer(response) writer.writerow(["Rating ID", "Contact ID", "Key", "Value", "Created At", "Updated At", "Clinic Code"]) for obj in qs: writer.writerow([obj.id, obj.contact_id, obj.key, obj.value, obj.created_at, obj.updated_at, obj.clinic_code]) return response
Remove FK's from CSV export for massive speed boost
Remove FK's from CSV export for massive speed boost
Python
bsd-3-clause
praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control
import csv from django.http import HttpResponse from servicerating.models import Response def report_responses(request): qs = Response.objects.raw("SELECT servicerating_response.*, servicerating_extra.value AS clinic_code from servicerating_response INNER JOIN servicerating_extra ON servicerating_response.contact_id = servicerating_extra.contact_id WHERE servicerating_extra.key = 'clinic_code'") # Create the HttpResponse object with the appropriate CSV header. response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'attachment; filename="servicerating_incl_clinic_code.csv"' writer = csv.writer(response) - writer.writerow(["Contact", "Key", "Value", "Created At", "Updated At", "Clinic Code"]) + writer.writerow(["Rating ID", "Contact ID", "Key", "Value", "Created At", "Updated At", "Clinic Code"]) for obj in qs: - writer.writerow([obj.contact, obj.key, obj.value, obj.created_at, + writer.writerow([obj.id, obj.contact_id, obj.key, obj.value, obj.created_at, obj.updated_at, obj.clinic_code]) return response
Remove FK's from CSV export for massive speed boost
## Code Before: import csv from django.http import HttpResponse from servicerating.models import Response def report_responses(request): qs = Response.objects.raw("SELECT servicerating_response.*, servicerating_extra.value AS clinic_code from servicerating_response INNER JOIN servicerating_extra ON servicerating_response.contact_id = servicerating_extra.contact_id WHERE servicerating_extra.key = 'clinic_code'") # Create the HttpResponse object with the appropriate CSV header. response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'attachment; filename="servicerating_incl_clinic_code.csv"' writer = csv.writer(response) writer.writerow(["Contact", "Key", "Value", "Created At", "Updated At", "Clinic Code"]) for obj in qs: writer.writerow([obj.contact, obj.key, obj.value, obj.created_at, obj.updated_at, obj.clinic_code]) return response ## Instruction: Remove FK's from CSV export for massive speed boost ## Code After: import csv from django.http import HttpResponse from servicerating.models import Response def report_responses(request): qs = Response.objects.raw("SELECT servicerating_response.*, servicerating_extra.value AS clinic_code from servicerating_response INNER JOIN servicerating_extra ON servicerating_response.contact_id = servicerating_extra.contact_id WHERE servicerating_extra.key = 'clinic_code'") # Create the HttpResponse object with the appropriate CSV header. response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'attachment; filename="servicerating_incl_clinic_code.csv"' writer = csv.writer(response) writer.writerow(["Rating ID", "Contact ID", "Key", "Value", "Created At", "Updated At", "Clinic Code"]) for obj in qs: writer.writerow([obj.id, obj.contact_id, obj.key, obj.value, obj.created_at, obj.updated_at, obj.clinic_code]) return response
... writer = csv.writer(response) writer.writerow(["Rating ID", "Contact ID", "Key", "Value", "Created At", "Updated At", "Clinic Code"]) for obj in qs: writer.writerow([obj.id, obj.contact_id, obj.key, obj.value, obj.created_at, obj.updated_at, obj.clinic_code]) ...
cbddfe308f4e0da728974777f10b245a966520b6
summarize/__init__.py
summarize/__init__.py
from __future__ import unicode_literals from itertools import combinations from operator import itemgetter from distance import jaccard from networkx import Graph, pagerank from nltk import tokenize from .utils import get_stopwords, get_words def summarize(text, sentence_count=5, language='english'): stopwords = get_stopwords(language) sentence_list = tokenize.sent_tokenize(text, language) wordsets = [get_words(sentence, stopwords) for sentence in sentence_list] graph = Graph() pairs = combinations(enumerate(filter(None, wordsets)), 2) for (index_a, words_a), (index_b, words_b) in pairs: similarity = 1 - jaccard(words_a, words_b) if similarity > 0: graph.add_edge(index_a, index_b, weight=similarity) ranked_sentence_indexes = pagerank(graph).items() sentences_by_rank = sorted( ranked_sentence_indexes, key=itemgetter(1), reverse=True) best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count]) best_sentences_in_order = sorted(best_sentences) return ' '.join(sentence_list[index] for index in best_sentences_in_order)
from __future__ import unicode_literals from itertools import combinations from operator import itemgetter from distance import jaccard from networkx import Graph, pagerank from nltk import tokenize from .utils import get_stopwords, get_words def summarize(text, sentence_count=5, language='english'): stopwords = get_stopwords(language) sentence_list = tokenize.sent_tokenize(text, language) wordsets = [get_words(sentence, stopwords) for sentence in sentence_list] graph = Graph() pairs = combinations(enumerate(wordsets), 2) for (index_a, words_a), (index_b, words_b) in pairs: if words_a and words_b: similarity = 1 - jaccard(words_a, words_b) if similarity > 0: graph.add_edge(index_a, index_b, weight=similarity) ranked_sentence_indexes = pagerank(graph).items() sentences_by_rank = sorted( ranked_sentence_indexes, key=itemgetter(1), reverse=True) best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count]) best_sentences_in_order = sorted(best_sentences) return ' '.join(sentence_list[index] for index in best_sentences_in_order)
Fix sentence index shifting with empty sentences
Fix sentence index shifting with empty sentences
Python
mit
despawnerer/summarize
from __future__ import unicode_literals from itertools import combinations from operator import itemgetter from distance import jaccard from networkx import Graph, pagerank from nltk import tokenize from .utils import get_stopwords, get_words def summarize(text, sentence_count=5, language='english'): stopwords = get_stopwords(language) sentence_list = tokenize.sent_tokenize(text, language) wordsets = [get_words(sentence, stopwords) for sentence in sentence_list] graph = Graph() - pairs = combinations(enumerate(filter(None, wordsets)), 2) + pairs = combinations(enumerate(wordsets), 2) for (index_a, words_a), (index_b, words_b) in pairs: + if words_a and words_b: - similarity = 1 - jaccard(words_a, words_b) + similarity = 1 - jaccard(words_a, words_b) - if similarity > 0: + if similarity > 0: - graph.add_edge(index_a, index_b, weight=similarity) + graph.add_edge(index_a, index_b, weight=similarity) ranked_sentence_indexes = pagerank(graph).items() sentences_by_rank = sorted( ranked_sentence_indexes, key=itemgetter(1), reverse=True) best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count]) best_sentences_in_order = sorted(best_sentences) return ' '.join(sentence_list[index] for index in best_sentences_in_order)
Fix sentence index shifting with empty sentences
## Code Before: from __future__ import unicode_literals from itertools import combinations from operator import itemgetter from distance import jaccard from networkx import Graph, pagerank from nltk import tokenize from .utils import get_stopwords, get_words def summarize(text, sentence_count=5, language='english'): stopwords = get_stopwords(language) sentence_list = tokenize.sent_tokenize(text, language) wordsets = [get_words(sentence, stopwords) for sentence in sentence_list] graph = Graph() pairs = combinations(enumerate(filter(None, wordsets)), 2) for (index_a, words_a), (index_b, words_b) in pairs: similarity = 1 - jaccard(words_a, words_b) if similarity > 0: graph.add_edge(index_a, index_b, weight=similarity) ranked_sentence_indexes = pagerank(graph).items() sentences_by_rank = sorted( ranked_sentence_indexes, key=itemgetter(1), reverse=True) best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count]) best_sentences_in_order = sorted(best_sentences) return ' '.join(sentence_list[index] for index in best_sentences_in_order) ## Instruction: Fix sentence index shifting with empty sentences ## Code After: from __future__ import unicode_literals from itertools import combinations from operator import itemgetter from distance import jaccard from networkx import Graph, pagerank from nltk import tokenize from .utils import get_stopwords, get_words def summarize(text, sentence_count=5, language='english'): stopwords = get_stopwords(language) sentence_list = tokenize.sent_tokenize(text, language) wordsets = [get_words(sentence, stopwords) for sentence in sentence_list] graph = Graph() pairs = combinations(enumerate(wordsets), 2) for (index_a, words_a), (index_b, words_b) in pairs: if words_a and words_b: similarity = 1 - jaccard(words_a, words_b) if similarity > 0: graph.add_edge(index_a, index_b, weight=similarity) ranked_sentence_indexes = pagerank(graph).items() sentences_by_rank = sorted( ranked_sentence_indexes, key=itemgetter(1), reverse=True) best_sentences = map(itemgetter(0), sentences_by_rank[:sentence_count]) best_sentences_in_order = sorted(best_sentences) return ' '.join(sentence_list[index] for index in best_sentences_in_order)
// ... existing code ... graph = Graph() pairs = combinations(enumerate(wordsets), 2) for (index_a, words_a), (index_b, words_b) in pairs: if words_a and words_b: similarity = 1 - jaccard(words_a, words_b) if similarity > 0: graph.add_edge(index_a, index_b, weight=similarity) ranked_sentence_indexes = pagerank(graph).items() // ... rest of the code ...
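A brief illustration of the index-shift bug fixed in the record above, using made-up word sets and only the standard library: filtering out empty sets before enumerate renumbers the survivors, so the graph indices no longer line up with sentence_list, whereas enumerating first and skipping empty sets inside the loop keeps the original indices.

from itertools import combinations

wordsets = [{"cat"}, set(), {"dog"}]  # the middle sentence yielded no keywords

# Filter-then-enumerate: {"dog"} is numbered 1, but it belongs to sentence index 2.
print(list(enumerate(filter(None, wordsets))))  # [(0, {'cat'}), (1, {'dog'})]

# Enumerate-then-check keeps the original indices; only the (0, 2) pair survives.
for (i, a), (j, b) in combinations(enumerate(wordsets), 2):
    if a and b:
        print(i, j)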
0a2c2a32ceb19503816a9ef35d3de5468097f364
gui_app/utils/StringUtil.py
gui_app/utils/StringUtil.py
import ast def isEmpty(value): if value: return False else: return True def isNotEmpty(value): if not value: return False else: return True def stringToDict(param): if isNotEmpty(param) or param != '': return ast.literal_eval(param) def stringToDictList(list): dic_list = [] if list is not None: for r in list: dic_list.append(stringToDict(r)) return dic_list def deleteNullDict(dic): if dic is not None: diccopy = dic.copy() if 'csrfmiddlewaretoken' in diccopy: del diccopy['csrfmiddlewaretoken'] for key, value in dic.items(): if isEmpty(value) or value == 'None' or value == '': del diccopy[key] dic = diccopy return dic def putKeyVlue(param): param = stringToDict(param) if param is not None: param = ast.literal_eval(param) return param def list_to_record(list): if isEmpty(list): return None record = None for param in list: record = param break return record def isNone(*value): for v in value: if v is None: return True return False
import ast def isEmpty(value): if value: return False else: return True def isNotEmpty(value): if not value: return False else: return True def stringToDict(param): if isNotEmpty(param) or param != '': return ast.literal_eval(param) def stringToDictList(list): dic_list = [] if list is not None: for r in list: dic_list.append(stringToDict(r)) return dic_list def deleteNullDict(dic): if dic is not None: diccopy = dic.copy() if 'csrfmiddlewaretoken' in diccopy: del diccopy['csrfmiddlewaretoken'] for key, value in dic.items(): if value == 'None': del diccopy[key] dic = diccopy return dic def putKeyVlue(param): param = stringToDict(param) if param is not None: param = ast.literal_eval(param) return param def list_to_record(list): if isEmpty(list): return None record = None for param in list: record = param break return record def isNone(*value): for v in value: if v is None: return True return False
Support to request null string
Support to request null string
Python
apache-2.0
cloudconductor/cloud_conductor_gui,cloudconductor/cloud_conductor_gui,cloudconductor/cloud_conductor_gui
import ast def isEmpty(value): if value: return False else: return True def isNotEmpty(value): if not value: return False else: return True def stringToDict(param): if isNotEmpty(param) or param != '': return ast.literal_eval(param) def stringToDictList(list): dic_list = [] if list is not None: for r in list: dic_list.append(stringToDict(r)) return dic_list def deleteNullDict(dic): if dic is not None: diccopy = dic.copy() if 'csrfmiddlewaretoken' in diccopy: del diccopy['csrfmiddlewaretoken'] for key, value in dic.items(): - if isEmpty(value) or value == 'None' or value == '': + if value == 'None': del diccopy[key] dic = diccopy return dic def putKeyVlue(param): param = stringToDict(param) if param is not None: param = ast.literal_eval(param) return param def list_to_record(list): if isEmpty(list): return None record = None for param in list: record = param break return record def isNone(*value): for v in value: if v is None: return True return False
Support to request null string
## Code Before: import ast def isEmpty(value): if value: return False else: return True def isNotEmpty(value): if not value: return False else: return True def stringToDict(param): if isNotEmpty(param) or param != '': return ast.literal_eval(param) def stringToDictList(list): dic_list = [] if list is not None: for r in list: dic_list.append(stringToDict(r)) return dic_list def deleteNullDict(dic): if dic is not None: diccopy = dic.copy() if 'csrfmiddlewaretoken' in diccopy: del diccopy['csrfmiddlewaretoken'] for key, value in dic.items(): if isEmpty(value) or value == 'None' or value == '': del diccopy[key] dic = diccopy return dic def putKeyVlue(param): param = stringToDict(param) if param is not None: param = ast.literal_eval(param) return param def list_to_record(list): if isEmpty(list): return None record = None for param in list: record = param break return record def isNone(*value): for v in value: if v is None: return True return False ## Instruction: Support to request null string ## Code After: import ast def isEmpty(value): if value: return False else: return True def isNotEmpty(value): if not value: return False else: return True def stringToDict(param): if isNotEmpty(param) or param != '': return ast.literal_eval(param) def stringToDictList(list): dic_list = [] if list is not None: for r in list: dic_list.append(stringToDict(r)) return dic_list def deleteNullDict(dic): if dic is not None: diccopy = dic.copy() if 'csrfmiddlewaretoken' in diccopy: del diccopy['csrfmiddlewaretoken'] for key, value in dic.items(): if value == 'None': del diccopy[key] dic = diccopy return dic def putKeyVlue(param): param = stringToDict(param) if param is not None: param = ast.literal_eval(param) return param def list_to_record(list): if isEmpty(list): return None record = None for param in list: record = param break return record def isNone(*value): for v in value: if v is None: return True return False
// ... existing code ... for key, value in dic.items(): if value == 'None': del diccopy[key] dic = diccopy // ... rest of the code ...
2088b3df274fd31c28baa6193c937046c04b98a6
scripts/generate_wiki_languages.py
scripts/generate_wiki_languages.py
from urllib2 import urlopen import csv import lxml.builder as lb from lxml import etree # Returns CSV of all wikipedias, ordered by number of 'good' articles URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good" data = csv.reader(urlopen(URL)) # Column 2 is the language code lang_keys = [row[2] for row in data] del lang_keys[0] # Get rid of the headers # Generate the XML x = lb.E keys = [x.item(k) for k in lang_keys] resources = x.resources( getattr(x, 'string-array')(*keys, name="preference_language_keys"), ) open("languages_list.xml", "w").write( etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True) )
from urllib2 import urlopen import csv import json import lxml.builder as lb from lxml import etree # Returns CSV of all wikipedias, ordered by number of 'good' articles URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good" data = csv.reader(urlopen(URL)) lang_keys = [] lang_local_names = [] lang_eng_names = [] for row in data: lang_keys.append(row[2]) lang_local_names.append(row[10]) lang_eng_names.append(row[1]) # Generate the XML, for Android x = lb.E keys = [x.item(k) for k in lang_keys] # Skip the headers! del keys[0] resources = x.resources( getattr(x, 'string-array')(*keys, name="preference_language_keys"), ) open("languages_list.xml", "w").write( etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True) ) # Generate the JSON, for iOS langs_json = [] # Start from 1, to skip the headers for i in xrange(1, len(lang_keys)): langs_json.append({ "code": lang_keys[i], "name": lang_local_names[i], "canonical_name": lang_eng_names[i] }) open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
Modify language generation script to make JSON for iOS
Modify language generation script to make JSON for iOS Change-Id: Ib5aec2f6cfcb5bd1187cf8863ecd50f1b1a2d20c
Python
apache-2.0
Wikinaut/wikipedia-app,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,creaITve/apps-android-tbrc-works,reproio/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,reproio/apps-android-wikipedia,wikimedia/apps-android-wikipedia,BrunoMRodrigues/apps-android-tbrc-work,BrunoMRodrigues/apps-android-tbrc-work,carloshwa/apps-android-wikipedia,creaITve/apps-android-tbrc-works,BrunoMRodrigues/apps-android-tbrc-work,Wikinaut/wikipedia-app,Wikinaut/wikipedia-app,BrunoMRodrigues/apps-android-tbrc-work,wikimedia/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,carloshwa/apps-android-wikipedia,wikimedia/apps-android-wikipedia,Wikinaut/wikipedia-app,parvez3019/apps-android-wikipedia,carloshwa/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,anirudh24seven/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,parvez3019/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,dbrant/apps-android-wikipedia,reproio/apps-android-wikipedia,reproio/apps-android-wikipedia,creaITve/apps-android-tbrc-works,anirudh24seven/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,parvez3019/apps-android-wikipedia,parvez3019/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,creaITve/apps-android-tbrc-works,wikimedia/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,SAGROUP2/apps-android-wikipedia,parvez3019/apps-android-wikipedia,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,reproio/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,Duct-and-rice/KrswtkhrWiki4Android,Wikinaut/wikipedia-app
from urllib2 import urlopen import csv + import json import lxml.builder as lb from lxml import etree # Returns CSV of all wikipedias, ordered by number of 'good' articles URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good" data = csv.reader(urlopen(URL)) - # Column 2 is the language code - lang_keys = [row[2] for row in data] + lang_keys = [] + lang_local_names = [] + lang_eng_names = [] + for row in data: + lang_keys.append(row[2]) + lang_local_names.append(row[10]) + lang_eng_names.append(row[1]) + # Generate the XML, for Android - del lang_keys[0] # Get rid of the headers - - # Generate the XML x = lb.E keys = [x.item(k) for k in lang_keys] + # Skip the headers! + del keys[0] resources = x.resources( getattr(x, 'string-array')(*keys, name="preference_language_keys"), ) open("languages_list.xml", "w").write( etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True) ) + # Generate the JSON, for iOS + langs_json = [] + # Start from 1, to skip the headers + for i in xrange(1, len(lang_keys)): + langs_json.append({ + "code": lang_keys[i], + "name": lang_local_names[i], + "canonical_name": lang_eng_names[i] + }) + + open("languages_list.json", "w").write(json.dumps(langs_json, indent=4)) +
Modify language generation script to make JSON for iOS
## Code Before: from urllib2 import urlopen import csv import lxml.builder as lb from lxml import etree # Returns CSV of all wikipedias, ordered by number of 'good' articles URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good" data = csv.reader(urlopen(URL)) # Column 2 is the language code lang_keys = [row[2] for row in data] del lang_keys[0] # Get rid of the headers # Generate the XML x = lb.E keys = [x.item(k) for k in lang_keys] resources = x.resources( getattr(x, 'string-array')(*keys, name="preference_language_keys"), ) open("languages_list.xml", "w").write( etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True) ) ## Instruction: Modify language generation script to make JSON for iOS ## Code After: from urllib2 import urlopen import csv import json import lxml.builder as lb from lxml import etree # Returns CSV of all wikipedias, ordered by number of 'good' articles URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good" data = csv.reader(urlopen(URL)) lang_keys = [] lang_local_names = [] lang_eng_names = [] for row in data: lang_keys.append(row[2]) lang_local_names.append(row[10]) lang_eng_names.append(row[1]) # Generate the XML, for Android x = lb.E keys = [x.item(k) for k in lang_keys] # Skip the headers! del keys[0] resources = x.resources( getattr(x, 'string-array')(*keys, name="preference_language_keys"), ) open("languages_list.xml", "w").write( etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True) ) # Generate the JSON, for iOS langs_json = [] # Start from 1, to skip the headers for i in xrange(1, len(lang_keys)): langs_json.append({ "code": lang_keys[i], "name": lang_local_names[i], "canonical_name": lang_eng_names[i] }) open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
# ... existing code ... from urllib2 import urlopen import csv import json import lxml.builder as lb from lxml import etree # ... modified code ... data = csv.reader(urlopen(URL)) lang_keys = [] lang_local_names = [] lang_eng_names = [] for row in data: lang_keys.append(row[2]) lang_local_names.append(row[10]) lang_eng_names.append(row[1]) # Generate the XML, for Android x = lb.E ... keys = [x.item(k) for k in lang_keys] # Skip the headers! del keys[0] resources = x.resources( getattr(x, 'string-array')(*keys, name="preference_language_keys"), ... ) # Generate the JSON, for iOS langs_json = [] # Start from 1, to skip the headers for i in xrange(1, len(lang_keys)): langs_json.append({ "code": lang_keys[i], "name": lang_local_names[i], "canonical_name": lang_eng_names[i] }) open("languages_list.json", "w").write(json.dumps(langs_json, indent=4)) # ... rest of the code ...
4b245b9a859552adb9c19fafc4bdfab5780782f2
d1_common_python/src/d1_common/__init__.py
d1_common_python/src/d1_common/__init__.py
__version__ = "2.1.0" __all__ = [ 'const', 'exceptions', 'upload', 'xmlrunner', 'types.exceptions', 'types.dataoneTypes', 'types.dataoneErrors', 'ext.mimeparser', ]
__version__ = "2.1.0" # Set default logging handler to avoid "No handler found" warnings. import logging try: from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler())
Add logging NullHandler to prevent "no handler found" errors
Add logging NullHandler to prevent "no handler found" errors This fixes the issue where "no handler found" errors would be printed by the library if library clients did not set up logging.
Python
apache-2.0
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
__version__ = "2.1.0" + # Set default logging handler to avoid "No handler found" warnings. + import logging - __all__ = [ - 'const', - 'exceptions', - 'upload', - 'xmlrunner', - 'types.exceptions', - 'types.dataoneTypes', - 'types.dataoneErrors', - 'ext.mimeparser', - ] + try: + from logging import NullHandler + except ImportError: + class NullHandler(logging.Handler): + def emit(self, record): + pass + + logging.getLogger(__name__).addHandler(NullHandler()) +
Add logging NullHandler to prevent "no handler found" errors
## Code Before: __version__ = "2.1.0" __all__ = [ 'const', 'exceptions', 'upload', 'xmlrunner', 'types.exceptions', 'types.dataoneTypes', 'types.dataoneErrors', 'ext.mimeparser', ] ## Instruction: Add logging NullHandler to prevent "no handler found" errors ## Code After: __version__ = "2.1.0" # Set default logging handler to avoid "No handler found" warnings. import logging try: from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler())
... __version__ = "2.1.0" # Set default logging handler to avoid "No handler found" warnings. import logging try: from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler()) ...
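The try/except fallback in the record above exists for interpreters older than Python 2.7/3.1; on any later version logging already ships NullHandler, so a library package can attach it in one line. A minimal sketch of that generic library-logging convention, not anything specific to the d1_common package:

import logging

# Library-logging convention: ensures this package's logger has at least one
# handler, so unconfigured applications do not get "no handler found" warnings.
logging.getLogger(__name__).addHandler(logging.NullHandler())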
01a9b6457d78dd583637bf8174edda40e2bd3276
django_website/blog/feeds.py
django_website/blog/feeds.py
from __future__ import absolute_import from django.contrib.syndication.views import Feed from .models import Entry class WeblogEntryFeed(Feed): title = "The Django weblog" link = "http://www.djangoproject.com/weblog/" description = "Latest news about Django, the Python Web framework." def items(self): return Entry.objects.published()[:10] def item_pubdate(self, item): return item.pub_date
from __future__ import absolute_import from django.contrib.syndication.views import Feed from .models import Entry class WeblogEntryFeed(Feed): title = "The Django weblog" link = "http://www.djangoproject.com/weblog/" description = "Latest news about Django, the Python Web framework." def items(self): return Entry.objects.published()[:10] def item_pubdate(self, item): return item.pub_date def item_author_name(self, item): return item.author def item_description(self, item): return item.body_html
Add author name and body to the weblog RSS feed.
Add author name and body to the weblog RSS feed.
Python
bsd-3-clause
alawnchen/djangoproject.com,django/djangoproject.com,nanuxbe/django,django/djangoproject.com,khkaminska/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,rmoorman/djangoproject.com,rmoorman/djangoproject.com,django/djangoproject.com,nanuxbe/django,django/djangoproject.com,xavierdutreilh/djangoproject.com,gnarf/djangoproject.com,relekang/djangoproject.com,rmoorman/djangoproject.com,relekang/djangoproject.com,rmoorman/djangoproject.com,hassanabidpk/djangoproject.com,vxvinh1511/djangoproject.com,vxvinh1511/djangoproject.com,khkaminska/djangoproject.com,nanuxbe/django,alawnchen/djangoproject.com,khkaminska/djangoproject.com,vxvinh1511/djangoproject.com,xavierdutreilh/djangoproject.com,xavierdutreilh/djangoproject.com,hassanabidpk/djangoproject.com,xavierdutreilh/djangoproject.com,gnarf/djangoproject.com,relekang/djangoproject.com,hassanabidpk/djangoproject.com,vxvinh1511/djangoproject.com,django/djangoproject.com,alawnchen/djangoproject.com,relekang/djangoproject.com,gnarf/djangoproject.com,gnarf/djangoproject.com,hassanabidpk/djangoproject.com,nanuxbe/django,alawnchen/djangoproject.com
from __future__ import absolute_import from django.contrib.syndication.views import Feed from .models import Entry class WeblogEntryFeed(Feed): title = "The Django weblog" link = "http://www.djangoproject.com/weblog/" description = "Latest news about Django, the Python Web framework." def items(self): return Entry.objects.published()[:10] def item_pubdate(self, item): return item.pub_date + def item_author_name(self, item): + return item.author + + def item_description(self, item): + return item.body_html +
Add author name and body to the weblog RSS feed.
## Code Before: from __future__ import absolute_import from django.contrib.syndication.views import Feed from .models import Entry class WeblogEntryFeed(Feed): title = "The Django weblog" link = "http://www.djangoproject.com/weblog/" description = "Latest news about Django, the Python Web framework." def items(self): return Entry.objects.published()[:10] def item_pubdate(self, item): return item.pub_date ## Instruction: Add author name and body to the weblog RSS feed. ## Code After: from __future__ import absolute_import from django.contrib.syndication.views import Feed from .models import Entry class WeblogEntryFeed(Feed): title = "The Django weblog" link = "http://www.djangoproject.com/weblog/" description = "Latest news about Django, the Python Web framework." def items(self): return Entry.objects.published()[:10] def item_pubdate(self, item): return item.pub_date def item_author_name(self, item): return item.author def item_description(self, item): return item.body_html
// ... existing code ... def item_pubdate(self, item): return item.pub_date def item_author_name(self, item): return item.author def item_description(self, item): return item.body_html // ... rest of the code ...
6fe391b2e2f9b88a6835a6636a5d58810852ab5e
pyhole/tests/test_log.py
pyhole/tests/test_log.py
"""Pyhole Log Unit Tests""" import os import unittest from pyhole.core import logger from pyhole.core import utils class TestLogger(unittest.TestCase): def test_logger(self): test_log_dir = utils.get_home_directory() + "logs/" try: # NOTE(jk0): If the configuration file doesn't exist, the config # class will generate it and raise a SystemExit. logger.setup_logger(name="test") except SystemExit: logger.setup_logger(name="test") test_log = logger.get_logger("TEST") self.assertEqual("TEST", test_log.name) self.assertEqual(test_log.level, 0) os.unlink(test_log_dir + "test.log")
"""Pyhole Log Unit Tests""" import os import unittest from pyhole.core import logger from pyhole.core import utils class TestLogger(unittest.TestCase): def test_logger(self): test_log_dir = utils.get_home_directory() + "logs/" try: # NOTE(jk0): If the configuration file doesn't exist, the config # class will generate it and raise a SystemExit. logger.setup_logger("test") except SystemExit: logger.setup_logger("test") test_log = logger.get_logger("TEST") self.assertEqual("TEST", test_log.name) self.assertEqual(test_log.level, 0) os.unlink(test_log_dir + "test.log")
Use setup_logger properly in tests.
Use setup_logger properly in tests.
Python
apache-2.0
jk0/pyhole,jk0/pyhole,jk0/pyhole
"""Pyhole Log Unit Tests""" import os import unittest from pyhole.core import logger from pyhole.core import utils class TestLogger(unittest.TestCase): def test_logger(self): test_log_dir = utils.get_home_directory() + "logs/" try: # NOTE(jk0): If the configuration file doesn't exist, the config # class will generate it and raise a SystemExit. - logger.setup_logger(name="test") + logger.setup_logger("test") except SystemExit: - logger.setup_logger(name="test") + logger.setup_logger("test") test_log = logger.get_logger("TEST") self.assertEqual("TEST", test_log.name) self.assertEqual(test_log.level, 0) os.unlink(test_log_dir + "test.log")
Use setup_logger properly in tests.
## Code Before: """Pyhole Log Unit Tests""" import os import unittest from pyhole.core import logger from pyhole.core import utils class TestLogger(unittest.TestCase): def test_logger(self): test_log_dir = utils.get_home_directory() + "logs/" try: # NOTE(jk0): If the configuration file doesn't exist, the config # class will generate it and raise a SystemExit. logger.setup_logger(name="test") except SystemExit: logger.setup_logger(name="test") test_log = logger.get_logger("TEST") self.assertEqual("TEST", test_log.name) self.assertEqual(test_log.level, 0) os.unlink(test_log_dir + "test.log") ## Instruction: Use setup_logger properly in tests. ## Code After: """Pyhole Log Unit Tests""" import os import unittest from pyhole.core import logger from pyhole.core import utils class TestLogger(unittest.TestCase): def test_logger(self): test_log_dir = utils.get_home_directory() + "logs/" try: # NOTE(jk0): If the configuration file doesn't exist, the config # class will generate it and raise a SystemExit. logger.setup_logger("test") except SystemExit: logger.setup_logger("test") test_log = logger.get_logger("TEST") self.assertEqual("TEST", test_log.name) self.assertEqual(test_log.level, 0) os.unlink(test_log_dir + "test.log")
// ... existing code ... # NOTE(jk0): If the configuration file doesn't exist, the config # class will generate it and raise a SystemExit. logger.setup_logger("test") except SystemExit: logger.setup_logger("test") test_log = logger.get_logger("TEST") // ... rest of the code ...
0f9418eed089938e0094f40cc15682ef59e041a1
__init__.py
__init__.py
import default_settings from flask.ext.plugins import Plugin from flask import current_app as app from pybossa_gravatar.gravatar import Gravatar from pybossa.model.user import User from sqlalchemy import event __plugin__ = "PyBossaGravatar" __version__ = "0.1.0" gravatar = Gravatar() class PyBossaGravatar(Plugin): """A PyBossa plugin for Gravatar integration.""" def setup(self): """Setup the plugin.""" self.load_config() gravatar.init_app(app) self.setup_event_listener() def load_config(self): """Configure the plugin.""" settings = [key for key in dir(default_settings) if key.isupper()] for s in settings: if not app.config.get(s): app.config[s] = getattr(default_settings, s) def setup_event_listener(self): """Setup event listener.""" @event.listens_for(User, 'before_insert') def add_user_event(mapper, conn, target): """Set gravatar by default for new users.""" gravatar.set(target, update_repo=False)
import default_settings from flask.ext.plugins import Plugin from flask import current_app as app from flask import redirect from pybossa_gravatar.gravatar import Gravatar from pybossa.model.user import User from sqlalchemy import event from flask.ext.login import current_user __plugin__ = "PyBossaGravatar" __version__ = "0.1.0" gravatar = Gravatar() class PyBossaGravatar(Plugin): """A PyBossa plugin for Gravatar integration.""" def setup(self): """Setup the plugin.""" self.load_config() gravatar.init_app(app) self.setup_event_listener() self.setup_url_rule() def load_config(self): """Configure the plugin.""" settings = [key for key in dir(default_settings) if key.isupper()] for s in settings: if not app.config.get(s): app.config[s] = getattr(default_settings, s) def setup_event_listener(self): """Setup event listener.""" @event.listens_for(User, 'before_insert') def add_user_event(mapper, conn, target): """Set gravatar by default for new users.""" gravatar.set(target, update_repo=False) def setup_url_rule(self): """Setup URL rule.""" @app.route('/account/set-gravatar') def set_gravatar(self): """Set gravatar for the current user.""" if current_user.is_anonymous(): return redirect(url_for('account.signin')) gravatar.set(current_user)
Add URL rule to set Gravatar for current user
Add URL rule to set Gravatar for current user
Python
bsd-3-clause
alexandermendes/pybossa-gravatar
import default_settings from flask.ext.plugins import Plugin from flask import current_app as app + from flask import redirect from pybossa_gravatar.gravatar import Gravatar from pybossa.model.user import User from sqlalchemy import event + from flask.ext.login import current_user __plugin__ = "PyBossaGravatar" __version__ = "0.1.0" gravatar = Gravatar() class PyBossaGravatar(Plugin): """A PyBossa plugin for Gravatar integration.""" def setup(self): """Setup the plugin.""" self.load_config() gravatar.init_app(app) self.setup_event_listener() + self.setup_url_rule() def load_config(self): """Configure the plugin.""" settings = [key for key in dir(default_settings) if key.isupper()] for s in settings: if not app.config.get(s): app.config[s] = getattr(default_settings, s) def setup_event_listener(self): """Setup event listener.""" @event.listens_for(User, 'before_insert') def add_user_event(mapper, conn, target): """Set gravatar by default for new users.""" gravatar.set(target, update_repo=False) + + + def setup_url_rule(self): + """Setup URL rule.""" + + @app.route('/account/set-gravatar') + def set_gravatar(self): + """Set gravatar for the current user.""" + if current_user.is_anonymous(): + return redirect(url_for('account.signin')) + gravatar.set(current_user) + +
Add URL rule to set Gravatar for current user
## Code Before: import default_settings from flask.ext.plugins import Plugin from flask import current_app as app from pybossa_gravatar.gravatar import Gravatar from pybossa.model.user import User from sqlalchemy import event __plugin__ = "PyBossaGravatar" __version__ = "0.1.0" gravatar = Gravatar() class PyBossaGravatar(Plugin): """A PyBossa plugin for Gravatar integration.""" def setup(self): """Setup the plugin.""" self.load_config() gravatar.init_app(app) self.setup_event_listener() def load_config(self): """Configure the plugin.""" settings = [key for key in dir(default_settings) if key.isupper()] for s in settings: if not app.config.get(s): app.config[s] = getattr(default_settings, s) def setup_event_listener(self): """Setup event listener.""" @event.listens_for(User, 'before_insert') def add_user_event(mapper, conn, target): """Set gravatar by default for new users.""" gravatar.set(target, update_repo=False) ## Instruction: Add URL rule to set Gravatar for current user ## Code After: import default_settings from flask.ext.plugins import Plugin from flask import current_app as app from flask import redirect from pybossa_gravatar.gravatar import Gravatar from pybossa.model.user import User from sqlalchemy import event from flask.ext.login import current_user __plugin__ = "PyBossaGravatar" __version__ = "0.1.0" gravatar = Gravatar() class PyBossaGravatar(Plugin): """A PyBossa plugin for Gravatar integration.""" def setup(self): """Setup the plugin.""" self.load_config() gravatar.init_app(app) self.setup_event_listener() self.setup_url_rule() def load_config(self): """Configure the plugin.""" settings = [key for key in dir(default_settings) if key.isupper()] for s in settings: if not app.config.get(s): app.config[s] = getattr(default_settings, s) def setup_event_listener(self): """Setup event listener.""" @event.listens_for(User, 'before_insert') def add_user_event(mapper, conn, target): """Set gravatar by default for new users.""" gravatar.set(target, update_repo=False) def setup_url_rule(self): """Setup URL rule.""" @app.route('/account/set-gravatar') def set_gravatar(self): """Set gravatar for the current user.""" if current_user.is_anonymous(): return redirect(url_for('account.signin')) gravatar.set(current_user)
// ... existing code ... from flask.ext.plugins import Plugin from flask import current_app as app from flask import redirect from pybossa_gravatar.gravatar import Gravatar from pybossa.model.user import User from sqlalchemy import event from flask.ext.login import current_user __plugin__ = "PyBossaGravatar" // ... modified code ... gravatar.init_app(app) self.setup_event_listener() self.setup_url_rule() ... """Set gravatar by default for new users.""" gravatar.set(target, update_repo=False) def setup_url_rule(self): """Setup URL rule.""" @app.route('/account/set-gravatar') def set_gravatar(self): """Set gravatar for the current user.""" if current_user.is_anonymous(): return redirect(url_for('account.signin')) gravatar.set(current_user) // ... rest of the code ...
d504abc78d94e8af90a5bf8950f3ad4e2d47e5f7
src/ansible/models.py
src/ansible/models.py
from django.db import models class Playbook(models.Model): class Meta: verbose_name_plural = "playbooks" name = models.CharField(max_length=200) path = models.CharField(max_length=200, default="~/") ansible_config = models.CharField(max_length=200, default="~/") inventory = models.CharField(max_length=200, default="hosts") user = models.CharField(max_length=200, default="ubuntu") def __str__(self): return "Playbook name: %s" % self.playbook.name
from django.db import models class Playbook(models.Model): class Meta: verbose_name_plural = "playbooks" name = models.CharField(max_length=200) path = models.CharField(max_length=200, default="~/") ansible_config = models.CharField(max_length=200, default="~/") inventory = models.CharField(max_length=200, default="hosts") user = models.CharField(max_length=200, default="ubuntu") def __str__(self): return "%s" % self.name
Fix string output of Playbook
Fix string output of Playbook
Python
bsd-3-clause
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
from django.db import models class Playbook(models.Model): class Meta: verbose_name_plural = "playbooks" name = models.CharField(max_length=200) path = models.CharField(max_length=200, default="~/") ansible_config = models.CharField(max_length=200, default="~/") inventory = models.CharField(max_length=200, default="hosts") user = models.CharField(max_length=200, default="ubuntu") def __str__(self): - return "Playbook name: %s" % self.playbook.name + return "%s" % self.name
Fix string output of Playbook
## Code Before: from django.db import models class Playbook(models.Model): class Meta: verbose_name_plural = "playbooks" name = models.CharField(max_length=200) path = models.CharField(max_length=200, default="~/") ansible_config = models.CharField(max_length=200, default="~/") inventory = models.CharField(max_length=200, default="hosts") user = models.CharField(max_length=200, default="ubuntu") def __str__(self): return "Playbook name: %s" % self.playbook.name ## Instruction: Fix string output of Playbook ## Code After: from django.db import models class Playbook(models.Model): class Meta: verbose_name_plural = "playbooks" name = models.CharField(max_length=200) path = models.CharField(max_length=200, default="~/") ansible_config = models.CharField(max_length=200, default="~/") inventory = models.CharField(max_length=200, default="hosts") user = models.CharField(max_length=200, default="ubuntu") def __str__(self): return "%s" % self.name
... def __str__(self): return "%s" % self.name ...
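A plain-Python stand-in (not the Django model itself, which needs configured settings to instantiate) showing why the old `__str__` raised: inside the model the instance already is the playbook, so there is no `self.playbook` attribute to reach through. The playbook name used here is made up.

```python
class Playbook(object):
    """Stand-in for the Django model, just to demonstrate the __str__ fix."""

    def __init__(self, name):
        self.name = name

    def __str__(self):
        # The old body, "Playbook name: %s" % self.playbook.name, would raise
        # AttributeError here because the instance has no 'playbook' attribute.
        return "%s" % self.name


print(str(Playbook("deploy-web")))  # -> deploy-web
```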
b4ef0f107ca8fefbe556babb00f31c7b88019d50
pydarkstar/__init__.py
pydarkstar/__init__.py
__version__ = 0.1 import pydarkstar.logutils import logging pydarkstar.logutils.setError() try: import sqlalchemy except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install sqlalchemy') exit(-1) try: import pymysql except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install pymysql') exit(-1) try: import bs4 except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install beautifulsoup4') exit(-1) import scrub
__version__ = 0.1 import pydarkstar.logutils import logging pydarkstar.logutils.setError() try: import sqlalchemy except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install sqlalchemy') exit(-1) try: import pymysql except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install pymysql') exit(-1) try: import bs4 except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install beautifulsoup4') exit(-1)
Revert "Change imports to relative."
Revert "Change imports to relative." This reverts commit 9d0990249b7e0e46e38a665cb8c32a1ee435c291.
Python
mit
LegionXI/pydarkstar,AdamGagorik/pydarkstar
__version__ = 0.1 import pydarkstar.logutils import logging pydarkstar.logutils.setError() try: import sqlalchemy except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install sqlalchemy') exit(-1) try: import pymysql except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install pymysql') exit(-1) try: import bs4 except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install beautifulsoup4') exit(-1) - - import scrub
Revert "Change imports to relative."
## Code Before: __version__ = 0.1 import pydarkstar.logutils import logging pydarkstar.logutils.setError() try: import sqlalchemy except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install sqlalchemy') exit(-1) try: import pymysql except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install pymysql') exit(-1) try: import bs4 except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install beautifulsoup4') exit(-1) import scrub ## Instruction: Revert "Change imports to relative." ## Code After: __version__ = 0.1 import pydarkstar.logutils import logging pydarkstar.logutils.setError() try: import sqlalchemy except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install sqlalchemy') exit(-1) try: import pymysql except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install pymysql') exit(-1) try: import bs4 except ImportError as e: logging.exception(e.__class__.__name__) logging.error('pip install beautifulsoup4') exit(-1)
... logging.error('pip install beautifulsoup4') exit(-1) ...
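The pattern worth noting in this record is the guarded import with a pip hint, not the reverted `import scrub` line. The helper below is my own packaging of that pattern, assuming nothing beyond the standard library; pydarkstar itself writes the try/except blocks out longhand, as the diff shows.

```python
import logging
import sys

logging.basicConfig(level=logging.ERROR)

def require(module_name, pip_name):
    """Import a dependency, logging an install hint and exiting if it is missing."""
    try:
        return __import__(module_name)
    except ImportError:
        logging.exception('missing dependency: %s', module_name)
        logging.error('pip install %s', pip_name)
        sys.exit(1)

json = require('json', 'json')  # stdlib module, so the demo always succeeds
print(json.dumps({'ok': True}))
```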
422bb9ebfcff9826cf58d17a20df61cea21fdd77
app/supplier_constants.py
app/supplier_constants.py
KEY_DUNS_NUMBER = 'supplierDunsNumber' KEY_ORGANISATION_SIZE = 'supplierOrganisationSize' KEY_REGISTERED_NAME = 'supplierRegisteredName' KEY_REGISTRATION_BUILDING = 'supplierRegisteredBuilding' KEY_REGISTRATION_COUNTRY = 'supplierRegisteredCountry' KEY_REGISTRATION_NUMBER = 'supplierCompanyRegistrationNumber' KEY_REGISTRATION_POSTCODE = 'supplierRegisteredPostcode' KEY_REGISTRATION_TOWN = 'supplierRegisteredTown' KEY_TRADING_NAME = 'supplierTradingName' KEY_TRADING_STATUS = 'supplierTradingStatus' KEY_VAT_NUMBER = 'supplierVatNumber'
KEY_DUNS_NUMBER = 'supplierDunsNumber' KEY_ORGANISATION_SIZE = 'supplierOrganisationSize' KEY_REGISTERED_NAME = 'supplierRegisteredName' KEY_REGISTRATION_BUILDING = 'supplierRegisteredBuilding' KEY_REGISTRATION_COUNTRY = 'supplierRegisteredCountry' KEY_REGISTRATION_NUMBER = 'supplierCompanyRegistrationNumber' KEY_REGISTRATION_POSTCODE = 'supplierRegisteredPostcode' KEY_REGISTRATION_TOWN = 'supplierRegisteredTown' KEY_TRADING_NAME = 'supplierTradingName' KEY_TRADING_STATUS = 'supplierTradingStatus'
Remove VAT number from supplier constants
Remove VAT number from supplier constants
Python
mit
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
KEY_DUNS_NUMBER = 'supplierDunsNumber' KEY_ORGANISATION_SIZE = 'supplierOrganisationSize' KEY_REGISTERED_NAME = 'supplierRegisteredName' KEY_REGISTRATION_BUILDING = 'supplierRegisteredBuilding' KEY_REGISTRATION_COUNTRY = 'supplierRegisteredCountry' KEY_REGISTRATION_NUMBER = 'supplierCompanyRegistrationNumber' KEY_REGISTRATION_POSTCODE = 'supplierRegisteredPostcode' KEY_REGISTRATION_TOWN = 'supplierRegisteredTown' KEY_TRADING_NAME = 'supplierTradingName' KEY_TRADING_STATUS = 'supplierTradingStatus' - KEY_VAT_NUMBER = 'supplierVatNumber'
Remove VAT number from supplier constants
## Code Before: KEY_DUNS_NUMBER = 'supplierDunsNumber' KEY_ORGANISATION_SIZE = 'supplierOrganisationSize' KEY_REGISTERED_NAME = 'supplierRegisteredName' KEY_REGISTRATION_BUILDING = 'supplierRegisteredBuilding' KEY_REGISTRATION_COUNTRY = 'supplierRegisteredCountry' KEY_REGISTRATION_NUMBER = 'supplierCompanyRegistrationNumber' KEY_REGISTRATION_POSTCODE = 'supplierRegisteredPostcode' KEY_REGISTRATION_TOWN = 'supplierRegisteredTown' KEY_TRADING_NAME = 'supplierTradingName' KEY_TRADING_STATUS = 'supplierTradingStatus' KEY_VAT_NUMBER = 'supplierVatNumber' ## Instruction: Remove VAT number from supplier constants ## Code After: KEY_DUNS_NUMBER = 'supplierDunsNumber' KEY_ORGANISATION_SIZE = 'supplierOrganisationSize' KEY_REGISTERED_NAME = 'supplierRegisteredName' KEY_REGISTRATION_BUILDING = 'supplierRegisteredBuilding' KEY_REGISTRATION_COUNTRY = 'supplierRegisteredCountry' KEY_REGISTRATION_NUMBER = 'supplierCompanyRegistrationNumber' KEY_REGISTRATION_POSTCODE = 'supplierRegisteredPostcode' KEY_REGISTRATION_TOWN = 'supplierRegisteredTown' KEY_TRADING_NAME = 'supplierTradingName' KEY_TRADING_STATUS = 'supplierTradingStatus'
// ... existing code ... KEY_TRADING_NAME = 'supplierTradingName' KEY_TRADING_STATUS = 'supplierTradingStatus' // ... rest of the code ...
697ffec14e11e3558c8ebd33637aeebd7119a772
mltsp/__init__.py
mltsp/__init__.py
__version__ = '0.3dev' def install(): """Install MLTSP config file in ~/.config/mltsp/mltsp.yaml. """ import os import shutil cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml') cfg_dir = os.path.dirname(cfg) if os.path.exists(cfg): print('Existing configuration at {} -- not overwriting.'.format(cfg)) return if not os.path.exists(cfg_dir): os.makedirs(cfg_dir) shutil.copyfile(os.path.join(os.path.dirname(__file__), 'mltsp.yaml.example'), cfg) print('Installed {}'.format(cfg)) print('Please customize this file with authentication tokens, etc.')
__version__ = '0.3dev' def install(): """Install MLTSP config file in ~/.config/mltsp/mltsp.yaml. """ import os import shutil from distutils.dir_util import copy_tree data_src = os.path.join(os.path.dirname(os.path.dirname(__file__)), "data") data_dst = os.path.expanduser('~/.local/mltsp/') copy_tree(data_src, data_dst, update=1) print("Created data directory at {} and copied sample data.".format( os.path.expanduser('~/.local/mltsp/'))) cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml') cfg_dir = os.path.dirname(cfg) if os.path.exists(cfg): print('Existing configuration at {} -- not overwriting.'.format(cfg)) return if not os.path.exists(cfg_dir): os.makedirs(cfg_dir) shutil.copyfile(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'mltsp.yaml.example'), cfg) print('Installed {}'.format(cfg)) print('Please customize this file with authentication tokens, etc.')
Copy data directory to ~/.local/mltsp during install; fix path to mltsp.yaml.example
Copy data directory to ~/.local/mltsp during install; fix path to mltsp.yaml.example
Python
bsd-3-clause
mltsp/mltsp,bnaul/mltsp,acrellin/mltsp,bnaul/mltsp,bnaul/mltsp,acrellin/mltsp,acrellin/mltsp,bnaul/mltsp,mltsp/mltsp,mltsp/mltsp,mltsp/mltsp,bnaul/mltsp,acrellin/mltsp,bnaul/mltsp,acrellin/mltsp,mltsp/mltsp,acrellin/mltsp,mltsp/mltsp
__version__ = '0.3dev' def install(): """Install MLTSP config file in ~/.config/mltsp/mltsp.yaml. """ import os import shutil + from distutils.dir_util import copy_tree + + data_src = os.path.join(os.path.dirname(os.path.dirname(__file__)), + "data") + data_dst = os.path.expanduser('~/.local/mltsp/') + copy_tree(data_src, data_dst, update=1) + print("Created data directory at {} and copied sample data.".format( + os.path.expanduser('~/.local/mltsp/'))) cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml') cfg_dir = os.path.dirname(cfg) if os.path.exists(cfg): print('Existing configuration at {} -- not overwriting.'.format(cfg)) return if not os.path.exists(cfg_dir): os.makedirs(cfg_dir) - shutil.copyfile(os.path.join(os.path.dirname(__file__), + shutil.copyfile(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'mltsp.yaml.example'), cfg) print('Installed {}'.format(cfg)) print('Please customize this file with authentication tokens, etc.')
Copy data directory to ~/.local/mltsp during install; fix path to mltsp.yaml.example
## Code Before: __version__ = '0.3dev' def install(): """Install MLTSP config file in ~/.config/mltsp/mltsp.yaml. """ import os import shutil cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml') cfg_dir = os.path.dirname(cfg) if os.path.exists(cfg): print('Existing configuration at {} -- not overwriting.'.format(cfg)) return if not os.path.exists(cfg_dir): os.makedirs(cfg_dir) shutil.copyfile(os.path.join(os.path.dirname(__file__), 'mltsp.yaml.example'), cfg) print('Installed {}'.format(cfg)) print('Please customize this file with authentication tokens, etc.') ## Instruction: Copy data directory to ~/.local/mltsp during install; fix path to mltsp.yaml.example ## Code After: __version__ = '0.3dev' def install(): """Install MLTSP config file in ~/.config/mltsp/mltsp.yaml. """ import os import shutil from distutils.dir_util import copy_tree data_src = os.path.join(os.path.dirname(os.path.dirname(__file__)), "data") data_dst = os.path.expanduser('~/.local/mltsp/') copy_tree(data_src, data_dst, update=1) print("Created data directory at {} and copied sample data.".format( os.path.expanduser('~/.local/mltsp/'))) cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml') cfg_dir = os.path.dirname(cfg) if os.path.exists(cfg): print('Existing configuration at {} -- not overwriting.'.format(cfg)) return if not os.path.exists(cfg_dir): os.makedirs(cfg_dir) shutil.copyfile(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'mltsp.yaml.example'), cfg) print('Installed {}'.format(cfg)) print('Please customize this file with authentication tokens, etc.')
... import os import shutil from distutils.dir_util import copy_tree data_src = os.path.join(os.path.dirname(os.path.dirname(__file__)), "data") data_dst = os.path.expanduser('~/.local/mltsp/') copy_tree(data_src, data_dst, update=1) print("Created data directory at {} and copied sample data.".format( os.path.expanduser('~/.local/mltsp/'))) cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml') ... os.makedirs(cfg_dir) shutil.copyfile(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'mltsp.yaml.example'), cfg) ...
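A self-contained sketch of the two pieces the new install() relies on: distutils' copy_tree with update=1, and the double dirname used to find the project root. Temporary directories stand in for the package data directory and ~/.local/mltsp/, and the module path at the end is hypothetical; distutils was removed from the standard library in Python 3.12, so this mirrors the era of the original code.

```python
import os
import tempfile
from distutils.dir_util import copy_tree  # same helper the diff uses

# Throwaway source and destination trees instead of real package data.
src = tempfile.mkdtemp()
dst = tempfile.mkdtemp()
with open(os.path.join(src, 'sample.txt'), 'w') as fh:
    fh.write('example data\n')

# update=1 copies a file only when the source is newer than the destination,
# so re-running the installer does not rewrite files that are already current.
copied = copy_tree(src, dst, update=1)
print(copied)

# The double dirname walks from <root>/mltsp/__init__.py up to <root>, which is
# where the diff expects data/ and mltsp.yaml.example to live.
module_path = '/opt/project/mltsp/__init__.py'  # hypothetical path
print(os.path.dirname(os.path.dirname(module_path)))  # -> /opt/project
```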
12728f6b924a3d45f78b3955cb9fcb563db6a81f
pida_abc_type.py
pida_abc_type.py
from abc import ABCMeta, abstractmethod class IdaTypes: __metaclass__ = ABCMeta @abstractmethod def decode(self, data): raise NotImplementedError() @abstractmethod def get_name(self): raise NotImplementedError() @abstractmethod def get_type(self): raise NotImplementedError()
from abc import ABCMeta, abstractmethod class IdaTypes: __metaclass__ = ABCMeta @abstractmethod def decode(self, data): raise NotImplementedError() @abstractmethod def get_type(self): raise NotImplementedError()
Delete abstract method get_name
Delete abstract method get_name
Python
mit
goodwinxp/ATFGenerator,goodwinxp/ATFGenerator,goodwinxp/ATFGenerator
from abc import ABCMeta, abstractmethod class IdaTypes: __metaclass__ = ABCMeta @abstractmethod def decode(self, data): raise NotImplementedError() @abstractmethod - def get_name(self): - raise NotImplementedError() - - @abstractmethod def get_type(self): raise NotImplementedError()
Delete abstract method get_name
## Code Before: from abc import ABCMeta, abstractmethod class IdaTypes: __metaclass__ = ABCMeta @abstractmethod def decode(self, data): raise NotImplementedError() @abstractmethod def get_name(self): raise NotImplementedError() @abstractmethod def get_type(self): raise NotImplementedError() ## Instruction: Delete abstract method get name ## Code After: from abc import ABCMeta, abstractmethod class IdaTypes: __metaclass__ = ABCMeta @abstractmethod def decode(self, data): raise NotImplementedError() @abstractmethod def get_type(self): raise NotImplementedError()
... @abstractmethod def get_type(self): raise NotImplementedError() ...
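One detail worth noting: the `__metaclass__ = ABCMeta` attribute in the diff only has an effect on Python 2; on Python 3 that attribute is ignored and the abstract methods are not enforced. The sketch below uses the Python 3 spelling to show what dropping `get_name` from the interface buys: a concrete subclass (the `IntType` name here is hypothetical) now only has to implement `decode` and `get_type`.

```python
from abc import ABCMeta, abstractmethod

class IdaTypes(metaclass=ABCMeta):  # Python 3 spelling of the diff's __metaclass__
    @abstractmethod
    def decode(self, data):
        raise NotImplementedError()

    @abstractmethod
    def get_type(self):
        raise NotImplementedError()

class IntType(IdaTypes):
    """Hypothetical concrete type: only decode() and get_type() are required now."""

    def decode(self, data):
        return {'type': 'int', 'raw': data}

    def get_type(self):
        return 'int'

print(IntType().get_type())  # instantiable without defining get_name()

try:
    IdaTypes()  # the abstract base itself still cannot be instantiated
except TypeError as exc:
    print(exc)
```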
a15bbbd22d8fa32abd7b10179a3289f1ec396c3a
tests/test_ultrametric.py
tests/test_ultrametric.py
from viridis import tree from six.moves import range import pytest @pytest.fixture def base_tree(): t = tree.Ultrametric(list(range(6))) t.merge(0, 1, 0.1) t.merge(6, 2, 0.2) t.merge(3, 4, 0.3) t.merge(8, 5, 0.4) t.merge(7, 8, 0.5) return t def test_split(base_tree): t = base_tree t.split(0, 2) assert t.node[9]['num_leaves'] == 3 t.split(0, 4) # nothing to do assert t.node[9]['num_leaves'] == 3 def test_children(base_tree): t = base_tree assert t.children(6) == [0, 1]
from viridis import tree from six.moves import range import pytest @pytest.fixture def base_tree(): t = tree.Ultrametric(list(range(6))) t.merge(0, 1, 0.1) t.merge(6, 2, 0.2) t.merge(3, 4, 0.3) t.merge(8, 5, 0.4) t.merge(7, 8, 0.5) return t def test_split(base_tree): t = base_tree t.split(0, 2) assert t.node[9]['num_leaves'] == 3 t.split(0, 4) # nothing to do assert tree.num_leaves(t, 9) == 3 def test_children(base_tree): t = base_tree assert t.children(6) == [0, 1]
Use num_leaves function in tests
Use num_leaves function in tests
Python
mit
jni/viridis
from viridis import tree from six.moves import range import pytest @pytest.fixture def base_tree(): t = tree.Ultrametric(list(range(6))) t.merge(0, 1, 0.1) t.merge(6, 2, 0.2) t.merge(3, 4, 0.3) t.merge(8, 5, 0.4) t.merge(7, 8, 0.5) return t def test_split(base_tree): t = base_tree t.split(0, 2) assert t.node[9]['num_leaves'] == 3 t.split(0, 4) # nothing to do - assert t.node[9]['num_leaves'] == 3 + assert tree.num_leaves(t, 9) == 3 def test_children(base_tree): t = base_tree assert t.children(6) == [0, 1]
Use num_leaves function in tests
## Code Before: from viridis import tree from six.moves import range import pytest @pytest.fixture def base_tree(): t = tree.Ultrametric(list(range(6))) t.merge(0, 1, 0.1) t.merge(6, 2, 0.2) t.merge(3, 4, 0.3) t.merge(8, 5, 0.4) t.merge(7, 8, 0.5) return t def test_split(base_tree): t = base_tree t.split(0, 2) assert t.node[9]['num_leaves'] == 3 t.split(0, 4) # nothing to do assert t.node[9]['num_leaves'] == 3 def test_children(base_tree): t = base_tree assert t.children(6) == [0, 1] ## Instruction: Use num_leaves function in tests ## Code After: from viridis import tree from six.moves import range import pytest @pytest.fixture def base_tree(): t = tree.Ultrametric(list(range(6))) t.merge(0, 1, 0.1) t.merge(6, 2, 0.2) t.merge(3, 4, 0.3) t.merge(8, 5, 0.4) t.merge(7, 8, 0.5) return t def test_split(base_tree): t = base_tree t.split(0, 2) assert t.node[9]['num_leaves'] == 3 t.split(0, 4) # nothing to do assert tree.num_leaves(t, 9) == 3 def test_children(base_tree): t = base_tree assert t.children(6) == [0, 1]
# ... existing code ... assert t.node[9]['num_leaves'] == 3 t.split(0, 4) # nothing to do assert tree.num_leaves(t, 9) == 3 # ... rest of the code ...
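The real viridis helper may well be implemented differently; this stand-in only illustrates the design choice the updated assertion relies on, funnelling attribute access through one function so the tests stop depending on where the leaf count is stored.

```python
def num_leaves(tree, node):
    # One possible shape for the helper: a single place that knows the storage
    # layout, instead of each test indexing t.node[...]['num_leaves'] directly.
    return tree.node[node]['num_leaves']

class FakeTree(object):
    """Minimal stand-in exposing a networkx-style .node mapping."""

    def __init__(self):
        self.node = {9: {'num_leaves': 3}}

t = FakeTree()
assert num_leaves(t, 9) == 3
print(num_leaves(t, 9))  # -> 3
```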
bde51dc314bd920dafd5f72fbc4ccae099f5be59
tsune/settings/ci.py
tsune/settings/ci.py
from .base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', #'NAME': 'db.sqlite3', } } INSTALLED_APPS += ('django_jenkins',) PROJECT_APPS = { 'cardbox', }
from .base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', #'NAME': 'db.sqlite3', } } INSTALLED_APPS += ('django_jenkins',) PROJECT_APPS = { 'cardbox', 'deckglue', 'memorize', }
Add deckglue and memorize to django_jenkins
Add deckglue and memorize to django_jenkins
Python
mit
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
from .base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', #'NAME': 'db.sqlite3', } } INSTALLED_APPS += ('django_jenkins',) PROJECT_APPS = { 'cardbox', + 'deckglue', + 'memorize', } + +
Add deckglue and memorize to django_jenkins
## Code Before: from .base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', #'NAME': 'db.sqlite3', } } INSTALLED_APPS += ('django_jenkins',) PROJECT_APPS = { 'cardbox', } ## Instruction: Add deckglue and memorize to django_jenkins ## Code After: from .base import * DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', #'NAME': 'db.sqlite3', } } INSTALLED_APPS += ('django_jenkins',) PROJECT_APPS = { 'cardbox', 'deckglue', 'memorize', }
// ... existing code ... PROJECT_APPS = { 'cardbox', 'deckglue', 'memorize', } // ... rest of the code ...
f312b856046cb46255971bcd30b8c418d7040455
__openerp__.py
__openerp__.py
{ 'name': 'Human Employee Streamline', 'version': '1.2', 'author': 'XCG Consulting', 'category': 'Human Resources', 'description': """ enchancements to the hr module to streamline its usage """, 'website': 'http://www.openerp-experts.com', 'depends': [ 'base', 'hr', ], 'data': [ 'security/ir.model.access.csv', 'security/record_rules.xml', 'admin_doc.xml', 'hr_employee.xml', ], 'test': [ ], 'installable': True, }
{ 'name': 'Human Employee Streamline', 'version': '1.2', 'author': 'XCG Consulting', 'category': 'Human Resources', 'description': """ enchancements to the hr module to streamline its usage """, 'website': 'http://www.openerp-experts.com', 'depends': [ 'base', 'hr', 'hr_contract', ], 'data': [ 'security/ir.model.access.csv', 'security/record_rules.xml', 'admin_doc.xml', 'hr_employee.xml', ], 'test': [ ], 'installable': True, }
Add dependencies on hr_contract as it should have been done
Add dependencies on hr_contract as it should have been done
Python
agpl-3.0
xcgd/hr_streamline
{ 'name': 'Human Employee Streamline', 'version': '1.2', 'author': 'XCG Consulting', 'category': 'Human Resources', 'description': """ enchancements to the hr module to streamline its usage """, 'website': 'http://www.openerp-experts.com', 'depends': [ 'base', 'hr', + 'hr_contract', ], 'data': [ 'security/ir.model.access.csv', 'security/record_rules.xml', 'admin_doc.xml', 'hr_employee.xml', ], 'test': [ ], 'installable': True, }
Add dependencies on hr_contract as it should have been done
## Code Before: { 'name': 'Human Employee Streamline', 'version': '1.2', 'author': 'XCG Consulting', 'category': 'Human Resources', 'description': """ enchancements to the hr module to streamline its usage """, 'website': 'http://www.openerp-experts.com', 'depends': [ 'base', 'hr', ], 'data': [ 'security/ir.model.access.csv', 'security/record_rules.xml', 'admin_doc.xml', 'hr_employee.xml', ], 'test': [ ], 'installable': True, } ## Instruction: Add dependencies on hr_contract as it should have been done ## Code After: { 'name': 'Human Employee Streamline', 'version': '1.2', 'author': 'XCG Consulting', 'category': 'Human Resources', 'description': """ enchancements to the hr module to streamline its usage """, 'website': 'http://www.openerp-experts.com', 'depends': [ 'base', 'hr', 'hr_contract', ], 'data': [ 'security/ir.model.access.csv', 'security/record_rules.xml', 'admin_doc.xml', 'hr_employee.xml', ], 'test': [ ], 'installable': True, }
# ... existing code ... 'base', 'hr', 'hr_contract', ], 'data': [ # ... rest of the code ...
e08395a35c37fa7f7c0311cc4c7a71537b8b4227
tests/misc/print_exception.py
tests/misc/print_exception.py
try: import uio as io except ImportError: import io import sys if hasattr(sys, 'print_exception'): print_exception = sys.print_exception else: import traceback print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f) def print_exc(e): buf = io.StringIO() print_exception(e, buf) s = buf.getvalue() for l in s.split("\n"): # uPy on pyboard prints <stdin> as file, so remove filename. if l.startswith(" File "): l = l.split('"') print(l[0], l[2]) # uPy and CPy tracebacks differ in that CPy prints a source line for # each traceback entry. In this case, we know that offending line # has 4-space indent, so filter it out. elif not l.startswith(" "): print(l) # basic exception message try: 1/0 except Exception as e: print('caught') print_exc(e) # exception message with more than 1 source-code line def f(): g() def g(): 2/0 try: f() except Exception as e: print('caught') print_exc(e)
try: import uio as io except ImportError: import io import sys if hasattr(sys, 'print_exception'): print_exception = sys.print_exception else: import traceback print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f) def print_exc(e): buf = io.StringIO() print_exception(e, buf) s = buf.getvalue() for l in s.split("\n"): # uPy on pyboard prints <stdin> as file, so remove filename. if l.startswith(" File "): l = l.split('"') print(l[0], l[2]) # uPy and CPy tracebacks differ in that CPy prints a source line for # each traceback entry. In this case, we know that offending line # has 4-space indent, so filter it out. elif not l.startswith(" "): print(l) # basic exception message try: 1/0 except Exception as e: print('caught') print_exc(e) # exception message with more than 1 source-code line def f(): g() def g(): 2/0 try: f() except Exception as e: print('caught') print_exc(e) # Here we have a function with lots of bytecode generated for a single source-line, and # there is an error right at the end of the bytecode. It should report the correct line. def f(): f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X}) return 1 try: f() except Exception as e: print_exc(e)
Add test for line number printing with large bytecode chunk.
tests/misc: Add test for line number printing with large bytecode chunk.
Python
mit
henriknelson/micropython,AriZuu/micropython,AriZuu/micropython,micropython/micropython-esp32,micropython/micropython-esp32,PappaPeppar/micropython,MrSurly/micropython,MrSurly/micropython-esp32,infinnovation/micropython,trezor/micropython,micropython/micropython-esp32,lowRISC/micropython,torwag/micropython,PappaPeppar/micropython,swegener/micropython,MrSurly/micropython,Peetz0r/micropython-esp32,TDAbboud/micropython,hiway/micropython,kerneltask/micropython,cwyark/micropython,adafruit/micropython,trezor/micropython,adafruit/micropython,bvernoux/micropython,henriknelson/micropython,pramasoul/micropython,kerneltask/micropython,MrSurly/micropython-esp32,trezor/micropython,cwyark/micropython,torwag/micropython,hiway/micropython,adafruit/circuitpython,cwyark/micropython,tobbad/micropython,MrSurly/micropython,adafruit/circuitpython,henriknelson/micropython,MrSurly/micropython-esp32,pramasoul/micropython,tralamazza/micropython,pozetroninc/micropython,deshipu/micropython,cwyark/micropython,chrisdearman/micropython,adafruit/circuitpython,HenrikSolver/micropython,hiway/micropython,oopy/micropython,henriknelson/micropython,ryannathans/micropython,dmazzella/micropython,swegener/micropython,pramasoul/micropython,adafruit/circuitpython,pozetroninc/micropython,Peetz0r/micropython-esp32,blazewicz/micropython,toolmacher/micropython,ryannathans/micropython,alex-robbins/micropython,SHA2017-badge/micropython-esp32,bvernoux/micropython,chrisdearman/micropython,oopy/micropython,selste/micropython,pozetroninc/micropython,infinnovation/micropython,selste/micropython,pfalcon/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,Peetz0r/micropython-esp32,Peetz0r/micropython-esp32,dmazzella/micropython,puuu/micropython,pfalcon/micropython,tobbad/micropython,chrisdearman/micropython,lowRISC/micropython,oopy/micropython,PappaPeppar/micropython,hiway/micropython,pfalcon/micropython,alex-robbins/micropython,cwyark/micropython,AriZuu/micropython,SHA2017-badge/micropython-esp32,TDAbboud/micropython,HenrikSolver/micropython,swegener/micropython,Peetz0r/micropython-esp32,MrSurly/micropython,torwag/micropython,alex-robbins/micropython,blazewicz/micropython,kerneltask/micropython,torwag/micropython,chrisdearman/micropython,trezor/micropython,alex-robbins/micropython,MrSurly/micropython-esp32,blazewicz/micropython,lowRISC/micropython,bvernoux/micropython,dmazzella/micropython,ryannathans/micropython,puuu/micropython,tobbad/micropython,ryannathans/micropython,pramasoul/micropython,AriZuu/micropython,Timmenem/micropython,blazewicz/micropython,bvernoux/micropython,tralamazza/micropython,MrSurly/micropython,swegener/micropython,blazewicz/micropython,deshipu/micropython,deshipu/micropython,trezor/micropython,selste/micropython,puuu/micropython,hiway/micropython,tralamazza/micropython,puuu/micropython,infinnovation/micropython,TDAbboud/micropython,TDAbboud/micropython,henriknelson/micropython,toolmacher/micropython,TDAbboud/micropython,selste/micropython,Timmenem/micropython,alex-robbins/micropython,pfalcon/micropython,oopy/micropython,AriZuu/micropython,lowRISC/micropython,pozetroninc/micropython,pramasoul/micropython,lowRISC/micropython,torwag/micropython,Timmenem/micropython,infinnovation/micropython,swegener/micropython,bvernoux/micropython,toolmacher/micropython,kerneltask/micropython,PappaPeppar/micropython,pfalcon/micropython,HenrikSolver/micropython,PappaPeppar/micropython,dmazzella/micropython,pozetroninc/micropython,tobbad/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,toolmacher
/micropython,adafruit/micropython,HenrikSolver/micropython,SHA2017-badge/micropython-esp32,HenrikSolver/micropython,chrisdearman/micropython,adafruit/circuitpython,adafruit/micropython,Timmenem/micropython,micropython/micropython-esp32,infinnovation/micropython,micropython/micropython-esp32,deshipu/micropython,adafruit/circuitpython,adafruit/micropython,kerneltask/micropython,selste/micropython,Timmenem/micropython,toolmacher/micropython,ryannathans/micropython,oopy/micropython,tobbad/micropython,MrSurly/micropython-esp32
try: import uio as io except ImportError: import io import sys if hasattr(sys, 'print_exception'): print_exception = sys.print_exception else: import traceback print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f) def print_exc(e): buf = io.StringIO() print_exception(e, buf) s = buf.getvalue() for l in s.split("\n"): # uPy on pyboard prints <stdin> as file, so remove filename. if l.startswith(" File "): l = l.split('"') print(l[0], l[2]) # uPy and CPy tracebacks differ in that CPy prints a source line for # each traceback entry. In this case, we know that offending line # has 4-space indent, so filter it out. elif not l.startswith(" "): print(l) # basic exception message try: 1/0 except Exception as e: print('caught') print_exc(e) # exception message with more than 1 source-code line def f(): g() def g(): 2/0 try: f() except Exception as e: print('caught') print_exc(e) + # Here we have a function with lots of bytecode generated for a single source-line, and + # there is an error right at the end of the bytecode. It should report the correct line. + def f(): + f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X}) + return 1 + try: + f() + except Exception as e: + print_exc(e) +
Add test for line number printing with large bytecode chunk.
## Code Before: try: import uio as io except ImportError: import io import sys if hasattr(sys, 'print_exception'): print_exception = sys.print_exception else: import traceback print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f) def print_exc(e): buf = io.StringIO() print_exception(e, buf) s = buf.getvalue() for l in s.split("\n"): # uPy on pyboard prints <stdin> as file, so remove filename. if l.startswith(" File "): l = l.split('"') print(l[0], l[2]) # uPy and CPy tracebacks differ in that CPy prints a source line for # each traceback entry. In this case, we know that offending line # has 4-space indent, so filter it out. elif not l.startswith(" "): print(l) # basic exception message try: 1/0 except Exception as e: print('caught') print_exc(e) # exception message with more than 1 source-code line def f(): g() def g(): 2/0 try: f() except Exception as e: print('caught') print_exc(e) ## Instruction: Add test for line number printing with large bytecode chunk. ## Code After: try: import uio as io except ImportError: import io import sys if hasattr(sys, 'print_exception'): print_exception = sys.print_exception else: import traceback print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f) def print_exc(e): buf = io.StringIO() print_exception(e, buf) s = buf.getvalue() for l in s.split("\n"): # uPy on pyboard prints <stdin> as file, so remove filename. if l.startswith(" File "): l = l.split('"') print(l[0], l[2]) # uPy and CPy tracebacks differ in that CPy prints a source line for # each traceback entry. In this case, we know that offending line # has 4-space indent, so filter it out. elif not l.startswith(" "): print(l) # basic exception message try: 1/0 except Exception as e: print('caught') print_exc(e) # exception message with more than 1 source-code line def f(): g() def g(): 2/0 try: f() except Exception as e: print('caught') print_exc(e) # Here we have a function with lots of bytecode generated for a single source-line, and # there is an error right at the end of the bytecode. It should report the correct line. def f(): f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X}) return 1 try: f() except Exception as e: print_exc(e)
... print('caught') print_exc(e) # Here we have a function with lots of bytecode generated for a single source-line, and # there is an error right at the end of the bytecode. It should report the correct line. def f(): f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X}) return 1 try: f() except Exception as e: print_exc(e) ...
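CPython keeps per-instruction line information, so it does not exhibit the MicroPython issue being tested; the stand-alone snippet below (the `boom` function is my own) just demonstrates the print_exception-into-StringIO technique the test is built on, and how to read back which source line the traceback blames.

```python
import io
import traceback

def boom():
    # A busy single line whose failure happens at its very end, in the same
    # spirit as the dict-with-an-undefined-name trick used by the test above.
    return {1: 1, 2: 2, 3: 3, 4: int('not a number')}

try:
    boom()
except Exception as exc:
    buf = io.StringIO()
    traceback.print_exception(type(exc), exc, exc.__traceback__, file=buf)
    print(buf.getvalue())
    # The innermost frame should name the 'return {...}' line inside boom().
    print('offending line:', traceback.extract_tb(exc.__traceback__)[-1].lineno)
```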
236f10e790757db0cc563f5f19ca5863877b1e7f
busstops/management/tests/test_import_singapore.py
busstops/management/tests/test_import_singapore.py
import os import vcr from django.test import TestCase, override_settings from django.core.management import call_command from ...models import StopPoint, Service, Place FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures') class ImportSingaporeTest(TestCase): @classmethod def setUpTestData(cls): with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')): call_command('import_singapore') call_command('import_singapore_places') def test_import_stops(self): self.assertEqual(499, StopPoint.objects.all().count()) stop = StopPoint.objects.first() self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A') def test_import_services(self): service = Service.objects.get() self.assertEqual(service.operator.get().name, 'SBS Transit') self.assertEqual(service.slug, 'sg-sbst-10') def test_import_places(self): self.assertEqual(307, Place.objects.count()) place = Place.objects.get(name='Central Singapore') response = self.client.get(place.get_absolute_url()) self.assertContains(response, '<h1>Central Singapore</h1>') self.assertContains(response, 'Fort Canning') self.assertContains(response, 'Bayfront Subzone')
import os import vcr from django.test import TestCase from django.core.management import call_command from ...models import StopPoint, Service, Place FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures') class ImportSingaporeTest(TestCase): @classmethod def setUpTestData(cls): with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')): call_command('import_singapore') call_command('import_singapore_places') def test_import_stops(self): self.assertEqual(499, StopPoint.objects.all().count()) stop = StopPoint.objects.first() self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A') def test_import_services(self): service = Service.objects.get() self.assertEqual(service.operator.get().name, 'SBS Transit') self.assertEqual(service.slug, 'sg-sbst-10') def test_import_places(self): self.assertEqual(307, Place.objects.count()) place = Place.objects.get(name='Central Singapore') response = self.client.get(place.get_absolute_url()) self.assertContains(response, '<h1>Central Singapore</h1>') self.assertContains(response, 'Fort Canning') self.assertContains(response, 'Bayfront Subzone')
Remove unused import to fix flake8
Remove unused import to fix flake8
Python
mpl-2.0
jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk
import os import vcr - from django.test import TestCase, override_settings + from django.test import TestCase from django.core.management import call_command from ...models import StopPoint, Service, Place FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures') class ImportSingaporeTest(TestCase): @classmethod def setUpTestData(cls): with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')): call_command('import_singapore') call_command('import_singapore_places') def test_import_stops(self): self.assertEqual(499, StopPoint.objects.all().count()) stop = StopPoint.objects.first() self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A') def test_import_services(self): service = Service.objects.get() self.assertEqual(service.operator.get().name, 'SBS Transit') self.assertEqual(service.slug, 'sg-sbst-10') def test_import_places(self): self.assertEqual(307, Place.objects.count()) place = Place.objects.get(name='Central Singapore') response = self.client.get(place.get_absolute_url()) self.assertContains(response, '<h1>Central Singapore</h1>') self.assertContains(response, 'Fort Canning') self.assertContains(response, 'Bayfront Subzone')
Remove unused import to fix flake8
## Code Before: import os import vcr from django.test import TestCase, override_settings from django.core.management import call_command from ...models import StopPoint, Service, Place FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures') class ImportSingaporeTest(TestCase): @classmethod def setUpTestData(cls): with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')): call_command('import_singapore') call_command('import_singapore_places') def test_import_stops(self): self.assertEqual(499, StopPoint.objects.all().count()) stop = StopPoint.objects.first() self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A') def test_import_services(self): service = Service.objects.get() self.assertEqual(service.operator.get().name, 'SBS Transit') self.assertEqual(service.slug, 'sg-sbst-10') def test_import_places(self): self.assertEqual(307, Place.objects.count()) place = Place.objects.get(name='Central Singapore') response = self.client.get(place.get_absolute_url()) self.assertContains(response, '<h1>Central Singapore</h1>') self.assertContains(response, 'Fort Canning') self.assertContains(response, 'Bayfront Subzone') ## Instruction: Remove unused import to fix flake8 ## Code After: import os import vcr from django.test import TestCase from django.core.management import call_command from ...models import StopPoint, Service, Place FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures') class ImportSingaporeTest(TestCase): @classmethod def setUpTestData(cls): with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')): call_command('import_singapore') call_command('import_singapore_places') def test_import_stops(self): self.assertEqual(499, StopPoint.objects.all().count()) stop = StopPoint.objects.first() self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A') def test_import_services(self): service = Service.objects.get() self.assertEqual(service.operator.get().name, 'SBS Transit') self.assertEqual(service.slug, 'sg-sbst-10') def test_import_places(self): self.assertEqual(307, Place.objects.count()) place = Place.objects.get(name='Central Singapore') response = self.client.get(place.get_absolute_url()) self.assertContains(response, '<h1>Central Singapore</h1>') self.assertContains(response, 'Fort Canning') self.assertContains(response, 'Bayfront Subzone')
... import os import vcr from django.test import TestCase from django.core.management import call_command from ...models import StopPoint, Service, Place ...
bb5b84cd71ff95bd2539afce75491139fbc6f066
pi_control_client/gpio.py
pi_control_client/gpio.py
from rpc import RPCClient class GPIOClient(RPCClient): def __init__(self, rabbit_url, device_key): super(GPIOClient, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service', device_key=device_key) def on(self, pin_number): return self._call({'pin': pin_number, 'action': 'on'}) def off(self, pin_number): return self._call({'pin': pin_number, 'action': 'off'}) def read(self, pin_number): return self._call({'pin': pin_number, 'action': 'read'}) def get_config(self, pin_number=None): if pin_number: return self._call({'pin': pin_number, 'action': 'get_config'}) return self._call({'action': 'get_config'})
from rpc import RPCClient class GPIOClient(RPCClient): def __init__(self, rabbit_url): super(GPIOClient, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service') def on(self, device_key, pin_number): return self._call(device_key, {'pin': pin_number, 'action': 'on'}) def off(self, device_key, pin_number): return self._call(device_key, {'pin': pin_number, 'action': 'off'}) def read(self, device_key, pin_number): return self._call(device_key, {'pin': pin_number, 'action': 'read'}) def config(self, device_key, pin_number=None): if pin_number: return self._call(device_key, {'pin': pin_number, 'action': 'get_config'}) return self._call(device_key, {'action': 'get_config'})
Add device_key on every call in GPIO client
Add device_key on every call in GPIO client
Python
mit
HydAu/Projectweekends_Pi-Control-Client,projectweekend/Pi-Control-Client
from rpc import RPCClient class GPIOClient(RPCClient): - def __init__(self, rabbit_url, device_key): + def __init__(self, rabbit_url): super(GPIOClient, self).__init__( rabbit_url=rabbit_url, - queue_name='gpio_service', + queue_name='gpio_service') - device_key=device_key) - def on(self, pin_number): + def on(self, device_key, pin_number): - return self._call({'pin': pin_number, 'action': 'on'}) + return self._call(device_key, {'pin': pin_number, 'action': 'on'}) - def off(self, pin_number): + def off(self, device_key, pin_number): - return self._call({'pin': pin_number, 'action': 'off'}) + return self._call(device_key, {'pin': pin_number, 'action': 'off'}) - def read(self, pin_number): + def read(self, device_key, pin_number): - return self._call({'pin': pin_number, 'action': 'read'}) + return self._call(device_key, {'pin': pin_number, 'action': 'read'}) - def get_config(self, pin_number=None): + def config(self, device_key, pin_number=None): if pin_number: - return self._call({'pin': pin_number, 'action': 'get_config'}) + return self._call(device_key, {'pin': pin_number, 'action': 'get_config'}) - return self._call({'action': 'get_config'}) + return self._call(device_key, {'action': 'get_config'})
Add device_key on every call in GPIO client
## Code Before: from rpc import RPCClient class GPIOClient(RPCClient): def __init__(self, rabbit_url, device_key): super(GPIOClient, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service', device_key=device_key) def on(self, pin_number): return self._call({'pin': pin_number, 'action': 'on'}) def off(self, pin_number): return self._call({'pin': pin_number, 'action': 'off'}) def read(self, pin_number): return self._call({'pin': pin_number, 'action': 'read'}) def get_config(self, pin_number=None): if pin_number: return self._call({'pin': pin_number, 'action': 'get_config'}) return self._call({'action': 'get_config'}) ## Instruction: Add device_key on every call in GPIO client ## Code After: from rpc import RPCClient class GPIOClient(RPCClient): def __init__(self, rabbit_url): super(GPIOClient, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service') def on(self, device_key, pin_number): return self._call(device_key, {'pin': pin_number, 'action': 'on'}) def off(self, device_key, pin_number): return self._call(device_key, {'pin': pin_number, 'action': 'off'}) def read(self, device_key, pin_number): return self._call(device_key, {'pin': pin_number, 'action': 'read'}) def config(self, device_key, pin_number=None): if pin_number: return self._call(device_key, {'pin': pin_number, 'action': 'get_config'}) return self._call(device_key, {'action': 'get_config'})
// ... existing code ... class GPIOClient(RPCClient): def __init__(self, rabbit_url): super(GPIOClient, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service') def on(self, device_key, pin_number): return self._call(device_key, {'pin': pin_number, 'action': 'on'}) def off(self, device_key, pin_number): return self._call(device_key, {'pin': pin_number, 'action': 'off'}) def read(self, device_key, pin_number): return self._call(device_key, {'pin': pin_number, 'action': 'read'}) def config(self, device_key, pin_number=None): if pin_number: return self._call(device_key, {'pin': pin_number, 'action': 'get_config'}) return self._call(device_key, {'action': 'get_config'}) // ... rest of the code ...
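A usage sketch of the new call shape. The stand-in below mimics GPIOClient's public surface without RabbitMQ or the rpc base class, and the device keys and pin numbers are made up; the point is that one client instance can now address several devices, because the key travels with every request instead of being fixed at construction time.

```python
class FakeGPIOClient(object):
    """Stand-in with the same public surface as GPIOClient, minus the broker."""

    def _call(self, device_key, payload):
        # Echo what the real RPC layer would have sent over the queue.
        return {'device': device_key, 'payload': payload}

    def on(self, device_key, pin_number):
        return self._call(device_key, {'pin': pin_number, 'action': 'on'})

    def off(self, device_key, pin_number):
        return self._call(device_key, {'pin': pin_number, 'action': 'off'})


client = FakeGPIOClient()
print(client.on('greenhouse-pi', 18))   # hypothetical device key and pin
print(client.off('garage-pi', 23))
```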
f012d59f163a8b8a693dc894d211f077ae015d11
Instanssi/kompomaatti/tests.py
Instanssi/kompomaatti/tests.py
from django.test import TestCase from Instanssi.kompomaatti.models import Entry VALID_YOUTUBE_URLS = [ # must handle various protocols in the video URL "http://www.youtube.com/v/asdf123456", "https://www.youtube.com/v/asdf123456/", "//www.youtube.com/v/asdf123456", "www.youtube.com/v/asdf123456", # must handle various other ways to define the video "www.youtube.com/watch?v=asdf123456", "http://youtu.be/asdf123456", "http://youtu.be/asdf123456/" ] class KompomaattiTests(TestCase): def setUp(self): pass def test_youtube_urls(self): """Test that various YouTube URLs are parsed properly.""" for url in VALID_YOUTUBE_URLS: print("Test URL: %s" % url) self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456")
from django.test import TestCase from Instanssi.kompomaatti.models import Entry VALID_YOUTUBE_URLS = [ # must handle various protocols and hostnames in the video URL "http://www.youtube.com/v/asdf123456", "https://www.youtube.com/v/asdf123456/", "//www.youtube.com/v/asdf123456", "www.youtube.com/v/asdf123456", "youtube.com/v/asdf123456/", # must handle various other ways to define the video "www.youtube.com/watch?v=asdf123456", "http://youtu.be/asdf123456", "https://youtu.be/asdf123456/" ] class KompomaattiTests(TestCase): def setUp(self): pass def test_youtube_urls(self): """Test YouTube video id extraction from URLs.""" for url in VALID_YOUTUBE_URLS: self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456", msg="failing URL: %s" % url)
Add more test data; improve feedback on failing case
kompomaatti: Add more test data; improve feedback on failing case
Python
mit
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
from django.test import TestCase from Instanssi.kompomaatti.models import Entry VALID_YOUTUBE_URLS = [ - # must handle various protocols in the video URL + # must handle various protocols and hostnames in the video URL "http://www.youtube.com/v/asdf123456", "https://www.youtube.com/v/asdf123456/", "//www.youtube.com/v/asdf123456", "www.youtube.com/v/asdf123456", + "youtube.com/v/asdf123456/", # must handle various other ways to define the video "www.youtube.com/watch?v=asdf123456", "http://youtu.be/asdf123456", - "http://youtu.be/asdf123456/" + "https://youtu.be/asdf123456/" ] class KompomaattiTests(TestCase): def setUp(self): pass def test_youtube_urls(self): - """Test that various YouTube URLs are parsed properly.""" + """Test YouTube video id extraction from URLs.""" for url in VALID_YOUTUBE_URLS: - print("Test URL: %s" % url) - self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456") + self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456", + msg="failing URL: %s" % url)
Add more test data; improve feedback on failing case
## Code Before: from django.test import TestCase from Instanssi.kompomaatti.models import Entry VALID_YOUTUBE_URLS = [ # must handle various protocols in the video URL "http://www.youtube.com/v/asdf123456", "https://www.youtube.com/v/asdf123456/", "//www.youtube.com/v/asdf123456", "www.youtube.com/v/asdf123456", # must handle various other ways to define the video "www.youtube.com/watch?v=asdf123456", "http://youtu.be/asdf123456", "http://youtu.be/asdf123456/" ] class KompomaattiTests(TestCase): def setUp(self): pass def test_youtube_urls(self): """Test that various YouTube URLs are parsed properly.""" for url in VALID_YOUTUBE_URLS: print("Test URL: %s" % url) self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456") ## Instruction: Add more test data; improve feedback on failing case ## Code After: from django.test import TestCase from Instanssi.kompomaatti.models import Entry VALID_YOUTUBE_URLS = [ # must handle various protocols and hostnames in the video URL "http://www.youtube.com/v/asdf123456", "https://www.youtube.com/v/asdf123456/", "//www.youtube.com/v/asdf123456", "www.youtube.com/v/asdf123456", "youtube.com/v/asdf123456/", # must handle various other ways to define the video "www.youtube.com/watch?v=asdf123456", "http://youtu.be/asdf123456", "https://youtu.be/asdf123456/" ] class KompomaattiTests(TestCase): def setUp(self): pass def test_youtube_urls(self): """Test YouTube video id extraction from URLs.""" for url in VALID_YOUTUBE_URLS: self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456", msg="failing URL: %s" % url)
// ... existing code ... VALID_YOUTUBE_URLS = [ # must handle various protocols and hostnames in the video URL "http://www.youtube.com/v/asdf123456", "https://www.youtube.com/v/asdf123456/", // ... modified code ... "//www.youtube.com/v/asdf123456", "www.youtube.com/v/asdf123456", "youtube.com/v/asdf123456/", # must handle various other ways to define the video "www.youtube.com/watch?v=asdf123456", "http://youtu.be/asdf123456", "https://youtu.be/asdf123456/" ] ... def test_youtube_urls(self): """Test YouTube video id extraction from URLs.""" for url in VALID_YOUTUBE_URLS: self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456", msg="failing URL: %s" % url) // ... rest of the code ...
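The test exercises Entry.youtube_url_to_id, which is not shown in this record. The toy extractor below is one way such a function could behave and is not the project's actual implementation; it exists only so the listed URL shapes and the benefit of the msg= argument (naming the failing URL instead of printing every URL) can be seen end to end.

```python
import re

_YOUTUBE_ID = re.compile(r'(?:v=|/v/|youtu\.be/)([A-Za-z0-9_-]+)')

def youtube_url_to_id(url):
    """Toy extractor covering the URL shapes used in the test data."""
    match = _YOUTUBE_ID.search(url)
    return match.group(1) if match else None

urls = [
    "http://www.youtube.com/v/asdf123456",
    "https://www.youtube.com/v/asdf123456/",
    "//www.youtube.com/v/asdf123456",
    "www.youtube.com/v/asdf123456",
    "youtube.com/v/asdf123456/",
    "www.youtube.com/watch?v=asdf123456",
    "http://youtu.be/asdf123456",
    "https://youtu.be/asdf123456/",
]
for url in urls:
    # Like the msg= keyword added to assertEqual, the message names the
    # offending URL rather than relying on a print() before the check.
    assert youtube_url_to_id(url) == "asdf123456", "failing URL: %s" % url
print("all %d URL forms parsed" % len(urls))
```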
5c1fc4b6ebbd2ee54318c5bc9877868858ea03ee
auth0/v2/authentication/base.py
auth0/v2/authentication/base.py
import json import requests from ..exceptions import Auth0Error class AuthenticationBase(object): def post(self, url, data={}, headers={}): response = requests.post(url=url, data=json.dumps(data), headers=headers) return self._process_response(response) def _process_response(self, response): text = json.loads(response.text) if response.text else {} if 'error' in text: raise Auth0Error(status_code=text['error'], error_code=text['error'], message=text['error_description']) return text
import json import requests from ..exceptions import Auth0Error class AuthenticationBase(object): def post(self, url, data={}, headers={}): response = requests.post(url=url, data=json.dumps(data), headers=headers) return self._process_response(response) def get(self, url, params={}, headers={}): return requests.get(url=url, params=params, headers=headers).text def _process_response(self, response): text = json.loads(response.text) if response.text else {} if 'error' in text: raise Auth0Error(status_code=text['error'], error_code=text['error'], message=text['error_description']) return text
Add .get() method to AuthenticationBase
Add .get() method to AuthenticationBase
Python
mit
auth0/auth0-python,auth0/auth0-python
import json import requests from ..exceptions import Auth0Error class AuthenticationBase(object): def post(self, url, data={}, headers={}): response = requests.post(url=url, data=json.dumps(data), headers=headers) return self._process_response(response) + def get(self, url, params={}, headers={}): + return requests.get(url=url, params=params, headers=headers).text + def _process_response(self, response): text = json.loads(response.text) if response.text else {} if 'error' in text: raise Auth0Error(status_code=text['error'], error_code=text['error'], message=text['error_description']) return text
Add .get() method to AuthenticationBase
## Code Before: import json import requests from ..exceptions import Auth0Error class AuthenticationBase(object): def post(self, url, data={}, headers={}): response = requests.post(url=url, data=json.dumps(data), headers=headers) return self._process_response(response) def _process_response(self, response): text = json.loads(response.text) if response.text else {} if 'error' in text: raise Auth0Error(status_code=text['error'], error_code=text['error'], message=text['error_description']) return text ## Instruction: Add .get() method to AuthenticationBase ## Code After: import json import requests from ..exceptions import Auth0Error class AuthenticationBase(object): def post(self, url, data={}, headers={}): response = requests.post(url=url, data=json.dumps(data), headers=headers) return self._process_response(response) def get(self, url, params={}, headers={}): return requests.get(url=url, params=params, headers=headers).text def _process_response(self, response): text = json.loads(response.text) if response.text else {} if 'error' in text: raise Auth0Error(status_code=text['error'], error_code=text['error'], message=text['error_description']) return text
... return self._process_response(response) def get(self, url, params={}, headers={}): return requests.get(url=url, params=params, headers=headers).text def _process_response(self, response): text = json.loads(response.text) if response.text else {} ...
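Unlike post(), the new get() returns the raw response text and does not go through _process_response. The snippet below shows, without any network traffic, how requests assembles the URL and query string for such a call; the tenant domain, path and token are placeholders, not a statement about Auth0's real endpoints.

```python
import requests

# Prepare (but do not send) the kind of request the new get() helper issues.
req = requests.Request(
    'GET',
    'https://example.auth0.com/userinfo',      # placeholder tenant and path
    params={'access_token': 'PLACEHOLDER'},
    headers={'Content-Type': 'application/json'},
)
prepared = req.prepare()
print(prepared.method, prepared.url)
# -> GET https://example.auth0.com/userinfo?access_token=PLACEHOLDER
```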
5a43c61c0688e2837492e7f034a0dd2c157c6e4d
hypatia/__init__.py
hypatia/__init__.py
__author__ = "Lillian Lemmer" __copyright__ = "Copyright 2015 Lillian Lemmer" __credits__ = ["Lillian Lemmer"] __license__ = "MIT" __maintainer__ = __author__ __site__ = "http://lillian-lemmer.github.io/hypatia/" __email__ = "[email protected]" __status__ = "Development" __version__ = "0.2.8" __contributors__ = [ "Lillian Lemmer", "Brian Houston Morrow", "Eric James Michael Ritz" ]
__author__ = "Lillian Lemmer" __copyright__ = "Copyright 2015 Lillian Lemmer" __credits__ = ["Lillian Lemmer"] __license__ = "MIT" __maintainer__ = __author__ __site__ = "http://lillian-lemmer.github.io/hypatia/" __email__ = "[email protected]" __status__ = "Development" class Version: """A represntation of Hypatia's current version. This class contains integer fields for the major, minor, and patch version numbers, respectively. This is useful for comparison within code if it becomes necessary to have code behave differently based on the version, e.g. for backwards compatibility. The class also supports str() which converts an instance into a human-readable string, e.g. '0.2.8'. Public Properties: * major * minor * patch """ def __init__(self, major, minor, patch): self.major = major self.minor = minor self.patch = patch def __str__(self): return "%d.%d.%d" % (self.major, self.minor, self.patch) # str(__version__) will produce a string like "0.2.8" __version__ = Version(0, 2, 8) __contributors__ = [ "Lillian Lemmer", "Brian Houston Morrow", "Eric James Michael Ritz" ]
Add a class for representing the current version
[Feature] Add a class for representing the current version This patch implements the `Version` class inside of the `__init__.py` file alongside the rest of Hypatia's meta-data. The class has public integer properties representing the major, minor, and patch portions of the version number. This makes it possible for other code in the engine to support or deprecate features based on the version by comparing their numeric values, e.g. if Version.major < 3: # Report some error about a deprecated feature. The `Version` class also implements `__str__()` so that it is possible to convert `__version__` into a string, e.g. for `print()`, for the purpose of output, e.g. print(__version__) # "0.2.8" Signed-off-by: Eric James Michael Ritz <[email protected]>
Python
mit
lillian-lemmer/hypatia,hypatia-software-org/hypatia-engine,brechin/hypatia,lillian-lemmer/hypatia,hypatia-software-org/hypatia-engine,Applemann/hypatia,Applemann/hypatia,brechin/hypatia
__author__ = "Lillian Lemmer" __copyright__ = "Copyright 2015 Lillian Lemmer" __credits__ = ["Lillian Lemmer"] __license__ = "MIT" __maintainer__ = __author__ __site__ = "http://lillian-lemmer.github.io/hypatia/" __email__ = "[email protected]" __status__ = "Development" - __version__ = "0.2.8" + + + + class Version: + """A represntation of Hypatia's current version. + + This class contains integer fields for the major, minor, and patch + version numbers, respectively. This is useful for comparison + within code if it becomes necessary to have code behave + differently based on the version, e.g. for backwards + compatibility. The class also supports str() which converts an + instance into a human-readable string, e.g. '0.2.8'. + + Public Properties: + + * major + * minor + * patch + + """ + def __init__(self, major, minor, patch): + self.major = major + self.minor = minor + self.patch = patch + + def __str__(self): + return "%d.%d.%d" % (self.major, self.minor, self.patch) + + + # str(__version__) will produce a string like "0.2.8" + __version__ = Version(0, 2, 8) + + __contributors__ = [ "Lillian Lemmer", "Brian Houston Morrow", "Eric James Michael Ritz" ]
Add a class for representing the current version
## Code Before: __author__ = "Lillian Lemmer" __copyright__ = "Copyright 2015 Lillian Lemmer" __credits__ = ["Lillian Lemmer"] __license__ = "MIT" __maintainer__ = __author__ __site__ = "http://lillian-lemmer.github.io/hypatia/" __email__ = "[email protected]" __status__ = "Development" __version__ = "0.2.8" __contributors__ = [ "Lillian Lemmer", "Brian Houston Morrow", "Eric James Michael Ritz" ] ## Instruction: Add a class for representing the current version ## Code After: __author__ = "Lillian Lemmer" __copyright__ = "Copyright 2015 Lillian Lemmer" __credits__ = ["Lillian Lemmer"] __license__ = "MIT" __maintainer__ = __author__ __site__ = "http://lillian-lemmer.github.io/hypatia/" __email__ = "[email protected]" __status__ = "Development" class Version: """A represntation of Hypatia's current version. This class contains integer fields for the major, minor, and patch version numbers, respectively. This is useful for comparison within code if it becomes necessary to have code behave differently based on the version, e.g. for backwards compatibility. The class also supports str() which converts an instance into a human-readable string, e.g. '0.2.8'. Public Properties: * major * minor * patch """ def __init__(self, major, minor, patch): self.major = major self.minor = minor self.patch = patch def __str__(self): return "%d.%d.%d" % (self.major, self.minor, self.patch) # str(__version__) will produce a string like "0.2.8" __version__ = Version(0, 2, 8) __contributors__ = [ "Lillian Lemmer", "Brian Houston Morrow", "Eric James Michael Ritz" ]
... __email__ = "[email protected]" __status__ = "Development" class Version: """A represntation of Hypatia's current version. This class contains integer fields for the major, minor, and patch version numbers, respectively. This is useful for comparison within code if it becomes necessary to have code behave differently based on the version, e.g. for backwards compatibility. The class also supports str() which converts an instance into a human-readable string, e.g. '0.2.8'. Public Properties: * major * minor * patch """ def __init__(self, major, minor, patch): self.major = major self.minor = minor self.patch = patch def __str__(self): return "%d.%d.%d" % (self.major, self.minor, self.patch) # str(__version__) will produce a string like "0.2.8" __version__ = Version(0, 2, 8) __contributors__ = [ ...
3fb0f567dcaf69e4fa9872702ffbfa8ab0e69eaf
lib/utilities/key_exists.py
lib/utilities/key_exists.py
def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if current_pointer is None: return False if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
Add more error handling to key exists
Add more error handling to key exists
Python
mpl-2.0
mpurzynski/MozDef,mozilla/MozDef,jeffbryner/MozDef,mozilla/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,mozilla/MozDef,mozilla/MozDef,gdestuynder/MozDef,gdestuynder/MozDef,gdestuynder/MozDef,mpurzynski/MozDef,jeffbryner/MozDef
def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: + if current_pointer is None: + return False if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
Add more error handling to key exists
## Code Before: def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False ## Instruction: Add more error handling to key exists ## Code After: def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if current_pointer is None: return False if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
// ... existing code ... current_pointer = inputed_dict for updated_key in num_levels: if current_pointer is None: return False if updated_key == num_levels[-1]: return updated_key in current_pointer // ... rest of the code ...
a2fe7d1bb38bedee808c6b1a21cd5e3d93863c6c
winthrop/urls.py
winthrop/urls.py
from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ]
from django.conf.urls import url from django.contrib import admin from django.views.generic.base import RedirectView urlpatterns = [ # for now, since there is not yet any public-facing site, # redirect base url to admin index page url(r'^$', RedirectView.as_view(pattern_name='admin:index')), url(r'^admin/', admin.site.urls), ]
Add redirect from site base url to admin index for now
Add redirect from site base url to admin index for now
Python
apache-2.0
Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django
from django.conf.urls import url from django.contrib import admin + from django.views.generic.base import RedirectView urlpatterns = [ + # for now, since there is not yet any public-facing site, + # redirect base url to admin index page + url(r'^$', RedirectView.as_view(pattern_name='admin:index')), url(r'^admin/', admin.site.urls), ]
Add redirect from site base url to admin index for now
## Code Before: from django.conf.urls import url from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), ] ## Instruction: Add redirect from site base url to admin index for now ## Code After: from django.conf.urls import url from django.contrib import admin from django.views.generic.base import RedirectView urlpatterns = [ # for now, since there is not yet any public-facing site, # redirect base url to admin index page url(r'^$', RedirectView.as_view(pattern_name='admin:index')), url(r'^admin/', admin.site.urls), ]
# ... existing code ... from django.conf.urls import url from django.contrib import admin from django.views.generic.base import RedirectView urlpatterns = [ # for now, since there is not yet any public-facing site, # redirect base url to admin index page url(r'^$', RedirectView.as_view(pattern_name='admin:index')), url(r'^admin/', admin.site.urls), ] # ... rest of the code ...
8868cc4e8379002c62db7f69ca77ec8449930321
src/adhocracy_core/adhocracy_core/scripts/import_resources.py
src/adhocracy_core/adhocracy_core/scripts/import_resources.py
# pragma: no cover import argparse import inspect import logging import sys import transaction from pyramid.paster import bootstrap from adhocracy_core import scripts def import_resources(): """Import resources from a JSON file. usage:: bin/import_resources etc/development.ini <filename> """ epilog = """The input JSON file contains the interface name of the resource type to create and a serialization of the sheets data. Strings having the form 'user_by_login: <username>' are resolved to the user's path. """ docstring = inspect.getdoc(import_resources) parser = argparse.ArgumentParser(description=docstring, epilog=epilog) parser.add_argument('ini_file', help='path to the adhocracy backend ini file') parser.add_argument('filename', type=str, help='file containing the resources descriptions') args = parser.parse_args() env = bootstrap(args.ini_file) logging.basicConfig(stream=sys.stdout, level=logging.INFO) scripts.import_resources(env['root'], env['registry'], args.filename) transaction.commit() env['closer']()
# pragma: no cover import argparse import inspect import logging import sys import transaction from pyramid.paster import bootstrap from . import import_resources as main_import_resources def import_resources(): """Import resources from a JSON file. usage:: bin/import_resources etc/development.ini <filename> """ epilog = """The input JSON file contains the interface name of the resource type to create and a serialization of the sheets data. Strings having the form 'user_by_login: <username>' are resolved to the user's path. """ docstring = inspect.getdoc(import_resources) parser = argparse.ArgumentParser(description=docstring, epilog=epilog) parser.add_argument('ini_file', help='path to the adhocracy backend ini file') parser.add_argument('filename', type=str, help='file containing the resources descriptions') args = parser.parse_args() env = bootstrap(args.ini_file) logging.basicConfig(stream=sys.stdout, level=logging.INFO) main_import_resources(env['root'], env['registry'], args.filename) transaction.commit() env['closer']()
Fix import resources command line wrapper
Fix import resources command line wrapper
Python
agpl-3.0
liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator
# pragma: no cover import argparse import inspect import logging import sys import transaction from pyramid.paster import bootstrap - from adhocracy_core import scripts + from . import import_resources as main_import_resources def import_resources(): """Import resources from a JSON file. usage:: bin/import_resources etc/development.ini <filename> """ epilog = """The input JSON file contains the interface name of the resource type to create and a serialization of the sheets data. Strings having the form 'user_by_login: <username>' are resolved to the user's path. """ docstring = inspect.getdoc(import_resources) parser = argparse.ArgumentParser(description=docstring, epilog=epilog) parser.add_argument('ini_file', help='path to the adhocracy backend ini file') parser.add_argument('filename', type=str, help='file containing the resources descriptions') args = parser.parse_args() env = bootstrap(args.ini_file) logging.basicConfig(stream=sys.stdout, level=logging.INFO) - scripts.import_resources(env['root'], env['registry'], args.filename) + main_import_resources(env['root'], env['registry'], args.filename) transaction.commit() env['closer']()
Fix import resources command line wrapper
## Code Before: # pragma: no cover import argparse import inspect import logging import sys import transaction from pyramid.paster import bootstrap from adhocracy_core import scripts def import_resources(): """Import resources from a JSON file. usage:: bin/import_resources etc/development.ini <filename> """ epilog = """The input JSON file contains the interface name of the resource type to create and a serialization of the sheets data. Strings having the form 'user_by_login: <username>' are resolved to the user's path. """ docstring = inspect.getdoc(import_resources) parser = argparse.ArgumentParser(description=docstring, epilog=epilog) parser.add_argument('ini_file', help='path to the adhocracy backend ini file') parser.add_argument('filename', type=str, help='file containing the resources descriptions') args = parser.parse_args() env = bootstrap(args.ini_file) logging.basicConfig(stream=sys.stdout, level=logging.INFO) scripts.import_resources(env['root'], env['registry'], args.filename) transaction.commit() env['closer']() ## Instruction: Fix import resources command line wrapper ## Code After: # pragma: no cover import argparse import inspect import logging import sys import transaction from pyramid.paster import bootstrap from . import import_resources as main_import_resources def import_resources(): """Import resources from a JSON file. usage:: bin/import_resources etc/development.ini <filename> """ epilog = """The input JSON file contains the interface name of the resource type to create and a serialization of the sheets data. Strings having the form 'user_by_login: <username>' are resolved to the user's path. """ docstring = inspect.getdoc(import_resources) parser = argparse.ArgumentParser(description=docstring, epilog=epilog) parser.add_argument('ini_file', help='path to the adhocracy backend ini file') parser.add_argument('filename', type=str, help='file containing the resources descriptions') args = parser.parse_args() env = bootstrap(args.ini_file) logging.basicConfig(stream=sys.stdout, level=logging.INFO) main_import_resources(env['root'], env['registry'], args.filename) transaction.commit() env['closer']()
# ... existing code ... from pyramid.paster import bootstrap from . import import_resources as main_import_resources # ... modified code ... env = bootstrap(args.ini_file) logging.basicConfig(stream=sys.stdout, level=logging.INFO) main_import_resources(env['root'], env['registry'], args.filename) transaction.commit() env['closer']() # ... rest of the code ...
fd4c7e3af81a4a37462dfcd7c3ac4eb43bdafcb2
crmapp/subscribers/models.py
crmapp/subscribers/models.py
from django.db import models from django.contrib.auth.models import User class Subscriber(models.Model): user_rec = models.ForeignKey(User) address_one = models.CharField(max_length=100) address_two = models.CharField(max_length=100, blank=True) city = models.CharField(max_length=50) state = models.CharField(max_length=2) stripe_id = models.CharField(max_length=30, blank=True) class Meta: verbose_name_plural = 'subscribers' def __unicode__(self): return u"%s's Subscription Info" % self.user_rec
from django.db import models from django.contrib.auth.models import User from django.conf import settings import stripe class Subscriber(models.Model): user_rec = models.ForeignKey(User) address_one = models.CharField(max_length=100) address_two = models.CharField(max_length=100, blank=True) city = models.CharField(max_length=50) state = models.CharField(max_length=2) stripe_id = models.CharField(max_length=30, blank=True) class Meta: verbose_name_plural = 'subscribers' def __unicode__(self): return u"%s's Subscription Info" % self.user_rec def charge(self, request, email, fee): # Set your secret key: remember to change this to your live secret key # in production. See your keys here https://manage.stripe.com/account stripe.api_key = settings.STRIPE_SECRET_KEY # Get the credit card details submitted by the form token = request.POST['stripeToken'] # Create a Customer stripe_customer = stripe.Customer.create( card=token, description=email ) # Save the Stripe ID to the customer's profile self.stripe_id = stripe_customer.id self.save() # Charge the Customer instead of the card stripe.Charge.create( amount=fee, # in cents currency="usd", customer=stripe_customer.id ) return stripe_customer
Create the Subscriber Form - Part III > Create Stripe Processing Code
Create the Subscriber Form - Part III > Create Stripe Processing Code
Python
mit
deenaariff/Django,tabdon/crmeasyapp,tabdon/crmeasyapp
from django.db import models from django.contrib.auth.models import User + from django.conf import settings + + import stripe + class Subscriber(models.Model): user_rec = models.ForeignKey(User) address_one = models.CharField(max_length=100) address_two = models.CharField(max_length=100, blank=True) city = models.CharField(max_length=50) state = models.CharField(max_length=2) stripe_id = models.CharField(max_length=30, blank=True) class Meta: verbose_name_plural = 'subscribers' def __unicode__(self): return u"%s's Subscription Info" % self.user_rec + def charge(self, request, email, fee): + # Set your secret key: remember to change this to your live secret key + # in production. See your keys here https://manage.stripe.com/account + stripe.api_key = settings.STRIPE_SECRET_KEY + + # Get the credit card details submitted by the form + token = request.POST['stripeToken'] + + # Create a Customer + stripe_customer = stripe.Customer.create( + card=token, + description=email + ) + + # Save the Stripe ID to the customer's profile + self.stripe_id = stripe_customer.id + self.save() + + # Charge the Customer instead of the card + stripe.Charge.create( + amount=fee, # in cents + currency="usd", + customer=stripe_customer.id + ) + + return stripe_customer +
Create the Subscriber Form - Part III > Create Stripe Processing Code
## Code Before: from django.db import models from django.contrib.auth.models import User class Subscriber(models.Model): user_rec = models.ForeignKey(User) address_one = models.CharField(max_length=100) address_two = models.CharField(max_length=100, blank=True) city = models.CharField(max_length=50) state = models.CharField(max_length=2) stripe_id = models.CharField(max_length=30, blank=True) class Meta: verbose_name_plural = 'subscribers' def __unicode__(self): return u"%s's Subscription Info" % self.user_rec ## Instruction: Create the Subscriber Form - Part III > Create Stripe Processing Code ## Code After: from django.db import models from django.contrib.auth.models import User from django.conf import settings import stripe class Subscriber(models.Model): user_rec = models.ForeignKey(User) address_one = models.CharField(max_length=100) address_two = models.CharField(max_length=100, blank=True) city = models.CharField(max_length=50) state = models.CharField(max_length=2) stripe_id = models.CharField(max_length=30, blank=True) class Meta: verbose_name_plural = 'subscribers' def __unicode__(self): return u"%s's Subscription Info" % self.user_rec def charge(self, request, email, fee): # Set your secret key: remember to change this to your live secret key # in production. See your keys here https://manage.stripe.com/account stripe.api_key = settings.STRIPE_SECRET_KEY # Get the credit card details submitted by the form token = request.POST['stripeToken'] # Create a Customer stripe_customer = stripe.Customer.create( card=token, description=email ) # Save the Stripe ID to the customer's profile self.stripe_id = stripe_customer.id self.save() # Charge the Customer instead of the card stripe.Charge.create( amount=fee, # in cents currency="usd", customer=stripe_customer.id ) return stripe_customer
... from django.db import models from django.contrib.auth.models import User from django.conf import settings import stripe class Subscriber(models.Model): ... def __unicode__(self): return u"%s's Subscription Info" % self.user_rec def charge(self, request, email, fee): # Set your secret key: remember to change this to your live secret key # in production. See your keys here https://manage.stripe.com/account stripe.api_key = settings.STRIPE_SECRET_KEY # Get the credit card details submitted by the form token = request.POST['stripeToken'] # Create a Customer stripe_customer = stripe.Customer.create( card=token, description=email ) # Save the Stripe ID to the customer's profile self.stripe_id = stripe_customer.id self.save() # Charge the Customer instead of the card stripe.Charge.create( amount=fee, # in cents currency="usd", customer=stripe_customer.id ) return stripe_customer ...
66f06164a5654f2925fb16a1ce28638fd57e3a9e
issue_tracker/accounts/urls.py
issue_tracker/accounts/urls.py
from django.conf.urls.defaults import * from django.contrib.auth.views import logout_then_login, login from django.contrib.auth.forms import AuthenticationForm urlpatterns = patterns('', (r'^login/$', login, {}, 'login' ), (r'^logout/$', logout_then_login, {}, 'logout'), )
from django.conf.urls.defaults import * from django.contrib.auth.views import logout_then_login, login from accounts.views import register from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.forms import AuthenticationForm urlpatterns = patterns('', (r'^register/$', register, {}, 'register' ), (r'^login/$', login, {}, 'login' ), (r'^logout/$', logout_then_login, {}, 'logout'), )
Add url mapping to register.
Add url mapping to register.
Python
mit
hfrequency/django-issue-tracker
from django.conf.urls.defaults import * from django.contrib.auth.views import logout_then_login, login + from accounts.views import register + from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.forms import AuthenticationForm urlpatterns = patterns('', + (r'^register/$', register, {}, 'register' ), (r'^login/$', login, {}, 'login' ), (r'^logout/$', logout_then_login, {}, 'logout'), )
Add url mapping to register.
## Code Before: from django.conf.urls.defaults import * from django.contrib.auth.views import logout_then_login, login from django.contrib.auth.forms import AuthenticationForm urlpatterns = patterns('', (r'^login/$', login, {}, 'login' ), (r'^logout/$', logout_then_login, {}, 'logout'), ) ## Instruction: Add url mapping to register. ## Code After: from django.conf.urls.defaults import * from django.contrib.auth.views import logout_then_login, login from accounts.views import register from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.forms import AuthenticationForm urlpatterns = patterns('', (r'^register/$', register, {}, 'register' ), (r'^login/$', login, {}, 'login' ), (r'^logout/$', logout_then_login, {}, 'logout'), )
# ... existing code ... from django.conf.urls.defaults import * from django.contrib.auth.views import logout_then_login, login from accounts.views import register from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.forms import AuthenticationForm urlpatterns = patterns('', (r'^register/$', register, {}, 'register' ), (r'^login/$', login, {}, 'login' ), (r'^logout/$', logout_then_login, {}, 'logout'), # ... rest of the code ...
a05e9eff86ae43f83600842c5b9a840d22db6682
pyinfra/api/__init__.py
pyinfra/api/__init__.py
from .command import ( # noqa: F401 # pragma: no cover FileDownloadCommand, FileUploadCommand, FunctionCommand, MaskString, QuoteString, StringCommand, ) from .config import Config # noqa: F401 # pragma: no cover from .deploy import deploy # noqa: F401 # pragma: no cover from .exceptions import ( # noqa: F401 # pragma: no cover DeployError, InventoryError, OperationError, OperationTypeError, ) from .facts import FactBase, ShortFactBase # noqa: F401 # pragma: no cover from .inventory import Inventory # noqa: F401 # pragma: no cover from .operation import operation # noqa: F401 # pragma: no cover from .state import State # noqa: F401 # pragma: no cover
from .command import ( # noqa: F401 # pragma: no cover FileDownloadCommand, FileUploadCommand, FunctionCommand, MaskString, QuoteString, StringCommand, ) from .config import Config # noqa: F401 # pragma: no cover from .deploy import deploy # noqa: F401 # pragma: no cover from .exceptions import ( # noqa: F401 # pragma: no cover DeployError, InventoryError, OperationError, OperationTypeError, ) from .facts import FactBase, ShortFactBase # noqa: F401 # pragma: no cover from .host import Host # noqa: F401 # pragma: no cover from .inventory import Inventory # noqa: F401 # pragma: no cover from .operation import operation # noqa: F401 # pragma: no cover from .state import State # noqa: F401 # pragma: no cover
Add `Host` to `pyinfra.api` imports.
Add `Host` to `pyinfra.api` imports.
Python
mit
Fizzadar/pyinfra,Fizzadar/pyinfra
from .command import ( # noqa: F401 # pragma: no cover FileDownloadCommand, FileUploadCommand, FunctionCommand, MaskString, QuoteString, StringCommand, ) from .config import Config # noqa: F401 # pragma: no cover from .deploy import deploy # noqa: F401 # pragma: no cover from .exceptions import ( # noqa: F401 # pragma: no cover DeployError, InventoryError, OperationError, OperationTypeError, ) from .facts import FactBase, ShortFactBase # noqa: F401 # pragma: no cover + from .host import Host # noqa: F401 # pragma: no cover from .inventory import Inventory # noqa: F401 # pragma: no cover from .operation import operation # noqa: F401 # pragma: no cover from .state import State # noqa: F401 # pragma: no cover
Add `Host` to `pyinfra.api` imports.
## Code Before: from .command import ( # noqa: F401 # pragma: no cover FileDownloadCommand, FileUploadCommand, FunctionCommand, MaskString, QuoteString, StringCommand, ) from .config import Config # noqa: F401 # pragma: no cover from .deploy import deploy # noqa: F401 # pragma: no cover from .exceptions import ( # noqa: F401 # pragma: no cover DeployError, InventoryError, OperationError, OperationTypeError, ) from .facts import FactBase, ShortFactBase # noqa: F401 # pragma: no cover from .inventory import Inventory # noqa: F401 # pragma: no cover from .operation import operation # noqa: F401 # pragma: no cover from .state import State # noqa: F401 # pragma: no cover ## Instruction: Add `Host` to `pyinfra.api` imports. ## Code After: from .command import ( # noqa: F401 # pragma: no cover FileDownloadCommand, FileUploadCommand, FunctionCommand, MaskString, QuoteString, StringCommand, ) from .config import Config # noqa: F401 # pragma: no cover from .deploy import deploy # noqa: F401 # pragma: no cover from .exceptions import ( # noqa: F401 # pragma: no cover DeployError, InventoryError, OperationError, OperationTypeError, ) from .facts import FactBase, ShortFactBase # noqa: F401 # pragma: no cover from .host import Host # noqa: F401 # pragma: no cover from .inventory import Inventory # noqa: F401 # pragma: no cover from .operation import operation # noqa: F401 # pragma: no cover from .state import State # noqa: F401 # pragma: no cover
# ... existing code ... ) from .facts import FactBase, ShortFactBase # noqa: F401 # pragma: no cover from .host import Host # noqa: F401 # pragma: no cover from .inventory import Inventory # noqa: F401 # pragma: no cover from .operation import operation # noqa: F401 # pragma: no cover # ... rest of the code ...
c7c1fa91a0ec213bd648f2f50f95f5652891d3ab
main/readability_graph.py
main/readability_graph.py
import graph from corpus.mysql.reddit import RedditMySQLCorpus import cred if __name__ == '__main__': corpus = RedditMySQLCorpus() corpus.setup(**(cred.kwargs)) result = corpus.run_sql('SELECT ari FROM comment_feature_read', None) print('Got results') values = [ result[i]['ari'] for i in result ] graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values')
import graph from corpus.mysql.reddit import RedditMySQLCorpus import cred if __name__ == '__main__': corpus = RedditMySQLCorpus() corpus.setup(**(cred.kwargs)) result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None) print('Got results') values = [ result[i]['ari'] for i in result ] graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values') values = [ result[i]['flesch_reading_ease'] for i in result ] graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency', 'Frequency of Flesch Reading Ease values') values = [ result[i]['flesch_kincaid_grade_level'] for i in result ] graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency', 'Frequency of Flesch Kincaid Grade Level values') values = [ result[i]['gunning_fog_index'] for i in result ] graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency', 'Frequency of Gunning Fog Index values') values = [ result[i]['smog_index'] for i in result ] graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency', 'Frequency of Smog Index values') values = [ result[i]['coleman_liau_index'] for i in result ] graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency', 'Frequency of Coleman Liau Index values') values = [ result[i]['lix'] for i in result ] graph.hist('data/lix_hist', values, 'LIX', 'Frequency', 'Frequency of LIX values') values = [ result[i]['rix'] for i in result ] graph.hist('data/rix_hist', values, 'RIX', 'Frequency', 'Frequency of RIX values')
Add other statistical measures for graphing
Add other statistical measures for graphing
Python
mit
worldwise001/stylometry
import graph from corpus.mysql.reddit import RedditMySQLCorpus import cred if __name__ == '__main__': corpus = RedditMySQLCorpus() corpus.setup(**(cred.kwargs)) - result = corpus.run_sql('SELECT ari FROM comment_feature_read', None) + result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None) print('Got results') + values = [ result[i]['ari'] for i in result ] - graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values') + graph.hist('data/ari_hist', values, 'ARI', 'Frequency', + 'Frequency of ARI values') + values = [ result[i]['flesch_reading_ease'] for i in result ] + graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency', + 'Frequency of Flesch Reading Ease values') + + values = [ result[i]['flesch_kincaid_grade_level'] for i in result ] + graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency', + 'Frequency of Flesch Kincaid Grade Level values') + + values = [ result[i]['gunning_fog_index'] for i in result ] + graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency', + 'Frequency of Gunning Fog Index values') + + values = [ result[i]['smog_index'] for i in result ] + graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency', + 'Frequency of Smog Index values') + + values = [ result[i]['coleman_liau_index'] for i in result ] + graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency', + 'Frequency of Coleman Liau Index values') + + values = [ result[i]['lix'] for i in result ] + graph.hist('data/lix_hist', values, 'LIX', 'Frequency', + 'Frequency of LIX values') + + values = [ result[i]['rix'] for i in result ] + graph.hist('data/rix_hist', values, 'RIX', 'Frequency', + 'Frequency of RIX values') +
Add other statistical measures for graphing
## Code Before: import graph from corpus.mysql.reddit import RedditMySQLCorpus import cred if __name__ == '__main__': corpus = RedditMySQLCorpus() corpus.setup(**(cred.kwargs)) result = corpus.run_sql('SELECT ari FROM comment_feature_read', None) print('Got results') values = [ result[i]['ari'] for i in result ] graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values') ## Instruction: Add other statistical measures for graphing ## Code After: import graph from corpus.mysql.reddit import RedditMySQLCorpus import cred if __name__ == '__main__': corpus = RedditMySQLCorpus() corpus.setup(**(cred.kwargs)) result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None) print('Got results') values = [ result[i]['ari'] for i in result ] graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values') values = [ result[i]['flesch_reading_ease'] for i in result ] graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency', 'Frequency of Flesch Reading Ease values') values = [ result[i]['flesch_kincaid_grade_level'] for i in result ] graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency', 'Frequency of Flesch Kincaid Grade Level values') values = [ result[i]['gunning_fog_index'] for i in result ] graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency', 'Frequency of Gunning Fog Index values') values = [ result[i]['smog_index'] for i in result ] graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency', 'Frequency of Smog Index values') values = [ result[i]['coleman_liau_index'] for i in result ] graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency', 'Frequency of Coleman Liau Index values') values = [ result[i]['lix'] for i in result ] graph.hist('data/lix_hist', values, 'LIX', 'Frequency', 'Frequency of LIX values') values = [ result[i]['rix'] for i in result ] graph.hist('data/rix_hist', values, 'RIX', 'Frequency', 'Frequency of RIX values')
// ... existing code ... corpus.setup(**(cred.kwargs)) result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None) print('Got results') values = [ result[i]['ari'] for i in result ] graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values') values = [ result[i]['flesch_reading_ease'] for i in result ] graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency', 'Frequency of Flesch Reading Ease values') values = [ result[i]['flesch_kincaid_grade_level'] for i in result ] graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency', 'Frequency of Flesch Kincaid Grade Level values') values = [ result[i]['gunning_fog_index'] for i in result ] graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency', 'Frequency of Gunning Fog Index values') values = [ result[i]['smog_index'] for i in result ] graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency', 'Frequency of Smog Index values') values = [ result[i]['coleman_liau_index'] for i in result ] graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency', 'Frequency of Coleman Liau Index values') values = [ result[i]['lix'] for i in result ] graph.hist('data/lix_hist', values, 'LIX', 'Frequency', 'Frequency of LIX values') values = [ result[i]['rix'] for i in result ] graph.hist('data/rix_hist', values, 'RIX', 'Frequency', 'Frequency of RIX values') // ... rest of the code ...
1a830d0581f2baed76cb48eeee5f32d465737657
src/artgraph/plugins/infobox.py
src/artgraph/plugins/infobox.py
from artgraph.node import NodeTypes from artgraph.plugins import Plugin class InfoboxPlugin(Plugin): def __init__(self, node): self._node = node @staticmethod def get_target_node_type(): return NodeTypes.ARTIST def get_nodes(self): from artgraph.node import Node, NodeTypes from artgraph.relationship import AssociatedActRelationship relationships = [] wikicode = self.get_wikicode(self._node.get_dbtitle()) if wikicode: templates = wikicode.filter_templates() for t in templates: if t.name.matches('Infobox musical artist'): # Fill in current node info if t.has('birth_name'): name = str(t.get('birth_name').value) db = self.get_artistgraph_connection() cursor = db.cursor() cursor.execute("UPDATE artist SET name = %s WHERE artistID = %s", (name, self._node.get_id())) db.commit() db.close() associated_acts = t.get('associated_acts') for w in associated_acts.value.filter_wikilinks(): relationships.append(AssociatedActRelationship(self._node, Node(str(w.title), NodeTypes.ARTIST))) return relationships
from artgraph.node import NodeTypes from artgraph.plugins import Plugin class InfoboxPlugin(Plugin): def __init__(self, node): self._node = node @staticmethod def get_target_node_type(): return NodeTypes.ARTIST def get_nodes(self): from artgraph.node import Node, NodeTypes from artgraph.relationship import AssociatedActRelationship relationships = [] wikicode = self.get_wikicode(self._node.get_dbtitle()) if wikicode: templates = wikicode.filter_templates() for t in templates: if t.name.matches('Infobox musical artist'): # Fill in current node info if t.has('birth_name'): name = str(t.get('birth_name').value) db = self.get_artistgraph_connection() cursor = db.cursor() cursor.execute("UPDATE artist SET name = %s WHERE artistID = %s", (name, self._node.get_id())) db.commit() db.close() if not t.has('associated_acts'): continue associated_acts = t.get('associated_acts') for w in associated_acts.value.filter_wikilinks(): relationships.append(AssociatedActRelationship(self._node, Node(str(w.title), NodeTypes.ARTIST))) return relationships
Check if there are associated acts before querying for them
Check if there are associated acts before querying for them
Python
mit
dMaggot/ArtistGraph
from artgraph.node import NodeTypes from artgraph.plugins import Plugin class InfoboxPlugin(Plugin): def __init__(self, node): self._node = node @staticmethod def get_target_node_type(): return NodeTypes.ARTIST def get_nodes(self): from artgraph.node import Node, NodeTypes from artgraph.relationship import AssociatedActRelationship relationships = [] wikicode = self.get_wikicode(self._node.get_dbtitle()) if wikicode: templates = wikicode.filter_templates() for t in templates: if t.name.matches('Infobox musical artist'): # Fill in current node info if t.has('birth_name'): name = str(t.get('birth_name').value) db = self.get_artistgraph_connection() cursor = db.cursor() cursor.execute("UPDATE artist SET name = %s WHERE artistID = %s", (name, self._node.get_id())) db.commit() db.close() + if not t.has('associated_acts'): + continue + associated_acts = t.get('associated_acts') for w in associated_acts.value.filter_wikilinks(): relationships.append(AssociatedActRelationship(self._node, Node(str(w.title), NodeTypes.ARTIST))) return relationships
Check if there are associated acts before querying for them
## Code Before: from artgraph.node import NodeTypes from artgraph.plugins import Plugin class InfoboxPlugin(Plugin): def __init__(self, node): self._node = node @staticmethod def get_target_node_type(): return NodeTypes.ARTIST def get_nodes(self): from artgraph.node import Node, NodeTypes from artgraph.relationship import AssociatedActRelationship relationships = [] wikicode = self.get_wikicode(self._node.get_dbtitle()) if wikicode: templates = wikicode.filter_templates() for t in templates: if t.name.matches('Infobox musical artist'): # Fill in current node info if t.has('birth_name'): name = str(t.get('birth_name').value) db = self.get_artistgraph_connection() cursor = db.cursor() cursor.execute("UPDATE artist SET name = %s WHERE artistID = %s", (name, self._node.get_id())) db.commit() db.close() associated_acts = t.get('associated_acts') for w in associated_acts.value.filter_wikilinks(): relationships.append(AssociatedActRelationship(self._node, Node(str(w.title), NodeTypes.ARTIST))) return relationships ## Instruction: Check if there are associated acts before querying for them ## Code After: from artgraph.node import NodeTypes from artgraph.plugins import Plugin class InfoboxPlugin(Plugin): def __init__(self, node): self._node = node @staticmethod def get_target_node_type(): return NodeTypes.ARTIST def get_nodes(self): from artgraph.node import Node, NodeTypes from artgraph.relationship import AssociatedActRelationship relationships = [] wikicode = self.get_wikicode(self._node.get_dbtitle()) if wikicode: templates = wikicode.filter_templates() for t in templates: if t.name.matches('Infobox musical artist'): # Fill in current node info if t.has('birth_name'): name = str(t.get('birth_name').value) db = self.get_artistgraph_connection() cursor = db.cursor() cursor.execute("UPDATE artist SET name = %s WHERE artistID = %s", (name, self._node.get_id())) db.commit() db.close() if not t.has('associated_acts'): continue associated_acts = t.get('associated_acts') for w in associated_acts.value.filter_wikilinks(): relationships.append(AssociatedActRelationship(self._node, Node(str(w.title), NodeTypes.ARTIST))) return relationships
... db.close() if not t.has('associated_acts'): continue associated_acts = t.get('associated_acts') ...
9f9d36025db87b7326b235131063ef852f43cef8
euxfel_h5tools/h5index.py
euxfel_h5tools/h5index.py
import csv import h5py import sys def hdf5_datasets(grp): """Print CSV data of all datasets in an HDF5 file. path, shape, dtype """ writer = csv.writer(sys.stdout) writer.writerow(['path', 'shape', 'dtype']) def visitor(path, item): if isinstance(item, h5py.Dataset): writer.writerow([path, item.shape, item.dtype.str]) grp.visititems(visitor) def main(): file = h5py.File(sys.argv[1]) hdf5_datasets(file) if __name__ == '__main__': main()
import csv import h5py import sys def hdf5_datasets(grp): """Print CSV data of all datasets in an HDF5 file. path, shape, dtype """ all_datasets = [] def visitor(path, item): if isinstance(item, h5py.Dataset): all_datasets.append([path, item.shape, item.dtype.str]) grp.visititems(visitor) writer = csv.writer(sys.stdout) writer.writerow(['path', 'shape', 'dtype']) for row in sorted(all_datasets): writer.writerow(row) def main(): file = h5py.File(sys.argv[1]) hdf5_datasets(file) if __name__ == '__main__': main()
Sort datasets for index of HDF5 files
Sort datasets for index of HDF5 files
Python
bsd-3-clause
European-XFEL/h5tools-py
import csv import h5py import sys def hdf5_datasets(grp): """Print CSV data of all datasets in an HDF5 file. path, shape, dtype """ + all_datasets = [] - writer = csv.writer(sys.stdout) - writer.writerow(['path', 'shape', 'dtype']) def visitor(path, item): if isinstance(item, h5py.Dataset): - writer.writerow([path, item.shape, item.dtype.str]) + all_datasets.append([path, item.shape, item.dtype.str]) grp.visititems(visitor) + + writer = csv.writer(sys.stdout) + writer.writerow(['path', 'shape', 'dtype']) + for row in sorted(all_datasets): + writer.writerow(row) def main(): file = h5py.File(sys.argv[1]) hdf5_datasets(file) if __name__ == '__main__': main()
Sort datasets for index of HDF5 files
## Code Before: import csv import h5py import sys def hdf5_datasets(grp): """Print CSV data of all datasets in an HDF5 file. path, shape, dtype """ writer = csv.writer(sys.stdout) writer.writerow(['path', 'shape', 'dtype']) def visitor(path, item): if isinstance(item, h5py.Dataset): writer.writerow([path, item.shape, item.dtype.str]) grp.visititems(visitor) def main(): file = h5py.File(sys.argv[1]) hdf5_datasets(file) if __name__ == '__main__': main() ## Instruction: Sort datasets for index of HDF5 files ## Code After: import csv import h5py import sys def hdf5_datasets(grp): """Print CSV data of all datasets in an HDF5 file. path, shape, dtype """ all_datasets = [] def visitor(path, item): if isinstance(item, h5py.Dataset): all_datasets.append([path, item.shape, item.dtype.str]) grp.visititems(visitor) writer = csv.writer(sys.stdout) writer.writerow(['path', 'shape', 'dtype']) for row in sorted(all_datasets): writer.writerow(row) def main(): file = h5py.File(sys.argv[1]) hdf5_datasets(file) if __name__ == '__main__': main()
// ... existing code ... path, shape, dtype """ all_datasets = [] def visitor(path, item): if isinstance(item, h5py.Dataset): all_datasets.append([path, item.shape, item.dtype.str]) grp.visititems(visitor) writer = csv.writer(sys.stdout) writer.writerow(['path', 'shape', 'dtype']) for row in sorted(all_datasets): writer.writerow(row) def main(): // ... rest of the code ...
058e2e75384052dcc2b90690cef695e4533eb854
scripts/insert_demo.py
scripts/insert_demo.py
"""Insert the demo into the codemirror site.""" from __future__ import print_function import os import fileinput import shutil proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) code_mirror_path = os.path.join( proselint_path, "plugins", "webeditor") code_mirror_demo_path = os.path.join(code_mirror_path, "index.html") live_write_path = os.path.join(proselint_path, "site", "write") shutil.copytree(code_mirror_path, live_write_path) demo_path = os.path.join(proselint_path, "proselint", "demo.md") with open(demo_path, "r") as f: demo = f.read() for line in fileinput.input( os.path.join(live_write_path, "index.html"), inplace=True): if "##DEMO_PLACEHOLDER##" in line: print(demo, end=' ') else: print(line, end=' ')
"""Insert the demo into the codemirror site.""" from __future__ import print_function import os import fileinput import shutil proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) code_mirror_path = os.path.join( proselint_path, "plugins", "webeditor") code_mirror_demo_path = os.path.join(code_mirror_path, "index.html") live_write_path = os.path.join(proselint_path, "site", "write") if os.path.exists(live_write_path): shutil.rmtree(live_write_path) shutil.copytree(code_mirror_path, live_write_path) demo_path = os.path.join(proselint_path, "proselint", "demo.md") with open(demo_path, "r") as f: demo = f.read() for line in fileinput.input( os.path.join(live_write_path, "index.html"), inplace=True): if "##DEMO_PLACEHOLDER##" in line: print(demo, end=' ') else: print(line, end=' ')
Delete live writing demo before loading new one
Delete live writing demo before loading new one
Python
bsd-3-clause
jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint,jstewmon/proselint
"""Insert the demo into the codemirror site.""" from __future__ import print_function import os import fileinput import shutil proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) code_mirror_path = os.path.join( proselint_path, "plugins", "webeditor") code_mirror_demo_path = os.path.join(code_mirror_path, "index.html") live_write_path = os.path.join(proselint_path, "site", "write") + if os.path.exists(live_write_path): + shutil.rmtree(live_write_path) shutil.copytree(code_mirror_path, live_write_path) demo_path = os.path.join(proselint_path, "proselint", "demo.md") with open(demo_path, "r") as f: demo = f.read() for line in fileinput.input( os.path.join(live_write_path, "index.html"), inplace=True): if "##DEMO_PLACEHOLDER##" in line: print(demo, end=' ') else: print(line, end=' ')
Delete live writing demo before loading new one
## Code Before: """Insert the demo into the codemirror site.""" from __future__ import print_function import os import fileinput import shutil proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) code_mirror_path = os.path.join( proselint_path, "plugins", "webeditor") code_mirror_demo_path = os.path.join(code_mirror_path, "index.html") live_write_path = os.path.join(proselint_path, "site", "write") shutil.copytree(code_mirror_path, live_write_path) demo_path = os.path.join(proselint_path, "proselint", "demo.md") with open(demo_path, "r") as f: demo = f.read() for line in fileinput.input( os.path.join(live_write_path, "index.html"), inplace=True): if "##DEMO_PLACEHOLDER##" in line: print(demo, end=' ') else: print(line, end=' ') ## Instruction: Delete live writing demo before loading new one ## Code After: """Insert the demo into the codemirror site.""" from __future__ import print_function import os import fileinput import shutil proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) code_mirror_path = os.path.join( proselint_path, "plugins", "webeditor") code_mirror_demo_path = os.path.join(code_mirror_path, "index.html") live_write_path = os.path.join(proselint_path, "site", "write") if os.path.exists(live_write_path): shutil.rmtree(live_write_path) shutil.copytree(code_mirror_path, live_write_path) demo_path = os.path.join(proselint_path, "proselint", "demo.md") with open(demo_path, "r") as f: demo = f.read() for line in fileinput.input( os.path.join(live_write_path, "index.html"), inplace=True): if "##DEMO_PLACEHOLDER##" in line: print(demo, end=' ') else: print(line, end=' ')
# ... existing code ... live_write_path = os.path.join(proselint_path, "site", "write") if os.path.exists(live_write_path): shutil.rmtree(live_write_path) shutil.copytree(code_mirror_path, live_write_path) # ... rest of the code ...
67b243915ef95ff1b9337bc67053d18df372e79d
unitypack/enums.py
unitypack/enums.py
from enum import IntEnum class RuntimePlatform(IntEnum): OSXEditor = 0 OSXPlayer = 1 WindowsPlayer = 2 OSXWebPlayer = 3 OSXDashboardPlayer = 4 WindowsWebPlayer = 5 WindowsEditor = 7 IPhonePlayer = 8 PS3 = 9 XBOX360 = 10 Android = 11 NaCl = 12 LinuxPlayer = 13 FlashPlayer = 15 WebGLPlayer = 17 MetroPlayerX86 = 18 WSAPlayerX86 = 18 MetroPlayerX64 = 19 WSAPlayerX64 = 19 MetroPlayerARM = 20 WSAPlayerARM = 20 WP8Player = 21 BB10Player = 22 BlackBerryPlayer = 22 TizenPlayer = 23 PSP2 = 24 PS4 = 25 PSM = 26 XboxOne = 27
from enum import IntEnum class RuntimePlatform(IntEnum): OSXEditor = 0 OSXPlayer = 1 WindowsPlayer = 2 OSXWebPlayer = 3 OSXDashboardPlayer = 4 WindowsWebPlayer = 5 WindowsEditor = 7 IPhonePlayer = 8 PS3 = 9 XBOX360 = 10 Android = 11 NaCl = 12 LinuxPlayer = 13 FlashPlayer = 15 WebGLPlayer = 17 MetroPlayerX86 = 18 WSAPlayerX86 = 18 MetroPlayerX64 = 19 WSAPlayerX64 = 19 MetroPlayerARM = 20 WSAPlayerARM = 20 WP8Player = 21 BB10Player = 22 BlackBerryPlayer = 22 TizenPlayer = 23 PSP2 = 24 PS4 = 25 PSM = 26 PSMPlayer = 26 XboxOne = 27 SamsungTVPlayer = 28
Add PSMPlayer and SamsungTVPlayer platforms
Add PSMPlayer and SamsungTVPlayer platforms
Python
mit
andburn/python-unitypack
from enum import IntEnum class RuntimePlatform(IntEnum): OSXEditor = 0 OSXPlayer = 1 WindowsPlayer = 2 OSXWebPlayer = 3 OSXDashboardPlayer = 4 WindowsWebPlayer = 5 WindowsEditor = 7 IPhonePlayer = 8 PS3 = 9 XBOX360 = 10 Android = 11 NaCl = 12 LinuxPlayer = 13 FlashPlayer = 15 WebGLPlayer = 17 MetroPlayerX86 = 18 WSAPlayerX86 = 18 MetroPlayerX64 = 19 WSAPlayerX64 = 19 MetroPlayerARM = 20 WSAPlayerARM = 20 WP8Player = 21 BB10Player = 22 BlackBerryPlayer = 22 TizenPlayer = 23 PSP2 = 24 PS4 = 25 PSM = 26 + PSMPlayer = 26 XboxOne = 27 + SamsungTVPlayer = 28
Add PSMPlayer and SamsungTVPlayer platforms
## Code Before: from enum import IntEnum class RuntimePlatform(IntEnum): OSXEditor = 0 OSXPlayer = 1 WindowsPlayer = 2 OSXWebPlayer = 3 OSXDashboardPlayer = 4 WindowsWebPlayer = 5 WindowsEditor = 7 IPhonePlayer = 8 PS3 = 9 XBOX360 = 10 Android = 11 NaCl = 12 LinuxPlayer = 13 FlashPlayer = 15 WebGLPlayer = 17 MetroPlayerX86 = 18 WSAPlayerX86 = 18 MetroPlayerX64 = 19 WSAPlayerX64 = 19 MetroPlayerARM = 20 WSAPlayerARM = 20 WP8Player = 21 BB10Player = 22 BlackBerryPlayer = 22 TizenPlayer = 23 PSP2 = 24 PS4 = 25 PSM = 26 XboxOne = 27 ## Instruction: Add PSMPlayer and SamsungTVPlayer platforms ## Code After: from enum import IntEnum class RuntimePlatform(IntEnum): OSXEditor = 0 OSXPlayer = 1 WindowsPlayer = 2 OSXWebPlayer = 3 OSXDashboardPlayer = 4 WindowsWebPlayer = 5 WindowsEditor = 7 IPhonePlayer = 8 PS3 = 9 XBOX360 = 10 Android = 11 NaCl = 12 LinuxPlayer = 13 FlashPlayer = 15 WebGLPlayer = 17 MetroPlayerX86 = 18 WSAPlayerX86 = 18 MetroPlayerX64 = 19 WSAPlayerX64 = 19 MetroPlayerARM = 20 WSAPlayerARM = 20 WP8Player = 21 BB10Player = 22 BlackBerryPlayer = 22 TizenPlayer = 23 PSP2 = 24 PS4 = 25 PSM = 26 PSMPlayer = 26 XboxOne = 27 SamsungTVPlayer = 28
... PS4 = 25 PSM = 26 PSMPlayer = 26 XboxOne = 27 SamsungTVPlayer = 28 ...
750dc7d4eddf691117cebf815e163a4d10af39cb
src/TulsiGenerator/Scripts/bazel_options.py
src/TulsiGenerator/Scripts/bazel_options.py
"""Logic to translate Xcode options to Bazel options.""" class BazelOptions(object): """Converts Xcode features into Bazel command line flags.""" def __init__(self, xcode_env): """Creates a new BazelOptions object. Args: xcode_env: A dictionary of Xcode environment variables. Returns: A BazelOptions instance. """ self.xcode_env = xcode_env def bazel_feature_flags(self): """Returns a list of bazel flags for the current Xcode env configuration.""" flags = [] if self.xcode_env.get('ENABLE_ADDRESS_SANITIZER') == 'YES': flags.extend([ '--features=asan', ]) return flags
"""Logic to translate Xcode options to Bazel options.""" class BazelOptions(object): """Converts Xcode features into Bazel command line flags.""" def __init__(self, xcode_env): """Creates a new BazelOptions object. Args: xcode_env: A dictionary of Xcode environment variables. Returns: A BazelOptions instance. """ self.xcode_env = xcode_env def bazel_feature_flags(self): """Returns a list of bazel flags for the current Xcode env configuration.""" flags = [] if self.xcode_env.get('ENABLE_ADDRESS_SANITIZER') == 'YES': flags.append('--features=asan') if self.xcode_env.get('ENABLE_THREAD_SANITIZER') == 'YES': flags.append('--features=tsan') if self.xcode_env.get('ENABLE_UNDEFINED_BEHAVIOR_SANITIZER') == 'YES': flags.append('--features=ubsan') return flags
Support enabling tsan and ubsan from Xcode UI
Support enabling tsan and ubsan from Xcode UI

Xcode won't let you enable ubsan from the UI as it requires a 'Compile Sources' phase with (Objective-)C(++) sources, but if you manually edit the scheme and enable it or equivalently add the phase with a dummy file, enable it from the UI, and then remove the phase, ubsan will work.

PiperOrigin-RevId: 196831918
Python
apache-2.0
pinterest/tulsi,bazelbuild/tulsi,bazelbuild/tulsi,bazelbuild/tulsi,bazelbuild/tulsi,pinterest/tulsi,bazelbuild/tulsi,bazelbuild/tulsi,pinterest/tulsi,pinterest/tulsi,pinterest/tulsi,pinterest/tulsi
"""Logic to translate Xcode options to Bazel options.""" class BazelOptions(object): """Converts Xcode features into Bazel command line flags.""" def __init__(self, xcode_env): """Creates a new BazelOptions object. Args: xcode_env: A dictionary of Xcode environment variables. Returns: A BazelOptions instance. """ self.xcode_env = xcode_env def bazel_feature_flags(self): """Returns a list of bazel flags for the current Xcode env configuration.""" flags = [] if self.xcode_env.get('ENABLE_ADDRESS_SANITIZER') == 'YES': - flags.extend([ - '--features=asan', - ]) + flags.append('--features=asan') + if self.xcode_env.get('ENABLE_THREAD_SANITIZER') == 'YES': + flags.append('--features=tsan') + if self.xcode_env.get('ENABLE_UNDEFINED_BEHAVIOR_SANITIZER') == 'YES': + flags.append('--features=ubsan') return flags
Support enabling tsan and ubsan from Xcode UI
## Code Before: """Logic to translate Xcode options to Bazel options.""" class BazelOptions(object): """Converts Xcode features into Bazel command line flags.""" def __init__(self, xcode_env): """Creates a new BazelOptions object. Args: xcode_env: A dictionary of Xcode environment variables. Returns: A BazelOptions instance. """ self.xcode_env = xcode_env def bazel_feature_flags(self): """Returns a list of bazel flags for the current Xcode env configuration.""" flags = [] if self.xcode_env.get('ENABLE_ADDRESS_SANITIZER') == 'YES': flags.extend([ '--features=asan', ]) return flags ## Instruction: Support enabling tsan and ubsan from Xcode UI ## Code After: """Logic to translate Xcode options to Bazel options.""" class BazelOptions(object): """Converts Xcode features into Bazel command line flags.""" def __init__(self, xcode_env): """Creates a new BazelOptions object. Args: xcode_env: A dictionary of Xcode environment variables. Returns: A BazelOptions instance. """ self.xcode_env = xcode_env def bazel_feature_flags(self): """Returns a list of bazel flags for the current Xcode env configuration.""" flags = [] if self.xcode_env.get('ENABLE_ADDRESS_SANITIZER') == 'YES': flags.append('--features=asan') if self.xcode_env.get('ENABLE_THREAD_SANITIZER') == 'YES': flags.append('--features=tsan') if self.xcode_env.get('ENABLE_UNDEFINED_BEHAVIOR_SANITIZER') == 'YES': flags.append('--features=ubsan') return flags
... flags = [] if self.xcode_env.get('ENABLE_ADDRESS_SANITIZER') == 'YES': flags.append('--features=asan') if self.xcode_env.get('ENABLE_THREAD_SANITIZER') == 'YES': flags.append('--features=tsan') if self.xcode_env.get('ENABLE_UNDEFINED_BEHAVIOR_SANITIZER') == 'YES': flags.append('--features=ubsan') return flags ...
49724932966fc509b202a80b6dcb9b309f0135a7
flexget/__init__.py
flexget/__init__.py
from __future__ import unicode_literals, division, absolute_import import logging import os from flexget import logger, plugin from flexget.manager import Manager from flexget.options import get_parser __version__ = '{git}' log = logging.getLogger('main') def main(args=None): """Main entry point for Command Line Interface""" logger.initialize() plugin.load_plugins() options = get_parser().parse_args(args) manager = Manager(options) if options.profile: try: import cProfile as profile except ImportError: import profile profile.runctx('manager.start()', globals(), locals(), os.path.join(manager.config_base, options.profile)) else: manager.start()
from __future__ import unicode_literals, division, absolute_import __version__ = '{git}' import logging import os from flexget import logger, plugin from flexget.manager import Manager from flexget.options import get_parser log = logging.getLogger('main') def main(args=None): """Main entry point for Command Line Interface""" logger.initialize() plugin.load_plugins() options = get_parser().parse_args(args) manager = Manager(options) if options.profile: try: import cProfile as profile except ImportError: import profile profile.runctx('manager.start()', globals(), locals(), os.path.join(manager.config_base, options.profile)) else: manager.start()
Move __version__ declaration before imports
Move __version__ declaration before imports
Python
mit
lildadou/Flexget,oxc/Flexget,JorisDeRieck/Flexget,tsnoam/Flexget,malkavi/Flexget,oxc/Flexget,crawln45/Flexget,tsnoam/Flexget,grrr2/Flexget,sean797/Flexget,dsemi/Flexget,drwyrm/Flexget,jawilson/Flexget,malkavi/Flexget,malkavi/Flexget,v17al/Flexget,jawilson/Flexget,drwyrm/Flexget,patsissons/Flexget,antivirtel/Flexget,tarzasai/Flexget,tobinjt/Flexget,jacobmetrick/Flexget,LynxyssCZ/Flexget,LynxyssCZ/Flexget,qk4l/Flexget,crawln45/Flexget,poulpito/Flexget,xfouloux/Flexget,offbyone/Flexget,tobinjt/Flexget,grrr2/Flexget,ratoaq2/Flexget,xfouloux/Flexget,tobinjt/Flexget,tobinjt/Flexget,lildadou/Flexget,Flexget/Flexget,Pretagonist/Flexget,ianstalk/Flexget,Pretagonist/Flexget,Flexget/Flexget,Flexget/Flexget,qvazzler/Flexget,Flexget/Flexget,grrr2/Flexget,tarzasai/Flexget,xfouloux/Flexget,sean797/Flexget,LynxyssCZ/Flexget,ZefQ/Flexget,jacobmetrick/Flexget,ratoaq2/Flexget,qk4l/Flexget,vfrc2/Flexget,Danfocus/Flexget,spencerjanssen/Flexget,gazpachoking/Flexget,ianstalk/Flexget,Danfocus/Flexget,malkavi/Flexget,vfrc2/Flexget,jawilson/Flexget,patsissons/Flexget,qvazzler/Flexget,thalamus/Flexget,v17al/Flexget,offbyone/Flexget,thalamus/Flexget,cvium/Flexget,v17al/Flexget,antivirtel/Flexget,jacobmetrick/Flexget,JorisDeRieck/Flexget,offbyone/Flexget,crawln45/Flexget,gazpachoking/Flexget,drwyrm/Flexget,ibrahimkarahan/Flexget,vfrc2/Flexget,oxc/Flexget,qk4l/Flexget,tvcsantos/Flexget,ratoaq2/Flexget,lildadou/Flexget,antivirtel/Flexget,ZefQ/Flexget,Pretagonist/Flexget,ianstalk/Flexget,poulpito/Flexget,ZefQ/Flexget,tsnoam/Flexget,JorisDeRieck/Flexget,camon/Flexget,JorisDeRieck/Flexget,ibrahimkarahan/Flexget,poulpito/Flexget,OmgOhnoes/Flexget,tarzasai/Flexget,thalamus/Flexget,dsemi/Flexget,sean797/Flexget,spencerjanssen/Flexget,cvium/Flexget,patsissons/Flexget,Danfocus/Flexget,dsemi/Flexget,tvcsantos/Flexget,ibrahimkarahan/Flexget,jawilson/Flexget,LynxyssCZ/Flexget,Danfocus/Flexget,crawln45/Flexget,cvium/Flexget,OmgOhnoes/Flexget,qvazzler/Flexget,OmgOhnoes/Flexget,spencerjanssen/Flexget,camon/Flexget
+ from __future__ import unicode_literals, division, absolute_import
- from __future__ import unicode_literals, division, absolute_import
+ __version__ = '{git}'
import logging
import os

from flexget import logger, plugin
from flexget.manager import Manager
from flexget.options import get_parser
-
- __version__ = '{git}'

log = logging.getLogger('main')


def main(args=None):
    """Main entry point for Command Line Interface"""
    logger.initialize()
    plugin.load_plugins()
    options = get_parser().parse_args(args)
    manager = Manager(options)
    if options.profile:
        try:
            import cProfile as profile
        except ImportError:
            import profile
        profile.runctx('manager.start()', globals(), locals(),
                       os.path.join(manager.config_base, options.profile))
    else:
        manager.start()
Move __version__ declaration before imports
## Code Before:
from __future__ import unicode_literals, division, absolute_import

import logging
import os

from flexget import logger, plugin
from flexget.manager import Manager
from flexget.options import get_parser

__version__ = '{git}'

log = logging.getLogger('main')


def main(args=None):
    """Main entry point for Command Line Interface"""
    logger.initialize()
    plugin.load_plugins()
    options = get_parser().parse_args(args)
    manager = Manager(options)
    if options.profile:
        try:
            import cProfile as profile
        except ImportError:
            import profile
        profile.runctx('manager.start()', globals(), locals(),
                       os.path.join(manager.config_base, options.profile))
    else:
        manager.start()

## Instruction:
Move __version__ declaration before imports

## Code After:
from __future__ import unicode_literals, division, absolute_import
__version__ = '{git}'

import logging
import os

from flexget import logger, plugin
from flexget.manager import Manager
from flexget.options import get_parser

log = logging.getLogger('main')


def main(args=None):
    """Main entry point for Command Line Interface"""
    logger.initialize()
    plugin.load_plugins()
    options = get_parser().parse_args(args)
    manager = Manager(options)
    if options.profile:
        try:
            import cProfile as profile
        except ImportError:
            import profile
        profile.runctx('manager.start()', globals(), locals(),
                       os.path.join(manager.config_base, options.profile))
    else:
        manager.start()
// ... existing code ...
from __future__ import unicode_literals, division, absolute_import
__version__ = '{git}'

import logging
// ... modified code ...
from flexget.manager import Manager
from flexget.options import get_parser

log = logging.getLogger('main')
// ... rest of the code ...
de310ce3cdd37a372f92559b7ddcf0397b9fb016
src/convert_dir_to_CLAHE.py
src/convert_dir_to_CLAHE.py
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter

# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD

dir = "/usr/people/tmacrina/Desktop/test/"

blocksize = 50
histogram_bins = 128
maximum_slope = 3
mask = "*None*"
composite = False
mask = None

files = os.listdir(dir)
files.sort()
for file in files:
    if file.endswith(".tif")
        fn = os.path.join(dir, file)
        imp = IJ.openImage(path)
        output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
        imp = IJ.openImage(fn)

        Flat.getFastInstance().run( imp,
                                    blocksize,
                                    histogram_bins,
                                    maximum_slope,
                                    mask,
                                    composite )
        ImageConverter(imp).convertToGray8()
        IJ.save(imp, output_fn)
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter

# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD

dir = "/usr/people/tmacrina/seungmount/research/Julimaps/datasets/AIBS_pilot_v1/0_raw/"

blocksize = 63
histogram_bins = 255
maximum_slope = 3
mask = "*None*"
composite = False
mask = None

# files = os.listdir(dir)
# files.sort()
# for file in files:
#     if file.endswith(".tif")
fn = os.path.join(dir, 'original.tif')
imp = IJ.openImage(fn)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)

Flat.getFastInstance().run( imp,
                            blocksize,
                            histogram_bins,
                            maximum_slope,
                            mask,
                            composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
Adjust FIJI script for applying CLAHE to a directory
Adjust FIJI script for applying CLAHE to a directory
Python
mit
seung-lab/Julimaps,seung-lab/Julimaps
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter

# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD

- dir = "/usr/people/tmacrina/Desktop/test/"
+ dir = "/usr/people/tmacrina/seungmount/research/Julimaps/datasets/AIBS_pilot_v1/0_raw/"

- blocksize = 50
+ blocksize = 63
- histogram_bins = 128
+ histogram_bins = 255
maximum_slope = 3
mask = "*None*"
composite = False
mask = None

- files = os.listdir(dir)
+ # files = os.listdir(dir)
- files.sort()
+ # files.sort()
- for file in files:
+ # for file in files:
-     if file.endswith(".tif")
+ #     if file.endswith(".tif")
-         fn = os.path.join(dir, file)
-         imp = IJ.openImage(path)
+ fn = os.path.join(dir, 'original.tif')
+ imp = IJ.openImage(fn)
-         output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
+ output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
-         imp = IJ.openImage(fn)
+ imp = IJ.openImage(fn)
-
+
-         Flat.getFastInstance().run( imp,
+ Flat.getFastInstance().run( imp,
-                                     blocksize,
+                             blocksize,
-                                     histogram_bins,
+                             histogram_bins,
-                                     maximum_slope,
+                             maximum_slope,
-                                     mask,
+                             mask,
-                                     composite )
+                             composite )
-         ImageConverter(imp).convertToGray8()
+ ImageConverter(imp).convertToGray8()
-         IJ.save(imp, output_fn)
+ IJ.save(imp, output_fn)
Adjust FIJI script for applying CLAHE to a directory
## Code Before:
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter

# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD

dir = "/usr/people/tmacrina/Desktop/test/"

blocksize = 50
histogram_bins = 128
maximum_slope = 3
mask = "*None*"
composite = False
mask = None

files = os.listdir(dir)
files.sort()
for file in files:
    if file.endswith(".tif")
        fn = os.path.join(dir, file)
        imp = IJ.openImage(path)
        output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
        imp = IJ.openImage(fn)

        Flat.getFastInstance().run( imp,
                                    blocksize,
                                    histogram_bins,
                                    maximum_slope,
                                    mask,
                                    composite )
        ImageConverter(imp).convertToGray8()
        IJ.save(imp, output_fn)

## Instruction:
Adjust FIJI script for applying CLAHE to a directory

## Code After:
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter

# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD

dir = "/usr/people/tmacrina/seungmount/research/Julimaps/datasets/AIBS_pilot_v1/0_raw/"

blocksize = 63
histogram_bins = 255
maximum_slope = 3
mask = "*None*"
composite = False
mask = None

# files = os.listdir(dir)
# files.sort()
# for file in files:
#     if file.endswith(".tif")
fn = os.path.join(dir, 'original.tif')
imp = IJ.openImage(fn)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)

Flat.getFastInstance().run( imp,
                            blocksize,
                            histogram_bins,
                            maximum_slope,
                            mask,
                            composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
// ... existing code ...
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD

dir = "/usr/people/tmacrina/seungmount/research/Julimaps/datasets/AIBS_pilot_v1/0_raw/"

blocksize = 63
histogram_bins = 255
maximum_slope = 3
mask = "*None*"
// ... modified code ...
mask = None

# files = os.listdir(dir)
# files.sort()
# for file in files:
#     if file.endswith(".tif")
fn = os.path.join(dir, 'original.tif')
imp = IJ.openImage(fn)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)

Flat.getFastInstance().run( imp,
                            blocksize,
                            histogram_bins,
                            maximum_slope,
                            mask,
                            composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
// ... rest of the code ...
0050711d85ba4084e9d0f32d3bad1b3400350476
name/feeds.py
name/feeds.py
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed

from . import app_settings
from .models import Name


class NameAtomFeedType(Atom1Feed):
    """Create an Atom feed that sets the Content-Type response
    header to application/xml.
    """
    mime_type = 'application/xml'


class NameAtomFeed(Feed):
    feed_type = NameAtomFeedType
    link = reverse_lazy("name_feed")
    title = "Name App"
    subtitle = "New Name Records"
    author_name = app_settings.NAME_FEED_AUTHOR_NAME
    author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
    author_link = app_settings.NAME_FEED_AUTHOR_LINK

    def items(self):
        # last 5 added items
        return Name.objects.order_by('-date_created')[:20]

    def item_title(self, obj):
        return obj.name

    def item_description(self, obj):
        return 'Name Type: {0}'.format(obj.get_name_type_label())

    def item_link(self, obj):
        return obj.get_absolute_url()
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed

from . import app_settings
from .models import Name


class NameAtomFeedType(Atom1Feed):
    """Create an Atom feed that sets the Content-Type response
    header to application/xml.
    """
    mime_type = 'application/xml'

    def root_attributes(self):
        attrs = super(NameAtomFeedType, self).root_attributes()
        attrs['xmlns:georss'] = 'http://www.georss.org/georss'
        return attrs

    def add_item_elements(self, handler, item):
        super(NameAtomFeedType, self).add_item_elements(handler, item)
        if item.get('location'):
            handler.addQuickElement('georss:point', item['location'])


class NameAtomFeed(Feed):
    feed_type = NameAtomFeedType
    link = reverse_lazy("name_feed")
    title = "Name App"
    subtitle = "New Name Records"
    author_name = app_settings.NAME_FEED_AUTHOR_NAME
    author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
    author_link = app_settings.NAME_FEED_AUTHOR_LINK

    def items(self):
        # last 5 added items
        return Name.objects.order_by('-date_created')[:20]

    def item_title(self, obj):
        return obj.name

    def item_description(self, obj):
        return 'Name Type: {0}'.format(obj.get_name_type_label())

    def item_link(self, obj):
        return obj.get_absolute_url()

    def item_location(self, obj):
        if obj.has_locations() and obj.location_set.current_location:
            return obj.location_set.current_location.geo_point()

    def item_extra_kwargs(self, obj):
        return dict(location=self.item_location(obj))
Add the location as a georss:point element.
Add the location as a georss:point element.
Python
bsd-3-clause
damonkelley/django-name,damonkelley/django-name,unt-libraries/django-name,damonkelley/django-name,unt-libraries/django-name,unt-libraries/django-name
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed

from . import app_settings
from .models import Name


class NameAtomFeedType(Atom1Feed):
    """Create an Atom feed that sets the Content-Type response
    header to application/xml.
    """
    mime_type = 'application/xml'
+
+     def root_attributes(self):
+         attrs = super(NameAtomFeedType, self).root_attributes()
+         attrs['xmlns:georss'] = 'http://www.georss.org/georss'
+         return attrs
+
+     def add_item_elements(self, handler, item):
+         super(NameAtomFeedType, self).add_item_elements(handler, item)
+         if item.get('location'):
+             handler.addQuickElement('georss:point', item['location'])


class NameAtomFeed(Feed):
    feed_type = NameAtomFeedType
    link = reverse_lazy("name_feed")
    title = "Name App"
    subtitle = "New Name Records"
    author_name = app_settings.NAME_FEED_AUTHOR_NAME
    author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
    author_link = app_settings.NAME_FEED_AUTHOR_LINK

    def items(self):
        # last 5 added items
        return Name.objects.order_by('-date_created')[:20]

    def item_title(self, obj):
        return obj.name

    def item_description(self, obj):
        return 'Name Type: {0}'.format(obj.get_name_type_label())

    def item_link(self, obj):
        return obj.get_absolute_url()

+     def item_location(self, obj):
+         if obj.has_locations() and obj.location_set.current_location:
+             return obj.location_set.current_location.geo_point()
+
+     def item_extra_kwargs(self, obj):
+         return dict(location=self.item_location(obj))
+
Add the location as a georss:point element.
## Code Before:
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed

from . import app_settings
from .models import Name


class NameAtomFeedType(Atom1Feed):
    """Create an Atom feed that sets the Content-Type response
    header to application/xml.
    """
    mime_type = 'application/xml'


class NameAtomFeed(Feed):
    feed_type = NameAtomFeedType
    link = reverse_lazy("name_feed")
    title = "Name App"
    subtitle = "New Name Records"
    author_name = app_settings.NAME_FEED_AUTHOR_NAME
    author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
    author_link = app_settings.NAME_FEED_AUTHOR_LINK

    def items(self):
        # last 5 added items
        return Name.objects.order_by('-date_created')[:20]

    def item_title(self, obj):
        return obj.name

    def item_description(self, obj):
        return 'Name Type: {0}'.format(obj.get_name_type_label())

    def item_link(self, obj):
        return obj.get_absolute_url()

## Instruction:
Add the location as a georss:point element.

## Code After:
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.utils.feedgenerator import Atom1Feed

from . import app_settings
from .models import Name


class NameAtomFeedType(Atom1Feed):
    """Create an Atom feed that sets the Content-Type response
    header to application/xml.
    """
    mime_type = 'application/xml'

    def root_attributes(self):
        attrs = super(NameAtomFeedType, self).root_attributes()
        attrs['xmlns:georss'] = 'http://www.georss.org/georss'
        return attrs

    def add_item_elements(self, handler, item):
        super(NameAtomFeedType, self).add_item_elements(handler, item)
        if item.get('location'):
            handler.addQuickElement('georss:point', item['location'])


class NameAtomFeed(Feed):
    feed_type = NameAtomFeedType
    link = reverse_lazy("name_feed")
    title = "Name App"
    subtitle = "New Name Records"
    author_name = app_settings.NAME_FEED_AUTHOR_NAME
    author_email = app_settings.NAME_FEED_AUTHOR_EMAIL
    author_link = app_settings.NAME_FEED_AUTHOR_LINK

    def items(self):
        # last 5 added items
        return Name.objects.order_by('-date_created')[:20]

    def item_title(self, obj):
        return obj.name

    def item_description(self, obj):
        return 'Name Type: {0}'.format(obj.get_name_type_label())

    def item_link(self, obj):
        return obj.get_absolute_url()

    def item_location(self, obj):
        if obj.has_locations() and obj.location_set.current_location:
            return obj.location_set.current_location.geo_point()

    def item_extra_kwargs(self, obj):
        return dict(location=self.item_location(obj))
# ... existing code ...
    """
    mime_type = 'application/xml'

    def root_attributes(self):
        attrs = super(NameAtomFeedType, self).root_attributes()
        attrs['xmlns:georss'] = 'http://www.georss.org/georss'
        return attrs

    def add_item_elements(self, handler, item):
        super(NameAtomFeedType, self).add_item_elements(handler, item)
        if item.get('location'):
            handler.addQuickElement('georss:point', item['location'])
# ... modified code ...
    def item_link(self, obj):
        return obj.get_absolute_url()

    def item_location(self, obj):
        if obj.has_locations() and obj.location_set.current_location:
            return obj.location_set.current_location.geo_point()

    def item_extra_kwargs(self, obj):
        return dict(location=self.item_location(obj))
# ... rest of the code ...
ee6bd389e3e602b67fac399cdb4a50c3a67666b9
twitter/admin.py
twitter/admin.py
from django.contrib import admin

from twitter.models import User, Tweet, Analytics, AnalyticsReport


class UserAdmin(admin.ModelAdmin):
    list_display = ('screen_name', 'current_followers')


class AnalyticsAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'followers',
        'following',
        'listed',
        'tweet_count',
        'retweet_count',
        'reply_count',
        'user_mention_count',
        'link_count',
        'hashtag_count',
    )


class AnalyticsReportAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'tweets_reweeted_count',
        'tweets_favorited_count',
    )


admin.site.register(User, UserAdmin)
admin.site.register(Tweet)
admin.site.register(Analytics, AnalyticsAdmin)
admin.site.register(AnalyticsReport, AnalyticsReportAdmin)
from django.contrib import admin

from twitter.models import User, Tweet, Analytics, AnalyticsReport


class UserAdmin(admin.ModelAdmin):
    list_display = ('screen_name', 'current_followers')


class AnalyticsAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'followers',
        'following',
        'listed',
        'tweet_count',
        'retweet_count',
        'reply_count',
        'user_mention_count',
        'link_count',
        'hashtag_count',
    )
    list_filter = ('user',)


class AnalyticsReportAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'tweets_reweeted_count',
        'tweets_favorited_count',
    )


admin.site.register(User, UserAdmin)
admin.site.register(Tweet)
admin.site.register(Analytics, AnalyticsAdmin)
admin.site.register(AnalyticsReport, AnalyticsReportAdmin)
Add a list filter to make looking a specific users easier.
Add a list filter to make looking a specific users easier.
Python
mit
CIGIHub/tweet_cache,albertoconnor/tweet_cache
from django.contrib import admin

from twitter.models import User, Tweet, Analytics, AnalyticsReport


class UserAdmin(admin.ModelAdmin):
    list_display = ('screen_name', 'current_followers')


class AnalyticsAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'followers',
        'following',
        'listed',
        'tweet_count',
        'retweet_count',
        'reply_count',
        'user_mention_count',
        'link_count',
        'hashtag_count',
    )
+     list_filter = ('user',)


class AnalyticsReportAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'tweets_reweeted_count',
        'tweets_favorited_count',
    )


admin.site.register(User, UserAdmin)
admin.site.register(Tweet)
admin.site.register(Analytics, AnalyticsAdmin)
admin.site.register(AnalyticsReport, AnalyticsReportAdmin)
Add a list filter to make looking a specific users easier.
## Code Before:
from django.contrib import admin

from twitter.models import User, Tweet, Analytics, AnalyticsReport


class UserAdmin(admin.ModelAdmin):
    list_display = ('screen_name', 'current_followers')


class AnalyticsAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'followers',
        'following',
        'listed',
        'tweet_count',
        'retweet_count',
        'reply_count',
        'user_mention_count',
        'link_count',
        'hashtag_count',
    )


class AnalyticsReportAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'tweets_reweeted_count',
        'tweets_favorited_count',
    )


admin.site.register(User, UserAdmin)
admin.site.register(Tweet)
admin.site.register(Analytics, AnalyticsAdmin)
admin.site.register(AnalyticsReport, AnalyticsReportAdmin)

## Instruction:
Add a list filter to make looking a specific users easier.

## Code After:
from django.contrib import admin

from twitter.models import User, Tweet, Analytics, AnalyticsReport


class UserAdmin(admin.ModelAdmin):
    list_display = ('screen_name', 'current_followers')


class AnalyticsAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'followers',
        'following',
        'listed',
        'tweet_count',
        'retweet_count',
        'reply_count',
        'user_mention_count',
        'link_count',
        'hashtag_count',
    )
    list_filter = ('user',)


class AnalyticsReportAdmin(admin.ModelAdmin):
    list_display = (
        'date',
        'user',
        'tweets_reweeted_count',
        'tweets_favorited_count',
    )


admin.site.register(User, UserAdmin)
admin.site.register(Tweet)
admin.site.register(Analytics, AnalyticsAdmin)
admin.site.register(AnalyticsReport, AnalyticsReportAdmin)
# ... existing code ...
        'hashtag_count',
    )
    list_filter = ('user',)
# ... rest of the code ...
9cdd86499013c1deac7caeb8320c34294789f716
py/garage/garage/asyncs/actors.py
py/garage/garage/asyncs/actors.py
"""Asynchronous support for garage.threads.actors.""" __all__ = [ 'StubAdapter', ] from garage.asyncs import futures class StubAdapter: """Wrap all method calls, adding FutureAdapter on their result. While this simple adapter does not work for all corner cases, for common cases, it should work fine. """ def __init__(self, stub): super().__setattr__('_stub', stub) def __getattr__(self, name): method = getattr(self._stub, name) # Simple foolproof detection of non-message-sending access if name.startswith('_'): return method return lambda *args, **kwargs: \ futures.FutureAdapter(method(*args, **kwargs)) def _get_future(self): return futures.FutureAdapter(self._stub._get_future()) def _send_message(self, func, args, kwargs): """Enqueue a message into actor's message queue. Since this does not block, it may raise Full when the message queue is full. """ future = self._stub._send_message(func, args, kwargs, block=False) return futures.FutureAdapter(future)
"""Asynchronous support for garage.threads.actors.""" __all__ = [ 'StubAdapter', ] from garage.asyncs import futures class StubAdapter: """Wrap all method calls, adding FutureAdapter on their result. While this simple adapter does not work for all corner cases, for common cases, it should work fine. """ def __init__(self, stub): super().__setattr__('_stub', stub) def __getattr__(self, name): method = getattr(self._stub, name) # Simple foolproof detection of non-message-sending access if name.startswith('_'): return method return lambda *args, **kwargs: \ futures.FutureAdapter(method(*args, **kwargs)) def _get_future(self): return futures.FutureAdapter(self._stub._get_future()) def _send_message(self, func, args, kwargs): """Enqueue a message into actor's message queue. Since this does not block, it may raise Full when the message queue is full. """ future = self._stub._send_message(func, args, kwargs, block=False) return futures.FutureAdapter(future) async def _kill_and_join(self, graceful=True): self._kill(graceful=graceful) await self._get_future().result()
Add _kill_and_join to async actor stub
Add _kill_and_join to async actor stub
Python
mit
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
"""Asynchronous support for garage.threads.actors.""" __all__ = [ 'StubAdapter', ] from garage.asyncs import futures class StubAdapter: """Wrap all method calls, adding FutureAdapter on their result. While this simple adapter does not work for all corner cases, for common cases, it should work fine. """ def __init__(self, stub): super().__setattr__('_stub', stub) def __getattr__(self, name): method = getattr(self._stub, name) # Simple foolproof detection of non-message-sending access if name.startswith('_'): return method return lambda *args, **kwargs: \ futures.FutureAdapter(method(*args, **kwargs)) def _get_future(self): return futures.FutureAdapter(self._stub._get_future()) def _send_message(self, func, args, kwargs): """Enqueue a message into actor's message queue. Since this does not block, it may raise Full when the message queue is full. """ future = self._stub._send_message(func, args, kwargs, block=False) return futures.FutureAdapter(future) + async def _kill_and_join(self, graceful=True): + self._kill(graceful=graceful) + await self._get_future().result() +
Add _kill_and_join to async actor stub
## Code Before: """Asynchronous support for garage.threads.actors.""" __all__ = [ 'StubAdapter', ] from garage.asyncs import futures class StubAdapter: """Wrap all method calls, adding FutureAdapter on their result. While this simple adapter does not work for all corner cases, for common cases, it should work fine. """ def __init__(self, stub): super().__setattr__('_stub', stub) def __getattr__(self, name): method = getattr(self._stub, name) # Simple foolproof detection of non-message-sending access if name.startswith('_'): return method return lambda *args, **kwargs: \ futures.FutureAdapter(method(*args, **kwargs)) def _get_future(self): return futures.FutureAdapter(self._stub._get_future()) def _send_message(self, func, args, kwargs): """Enqueue a message into actor's message queue. Since this does not block, it may raise Full when the message queue is full. """ future = self._stub._send_message(func, args, kwargs, block=False) return futures.FutureAdapter(future) ## Instruction: Add _kill_and_join to async actor stub ## Code After: """Asynchronous support for garage.threads.actors.""" __all__ = [ 'StubAdapter', ] from garage.asyncs import futures class StubAdapter: """Wrap all method calls, adding FutureAdapter on their result. While this simple adapter does not work for all corner cases, for common cases, it should work fine. """ def __init__(self, stub): super().__setattr__('_stub', stub) def __getattr__(self, name): method = getattr(self._stub, name) # Simple foolproof detection of non-message-sending access if name.startswith('_'): return method return lambda *args, **kwargs: \ futures.FutureAdapter(method(*args, **kwargs)) def _get_future(self): return futures.FutureAdapter(self._stub._get_future()) def _send_message(self, func, args, kwargs): """Enqueue a message into actor's message queue. Since this does not block, it may raise Full when the message queue is full. """ future = self._stub._send_message(func, args, kwargs, block=False) return futures.FutureAdapter(future) async def _kill_and_join(self, graceful=True): self._kill(graceful=graceful) await self._get_future().result()
... future = self._stub._send_message(func, args, kwargs, block=False) return futures.FutureAdapter(future) async def _kill_and_join(self, graceful=True): self._kill(graceful=graceful) await self._get_future().result() ...
f47781055326d6f259dd1b0d4b6be9cf47554977
craigschart/craigschart.py
craigschart/craigschart.py
from bs4 import BeautifulSoup
import requests


def get_html():
    r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition')
    print(r.status_code)
    print(r.text)
    return r.text


def main():
    html = get_html()
    soup = BeautifulSoup(html, 'lxml')
    print(soup.prettify())

    print('Pages:\n\n')
    mydivs = soup.findAll('a', {'class': 'hdrlnk'})
    for t in mydivs:
        print(t['href'])

    totalcount_span = soup.find('span', {'class': 'totalcount'})
    total_count = int(totalcount_span.string)
    print('Total result count: {}\n\n'.format(total_count))

    print('Buttons:')
    next_page = soup.findAll('a', {'class': 'button next'})
    for t in next_page:
        print(t['href'])


if __name__ == '__main__':
    main()
from bs4 import BeautifulSoup
import requests


def get_html(url):
    r = requests.get(url)
    return r.text


def add_start(url, start):
    parts = url.split('?')
    return parts[0] + '?s={}'.format(start) + '&' + parts[1]


def main():
    url = 'http://vancouver.craigslist.ca/search/cto?query=Expedition'
    html = get_html(url)
    soup = BeautifulSoup(html, 'lxml')
    print(soup.prettify())

    print('Pages:\n\n')
    links = soup.findAll('a', {'class': 'hdrlnk'})
    for link in links:
        print(link['href'])
    all_links = links

    totalcount_span = soup.find('span', {'class': 'totalcount'})
    total_count = int(totalcount_span.string)
    print('Total result count: {}\n\n'.format(total_count))

    for start in range(0, total_count, 100):
        print('Querying records {}'.format(start))
        if start == 0:  # first page already done
            continue
        query = add_start(url, start)

        html = get_html(query)

        soup = BeautifulSoup(html, 'lxml')

        print('Pages:\n\n')
        links = soup.findAll('a', {'class': 'hdrlnk'})
        for link in links:
            print(link['href'])
        all_links.append(links)

    print('Found {} results'.format(len(all_links)))


if __name__ == '__main__':
    main()
Enable search of paginated pages
Enable search of paginated pages
Python
mit
supermitch/craigschart
from bs4 import BeautifulSoup
import requests

+
- def get_html():
+ def get_html(url):
+     r = requests.get(url)
-     r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition')
-     print(r.status_code)
-     print(r.text)
    return r.text
+
+ def add_start(url, start):
+     parts = url.split('?')
+     return parts[0] + '?s={}'.format(start) + '&' + parts[1]
+
+
def main():
+     url = 'http://vancouver.craigslist.ca/search/cto?query=Expedition'
-     html = get_html()
+     html = get_html(url)
    soup = BeautifulSoup(html, 'lxml')
    print(soup.prettify())

    print('Pages:\n\n')
-     mydivs = soup.findAll('a', {'class': 'hdrlnk'})
+     links = soup.findAll('a', {'class': 'hdrlnk'})
-     for t in mydivs:
+     for link in links:
-         print(t['href'])
+         print(link['href'])
+     all_links = links

    totalcount_span = soup.find('span', {'class': 'totalcount'})
    total_count = int(totalcount_span.string)
    print('Total result count: {}\n\n'.format(total_count))

-     print('Buttons:')
-     next_page = soup.findAll('a', {'class': 'button next'})
-     for t in next_page:
+     for start in range(0, total_count, 100):
+         print('Querying records {}'.format(start))
+         if start == 0:  # first page already done
+             continue
+         query = add_start(url, start)
+
+         html = get_html(query)
+
+         soup = BeautifulSoup(html, 'lxml')
+
+         print('Pages:\n\n')
+         links = soup.findAll('a', {'class': 'hdrlnk'})
+         for link in links:
-         print(t['href'])
+             print(link['href'])
+         all_links.append(links)
+
+     print('Found {} results'.format(len(all_links)))
+

if __name__ == '__main__':
    main()
Enable search of paginated pages
## Code Before:
from bs4 import BeautifulSoup
import requests


def get_html():
    r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition')
    print(r.status_code)
    print(r.text)
    return r.text


def main():
    html = get_html()
    soup = BeautifulSoup(html, 'lxml')
    print(soup.prettify())

    print('Pages:\n\n')
    mydivs = soup.findAll('a', {'class': 'hdrlnk'})
    for t in mydivs:
        print(t['href'])

    totalcount_span = soup.find('span', {'class': 'totalcount'})
    total_count = int(totalcount_span.string)
    print('Total result count: {}\n\n'.format(total_count))

    print('Buttons:')
    next_page = soup.findAll('a', {'class': 'button next'})
    for t in next_page:
        print(t['href'])


if __name__ == '__main__':
    main()

## Instruction:
Enable search of paginated pages

## Code After:
from bs4 import BeautifulSoup
import requests


def get_html(url):
    r = requests.get(url)
    return r.text


def add_start(url, start):
    parts = url.split('?')
    return parts[0] + '?s={}'.format(start) + '&' + parts[1]


def main():
    url = 'http://vancouver.craigslist.ca/search/cto?query=Expedition'
    html = get_html(url)
    soup = BeautifulSoup(html, 'lxml')
    print(soup.prettify())

    print('Pages:\n\n')
    links = soup.findAll('a', {'class': 'hdrlnk'})
    for link in links:
        print(link['href'])
    all_links = links

    totalcount_span = soup.find('span', {'class': 'totalcount'})
    total_count = int(totalcount_span.string)
    print('Total result count: {}\n\n'.format(total_count))

    for start in range(0, total_count, 100):
        print('Querying records {}'.format(start))
        if start == 0:  # first page already done
            continue
        query = add_start(url, start)

        html = get_html(query)

        soup = BeautifulSoup(html, 'lxml')

        print('Pages:\n\n')
        links = soup.findAll('a', {'class': 'hdrlnk'})
        for link in links:
            print(link['href'])
        all_links.append(links)

    print('Found {} results'.format(len(all_links)))


if __name__ == '__main__':
    main()
// ... existing code ...
import requests


def get_html(url):
    r = requests.get(url)
    return r.text


def add_start(url, start):
    parts = url.split('?')
    return parts[0] + '?s={}'.format(start) + '&' + parts[1]


def main():
    url = 'http://vancouver.craigslist.ca/search/cto?query=Expedition'
    html = get_html(url)
    soup = BeautifulSoup(html, 'lxml')
// ... modified code ...
    print('Pages:\n\n')
    links = soup.findAll('a', {'class': 'hdrlnk'})
    for link in links:
        print(link['href'])
    all_links = links

    totalcount_span = soup.find('span', {'class': 'totalcount'})
...
    print('Total result count: {}\n\n'.format(total_count))

    for start in range(0, total_count, 100):
        print('Querying records {}'.format(start))
        if start == 0:  # first page already done
            continue
        query = add_start(url, start)

        html = get_html(query)

        soup = BeautifulSoup(html, 'lxml')

        print('Pages:\n\n')
        links = soup.findAll('a', {'class': 'hdrlnk'})
        for link in links:
            print(link['href'])
        all_links.append(links)

    print('Found {} results'.format(len(all_links)))


if __name__ == '__main__':
// ... rest of the code ...
75ee8c74af18c2ac9b3f4975d79a5d799ccc46da
pylatex/graphics.py
pylatex/graphics.py
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command


class Figure(BaseLaTeXNamedContainer):

    """A class that represents a Graphic."""

    def __init__(self, data=None, position=None):
        packages = [Package('graphicx')]
        super().__init__('figure', data=data, packages=packages,
                         options=position)

    def add_image(self, filename, width=r'0.8\textwidth',
                  placement=r'\centering'):
        if placement is not None:
            self.append(placement)

        self.append(Command('includegraphics', options='width=' + width,
                            arguments=fix_filename(filename)))

    def add_caption(self, caption):
        """Add a caption to the figure"""
        self.append(Command('caption', caption))
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command


class Figure(BaseLaTeXNamedContainer):

    """A class that represents a Graphic."""

    def __init__(self, data=None, position=None):
        packages = [Package('graphicx')]
        super().__init__('figure', data=data, packages=packages,
                         options=position)

    def add_image(self, filename, width=r'0.8\textwidth',
                  placement=r'\centering'):
        if placement is not None:
            self.append(placement)

        if width is not None:
            width = 'width=' + str(width)

        self.append(Command('includegraphics', options=width,
                            arguments=fix_filename(filename)))

    def add_caption(self, caption):
        """Add a caption to the figure"""
        self.append(Command('caption', caption))
Make figure a bit better
Make figure a bit better
Python
mit
jendas1/PyLaTeX,bjodah/PyLaTeX,bjodah/PyLaTeX,ovaskevich/PyLaTeX,votti/PyLaTeX,JelteF/PyLaTeX,votti/PyLaTeX,jendas1/PyLaTeX,sebastianhaas/PyLaTeX,JelteF/PyLaTeX,ovaskevich/PyLaTeX,sebastianhaas/PyLaTeX
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command


class Figure(BaseLaTeXNamedContainer):

    """A class that represents a Graphic."""

    def __init__(self, data=None, position=None):
        packages = [Package('graphicx')]
        super().__init__('figure', data=data, packages=packages,
                         options=position)

    def add_image(self, filename, width=r'0.8\textwidth',
                  placement=r'\centering'):
        if placement is not None:
            self.append(placement)

+         if width is not None:
+             width = 'width=' + str(width)
+
-         self.append(Command('includegraphics', options='width=' + width,
+         self.append(Command('includegraphics', options=width,
                            arguments=fix_filename(filename)))

    def add_caption(self, caption):
        """Add a caption to the figure"""
        self.append(Command('caption', caption))
Make figure a bit better
## Code Before:
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command


class Figure(BaseLaTeXNamedContainer):

    """A class that represents a Graphic."""

    def __init__(self, data=None, position=None):
        packages = [Package('graphicx')]
        super().__init__('figure', data=data, packages=packages,
                         options=position)

    def add_image(self, filename, width=r'0.8\textwidth',
                  placement=r'\centering'):
        if placement is not None:
            self.append(placement)

        self.append(Command('includegraphics', options='width=' + width,
                            arguments=fix_filename(filename)))

    def add_caption(self, caption):
        """Add a caption to the figure"""
        self.append(Command('caption', caption))

## Instruction:
Make figure a bit better

## Code After:
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command


class Figure(BaseLaTeXNamedContainer):

    """A class that represents a Graphic."""

    def __init__(self, data=None, position=None):
        packages = [Package('graphicx')]
        super().__init__('figure', data=data, packages=packages,
                         options=position)

    def add_image(self, filename, width=r'0.8\textwidth',
                  placement=r'\centering'):
        if placement is not None:
            self.append(placement)

        if width is not None:
            width = 'width=' + str(width)

        self.append(Command('includegraphics', options=width,
                            arguments=fix_filename(filename)))

    def add_caption(self, caption):
        """Add a caption to the figure"""
        self.append(Command('caption', caption))
...
            self.append(placement)

        if width is not None:
            width = 'width=' + str(width)

        self.append(Command('includegraphics', options=width,
                            arguments=fix_filename(filename)))
...
5cb6e90714ffe91377e01743451ed4aefe4a1e24
greencard/greencard.py
greencard/greencard.py
"""Greencard implementation.""" from functools import wraps TESTS = [] def greencard(func): """ A decorator for providing a unittesting function/method with every card in a librarian card library database when it is called. """ @wraps(func) def wrapped(*args, **kwargs): """Transparent wrapper.""" return func(*args, **kwargs) TESTS.append(wrapped) return wrapped def descovery(testdir): """Descover and load greencard tests.""" import os from glob import glob import importlib for testpath in glob(os.path.join(testdir, "*.py")): importlib.import_module(testpath) def main(clargs=None): """Command line entry point.""" from argparse import ArgumentParser from librarian.library import Library import sys parser = ArgumentParser( description="A test runner for each card in a librarian library.") parser.add_argument("library", help="Library database") parser.add_argument("-t", "--tests", default="./tests/", help="Test directory") args = parser.parse_args(clargs) descovery(args.tests) library = Library(args.library) failures = 0 for card in library.retrieve_all(): for test in TESTS: try: test(card) except AssertionError: print("{0} failed {1}".format(card, test.__name__)) failures += 1 sys.exit(failures)
"""Greencard implementation.""" from functools import wraps TESTS = [] def greencard(func): """ A decorator for providing a unittesting function/method with every card in a librarian card library database when it is called. """ @wraps(func) def wrapped(*args, **kwargs): """Transparent wrapper.""" return func(*args, **kwargs) TESTS.append(wrapped) return wrapped def descovery(testdir): """Descover and load greencard tests.""" from os.path import splitext, basename, join, exists if not exists(testdir): return None import sys from glob import glob import importlib sys.path.append(testdir) for testpath in glob(join(testdir, "*.py")): name, _ = splitext(basename(testpath)) importlib.import_module(name) def main(clargs=None): """Command line entry point.""" from argparse import ArgumentParser from librarian.library import Library import sys parser = ArgumentParser( description="A test runner for each card in a librarian library.") parser.add_argument("library", help="Library database") parser.add_argument("-t", "--tests", default="./tests/", help="Test directory") args = parser.parse_args(clargs) descovery(args.tests) library = Library(args.library) failures = 0 for card in library.retrieve_all(): for test in TESTS: try: test(card) except AssertionError: print("{0} failed {1}".format(card, test.__name__)) failures += 1 sys.exit(failures)
Fix test descovery to correctly add test dir to path and import modules rather then files
Fix test descovery to correctly add test dir to path and import modules rather then files
Python
mit
Nekroze/greencard,Nekroze/greencard
"""Greencard implementation.""" from functools import wraps TESTS = [] def greencard(func): """ A decorator for providing a unittesting function/method with every card in a librarian card library database when it is called. """ @wraps(func) def wrapped(*args, **kwargs): """Transparent wrapper.""" return func(*args, **kwargs) TESTS.append(wrapped) return wrapped def descovery(testdir): """Descover and load greencard tests.""" + from os.path import splitext, basename, join, exists + if not exists(testdir): + return None + - import os + import sys from glob import glob import importlib + sys.path.append(testdir) + - for testpath in glob(os.path.join(testdir, "*.py")): + for testpath in glob(join(testdir, "*.py")): + name, _ = splitext(basename(testpath)) - importlib.import_module(testpath) + importlib.import_module(name) def main(clargs=None): """Command line entry point.""" from argparse import ArgumentParser from librarian.library import Library import sys parser = ArgumentParser( description="A test runner for each card in a librarian library.") parser.add_argument("library", help="Library database") parser.add_argument("-t", "--tests", default="./tests/", help="Test directory") args = parser.parse_args(clargs) descovery(args.tests) library = Library(args.library) failures = 0 for card in library.retrieve_all(): for test in TESTS: try: test(card) except AssertionError: print("{0} failed {1}".format(card, test.__name__)) failures += 1 sys.exit(failures)
Fix test descovery to correctly add test dir to path and import modules rather then files
## Code Before: """Greencard implementation.""" from functools import wraps TESTS = [] def greencard(func): """ A decorator for providing a unittesting function/method with every card in a librarian card library database when it is called. """ @wraps(func) def wrapped(*args, **kwargs): """Transparent wrapper.""" return func(*args, **kwargs) TESTS.append(wrapped) return wrapped def descovery(testdir): """Descover and load greencard tests.""" import os from glob import glob import importlib for testpath in glob(os.path.join(testdir, "*.py")): importlib.import_module(testpath) def main(clargs=None): """Command line entry point.""" from argparse import ArgumentParser from librarian.library import Library import sys parser = ArgumentParser( description="A test runner for each card in a librarian library.") parser.add_argument("library", help="Library database") parser.add_argument("-t", "--tests", default="./tests/", help="Test directory") args = parser.parse_args(clargs) descovery(args.tests) library = Library(args.library) failures = 0 for card in library.retrieve_all(): for test in TESTS: try: test(card) except AssertionError: print("{0} failed {1}".format(card, test.__name__)) failures += 1 sys.exit(failures) ## Instruction: Fix test descovery to correctly add test dir to path and import modules rather then files ## Code After: """Greencard implementation.""" from functools import wraps TESTS = [] def greencard(func): """ A decorator for providing a unittesting function/method with every card in a librarian card library database when it is called. """ @wraps(func) def wrapped(*args, **kwargs): """Transparent wrapper.""" return func(*args, **kwargs) TESTS.append(wrapped) return wrapped def descovery(testdir): """Descover and load greencard tests.""" from os.path import splitext, basename, join, exists if not exists(testdir): return None import sys from glob import glob import importlib sys.path.append(testdir) for testpath in glob(join(testdir, "*.py")): name, _ = splitext(basename(testpath)) importlib.import_module(name) def main(clargs=None): """Command line entry point.""" from argparse import ArgumentParser from librarian.library import Library import sys parser = ArgumentParser( description="A test runner for each card in a librarian library.") parser.add_argument("library", help="Library database") parser.add_argument("-t", "--tests", default="./tests/", help="Test directory") args = parser.parse_args(clargs) descovery(args.tests) library = Library(args.library) failures = 0 for card in library.retrieve_all(): for test in TESTS: try: test(card) except AssertionError: print("{0} failed {1}".format(card, test.__name__)) failures += 1 sys.exit(failures)
... def descovery(testdir): """Descover and load greencard tests.""" from os.path import splitext, basename, join, exists if not exists(testdir): return None import sys from glob import glob import importlib sys.path.append(testdir) for testpath in glob(join(testdir, "*.py")): name, _ = splitext(basename(testpath)) importlib.import_module(name) ...
4539ebc92d59dd0388658fa482626185088222b8
tests.py
tests.py
from __future__ import unicode_literals

from tqdm import format_interval, format_meter


def test_format_interval():
    assert format_interval(60) == '01:00'
    assert format_interval(6160) == '1:42:40'
    assert format_interval(238113) == '66:08:33'


def test_format_meter():
    assert format_meter(0, 1000, 13) == \
        "|----------| 0/1000 0% [elapsed: " \
        "00:13 left: ?, 0.00 iters/sec]"
    assert format_meter(231, 1000, 392) == \
        "|##--------| 231/1000 23% [elapsed: " \
        "06:32 left: 21:44, 0.59 iters/sec]"
from __future__ import unicode_literals

from StringIO import StringIO
import csv

from tqdm import format_interval, format_meter, tqdm


def test_format_interval():
    assert format_interval(60) == '01:00'
    assert format_interval(6160) == '1:42:40'
    assert format_interval(238113) == '66:08:33'


def test_format_meter():
    assert format_meter(0, 1000, 13) == \
        "|----------| 0/1000 0% [elapsed: " \
        "00:13 left: ?, 0.00 iters/sec]"
    assert format_meter(231, 1000, 392) == \
        "|##--------| 231/1000 23% [elapsed: " \
        "06:32 left: 21:44, 0.59 iters/sec]"


def test_iterate_over_csv_rows():
    # Create a test csv pseudo file
    test_csv_file = StringIO()
    writer = csv.writer(test_csv_file)
    for i in range(3):
        writer.writerow(['test', 'test', 'test'])
    test_csv_file.seek(0)

    # Test that nothing fails if we iterate over rows
    reader = csv.DictReader(test_csv_file, fieldnames=('row1', 'row2', 'row3'))
    for row in tqdm(reader):
        pass
Test that tqdm fails when iterating over a csv file
Test that tqdm fails when iterating over a csv file
Python
mit
lrq3000/tqdm,kmike/tqdm
from __future__ import unicode_literals

+ from StringIO import StringIO
+ import csv

- from tqdm import format_interval, format_meter
+ from tqdm import format_interval, format_meter, tqdm


def test_format_interval():
    assert format_interval(60) == '01:00'
    assert format_interval(6160) == '1:42:40'
    assert format_interval(238113) == '66:08:33'


def test_format_meter():
    assert format_meter(0, 1000, 13) == \
        "|----------| 0/1000 0% [elapsed: " \
        "00:13 left: ?, 0.00 iters/sec]"
    assert format_meter(231, 1000, 392) == \
        "|##--------| 231/1000 23% [elapsed: " \
        "06:32 left: 21:44, 0.59 iters/sec]"
+
+
+ def test_iterate_over_csv_rows():
+     # Create a test csv pseudo file
+     test_csv_file = StringIO()
+     writer = csv.writer(test_csv_file)
+     for i in range(3):
+         writer.writerow(['test', 'test', 'test'])
+     test_csv_file.seek(0)
+
+     # Test that nothing fails if we iterate over rows
+     reader = csv.DictReader(test_csv_file, fieldnames=('row1', 'row2', 'row3'))
+     for row in tqdm(reader):
+         pass
+
Test that tqdm fails when iterating over a csv file
## Code Before:
from __future__ import unicode_literals

from tqdm import format_interval, format_meter


def test_format_interval():
    assert format_interval(60) == '01:00'
    assert format_interval(6160) == '1:42:40'
    assert format_interval(238113) == '66:08:33'


def test_format_meter():
    assert format_meter(0, 1000, 13) == \
        "|----------| 0/1000 0% [elapsed: " \
        "00:13 left: ?, 0.00 iters/sec]"
    assert format_meter(231, 1000, 392) == \
        "|##--------| 231/1000 23% [elapsed: " \
        "06:32 left: 21:44, 0.59 iters/sec]"

## Instruction:
Test that tqdm fails when iterating over a csv file

## Code After:
from __future__ import unicode_literals

from StringIO import StringIO
import csv

from tqdm import format_interval, format_meter, tqdm


def test_format_interval():
    assert format_interval(60) == '01:00'
    assert format_interval(6160) == '1:42:40'
    assert format_interval(238113) == '66:08:33'


def test_format_meter():
    assert format_meter(0, 1000, 13) == \
        "|----------| 0/1000 0% [elapsed: " \
        "00:13 left: ?, 0.00 iters/sec]"
    assert format_meter(231, 1000, 392) == \
        "|##--------| 231/1000 23% [elapsed: " \
        "06:32 left: 21:44, 0.59 iters/sec]"


def test_iterate_over_csv_rows():
    # Create a test csv pseudo file
    test_csv_file = StringIO()
    writer = csv.writer(test_csv_file)
    for i in range(3):
        writer.writerow(['test', 'test', 'test'])
    test_csv_file.seek(0)

    # Test that nothing fails if we iterate over rows
    reader = csv.DictReader(test_csv_file, fieldnames=('row1', 'row2', 'row3'))
    for row in tqdm(reader):
        pass
# ... existing code ...
from __future__ import unicode_literals

from StringIO import StringIO
import csv

from tqdm import format_interval, format_meter, tqdm
# ... modified code ...
        "|##--------| 231/1000 23% [elapsed: " \
        "06:32 left: 21:44, 0.59 iters/sec]"


def test_iterate_over_csv_rows():
    # Create a test csv pseudo file
    test_csv_file = StringIO()
    writer = csv.writer(test_csv_file)
    for i in range(3):
        writer.writerow(['test', 'test', 'test'])
    test_csv_file.seek(0)

    # Test that nothing fails if we iterate over rows
    reader = csv.DictReader(test_csv_file, fieldnames=('row1', 'row2', 'row3'))
    for row in tqdm(reader):
        pass
# ... rest of the code ...
512ae6bd0ce42dc659f7cf4766fdc80587718909
go/apps/jsbox/definition.py
go/apps/jsbox/definition.py
import json

from go.vumitools.conversation.definition import (
    ConversationDefinitionBase, ConversationAction)


class ViewLogsAction(ConversationAction):
    action_name = 'view_logs'
    action_display_name = 'View Sandbox Logs'

    redirect_to = 'jsbox_logs'


class ConversationDefinition(ConversationDefinitionBase):
    conversation_type = 'jsbox'
    conversation_display_name = 'Javascript App'

    actions = (ViewLogsAction,)

    def configured_endpoints(self, config):
        # TODO: make jsbox apps define these explicitly and
        #       update the outbound resource to check and
        #       complain if a jsbox app sends on an endpoint
        #       it hasn't defined.
        app_config = config.get("jsbox_app_config", {})
        raw_js_config = app_config.get("config", {}).get("value", {})
        try:
            js_config = json.loads(raw_js_config)
        except Exception:
            return []

        endpoints = set()

        # vumi-jssandbox-toolkit v2 endpoints
        try:
            endpoints.update(js_config["endpoints"].keys())
        except Exception:
            pass

        # vumi-jssandbox-toolkit v1 endpoints
        try:
            pool, tag = js_config["sms_tag"]
            endpoints.add("%s:%s" % (pool, tag))
        except Exception:
            pass

        return sorted(endpoints)
import json

from go.vumitools.conversation.definition import (
    ConversationDefinitionBase, ConversationAction)


class ViewLogsAction(ConversationAction):
    action_name = 'view_logs'
    action_display_name = 'View Sandbox Logs'

    redirect_to = 'jsbox_logs'


class ConversationDefinition(ConversationDefinitionBase):
    conversation_type = 'jsbox'
    conversation_display_name = 'Javascript App'

    actions = (ViewLogsAction,)

    def configured_endpoints(self, config):
        app_config = config.get("jsbox_app_config", {})
        raw_js_config = app_config.get("config", {}).get("value", {})
        try:
            js_config = json.loads(raw_js_config)
        except Exception:
            return []

        endpoints = set()

        # vumi-jssandbox-toolkit v2 endpoints
        try:
            endpoints.update(js_config["endpoints"].keys())
        except Exception:
            pass

        # vumi-jssandbox-toolkit v1 endpoints
        try:
            pool, tag = js_config["sms_tag"]
            endpoints.add("%s:%s" % (pool, tag))
        except Exception:
            pass

        return sorted(endpoints)
Remove ancient TODO that was resolved a long time ago.
Remove ancient TODO that was resolved a long time ago.
Python
bsd-3-clause
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
import json

from go.vumitools.conversation.definition import (
    ConversationDefinitionBase, ConversationAction)


class ViewLogsAction(ConversationAction):
    action_name = 'view_logs'
    action_display_name = 'View Sandbox Logs'

    redirect_to = 'jsbox_logs'


class ConversationDefinition(ConversationDefinitionBase):
    conversation_type = 'jsbox'
    conversation_display_name = 'Javascript App'

    actions = (ViewLogsAction,)

    def configured_endpoints(self, config):
-         # TODO: make jsbox apps define these explicitly and
-         #       update the outbound resource to check and
-         #       complain if a jsbox app sends on an endpoint
-         #       it hasn't defined.
        app_config = config.get("jsbox_app_config", {})
        raw_js_config = app_config.get("config", {}).get("value", {})
        try:
            js_config = json.loads(raw_js_config)
        except Exception:
            return []

        endpoints = set()

        # vumi-jssandbox-toolkit v2 endpoints
        try:
            endpoints.update(js_config["endpoints"].keys())
        except Exception:
            pass

        # vumi-jssandbox-toolkit v1 endpoints
        try:
            pool, tag = js_config["sms_tag"]
            endpoints.add("%s:%s" % (pool, tag))
        except Exception:
            pass

        return sorted(endpoints)
Remove ancient TODO that was resolved a long time ago.
## Code Before:
import json

from go.vumitools.conversation.definition import (
    ConversationDefinitionBase, ConversationAction)


class ViewLogsAction(ConversationAction):
    action_name = 'view_logs'
    action_display_name = 'View Sandbox Logs'

    redirect_to = 'jsbox_logs'


class ConversationDefinition(ConversationDefinitionBase):
    conversation_type = 'jsbox'
    conversation_display_name = 'Javascript App'

    actions = (ViewLogsAction,)

    def configured_endpoints(self, config):
        # TODO: make jsbox apps define these explicitly and
        #       update the outbound resource to check and
        #       complain if a jsbox app sends on an endpoint
        #       it hasn't defined.
        app_config = config.get("jsbox_app_config", {})
        raw_js_config = app_config.get("config", {}).get("value", {})
        try:
            js_config = json.loads(raw_js_config)
        except Exception:
            return []

        endpoints = set()

        # vumi-jssandbox-toolkit v2 endpoints
        try:
            endpoints.update(js_config["endpoints"].keys())
        except Exception:
            pass

        # vumi-jssandbox-toolkit v1 endpoints
        try:
            pool, tag = js_config["sms_tag"]
            endpoints.add("%s:%s" % (pool, tag))
        except Exception:
            pass

        return sorted(endpoints)

## Instruction:
Remove ancient TODO that was resolved a long time ago.

## Code After:
import json

from go.vumitools.conversation.definition import (
    ConversationDefinitionBase, ConversationAction)


class ViewLogsAction(ConversationAction):
    action_name = 'view_logs'
    action_display_name = 'View Sandbox Logs'

    redirect_to = 'jsbox_logs'


class ConversationDefinition(ConversationDefinitionBase):
    conversation_type = 'jsbox'
    conversation_display_name = 'Javascript App'

    actions = (ViewLogsAction,)

    def configured_endpoints(self, config):
        app_config = config.get("jsbox_app_config", {})
        raw_js_config = app_config.get("config", {}).get("value", {})
        try:
            js_config = json.loads(raw_js_config)
        except Exception:
            return []

        endpoints = set()

        # vumi-jssandbox-toolkit v2 endpoints
        try:
            endpoints.update(js_config["endpoints"].keys())
        except Exception:
            pass

        # vumi-jssandbox-toolkit v1 endpoints
        try:
            pool, tag = js_config["sms_tag"]
            endpoints.add("%s:%s" % (pool, tag))
        except Exception:
            pass

        return sorted(endpoints)
# ... existing code ...
    def configured_endpoints(self, config):
        app_config = config.get("jsbox_app_config", {})
        raw_js_config = app_config.get("config", {}).get("value", {})
# ... rest of the code ...
b2803c40b2fcee7ab466c83fc95bb693a28576d0
messageboard/views.py
messageboard/views.py
from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
import base64


class MessageViewSet(viewsets.ModelViewSet):
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()

    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)

    def perform_create(self, serializer):
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            with open('media/img/snapshot.jpg', 'wb') as f:
                f.write(photo)
                photo_file = File(f, name='snapshot.jpg')
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
from django.shortcuts import render
from .models import Message
from .serializers import MessageSerializer
from .permissions import IsOwnerOrReadOnly
from rest_framework import generics, permissions
from rest_framework.permissions import IsAuthenticated
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
import base64


class MessageViewSet(viewsets.ModelViewSet):
    serializer_class = MessageSerializer
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Message.objects.all()

    @list_route(methods=['get'], permission_classes=[permissions.AllowAny])
    def all(self, request):
        messages = Message.objects.all()
        serializer = MessageSerializer(messages, many=True)
        return Response(serializer.data)

    def perform_create(self, serializer):
        photo_file = None
        if 'photo' in self.request.data:
            photo = base64.b64decode(self.request.data['photo'])
            img_temp = NamedTemporaryFile(delete=True)
            img_temp.write(photo)
            img_temp.flush()
            photo_file = File(img_temp)
        serializer.save(
            author=self.request.user,
            message=self.request.data['message'],
            image=photo_file
        )
Use temporary file and fix to image save handling
Use temporary file and fix to image save handling
Python
mit
DjangoBeer/message-board,DjangoBeer/message-board,fmarco/message-board,DjangoBeer/message-board,fmarco/message-board,fmarco/message-board
from django.shortcuts import render from .models import Message from .serializers import MessageSerializer from .permissions import IsOwnerOrReadOnly from rest_framework import generics, permissions from rest_framework.permissions import IsAuthenticated from rest_framework import viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from django.core.files import File + from django.core.files.temp import NamedTemporaryFile import base64 class MessageViewSet(viewsets.ModelViewSet): serializer_class = MessageSerializer permission_classes = (permissions.IsAuthenticated,) queryset = Message.objects.all() @list_route(methods=['get'], permission_classes=[permissions.AllowAny]) def all(self, request): messages = Message.objects.all() serializer = MessageSerializer(messages, many=True) return Response(serializer.data) def perform_create(self, serializer): photo_file = None if 'photo' in self.request.data: photo = base64.b64decode(self.request.data['photo']) - with open('media/img/snapshot.jpg', 'wb') as f: + img_temp = NamedTemporaryFile(delete=True) - f.write(photo) + img_temp.write(photo) - photo_file = File(f, name='snapshot.jpg') + img_temp.flush() + photo_file = File(img_temp) serializer.save( author=self.request.user, message=self.request.data['message'], image=photo_file )
Use temporary file and fix to image save handling
## Code Before: from django.shortcuts import render from .models import Message from .serializers import MessageSerializer from .permissions import IsOwnerOrReadOnly from rest_framework import generics, permissions from rest_framework.permissions import IsAuthenticated from rest_framework import viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from django.core.files import File import base64 class MessageViewSet(viewsets.ModelViewSet): serializer_class = MessageSerializer permission_classes = (permissions.IsAuthenticated,) queryset = Message.objects.all() @list_route(methods=['get'], permission_classes=[permissions.AllowAny]) def all(self, request): messages = Message.objects.all() serializer = MessageSerializer(messages, many=True) return Response(serializer.data) def perform_create(self, serializer): photo_file = None if 'photo' in self.request.data: photo = base64.b64decode(self.request.data['photo']) with open('media/img/snapshot.jpg', 'wb') as f: f.write(photo) photo_file = File(f, name='snapshot.jpg') serializer.save( author=self.request.user, message=self.request.data['message'], image=photo_file ) ## Instruction: Use temporary file and fix to image save handling ## Code After: from django.shortcuts import render from .models import Message from .serializers import MessageSerializer from .permissions import IsOwnerOrReadOnly from rest_framework import generics, permissions from rest_framework.permissions import IsAuthenticated from rest_framework import viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from django.core.files import File from django.core.files.temp import NamedTemporaryFile import base64 class MessageViewSet(viewsets.ModelViewSet): serializer_class = MessageSerializer permission_classes = (permissions.IsAuthenticated,) queryset = Message.objects.all() @list_route(methods=['get'], permission_classes=[permissions.AllowAny]) def all(self, request): messages = Message.objects.all() serializer = MessageSerializer(messages, many=True) return Response(serializer.data) def perform_create(self, serializer): photo_file = None if 'photo' in self.request.data: photo = base64.b64decode(self.request.data['photo']) img_temp = NamedTemporaryFile(delete=True) img_temp.write(photo) img_temp.flush() photo_file = File(img_temp) serializer.save( author=self.request.user, message=self.request.data['message'], image=photo_file )
# ... existing code ... from django.core.files import File from django.core.files.temp import NamedTemporaryFile import base64 # ... modified code ... if 'photo' in self.request.data: photo = base64.b64decode(self.request.data['photo']) img_temp = NamedTemporaryFile(delete=True) img_temp.write(photo) img_temp.flush() photo_file = File(img_temp) serializer.save( author=self.request.user, # ... rest of the code ...
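The temporary-file approach in the record above is one way to hand decoded bytes to Django; the same handoff can also be done entirely in memory. A minimal sketch assuming stock Django — the helper name and default filename below are invented for illustration, not taken from the record:

```python
import base64

from django.core.files.base import ContentFile


def photo_from_base64(raw_b64, name="snapshot.jpg"):
    """Wrap decoded image bytes in an in-memory Django file object.

    ContentFile is accepted wherever Django expects a File, so the result
    can be passed straight to serializer.save(image=...) with no temp file.
    """
    return ContentFile(base64.b64decode(raw_b64), name=name)
```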
9f05a8917ee6fd01a334ef2e1e57062be8ef13af
byceps/config_defaults.py
byceps/config_defaults.py
from datetime import timedelta from pathlib import Path # database connection SQLALCHEMY_ECHO = False # Disable Flask-SQLAlchemy's tracking of object modifications. SQLALCHEMY_TRACK_MODIFICATIONS = False # job queue JOBS_ASYNC = True # metrics METRICS_ENABLED = False # RQ dashboard (for job queue) RQ_DASHBOARD_ENABLED = False RQ_POLL_INTERVAL = 2500 WEB_BACKGROUND = 'white' # login sessions PERMANENT_SESSION_LIFETIME = timedelta(14) # localization LOCALE = 'de_DE.UTF-8' LOCALES_FORMS = ['de'] TIMEZONE = 'Europe/Berlin' # static content files path PATH_DATA = Path('./data') # home page ROOT_REDIRECT_TARGET = None ROOT_REDIRECT_STATUS_CODE = 307 # shop SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
from datetime import timedelta from pathlib import Path # database connection SQLALCHEMY_ECHO = False # Avoid connection errors after database becomes temporarily # unreachable, then becomes reachable again. SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True} # Disable Flask-SQLAlchemy's tracking of object modifications. SQLALCHEMY_TRACK_MODIFICATIONS = False # job queue JOBS_ASYNC = True # metrics METRICS_ENABLED = False # RQ dashboard (for job queue) RQ_DASHBOARD_ENABLED = False RQ_POLL_INTERVAL = 2500 WEB_BACKGROUND = 'white' # login sessions PERMANENT_SESSION_LIFETIME = timedelta(14) # localization LOCALE = 'de_DE.UTF-8' LOCALES_FORMS = ['de'] TIMEZONE = 'Europe/Berlin' # static content files path PATH_DATA = Path('./data') # home page ROOT_REDIRECT_TARGET = None ROOT_REDIRECT_STATUS_CODE = 307 # shop SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
Enable DBMS pool pre-pinging to avoid connection errors
Enable DBMS pool pre-pinging to avoid connection errors
Python
bsd-3-clause
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
from datetime import timedelta from pathlib import Path # database connection SQLALCHEMY_ECHO = False + + # Avoid connection errors after database becomes temporarily + # unreachable, then becomes reachable again. + SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True} # Disable Flask-SQLAlchemy's tracking of object modifications. SQLALCHEMY_TRACK_MODIFICATIONS = False # job queue JOBS_ASYNC = True # metrics METRICS_ENABLED = False # RQ dashboard (for job queue) RQ_DASHBOARD_ENABLED = False RQ_POLL_INTERVAL = 2500 WEB_BACKGROUND = 'white' # login sessions PERMANENT_SESSION_LIFETIME = timedelta(14) # localization LOCALE = 'de_DE.UTF-8' LOCALES_FORMS = ['de'] TIMEZONE = 'Europe/Berlin' # static content files path PATH_DATA = Path('./data') # home page ROOT_REDIRECT_TARGET = None ROOT_REDIRECT_STATUS_CODE = 307 # shop SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
Enable DBMS pool pre-pinging to avoid connection errors
## Code Before: from datetime import timedelta from pathlib import Path # database connection SQLALCHEMY_ECHO = False # Disable Flask-SQLAlchemy's tracking of object modifications. SQLALCHEMY_TRACK_MODIFICATIONS = False # job queue JOBS_ASYNC = True # metrics METRICS_ENABLED = False # RQ dashboard (for job queue) RQ_DASHBOARD_ENABLED = False RQ_POLL_INTERVAL = 2500 WEB_BACKGROUND = 'white' # login sessions PERMANENT_SESSION_LIFETIME = timedelta(14) # localization LOCALE = 'de_DE.UTF-8' LOCALES_FORMS = ['de'] TIMEZONE = 'Europe/Berlin' # static content files path PATH_DATA = Path('./data') # home page ROOT_REDIRECT_TARGET = None ROOT_REDIRECT_STATUS_CODE = 307 # shop SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin' ## Instruction: Enable DBMS pool pre-pinging to avoid connection errors ## Code After: from datetime import timedelta from pathlib import Path # database connection SQLALCHEMY_ECHO = False # Avoid connection errors after database becomes temporarily # unreachable, then becomes reachable again. SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True} # Disable Flask-SQLAlchemy's tracking of object modifications. SQLALCHEMY_TRACK_MODIFICATIONS = False # job queue JOBS_ASYNC = True # metrics METRICS_ENABLED = False # RQ dashboard (for job queue) RQ_DASHBOARD_ENABLED = False RQ_POLL_INTERVAL = 2500 WEB_BACKGROUND = 'white' # login sessions PERMANENT_SESSION_LIFETIME = timedelta(14) # localization LOCALE = 'de_DE.UTF-8' LOCALES_FORMS = ['de'] TIMEZONE = 'Europe/Berlin' # static content files path PATH_DATA = Path('./data') # home page ROOT_REDIRECT_TARGET = None ROOT_REDIRECT_STATUS_CODE = 307 # shop SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
# ... existing code ... # database connection SQLALCHEMY_ECHO = False # Avoid connection errors after database becomes temporarily # unreachable, then becomes reachable again. SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True} # Disable Flask-SQLAlchemy's tracking of object modifications. # ... rest of the code ...
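The pre-ping option in the record above is not Flask-specific; Flask-SQLAlchemy forwards SQLALCHEMY_ENGINE_OPTIONS to SQLAlchemy's engine factory. A minimal sketch against a plain SQLAlchemy engine, with a placeholder connection URL:

```python
from sqlalchemy import create_engine

# pool_pre_ping emits a lightweight ping when a connection is checked out of
# the pool; connections that went stale while idle are recycled transparently
# instead of raising on their first real query.
engine = create_engine(
    "postgresql+psycopg2://localhost/byceps",  # placeholder URL
    pool_pre_ping=True,
)
```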
c0a341bb285e9906747c1f872e3b022a3a491044
falmer/events/filters.py
falmer/events/filters.py
from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter from falmer.events.models import Curator from . import models class EventFilterSet(FilterSet): class Meta: model = models.Event fields = ( 'title', 'venue', 'type', 'bundle', 'parent', 'brand', 'student_group', 'from_time', 'to_time', 'audience_just_for_pgs', 'audience_suitable_kids_families', 'audience_good_to_meet_people', 'is_over_18_only', 'cost', 'alcohol', 'type', 'ticket_level', 'curated_by' ) title = CharFilter(lookup_expr='icontains') brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte') from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte') uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull') curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by') # # class BrandingPeriodFilerSet(FilterSet): # class Meta: # model = BrandingPeriod
from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter from falmer.events.models import Curator from . import models class EventFilterSet(FilterSet): class Meta: model = models.Event fields = ( 'title', 'venue', 'type', 'bundle', 'parent', 'brand', 'student_group', 'from_time', 'to_time', 'audience_just_for_pgs', 'audience_suitable_kids_families', 'audience_good_to_meet_people', 'is_over_18_only', 'cost', 'alcohol', 'type', 'ticket_level', 'curated_by' ) title = CharFilter(lookup_expr='icontains') brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') type = CharFilter(field_name='type__slug') student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte') from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte') uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull') curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by') # # class BrandingPeriodFilerSet(FilterSet): # class Meta: # model = BrandingPeriod
Add type filter by slug
Add type filter by slug
Python
mit
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter from falmer.events.models import Curator from . import models class EventFilterSet(FilterSet): class Meta: model = models.Event fields = ( 'title', 'venue', 'type', 'bundle', 'parent', 'brand', 'student_group', 'from_time', 'to_time', 'audience_just_for_pgs', 'audience_suitable_kids_families', 'audience_good_to_meet_people', 'is_over_18_only', 'cost', 'alcohol', 'type', 'ticket_level', 'curated_by' ) title = CharFilter(lookup_expr='icontains') brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') + type = CharFilter(field_name='type__slug') student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte') from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte') uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull') curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by') # # class BrandingPeriodFilerSet(FilterSet): # class Meta: # model = BrandingPeriod
Add type filter by slug
## Code Before: from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter from falmer.events.models import Curator from . import models class EventFilterSet(FilterSet): class Meta: model = models.Event fields = ( 'title', 'venue', 'type', 'bundle', 'parent', 'brand', 'student_group', 'from_time', 'to_time', 'audience_just_for_pgs', 'audience_suitable_kids_families', 'audience_good_to_meet_people', 'is_over_18_only', 'cost', 'alcohol', 'type', 'ticket_level', 'curated_by' ) title = CharFilter(lookup_expr='icontains') brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte') from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte') uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull') curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by') # # class BrandingPeriodFilerSet(FilterSet): # class Meta: # model = BrandingPeriod ## Instruction: Add type filter by slug ## Code After: from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter from falmer.events.models import Curator from . import models class EventFilterSet(FilterSet): class Meta: model = models.Event fields = ( 'title', 'venue', 'type', 'bundle', 'parent', 'brand', 'student_group', 'from_time', 'to_time', 'audience_just_for_pgs', 'audience_suitable_kids_families', 'audience_good_to_meet_people', 'is_over_18_only', 'cost', 'alcohol', 'type', 'ticket_level', 'curated_by' ) title = CharFilter(lookup_expr='icontains') brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') type = CharFilter(field_name='type__slug') student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte') from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte') uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull') curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by') # # class BrandingPeriodFilerSet(FilterSet): # class Meta: # model = BrandingPeriod
// ... existing code ... brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') type = CharFilter(field_name='type__slug') student_group = CharFilter(field_name='student_group__slug') // ... rest of the code ...
f29a6b205a872d7df63e8c45b5829959c98de227
comics/comics/pcweenies.py
comics/comics/pcweenies.py
from comics.aggregator.crawler import CrawlerBase, CrawlerResult from comics.meta.base import MetaBase class Meta(MetaBase): name = 'The PC Weenies' language = 'en' url = 'http://www.pcweenies.com/' start_date = '1998-10-21' rights = 'Krishna M. Sadasivam' class Crawler(CrawlerBase): history_capable_days = 10 schedule = 'Mo,We,Fr' time_zone = -8 def crawl(self, pub_date): feed = self.parse_feed('http://www.pcweenies.com/feed/') for entry in feed.for_date(pub_date): if 'Comic' in entry.tags: title = entry.title url = entry.content0.src(u'img') return CrawlerResult(url, title)
from comics.aggregator.crawler import CrawlerBase, CrawlerResult from comics.meta.base import MetaBase class Meta(MetaBase): name = 'The PC Weenies' language = 'en' url = 'http://www.pcweenies.com/' start_date = '1998-10-21' rights = 'Krishna M. Sadasivam' class Crawler(CrawlerBase): history_capable_days = 10 schedule = 'Mo,We,Fr' time_zone = -8 def crawl(self, pub_date): feed = self.parse_feed('http://www.pcweenies.com/feed/') for entry in feed.for_date(pub_date): if 'Comic' in entry.tags: title = entry.title url = entry.content0.src(u'img[src*="/comics/"]') return CrawlerResult(url, title)
Update CSS selector which matched two img elements
Update CSS selector which matched two img elements
Python
agpl-3.0
klette/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,klette/comics,datagutten/comics,datagutten/comics
from comics.aggregator.crawler import CrawlerBase, CrawlerResult from comics.meta.base import MetaBase class Meta(MetaBase): name = 'The PC Weenies' language = 'en' url = 'http://www.pcweenies.com/' start_date = '1998-10-21' rights = 'Krishna M. Sadasivam' class Crawler(CrawlerBase): history_capable_days = 10 schedule = 'Mo,We,Fr' time_zone = -8 def crawl(self, pub_date): feed = self.parse_feed('http://www.pcweenies.com/feed/') for entry in feed.for_date(pub_date): if 'Comic' in entry.tags: title = entry.title - url = entry.content0.src(u'img') + url = entry.content0.src(u'img[src*="/comics/"]') return CrawlerResult(url, title)
Update CSS selector which matched two img elements
## Code Before: from comics.aggregator.crawler import CrawlerBase, CrawlerResult from comics.meta.base import MetaBase class Meta(MetaBase): name = 'The PC Weenies' language = 'en' url = 'http://www.pcweenies.com/' start_date = '1998-10-21' rights = 'Krishna M. Sadasivam' class Crawler(CrawlerBase): history_capable_days = 10 schedule = 'Mo,We,Fr' time_zone = -8 def crawl(self, pub_date): feed = self.parse_feed('http://www.pcweenies.com/feed/') for entry in feed.for_date(pub_date): if 'Comic' in entry.tags: title = entry.title url = entry.content0.src(u'img') return CrawlerResult(url, title) ## Instruction: Update CSS selector which matched two img elements ## Code After: from comics.aggregator.crawler import CrawlerBase, CrawlerResult from comics.meta.base import MetaBase class Meta(MetaBase): name = 'The PC Weenies' language = 'en' url = 'http://www.pcweenies.com/' start_date = '1998-10-21' rights = 'Krishna M. Sadasivam' class Crawler(CrawlerBase): history_capable_days = 10 schedule = 'Mo,We,Fr' time_zone = -8 def crawl(self, pub_date): feed = self.parse_feed('http://www.pcweenies.com/feed/') for entry in feed.for_date(pub_date): if 'Comic' in entry.tags: title = entry.title url = entry.content0.src(u'img[src*="/comics/"]') return CrawlerResult(url, title)
// ... existing code ... if 'Comic' in entry.tags: title = entry.title url = entry.content0.src(u'img[src*="/comics/"]') return CrawlerResult(url, title) // ... rest of the code ...
61cce2cd23c798a8604274335d9637e8ebce1385
api/v2/views/image.py
api/v2/views/image.py
from core.models import Application as Image from api import permissions from api.v2.serializers.details import ImageSerializer from api.v2.views.base import AuthOptionalViewSet from api.v2.views.mixins import MultipleFieldLookup class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet): """ API endpoint that allows images to be viewed or edited. """ lookup_fields = ("id", "uuid") http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace'] filter_fields = ('created_by__username', 'tags__name', 'projects__id') permission_classes = (permissions.InMaintenance, permissions.ApiAuthOptional, permissions.CanEditOrReadOnly, permissions.ApplicationMemberOrReadOnly) serializer_class = ImageSerializer search_fields = ('id', 'name', 'versions__change_log', 'tags__name', 'tags__description', 'created_by__username', 'versions__machines__instance_source__provider__location') def get_queryset(self): request_user = self.request.user return Image.current_apps(request_user)
from core.models import Application as Image from api import permissions from api.v2.serializers.details import ImageSerializer from api.v2.views.base import AuthOptionalViewSet from api.v2.views.mixins import MultipleFieldLookup class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet): """ API endpoint that allows images to be viewed or edited. """ lookup_fields = ("id", "uuid") http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace'] filter_fields = ('created_by__username', 'tags__name', 'projects__id') permission_classes = (permissions.InMaintenance, permissions.ApiAuthOptional, permissions.CanEditOrReadOnly, permissions.ApplicationMemberOrReadOnly) serializer_class = ImageSerializer search_fields = ('id', 'name', 'versions__change_log', 'tags__name', 'tags__description', 'created_by__username', 'versions__machines__instance_source__identifier', 'versions__machines__instance_source__provider__location') def get_queryset(self): request_user = self.request.user return Image.current_apps(request_user)
Add 'Machine Identifier' for easy support lookups in Troposphere
Add 'Machine Identifier' for easy support lookups in Troposphere
Python
apache-2.0
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
from core.models import Application as Image from api import permissions from api.v2.serializers.details import ImageSerializer from api.v2.views.base import AuthOptionalViewSet from api.v2.views.mixins import MultipleFieldLookup class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet): """ API endpoint that allows images to be viewed or edited. """ lookup_fields = ("id", "uuid") http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace'] filter_fields = ('created_by__username', 'tags__name', 'projects__id') permission_classes = (permissions.InMaintenance, permissions.ApiAuthOptional, permissions.CanEditOrReadOnly, permissions.ApplicationMemberOrReadOnly) serializer_class = ImageSerializer search_fields = ('id', 'name', 'versions__change_log', 'tags__name', + 'tags__description', 'created_by__username', + 'versions__machines__instance_source__identifier', - 'tags__description', 'created_by__username', 'versions__machines__instance_source__provider__location') + 'versions__machines__instance_source__provider__location') def get_queryset(self): request_user = self.request.user return Image.current_apps(request_user)
Add 'Machine Identifier' for easy support lookups in Troposphere
## Code Before: from core.models import Application as Image from api import permissions from api.v2.serializers.details import ImageSerializer from api.v2.views.base import AuthOptionalViewSet from api.v2.views.mixins import MultipleFieldLookup class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet): """ API endpoint that allows images to be viewed or edited. """ lookup_fields = ("id", "uuid") http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace'] filter_fields = ('created_by__username', 'tags__name', 'projects__id') permission_classes = (permissions.InMaintenance, permissions.ApiAuthOptional, permissions.CanEditOrReadOnly, permissions.ApplicationMemberOrReadOnly) serializer_class = ImageSerializer search_fields = ('id', 'name', 'versions__change_log', 'tags__name', 'tags__description', 'created_by__username', 'versions__machines__instance_source__provider__location') def get_queryset(self): request_user = self.request.user return Image.current_apps(request_user) ## Instruction: Add 'Machine Identifier' for easy support lookups in Troposphere ## Code After: from core.models import Application as Image from api import permissions from api.v2.serializers.details import ImageSerializer from api.v2.views.base import AuthOptionalViewSet from api.v2.views.mixins import MultipleFieldLookup class ImageViewSet(MultipleFieldLookup, AuthOptionalViewSet): """ API endpoint that allows images to be viewed or edited. """ lookup_fields = ("id", "uuid") http_method_names = ['get', 'put', 'patch', 'head', 'options', 'trace'] filter_fields = ('created_by__username', 'tags__name', 'projects__id') permission_classes = (permissions.InMaintenance, permissions.ApiAuthOptional, permissions.CanEditOrReadOnly, permissions.ApplicationMemberOrReadOnly) serializer_class = ImageSerializer search_fields = ('id', 'name', 'versions__change_log', 'tags__name', 'tags__description', 'created_by__username', 'versions__machines__instance_source__identifier', 'versions__machines__instance_source__provider__location') def get_queryset(self): request_user = self.request.user return Image.current_apps(request_user)
// ... existing code ... search_fields = ('id', 'name', 'versions__change_log', 'tags__name', 'tags__description', 'created_by__username', 'versions__machines__instance_source__identifier', 'versions__machines__instance_source__provider__location') def get_queryset(self): // ... rest of the code ...
16c1352ecf8583615e482c431ec5183fdb718f67
split_file.py
split_file.py
from strip_comments import strip_comments import re __all__ = ["split_coq_file_contents"] def split_coq_file_contents(contents): """Splits the contents of a coq file into multiple statements. This is done by finding one or three periods followed by whitespace. This is a dumb algorithm, but it seems to be (nearly) the one that ProofGeneral and CoqIDE use.""" return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
from strip_comments import strip_comments import re __all__ = ["split_coq_file_contents"] def merge_quotations(statements): """If there are an odd number of "s in a statement, assume that we broke the middle of a string. We recombine that string.""" cur = None for i in statements: if i.count('"') % 2 != 0: if cur is None: cur = i else: yield (cur + ' ' + i) cur = None elif cur is None: yield i else: cur += ' ' + i def split_coq_file_contents(contents): """Splits the contents of a coq file into multiple statements. This is done by finding one or three periods followed by whitespace. This is a dumb algorithm, but it seems to be (nearly) the one that ProofGeneral and CoqIDE use. We additionally merge lines inside of quotations.""" return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
Make splitting more robust to periods in strings
Make splitting more robust to periods in strings
Python
mit
JasonGross/coq-tools,JasonGross/coq-tools
from strip_comments import strip_comments import re __all__ = ["split_coq_file_contents"] + + def merge_quotations(statements): + """If there are an odd number of "s in a statement, assume that we + broke the middle of a string. We recombine that string.""" + + cur = None + for i in statements: + if i.count('"') % 2 != 0: + if cur is None: + cur = i + else: + yield (cur + ' ' + i) + cur = None + elif cur is None: + yield i + else: + cur += ' ' + i def split_coq_file_contents(contents): """Splits the contents of a coq file into multiple statements. This is done by finding one or three periods followed by whitespace. This is a dumb algorithm, but it seems to be (nearly) - the one that ProofGeneral and CoqIDE use.""" + the one that ProofGeneral and CoqIDE use. - return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents)) + We additionally merge lines inside of quotations.""" + return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents)))) +
Make splitting more robust to periods in strings
## Code Before: from strip_comments import strip_comments import re __all__ = ["split_coq_file_contents"] def split_coq_file_contents(contents): """Splits the contents of a coq file into multiple statements. This is done by finding one or three periods followed by whitespace. This is a dumb algorithm, but it seems to be (nearly) the one that ProofGeneral and CoqIDE use.""" return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents)) ## Instruction: Make splitting more robust to periods in strings ## Code After: from strip_comments import strip_comments import re __all__ = ["split_coq_file_contents"] def merge_quotations(statements): """If there are an odd number of "s in a statement, assume that we broke the middle of a string. We recombine that string.""" cur = None for i in statements: if i.count('"') % 2 != 0: if cur is None: cur = i else: yield (cur + ' ' + i) cur = None elif cur is None: yield i else: cur += ' ' + i def split_coq_file_contents(contents): """Splits the contents of a coq file into multiple statements. This is done by finding one or three periods followed by whitespace. This is a dumb algorithm, but it seems to be (nearly) the one that ProofGeneral and CoqIDE use. We additionally merge lines inside of quotations.""" return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
... __all__ = ["split_coq_file_contents"] def merge_quotations(statements): """If there are an odd number of "s in a statement, assume that we broke the middle of a string. We recombine that string.""" cur = None for i in statements: if i.count('"') % 2 != 0: if cur is None: cur = i else: yield (cur + ' ' + i) cur = None elif cur is None: yield i else: cur += ' ' + i def split_coq_file_contents(contents): ... This is done by finding one or three periods followed by whitespace. This is a dumb algorithm, but it seems to be (nearly) the one that ProofGeneral and CoqIDE use. We additionally merge lines inside of quotations.""" return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents)))) ...
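A small usage sketch of the merging step introduced in the record above, assuming merge_quotations can be imported from split_file as defined there; the Coq fragment is invented for illustration:

```python
from split_file import merge_quotations

# The naive period split breaks a string literal containing ". " in half;
# merge_quotations stitches the two halves back into one statement.
pieces = ['Definition greeting := "Hello.', 'World.".', 'Check greeting.']
print(list(merge_quotations(pieces)))
# ['Definition greeting := "Hello. World.".', 'Check greeting.']
```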
07c40f2c47c81843c8fd183f8fad7e489fb2d814
sirius/LI_V00/record_names.py
sirius/LI_V00/record_names.py
from . import families as _families def get_record_names(subsystem=None): """Return a dictionary of record names for given subsystem each entry is another dictionary of model families whose values are the indices in the pyaccel model of the magnets that belong to the family. The magnet models ca be segmented, in which case the value is a python list of lists.""" _dict = {'LIPA-MODE':{}} return _dict def get_family_names(family=None, prefix=''): _dict = {} return _dict def get_element_names(element=None, prefix=''): _dict = {} return _dict def get_magnet_names(): # return get_record_names('boma') _dict = {} return _dict def get_pulsed_magnet_names(): # return get_record_names('boma') _dict = {} return _dict
from . import families as _families def get_record_names(subsystem=None): """Return a dictionary of record names for given subsystem each entry is another dictionary of model families whose values are the indices in the pyaccel model of the magnets that belong to the family. The magnet models ca be segmented, in which case the value is a python list of lists.""" _dict = {} return _dict def get_family_names(family=None, prefix=''): _dict = {} return _dict def get_element_names(element=None, prefix=''): _dict = {} return _dict def get_magnet_names(): # return get_record_names('boma') _dict = {} return _dict def get_pulsed_magnet_names(): # return get_record_names('boma') _dict = {} return _dict
Change linac mode pv to fake pvs
Change linac mode pv to fake pvs
Python
mit
lnls-fac/sirius
from . import families as _families def get_record_names(subsystem=None): """Return a dictionary of record names for given subsystem each entry is another dictionary of model families whose values are the indices in the pyaccel model of the magnets that belong to the family. The magnet models ca be segmented, in which case the value is a python list of lists.""" - _dict = {'LIPA-MODE':{}} + _dict = {} return _dict def get_family_names(family=None, prefix=''): _dict = {} return _dict def get_element_names(element=None, prefix=''): _dict = {} return _dict def get_magnet_names(): # return get_record_names('boma') _dict = {} return _dict def get_pulsed_magnet_names(): # return get_record_names('boma') _dict = {} return _dict
Change linac mode pv to fake pvs
## Code Before: from . import families as _families def get_record_names(subsystem=None): """Return a dictionary of record names for given subsystem each entry is another dictionary of model families whose values are the indices in the pyaccel model of the magnets that belong to the family. The magnet models ca be segmented, in which case the value is a python list of lists.""" _dict = {'LIPA-MODE':{}} return _dict def get_family_names(family=None, prefix=''): _dict = {} return _dict def get_element_names(element=None, prefix=''): _dict = {} return _dict def get_magnet_names(): # return get_record_names('boma') _dict = {} return _dict def get_pulsed_magnet_names(): # return get_record_names('boma') _dict = {} return _dict ## Instruction: Change linac mode pv to fake pvs ## Code After: from . import families as _families def get_record_names(subsystem=None): """Return a dictionary of record names for given subsystem each entry is another dictionary of model families whose values are the indices in the pyaccel model of the magnets that belong to the family. The magnet models ca be segmented, in which case the value is a python list of lists.""" _dict = {} return _dict def get_family_names(family=None, prefix=''): _dict = {} return _dict def get_element_names(element=None, prefix=''): _dict = {} return _dict def get_magnet_names(): # return get_record_names('boma') _dict = {} return _dict def get_pulsed_magnet_names(): # return get_record_names('boma') _dict = {} return _dict
# ... existing code ... that belong to the family. The magnet models ca be segmented, in which case the value is a python list of lists.""" _dict = {} return _dict # ... rest of the code ...
192c92fba3836f2073576674495faa42799cdb95
tests/test_sqlite.py
tests/test_sqlite.py
import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( '.molecule/ansible_inventory').get_hosts('sqlite') def test_package(Package): p = Package('pdns-backend-sqlite3') assert p.is_installed def test_database_exists(File): f = File('/var/lib/powerdns/pdns.db') assert f.exists assert f.user == 'pdns' assert f.group == 'pdns' assert f.mode == 420 assert f.size > 10000
import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( '.molecule/ansible_inventory').get_hosts('sqlite') debian_os = ['debian', 'ubuntu'] rhel_os = ['redhat', 'centos'] def test_package(Package, SystemInfo): p = None if SystemInfo.distribution in debian_os: p = Package('pdns-backend-sqlite3') if SystemInfo.distribution in rhel_os: p = Package('pdns-backend-sqlite') assert p.is_installed def test_database_exists(File): f = File('/var/lib/powerdns/pdns.db') assert f.exists assert f.user == 'pdns' assert f.group == 'pdns' assert f.mode == 420 assert f.size > 10000
Fix sqlite test on CentOS
Fix sqlite test on CentOS
Python
mit
PowerDNS/pdns-ansible
import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( '.molecule/ansible_inventory').get_hosts('sqlite') + debian_os = ['debian', 'ubuntu'] + rhel_os = ['redhat', 'centos'] + - def test_package(Package): + def test_package(Package, SystemInfo): + p = None + if SystemInfo.distribution in debian_os: - p = Package('pdns-backend-sqlite3') + p = Package('pdns-backend-sqlite3') + if SystemInfo.distribution in rhel_os: + p = Package('pdns-backend-sqlite') + assert p.is_installed def test_database_exists(File): f = File('/var/lib/powerdns/pdns.db') assert f.exists assert f.user == 'pdns' assert f.group == 'pdns' assert f.mode == 420 assert f.size > 10000
Fix sqlite test on CentOS
## Code Before: import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( '.molecule/ansible_inventory').get_hosts('sqlite') def test_package(Package): p = Package('pdns-backend-sqlite3') assert p.is_installed def test_database_exists(File): f = File('/var/lib/powerdns/pdns.db') assert f.exists assert f.user == 'pdns' assert f.group == 'pdns' assert f.mode == 420 assert f.size > 10000 ## Instruction: Fix sqlite test on CentOS ## Code After: import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( '.molecule/ansible_inventory').get_hosts('sqlite') debian_os = ['debian', 'ubuntu'] rhel_os = ['redhat', 'centos'] def test_package(Package, SystemInfo): p = None if SystemInfo.distribution in debian_os: p = Package('pdns-backend-sqlite3') if SystemInfo.distribution in rhel_os: p = Package('pdns-backend-sqlite') assert p.is_installed def test_database_exists(File): f = File('/var/lib/powerdns/pdns.db') assert f.exists assert f.user == 'pdns' assert f.group == 'pdns' assert f.mode == 420 assert f.size > 10000
... '.molecule/ansible_inventory').get_hosts('sqlite') debian_os = ['debian', 'ubuntu'] rhel_os = ['redhat', 'centos'] def test_package(Package, SystemInfo): p = None if SystemInfo.distribution in debian_os: p = Package('pdns-backend-sqlite3') if SystemInfo.distribution in rhel_os: p = Package('pdns-backend-sqlite') assert p.is_installed ...
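The capitalised Package and SystemInfo fixtures used in the record above come from older testinfra releases; newer versions expose a single host fixture instead. A rough equivalent in that style — treat the exact fixture API as an assumption about the testinfra version in use:

```python
def test_package(host):
    distro = host.system_info.distribution
    if distro in ("debian", "ubuntu"):
        pkg = host.package("pdns-backend-sqlite3")
    else:  # redhat / centos
        pkg = host.package("pdns-backend-sqlite")
    assert pkg.is_installed
```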
d9b46a4d06bf6832aa5dbb394ae97325e0578400
survey/tests/test_default_settings.py
survey/tests/test_default_settings.py
from survey.tests import BaseTest from django.test import override_settings from django.conf import settings from django.test import tag from survey import set_default_settings @tag("set") @override_settings() class TestDefaultSettings(BaseTest): def test_set_choices_separator(self): url = "/admin/survey/survey/1/change/" del settings.CHOICES_SEPARATOR self.login() with self.assertRaises(AttributeError): self.client.get(url) set_default_settings() response = self.client.get(url) self.assertEqual(response.status_code, 200)
from survey.tests import BaseTest from django.test import override_settings from django.conf import settings from survey import set_default_settings from survey.exporter.tex.survey2tex import Survey2Tex @override_settings() class TestDefaultSettings(BaseTest): def test_set_choices_separator(self): url = "/admin/survey/survey/1/change/" del settings.CHOICES_SEPARATOR self.login() set_default_settings() try: self.client.get(url) except AttributeError: self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR") def test_set_root(self): del settings.ROOT set_default_settings() try: Survey2Tex.generate(self, "/") except AttributeError: self.fail("AttributeError: survey failed to set ROOT")
Add - Test for setting ROOT
Add - Test for setting ROOT
Python
agpl-3.0
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
from survey.tests import BaseTest from django.test import override_settings from django.conf import settings - from django.test import tag from survey import set_default_settings + from survey.exporter.tex.survey2tex import Survey2Tex - @tag("set") @override_settings() class TestDefaultSettings(BaseTest): def test_set_choices_separator(self): url = "/admin/survey/survey/1/change/" del settings.CHOICES_SEPARATOR self.login() - with self.assertRaises(AttributeError): + set_default_settings() + try: self.client.get(url) + except AttributeError: + self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR") + + def test_set_root(self): + del settings.ROOT set_default_settings() - response = self.client.get(url) - self.assertEqual(response.status_code, 200) + try: + Survey2Tex.generate(self, "/") + except AttributeError: + self.fail("AttributeError: survey failed to set ROOT")
Add - Test for setting ROOT
## Code Before: from survey.tests import BaseTest from django.test import override_settings from django.conf import settings from django.test import tag from survey import set_default_settings @tag("set") @override_settings() class TestDefaultSettings(BaseTest): def test_set_choices_separator(self): url = "/admin/survey/survey/1/change/" del settings.CHOICES_SEPARATOR self.login() with self.assertRaises(AttributeError): self.client.get(url) set_default_settings() response = self.client.get(url) self.assertEqual(response.status_code, 200) ## Instruction: Add - Test for setting ROOT ## Code After: from survey.tests import BaseTest from django.test import override_settings from django.conf import settings from survey import set_default_settings from survey.exporter.tex.survey2tex import Survey2Tex @override_settings() class TestDefaultSettings(BaseTest): def test_set_choices_separator(self): url = "/admin/survey/survey/1/change/" del settings.CHOICES_SEPARATOR self.login() set_default_settings() try: self.client.get(url) except AttributeError: self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR") def test_set_root(self): del settings.ROOT set_default_settings() try: Survey2Tex.generate(self, "/") except AttributeError: self.fail("AttributeError: survey failed to set ROOT")
... from django.test import override_settings from django.conf import settings from survey import set_default_settings from survey.exporter.tex.survey2tex import Survey2Tex @override_settings() class TestDefaultSettings(BaseTest): ... del settings.CHOICES_SEPARATOR self.login() set_default_settings() try: self.client.get(url) except AttributeError: self.fail("AttributeError: survey failed to set CHOICES_SEPARATOR") def test_set_root(self): del settings.ROOT set_default_settings() try: Survey2Tex.generate(self, "/") except AttributeError: self.fail("AttributeError: survey failed to set ROOT") ...
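The record never shows set_default_settings itself; judging from how both tests delete a setting and then expect the call to restore it, it presumably back-fills missing attributes on django.conf.settings. A purely hypothetical sketch — the names and default values below are illustrative, not taken from the django-survey source:

```python
from django.conf import settings

ASSUMED_DEFAULTS = {
    "CHOICES_SEPARATOR": ",",  # illustrative value
    "ROOT": "",                # illustrative value
}


def set_default_settings():
    # Fill in only the settings the host project has not defined itself.
    for name, value in ASSUMED_DEFAULTS.items():
        if not hasattr(settings, name):
            setattr(settings, name, value)
```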
39a16e50ad5f4164aed6cce58fb828cc78a9e4f3
myhome/blog/tests.py
myhome/blog/tests.py
from django.test import SimpleTestCase, Client from .models import BlogPost class BlogTestCase(SimpleTestCase): def setUp(self): BlogPost.objects.create( datetime='2014-01-01 12:00:00', title='title', content='content', live=True) def _test_get(self, url, *, ins=[], not_ins=[]): g = self.client.get(url) for in_ in ins: self.assertContains(g, in_) for nin_ in not_ins: self.assertNotContains(g, nin_) def _test_404(self, url): g = self.client.get(url) self.assertEqual(g.status_code, 404) def test_view(self): self._test_get('/blog/', ins=['title', 'content'], not_ins=['No Items']) def test_view_one(self): self._test_get('/blog/post/1/', ins=['title', 'content']) def test_view_miss(self): self._test_404('/blog/post/100/')
from test_base import MyHomeTest from .models import BlogPost class BlogTestCase(MyHomeTest): def setUp(self): BlogPost.objects.create( datetime='2014-01-01T12:00:00Z', title='livetitle', content='livecontent', live=True) BlogPost.objects.create( datetime='2014-01-01T12:00:00Z', title='hiddentitle', content='hiddencontent', live=False) def _test_404(self, url): g = self.client.get(url) self.assertEqual(g.status_code, 404) def test_view(self): self._test_get('/blog/', ins=['livetitle'], notins=['No Items', 'hiddentitle']) def test_view_one(self): self._test_get('/blog/post/1/', ins=['livetitle', 'livecontent']) def test_view_one_nonlive(self): self._test_404('/blog/post/2/') def test_view_miss(self): self._test_404('/blog/post/100/')
Adjust blog test to use the base class
Adjust blog test to use the base class
Python
mit
plumdog/myhome,plumdog/myhome,plumdog/myhome,plumdog/myhome
- from django.test import SimpleTestCase, Client + from test_base import MyHomeTest from .models import BlogPost - class BlogTestCase(SimpleTestCase): + class BlogTestCase(MyHomeTest): def setUp(self): BlogPost.objects.create( - datetime='2014-01-01 12:00:00', + datetime='2014-01-01T12:00:00Z', - title='title', + title='livetitle', - content='content', + content='livecontent', live=True) + BlogPost.objects.create( + datetime='2014-01-01T12:00:00Z', + title='hiddentitle', + content='hiddencontent', + live=False) - - def _test_get(self, url, *, ins=[], not_ins=[]): - g = self.client.get(url) - for in_ in ins: - self.assertContains(g, in_) - for nin_ in not_ins: - self.assertNotContains(g, nin_) def _test_404(self, url): g = self.client.get(url) self.assertEqual(g.status_code, 404) def test_view(self): - self._test_get('/blog/', ins=['title', 'content'], not_ins=['No Items']) + self._test_get('/blog/', ins=['livetitle'], notins=['No Items', 'hiddentitle']) def test_view_one(self): - self._test_get('/blog/post/1/', ins=['title', 'content']) + self._test_get('/blog/post/1/', ins=['livetitle', 'livecontent']) + + def test_view_one_nonlive(self): + self._test_404('/blog/post/2/') def test_view_miss(self): self._test_404('/blog/post/100/')
Adjust blog test to use the base class
## Code Before: from django.test import SimpleTestCase, Client from .models import BlogPost class BlogTestCase(SimpleTestCase): def setUp(self): BlogPost.objects.create( datetime='2014-01-01 12:00:00', title='title', content='content', live=True) def _test_get(self, url, *, ins=[], not_ins=[]): g = self.client.get(url) for in_ in ins: self.assertContains(g, in_) for nin_ in not_ins: self.assertNotContains(g, nin_) def _test_404(self, url): g = self.client.get(url) self.assertEqual(g.status_code, 404) def test_view(self): self._test_get('/blog/', ins=['title', 'content'], not_ins=['No Items']) def test_view_one(self): self._test_get('/blog/post/1/', ins=['title', 'content']) def test_view_miss(self): self._test_404('/blog/post/100/') ## Instruction: Adjust blog test to use the base class ## Code After: from test_base import MyHomeTest from .models import BlogPost class BlogTestCase(MyHomeTest): def setUp(self): BlogPost.objects.create( datetime='2014-01-01T12:00:00Z', title='livetitle', content='livecontent', live=True) BlogPost.objects.create( datetime='2014-01-01T12:00:00Z', title='hiddentitle', content='hiddencontent', live=False) def _test_404(self, url): g = self.client.get(url) self.assertEqual(g.status_code, 404) def test_view(self): self._test_get('/blog/', ins=['livetitle'], notins=['No Items', 'hiddentitle']) def test_view_one(self): self._test_get('/blog/post/1/', ins=['livetitle', 'livecontent']) def test_view_one_nonlive(self): self._test_404('/blog/post/2/') def test_view_miss(self): self._test_404('/blog/post/100/')
// ... existing code ... from test_base import MyHomeTest from .models import BlogPost class BlogTestCase(MyHomeTest): def setUp(self): BlogPost.objects.create( datetime='2014-01-01T12:00:00Z', title='livetitle', content='livecontent', live=True) BlogPost.objects.create( datetime='2014-01-01T12:00:00Z', title='hiddentitle', content='hiddencontent', live=False) def _test_404(self, url): // ... modified code ... def test_view(self): self._test_get('/blog/', ins=['livetitle'], notins=['No Items', 'hiddentitle']) def test_view_one(self): self._test_get('/blog/post/1/', ins=['livetitle', 'livecontent']) def test_view_one_nonlive(self): self._test_404('/blog/post/2/') def test_view_miss(self): // ... rest of the code ...
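The MyHomeTest base class imported in the record above is not shown, but the helper it must provide can be reconstructed from the _test_get method the old version of the test carried inline (keyword renamed to notins to match the new call sites, and TestCase rather than SimpleTestCase so the database writes in setUp are allowed). Roughly:

```python
from django.test import TestCase


class MyHomeTest(TestCase):
    def _test_get(self, url, *, ins=(), notins=()):
        response = self.client.get(url)
        for fragment in ins:
            self.assertContains(response, fragment)
        for fragment in notins:
            self.assertNotContains(response, fragment)
```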
1fe7b9c3c9a3764a1e209b2699ef51b84c87e897
setup.py
setup.py
from distutils.core import setup import os setup( name='python-jambel', version='0.1', py_module=['jambel'], url='http://github.com/jambit/python-jambel', license='UNKNOWN', author='Sebastian Rahlf', author_email='[email protected]', description="Interface to jambit's project traffic lights.", long_description=open(os.path.join(os.path.dirname(__file__), 'README.txt')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: Other/Proprietary License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ] )
from distutils.core import setup import os setup( name='python-jambel', version='0.1', py_module=['jambel'], url='http://github.com/jambit/python-jambel', license='UNKNOWN', author='Sebastian Rahlf', author_email='[email protected]', description="Interface to jambit's project traffic lights.", long_description=open(os.path.join(os.path.dirname(__file__), 'README.txt')).read(), test_requires=['pytest'], entry_points={ 'console_scripts': [ 'jambel = jambel:main', ] }, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: Other/Proprietary License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ] )
Add console script and test requirements.
Add console script and test requirements.
Python
mit
redtoad/python-jambel,jambit/python-jambel
from distutils.core import setup import os setup( name='python-jambel', version='0.1', py_module=['jambel'], url='http://github.com/jambit/python-jambel', license='UNKNOWN', author='Sebastian Rahlf', author_email='[email protected]', description="Interface to jambit's project traffic lights.", long_description=open(os.path.join(os.path.dirname(__file__), 'README.txt')).read(), + test_requires=['pytest'], + entry_points={ + 'console_scripts': [ + 'jambel = jambel:main', + ] + }, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: Other/Proprietary License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ] )
Add console script and test requirements.
## Code Before: from distutils.core import setup import os setup( name='python-jambel', version='0.1', py_module=['jambel'], url='http://github.com/jambit/python-jambel', license='UNKNOWN', author='Sebastian Rahlf', author_email='[email protected]', description="Interface to jambit's project traffic lights.", long_description=open(os.path.join(os.path.dirname(__file__), 'README.txt')).read(), classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: Other/Proprietary License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ] ) ## Instruction: Add console script and test requirements. ## Code After: from distutils.core import setup import os setup( name='python-jambel', version='0.1', py_module=['jambel'], url='http://github.com/jambit/python-jambel', license='UNKNOWN', author='Sebastian Rahlf', author_email='[email protected]', description="Interface to jambit's project traffic lights.", long_description=open(os.path.join(os.path.dirname(__file__), 'README.txt')).read(), test_requires=['pytest'], entry_points={ 'console_scripts': [ 'jambel = jambel:main', ] }, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: Other/Proprietary License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ] )
... description="Interface to jambit's project traffic lights.", long_description=open(os.path.join(os.path.dirname(__file__), 'README.txt')).read(), test_requires=['pytest'], entry_points={ 'console_scripts': [ 'jambel = jambel:main', ] }, classifiers=[ 'Development Status :: 4 - Beta', ...
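The console_scripts line in the record maps a jambel command onto a module-level main() callable in jambel.py; entry_points (and tests_require, which the record spells test_requires) are setuptools features rather than distutils ones. A minimal hypothetical skeleton of that target, with invented argument handling:

```python
import argparse


def main(argv=None):
    # Target of the console_scripts entry "jambel = jambel:main".
    parser = argparse.ArgumentParser(description="Control a jambit project light.")
    parser.add_argument("host", help="hostname or host:port of the light module")
    args = parser.parse_args(argv)
    print("would connect to", args.host)  # placeholder behaviour
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
```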
d5fb6c5320dbb6827e24dc22be08454f05aac83e
emails/tests.py
emails/tests.py
from django.test import TestCase from django.core.urlresolvers import reverse from common.util import create_admin, create_user class TestEmailRendering(TestCase): def setUp(self): self.user = create_user(username='user', password='password') self.admin = create_admin(username='admin', password='password') def test_can_get_an_example_email(self): response = self.client.get(reverse('example_email')) self.assertEqual(response.status_code, 200) self.assertContains(response, "Example email") self.assertContains(response, "The email title") def test_can_load_email_sender_if_admin(self): self.client.login(username='admin', password='password') response = self.client.get(reverse('email_sender_test')) self.assertEqual(response.status_code, 200)
from django.test import TestCase from django.core.urlresolvers import reverse from common.util import create_admin, create_user class TestEmailRendering(TestCase): def setUp(self): self.user = create_user(username='user', password='password') self.admin = create_admin(username='admin', password='password') def test_can_get_an_example_email(self): response = self.client.get(reverse('example_email')) self.assertEqual(response.status_code, 200) self.assertContains(response, "Example email") self.assertContains(response, "The email title") def test_can_load_email_sender_if_admin(self): self.client.login(username='admin', password='password') response = self.client.get(reverse('email_sender_test')) self.assertEqual(response.status_code, 200) def test_regular_users_dont_have_access_to_tester(self): self.client.login(username='user', password='password') response = self.client.get(reverse('email_sender_test')) self.assertEqual(response.status_code, 404)
Test that regular users can send test emails
Test that regular users can send test emails
Python
agpl-3.0
Turupawn/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website,Turupawn/website,lutris/website,lutris/website
from django.test import TestCase from django.core.urlresolvers import reverse from common.util import create_admin, create_user class TestEmailRendering(TestCase): def setUp(self): self.user = create_user(username='user', password='password') self.admin = create_admin(username='admin', password='password') def test_can_get_an_example_email(self): response = self.client.get(reverse('example_email')) self.assertEqual(response.status_code, 200) self.assertContains(response, "Example email") self.assertContains(response, "The email title") def test_can_load_email_sender_if_admin(self): self.client.login(username='admin', password='password') response = self.client.get(reverse('email_sender_test')) self.assertEqual(response.status_code, 200) + def test_regular_users_dont_have_access_to_tester(self): + self.client.login(username='user', password='password') + response = self.client.get(reverse('email_sender_test')) + self.assertEqual(response.status_code, 404) +
Test that regular users can send test emails
## Code Before: from django.test import TestCase from django.core.urlresolvers import reverse from common.util import create_admin, create_user class TestEmailRendering(TestCase): def setUp(self): self.user = create_user(username='user', password='password') self.admin = create_admin(username='admin', password='password') def test_can_get_an_example_email(self): response = self.client.get(reverse('example_email')) self.assertEqual(response.status_code, 200) self.assertContains(response, "Example email") self.assertContains(response, "The email title") def test_can_load_email_sender_if_admin(self): self.client.login(username='admin', password='password') response = self.client.get(reverse('email_sender_test')) self.assertEqual(response.status_code, 200) ## Instruction: Test that regular users can send test emails ## Code After: from django.test import TestCase from django.core.urlresolvers import reverse from common.util import create_admin, create_user class TestEmailRendering(TestCase): def setUp(self): self.user = create_user(username='user', password='password') self.admin = create_admin(username='admin', password='password') def test_can_get_an_example_email(self): response = self.client.get(reverse('example_email')) self.assertEqual(response.status_code, 200) self.assertContains(response, "Example email") self.assertContains(response, "The email title") def test_can_load_email_sender_if_admin(self): self.client.login(username='admin', password='password') response = self.client.get(reverse('email_sender_test')) self.assertEqual(response.status_code, 200) def test_regular_users_dont_have_access_to_tester(self): self.client.login(username='user', password='password') response = self.client.get(reverse('email_sender_test')) self.assertEqual(response.status_code, 404)
... response = self.client.get(reverse('email_sender_test')) self.assertEqual(response.status_code, 200) def test_regular_users_dont_have_access_to_tester(self): self.client.login(username='user', password='password') response = self.client.get(reverse('email_sender_test')) self.assertEqual(response.status_code, 404) ...
4f1f0b9d1643a6ff4934070472973e60b1eb6c26
tests/rules_tests/isValid_tests/NongrammarEntitiesTest.py
tests/rules_tests/isValid_tests/NongrammarEntitiesTest.py
from unittest import main, TestCase

from grammpy import Rule

from .grammar import *


class NongrammarEntitiesTest(TestCase):
    pass


if __name__ == '__main__':
    main()
from unittest import main, TestCase

from grammpy import Rule, Nonterminal as _N
from grammpy.exceptions import TerminalDoesNotExistsException, NonterminalDoesNotExistsException

from .grammar import *


class Invalid(_N):
    pass


class NongrammarEntitiesTest(TestCase):
    def test_invalidTerminal(self):
        class tmp(Rule):
            rules = [([NFifth], [5, NFirst])]
        with self.assertRaises(TerminalDoesNotExistsException):
            tmp.validate(grammar)

    def test_invalidTerminalFrom(self):
        class tmp(Rule):
            rules = [(['asdf', NFifth], [2, NFirst])]
        with self.assertRaises(TerminalDoesNotExistsException):
            tmp.validate(grammar)

    def test_invalidTerminalMultiple(self):
        class tmp(Rule):
            rules = [([TSecond, 'b', TThird], ['c', 2]),
                     ([NFifth], [5, NFirst])]
        with self.assertRaises(TerminalDoesNotExistsException):
            tmp.validate(grammar)

    def test_invalidTerminalFromMultiple(self):
        class tmp(Rule):
            rules = [([TSecond, 'b', TThird], ['c', 2]),
                     (['asdf', NFifth], [2, NFirst])]
        with self.assertRaises(TerminalDoesNotExistsException):
            tmp.validate(grammar)

    def test_invalidNonterminal(self):
        class tmp(Rule):
            rules = [([NFifth], [2, Invalid])]
        with self.assertRaises(NonterminalDoesNotExistsException):
            tmp.validate(grammar)

    def test_invalidNonterminalFrom(self):
        class tmp(Rule):
            rules = [(['a', Invalid], [2, NFirst])]
        with self.assertRaises(NonterminalDoesNotExistsException):
            tmp.validate(grammar)

    def test_invalidNonterminalMultiple(self):
        class tmp(Rule):
            rules = [([TSecond, 'b', TThird], ['c', 2]),
                     ([NFifth], [2, Invalid])]
        with self.assertRaises(NonterminalDoesNotExistsException):
            tmp.validate(grammar)

    def test_invalidNonterminalFromMultiple(self):
        class tmp(Rule):
            rules = [([TSecond, 'b', TThird], ['c', 2]),
                     (['a', Invalid], [2, NFirst])]
        with self.assertRaises(NonterminalDoesNotExistsException):
            tmp.validate(grammar)


if __name__ == '__main__':
    main()
Add tests of terminals and nonterminals that are not in grammar
Add tests of terminals and nonterminals that are not in grammar
Python
mit
PatrikValkovic/grammpy
from unittest import main, TestCase - from grammpy import Rule + from grammpy import Rule, Nonterminal as _N + from grammpy.exceptions import TerminalDoesNotExistsException, NonterminalDoesNotExistsException from .grammar import * + class Invalid(_N): + pass + + class NongrammarEntitiesTest(TestCase): - pass + def test_invalidTerminal(self): + class tmp(Rule): + rules = [([NFifth], [5, NFirst])] + with self.assertRaises(TerminalDoesNotExistsException): + tmp.validate(grammar) + + def test_invalidTerminalFrom(self): + class tmp(Rule): + rules = [(['asdf', NFifth], [2, NFirst])] + with self.assertRaises(TerminalDoesNotExistsException): + tmp.validate(grammar) + + def test_invalidTerminalMultiple(self): + class tmp(Rule): + rules = [([TSecond, 'b', TThird], ['c', 2]), + ([NFifth], [5, NFirst])] + with self.assertRaises(TerminalDoesNotExistsException): + tmp.validate(grammar) + + def test_invalidTerminalFromMultiple(self): + class tmp(Rule): + rules = [([TSecond, 'b', TThird], ['c', 2]), + (['asdf', NFifth], [2, NFirst])] + with self.assertRaises(TerminalDoesNotExistsException): + tmp.validate(grammar) + + def test_invalidNonterminal(self): + class tmp(Rule): + rules = [([NFifth], [2, Invalid])] + with self.assertRaises(NonterminalDoesNotExistsException): + tmp.validate(grammar) + + def test_invalidNonterminalFrom(self): + class tmp(Rule): + rules = [(['a', Invalid], [2, NFirst])] + with self.assertRaises(NonterminalDoesNotExistsException): + tmp.validate(grammar) + + def test_invalidNonterminalMultiple(self): + class tmp(Rule): + rules = [([TSecond, 'b', TThird], ['c', 2]), + ([NFifth], [2, Invalid])] + with self.assertRaises(NonterminalDoesNotExistsException): + tmp.validate(grammar) + + def test_invalidNonterminalFromMultiple(self): + class tmp(Rule): + rules = [([TSecond, 'b', TThird], ['c', 2]), + (['a', Invalid], [2, NFirst])] + with self.assertRaises(NonterminalDoesNotExistsException): + tmp.validate(grammar) if __name__ == '__main__': main() +
Add tests of terminals and nonterminals that are not in grammar
## Code Before: from unittest import main, TestCase from grammpy import Rule from .grammar import * class NongrammarEntitiesTest(TestCase): pass if __name__ == '__main__': main() ## Instruction: Add tests of terminals and nonterminals that are not in grammar ## Code After: from unittest import main, TestCase from grammpy import Rule, Nonterminal as _N from grammpy.exceptions import TerminalDoesNotExistsException, NonterminalDoesNotExistsException from .grammar import * class Invalid(_N): pass class NongrammarEntitiesTest(TestCase): def test_invalidTerminal(self): class tmp(Rule): rules = [([NFifth], [5, NFirst])] with self.assertRaises(TerminalDoesNotExistsException): tmp.validate(grammar) def test_invalidTerminalFrom(self): class tmp(Rule): rules = [(['asdf', NFifth], [2, NFirst])] with self.assertRaises(TerminalDoesNotExistsException): tmp.validate(grammar) def test_invalidTerminalMultiple(self): class tmp(Rule): rules = [([TSecond, 'b', TThird], ['c', 2]), ([NFifth], [5, NFirst])] with self.assertRaises(TerminalDoesNotExistsException): tmp.validate(grammar) def test_invalidTerminalFromMultiple(self): class tmp(Rule): rules = [([TSecond, 'b', TThird], ['c', 2]), (['asdf', NFifth], [2, NFirst])] with self.assertRaises(TerminalDoesNotExistsException): tmp.validate(grammar) def test_invalidNonterminal(self): class tmp(Rule): rules = [([NFifth], [2, Invalid])] with self.assertRaises(NonterminalDoesNotExistsException): tmp.validate(grammar) def test_invalidNonterminalFrom(self): class tmp(Rule): rules = [(['a', Invalid], [2, NFirst])] with self.assertRaises(NonterminalDoesNotExistsException): tmp.validate(grammar) def test_invalidNonterminalMultiple(self): class tmp(Rule): rules = [([TSecond, 'b', TThird], ['c', 2]), ([NFifth], [2, Invalid])] with self.assertRaises(NonterminalDoesNotExistsException): tmp.validate(grammar) def test_invalidNonterminalFromMultiple(self): class tmp(Rule): rules = [([TSecond, 'b', TThird], ['c', 2]), (['a', Invalid], [2, NFirst])] with self.assertRaises(NonterminalDoesNotExistsException): tmp.validate(grammar) if __name__ == '__main__': main()
// ... existing code ... from unittest import main, TestCase from grammpy import Rule, Nonterminal as _N from grammpy.exceptions import TerminalDoesNotExistsException, NonterminalDoesNotExistsException from .grammar import * class Invalid(_N): pass class NongrammarEntitiesTest(TestCase): def test_invalidTerminal(self): class tmp(Rule): rules = [([NFifth], [5, NFirst])] with self.assertRaises(TerminalDoesNotExistsException): tmp.validate(grammar) def test_invalidTerminalFrom(self): class tmp(Rule): rules = [(['asdf', NFifth], [2, NFirst])] with self.assertRaises(TerminalDoesNotExistsException): tmp.validate(grammar) def test_invalidTerminalMultiple(self): class tmp(Rule): rules = [([TSecond, 'b', TThird], ['c', 2]), ([NFifth], [5, NFirst])] with self.assertRaises(TerminalDoesNotExistsException): tmp.validate(grammar) def test_invalidTerminalFromMultiple(self): class tmp(Rule): rules = [([TSecond, 'b', TThird], ['c', 2]), (['asdf', NFifth], [2, NFirst])] with self.assertRaises(TerminalDoesNotExistsException): tmp.validate(grammar) def test_invalidNonterminal(self): class tmp(Rule): rules = [([NFifth], [2, Invalid])] with self.assertRaises(NonterminalDoesNotExistsException): tmp.validate(grammar) def test_invalidNonterminalFrom(self): class tmp(Rule): rules = [(['a', Invalid], [2, NFirst])] with self.assertRaises(NonterminalDoesNotExistsException): tmp.validate(grammar) def test_invalidNonterminalMultiple(self): class tmp(Rule): rules = [([TSecond, 'b', TThird], ['c', 2]), ([NFifth], [2, Invalid])] with self.assertRaises(NonterminalDoesNotExistsException): tmp.validate(grammar) def test_invalidNonterminalFromMultiple(self): class tmp(Rule): rules = [([TSecond, 'b', TThird], ['c', 2]), (['a', Invalid], [2, NFirst])] with self.assertRaises(NonterminalDoesNotExistsException): tmp.validate(grammar) // ... rest of the code ...
3d3319b96475f40de6dd4e4cf39cdae323fd3b3d
arcutils/templatetags/arc.py
arcutils/templatetags/arc.py
from bootstrapform.templatetags.bootstrap import *
from django.template import Template, Context, Library
from django.template.loader import get_template
from django.utils.safestring import mark_safe

register = Library()
from django import template
from django.template.defaulttags import url
from django.core.urlresolvers import reverse
from django.conf import settings
from django.template import Node, Variable, VariableDoesNotExist
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe

register = template.Library()


@register.filter
def model_name(cls):
    """Given a model class, this returns its verbose name"""
    return cls._meta.verbose_name.title()


@register.tag
def full_url(parser, token):
    """Spits out the full URL"""
    url_node = url(parser, token)
    f = url_node.render
    url_node.render = lambda context: _get_host_from_context(context) + f(context)
    return url_node


def _get_host_from_context(context):
    """
    Returns the hostname from context or the settings.HOSTNAME or
    settings.HOST_NAME variables
    """
    try:
        request = Variable('request.HTTP_HOST').resolve(context)
    except VariableDoesNotExist:
        request = ""
    return request or getattr(settings, "HOSTNAME", "") or getattr(settings, "HOST_NAME", "")


class AddGetParameter(Node):
    def __init__(self, values):
        self.values = values

    def render(self, context):
        req = Variable('request').resolve(context)
        params = req.GET.copy()
        for key, value in self.values.items():
            params[key] = value.resolve(context)
        return '?%s' % params.urlencode()


@register.tag
def add_get(parser, token):
    """
    The tag generates a parameter string in form '?param1=val1&param2=val2'.
    The parameter list is generated by taking all parameters from current
    request.GET and optionally overriding them by providing parameters to the tag.

    This is a cleaned up version of http://djangosnippets.org/snippets/2105/. It
    solves a couple of issues, namely:
    * parameters are optional
    * parameters can have values from request, e.g. request.GET.foo
    * native parsing methods are used for better compatibility and readability
    * shorter tag name

    Usage: place this code in your appdir/templatetags/add_get_parameter.py
    In template:
    {% load add_get_parameter %}
    <a href="{% add_get param1='const' param2=variable_in_context %}">
    Link with modified params
    </a>

    It's required that you have 'django.core.context_processors.request' in
    TEMPLATE_CONTEXT_PROCESSORS

    Original version's URL: http://django.mar.lt/2010/07/add-get-parameter-tag.html
    """
    pairs = token.split_contents()[1:]
    values = {}
    for pair in pairs:
        s = pair.split('=', 1)
        values[s[0]] = parser.compile_filter(s[1])
    return AddGetParameter(values)
Add a bunch of template tags
Add a bunch of template tags
Python
mit
PSU-OIT-ARC/django-arcutils,wylee/django-arcutils,PSU-OIT-ARC/django-arcutils,kfarr2/django-arcutils,wylee/django-arcutils,mdj2/django-arcutils,mdj2/django-arcutils
- from bootstrapform.templatetags.bootstrap import * - from django.template import Template, Context, Library - from django.template.loader import get_template + from django import template + from django.template.defaulttags import url + from django.core.urlresolvers import reverse + from django.conf import settings + from django.template import Node, Variable, VariableDoesNotExist + from django.utils.html import conditional_escape from django.utils.safestring import mark_safe - register = Library() + register = template.Library() + @register.filter + def model_name(cls): + """Given a model class, this returns its verbose name""" + return cls._meta.verbose_name.title() + + + @register.tag + def full_url(parser, token): + """Spits out the full URL""" + url_node = url(parser, token) + f = url_node.render + url_node.render = lambda context: _get_host_from_context(context) + f(context) + return url_node + + + def _get_host_from_context(context): + """ + Returns the hostname from context or the settings.HOSTNAME or + settings.HOST_NAME variables + """ + try: + request = Variable('request.HTTP_HOST').resolve(context) + except VariableDoesNotExist: + request = "" + return request or getattr(settings, "HOSTNAME", "") or getattr(settings, "HOST_NAME", "") + + + class AddGetParameter(Node): + def __init__(self, values): + self.values = values + + def render(self, context): + req = Variable('request').resolve(context) + params = req.GET.copy() + for key, value in self.values.items(): + params[key] = value.resolve(context) + return '?%s' % params.urlencode() + + + @register.tag + def add_get(parser, token): + """ + The tag generates a parameter string in form '?param1=val1&param2=val2'. + The parameter list is generated by taking all parameters from current + request.GET and optionally overriding them by providing parameters to the tag. + + This is a cleaned up version of http://djangosnippets.org/snippets/2105/. It + solves a couple of issues, namely: + * parameters are optional + * parameters can have values from request, e.g. request.GET.foo + * native parsing methods are used for better compatibility and readability + * shorter tag name + + Usage: place this code in your appdir/templatetags/add_get_parameter.py + In template: + {% load add_get_parameter %} + <a href="{% add_get param1='const' param2=variable_in_context %}"> + Link with modified params + </a> + + It's required that you have 'django.core.context_processors.request' in + TEMPLATE_CONTEXT_PROCESSORS + + Original version's URL: http://django.mar.lt/2010/07/add-get-parameter-tag.html + """ + pairs = token.split_contents()[1:] + values = {} + for pair in pairs: + s = pair.split('=', 1) + values[s[0]] = parser.compile_filter(s[1]) + return AddGetParameter(values) +
Add a bunch of template tags
## Code Before: from bootstrapform.templatetags.bootstrap import * from django.template import Template, Context, Library from django.template.loader import get_template from django.utils.safestring import mark_safe register = Library() ## Instruction: Add a bunch of template tags ## Code After: from django import template from django.template.defaulttags import url from django.core.urlresolvers import reverse from django.conf import settings from django.template import Node, Variable, VariableDoesNotExist from django.utils.html import conditional_escape from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_name(cls): """Given a model class, this returns its verbose name""" return cls._meta.verbose_name.title() @register.tag def full_url(parser, token): """Spits out the full URL""" url_node = url(parser, token) f = url_node.render url_node.render = lambda context: _get_host_from_context(context) + f(context) return url_node def _get_host_from_context(context): """ Returns the hostname from context or the settings.HOSTNAME or settings.HOST_NAME variables """ try: request = Variable('request.HTTP_HOST').resolve(context) except VariableDoesNotExist: request = "" return request or getattr(settings, "HOSTNAME", "") or getattr(settings, "HOST_NAME", "") class AddGetParameter(Node): def __init__(self, values): self.values = values def render(self, context): req = Variable('request').resolve(context) params = req.GET.copy() for key, value in self.values.items(): params[key] = value.resolve(context) return '?%s' % params.urlencode() @register.tag def add_get(parser, token): """ The tag generates a parameter string in form '?param1=val1&param2=val2'. The parameter list is generated by taking all parameters from current request.GET and optionally overriding them by providing parameters to the tag. This is a cleaned up version of http://djangosnippets.org/snippets/2105/. It solves a couple of issues, namely: * parameters are optional * parameters can have values from request, e.g. request.GET.foo * native parsing methods are used for better compatibility and readability * shorter tag name Usage: place this code in your appdir/templatetags/add_get_parameter.py In template: {% load add_get_parameter %} <a href="{% add_get param1='const' param2=variable_in_context %}"> Link with modified params </a> It's required that you have 'django.core.context_processors.request' in TEMPLATE_CONTEXT_PROCESSORS Original version's URL: http://django.mar.lt/2010/07/add-get-parameter-tag.html """ pairs = token.split_contents()[1:] values = {} for pair in pairs: s = pair.split('=', 1) values[s[0]] = parser.compile_filter(s[1]) return AddGetParameter(values)
// ... existing code ... from django import template from django.template.defaulttags import url from django.core.urlresolvers import reverse from django.conf import settings from django.template import Node, Variable, VariableDoesNotExist from django.utils.html import conditional_escape from django.utils.safestring import mark_safe register = template.Library() @register.filter def model_name(cls): """Given a model class, this returns its verbose name""" return cls._meta.verbose_name.title() @register.tag def full_url(parser, token): """Spits out the full URL""" url_node = url(parser, token) f = url_node.render url_node.render = lambda context: _get_host_from_context(context) + f(context) return url_node def _get_host_from_context(context): """ Returns the hostname from context or the settings.HOSTNAME or settings.HOST_NAME variables """ try: request = Variable('request.HTTP_HOST').resolve(context) except VariableDoesNotExist: request = "" return request or getattr(settings, "HOSTNAME", "") or getattr(settings, "HOST_NAME", "") class AddGetParameter(Node): def __init__(self, values): self.values = values def render(self, context): req = Variable('request').resolve(context) params = req.GET.copy() for key, value in self.values.items(): params[key] = value.resolve(context) return '?%s' % params.urlencode() @register.tag def add_get(parser, token): """ The tag generates a parameter string in form '?param1=val1&param2=val2'. The parameter list is generated by taking all parameters from current request.GET and optionally overriding them by providing parameters to the tag. This is a cleaned up version of http://djangosnippets.org/snippets/2105/. It solves a couple of issues, namely: * parameters are optional * parameters can have values from request, e.g. request.GET.foo * native parsing methods are used for better compatibility and readability * shorter tag name Usage: place this code in your appdir/templatetags/add_get_parameter.py In template: {% load add_get_parameter %} <a href="{% add_get param1='const' param2=variable_in_context %}"> Link with modified params </a> It's required that you have 'django.core.context_processors.request' in TEMPLATE_CONTEXT_PROCESSORS Original version's URL: http://django.mar.lt/2010/07/add-get-parameter-tag.html """ pairs = token.split_contents()[1:] values = {} for pair in pairs: s = pair.split('=', 1) values[s[0]] = parser.compile_filter(s[1]) return AddGetParameter(values) // ... rest of the code ...
e652e57be097949d06acd06cef813fd28a45afc2
base_report_auto_create_qweb/__manifest__.py
base_report_auto_create_qweb/__manifest__.py
{ "name": "Report qweb auto generation", "version": "9.0.1.0.0", "depends": [ "report", ], "external_dependencies": { "python": [ "unidecode", ], }, "author": "OdooMRP team, " "AvanzOSC, " "Serv. Tecnol. Avanzados - Pedro M. Baeza, " "Odoo Community Association (OCA), ", "website": "http://www.odoomrp.com", "license": "AGPL-3", "contributors": [ "Oihane Crucelaegui <[email protected]>", "Pedro M. Baeza <[email protected]>", "Ana Juaristi <[email protected]>", ], "category": "Tools", "data": [ "wizard/report_duplicate_view.xml", "views/report_xml_view.xml", ], 'installable': False, }
{ "name": "Report qweb auto generation", "version": "9.0.1.0.0", "depends": [ "report", ], "external_dependencies": { "python": [ "unidecode", ], }, "author": "AvanzOSC, " "Tecnativa, " "Odoo Community Association (OCA), ", "website": "https://github.com/OCA/server-tools", "license": "AGPL-3", "contributors": [ "Oihane Crucelaegui <[email protected]>", "Pedro M. Baeza <[email protected]>", "Ana Juaristi <[email protected]>", ], "category": "Tools", "data": [ "wizard/report_duplicate_view.xml", "views/report_xml_view.xml", ], 'installable': False, }
Change authors to new ones
base_report_auto_create_qweb: Change authors to new ones
Python
agpl-3.0
ovnicraft/server-tools,ovnicraft/server-tools,ovnicraft/server-tools
{ "name": "Report qweb auto generation", "version": "9.0.1.0.0", "depends": [ "report", ], "external_dependencies": { "python": [ "unidecode", ], }, - "author": "OdooMRP team, " + "author": "AvanzOSC, " - "AvanzOSC, " + "Tecnativa, " - "Serv. Tecnol. Avanzados - Pedro M. Baeza, " "Odoo Community Association (OCA), ", - "website": "http://www.odoomrp.com", + "website": "https://github.com/OCA/server-tools", "license": "AGPL-3", "contributors": [ "Oihane Crucelaegui <[email protected]>", "Pedro M. Baeza <[email protected]>", "Ana Juaristi <[email protected]>", ], "category": "Tools", "data": [ "wizard/report_duplicate_view.xml", "views/report_xml_view.xml", ], 'installable': False, }
Change authors to new ones
## Code Before: { "name": "Report qweb auto generation", "version": "9.0.1.0.0", "depends": [ "report", ], "external_dependencies": { "python": [ "unidecode", ], }, "author": "OdooMRP team, " "AvanzOSC, " "Serv. Tecnol. Avanzados - Pedro M. Baeza, " "Odoo Community Association (OCA), ", "website": "http://www.odoomrp.com", "license": "AGPL-3", "contributors": [ "Oihane Crucelaegui <[email protected]>", "Pedro M. Baeza <[email protected]>", "Ana Juaristi <[email protected]>", ], "category": "Tools", "data": [ "wizard/report_duplicate_view.xml", "views/report_xml_view.xml", ], 'installable': False, } ## Instruction: Change authors to new ones ## Code After: { "name": "Report qweb auto generation", "version": "9.0.1.0.0", "depends": [ "report", ], "external_dependencies": { "python": [ "unidecode", ], }, "author": "AvanzOSC, " "Tecnativa, " "Odoo Community Association (OCA), ", "website": "https://github.com/OCA/server-tools", "license": "AGPL-3", "contributors": [ "Oihane Crucelaegui <[email protected]>", "Pedro M. Baeza <[email protected]>", "Ana Juaristi <[email protected]>", ], "category": "Tools", "data": [ "wizard/report_duplicate_view.xml", "views/report_xml_view.xml", ], 'installable': False, }
... ], }, "author": "AvanzOSC, " "Tecnativa, " "Odoo Community Association (OCA), ", "website": "https://github.com/OCA/server-tools", "license": "AGPL-3", "contributors": [ ...
e817f726c20ccf40cd43d4e6cf36235187a27c20
objects/utils.py
objects/utils.py
"""Utils module.""" from inspect import isclass from .errors import Error def is_provider(instance): """Check if instance is provider instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_PROVIDER__')) def ensure_is_provider(instance): """Check if instance is provider instance, otherwise raise and error.""" if not is_provider(instance): raise Error('Expected provider instance, ' 'got {0}'.format(str(instance))) return instance def is_injection(instance): """Check if instance is injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_INJECTION__')) def is_init_arg_injection(instance): """Check if instance is init arg injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_INIT_ARG_INJECTION__')) def is_attribute_injection(instance): """Check if instance is attribute injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_ATTRIBUTE_INJECTION__')) def is_method_injection(instance): """Check if instance is method injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_METHOD_INJECTION__'))
"""Utils module.""" from six import class_types from .errors import Error def is_provider(instance): """Check if instance is provider instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_PROVIDER__')) def ensure_is_provider(instance): """Check if instance is provider instance, otherwise raise and error.""" if not is_provider(instance): raise Error('Expected provider instance, ' 'got {0}'.format(str(instance))) return instance def is_injection(instance): """Check if instance is injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INJECTION__')) def is_init_arg_injection(instance): """Check if instance is init arg injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INIT_ARG_INJECTION__')) def is_attribute_injection(instance): """Check if instance is attribute injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_ATTRIBUTE_INJECTION__')) def is_method_injection(instance): """Check if instance is method injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_METHOD_INJECTION__'))
Fix of bug in Python 2.6 with failed isclass check in inspect module
Fix of bug in Python 2.6 with failed isclass check in inspect module
Python
bsd-3-clause
rmk135/dependency_injector,ets-labs/dependency_injector,ets-labs/python-dependency-injector,rmk135/objects
"""Utils module.""" - from inspect import isclass + from six import class_types from .errors import Error def is_provider(instance): """Check if instance is provider instance.""" - return (not isclass(instance) and + return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_PROVIDER__')) def ensure_is_provider(instance): """Check if instance is provider instance, otherwise raise and error.""" if not is_provider(instance): raise Error('Expected provider instance, ' 'got {0}'.format(str(instance))) return instance def is_injection(instance): """Check if instance is injection instance.""" - return (not isclass(instance) and + return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INJECTION__')) def is_init_arg_injection(instance): """Check if instance is init arg injection instance.""" - return (not isclass(instance) and + return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INIT_ARG_INJECTION__')) def is_attribute_injection(instance): """Check if instance is attribute injection instance.""" - return (not isclass(instance) and + return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_ATTRIBUTE_INJECTION__')) def is_method_injection(instance): """Check if instance is method injection instance.""" - return (not isclass(instance) and + return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_METHOD_INJECTION__'))
Fix of bug in Python 2.6 with failed isclass check in inspect module
## Code Before: """Utils module.""" from inspect import isclass from .errors import Error def is_provider(instance): """Check if instance is provider instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_PROVIDER__')) def ensure_is_provider(instance): """Check if instance is provider instance, otherwise raise and error.""" if not is_provider(instance): raise Error('Expected provider instance, ' 'got {0}'.format(str(instance))) return instance def is_injection(instance): """Check if instance is injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_INJECTION__')) def is_init_arg_injection(instance): """Check if instance is init arg injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_INIT_ARG_INJECTION__')) def is_attribute_injection(instance): """Check if instance is attribute injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_ATTRIBUTE_INJECTION__')) def is_method_injection(instance): """Check if instance is method injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_METHOD_INJECTION__')) ## Instruction: Fix of bug in Python 2.6 with failed isclass check in inspect module ## Code After: """Utils module.""" from six import class_types from .errors import Error def is_provider(instance): """Check if instance is provider instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_PROVIDER__')) def ensure_is_provider(instance): """Check if instance is provider instance, otherwise raise and error.""" if not is_provider(instance): raise Error('Expected provider instance, ' 'got {0}'.format(str(instance))) return instance def is_injection(instance): """Check if instance is injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INJECTION__')) def is_init_arg_injection(instance): """Check if instance is init arg injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INIT_ARG_INJECTION__')) def is_attribute_injection(instance): """Check if instance is attribute injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_ATTRIBUTE_INJECTION__')) def is_method_injection(instance): """Check if instance is method injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_METHOD_INJECTION__'))
... """Utils module.""" from six import class_types from .errors import Error ... def is_provider(instance): """Check if instance is provider instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_PROVIDER__')) ... def is_injection(instance): """Check if instance is injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INJECTION__')) ... def is_init_arg_injection(instance): """Check if instance is init arg injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INIT_ARG_INJECTION__')) ... def is_attribute_injection(instance): """Check if instance is attribute injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_ATTRIBUTE_INJECTION__')) ... def is_method_injection(instance): """Check if instance is method injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_METHOD_INJECTION__')) ...
157d5c2f680134fa5b9f4f69320259416c46f44b
tp/netlib/objects/Order_Probe.py
tp/netlib/objects/Order_Probe.py
import copy

from Order import Order

class Order_Probe(Order):
    no = 34

    def __init__(self, sequence, \
            id, slot, type, \
            *args, **kw):
        self.no = 34

        apply(Order.__init__, (self, sequence, id, slot, type, -1, [])+args, kw)
import copy

from Order import Order

class Order_Probe(Order):
    no = 34

    def __init__(self, *args, **kw):
        self.no = 34

        Order.__init__(self, *args, **kw)
Fix the Order Probe message for the new order stuff.
Fix the Order Probe message for the new order stuff.
Python
lgpl-2.1
thousandparsec/libtpproto-py,thousandparsec/libtpproto-py
import copy from Order import Order class Order_Probe(Order): no = 34 + def __init__(self, *args, **kw): - def __init__(self, sequence, \ - id, slot, type, \ - *args, **kw): self.no = 34 - apply(Order.__init__, (self, sequence, id, slot, type, -1, [])+args, kw) + Order.__init__(self, *args, **kw)
Fix the Order Probe message for the new order stuff.
## Code Before: import copy from Order import Order class Order_Probe(Order): no = 34 def __init__(self, sequence, \ id, slot, type, \ *args, **kw): self.no = 34 apply(Order.__init__, (self, sequence, id, slot, type, -1, [])+args, kw) ## Instruction: Fix the Order Probe message for the new order stuff. ## Code After: import copy from Order import Order class Order_Probe(Order): no = 34 def __init__(self, *args, **kw): self.no = 34 Order.__init__(self, *args, **kw)
... class Order_Probe(Order): no = 34 def __init__(self, *args, **kw): self.no = 34 Order.__init__(self, *args, **kw) ...
d8e5dce3489817a5065c045688b03f9e85c0b9a4
tests/data_structures/commons/binary_search_tree_unit_test.py
tests/data_structures/commons/binary_search_tree_unit_test.py
import unittest

from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree


class BinarySearchTreeUnitTest(unittest.TestCase):
    def test_binarySearchTree(self):
        bst = BinarySearchTree.create()
        bst.put("one", 1)
        bst.put("two", 2)
        bst.put("three", 3)
        bst.put("six", 6)
        bst.put("ten", 10)

        self.assertEqual(1, bst.get("one"))
        self.assertEqual(2, bst.get("two"))
        self.assertEqual(3, bst.get("three"))

        self.assertTrue(bst.contains_key("one"))
        self.assertTrue(bst.contains_key("two"))

        self.assertEqual(5, bst.size())
        self.assertFalse(bst.is_empty())

        bst.delete("one")
        self.assertFalse(bst.contains_key("one"))
        self.assertEqual(4, bst.size())

        bst.delete("ten")
        self.assertFalse(bst.contains_key("ten"))
        self.assertEqual(3, bst.size())

        bst.delete("three")
        self.assertFalse(bst.contains_key("three"))
        self.assertEqual(2, bst.size())


if __name__ == '__main__':
    unittest.main()
import unittest

from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree


class BinarySearchTreeUnitTest(unittest.TestCase):
    def test_binarySearchTree(self):
        bst = BinarySearchTree.create()
        bst.put("one", 1)
        bst.put("two", 2)
        bst.put("three", 3)
        bst.put("six", 6)
        bst.put("ten", 10)
        bst.put("ten", 10)

        self.assertEqual(1, bst.get("one"))
        self.assertEqual(2, bst.get("two"))
        self.assertEqual(3, bst.get("three"))

        self.assertTrue(bst.contains_key("one"))
        self.assertTrue(bst.contains_key("two"))

        self.assertEqual(5, bst.size())
        self.assertFalse(bst.is_empty())

        bst.delete("one")
        self.assertFalse(bst.contains_key("one"))
        self.assertEqual(4, bst.size())

        bst.delete("ten")
        self.assertFalse(bst.contains_key("ten"))
        self.assertEqual(3, bst.size())

        bst.delete("three")
        self.assertFalse(bst.contains_key("three"))
        self.assertEqual(2, bst.size())

        for i in range(100):
            bst.put(str(i), i)
            self.assertEqual(i, bst.get(str(i)))

        for i in range(100):
            bst.delete(str(i))
            self.assertFalse(bst.contains_key(str(i)))


if __name__ == '__main__':
    unittest.main()
Increase the unit test coverage for the binary search tree
Increase the unit test coverage for the binary search tree
Python
bsd-3-clause
chen0040/pyalgs
import unittest from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree class BinarySearchTreeUnitTest(unittest.TestCase): def test_binarySearchTree(self): bst = BinarySearchTree.create() bst.put("one", 1) bst.put("two", 2) bst.put("three", 3) bst.put("six", 6) + bst.put("ten", 10) bst.put("ten", 10) self.assertEqual(1, bst.get("one")) self.assertEqual(2, bst.get("two")) self.assertEqual(3, bst.get("three")) self.assertTrue(bst.contains_key("one")) self.assertTrue(bst.contains_key("two")) self.assertEqual(5, bst.size()) self.assertFalse(bst.is_empty()) bst.delete("one") self.assertFalse(bst.contains_key("one")) self.assertEqual(4, bst.size()) bst.delete("ten") self.assertFalse(bst.contains_key("ten")) self.assertEqual(3, bst.size()) bst.delete("three") self.assertFalse(bst.contains_key("three")) self.assertEqual(2, bst.size()) + for i in range(100): + bst.put(str(i), i) + self.assertEqual(i, bst.get(str(i))) + + for i in range(100): + bst.delete(str(i)) + self.assertFalse(bst.contains_key(str(i))) + if __name__ == '__main__': unittest.main()
Increase the unit test coverage for the binary search tree
## Code Before: import unittest from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree class BinarySearchTreeUnitTest(unittest.TestCase): def test_binarySearchTree(self): bst = BinarySearchTree.create() bst.put("one", 1) bst.put("two", 2) bst.put("three", 3) bst.put("six", 6) bst.put("ten", 10) self.assertEqual(1, bst.get("one")) self.assertEqual(2, bst.get("two")) self.assertEqual(3, bst.get("three")) self.assertTrue(bst.contains_key("one")) self.assertTrue(bst.contains_key("two")) self.assertEqual(5, bst.size()) self.assertFalse(bst.is_empty()) bst.delete("one") self.assertFalse(bst.contains_key("one")) self.assertEqual(4, bst.size()) bst.delete("ten") self.assertFalse(bst.contains_key("ten")) self.assertEqual(3, bst.size()) bst.delete("three") self.assertFalse(bst.contains_key("three")) self.assertEqual(2, bst.size()) if __name__ == '__main__': unittest.main() ## Instruction: Increase the unit test coverage for the binary search tree ## Code After: import unittest from pyalgs.data_structures.commons.binary_search_tree import BinarySearchTree class BinarySearchTreeUnitTest(unittest.TestCase): def test_binarySearchTree(self): bst = BinarySearchTree.create() bst.put("one", 1) bst.put("two", 2) bst.put("three", 3) bst.put("six", 6) bst.put("ten", 10) bst.put("ten", 10) self.assertEqual(1, bst.get("one")) self.assertEqual(2, bst.get("two")) self.assertEqual(3, bst.get("three")) self.assertTrue(bst.contains_key("one")) self.assertTrue(bst.contains_key("two")) self.assertEqual(5, bst.size()) self.assertFalse(bst.is_empty()) bst.delete("one") self.assertFalse(bst.contains_key("one")) self.assertEqual(4, bst.size()) bst.delete("ten") self.assertFalse(bst.contains_key("ten")) self.assertEqual(3, bst.size()) bst.delete("three") self.assertFalse(bst.contains_key("three")) self.assertEqual(2, bst.size()) for i in range(100): bst.put(str(i), i) self.assertEqual(i, bst.get(str(i))) for i in range(100): bst.delete(str(i)) self.assertFalse(bst.contains_key(str(i))) if __name__ == '__main__': unittest.main()
// ... existing code ... bst.put("three", 3) bst.put("six", 6) bst.put("ten", 10) bst.put("ten", 10) // ... modified code ... self.assertEqual(2, bst.size()) for i in range(100): bst.put(str(i), i) self.assertEqual(i, bst.get(str(i))) for i in range(100): bst.delete(str(i)) self.assertFalse(bst.contains_key(str(i))) if __name__ == '__main__': unittest.main() // ... rest of the code ...
4eb043cfb0f2535a1dca37927323155b7d3f363e
dynamic_rest/links.py
dynamic_rest/links.py
"""This module contains utilities to support API links.""" from django.utils import six from dynamic_rest.conf import settings from .routers import DynamicRouter def merge_link_object(serializer, data, instance): """Add a 'links' attribute to the data that maps field names to URLs. NOTE: This is the format that Ember Data supports, but alternative implementations are possible to support other formats. """ link_object = {} if not getattr(instance, 'pk', None): # If instance doesn't have a `pk` field, we'll assume it doesn't # have a canonical resource URL to hang a link off of. # This generally only affectes Ephemeral Objects. return data link_fields = serializer.get_link_fields() for name, field in six.iteritems(link_fields): # For included fields, omit link if there's no data. if name in data and not data[name]: continue link = getattr(field, 'link', None) if link is None: base_url = '' if settings.ENABLE_HOST_RELATIVE_LINKS: # if the resource isn't registered, this will default back to # using resource-relative urls for links. base_url = DynamicRouter.get_canonical_path( serializer.get_resource_key(), instance.pk ) or '' link = '%s%s/' % (base_url, name) # Default to DREST-generated relation endpoints. elif callable(link): link = link(name, field, data, instance) link_object[name] = link if link_object: data['links'] = link_object return data
"""This module contains utilities to support API links.""" from django.utils import six from dynamic_rest.conf import settings from dynamic_rest.routers import DynamicRouter def merge_link_object(serializer, data, instance): """Add a 'links' attribute to the data that maps field names to URLs. NOTE: This is the format that Ember Data supports, but alternative implementations are possible to support other formats. """ link_object = {} if not getattr(instance, 'pk', None): # If instance doesn't have a `pk` field, we'll assume it doesn't # have a canonical resource URL to hang a link off of. # This generally only affectes Ephemeral Objects. return data link_fields = serializer.get_link_fields() for name, field in six.iteritems(link_fields): # For included fields, omit link if there's no data. if name in data and not data[name]: continue link = getattr(field, 'link', None) if link is None: base_url = '' if settings.ENABLE_HOST_RELATIVE_LINKS: # if the resource isn't registered, this will default back to # using resource-relative urls for links. base_url = DynamicRouter.get_canonical_path( serializer.get_resource_key(), instance.pk ) or '' link = '%s%s/' % (base_url, name) # Default to DREST-generated relation endpoints. elif callable(link): link = link(name, field, data, instance) link_object[name] = link if link_object: data['links'] = link_object return data
Fix some sorting thing for isort
Fix some sorting thing for isort
Python
mit
sanoma/dynamic-rest,AltSchool/dynamic-rest,sanoma/dynamic-rest,AltSchool/dynamic-rest
"""This module contains utilities to support API links.""" from django.utils import six + from dynamic_rest.conf import settings - from .routers import DynamicRouter + from dynamic_rest.routers import DynamicRouter def merge_link_object(serializer, data, instance): """Add a 'links' attribute to the data that maps field names to URLs. NOTE: This is the format that Ember Data supports, but alternative implementations are possible to support other formats. """ link_object = {} if not getattr(instance, 'pk', None): # If instance doesn't have a `pk` field, we'll assume it doesn't # have a canonical resource URL to hang a link off of. # This generally only affectes Ephemeral Objects. return data link_fields = serializer.get_link_fields() for name, field in six.iteritems(link_fields): # For included fields, omit link if there's no data. if name in data and not data[name]: continue link = getattr(field, 'link', None) if link is None: base_url = '' if settings.ENABLE_HOST_RELATIVE_LINKS: # if the resource isn't registered, this will default back to # using resource-relative urls for links. base_url = DynamicRouter.get_canonical_path( serializer.get_resource_key(), instance.pk ) or '' link = '%s%s/' % (base_url, name) # Default to DREST-generated relation endpoints. elif callable(link): link = link(name, field, data, instance) link_object[name] = link if link_object: data['links'] = link_object return data
Fix some sorting thing for isort
## Code Before: """This module contains utilities to support API links.""" from django.utils import six from dynamic_rest.conf import settings from .routers import DynamicRouter def merge_link_object(serializer, data, instance): """Add a 'links' attribute to the data that maps field names to URLs. NOTE: This is the format that Ember Data supports, but alternative implementations are possible to support other formats. """ link_object = {} if not getattr(instance, 'pk', None): # If instance doesn't have a `pk` field, we'll assume it doesn't # have a canonical resource URL to hang a link off of. # This generally only affectes Ephemeral Objects. return data link_fields = serializer.get_link_fields() for name, field in six.iteritems(link_fields): # For included fields, omit link if there's no data. if name in data and not data[name]: continue link = getattr(field, 'link', None) if link is None: base_url = '' if settings.ENABLE_HOST_RELATIVE_LINKS: # if the resource isn't registered, this will default back to # using resource-relative urls for links. base_url = DynamicRouter.get_canonical_path( serializer.get_resource_key(), instance.pk ) or '' link = '%s%s/' % (base_url, name) # Default to DREST-generated relation endpoints. elif callable(link): link = link(name, field, data, instance) link_object[name] = link if link_object: data['links'] = link_object return data ## Instruction: Fix some sorting thing for isort ## Code After: """This module contains utilities to support API links.""" from django.utils import six from dynamic_rest.conf import settings from dynamic_rest.routers import DynamicRouter def merge_link_object(serializer, data, instance): """Add a 'links' attribute to the data that maps field names to URLs. NOTE: This is the format that Ember Data supports, but alternative implementations are possible to support other formats. """ link_object = {} if not getattr(instance, 'pk', None): # If instance doesn't have a `pk` field, we'll assume it doesn't # have a canonical resource URL to hang a link off of. # This generally only affectes Ephemeral Objects. return data link_fields = serializer.get_link_fields() for name, field in six.iteritems(link_fields): # For included fields, omit link if there's no data. if name in data and not data[name]: continue link = getattr(field, 'link', None) if link is None: base_url = '' if settings.ENABLE_HOST_RELATIVE_LINKS: # if the resource isn't registered, this will default back to # using resource-relative urls for links. base_url = DynamicRouter.get_canonical_path( serializer.get_resource_key(), instance.pk ) or '' link = '%s%s/' % (base_url, name) # Default to DREST-generated relation endpoints. elif callable(link): link = link(name, field, data, instance) link_object[name] = link if link_object: data['links'] = link_object return data
# ... existing code ... """This module contains utilities to support API links.""" from django.utils import six from dynamic_rest.conf import settings from dynamic_rest.routers import DynamicRouter # ... rest of the code ...
c69e18a4dd324b8d32fb3d5c74bd011c7fa081d6
waybackpack/session.py
waybackpack/session.py
from .settings import DEFAULT_USER_AGENT
import requests
import time
import logging

logger = logging.getLogger(__name__)


class Session(object):
    def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT):
        self.follow_redirects = follow_redirects
        self.user_agent = user_agent

    def get(self, url, **kwargs):
        headers = { "User-Agent": self.user_agent }
        response_is_final = False
        while (response_is_final == False):
            res = requests.get(
                url,
                allow_redirects=self.follow_redirects,
                headers=headers,
                **kwargs
            )
            if res.status_code != 200:
                logger.info("HTTP status code: {0}".format(res.status_code))
            if int(res.status_code / 100) == 5:
                logger.info("Waiting 1 second before retrying.")
                time.sleep(1)
                continue
            else:
                response_is_final = True
        return res
from .settings import DEFAULT_USER_AGENT
import requests
import time
import logging

logger = logging.getLogger(__name__)


class Session(object):
    def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT):
        self.follow_redirects = follow_redirects
        self.user_agent = user_agent

    def get(self, url, **kwargs):
        headers = {
            "User-Agent": self.user_agent,
        }
        response_is_final = False
        while (response_is_final == False):
            res = requests.get(
                url,
                allow_redirects=self.follow_redirects,
                headers=headers,
                stream=True,
                **kwargs
            )
            if res.status_code != 200:
                logger.info("HTTP status code: {0}".format(res.status_code))
            if int(res.status_code / 100) == 5:
                logger.info("Waiting 1 second before retrying.")
                time.sleep(1)
                continue
            else:
                response_is_final = True
        return res
Add stream=True to requests params
Add stream=True to requests params
Python
mit
jsvine/waybackpack
from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): + headers = { - headers = { "User-Agent": self.user_agent } + "User-Agent": self.user_agent, + } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, + stream=True, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res
Add stream=True to requests params
## Code Before: from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res ## Instruction: Add stream=True to requests params ## Code After: from .settings import DEFAULT_USER_AGENT import requests import time import logging logger = logging.getLogger(__name__) class Session(object): def __init__(self, follow_redirects=False, user_agent=DEFAULT_USER_AGENT): self.follow_redirects = follow_redirects self.user_agent = user_agent def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent, } response_is_final = False while (response_is_final == False): res = requests.get( url, allow_redirects=self.follow_redirects, headers=headers, stream=True, **kwargs ) if res.status_code != 200: logger.info("HTTP status code: {0}".format(res.status_code)) if int(res.status_code / 100) == 5: logger.info("Waiting 1 second before retrying.") time.sleep(1) continue else: response_is_final = True return res
... def get(self, url, **kwargs): headers = { "User-Agent": self.user_agent, } response_is_final = False while (response_is_final == False): ... allow_redirects=self.follow_redirects, headers=headers, stream=True, **kwargs ) ...
ca9f3c005b2412c1b9ff0247afc6708b0172d183
web/impact/impact/v1/views/base_history_view.py
web/impact/impact/v1/views/base_history_view.py
from rest_framework.response import Response
from rest_framework.views import APIView

from impact.permissions import (
    V1APIPermissions,
)
from impact.v1.metadata import (
    ImpactMetadata,
    READ_ONLY_LIST_TYPE,
)


class BaseHistoryView(APIView):
    metadata_class = ImpactMetadata

    permission_classes = (
        V1APIPermissions,
    )

    METADATA_ACTIONS = {"GET": {"history": READ_ONLY_LIST_TYPE}}

    # def __init__(self, *args, **kwargs):
    #     super().__init__(*args, **kwargs)

    def get(self, request, pk):
        self.instance = self.model.objects.get(pk=pk)
        events = []
        for event_class in self.event_classes:
            events = events + event_class.events(self.instance)
        result = {
            "results": sorted([event.serialize() for event in events],
                              key=lambda e: e["datetime"])
        }
        return Response(result)
from rest_framework.response import Response
from rest_framework.views import APIView

from impact.permissions import (
    V1APIPermissions,
)
from impact.v1.metadata import (
    ImpactMetadata,
    READ_ONLY_LIST_TYPE,
)


class BaseHistoryView(APIView):
    metadata_class = ImpactMetadata

    permission_classes = (
        V1APIPermissions,
    )

    METADATA_ACTIONS = {"GET": {"history": READ_ONLY_LIST_TYPE}}

    def get(self, request, pk):
        self.instance = self.model.objects.get(pk=pk)
        events = []
        for event_class in self.event_classes:
            events = events + event_class.events(self.instance)
        result = {
            "results": sorted([event.serialize() for event in events],
                              key=lambda e: (e["datetime"],
                                             e.get("latest_datetime",
                                                   e["datetime"])))
        }
        return Response(result)
Improve history sorting and remove dead comments
[AC-4875] Improve history sorting and remove dead comments
Python
mit
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
from rest_framework.response import Response from rest_framework.views import APIView from impact.permissions import ( V1APIPermissions, ) from impact.v1.metadata import ( ImpactMetadata, READ_ONLY_LIST_TYPE, ) class BaseHistoryView(APIView): metadata_class = ImpactMetadata permission_classes = ( V1APIPermissions, ) METADATA_ACTIONS = {"GET": {"history": READ_ONLY_LIST_TYPE}} - # def __init__(self, *args, **kwargs): - # super().__init__(*args, **kwargs) - def get(self, request, pk): self.instance = self.model.objects.get(pk=pk) events = [] for event_class in self.event_classes: events = events + event_class.events(self.instance) result = { "results": sorted([event.serialize() for event in events], - key=lambda e: e["datetime"]) + key=lambda e: (e["datetime"], + e.get("latest_datetime", + e["datetime"]))) } return Response(result)
Improve history sorting and remove dead comments
## Code Before: from rest_framework.response import Response from rest_framework.views import APIView from impact.permissions import ( V1APIPermissions, ) from impact.v1.metadata import ( ImpactMetadata, READ_ONLY_LIST_TYPE, ) class BaseHistoryView(APIView): metadata_class = ImpactMetadata permission_classes = ( V1APIPermissions, ) METADATA_ACTIONS = {"GET": {"history": READ_ONLY_LIST_TYPE}} # def __init__(self, *args, **kwargs): # super().__init__(*args, **kwargs) def get(self, request, pk): self.instance = self.model.objects.get(pk=pk) events = [] for event_class in self.event_classes: events = events + event_class.events(self.instance) result = { "results": sorted([event.serialize() for event in events], key=lambda e: e["datetime"]) } return Response(result) ## Instruction: Improve history sorting and remove dead comments ## Code After: from rest_framework.response import Response from rest_framework.views import APIView from impact.permissions import ( V1APIPermissions, ) from impact.v1.metadata import ( ImpactMetadata, READ_ONLY_LIST_TYPE, ) class BaseHistoryView(APIView): metadata_class = ImpactMetadata permission_classes = ( V1APIPermissions, ) METADATA_ACTIONS = {"GET": {"history": READ_ONLY_LIST_TYPE}} def get(self, request, pk): self.instance = self.model.objects.get(pk=pk) events = [] for event_class in self.event_classes: events = events + event_class.events(self.instance) result = { "results": sorted([event.serialize() for event in events], key=lambda e: (e["datetime"], e.get("latest_datetime", e["datetime"]))) } return Response(result)
// ... existing code ... METADATA_ACTIONS = {"GET": {"history": READ_ONLY_LIST_TYPE}} def get(self, request, pk): self.instance = self.model.objects.get(pk=pk) // ... modified code ... result = { "results": sorted([event.serialize() for event in events], key=lambda e: (e["datetime"], e.get("latest_datetime", e["datetime"]))) } return Response(result) // ... rest of the code ...
8cbac87d73f361bd6d623cbe58d188dd9cc518ce
ext_pylib/input/__init__.py
ext_pylib/input/__init__.py
from __future__ import absolute_import

# Use Python 3 input if possible
try:
    INPUT = input
except NameError:
    INPUT = raw_input

# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
from __future__ import absolute_import

# Use Python 2 input unless raw_input doesn't exist
try:
    INPUT = raw_input
except NameError:
    INPUT = input

# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
Use raw_input [py2] first, then resort to input [py3].
BUGFIX: Use raw_input [py2] first, then resort to input [py3].
Python
mit
hbradleyiii/ext_pylib
from __future__ import absolute_import - # Use Python 3 input if possible + # Use Python 2 input unless raw_input doesn't exist try: + INPUT = raw_input + except NameError: INPUT = input - except NameError: - INPUT = raw_input # pylint: disable=wrong-import-position # this import MUST be after INPUT is defined from .prompts import prompt, prompt_str, warn_prompt
Use raw_input [py2] first, then resort to input [py3].
## Code Before: from __future__ import absolute_import # Use Python 3 input if possible try: INPUT = input except NameError: INPUT = raw_input # pylint: disable=wrong-import-position # this import MUST be after INPUT is defined from .prompts import prompt, prompt_str, warn_prompt ## Instruction: Use raw_input [py2] first, then resort to input [py3]. ## Code After: from __future__ import absolute_import # Use Python 2 input unless raw_input doesn't exist try: INPUT = raw_input except NameError: INPUT = input # pylint: disable=wrong-import-position # this import MUST be after INPUT is defined from .prompts import prompt, prompt_str, warn_prompt
... from __future__ import absolute_import # Use Python 2 input unless raw_input doesn't exist try: INPUT = raw_input except NameError: INPUT = input # pylint: disable=wrong-import-position ...
8348cf481dc098cb5cf583dd86a6923c9c03d5f5
freight/utils/auth.py
freight/utils/auth.py
from __future__ import absolute_import

from flask import current_app, request, session

from freight.models import User

from freight.testutils.fixtures import Fixtures

NOT_SET = object()


def get_current_user():
    """
    Return the currently authenticated user based on their active session.
    Will return a dummy user if in development mode.
    """
    if getattr(request, 'current_user', NOT_SET) is NOT_SET:
        if current_app.config.get('DEV'):
            request.current_user = User.query.filter(
                User.name == 'Freight',
            ).first()
            if not request.current_user:
                request.current_user = Fixtures().create_user(
                    name='Freight',
                )
        elif session.get('uid') is None:
            request.current_user = None
        else:
            request.current_user = User.query.get(session['uid'])
            if request.current_user is None:
                del session['uid']
    return request.current_user
from __future__ import absolute_import from flask import current_app, request, session from freight.models import User NOT_SET = object() def get_current_user(): """ Return the currently authenticated user based on their active session. Will return a dummy user if in development mode. """ if getattr(request, 'current_user', NOT_SET) is NOT_SET: if current_app.config.get('DEV'): from freight.testutils.fixtures import Fixtures request.current_user = User.query.filter( User.name == 'Freight', ).first() if not request.current_user: request.current_user = Fixtures().create_user( name='Freight', ) elif session.get('uid') is None: request.current_user = None else: request.current_user = User.query.get(session['uid']) if request.current_user is None: del session['uid'] return request.current_user
Move fixture import to only be in DEV
Move fixture import to only be in DEV
Python
apache-2.0
getsentry/freight,getsentry/freight,getsentry/freight,getsentry/freight,getsentry/freight
from __future__ import absolute_import from flask import current_app, request, session from freight.models import User - - from freight.testutils.fixtures import Fixtures NOT_SET = object() def get_current_user(): """ Return the currently authenticated user based on their active session. Will return a dummy user if in development mode. """ if getattr(request, 'current_user', NOT_SET) is NOT_SET: if current_app.config.get('DEV'): + from freight.testutils.fixtures import Fixtures + request.current_user = User.query.filter( User.name == 'Freight', ).first() if not request.current_user: request.current_user = Fixtures().create_user( name='Freight', ) elif session.get('uid') is None: request.current_user = None else: request.current_user = User.query.get(session['uid']) if request.current_user is None: del session['uid'] return request.current_user
Move fixture import to only be in DEV
## Code Before: from __future__ import absolute_import from flask import current_app, request, session from freight.models import User from freight.testutils.fixtures import Fixtures NOT_SET = object() def get_current_user(): """ Return the currently authenticated user based on their active session. Will return a dummy user if in development mode. """ if getattr(request, 'current_user', NOT_SET) is NOT_SET: if current_app.config.get('DEV'): request.current_user = User.query.filter( User.name == 'Freight', ).first() if not request.current_user: request.current_user = Fixtures().create_user( name='Freight', ) elif session.get('uid') is None: request.current_user = None else: request.current_user = User.query.get(session['uid']) if request.current_user is None: del session['uid'] return request.current_user ## Instruction: Move fixture import to only be in DEV ## Code After: from __future__ import absolute_import from flask import current_app, request, session from freight.models import User NOT_SET = object() def get_current_user(): """ Return the currently authenticated user based on their active session. Will return a dummy user if in development mode. """ if getattr(request, 'current_user', NOT_SET) is NOT_SET: if current_app.config.get('DEV'): from freight.testutils.fixtures import Fixtures request.current_user = User.query.filter( User.name == 'Freight', ).first() if not request.current_user: request.current_user = Fixtures().create_user( name='Freight', ) elif session.get('uid') is None: request.current_user = None else: request.current_user = User.query.get(session['uid']) if request.current_user is None: del session['uid'] return request.current_user
... from freight.models import User NOT_SET = object() ... if getattr(request, 'current_user', NOT_SET) is NOT_SET: if current_app.config.get('DEV'): from freight.testutils.fixtures import Fixtures request.current_user = User.query.filter( User.name == 'Freight', ...
4fe55df3bb668a2eafdb65a3a31ad27ffa5dc3c2
pytable.py
pytable.py
from __future__ import print_function from operator import itemgetter import monoidal_tables as mt from monoidal_tables import renderers if __name__ == '__main__': table = (mt.integer('X', itemgetter('x')) + mt.integer('Y', itemgetter('y')) + mt.align_center(mt.column('Name', itemgetter('name')))) data = [ {'x': 0, 'y': 0, 'name': 'Origin'}, {'x': 5, 'y': 5, 'name': 'Diagonal'}, {'x': 12, 'y': 8, 'name': 'Up'}, ] table.render(data, renderer=renderers.FancyRenderer)
from __future__ import print_function from operator import itemgetter import monoidal_tables as mt from monoidal_tables import renderers if __name__ == '__main__': table = (mt.integer('X', itemgetter('x')) + mt.set_class(mt.integer('Y', itemgetter('y')), 'col-y') + mt.align_center(mt.column('Name', itemgetter('name')))) data = [ {'x': 0, 'y': 0, 'name': 'Origin'}, {'x': 5, 'y': 5, 'name': 'Diagonal'}, {'x': 12, 'y': 8, 'name': 'Up'}, ] table.render(data, renderer=renderers.FancyRenderer)
Update example to show HTML class
Update example to show HTML class
Python
bsd-3-clause
lubomir/monoidal-tables
from __future__ import print_function from operator import itemgetter import monoidal_tables as mt from monoidal_tables import renderers if __name__ == '__main__': table = (mt.integer('X', itemgetter('x')) + - mt.integer('Y', itemgetter('y')) + + mt.set_class(mt.integer('Y', itemgetter('y')), 'col-y') + mt.align_center(mt.column('Name', itemgetter('name')))) data = [ {'x': 0, 'y': 0, 'name': 'Origin'}, {'x': 5, 'y': 5, 'name': 'Diagonal'}, {'x': 12, 'y': 8, 'name': 'Up'}, ] table.render(data, renderer=renderers.FancyRenderer)
Update example to show HTML class
## Code Before: from __future__ import print_function from operator import itemgetter import monoidal_tables as mt from monoidal_tables import renderers if __name__ == '__main__': table = (mt.integer('X', itemgetter('x')) + mt.integer('Y', itemgetter('y')) + mt.align_center(mt.column('Name', itemgetter('name')))) data = [ {'x': 0, 'y': 0, 'name': 'Origin'}, {'x': 5, 'y': 5, 'name': 'Diagonal'}, {'x': 12, 'y': 8, 'name': 'Up'}, ] table.render(data, renderer=renderers.FancyRenderer) ## Instruction: Update example to show HTML class ## Code After: from __future__ import print_function from operator import itemgetter import monoidal_tables as mt from monoidal_tables import renderers if __name__ == '__main__': table = (mt.integer('X', itemgetter('x')) + mt.set_class(mt.integer('Y', itemgetter('y')), 'col-y') + mt.align_center(mt.column('Name', itemgetter('name')))) data = [ {'x': 0, 'y': 0, 'name': 'Origin'}, {'x': 5, 'y': 5, 'name': 'Diagonal'}, {'x': 12, 'y': 8, 'name': 'Up'}, ] table.render(data, renderer=renderers.FancyRenderer)
... if __name__ == '__main__': table = (mt.integer('X', itemgetter('x')) + mt.set_class(mt.integer('Y', itemgetter('y')), 'col-y') + mt.align_center(mt.column('Name', itemgetter('name')))) data = [ ...
336fbbd82a09469a7ac7d0eb850daa6a55f42669
ctypescrypto/__init__.py
ctypescrypto/__init__.py
from ctypes import CDLL, c_char_p def config(filename=None): """ Loads OpenSSL Config file. If none are specified, loads default (compiled in) one """ libcrypto.OPENSSL_config(filename) __all__ = ['config'] libcrypto = CDLL("libcrypto.so.1.0.0") libcrypto.OPENSSL_config.argtypes = (c_char_p, ) libcrypto.OPENSSL_add_all_algorithms_conf()
from ctypes import CDLL, c_char_p from ctypes.util import find_library import sys def config(filename=None): """ Loads OpenSSL Config file. If none are specified, loads default (compiled in) one """ libcrypto.OPENSSL_config(filename) __all__ = ['config'] if sys.platform.startswith('win'): __libname__ = find_library('libeay32') else: __libname__ = find_library('crypto') if __libname__ is None: raise OSError("Cannot find OpenSSL crypto library") libcrypto = CDLL(__libname__) libcrypto.OPENSSL_config.argtypes = (c_char_p, ) libcrypto.OPENSSL_add_all_algorithms_conf()
Use find_library to search for openssl libs
Use find_library to search for openssl libs
Python
mit
vbwagner/ctypescrypto
from ctypes import CDLL, c_char_p + from ctypes.util import find_library + import sys def config(filename=None): """ Loads OpenSSL Config file. If none are specified, loads default (compiled in) one """ libcrypto.OPENSSL_config(filename) __all__ = ['config'] - libcrypto = CDLL("libcrypto.so.1.0.0") + if sys.platform.startswith('win'): + __libname__ = find_library('libeay32') + else: + __libname__ = find_library('crypto') + + if __libname__ is None: + raise OSError("Cannot find OpenSSL crypto library") + + libcrypto = CDLL(__libname__) libcrypto.OPENSSL_config.argtypes = (c_char_p, ) libcrypto.OPENSSL_add_all_algorithms_conf()
Use find_library to search for openssl libs
## Code Before: from ctypes import CDLL, c_char_p def config(filename=None): """ Loads OpenSSL Config file. If none are specified, loads default (compiled in) one """ libcrypto.OPENSSL_config(filename) __all__ = ['config'] libcrypto = CDLL("libcrypto.so.1.0.0") libcrypto.OPENSSL_config.argtypes = (c_char_p, ) libcrypto.OPENSSL_add_all_algorithms_conf() ## Instruction: Use find_library to search for openssl libs ## Code After: from ctypes import CDLL, c_char_p from ctypes.util import find_library import sys def config(filename=None): """ Loads OpenSSL Config file. If none are specified, loads default (compiled in) one """ libcrypto.OPENSSL_config(filename) __all__ = ['config'] if sys.platform.startswith('win'): __libname__ = find_library('libeay32') else: __libname__ = find_library('crypto') if __libname__ is None: raise OSError("Cannot find OpenSSL crypto library") libcrypto = CDLL(__libname__) libcrypto.OPENSSL_config.argtypes = (c_char_p, ) libcrypto.OPENSSL_add_all_algorithms_conf()
... from ctypes import CDLL, c_char_p from ctypes.util import find_library import sys def config(filename=None): ... __all__ = ['config'] if sys.platform.startswith('win'): __libname__ = find_library('libeay32') else: __libname__ = find_library('crypto') if __libname__ is None: raise OSError("Cannot find OpenSSL crypto library") libcrypto = CDLL(__libname__) libcrypto.OPENSSL_config.argtypes = (c_char_p, ) libcrypto.OPENSSL_add_all_algorithms_conf() ...
401f98ad74792e9a5d9354dec8c24dc9637d1f5e
tests/gsim/pezeshk_2011_test.py
tests/gsim/pezeshk_2011_test.py
from openquake.hazardlib.gsim.pezeshk_2011 import Pezeshk2011 from tests.gsim.utils import BaseGSIMTestCase class Pezeshk2011TestCase(BaseGSIMTestCase): GSIM_CLASS = Pezeshk2011 # Test data were obtained from a tool given by the authors # The data of the values of the mean PGA and SA are in g's. def test_mean(self): self.check('PEZE11/PZ11_MEAN.csv', max_discrep_percentage=0.5) def test_std_total(self): self.check('PEZE11/PZ11_STD_TOTAL.csv', max_discrep_percentage=0.5)
from openquake.hazardlib.gsim.pezeshk_2011 import PezeshkEtAl2011 from tests.gsim.utils import BaseGSIMTestCase class Pezeshk2011EtAlTestCase(BaseGSIMTestCase): GSIM_CLASS = PezeshkEtAl2011 # Test data were obtained from a tool given by the authors # The data of the values of the mean PGA and SA are in g's. def test_mean(self): self.check('PEZE11/PZ11_MEAN.csv', max_discrep_percentage=0.5) def test_std_total(self): self.check('PEZE11/PZ11_STD_TOTAL.csv', max_discrep_percentage=0.5)
Add implementation of gmpe Pezeshk et al 2011 for ENA
Add implementation of gmpe Pezeshk et al 2011 for ENA
Python
agpl-3.0
vup1120/oq-hazardlib,gem/oq-engine,g-weatherill/oq-hazardlib,gem/oq-hazardlib,gem/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-engine,gem/oq-engine,rcgee/oq-hazardlib,mmpagani/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-hazardlib,ROB-Seismology/oq-hazardlib,silviacanessa/oq-hazardlib,vup1120/oq-hazardlib,ROB-Seismology/oq-hazardlib,silviacanessa/oq-hazardlib,larsbutler/oq-hazardlib,ROB-Seismology/oq-hazardlib,silviacanessa/oq-hazardlib,larsbutler/oq-hazardlib,silviacanessa/oq-hazardlib,gem/oq-engine,larsbutler/oq-hazardlib,g-weatherill/oq-hazardlib,rcgee/oq-hazardlib,vup1120/oq-hazardlib,ROB-Seismology/oq-hazardlib,gem/oq-engine,mmpagani/oq-hazardlib,mmpagani/oq-hazardlib
- from openquake.hazardlib.gsim.pezeshk_2011 import Pezeshk2011 + from openquake.hazardlib.gsim.pezeshk_2011 import PezeshkEtAl2011 from tests.gsim.utils import BaseGSIMTestCase - class Pezeshk2011TestCase(BaseGSIMTestCase): + class Pezeshk2011EtAlTestCase(BaseGSIMTestCase): - GSIM_CLASS = Pezeshk2011 + GSIM_CLASS = PezeshkEtAl2011 # Test data were obtained from a tool given by the authors # The data of the values of the mean PGA and SA are in g's. def test_mean(self): self.check('PEZE11/PZ11_MEAN.csv', max_discrep_percentage=0.5) def test_std_total(self): self.check('PEZE11/PZ11_STD_TOTAL.csv', max_discrep_percentage=0.5)
Add implementation of gmpe Pezeshk et al 2011 for ENA
## Code Before: from openquake.hazardlib.gsim.pezeshk_2011 import Pezeshk2011 from tests.gsim.utils import BaseGSIMTestCase class Pezeshk2011TestCase(BaseGSIMTestCase): GSIM_CLASS = Pezeshk2011 # Test data were obtained from a tool given by the authors # The data of the values of the mean PGA and SA are in g's. def test_mean(self): self.check('PEZE11/PZ11_MEAN.csv', max_discrep_percentage=0.5) def test_std_total(self): self.check('PEZE11/PZ11_STD_TOTAL.csv', max_discrep_percentage=0.5) ## Instruction: Add implementation of gmpe Pezeshk et al 2011 for ENA ## Code After: from openquake.hazardlib.gsim.pezeshk_2011 import PezeshkEtAl2011 from tests.gsim.utils import BaseGSIMTestCase class Pezeshk2011EtAlTestCase(BaseGSIMTestCase): GSIM_CLASS = PezeshkEtAl2011 # Test data were obtained from a tool given by the authors # The data of the values of the mean PGA and SA are in g's. def test_mean(self): self.check('PEZE11/PZ11_MEAN.csv', max_discrep_percentage=0.5) def test_std_total(self): self.check('PEZE11/PZ11_STD_TOTAL.csv', max_discrep_percentage=0.5)
# ... existing code ... from openquake.hazardlib.gsim.pezeshk_2011 import PezeshkEtAl2011 from tests.gsim.utils import BaseGSIMTestCase class Pezeshk2011EtAlTestCase(BaseGSIMTestCase): GSIM_CLASS = PezeshkEtAl2011 # Test data were obtained from a tool given by the authors # ... rest of the code ...
6a58c7f0eb1b92ec12d0e48d7fd3f2586de20755
sal/management/commands/update_admin_user.py
sal/management/commands/update_admin_user.py
''' Creates an admin user if there aren't any existing superusers ''' from django.core.management.base import BaseCommand, CommandError from django.contrib.auth.models import User from optparse import make_option class Command(BaseCommand): help = 'Creates/Updates an Admin user' def add_arguments(self, parser): parser.add_argument('--username', action='store', dest='username', default=None, help='Admin username') parser.add_argument('--password', action='store', dest='password', default=None, help='Admin password') def handle(self, *args, **options): username = options.get('username') password = options.get('password') if not username or not password: raise StandardError('You must specify a username and password') # Get the current superusers su_count = User.objects.filter(is_superuser=True).count() if su_count == 0: # there aren't any superusers, create one user, created = User.objects.get_or_create(username=username) user.set_password(password) user.is_staff = True user.is_superuser = True user.save() print('{0} updated'.format(username)) else: print('There are already {0} superusers'.format(su_count))
"""Creates an admin user if there aren't any existing superusers.""" from optparse import make_option from django.contrib.auth.models import User from django.core.management.base import BaseCommand, CommandError class Command(BaseCommand): help = 'Creates/Updates an Admin user' def add_arguments(self, parser): parser.add_argument('--username', action='store', dest='username', default=None, help='Admin username') parser.add_argument('--password', action='store', dest='password', default=None, help='Admin password') def handle(self, *args, **options): username = options.get('username') password = options.get('password') if not username or not password: raise CommandError('You must specify a username and password') # Get the current superusers su_count = User.objects.filter(is_superuser=True).count() if su_count == 0: # there aren't any superusers, create one user, created = User.objects.get_or_create(username=username) user.set_password(password) user.is_staff = True user.is_superuser = True user.save() print(f'{username} updated') else: print(f'There are already {su_count} superusers')
Fix exception handling in management command. Clean up.
Fix exception handling in management command. Clean up.
Python
apache-2.0
salopensource/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,salopensource/sal
- ''' - Creates an admin user if there aren't any existing superusers + """Creates an admin user if there aren't any existing superusers.""" - ''' + + from optparse import make_option + + from django.contrib.auth.models import User from django.core.management.base import BaseCommand, CommandError - from django.contrib.auth.models import User - from optparse import make_option class Command(BaseCommand): help = 'Creates/Updates an Admin user' def add_arguments(self, parser): parser.add_argument('--username', action='store', dest='username', default=None, help='Admin username') parser.add_argument('--password', action='store', dest='password', default=None, help='Admin password') def handle(self, *args, **options): username = options.get('username') password = options.get('password') if not username or not password: - raise StandardError('You must specify a username and password') + raise CommandError('You must specify a username and password') # Get the current superusers su_count = User.objects.filter(is_superuser=True).count() if su_count == 0: # there aren't any superusers, create one user, created = User.objects.get_or_create(username=username) user.set_password(password) user.is_staff = True user.is_superuser = True user.save() - print('{0} updated'.format(username)) + print(f'{username} updated') else: - print('There are already {0} superusers'.format(su_count)) + print(f'There are already {su_count} superusers')
Fix exception handling in management command. Clean up.
## Code Before: ''' Creates an admin user if there aren't any existing superusers ''' from django.core.management.base import BaseCommand, CommandError from django.contrib.auth.models import User from optparse import make_option class Command(BaseCommand): help = 'Creates/Updates an Admin user' def add_arguments(self, parser): parser.add_argument('--username', action='store', dest='username', default=None, help='Admin username') parser.add_argument('--password', action='store', dest='password', default=None, help='Admin password') def handle(self, *args, **options): username = options.get('username') password = options.get('password') if not username or not password: raise StandardError('You must specify a username and password') # Get the current superusers su_count = User.objects.filter(is_superuser=True).count() if su_count == 0: # there aren't any superusers, create one user, created = User.objects.get_or_create(username=username) user.set_password(password) user.is_staff = True user.is_superuser = True user.save() print('{0} updated'.format(username)) else: print('There are already {0} superusers'.format(su_count)) ## Instruction: Fix exception handling in management command. Clean up. ## Code After: """Creates an admin user if there aren't any existing superusers.""" from optparse import make_option from django.contrib.auth.models import User from django.core.management.base import BaseCommand, CommandError class Command(BaseCommand): help = 'Creates/Updates an Admin user' def add_arguments(self, parser): parser.add_argument('--username', action='store', dest='username', default=None, help='Admin username') parser.add_argument('--password', action='store', dest='password', default=None, help='Admin password') def handle(self, *args, **options): username = options.get('username') password = options.get('password') if not username or not password: raise CommandError('You must specify a username and password') # Get the current superusers su_count = User.objects.filter(is_superuser=True).count() if su_count == 0: # there aren't any superusers, create one user, created = User.objects.get_or_create(username=username) user.set_password(password) user.is_staff = True user.is_superuser = True user.save() print(f'{username} updated') else: print(f'There are already {su_count} superusers')
... """Creates an admin user if there aren't any existing superusers.""" from optparse import make_option from django.contrib.auth.models import User from django.core.management.base import BaseCommand, CommandError ... password = options.get('password') if not username or not password: raise CommandError('You must specify a username and password') # Get the current superusers su_count = User.objects.filter(is_superuser=True).count() ... user.is_superuser = True user.save() print(f'{username} updated') else: print(f'There are already {su_count} superusers') ...
f35494ebc7c710af45c8973eb1c2b4d31ec1c7c0
tests/fakes.py
tests/fakes.py
class FakeHttpRequest(object): def __init__(self, method='GET', body=''): self.method = method.upper() self.body = body class FakeHttpResponse(object): def __init__(self, body, content_type='text/html'): self.body = body self.content_type = content_type self.status_code = 200 class FakeModel(object): def __init__(self, **kwargs): for k, v in kwargs.items(): setattr(self, k, v)
import six class FakeHttpRequest(object): def __init__(self, method='GET', body=''): self.method = method.upper() self.body = body if six.PY3: self.body = body.encode('utf-8') class FakeHttpResponse(object): def __init__(self, body, content_type='text/html'): self.body = body self.content_type = content_type self.status_code = 200 class FakeModel(object): def __init__(self, **kwargs): for k, v in kwargs.items(): setattr(self, k, v)
Update tests to reproduce bug
Update tests to reproduce bug
Python
bsd-3-clause
pobear/restless,viniciuscainelli/restless,toastdriven/restless,tonybajan/restless,CraveFood/restkiss,jangeador/restless
+ import six + + class FakeHttpRequest(object): def __init__(self, method='GET', body=''): self.method = method.upper() self.body = body + if six.PY3: + self.body = body.encode('utf-8') class FakeHttpResponse(object): def __init__(self, body, content_type='text/html'): self.body = body self.content_type = content_type self.status_code = 200 class FakeModel(object): def __init__(self, **kwargs): for k, v in kwargs.items(): setattr(self, k, v)
Update tests to reproduce bug
## Code Before: class FakeHttpRequest(object): def __init__(self, method='GET', body=''): self.method = method.upper() self.body = body class FakeHttpResponse(object): def __init__(self, body, content_type='text/html'): self.body = body self.content_type = content_type self.status_code = 200 class FakeModel(object): def __init__(self, **kwargs): for k, v in kwargs.items(): setattr(self, k, v) ## Instruction: Update tests to reproduce bug ## Code After: import six class FakeHttpRequest(object): def __init__(self, method='GET', body=''): self.method = method.upper() self.body = body if six.PY3: self.body = body.encode('utf-8') class FakeHttpResponse(object): def __init__(self, body, content_type='text/html'): self.body = body self.content_type = content_type self.status_code = 200 class FakeModel(object): def __init__(self, **kwargs): for k, v in kwargs.items(): setattr(self, k, v)
... import six class FakeHttpRequest(object): def __init__(self, method='GET', body=''): ... self.method = method.upper() self.body = body if six.PY3: self.body = body.encode('utf-8') ...
a3975cc9d4a388789fcdaf07ece011b01801f162
hilbert/decorators.py
hilbert/decorators.py
from functools import wraps from django import http from django.conf import settings from django.contrib.auth.decorators import login_required from django.utils.decorators import available_attrs from django.utils.log import getLogger logger = getLogger('django-hilbert') def ajax_login_required(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): if request.user.is_authenticated(): return view_func(request, *args, **kwargs) else: response = http.HttpResponse() response['X-Django-Requires-Auth'] = True response['X-Django-Login-Url'] = settings.LOGIN_URL return response else: return login_required(view_func)(request, *args, **kwargs) return _wrapped_view def ajax_only(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): return view_func(request, *args, **kwargs) else: logger.warning(u'AJAX required: %s' % request.path, extra={'request': request}) return http.HttpResponseBadRequest() return _wrapped_view
from functools import wraps from django import http from django.conf import settings from django.contrib.auth.decorators import login_required from django.utils.decorators import available_attrs def ajax_login_required(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): if request.user.is_authenticated(): return view_func(request, *args, **kwargs) else: response = http.HttpResponse() response['X-Django-Requires-Auth'] = True response['X-Django-Login-Url'] = settings.LOGIN_URL return response else: return login_required(view_func)(request, *args, **kwargs) return _wrapped_view def ajax_only(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): return view_func(request, *args, **kwargs) else: return http.HttpResponseBadRequest() return _wrapped_view
Remove logging to preserve 1.2 compatibility.
Remove logging to preserve 1.2 compatibility.
Python
bsd-2-clause
mlavin/django-hilbert,mlavin/django-hilbert
from functools import wraps from django import http from django.conf import settings from django.contrib.auth.decorators import login_required from django.utils.decorators import available_attrs - from django.utils.log import getLogger - - - logger = getLogger('django-hilbert') def ajax_login_required(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): if request.user.is_authenticated(): return view_func(request, *args, **kwargs) else: response = http.HttpResponse() response['X-Django-Requires-Auth'] = True response['X-Django-Login-Url'] = settings.LOGIN_URL return response else: return login_required(view_func)(request, *args, **kwargs) return _wrapped_view def ajax_only(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): return view_func(request, *args, **kwargs) else: - logger.warning(u'AJAX required: %s' % request.path, extra={'request': request}) return http.HttpResponseBadRequest() return _wrapped_view
Remove logging to preserve 1.2 compatibility.
## Code Before: from functools import wraps from django import http from django.conf import settings from django.contrib.auth.decorators import login_required from django.utils.decorators import available_attrs from django.utils.log import getLogger logger = getLogger('django-hilbert') def ajax_login_required(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): if request.user.is_authenticated(): return view_func(request, *args, **kwargs) else: response = http.HttpResponse() response['X-Django-Requires-Auth'] = True response['X-Django-Login-Url'] = settings.LOGIN_URL return response else: return login_required(view_func)(request, *args, **kwargs) return _wrapped_view def ajax_only(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): return view_func(request, *args, **kwargs) else: logger.warning(u'AJAX required: %s' % request.path, extra={'request': request}) return http.HttpResponseBadRequest() return _wrapped_view ## Instruction: Remove logging to preserve 1.2 compatibility. ## Code After: from functools import wraps from django import http from django.conf import settings from django.contrib.auth.decorators import login_required from django.utils.decorators import available_attrs def ajax_login_required(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): if request.user.is_authenticated(): return view_func(request, *args, **kwargs) else: response = http.HttpResponse() response['X-Django-Requires-Auth'] = True response['X-Django-Login-Url'] = settings.LOGIN_URL return response else: return login_required(view_func)(request, *args, **kwargs) return _wrapped_view def ajax_only(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if request.is_ajax(): return view_func(request, *args, **kwargs) else: return http.HttpResponseBadRequest() return _wrapped_view
# ... existing code ... from django.contrib.auth.decorators import login_required from django.utils.decorators import available_attrs # ... modified code ... return view_func(request, *args, **kwargs) else: return http.HttpResponseBadRequest() return _wrapped_view # ... rest of the code ...
69fb681cd27b43cc2d5500fcca89df3744b3661c
tests/test_registry.py
tests/test_registry.py
import pytest from web_test_base import * class TestIATIRegistry(WebTestBase): requests_to_load = { 'IATI Registry Homepage - http, no www': { 'url': 'http://iatiregistry.org/' } , 'IATI Registry Homepage - http, with www': { 'url': 'http://www.iatiregistry.org/' } , 'IATI Registry Homepage - https, no www': { 'url': 'https://iatiregistry.org/' } , 'IATI Registry Homepage - https, with www': { 'url': 'https://www.iatiregistry.org/' } } def test_contains_links(self, loaded_request): """ Test that each page contains links to the defined URLs. """ result = utility.get_links_from_page(loaded_request) assert "http://www.aidtransparency.net/" in result assert "http://www.iatistandard.org/" in result
import pytest from web_test_base import * class TestIATIRegistry(WebTestBase): requests_to_load = { 'IATI Registry Homepage - http, no www': { 'url': 'http://iatiregistry.org/' } , 'IATI Registry Homepage - http, with www': { 'url': 'http://www.iatiregistry.org/' } , 'IATI Registry Homepage - https, no www': { 'url': 'https://iatiregistry.org/' } , 'IATI Registry Homepage - https, with www': { 'url': 'https://www.iatiregistry.org/' } , 'IATI Registry Registration Page': { 'url': 'https://iatiregistry.org/user/register' } } def test_contains_links(self, loaded_request): """ Test that each page contains links to the defined URLs. """ result = utility.get_links_from_page(loaded_request) assert "http://www.aidtransparency.net/" in result assert "http://www.iatistandard.org/" in result @pytest.mark.parametrize("target_request", ["IATI Registry Registration Page"]) def test_registration_form_presence(self, target_request): """ Test that there is a valid registration form on the Registry Registration Page. """ req = self.loaded_request_from_test_name(target_request) form_xpath = '//*[@id="user-register-form"]' form_method_xpath = '//*[@id="user-register-form"]/@method' input_xpath = '//*[@id="user-register-form"]/div/div/input' forms = utility.locate_xpath_result(req, form_xpath) form_method = utility.locate_xpath_result(req, form_method_xpath) form_inputs = utility.locate_xpath_result(req, input_xpath) assert len(forms) == 1 assert form_method == ['post'] assert len(form_inputs) == 5
Add test for registry registration form
Add test for registry registration form This adds a test to ensure that the registry registration page has a registration form with the expected method and number of inputs.
Python
mit
IATI/IATI-Website-Tests
import pytest from web_test_base import * class TestIATIRegistry(WebTestBase): requests_to_load = { 'IATI Registry Homepage - http, no www': { 'url': 'http://iatiregistry.org/' } , 'IATI Registry Homepage - http, with www': { 'url': 'http://www.iatiregistry.org/' } , 'IATI Registry Homepage - https, no www': { 'url': 'https://iatiregistry.org/' } , 'IATI Registry Homepage - https, with www': { 'url': 'https://www.iatiregistry.org/' } + , 'IATI Registry Registration Page': { + 'url': 'https://iatiregistry.org/user/register' + } } def test_contains_links(self, loaded_request): """ Test that each page contains links to the defined URLs. """ result = utility.get_links_from_page(loaded_request) assert "http://www.aidtransparency.net/" in result assert "http://www.iatistandard.org/" in result + @pytest.mark.parametrize("target_request", ["IATI Registry Registration Page"]) + def test_registration_form_presence(self, target_request): + """ + Test that there is a valid registration form on the Registry Registration Page. + """ + req = self.loaded_request_from_test_name(target_request) + form_xpath = '//*[@id="user-register-form"]' + form_method_xpath = '//*[@id="user-register-form"]/@method' + input_xpath = '//*[@id="user-register-form"]/div/div/input' + + forms = utility.locate_xpath_result(req, form_xpath) + form_method = utility.locate_xpath_result(req, form_method_xpath) + form_inputs = utility.locate_xpath_result(req, input_xpath) + + assert len(forms) == 1 + assert form_method == ['post'] + assert len(form_inputs) == 5 +
Add test for registry registration form
## Code Before: import pytest from web_test_base import * class TestIATIRegistry(WebTestBase): requests_to_load = { 'IATI Registry Homepage - http, no www': { 'url': 'http://iatiregistry.org/' } , 'IATI Registry Homepage - http, with www': { 'url': 'http://www.iatiregistry.org/' } , 'IATI Registry Homepage - https, no www': { 'url': 'https://iatiregistry.org/' } , 'IATI Registry Homepage - https, with www': { 'url': 'https://www.iatiregistry.org/' } } def test_contains_links(self, loaded_request): """ Test that each page contains links to the defined URLs. """ result = utility.get_links_from_page(loaded_request) assert "http://www.aidtransparency.net/" in result assert "http://www.iatistandard.org/" in result ## Instruction: Add test for registry registration form ## Code After: import pytest from web_test_base import * class TestIATIRegistry(WebTestBase): requests_to_load = { 'IATI Registry Homepage - http, no www': { 'url': 'http://iatiregistry.org/' } , 'IATI Registry Homepage - http, with www': { 'url': 'http://www.iatiregistry.org/' } , 'IATI Registry Homepage - https, no www': { 'url': 'https://iatiregistry.org/' } , 'IATI Registry Homepage - https, with www': { 'url': 'https://www.iatiregistry.org/' } , 'IATI Registry Registration Page': { 'url': 'https://iatiregistry.org/user/register' } } def test_contains_links(self, loaded_request): """ Test that each page contains links to the defined URLs. """ result = utility.get_links_from_page(loaded_request) assert "http://www.aidtransparency.net/" in result assert "http://www.iatistandard.org/" in result @pytest.mark.parametrize("target_request", ["IATI Registry Registration Page"]) def test_registration_form_presence(self, target_request): """ Test that there is a valid registration form on the Registry Registration Page. """ req = self.loaded_request_from_test_name(target_request) form_xpath = '//*[@id="user-register-form"]' form_method_xpath = '//*[@id="user-register-form"]/@method' input_xpath = '//*[@id="user-register-form"]/div/div/input' forms = utility.locate_xpath_result(req, form_xpath) form_method = utility.locate_xpath_result(req, form_method_xpath) form_inputs = utility.locate_xpath_result(req, input_xpath) assert len(forms) == 1 assert form_method == ['post'] assert len(form_inputs) == 5
// ... existing code ... 'url': 'https://www.iatiregistry.org/' } , 'IATI Registry Registration Page': { 'url': 'https://iatiregistry.org/user/register' } } // ... modified code ... assert "http://www.aidtransparency.net/" in result assert "http://www.iatistandard.org/" in result @pytest.mark.parametrize("target_request", ["IATI Registry Registration Page"]) def test_registration_form_presence(self, target_request): """ Test that there is a valid registration form on the Registry Registration Page. """ req = self.loaded_request_from_test_name(target_request) form_xpath = '//*[@id="user-register-form"]' form_method_xpath = '//*[@id="user-register-form"]/@method' input_xpath = '//*[@id="user-register-form"]/div/div/input' forms = utility.locate_xpath_result(req, form_xpath) form_method = utility.locate_xpath_result(req, form_method_xpath) form_inputs = utility.locate_xpath_result(req, input_xpath) assert len(forms) == 1 assert form_method == ['post'] assert len(form_inputs) == 5 // ... rest of the code ...