Columns: text (string lengths 17–1.47k), positive (string lengths 673–4.43k), negative (string lengths 677–2.81k)
Order method parameters more consistently
package com.basgeekball.screenshotsnanny.core; import android.os.Handler; import com.basgeekball.screenshotsnanny.activityassistant.ActivityHelper; import com.basgeekball.screenshotsnanny.helper.Callback; import com.basgeekball.screenshotsnanny.helper.KeyboardHelper; public class ScreenshotsTask { private static void perform(final Class<?> T, final Callback callback, final Callback completionCallback, final boolean hasMap, final int mapFragmentId, final long screenshotDelay) { new Runnable() { @Override public void run() { callback.execute(); ActivityHelper.performTaskWhenActivityIsReady(T, new Callback() { @Override public void execute() { KeyboardHelper.hideKeyboard(); if (hasMap) { ScreenshotsCapturer.executeWithMap(ActivityHelper.getCurrentActivity(), mapFragmentId, screenshotDelay, completionCallback); } else { new Handler().postDelayed(new Runnable() { @Override public void run() { ScreenshotsCapturer.execute(ActivityHelper.getCurrentActivity(), completionCallback); } }, screenshotDelay); } } }); } }.run(); } public static void perform(Class<?> T, Callback callback, Callback completionCallback, long screenshotDelay) { perform(T, callback, completionCallback, false, 0, screenshotDelay); } public static void perform(Class<?> T, Callback callback, Callback completionCallback, int mapFragmentId, long screenshotDelay) { perform(T, callback, completionCallback, true, mapFragmentId, screenshotDelay); } }
package com.basgeekball.screenshotsnanny.core; import android.os.Handler; import com.basgeekball.screenshotsnanny.activityassistant.ActivityHelper; import com.basgeekball.screenshotsnanny.helper.Callback; import com.basgeekball.screenshotsnanny.helper.KeyboardHelper; public class ScreenshotsTask { private static void perform(final Class<?> T, final Callback callback, final Callback completionCallback, final boolean hasMap, final long screenshotDelay, final int mapFragmentId) { new Runnable() { @Override public void run() { callback.execute(); ActivityHelper.performTaskWhenActivityIsReady(T, new Callback() { @Override public void execute() { KeyboardHelper.hideKeyboard(); if (hasMap) { ScreenshotsCapturer.executeWithMap(ActivityHelper.getCurrentActivity(), mapFragmentId, screenshotDelay, completionCallback); } else { new Handler().postDelayed(new Runnable() { @Override public void run() { ScreenshotsCapturer.execute(ActivityHelper.getCurrentActivity(), completionCallback); } }, screenshotDelay); } } }); } }.run(); } public static void perform(Class<?> T, Callback callback, Callback completionCallback, long screenshotDelay) { perform(T, callback, completionCallback, false, screenshotDelay, 0); } public static void perform(Class<?> T, Callback callback, Callback completionCallback, int mapFragmentId, long screenshotDelay) { perform(T, callback, completionCallback, true, screenshotDelay, mapFragmentId); } }
Add inheritance from object for BaseWrapper to force new-style classes in Python 2.7.
"""This module contains the main wrapper class.""" class BaseWrapper(object): """Define base template for function wrapper classes. """ def __init__(self, func): self.func = func self.__doc__ = func.__doc__ def __call__(self, *args, **kwargs): raise NotImplementedError class NumpyWrapper(BaseWrapper): """Function wrapper for numpy's random functions. Allows easy usage avoiding the creation anonymous lambda functions. In addition, the `size` attribute is adjusted automatically. For instance, instead of writing 'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])' you may simply write 'ts.random.randint(low=1, high=10)'. """ def __init__(self, func, size="arg"): super(NumpyWrapper, self).__init__(func) self.size = size def __call__(self, *args, **kwargs): if self.size == "arg": def wrapped(x): return self.func(x.shape[0], *args, **kwargs) elif self.size == "kwarg": def wrapped(x): return self.func(*args, size=x.shape[0], **kwargs) else: raise ValueError("Size argument must be 'arg' or 'kwarg'.") wrapped.__doc__ = self.func.__doc__ return wrapped
"""This module contains the main wrapper class.""" class BaseWrapper: """Define base template for function wrapper classes. """ def __init__(self, func): self.func = func self.__doc__ = func.__doc__ def __call__(self, *args, **kwargs): raise NotImplementedError class NumpyWrapper(BaseWrapper): """Function wrapper for numpy's random functions. Allows easy usage avoiding the creation anonymous lambda functions. In addition, the `size` attribute is adjusted automatically. For instance, instead of writing 'lambda x: np.random.randint(low=1, high=10, size=x.shape[0])' you may simply write 'ts.random.randint(low=1, high=10)'. """ def __init__(self, func, size="arg"): super(NumpyWrapper, self).__init__(func) self.size = size def __call__(self, *args, **kwargs): if self.size == "arg": def wrapped(x): return self.func(x.shape[0], *args, **kwargs) elif self.size == "kwarg": def wrapped(x): return self.func(*args, size=x.shape[0], **kwargs) else: raise ValueError("Size argument must be 'arg' or 'kwarg'.") wrapped.__doc__ = self.func.__doc__ return wrapped
Add support for configurable function to extract code from item for sorting
Ext.define('Slate.sorter.Code', { extend: 'Ext.util.Sorter', config: { numberRe: /^\d+$/, numberDelim: '.', codeFn: function(item) { return item.get('Code'); }, sorterFn: function(a, b) { var me = this, codeFn = me._codeFn, // eslint-disable-line no-underscore-dangle numberRe = me._numberRe, // eslint-disable-line no-underscore-dangle numberDelim = me._numberDelim, // eslint-disable-line no-underscore-dangle codeA = codeFn(a).toLowerCase(), codeB = codeFn(b).toLowerCase(), dotIndexA, dotIndexB, numberA, numberB; if (codeA == codeB) { return 0; } dotIndexA = codeA.lastIndexOf(numberDelim); dotIndexB = codeB.lastIndexOf(numberDelim); if ( dotIndexA == -1 || dotIndexB == -1 || codeA.substr(0, dotIndexA) != codeB.substr(0, dotIndexB) || (numberA = codeA.substr(dotIndexA + 1)) == '' || (numberB = codeB.substr(dotIndexB + 1)) == '' || !numberRe.test(numberA) || !numberRe.test(numberB) ) { return codeA < codeB ? -1 : 1; } return numberA - numberB; } }, constructor: function(config) { this.initConfig(config); } });
Ext.define('Slate.sorter.Code', { extend: 'Ext.util.Sorter', config: { numberRe: /^\d+$/, numberDelim: '.', sorterFn: function(a, b) { var codeA = a.get('Code').toLowerCase(), codeB = b.get('Code').toLowerCase(), numberRe = this._numberRe, // eslint-disable-line no-underscore-dangle numberDelim = this._numberDelim, // eslint-disable-line no-underscore-dangle dotIndexA, dotIndexB, numberA, numberB; if (codeA == codeB) { return 0; } dotIndexA = codeA.lastIndexOf(numberDelim); dotIndexB = codeB.lastIndexOf(numberDelim); if ( dotIndexA == -1 || dotIndexB == -1 || codeA.substr(0, dotIndexA) != codeB.substr(0, dotIndexB) || (numberA = codeA.substr(dotIndexA + 1)) == '' || (numberB = codeB.substr(dotIndexB + 1)) == '' || !numberRe.test(numberA) || !numberRe.test(numberB) ) { return codeA < codeB ? -1 : 1; } return numberA - numberB; } }, });
Update vector layer name to wikidata
mapboxgl.accessToken = 'pk.eyJ1IjoicGxhbmVtYWQiLCJhIjoiY2l2dzVxbzA3MDAwNDJzbDUzMzVzbXc5dSJ9.WZ4_UtVvuVmOw4ofNMkiJw'; var map = new mapboxgl.Map({ container: 'map', style: 'mapbox://styles/mapbox/light-v9', zoom: 1.4, center: [21.6,7.6], hash: true }); map.addControl(new MapboxGeocoder({ accessToken: mapboxgl.accessToken })); map.on('load', function () { map.addSource('wikidata', { type: 'vector', url: 'mapbox://amisha.wikidata' }); map.addLayer({ "id": "wikidata-nodes", "type": "circle", "source": "wikidata", "source-layer": "wikidata", "paint": { "circle-color": { "property": 'distance', "stops": [ [ 0, '#fbb03b'], [ 0.1, '#223b53'], [ 1, '#e55e5e'], [ 5, '#3bb2d0'], [ 10, '#ccc'], [1000, '']] } } }); });
mapboxgl.accessToken = 'pk.eyJ1IjoicGxhbmVtYWQiLCJhIjoiY2l2dzVxbzA3MDAwNDJzbDUzMzVzbXc5dSJ9.WZ4_UtVvuVmOw4ofNMkiJw'; var map = new mapboxgl.Map({ container: 'map', style: 'mapbox://styles/mapbox/light-v9', zoom: 1.4, center: [21.6,7.6], hash: true }); map.addControl(new MapboxGeocoder({ accessToken: mapboxgl.accessToken })); map.on('load', function () { map.addSource('terrain-data', { type: 'vector', url: 'mapbox://amisha.wikidata' }); map.addLayer({ "id": "terrain-data", "type": "circle", "source": "terrain-data", "source-layer": "wikidata", "paint": { "circle-color": { "property": 'distance', "stops": [ [ 0, '#fbb03b'], [ 0.1, '#223b53'], [ 1, '#e55e5e'], [ 5, '#3bb2d0'], [ 10, '#ccc'], [1000, '']] } } }); });
Exclude spec from coverage reporting.
'use strict'; module.exports = function(config) { config.set({ autoWatch: true, browsers: [ 'PhantomJS' ], colors: true, coverageReporter: { dir: 'coverage', instrumenterOptions: { istanbul: { noCompact: true } }, reporters: [{ type: 'html', subdir: 'report-html' }, { type: 'lcov', subdir: 'report-lcov' }, { type: 'cobertura', subdir: '.', file: 'cobertura.txt' }, { type: 'lcovonly', subdir: '.', file: 'report-lcovonly.txt' }, { type: 'teamcity', subdir: '.', file: 'teamcity.txt' }, { type: 'text', subdir: '.', file: 'text.txt' }, { type: 'text-summary', subdir: '.', file: 'text-summary.txt' }] }, files: [ 'dist/jasmine-matchers.js', 'dist/jasmine-matchers.spec.js' ], frameworks: [ 'jasmine' ], preprocessors: { '**/dist/jasmine-matchers.js': [ 'coverage' ] }, reporters: [ 'nested', 'coverage' ], thresholdReporter: { statements: 95, branches: 95, functions: 95, lines: 95 } }); };
'use strict'; module.exports = function(config) { config.set({ autoWatch: true, browsers: [ 'PhantomJS' ], colors: true, coverageReporter: { dir: 'coverage', instrumenterOptions: { istanbul: { noCompact: true } }, reporters: [{ type: 'html', subdir: 'report-html' }, { type: 'lcov', subdir: 'report-lcov' }, { type: 'cobertura', subdir: '.', file: 'cobertura.txt' }, { type: 'lcovonly', subdir: '.', file: 'report-lcovonly.txt' }, { type: 'teamcity', subdir: '.', file: 'teamcity.txt' }, { type: 'text', subdir: '.', file: 'text.txt' }, { type: 'text-summary', subdir: '.', file: 'text-summary.txt' }] }, files: [ 'dist/jasmine-matchers.js', 'dist/jasmine-matchers.spec.js' ], frameworks: [ 'jasmine' ], preprocessors: { '**/dist/*.js': [ 'coverage' ] }, reporters: [ 'nested', 'coverage' ], thresholdReporter: { statements: 95, branches: 95, functions: 95, lines: 95 } }); };
Load custom preprocessor from ".thought/hb-preprocessor.js"; change name of custom helper file to ".thought/hb-helpers.js"
/*! * thought <https://github.com/nknapp/thought> * * Copyright (c) 2015 Nils Knappmeier. * Released under the MIT license. */ 'use strict' var path = require('path') /** * * Create a spec that can be loaded with `customize` using the `load()` function. * * @param {String} workingDir the working directory of thought * @returns {Function} the Customize-Spec */ module.exports = function createSpec (workingDir) { return function thoughtSpec (customize) { return customize .registerEngine('handlebars', require('customize-engine-handlebars')) .merge({ handlebars: { partials: path.join(__dirname, 'handlebars', 'partials'), templates: path.join(__dirname, 'handlebars', 'templates'), helpers: require.resolve('./handlebars/helpers.js'), data: { 'package': require(path.resolve('package.json')), 'workingDir': workingDir }, preprocessor: require('./handlebars/preprocessor.js') } }) .merge({ handlebars: { partials: path.join(workingDir, '.thought', 'partials'), templates: path.join(workingDir, '.thought', 'templates'), helpers: path.resolve(workingDir, '.thought', 'hb-helpers,js'), preprocessor: path.resolve(workingDir, '.thought','hb-preprocessor.js') } }) // .tap(console.log) } }
/*! * thought <https://github.com/nknapp/thought> * * Copyright (c) 2015 Nils Knappmeier. * Released under the MIT license. */ 'use strict' var path = require('path') /** * * Create a spec that can be loaded with `customize` using the `load()` function. * * @param {String} workingDir the working directory of thought * @returns {Function} the Customize-Spec */ module.exports = function createSpec (workingDir) { return function thoughtSpec (customize) { return customize .registerEngine('handlebars', require('customize-engine-handlebars')) .merge({ handlebars: { partials: path.join(__dirname, 'handlebars', 'partials'), templates: path.join(__dirname, 'handlebars', 'templates'), helpers: require.resolve('./handlebars/helpers.js'), data: { 'package': require(path.resolve('package.json')), 'workingDir': workingDir }, preprocessor: require('./handlebars/preprocessor.js') } }) .merge({ handlebars: { partials: path.join(workingDir, '.thought', 'partials'), templates: path.join(workingDir, '.thought', 'templates'), helpers: path.join(workingDir, '.thought', 'handlebars-helpers,js') } }) // .tap(console.log) } }
Fix Python 3's lack of .next().
#!/usr/bin/env python # -*- coding: utf-8 -*- from .. import KeyValueStore from .._compat import BytesIO from .._compat import pickle from bson.binary import Binary class MongoStore(KeyValueStore): """Uses a MongoDB collection as the backend, using pickle as a serializer. :param db: A (already authenticated) pymongo database. :param collection: A MongoDB collection name. """ def __init__(self, db, collection): self.db = db self.collection = collection def _has_key(self, key): return self.db[self.collection].find({"_id": key}).count() > 0 def _delete(self, key): return self.db[self.collection].remove({"_id": key}) def _get(self, key): try: item = next(self.db[self.collection].find({"_id": key})) return pickle.loads(item["v"]) except StopIteration: raise KeyError(key) def _open(self, key): return BytesIO(self._get(key)) def _put(self, key, value): self.db[self.collection].update( {"_id": key}, {"$set": {"v": Binary(pickle.dumps(value))}}, upsert=True) return key def _put_file(self, key, file): return self._put(key, file.read()) def iter_keys(self): for item in self.db[self.collection].find(): yield item["_id"]
#!/usr/bin/env python # -*- coding: utf-8 -*- from .. import KeyValueStore from .._compat import BytesIO from .._compat import pickle from bson.binary import Binary class MongoStore(KeyValueStore): """Uses a MongoDB collection as the backend, using pickle as a serializer. :param db: A (already authenticated) pymongo database. :param collection: A MongoDB collection name. """ def __init__(self, db, collection): self.db = db self.collection = collection def _has_key(self, key): return self.db[self.collection].find({"_id": key}).count() > 0 def _delete(self, key): return self.db[self.collection].remove({"_id": key}) def _get(self, key): try: item = self.db[self.collection].find({"_id": key}).next() return pickle.loads(item["v"]) except StopIteration: raise KeyError(key) def _open(self, key): return BytesIO(self._get(key)) def _put(self, key, value): self.db[self.collection].update( {"_id": key}, {"$set": {"v": Binary(pickle.dumps(value))}}, upsert=True) return key def _put_file(self, key, file): return self._put(key, file.read()) def iter_keys(self): for item in self.db[self.collection].find(): yield item["_id"]
Add comment to special pendingcomments bit after it's sent
// HubBub client code // From: https://github.com/almost/hubbub/ (function () { // Just a very rough demo, needs a lot more work var form = document.querySelector('form[data-hubbub]'); form.addEventListener('submit', function (evt) { evt.preventDefault(); var comment = form.querySelector('[name=comment]').value; var name = form.querySelector('[name=name]').value; var xmlhttp = new XMLHttpRequest(); // TODO: Make this configurable! xmlhttp.open("POST","https://hubbub.herokuapp.com/api/default/comments",true); xmlhttp.setRequestHeader("Content-type", "application/json"); xmlhttp.onload = function (e) { if (xmlhttp.readyState === 4) { if (xmlhttp.status >= 200 && xmlhttp.status < 300) { var response = JSON.parse(xhr.responseText); var commentsContainer = document.querySelector('form[data-pendingcomment]'); if (commentsContainer) { var commentEl = document.createElement('div'); commentEl.innerHTML = response.html; commentsContainer.addChild(commentEl); } } else { alert("Failed to send comment: " + xhr.statusText); } } }; xmlhttp.onerror = function (e) { alert("Failed to send comment: " + xhr.statusText); }; // TOOD: Get post path from canonical meta tag if it's present xmlhttp.send(JSON.stringify({metadata: {name: name}, comment: comment, post: window.location.path})); }); })();
// HubBub client code // From: https://github.com/almost/hubbub/ (function () { // Just a very rough demo, needs a lot more work var form = document.querySelector('form[data-hubbub]'); form.addEventListener('submit', function (evt) { evt.preventDefault(); var comment = form.querySelector('[name=comment]').value; var name = form.querySelector('[name=name]').value; var xmlhttp = new XMLHttpRequest(); // TODO: Make this configurable! xmlhttp.open("POST","https://hubbub.herokuapp.com/api/default/comments",true); xmlhttp.setRequestHeader("Content-type", "application/json"); xmlhttp.onload = function (e) { if (xmlhttp.readyState === 4) { if (xmlhttp.status >= 200 && xmlhttp.status < 300) { var response = JSON.parse(xhr.responseText); var commentsContainer = document.querySelector('form[data-comments]'); if (commentsContainer) { var commentEl = document.createElement('div'); commentEl.innerHTML = response.html; commentsContainer.addChild(commentEl); } } else { alert("Failed to send comment: " + xhr.statusText); } } }; xmlhttp.onerror = function (e) { alert("Failed to send comment: " + xhr.statusText); }; // TOOD: Get post path from canonical meta tag if it's present xmlhttp.send(JSON.stringify({metadata: {name: name}, comment: comment, post: window.location.path})); }); })();
Change default for env execOption to null
'use strict'; var glob = require('glob'), _ = require('underscore'), ParallelExec = require('./lib/ParallelExec'), BehatTask = require('./lib/BehatTask'), defaults = { src: './**/*.feature', bin: './bin/behat', cwd: './', config: './behat.yml', flags: '', maxProcesses: 10000, baseDir: './', debug: false, numRetries: 0, timeout: 600000, env: null }; /** * Grunt task for executing behat feature files in parallel * * @param {Grunt} grunt */ function GruntTask (grunt) { grunt.registerMultiTask('behat', 'Parallel behat', function () { var done = this.async(), options = this.options(defaults), executor = new ParallelExec(options.maxProcesses, {cwd: options.cwd, timeout: options.timeout, env: options.env}), behat; options.files = this.filesSrc; options.done = done; options.executor = executor; options.log = grunt.log; options.fail = grunt.fail; behat = new BehatTask(options); behat.run(); }); } module.exports = GruntTask;
'use strict'; var glob = require('glob'), _ = require('underscore'), ParallelExec = require('./lib/ParallelExec'), BehatTask = require('./lib/BehatTask'), defaults = { src: './**/*.feature', bin: './bin/behat', cwd: './', config: './behat.yml', flags: '', maxProcesses: 10000, baseDir: './', debug: false, numRetries: 0, timeout: 600000, env: {} }; /** * Grunt task for executing behat feature files in parallel * * @param {Grunt} grunt */ function GruntTask (grunt) { grunt.registerMultiTask('behat', 'Parallel behat', function () { var done = this.async(), options = this.options(defaults), executor = new ParallelExec(options.maxProcesses, {cwd: options.cwd, timeout: options.timeout, env: options.env}), behat; options.files = this.filesSrc; options.done = done; options.executor = executor; options.log = grunt.log; options.fail = grunt.fail; behat = new BehatTask(options); behat.run(); }); } module.exports = GruntTask;
Upgrade deadline implementation to optional
package seedu.address.logic.parser; import static seedu.address.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT; import static seedu.address.logic.parser.CliSyntax.PREFIX_DEADLINE; import static seedu.address.logic.parser.CliSyntax.PREFIX_TAG; import java.util.NoSuchElementException; import seedu.address.commons.exceptions.IllegalValueException; import seedu.address.logic.commands.AddCommand; import seedu.address.logic.commands.Command; import seedu.address.logic.commands.IncorrectCommand; /** * Parses input arguments and creates a new AddCommand object */ public class AddCommandParser { /** * Parses the given {@code String} of arguments in the context of the AddCommand * and returns an AddCommand object for execution. */ public Command parse(String args) { ArgumentTokenizer argsTokenizer = new ArgumentTokenizer(PREFIX_DEADLINE, PREFIX_TAG); argsTokenizer.tokenize(args); String name = argsTokenizer.getPreamble().get(); String deadline = argsTokenizer.getValue(PREFIX_DEADLINE).orElse(""); try { return new AddCommand(name, deadline, ParserUtil.toSet(argsTokenizer.getAllValues(PREFIX_TAG)) ); } catch (NoSuchElementException nsee) { return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE)); } catch (IllegalValueException ive) { return new IncorrectCommand(ive.getMessage()); } } }
package seedu.address.logic.parser; import static seedu.address.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT; import static seedu.address.logic.parser.CliSyntax.PREFIX_DEADLINE; import static seedu.address.logic.parser.CliSyntax.PREFIX_TAG; import java.util.NoSuchElementException; import seedu.address.commons.exceptions.IllegalValueException; import seedu.address.logic.commands.AddCommand; import seedu.address.logic.commands.Command; import seedu.address.logic.commands.IncorrectCommand; /** * Parses input arguments and creates a new AddCommand object */ public class AddCommandParser { /** * Parses the given {@code String} of arguments in the context of the AddCommand * and returns an AddCommand object for execution. */ public Command parse(String args) { ArgumentTokenizer argsTokenizer = new ArgumentTokenizer(PREFIX_DEADLINE, PREFIX_TAG); argsTokenizer.tokenize(args); try { return new AddCommand( argsTokenizer.getPreamble().get(), argsTokenizer.getValue(PREFIX_DEADLINE).get(), ParserUtil.toSet(argsTokenizer.getAllValues(PREFIX_TAG)) ); } catch (NoSuchElementException nsee) { return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE)); } catch (IllegalValueException ive) { return new IncorrectCommand(ive.getMessage()); } } }
Remove extra variable by simplifying config
(function () { 'use strict'; var path = require('path'), loadConfig = require(path.join(__dirname, 'grunt/load')), config = {}; module.exports = function (grunt) { config = { pkg: grunt.file.readJSON('package.json'), scaffold: { dev: { path: 'dev', assets: 'dev/assets', partials: 'dev/partials', templates: 'dev/templates' }, staging: { path: 'staging', assets: 'staging/assets' }, build: { path: 'build', assets: 'build/assets' }, grunt: 'grunt', tmp: { path: '.tmp', assets: '.tmp/assets' } }, timestamp: '<%= new Date().getTime() %>', banner: '/*! <%= pkg.projectName %> - v<%= pkg.version %> - by <%= pkg.developers %> - <%= grunt.template.today("dd/mm/yyyy") %> */\n', }; grunt.util._.extend(config, loadConfig(path.join(__dirname, 'grunt/options/'))); grunt.initConfig(config); require('load-grunt-tasks')(grunt); require('time-grunt')(grunt); grunt.loadNpmTasks('assemble'); grunt.loadTasks('grunt/tasks/'); }; })();
(function () { 'use strict'; var path = require('path'), loadConfig = require(path.join(__dirname, 'grunt/load')), config = {}, scaffold = {}; module.exports = function (grunt) { scaffold = { dev: { path: 'dev', assets: 'dev/assets', partials: 'dev/partials', templates: 'dev/templates' }, staging: { path: 'staging', assets: 'staging/assets' }, build: { path: 'build', assets: 'build/assets' }, grunt: 'grunt', tmp: { path: '.tmp', assets: '.tmp/assets' } }; config = { pkg: grunt.file.readJSON('package.json'), scaffold: scaffold, timestamp: '<%= new Date().getTime() %>', banner: '/*! <%= pkg.projectName %> - v<%= pkg.version %> - by <%= pkg.developers %> - <%= grunt.template.today("dd/mm/yyyy") %> */\n', }; grunt.util._.extend(config, loadConfig(path.join(__dirname, 'grunt/options/'))); grunt.initConfig(config); require('load-grunt-tasks')(grunt); require('time-grunt')(grunt); grunt.loadNpmTasks('assemble'); grunt.loadTasks('grunt/tasks/'); }; })();
Add forward to list of bridges
from setuptools import setup setup( name='regrowl', description='Regrowl server', author='Paul Traylor', url='https://github.com/kfdm/gntp-regrowl', version='0.0.1', packages=[ 'regrowl', 'regrowl.bridge', 'regrowl.extras', ], # http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', ], install_requires=[ 'gntp', ], entry_points={ 'console_scripts': [ 'regrowl = regrowl.cli:main' ], 'regrowl.bridge': [ 'echo = regrowl.bridge.echo:EchoNotifier', 'forward = regrowl.bridge.forward:ForwardNotifier', 'local = regrowl.bridge.local:LocalNotifier', 'subscribe = regrowl.bridge.subscribe:SubscribelNotifier', 'udp = regrowl.bridge.udp:UDPNotifier', ] } )
from setuptools import setup setup( name='regrowl', description='Regrowl server', author='Paul Traylor', url='https://github.com/kfdm/gntp-regrowl', version='0.0.1', packages=[ 'regrowl', 'regrowl.bridge', 'regrowl.extras', ], # http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', ], install_requires=[ 'gntp', ], entry_points={ 'console_scripts': [ 'regrowl = regrowl.cli:main' ], 'regrowl.bridge': [ 'echo = regrowl.bridge.echo:EchoNotifier', 'local = regrowl.bridge.local:LocalNotifier', 'subscribe = regrowl.bridge.subscribe:SubscribelNotifier', 'udp = regrowl.bridge.udp:UDPNotifier', ] } )
Sort parent objectives by textTitle to avoid them jumping around
import Ember from 'ember'; export default Ember.Route.extend({ session: null, course: null, proxiedObjectives: [], afterModel: function(sessionObjective){ var self = this; var deferred = Ember.RSVP.defer(); var objectiveProxy = Ember.ObjectProxy.extend({ sessionObjective: null, selected: function(){ return this.get('sessionObjective.parents').contains(this.get('content')); }.property('content', 'sessionObjective.parents.@each'), }); Ember.run.later(deferred.resolve, function() { var resolve = this; sessionObjective.get('sessions').then(function(sessions){ var session = sessions.get('firstObject'); session.get('course').then(function(course){ course.get('objectives').then(function(objectives){ var proxiedObjectives = objectives.map(function(objective){ return objectiveProxy.create({ content: objective, sessionObjective: sessionObjective, }); }).sortBy('textTitle'); if(!self.get('isDestroyed')){ self.set('proxiedObjectives', proxiedObjectives); self.set('session', session); self.set('course', course); resolve(); } }); }); }); }, 500); return deferred.promise; }, setupController: function(controller, model){ controller.set('model', model); controller.set('proxiedObjectives', this.get('proxiedObjectives')); controller.set('session', this.get('session')); controller.set('course', this.get('course')); } });
import Ember from 'ember'; export default Ember.Route.extend({ session: null, course: null, proxiedObjectives: [], afterModel: function(sessionObjective){ var self = this; var deferred = Ember.RSVP.defer(); var objectiveProxy = Ember.ObjectProxy.extend({ sessionObjective: null, selected: function(){ return this.get('sessionObjective.parents').contains(this.get('content')); }.property('content', 'sessionObjective.parents.@each'), }); Ember.run.later(deferred.resolve, function() { var resolve = this; sessionObjective.get('sessions').then(function(sessions){ var session = sessions.get('firstObject'); session.get('course').then(function(course){ course.get('objectives').then(function(objectives){ var proxiedObjectives = objectives.map(function(objective){ return objectiveProxy.create({ content: objective, sessionObjective: sessionObjective, }); }); if(!self.get('isDestroyed')){ self.set('proxiedObjectives', proxiedObjectives); self.set('session', session); self.set('course', course); resolve(); } }); }); }); }, 500); return deferred.promise; }, setupController: function(controller, model){ controller.set('model', model); controller.set('proxiedObjectives', this.get('proxiedObjectives')); controller.set('session', this.get('session')); controller.set('course', this.get('course')); } });
Adjust field name of GoTask
package com.github.blindpirate.gogradle.task.go; import groovy.lang.Closure; import org.gradle.api.Task; import org.gradle.api.internal.tasks.ContextAwareTaskAction; import org.gradle.api.internal.tasks.TaskExecutionContext; import java.util.Map; public class GoExecutionAction implements ContextAwareTaskAction { private Closure closure; private Map<String, String> env; public static GoExecutionAction wrapClosureWithEnvs(Closure closure, Map<String, String> env) { GoExecutionAction ret = new GoExecutionAction(); ret.env = env; ret.closure = closure; return ret; } private GoExecutionAction() { } @Override public void contextualise(TaskExecutionContext context) { } @Override public void execute(Task task) { closure.setDelegate(task); closure.setResolveStrategy(Closure.DELEGATE_FIRST); ClassLoader original = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(closure.getClass().getClassLoader()); Map<String, String> originalEnv = Go.class.cast(task).getEnv(); Go.class.cast(task).setEnv(env); try { synchronized (task) { closure.call(task); } } finally { Thread.currentThread().setContextClassLoader(original); Go.class.cast(task).setEnv(originalEnv); } } @Override public ClassLoader getClassLoader() { return closure.getClass().getClassLoader(); } }
package com.github.blindpirate.gogradle.task.go; import groovy.lang.Closure; import org.gradle.api.Task; import org.gradle.api.internal.tasks.ContextAwareTaskAction; import org.gradle.api.internal.tasks.TaskExecutionContext; import java.util.Map; public class GoExecutionAction implements ContextAwareTaskAction { private Closure closure; private Map<String, String> env; public static GoExecutionAction wrapClosureWithEnvs(Closure closure, Map<String, String> env) { GoExecutionAction ret = new GoExecutionAction(); ret.env = env; ret.closure = closure; return ret; } private GoExecutionAction() { } @Override public void contextualise(TaskExecutionContext context) { } @Override public void execute(Task task) { closure.setDelegate(task); closure.setResolveStrategy(Closure.DELEGATE_FIRST); ClassLoader original = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(closure.getClass().getClassLoader()); Map<String, String> originalEnv = Go.class.cast(task).getCurrentEnv(); Go.class.cast(task).setCurrentEnv(env); try { synchronized (task) { closure.call(task); } } finally { Thread.currentThread().setContextClassLoader(original); Go.class.cast(task).setCurrentEnv(originalEnv); } } @Override public ClassLoader getClassLoader() { return closure.getClass().getClassLoader(); } }
Use DjangoJSONEncoder for JSON serialization
# coding: utf-8 import logging from socket import gethostname from django.core.serializers.json import DjangoJSONEncoder from .compat import json, text_type class JSONFormatter(logging.Formatter): """ Formatter to convert LogRecord into JSON. Thanks to: https://github.com/lobziik/rlog """ def __init__(self, *args, **kwargs): include = kwargs.pop('include', None) exclude = kwargs.pop('exclude', None) super().__init__(*args, **kwargs) self.include = include self.exclude = exclude def format(self, record): data = record.__dict__.copy() if record.args: msg = record.msg % record.args else: msg = record.msg data.update( host=gethostname(), msg=msg, args=tuple(text_type(arg) for arg in record.args) ) if 'exc_info' in data and data['exc_info']: data['exc_info'] = self.formatException(data['exc_info']) if self.include: data = {f: data[f] for f in self.include} elif self.exclude: for f in self.exclude: if f in data: del data[f] return json.dumps(data, cls=DjangoJSONEncoder)
# coding: utf-8 import logging from socket import gethostname from .compat import json, text_type class JSONFormatter(logging.Formatter): """ Formatter to convert LogRecord into JSON. Thanks to: https://github.com/lobziik/rlog """ def __init__(self, *args, **kwargs): include = kwargs.pop('include', None) exclude = kwargs.pop('exclude', None) super().__init__(*args, **kwargs) self.include = include self.exclude = exclude def format(self, record): data = record.__dict__.copy() if record.args: msg = record.msg % record.args else: msg = record.msg data.update( host=gethostname(), msg=msg, args=tuple(text_type(arg) for arg in record.args) ) if 'exc_info' in data and data['exc_info']: data['exc_info'] = self.formatException(data['exc_info']) if self.include: data = {f: data[f] for f in self.include} elif self.exclude: for f in self.exclude: del data[f] return json.dumps(data)
Fix search problem (but how??)
module.exports = function($scope, $state, $location, $http, GlobalService, DocumentService, DocumentApiService, MathJaxService, QueryParser) { $scope.doSearch = function(){ var apiServer = GlobalService.apiServer() var query = QueryParser.parse($scope.searchText) console.log('query = ' + query) $http.get('http://' + apiServer + '/v1/documents' + '?' + query ) .then(function(response){ console.log(response.data['status']) console.log('Number of documents: ' + response.data['document_count']) var jsonData = response.data var documents = jsonData['documents'] DocumentService.setDocumentList(documents) var id = documents[0]['id'] console.log('SearchController, id: ' + id) DocumentApiService.getDocument(id) .then(function(response) { console.log('Document " + id + ' retrieved') console.log('CURRENT STATE: ' + $state.current) $state.go('documents') $state.reload() $scope.$watch(function(scope) { return $scope.renderedText }, MathJaxService.reload('SearchController') ); }) }); }; }
module.exports = function($scope, $state, $location, $http, GlobalService, DocumentService, DocumentApiService, MathJaxService, QueryParser) { $scope.doSearch = function(){ var apiServer = GlobalService.apiServer() var query = QueryParser.parse($scope.searchText) console.log('query = ' + query) $http.get('http://' + apiServer + '/v1/documents' + '?' + query ) .then(function(response){ console.log(response.data['status']) console.log('Number of documents: ' + response.data['document_count']) var jsonData = response.data var documents = jsonData['documents'] DocumentService.setDocumentList(documents) var id = documents[0]['id'] console.log('SearchController, id: ' + id) DocumentApiService.getDocument(id) .then(function(response) { console.log('CURRENT STATE: ' + $state.current) $state.go('documents') $state.reload() $scope.$watch(function(scope) { return $scope.renderedText }, MathJaxService.reload('SearchController') ); }) }); }; }
Add check for src field. The src field can be absent in a copy task, and the content field can be used instead, so we should check if src is present to avoid a KeyError exception.
from ansiblelint import AnsibleLintRule format = "{}" class RoleRelativePath(AnsibleLintRule): id = 'E201' shortdesc = "Doesn't need a relative path in role" description = '' tags = ['role'] def matchplay(self, file, play): # assume if 'roles' in path, inside a role. if 'roles' not in file['path']: return [] if 'template' in play: if not isinstance(play['template'], dict): return False if "../templates" in play['template']['src']: return [({'': play['template']}, self.shortdesc)] if 'win_template' in play: if not isinstance(play['win_template'], dict): return False if "../win_templates" in play['win_template']['src']: return ({'win_template': play['win_template']}, self.shortdesc) if 'copy' in play: if not isinstance(play['copy'], dict): return False if 'src' in play['copy']: if "../copys" in play['copy']['src']: return ({'sudo': play['copy']}, self.shortdesc) if 'win_copy' in play: if not isinstance(play['win_copy'], dict): return False if "../win_copys" in play['win_copy']['src']: return ({'sudo': play['win_copy']}, self.shortdesc) return []
from ansiblelint import AnsibleLintRule format = "{}" class RoleRelativePath(AnsibleLintRule): id = 'E201' shortdesc = "Doesn't need a relative path in role" description = '' tags = ['role'] def matchplay(self, file, play): # assume if 'roles' in path, inside a role. if 'roles' not in file['path']: return [] if 'template' in play: if not isinstance(play['template'], dict): return False if "../templates" in play['template']['src']: return [({'': play['template']}, self.shortdesc)] if 'win_template' in play: if not isinstance(play['win_template'], dict): return False if "../win_templates" in play['win_template']['src']: return ({'win_template': play['win_template']}, self.shortdesc) if 'copy' in play: if not isinstance(play['copy'], dict): return False if "../copys" in play['copy']['src']: return ({'sudo': play['copy']}, self.shortdesc) if 'win_copy' in play: if not isinstance(play['win_copy'], dict): return False if "../win_copys" in play['win_copy']['src']: return ({'sudo': play['win_copy']}, self.shortdesc) return []
Fix slave_labor required_by
module.exports = { name: "slave_labor", title: "Slave Labor", description: "During Upkeep, you can increase one City AV by 1. \ The maximum AV of a City is 2, unless otherwise noted.", points: 1, cost: { }, resources: [ 'food' ], requires: [ ], required_by: [ ], events: { 'anarchy': { 'steps': { '2': "- If you have {{ adv:slave_labor }}, Draw the next card {%; draw_card() %}. \ Reduce Tribes throughout your Empire an \ additional amount as shown in the RED CIRCLE. \ {%; reduce('tribes', card_value('c')) %}" } }, 'uprising': { 'steps': { '3': "- If you have {{ adv:slave_labor }}, Decimate farms in areas that have no cities.\ {% reduceFarms() %}" }, reduceFarms: function() { } }, 'bandits': { } }, phases: { 'city_advance.pre': function(ctx) { console.log('Slave labor max city 2') this.max_city = !this.max_city || this.max_city < 2 ? 2 : this.max_city; if (this.round.city_advance_limit === undefined) this.round.city_advance_limit = 1; else this.round.city_advance_limit++; ctx.done && ctx.done(); } }, actions: { }, }
module.exports = { name: "slave_labor", title: "Slave Labor", description: "During Upkeep, you can increase one City AV by 1. \ The maximum AV of a City is 2, unless otherwise noted.", points: 1, cost: { }, resources: [ 'food' ], requires: [ ], required_by: [ 'government' ], events: { 'anarchy': { 'steps': { '2': "- If you have {{ adv:slave_labor }}, Draw the next card {%; draw_card() %}. \ Reduce Tribes throughout your Empire an \ additional amount as shown in the RED CIRCLE. \ {%; reduce('tribes', card_value('c')) %}" } }, 'uprising': { 'steps': { '3': "- If you have {{ adv:slave_labor }}, Decimate farms in areas that have no cities.\ {% reduceFarms() %}" }, reduceFarms: function() { } }, 'bandits': { } }, phases: { 'city_advance.pre': function(ctx) { console.log('Slave labor max city 2') this.max_city = !this.max_city || this.max_city < 2 ? 2 : this.max_city; if (this.round.city_advance_limit === undefined) this.round.city_advance_limit = 1; else this.round.city_advance_limit++; ctx.done && ctx.done(); } }, actions: { }, }
Fix Spotify API object creation
from flask_login import UserMixin import spotify import spotipy import db_utils import application as app class User(UserMixin): ''' User class for Flask-Login ''' def __init__(self, user_id, username=None): self.id = int(user_id) self.username = username self._spotify = None @property def spotify(self): oa_client = spotify.sp_oauth # Fetch credentials from database if not self._spotify: self._spotify = db_utils.spotify_creds_for_user(app.engine, self.id) # No credentials exist for user if self._spotify is None: return None # Refresh tokens if nescessary if oa_client.is_token_expired(self._spotify): self._spotify = oa_client.refresh_access_token(self._spotify) db_utils.spotify_credentials_upsert(app.engine, self.id, self._spotify) return spotipy.Spotify(auth=self._spotify['access_token']) class Playlist(object): ''' Playlist object representation ''' def __init__(self, playlist_id, title=None, duration=0, count=0): self.id = playlist_id self.title = title self.duration = duration self.count = count
from flask_login import UserMixin import spotify import spotipy import db_utils import application as app class User(UserMixin): ''' User class for Flask-Login ''' def __init__(self, user_id, username=None): self.id = int(user_id) self.username = username self._spotify = None @property def spotify(self): oa_client = spotify.sp_oauth # Fetch credentials from database if not self._spotify: self._spotify = db_utils.spotify_creds_for_user(app.engine, self.id) # No credentials exist for user if self._spotify is None: return None # Refresh tokens if nescessary if oa_client.is_token_expired(self._spotify): self._spotify = oa_client.refresh_access_token(self._spotify) db_utils.spotify_credentials_upsert(app.engine, self.id, self._spotify) return spotipy.Spotify(auth=self._spotify) class Playlist(object): ''' Playlist object representation ''' def __init__(self, playlist_id, title=None, duration=0, count=0): self.id = playlist_id self.title = title self.duration = duration self.count = count
Add a better transform example
/** The `DS.Transform` class is used to serialize and deserialize model attributes when they are saved or loaded from an adapter. Subclassing `DS.Transform` is useful for creating custom attributes. All subclasses of `DS.Transform` must implement a `serialize` and a `deserialize` method. Example ```javascript // Converts centigrade in the JSON to fahrenheit in the app App.TemperatureTransform = DS.Transform.extend({ deserialize: function(serialized) { return (serialized * 1.8) + 32; }, serialize: function(deserialized) { return (deserialized - 32) / 1.8; } }); ``` Usage ```javascript var attr = DS.attr; App.Requirement = DS.Model.extend({ name: attr('string'), optionsArray: attr('raw') }); ``` @class Transform @namespace DS */ var Transform = Ember.Object.extend({ /** When given a deserialized value from a record attribute this method must return the serialized value. Example ```javascript serialize: function(deserialized) { return Ember.isEmpty(deserialized) ? null : Number(deserialized); } ``` @method serialize @param deserialized The deserialized value @return The serialized value */ serialize: Ember.required(), /** When given a serialize value from a JSON object this method must return the deserialized value for the record attribute. Example ```javascript deserialize: function(serialized) { return empty(serialized) ? null : Number(serialized); } ``` @method deserialize @param serialized The serialized value @return The deserialized value */ deserialize: Ember.required() }); export default Transform;
/** The `DS.Transform` class is used to serialize and deserialize model attributes when they are saved or loaded from an adapter. Subclassing `DS.Transform` is useful for creating custom attributes. All subclasses of `DS.Transform` must implement a `serialize` and a `deserialize` method. Example ```javascript App.RawTransform = DS.Transform.extend({ deserialize: function(serialized) { return serialized; }, serialize: function(deserialized) { return deserialized; } }); ``` Usage ```javascript var attr = DS.attr; App.Requirement = DS.Model.extend({ name: attr('string'), optionsArray: attr('raw') }); ``` @class Transform @namespace DS */ var Transform = Ember.Object.extend({ /** When given a deserialized value from a record attribute this method must return the serialized value. Example ```javascript serialize: function(deserialized) { return Ember.isEmpty(deserialized) ? null : Number(deserialized); } ``` @method serialize @param deserialized The deserialized value @return The serialized value */ serialize: Ember.required(), /** When given a serialize value from a JSON object this method must return the deserialized value for the record attribute. Example ```javascript deserialize: function(serialized) { return empty(serialized) ? null : Number(serialized); } ``` @method deserialize @param serialized The serialized value @return The deserialized value */ deserialize: Ember.required() }); export default Transform;
Add homepage to LongT5 dataset collection PiperOrigin-RevId: 479013251
# coding=utf-8 # Copyright 2022 The TensorFlow Datasets Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Long T5 dataset collection.""" import collections from typing import Mapping from tensorflow_datasets.core import dataset_collection_builder from tensorflow_datasets.core import naming class Longt5(dataset_collection_builder.DatasetCollection): """Long T5 dataset collection.""" @property def info(self) -> dataset_collection_builder.DatasetCollectionInfo: return dataset_collection_builder.DatasetCollectionInfo.from_cls( dataset_collection_class=self.__class__, release_notes={ "1.0.0": "Initial release", }, homepage="https://github.com/google-research/longt5", ) @property def datasets(self,) -> Mapping[str, Mapping[str, naming.DatasetReference]]: return collections.OrderedDict({ "1.0.0": naming.references_for({ "natural_questions": "natural_questions/longt5:0.1.0", "media_sum": "media_sum:1.0.0", }) })
# coding=utf-8 # Copyright 2022 The TensorFlow Datasets Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Long T5 dataset collection.""" import collections from typing import Mapping from tensorflow_datasets.core import dataset_collection_builder from tensorflow_datasets.core import naming class Longt5(dataset_collection_builder.DatasetCollection): """Long T5 dataset collection.""" @property def info(self) -> dataset_collection_builder.DatasetCollectionInfo: return dataset_collection_builder.DatasetCollectionInfo.from_cls( dataset_collection_class=self.__class__, release_notes={ "1.0.0": "Initial release", }, ) @property def datasets(self,) -> Mapping[str, Mapping[str, naming.DatasetReference]]: return collections.OrderedDict({ "1.0.0": naming.references_for({ "natural_questions": "natural_questions/longt5:0.1.0", "media_sum": "media_sum:1.0.0", }) })
Fix compile error of recipe "android" for non-sdl bootstrap build
from distutils.core import setup, Extension import os library_dirs = ['libs/' + os.environ['ARCH']] lib_dict = { 'pygame': ['sdl'], 'sdl2': ['SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf'] } sdl_libs = lib_dict.get(os.environ['BOOTSTRAP'], []) renpy_sound = Extension('android._android_sound', ['android/_android_sound.c', 'android/_android_sound_jni.c', ], libraries=sdl_libs + ['log'], library_dirs=library_dirs) modules = [Extension('android._android', ['android/_android.c', 'android/_android_jni.c'], libraries=sdl_libs + ['log'], library_dirs=library_dirs), Extension('android._android_billing', ['android/_android_billing.c', 'android/_android_billing_jni.c'], libraries=['log'], library_dirs=library_dirs)] if int(os.environ['IS_PYGAME']): modules.append(renpy_sound) setup(name='android', version='1.0', packages=['android'], package_dir={'android': 'android'}, ext_modules=modules )
from distutils.core import setup, Extension import os library_dirs = ['libs/' + os.environ['ARCH']] lib_dict = { 'pygame': ['sdl'], 'sdl2': ['SDL2', 'SDL2_image', 'SDL2_mixer', 'SDL2_ttf'] } sdl_libs = lib_dict[os.environ['BOOTSTRAP']] renpy_sound = Extension('android._android_sound', ['android/_android_sound.c', 'android/_android_sound_jni.c', ], libraries=sdl_libs + ['log'], library_dirs=library_dirs) modules = [Extension('android._android', ['android/_android.c', 'android/_android_jni.c'], libraries=sdl_libs + ['log'], library_dirs=library_dirs), Extension('android._android_billing', ['android/_android_billing.c', 'android/_android_billing_jni.c'], libraries=['log'], library_dirs=library_dirs)] if int(os.environ['IS_PYGAME']): modules.append(renpy_sound) setup(name='android', version='1.0', packages=['android'], package_dir={'android': 'android'}, ext_modules=modules )
[Tests] Implement simple tests for findBy([]) method
<?php /** * @author: Patsura Dmitry http://github.com/ovr <[email protected]> */ namespace Lynx\Tests; use DateTime; use Model\User; class RepositoryTest extends TestCase { public function testGetOneMethodSuccessForUserEntity() { /** @var \Lynx\Repository $repository */ $repository = $this->em->getRepository(User::class); /** @var User $result */ $userOne = $repository->getOne(1); static::assertSuccessUser($userOne); static::assertSame(1, $userOne->id); /** @var User $result */ $userTwo = $repository->getOne(2); static::assertSuccessUser($userTwo); static::assertSame(2, $userTwo->id); } public function testGetOneMethodNotFoundForUserEntity() { /** @var \Lynx\Repository $repository */ $repository = $this->em->getRepository(User::class); static::assertNull($repository->getOne(100000000)); } protected static function assertSuccessUser($result) { static::assertInstanceOf(User::class, $result); static::assertInternalType('integer', $result->id); static::assertInternalType('string', $result->name); static::assertInstanceOf(DateTime::class, $result->dateCreated); static::assertInternalType('integer', $result->groupId); } public function testFindByWithoutParametersForUserEntity() { /** @var \Lynx\Repository $repository */ $repository = $this->em->getRepository(User::class); /** @var User[] $result */ $result = $repository->findBy([]); foreach ($result as $user) { static::assertSuccessUser($user); } } }
<?php /** * @author: Patsura Dmitry http://github.com/ovr <[email protected]> */ namespace Lynx\Tests; use DateTime; use Model\User; class RepositoryTest extends TestCase { public function testGetOneMethodSuccessForUserEntity() { /** @var \Lynx\Repository $repository */ $repository = $this->em->getRepository(User::class); /** @var User $result */ $userOne = $repository->getOne(1); static::assertSuccessUser($userOne); static::assertSame(1, $userOne->id); /** @var User $result */ $userTwo = $repository->getOne(2); static::assertSuccessUser($userTwo); static::assertSame(2, $userTwo->id); } public function testGetOneMethodNotFoundForUserEntity() { /** @var \Lynx\Repository $repository */ $repository = $this->em->getRepository(User::class); static::assertNull($repository->getOne(100000000)); } protected static function assertSuccessUser($result) { static::assertInstanceOf(User::class, $result); static::assertInternalType('integer', $result->id); static::assertInternalType('string', $result->name); static::assertInstanceOf(DateTime::class, $result->dateCreated); static::assertInternalType('integer', $result->groupId); } }
Update menu to Bootstrap 4
<nav class="navbar navbar-toggleable-sm navbar-static-top navbar-inverse bg-inverse"> <div class="container"> <button class="navbar-toggler navbar-toggler-right" type="button" data-toggle="collapse" data-target="#navbarSupportedContent" aria-controls="navbarSupportedContent" aria-expanded="false" aria-label="Toggle navigation"> <span class="navbar-toggler-icon"></span> </button> <span class="navbar-brand" href="{{ url('/') }}">Bang</span> <div class="collapse navbar-collapse" id="navbarSupportedContent"> <ul class="navbar-nav"> <li class="nav-item"> <a class="nav-link" href="{{ route('cartridges.index') }}">Cartridges</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('ranges.index') }}">Ranges</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('stores.index') }}">Stores</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('firearms.index') }}">Firearms</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('orders.index') }}">Orders</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('trips.index') }}">Range Trips</a> </li> </ul> </div> </div> </nav>
<nav class="navbar navbar-static-top navbar-dark bg-inverse"> <div class="container"> <a class="navbar-brand" href="{{ url('/') }}">Bang</a> <ul class="nav navbar-nav"> <li class="nav-item"> <a class="nav-link" href="{{ route('cartridges.index') }}">Cartridges</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('ranges.index') }}">Ranges</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('stores.index') }}">Stores</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('firearms.index') }}">Firearms</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('orders.index') }}">Orders</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ route('trips.index') }}">Range Trips</a> </li> </ul> </div> </nav>
Make this easier to test, which we'll get to a bit later
import os import atexit import logging import socket from .preflight import preflight_check from .log import configure_logging from .notifier import notify from .constants import SOCKET_PATH, SOCKET_TERMINATOR def _clean_up_existing_socket(socket_path): try: os.unlink(socket_path) except OSError: if os.path.exists(socket_path): raise def _listen_on_socket(socket_path): _clean_up_existing_socket(socket_path) sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.bind(socket_path) sock.listen(1) logging.info('Listening on socket at {}'.format(socket_path)) notify('Dusty is listening for commands') atexit.register(notify, 'Dusty daemon has terminated') while True: try: connection, client_address = sock.accept() try: while True: data = connection.recv(1024) if not data: break logging.info('Received command: {}'.format(data)) connection.sendall('Received: {}\n'.format(data)) connection.sendall(SOCKET_TERMINATOR) finally: connection.close() except KeyboardInterrupt: break except: logging.exception('Exception on socket listen') def main(): notify('Dusty initializing...') configure_logging() preflight_check() _listen_on_socket(SOCKET_PATH) if __name__ == '__main__': main()
import os import atexit import logging import socket from .preflight import preflight_check from .log import configure_logging from .notifier import notify from .constants import SOCKET_PATH, SOCKET_TERMINATOR def _clean_up_existing_socket(): try: os.unlink(SOCKET_PATH) except OSError: if os.path.exists(SOCKET_PATH): raise def _listen_on_socket(): _clean_up_existing_socket() sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.bind(SOCKET_PATH) sock.listen(1) logging.info('Listening on socket at {}'.format(SOCKET_PATH)) notify('Dusty is listening for commands') atexit.register(notify, 'Dusty daemon has terminated') while True: try: connection, client_address = sock.accept() try: while True: data = connection.recv(1024) if not data: break logging.info('Received command: {}'.format(data)) connection.sendall('Received: {}\n'.format(data)) connection.sendall(SOCKET_TERMINATOR) finally: connection.close() except KeyboardInterrupt: break except: logging.exception('Exception on socket listen') def main(): notify('Dusty initializing...') configure_logging() preflight_check() _listen_on_socket() if __name__ == '__main__': main()
Enable AutoConfiguration and remove default constructor from CustomerServiceAdaptor
package com.tesco.bootcamp.orderreview.adaptor; import com.tesco.bootcamp.orderreview.representations.Customer; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.core.ParameterizedTypeReference; import org.springframework.http.HttpMethod; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Component; import org.springframework.web.client.RestTemplate; /** * Created by Bartosz Jedrzejczak on 11/01/2017. */ @Component @EnableAutoConfiguration public class CustomerServiceAdaptor { private String customerApiURi; @Autowired public CustomerServiceAdaptor(@Qualifier("customerServiceURL") String url) { this.customerApiURi = url; } public Customer call(String loginID) { RestTemplate restTemplate = new RestTemplate(); try { ResponseEntity<Customer> collectRequestResult = restTemplate.exchange( customerApiURi + "/customer?login=" + loginID + "&password=Password!23", HttpMethod.GET, null, new ParameterizedTypeReference<Customer>() { }); return collectRequestResult.getBody(); } catch (Exception e) { throw new RuntimeException(e); } } }
package com.tesco.bootcamp.orderreview.adaptor; import com.tesco.bootcamp.orderreview.representations.Customer; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.core.ParameterizedTypeReference; import org.springframework.http.HttpMethod; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Component; import org.springframework.web.client.RestTemplate; /** * Created by Bartosz Jedrzejczak on 11/01/2017. */ @Component public class CustomerServiceAdaptor { private String customerApiURi = null; public CustomerServiceAdaptor() { } @Autowired public CustomerServiceAdaptor(@Qualifier("customerServiceURL") String url) { this.customerApiURi = url; } public Customer call(String loginID) { RestTemplate restTemplate = new RestTemplate(); try { ResponseEntity<Customer> collectRequestResult = restTemplate.exchange( customerApiURi + "/customer?login=" + loginID + "&password=Password!23", HttpMethod.GET, null, new ParameterizedTypeReference<Customer>() { }); return collectRequestResult.getBody(); } catch (Exception e) { throw new RuntimeException(e); } } }
Fix issue with manager (cut-and-paste error)
from django.db import models from logicaldelete import managers try: from django.utils import timezone except ImportError: from datetime import datetime as timezone class Model(models.Model): """ This base model provides date fields and functionality to enable logical delete functionality in derived models. """ date_created = models.DateTimeField(default=timezone.now) date_modified = models.DateTimeField(default=timezone.now) date_removed = models.DateTimeField(null=True, blank=True) objects = managers.LogicalDeletedManager() def active(self): return self.date_removed is None active.boolean = True def delete(self): ''' Soft delete all fk related objects that inherit from logicaldelete class ''' # Fetch related models related_objs = [relation.get_accessor_name() for relation in self._meta.get_all_related_objects()] for objs_model in related_objs: # Retrieve all related objects objs = getattr(self, objs_model).all() for obj in objs: # Checking if inherits from logicaldelete if not issubclass(obj.__class__, Model): break obj.delete() # Soft delete the object self.date_removed = timezone.now() self.save() class Meta: abstract = True
from django.db import models try: from django.utils import timezone except ImportError: from datetime import datetime as timezonefrom logicaldelete import managers class Model(models.Model): """ This base model provides date fields and functionality to enable logical delete functionality in derived models. """ date_created = models.DateTimeField(default=timezone.now) date_modified = models.DateTimeField(default=timezone.now) date_removed = models.DateTimeField(null=True, blank=True) objects = managers.LogicalDeletedManager() def active(self): return self.date_removed is None active.boolean = True def delete(self): ''' Soft delete all fk related objects that inherit from logicaldelete class ''' # Fetch related models related_objs = [relation.get_accessor_name() for relation in self._meta.get_all_related_objects()] for objs_model in related_objs: # Retrieve all related objects objs = getattr(self, objs_model).all() for obj in objs: # Checking if inherits from logicaldelete if not issubclass(obj.__class__, Model): break obj.delete() # Soft delete the object self.date_removed = timezone.now() self.save() class Meta: abstract = True
Fix historian service interface
import logging from flow_workflow.historian.messages import UpdateMessage LOG = logging.getLogger(__name__) class WorkflowHistorianServiceInterface(object): def __init__(self, broker=None, exchange=None, routing_key=None): self.broker = broker self.exchange = exchange self.routing_key = routing_key def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs): if workflow_plan_id < 0: # ignore update (don't even make message) LOG.debug("Received negative workflow_plan_id:%s, " "ignoring update (net_key=%s, operation_id=%s, name=%s," "workflow_plan_id=%s, kwargs=%s)", workflow_plan_id, net_key, peration_id, name, workflow_plan_id, kwargs) else: LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s," "workflow_plan_id=%s, kwargs=%s)", net_key, operation_id, name, workflow_plan_id, kwargs) message = UpdateMessage(net_key=net_key, operation_id=operation_id, name=name, workflow_plan_id=workflow_plan_id, **kwargs) self.broker.publish(self.exchange, self.routing_key, message)
import logging from flow_workflow.historian.messages import UpdateMessage LOG = logging.getLogger(__name__) class WorkflowHistorianServiceInterface(object): def __init__(self, broker=None, exchange=None, routing_key=None): self.broker = broker self.exchange = exchange self.routing_key = routing_key def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs): if workflow_plan_id < 0: # ignore update (don't even make message) LOG.debug("Received negative workflow_plan_id:%s, " "ignoring update (net_key=%s, operation_id=%s, name=%s," "workflow_plan_id=%s, kwargs=%s)", workflow_plan_id, net_key, peration_id, name, workflow_plan_id, kwargs) else: LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s," "workflow_plan_id=%s, kwargs=%s)", net_key, peration_id, name, workflow_plan_id, kwargs) message = UpdateMessage(net_key=net_key, operation_id=operation_id, **kwargs) self.broker.publish(self.exchange, self.routing_key, message)
Fix misspelled UglifyJS filter class name
"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_. UglifyJS is an external tool written for NodeJS; this filter assumes that the ``uglifyjs`` executable is in the path. Otherwise, you may define a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs`` by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings. """ import subprocess from webassets.exceptions import FilterError from webassets.filter import Filter __all__ = ('UglifyJSFilter',) class UglifyJSFilter(Filter): name = 'uglifyjs' def setup(self): self.binary = self.get_config( 'UGLIFYJS_BIN', require=False) or 'uglifyjs' self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS', require=False) def output(self, _in, out, **kw): args = [self.binary] if self.extra_args: args.extend(self.extra_args) proc = subprocess.Popen( args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate(_in.read()) if proc.returncode != 0: raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+ 'stdout=%s, returncode=%s') % ( stderr, stdout, proc.returncode)) out.write(stdout)
"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_. UglifyJS is an external tool written for NodeJS; this filter assumes that the ``uglifyjs`` executable is in the path. Otherwise, you may define a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs`` by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings. """ import subprocess from webassets.exceptions import FilterError from webassets.filter import Filter __all__ = ('UglifySFilter',) class UglifySFilter(Filter): name = 'uglifyjs' def setup(self): self.binary = self.get_config( 'UGLIFYJS_BIN', require=False) or 'uglifyjs' self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS', require=False) def output(self, _in, out, **kw): args = [self.binary] if self.extra_args: args.extend(self.extra_args) proc = subprocess.Popen( args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate(_in.read()) if proc.returncode != 0: raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+ 'stdout=%s, returncode=%s') % ( stderr, stdout, proc.returncode)) out.write(stdout)
Add commented includePaths parameter for grunt-sass in case Foundation is used
module.exports = function(grunt) { //grunt-sass grunt.config('sass', { options: { outputStyle: 'expanded', //includePaths: ['<%= config.scss.includePaths %>'], imagePath: '../<%= config.image.dir %>' }, dist: { files: { '<%= config.css.dir %>/<%= config.css.file %>': '<%= config.scss.dir %>/<%= config.scss.file %>' } } }); //grunt-autoprefixer grunt.config('autoprefixer', { options: { browsers: ['> 1%', 'last 2 versions', 'ie 8', 'ie 9', 'ie 10'] }, dist: { files: { '<%= config.css.dir %>/<%= config.css.file %>': '<%= config.css.dir %>/<%= config.css.file %>' } } }); //grunt-contrib-cssmin grunt.config('cssmin', { target: { src: '<%= config.css.dir %>/<%= config.css.file %>', dest: '<%= config.css.dir %>/<%= config.css.file %>' } }); //grunt-contrib-csslint grunt.config('csslint', { options: { csslintrc: 'grunt/.csslintrc' }, strict: { src: ['<%= config.css.dir %>/*.css'] } }); };
module.exports = function(grunt) { //grunt-sass grunt.config('sass', { options: { outputStyle: 'expanded', imagePath: '../<%= config.image.dir %>' }, dist: { files: { '<%= config.css.dir %>/<%= config.css.file %>': '<%= config.scss.dir %>/<%= config.scss.file %>' } } }); //grunt-autoprefixer grunt.config('autoprefixer', { options: { browsers: ['> 1%', 'last 2 versions', 'ie 8', 'ie 9', 'ie 10'] }, dist: { files: { '<%= config.css.dir %>/<%= config.css.file %>': '<%= config.css.dir %>/<%= config.css.file %>' } } }); //grunt-contrib-cssmin grunt.config('cssmin', { target: { src: '<%= config.css.dir %>/<%= config.css.file %>', dest: '<%= config.css.dir %>/<%= config.css.file %>' } }); //grunt-contrib-csslint grunt.config('csslint', { options: { csslintrc: 'grunt/.csslintrc' }, strict: { src: ['<%= config.css.dir %>/*.css'] } }); };
Fix to avoid asynchronous Ebean fetchAhead (automatically fetches the next page). When a page is accessed, Ebean's LimitOffsetPagingQuery.java automatically starts a new background thread to fetch the next page. This thread uses a database connection that can still be alive after the HTTP request has been processed. Under heavy load, this problem causes the following exception: java.sql.SQLException: Timed out waiting for a free available connection. FetchAhead is useless in a stateless web application. Therefore, it must be disabled using setFetchAhead(false).
package models; import java.util.*; import javax.persistence.*; import play.db.ebean.*; import play.data.format.*; import play.data.validation.*; import com.avaje.ebean.*; /** * Computer entity managed by Ebean */ @Entity public class Computer extends Model { @Id public Long id; @Constraints.Required public String name; @Formats.DateTime(pattern="yyyy-MM-dd") public Date introduced; @Formats.DateTime(pattern="yyyy-MM-dd") public Date discontinued; @ManyToOne public Company company; /** * Generic query helper for entity Computer with id Long */ public static Finder<Long,Computer> find = new Finder<Long,Computer>(Long.class, Computer.class); /** * Return a page of computer * * @param page Page to display * @param pageSize Number of computers per page * @param sortBy Computer property used for sorting * @param order Sort order (either or asc or desc) * @param filter Filter applied on the name column */ public static Page<Computer> page(int page, int pageSize, String sortBy, String order, String filter) { return find.where() .ilike("name", "%" + filter + "%") .orderBy(sortBy + " " + order) .fetch("company") .findPagingList(pageSize) .setFetchAhead(false) .getPage(page); } }
package models; import java.util.*; import javax.persistence.*; import play.db.ebean.*; import play.data.format.*; import play.data.validation.*; import com.avaje.ebean.*; /** * Computer entity managed by Ebean */ @Entity public class Computer extends Model { @Id public Long id; @Constraints.Required public String name; @Formats.DateTime(pattern="yyyy-MM-dd") public Date introduced; @Formats.DateTime(pattern="yyyy-MM-dd") public Date discontinued; @ManyToOne public Company company; /** * Generic query helper for entity Computer with id Long */ public static Finder<Long,Computer> find = new Finder<Long,Computer>(Long.class, Computer.class); /** * Return a page of computer * * @param page Page to display * @param pageSize Number of computers per page * @param sortBy Computer property used for sorting * @param order Sort order (either or asc or desc) * @param filter Filter applied on the name column */ public static Page<Computer> page(int page, int pageSize, String sortBy, String order, String filter) { return find.where() .ilike("name", "%" + filter + "%") .orderBy(sortBy + " " + order) .fetch("company") .findPagingList(pageSize) .getPage(page); } }
Update dependencies, migrate webpack dev config and Babel, and adapt to new React 16
/* eslint-disable */ const BrowserSyncPlugin = require('browser-sync-webpack-plugin'); const ExtractTextPlugin = require("extract-text-webpack-plugin"); module.exports = { entry: __dirname + '/src/main/resources/static/js/main.js', devtool: 'eval-source-map', output: { filename: 'main.js', path: __dirname + '/target/classes/static/dist/' // copy to target }, module: { rules: [ { test: /\.js$/, exclude: /node_modules/, use: { loader: 'babel-loader' } }, { test: /\.css$/, exclude: /node_modules/, use: ExtractTextPlugin.extract({ fallback: 'style-loader', use: 'css-loader' }) }, { test: /\.js$/, enforce: 'pre', exclude: /node_modules/, use: { loader: 'eslint-loader' } } ] }, plugins: [ new ExtractTextPlugin('bundle.css', {allChunks: true}), // new BrowserSyncPlugin({ proxy: 'http://localhost:8080/' }) ] };
/* eslint-disable */ //noinspection Eslint const BrowserSyncPlugin = require('browser-sync-webpack-plugin'); //noinspection Eslint const ExtractTextPlugin = require("extract-text-webpack-plugin"); //noinspection Eslint module.exports = { // eslint-disable-line entry: __dirname + '/src/main/resources/static/js/main.js', devtool: 'eval-source-map', output: { filename: 'main.js', // eslint-disable-line path: __dirname + '/target/classes/static/dist/' // copy to target }, module: { rules: [ { test: /\.js$/, exclude: /node_modules/, use: { loader: 'babel-loader' } }, { test: /\.css$/, exclude: /node_modules/, use: ExtractTextPlugin.extract({ fallback: 'style-loader', use: 'css-loader' }) }, { test: /\.js$/, enforce: 'pre', exclude: /node_modules/, use: { loader: 'eslint-loader' } } ] }, plugins: [ new ExtractTextPlugin('bundle.css', {allChunks: true}), // new BrowserSyncPlugin({ proxy: 'http://localhost:8080/' }) ] };
Fix typo in command name
from setuptools import setup, find_packages setup( name='sgfs', version='0.1.0b', description='Translation layer between Shotgun entities and a file structure.', url='http://github.com/westernx/sgfs', packages=find_packages(exclude=['build*', 'tests*']), include_package_data=True, author='Mike Boers', author_email='[email protected]', license='BSD-3', entry_points={ 'console_scripts': [ # Low-level structure. 'sgfs-tag = sgfs.commands.tag:main', 'sgfs-create-structure = sgfs.commands.create_structure:main', # Relinking or updating tags. 'sgfs-relink = sgfs.commands.relink:main', 'sgfs-rebuild-cache = sgfs.commands.relink:main_rebuild', 'sgfs-update = sgfs.commands.update:main', # Opening commands. 'sgfs-open = sgfs.commands.open:run_open', 'sgfs-shotgun = sgfs.commands.open:run_shotgun', 'sgfs-path = sgfs.commands.open:run_path', 'sgfs-rv = sgfs.commands.rv:run', ], }, classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
from setuptools import setup, find_packages setup( name='sgfs', version='0.1.0b', description='Translation layer between Shotgun entities and a file structure.', url='http://github.com/westernx/sgfs', packages=find_packages(exclude=['build*', 'tests*']), include_package_data=True, author='Mike Boers', author_email='[email protected]', license='BSD-3', entry_points={ 'console_scripts': [ # Low-level structure. 'sgfs-tag = sgfs.commands.tag:main', 'sgfs-create-structure = sgfs.commands.create_structure:main', # Relinking or updating tags. 'sgfs-relink = sgfs.commands.relink:main', 'sgfs-rebuild-cache = gfs.commands.relink:main_rebuild', 'sgfs-update = sgfs.commands.update:main', # Opening commands. 'sgfs-open = sgfs.commands.open:run_open', 'sgfs-shotgun = sgfs.commands.open:run_shotgun', 'sgfs-path = sgfs.commands.open:run_path', 'sgfs-rv = sgfs.commands.rv:run', ], }, classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
Set heartbeat attribute in __init__
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import json import random import hashlib import requests from heartbeat import Challenge, Heartbeat from .utils import urlify from .exc import DownstreamError class DownstreamClient(object): def __init__(self, server_url): self.server = server_url.strip('/') self.challenges = [] self.heartbeat = None def connect(self, url): raise NotImplementedError def store_path(self, path): raise NotImplementedError def get_chunk(self, hash): raise NotImplementedError def challenge(self, hash, challenge): raise NotImplementedError def answer(self, hash, hash_answer): raise NotImplementedError def _enc_fname(self, filename): return urlify(os.path.split(filename)[1]) def get_challenges(self, filename): enc_fname = urlify(os.path.split(filename)[1]) url = '%s/api/downstream/challenge/%s' % (self.server, enc_fname) resp = requests.get(url) try: resp.raise_for_status() except Exception as e: raise DownstreamError("Error connecting to downstream" "-node:", e.message) _json = resp.json() for challenge in _json['challenges']: chal = Challenge(challenge.get('block'), challenge.get('seed')) self.challenges.append(chal) def answer_challenge(self, filename): enc_fname = self._enc_fname(filename) raise NotImplementedError def random_challenge(self): random.choice(self.challenges)
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import json import random import hashlib import requests from heartbeat import Challenge, Heartbeat from .utils import urlify from .exc import DownstreamError class DownstreamClient(object): def __init__(self, server_url): self.server = server_url.strip('/') self.challenges = [] def connect(self, url): raise NotImplementedError def store_path(self, path): raise NotImplementedError def get_chunk(self, hash): raise NotImplementedError def challenge(self, hash, challenge): raise NotImplementedError def answer(self, hash, hash_answer): raise NotImplementedError def _enc_fname(self, filename): return urlify(os.path.split(filename)[1]) def get_challenges(self, filename): enc_fname = urlify(os.path.split(filename)[1]) url = '%s/api/downstream/challenge/%s' % (self.server, enc_fname) resp = requests.get(url) try: resp.raise_for_status() except Exception as e: raise DownstreamError("Error connecting to downstream" "-node:", e.message) _json = resp.json() for challenge in _json['challenges']: chal = Challenge(challenge.get('block'), challenge.get('seed')) self.challenges.append(chal) def answer_challenge(self, filename): enc_fname = self._enc_fname(filename) raise NotImplementedError def random_challenge(self): random.choice(self.challenges)
Update required version of wptrunner
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup PACKAGE_VERSION = '0.1' deps = ['fxos-appgen>=0.2.7', 'marionette_client>=0.7.1.1', 'marionette_extension >= 0.1', 'mozdevice >= 0.33', 'mozlog >= 1.6', 'moznetwork >= 0.24', 'mozprocess >= 0.18', 'wptserve >= 1.0.1', 'wptrunner >= 0.2.7, < 0.3'] setup(name='fxos-certsuite', version=PACKAGE_VERSION, description='Certification suite for FirefoxOS', classifiers=[], keywords='mozilla', author='Mozilla Automation and Testing Team', author_email='[email protected]', url='https://github.com/mozilla-b2g/fxos-certsuite', license='MPL', packages=['certsuite'], include_package_data=True, zip_safe=False, install_requires=deps, entry_points=""" # -*- Entry points: -*- [console_scripts] runcertsuite = certsuite:harness_main cert = certsuite:certcli """)
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup PACKAGE_VERSION = '0.1' deps = ['fxos-appgen>=0.2.7', 'marionette_client>=0.7.1.1', 'marionette_extension >= 0.1', 'mozdevice >= 0.33', 'mozlog >= 1.6', 'moznetwork >= 0.24', 'mozprocess >= 0.18', 'wptserve >= 1.0.1', 'wptrunner >= 0.2.6'] setup(name='fxos-certsuite', version=PACKAGE_VERSION, description='Certification suite for FirefoxOS', classifiers=[], keywords='mozilla', author='Mozilla Automation and Testing Team', author_email='[email protected]', url='https://github.com/mozilla-b2g/fxos-certsuite', license='MPL', packages=['certsuite'], include_package_data=True, zip_safe=False, install_requires=deps, entry_points=""" # -*- Entry points: -*- [console_scripts] runcertsuite = certsuite:harness_main cert = certsuite:certcli """)
Change template and improve query in NotificationHistory
window.c.AdminNotificationHistory = ((m, h, _, models) => { return { controller: (args) => { const notifications = m.prop([]), getNotifications = (user) => { let notification = models.notification; notification.getPageWithToken(m.postgrest.filtersVM({user_id: 'eq', sent_at: 'is.null'}).user_id(user.id).sent_at(!null).order({sent_at: 'desc'}).parameters()).then(function(data){ notifications(data); }); return notifications(); }; getNotifications(args.user); return { notifications: notifications }; }, view: (ctrl) => { return m('.w-col.w-col-4', [ m('.fontweight-semibold.fontsize-smaller.lineheight-tighter.u-marginbottom-20', 'Histórico de notificações'), ctrl.notifications().map(function(cEvent) { return m('.w-row.fontsize-smallest.lineheight-looser.date-event', [ m('.w-col.w-col-24', [ m('.fontcolor-secondary', h.momentify(cEvent.sent_at, 'DD/MM/YYYY, HH:mm'), ' - ', cEvent.template_name) ]), ]); }) ]); } }; }(window.m, window.c.h, window._, window.c.models));
window.c.AdminNotificationHistory = ((m, h, _, models) => { return { controller: (args) => { const notifications = m.prop([]), getNotifications = (user) => { let notification = models.notification; notification.getPageWithToken(m.postgrest.filtersVM({user_id: 'eq'}).user_id(user.id).parameters()).then(function(data){ notifications(data); }); return notifications(); }; getNotifications(args.user); return { notifications: notifications }; }, view: (ctrl) => { return m('.w-col.w-col-4', [ m('.fontweight-semibold.fontsize-smaller.lineheight-tighter.u-marginbottom-20', 'Histórico de notificações'), ctrl.notifications().map(function(cEvent) { return m('.w-row.fontsize-smallest.lineheight-looser.date-event', [ m('.w-col.w-col-24', [ m('.fontcolor-secondary', 'notificação: ', cEvent.template_name, ', ', 'criada em: ', h.momentify(cEvent.created_at, 'DD/MM/YYYY, HH:mm'), ', ', 'enviada em: ', h.momentify(cEvent.sent_at, 'DD/MM/YYYY, HH:mm')) ]), ]); }) ]); } }; }(window.m, window.c.h, window._, window.c.models));
Add balance checking test samplesets
from django.test import TestCase from breach.models import SampleSet, Victim, Target, Round class RuptureTestCase(TestCase): def setUp(self): target = Target.objects.create( endpoint='https://di.uoa.gr/?breach=%s', prefix='test', alphabet='0123456789' ) self.victim = Victim.objects.create( target=target, sourceip='192.168.10.140', snifferendpoint='http://localhost/' ) round = Round.objects.create( victim=self.victim, amount=1, knownsecret='testsecret', knownalphabet='01' ) self.samplesets = [ SampleSet.objects.create( round=round, candidatealphabet='0', data='bigbigbigbigbigbig' ), SampleSet.objects.create( round=round, candidatealphabet='1', data='small' ) ] # Balance checking self.balance_victim = Victim.objects.create( target=target, sourceip='192.168.10.141', snifferendpoint='http://localhost/' ) balance_round = Round.objects.create( victim=self.balance_victim, amount=1, knownsecret='testsecret', knownalphabet='0123', roundcardinality=3 ) self.balance_samplesets = [ SampleSet.objects.create( round=balance_round, candidatealphabet='0', data='bigbigbigbigbigbig' ), SampleSet.objects.create( round=balance_round, candidatealphabet='123', data='small' ) ]
from django.test import TestCase from breach.models import SampleSet, Victim, Target, Round class RuptureTestCase(TestCase): def setUp(self): target = Target.objects.create( endpoint='https://di.uoa.gr/?breach=%s', prefix='test', alphabet='0123456789' ) self.victim = Victim.objects.create( target=target, sourceip='192.168.10.140', snifferendpoint='http://localhost/' ) round = Round.objects.create( victim=self.victim, amount=1, knownsecret='testsecret', knownalphabet='01' ) self.samplesets = [ SampleSet.objects.create( round=round, candidatealphabet='0', data='bigbigbigbigbigbig' ), SampleSet.objects.create( round=round, candidatealphabet='1', data='small' ) ] # Balance checking self.balance_victim = Victim.objects.create( target=target, sourceip='192.168.10.141', snifferendpoint='http://localhost/' ) balance_round = Round.objects.create( victim=self.balance_victim, amount=1, knownsecret='testsecret', knownalphabet='0123', roundcardinality=3 )
Make table basic and no longer striped
import React, { Component } from 'react'; import { Table, Icon } from 'semantic-ui-react'; import data from './data/food.json'; class List extends Component { render() { let list = []; let categories = data.categories; let status = data.status; data.foods.forEach((item, index) => { if (item.name.toLowerCase().includes(this.props.term) || this.props.term === "") { list.push( <Table.Row positive={ !item.status } negative={ item.status } key={ index } > <Table.Cell> { !item.status ? ( <Icon name='checkmark' /> ) : ( <Icon name='close' /> )} </Table.Cell> <Table.Cell>{ item.name }</Table.Cell> <Table.Cell>{ categories[item.category] }</Table.Cell> <Table.Cell warning={ item.hasOwnProperty('note') }>{ item.note }</Table.Cell> </Table.Row> ); } }); return ( <Table celled basic compact unstackable> <Table.Header> <Table.Row> <Table.HeaderCell></Table.HeaderCell> <Table.HeaderCell>Name</Table.HeaderCell> <Table.HeaderCell>Category</Table.HeaderCell> <Table.HeaderCell>Note</Table.HeaderCell> </Table.Row> </Table.Header> <Table.Body> { list } </Table.Body> </Table> ); } } export default List;
import React, { Component } from 'react'; import { Table, Icon } from 'semantic-ui-react'; import data from './data/food.json'; class List extends Component { render() { let list = []; let categories = data.categories; let status = data.status; data.foods.forEach((item, index) => { if (item.name.toLowerCase().includes(this.props.term) || this.props.term === "") { list.push( <Table.Row positive={ !item.status } negative={ item.status } key={ index } > <Table.Cell> { !item.status ? ( <Icon name='checkmark' /> ) : ( <Icon name='close' /> )} </Table.Cell> <Table.Cell>{ item.name }</Table.Cell> <Table.Cell>{ categories[item.category] }</Table.Cell> <Table.Cell warning={ item.hasOwnProperty('note') }>{ item.note }</Table.Cell> </Table.Row> ); } }); return ( <Table celled striped compact unstackable> <Table.Header> <Table.Row> <Table.HeaderCell></Table.HeaderCell> <Table.HeaderCell>Name</Table.HeaderCell> <Table.HeaderCell>Category</Table.HeaderCell> <Table.HeaderCell>Note</Table.HeaderCell> </Table.Row> </Table.Header> <Table.Body> { list } </Table.Body> </Table> ); } } export default List;
Make sure we set the EMAIL_BACKEND by default
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage' FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024 FILE_UPLOAD_HANDLERS = ( 'djangae.storage.BlobstoreFileUploadHandler', 'django.core.files.uploadhandler.MemoryFileUploadHandler', ) DATABASES = { 'default': { 'ENGINE': 'djangae.db.backends.appengine' } } GENERATE_SPECIAL_INDEXES_DURING_TESTING = False CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', } } LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner' EMAIL_BACKEND = 'djangae.mail.AsyncEmailBackend'
DEFAULT_FILE_STORAGE = 'djangae.storage.BlobstoreStorage' FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024 FILE_UPLOAD_HANDLERS = ( 'djangae.storage.BlobstoreFileUploadHandler', 'django.core.files.uploadhandler.MemoryFileUploadHandler', ) DATABASES = { 'default': { 'ENGINE': 'djangae.db.backends.appengine' } } GENERATE_SPECIAL_INDEXES_DURING_TESTING = False CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', } } LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } TEST_RUNNER = 'djangae.test_runner.DjangaeTestSuiteRunner'
Add docblock to correct the return type provided by the 2dotstwice collection
<?php namespace CultuurNet\UDB3\Media; use ArrayIterator; use TwoDotsTwice\Collection\AbstractCollection; use TwoDotsTwice\Collection\CollectionInterface; use ValueObjects\Identity\UUID; class ImageCollection extends AbstractCollection implements CollectionInterface { /** * @var Image|null */ protected $mainImage; protected function getValidObjectType() { return Image::class; } /** * @param Image $image * @return ImageCollection */ public function withMain(Image $image) { $collection = $this->contains($image) ? $this : $this->with($image); $copy = clone $collection; $copy->mainImage = $image; return $copy; } /** * @return Image|null */ public function getMain() { if (0 === $this->length()) { return null; } if ($this->mainImage) { return $this->mainImage; } else { /** @var ArrayIterator $iterator */ $iterator = $this->getIterator(); $iterator->rewind(); return $iterator->current(); } } /** * @param UUID $uuid * @return Image|null */ public function findImageByUUID(UUID $uuid) { /** @var Image $image */ foreach ($this->items as $image) { if ($image->getMediaObjectId()->sameValueAs($uuid)) { return $image; } } return null; } }
<?php namespace CultuurNet\UDB3\Media; use TwoDotsTwice\Collection\AbstractCollection; use TwoDotsTwice\Collection\CollectionInterface; use ValueObjects\Identity\UUID; class ImageCollection extends AbstractCollection implements CollectionInterface { /** * @var Image|null */ protected $mainImage; protected function getValidObjectType() { return Image::class; } /** * @param Image $image * @return ImageCollection */ public function withMain(Image $image) { $collection = $this->contains($image) ? $this : $this->with($image); $copy = clone $collection; $copy->mainImage = $image; return $copy; } /** * @return Image|null */ public function getMain() { if (0 === $this->length()) { return null; } if ($this->mainImage) { return $this->mainImage; } else { $iterator = $this->getIterator(); $iterator->rewind(); return $iterator->current(); } } /** * @param UUID $uuid * @return Image|null */ public function findImageByUUID(UUID $uuid) { /** @var Image $image */ foreach ($this->items as $image) { if ($image->getMediaObjectId()->sameValueAs($uuid)) { return $image; } } return null; } }
Fix bug in daenerys module:register command
<?php declare(strict_types=1); namespace LotGD\Core\Console\Command; use Symfony\Component\Console\Input\InputInterface; use Symfony\Component\Console\Output\OutputInterface; use LotGD\Core\Exceptions\ClassNotFoundException; use LotGD\Core\Exceptions\ModuleAlreadyExistsException; use LotGD\Core\LibraryConfiguration; /** * Danerys command to register and initiate any newly installed modules. */ class ModuleRegisterCommand extends BaseCommand { /** * @inheritDoc */ protected function configure() { $this->setName('module:register') ->setDescription('Register and initialize any newly installed modules'); } /** * @inheritDoc */ protected function execute(InputInterface $input, OutputInterface $output) { $modules = $this->game->getComposerManager()->getModulePackages(); foreach ($modules as $p) { $library = new LibraryConfiguration($this->game->getComposerManager(), $p, $this->game->getCWD()); $name = $library->getName(); try { $this->game->getModuleManager()->register($library); $output->writeln("<info>Registered new module {$name}</info>"); } catch (ModuleAlreadyExistsException $e) { $output->writeln("Skipping already registered module {$name}"); } catch (ClassNotFoundException $e) { $output->writeln("<error>Error installing module {$name}: " . $e->getMessage() . "</error>"); } } } }
<?php declare(strict_types=1); namespace LotGD\Core\Console\Command; use Symfony\Component\Console\Input\InputInterface; use Symfony\Component\Console\Output\OutputInterface; use LotGD\Core\Exceptions\ClassNotFoundException; use LotGD\Core\Exceptions\ModuleAlreadyExistsException; /** * Danerys command to register and initiate any newly installed modules. */ class ModuleRegisterCommand extends BaseCommand { /** * @inheritDoc */ protected function configure() { $this->setName('module:register') ->setDescription('Register and initialize any newly installed modules'); } /** * @inheritDoc */ protected function execute(InputInterface $input, OutputInterface $output) { $modules = $this->game->getComposerManager()->getModulePackages(); foreach ($modules as $p) { $library = new LibraryConfiguration($this->game->getComposerManager(), $p, $this->game->getCWD()); $name = $library->getName(); try { $this->game->getModuleManager()->register($library); $output->writeln("<info>Registered new module {$name}</info>"); } catch (ModuleAlreadyExistsException $e) { $output->writeln("Skipping already registered module {$name}"); } catch (ClassNotFoundException $e) { $output->writeln("<error>Error installing module {$name}: " . $e->getMessage() . "</error>"); } } } }
Fix url of entries made by listdir on Windows. git-svn-id: ad91b9aa7ba7638d69f912c9f5d012e3326e9f74@1586 3942dd89-8c5d-46d7-aeed-044bccf3e60c
import logging from flexget.plugin import register_plugin log = logging.getLogger('listdir') class InputListdir: """ Uses local path content as an input. Example: listdir: /storage/movies/ """ def validator(self): from flexget import validator root = validator.factory() root.accept('path') bundle = root.accept('list') bundle.accept('path') return root def get_config(self, feed): config = feed.config.get('listdir', None) # If only a single path is passed turn it into a 1 element list if isinstance(config, basestring): config = [config] return config def on_feed_input(self, feed): from flexget.feed import Entry import os config = self.get_config(feed) for path in config: for name in os.listdir(unicode(path)): e = Entry() e['title'] = name filepath = os.path.join(path, name) # Windows paths need an extra / prepended to them if not filepath.startswith('/'): filepath = '/' + filepath e['url'] = 'file://%s' % (filepath) e['location'] = os.path.join(path, name) feed.entries.append(e) register_plugin(InputListdir, 'listdir')
import logging from flexget.plugin import * log = logging.getLogger('listdir') class InputListdir: """ Uses local path content as an input. Example: listdir: /storage/movies/ """ def validator(self): from flexget import validator root = validator.factory() root.accept('path') bundle = root.accept('list') bundle.accept('path') return root def get_config(self, feed): config = feed.config.get('listdir', None) #if only a single path is passed turn it into a 1 element list if isinstance(config, basestring): config = [config] return config def on_feed_input(self, feed): from flexget.feed import Entry import os config = self.get_config(feed) for path in config: for name in os.listdir(unicode(path)): e = Entry() e['title'] = name e['url'] = 'file://%s' % (os.path.join(path, name)) e['location'] = os.path.join(path, name) feed.entries.append(e) register_plugin(InputListdir, 'listdir')
Use more recent packages as minimum requirements
import codecs from os import path from setuptools import find_packages, setup def read(*parts): filename = path.join(path.dirname(__file__), *parts) with codecs.open(filename, encoding="utf-8") as fp: return fp.read() NAME = "pinax-blog" DESCRIPTION = "a Django blog app" AUTHOR = "Pinax Team" AUTHOR_EMAIL = "[email protected]" URL = "https://github.com/pinax/pinax-blog" setup( name=NAME, version="3.1.3", description=DESCRIPTION, long_description=read("README.rst"), url=URL, license="MIT", packages=find_packages(), package_data={ "pinax.blog": [ "templates/pinax/blog/*.xml", ] }, install_requires=[ "django-appconf>=1.0.1", "Pillow>=2.0", "Markdown>=2.6", "Pygments>=2.0.2" ], test_suite="runtests.runtests", classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Framework :: Django", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", ], zip_safe=False )
import codecs from os import path from setuptools import find_packages, setup def read(*parts): filename = path.join(path.dirname(__file__), *parts) with codecs.open(filename, encoding="utf-8") as fp: return fp.read() NAME = "pinax-blog" DESCRIPTION = "a Django blog app" AUTHOR = "Pinax Team" AUTHOR_EMAIL = "[email protected]" URL = "https://github.com/pinax/pinax-blog" setup( name=NAME, version="3.1.3", description=DESCRIPTION, long_description=read("README.rst"), url=URL, license="MIT", packages=find_packages(), package_data={ "pinax.blog": [ "templates/pinax/blog/*.xml", ] }, install_requires=[ "django-appconf>=1.0.1", "Pillow>=2.0", "Markdown>=2.4", "Pygments>=1.6" ], test_suite="runtests.runtests", classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Framework :: Django", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", ], zip_safe=False )
Use AbstractEntry instead of EntryAbstractClass
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models_bases.entry import AbstractEntry class EntryPlaceholder(AbstractEntry): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(AbstractEntry.Meta): """EntryPlaceholder's Meta""" abstract = True
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True
Change dateutil to python-dateutil because some loser decided to rename it. (Thanks Leigh) git-svn-id: 7187af8a85e68091b56e148623cc345c4eafc588@188 d723f978-dc38-0410-87ed-da353333cdcc
from setuptools import setup, find_packages import sys, os version = '0.4.4' setup(name='twitter', version=version, description="An API and command-line toolset for Twitter (twitter.com)", long_description=open("./README", "r").read(), # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: End Users/Desktop", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Communications :: Chat :: Internet Relay Chat", "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries", "Topic :: Utilities", "License :: OSI Approved :: MIT License", ], keywords='twitter, IRC, command-line tools, web 2.0', author='Mike Verdone', author_email='[email protected]', url='http://mike.verdone.ca/twitter/', license='MIT License', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=True, install_requires=[ # -*- Extra requirements: -*- "simplejson>=1.7.1", "python-dateutil>=1.1", ], entry_points=""" # -*- Entry points: -*- [console_scripts] twitter=twitter.cmdline:main twitterbot=twitter.ircbot:main """, )
from setuptools import setup, find_packages import sys, os version = '0.4.3' setup(name='twitter', version=version, description="An API and command-line toolset for Twitter (twitter.com)", long_description=open("./README", "r").read(), # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: End Users/Desktop", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Communications :: Chat :: Internet Relay Chat", "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries", "Topic :: Utilities", "License :: OSI Approved :: MIT License", ], keywords='twitter, IRC, command-line tools, web 2.0', author='Mike Verdone', author_email='[email protected]', url='http://mike.verdone.ca/twitter/', license='MIT License', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=True, install_requires=[ # -*- Extra requirements: -*- "simplejson>=1.7.1", "dateutil>=1.1", ], entry_points=""" # -*- Entry points: -*- [console_scripts] twitter=twitter.cmdline:main twitterbot=twitter.ircbot:main """, )
Make django-sortable install; the PyPI package is broken.
import os from setuptools import setup README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name = 'RecordExpress', version = '0.0', packages = ['collection_record'], include_package_data = True, dependency_links = ['https://github.com/cdlib/RecordExpress.git', 'https://github.com/drewyeaton/django-sortable/archive/master.zip#egg=django-sortable', #pypi package currently broken - 2013/09 ], license = 'BSD License - see LICENSE file', description = 'A lightweight EAD creator', long_description = README, author = 'Mark Redar', author_email = '[email protected]', classifiers = [ 'Environment :: Web Environment', 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], install_requires = [ 'django>=1.4', 'django-dublincore>=0.1', 'django-sortable', 'BeautifulSoup', 'webtest', 'django-webtest' ], )
import os from setuptools import setup README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name = 'RecordExpress', version = '0.0', packages = ['collection_record'], include_package_data = True, dependency_links = ['https://github.com/cdlib/RecordExpress.git'], license = 'BSD License - see LICENSE file', description = 'A lightweight EAD creator', long_description = README, author = 'Mark Redar', author_email = '[email protected]', classifiers = [ 'Environment :: Web Environment', 'Development Status :: 3 - Alpha', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], install_requires = [ 'django>=1.4', 'django-dublincore>=0.1', 'django-sortable', 'BeautifulSoup', 'webtest', 'django-webtest' ], )
Add SSL option to work with modified Gaufrette that uses SSL/TLS FTP
<?php namespace Knp\Bundle\GaufretteBundle\DependencyInjection\Factory; use Symfony\Component\Config\Definition\Builder\NodeDefinition; use Symfony\Component\DependencyInjection\Reference; use Symfony\Component\DependencyInjection\ContainerBuilder; use Symfony\Component\DependencyInjection\DefinitionDecorator; /** * Ftp Adapter Factory */ class FtpAdapterFactory implements AdapterFactoryInterface { /** * {@inheritDoc} */ function create(ContainerBuilder $container, $id, array $config) { $container ->setDefinition($id, new DefinitionDecorator('knp_gaufrette.adapter.ftp')) ->addArgument($config['directory']) ->addArgument($config['host']) ->addArgument($config) ; } /** * {@inheritDoc} */ function getKey() { return 'ftp'; } /** * {@inheritDoc} */ function addConfiguration(NodeDefinition $builder) { $builder ->children() ->scalarNode('directory')->isRequired()->end() ->scalarNode('host')->isRequired()->end() ->scalarNode('port')->defaultValue(21)->end() ->scalarNode('username')->defaultNull()->end() ->scalarNode('password')->defaultNull()->end() ->booleanNode('passive')->defaultFalse()->end() ->booleanNode('create')->defaultFalse()->end() ->scalarNode('mode') ->defaultValue(defined('FTP_ASCII') ? FTP_ASCII : null) ->beforeNormalization() ->ifString() ->then(function($v) { return constant($v); }) ->booleanNode('ssl')->defaultFalse()->end() ->end() ->end() ; } }
<?php namespace Knp\Bundle\GaufretteBundle\DependencyInjection\Factory; use Symfony\Component\Config\Definition\Builder\NodeDefinition; use Symfony\Component\DependencyInjection\Reference; use Symfony\Component\DependencyInjection\ContainerBuilder; use Symfony\Component\DependencyInjection\DefinitionDecorator; /** * Ftp Adapter Factory */ class FtpAdapterFactory implements AdapterFactoryInterface { /** * {@inheritDoc} */ function create(ContainerBuilder $container, $id, array $config) { $container ->setDefinition($id, new DefinitionDecorator('knp_gaufrette.adapter.ftp')) ->addArgument($config['directory']) ->addArgument($config['host']) ->addArgument($config) ; } /** * {@inheritDoc} */ function getKey() { return 'ftp'; } /** * {@inheritDoc} */ function addConfiguration(NodeDefinition $builder) { $builder ->children() ->scalarNode('directory')->isRequired()->end() ->scalarNode('host')->isRequired()->end() ->scalarNode('port')->defaultValue(21)->end() ->scalarNode('username')->defaultNull()->end() ->scalarNode('password')->defaultNull()->end() ->booleanNode('passive')->defaultFalse()->end() ->booleanNode('create')->defaultFalse()->end() ->scalarNode('mode') ->defaultValue(defined('FTP_ASCII') ? FTP_ASCII : null) ->beforeNormalization() ->ifString() ->then(function($v) { return constant($v); }) ->end() ->end() ; } }
III-709: Use String instead of fully namespaced equivalent
<?php namespace CultuurNet\UDB3\EventExport\Command; use CultuurNet\Deserializer\JSONDeserializer; use CultuurNet\Deserializer\MissingValueException; use CultuurNet\UDB3\EventExport\EventExportQuery; use ValueObjects\String\String; use ValueObjects\Web\EmailAddress; abstract class ExportEventsJSONDeserializer extends JSONDeserializer { /** * @param String $data * @return ExportEvents */ public function deserialize(String $data) { $data = parent::deserialize($data); if (!isset($data->query)) { throw new MissingValueException('query is missing'); } $query = new EventExportQuery($data->query); $email = $selection = $include = null; // @todo This throws an exception when the e-mail is invalid. How do we handle this? if (isset($data->email)) { $email = new EmailAddress($data->email); } if (isset($data->selection)) { $selection = $data->selection; } if (isset($data->include)) { $include = $data->include; } return $this->createCommand($query, $email, $selection, $include); } /** * @param EventExportQuery $query * @param EmailAddress|null $address * @param string[]|null $selection * @param string[]|null $include * @return ExportEvents */ abstract protected function createCommand( EventExportQuery $query, EmailAddress $address = null, $selection = null, $include = null ); }
<?php namespace CultuurNet\UDB3\EventExport\Command; use CultuurNet\Deserializer\JSONDeserializer; use CultuurNet\Deserializer\MissingValueException; use CultuurNet\UDB3\EventExport\EventExportQuery; use ValueObjects\String\String; use ValueObjects\Web\EmailAddress; abstract class ExportEventsJSONDeserializer extends JSONDeserializer { /** * @param \ValueObjects\String\String $data * @return ExportEvents */ public function deserialize(String $data) { $data = parent::deserialize($data); if (!isset($data->query)) { throw new MissingValueException('query is missing'); } $query = new EventExportQuery($data->query); $email = $selection = $include = null; // @todo This throws an exception when the e-mail is invalid. How do we handle this? if (isset($data->email)) { $email = new EmailAddress($data->email); } if (isset($data->selection)) { $selection = $data->selection; } if (isset($data->include)) { $include = $data->include; } return $this->createCommand($query, $email, $selection, $include); } /** * @param EventExportQuery $query * @param EmailAddress|null $address * @param string[]|null $selection * @param string[]|null $include * @return ExportEvents */ abstract protected function createCommand( EventExportQuery $query, EmailAddress $address = null, $selection = null, $include = null ); }
Remove extra margin around SVG images
import React, { PropTypes, PureComponent } from 'react'; import { View, Platform, WebView, ActivityIndicator } from 'react-native'; export default class SVGImage extends PureComponent { static propTypes = { style: PropTypes.any, source: PropTypes.shape({ uri: PropTypes.string, }).isRequired, showWebviewLoader: PropTypes.bool, height: PropTypes.number, }; static defaultProps = { style: {}, source: { uri: '' }, showWebviewLoader: Platform.OS === 'android', height: null, }; renderLoader = () => ( <View style={[this.props.style, { flex: 1, alignItems: 'center', justifyContent: 'center' }]}> <ActivityIndicator /> </View> ); render() { const { showWebviewLoader, source: { uri }, height, ...restOfProps } = this.props; const html = ` <!DOCTYPE html>\n <html> <head> <style type="text/css"> img { max-width: 100%; max-height: 100%; margin: 0 auto; } body { margin: 0; } </style> </head> <body> <img src="${uri}" height="${height}" align="middle" /> </body> </html> `; return ( <WebView startInLoadingState={showWebviewLoader} renderLoading={showWebviewLoader ? this.renderLoader : null} {...restOfProps} source={{ html }} /> ); } }
import React, { PropTypes, PureComponent } from 'react'; import { View, Platform, WebView, ActivityIndicator } from 'react-native'; export default class SVGImage extends PureComponent { static propTypes = { style: PropTypes.any, source: PropTypes.shape({ uri: PropTypes.string, }).isRequired, showWebviewLoader: PropTypes.bool, height: PropTypes.number, }; static defaultProps = { style: {}, source: { uri: '' }, showWebviewLoader: Platform.OS === 'android', height: null, }; renderLoader = () => ( <View style={[this.props.style, { flex: 1, alignItems: 'center', justifyContent: 'center' }]}> <ActivityIndicator /> </View> ); render() { const { showWebviewLoader, source: { uri }, height, ...props } = this.props; const html = ` <!DOCTYPE html>\n <html> <head> <style type="text/css"> img { max-width: 100%; max-height: 100%; margin: 0 auto; } </style> </head> <body> <img src="${uri}" height="${height}" align="middle" /> </body> </html> `; return ( <WebView source={{ html }} startInLoadingState={showWebviewLoader} renderLoading={showWebviewLoader ? this.renderLoader : null} {...props} /> ); } }
fix(shop): Insert links for type and category on the home page (see #386)
@extends('layouts.app') @section('content') <div class="container"> <div class="row"> <div class="col-md-12"> @foreach ($types as $type) <a href="{{ route('showProductByType', ['id' => $type->id] ) }}"><h3>{{ $type->name }}</h3></a> <div class="panel-group"> @foreach ($type->categories(['limit' => 2]) as $category) <div class="panel panel-info"> <div class="panel-heading"> <a href="{{ route('showProductByCategory', ['id' => $category->id] ) }}"><h4>{{ $category->name }}</h4></a> </div> <div class="panel-body"> <div class="row"> @foreach ($category->products(['limit' => 4, 'newest' => 1]) as $product) <a href={{ route('productsDetail', ['id' => $product->id]) }}> <div class="col-md-3"> @include('partials.products.product') </div> </a> @endforeach </div> <div class="pull-right"> <a href="{{ route('showProductByCategory', ['id' => $category->id] ) }}"><button class="btn btn-xs btn-primary ">View more</button></a> </div> </div> </div> @endforeach </div> @endforeach </div> </div> </div> @endsection
@extends('layouts.app') @section('content') <div class="container"> <div class="row"> <div class="col-md-12"> @foreach ($types as $type) <h3>{{ $type->name }}</h3> <div class="panel-group"> @foreach ($type->categories(['limit' => 2]) as $category) <div class="panel panel-info"> <div class="panel-heading"> <h4>{{ $category->name }}</h4> </div> <div class="panel-body"> <div class="row"> @foreach ($category->products(['limit' => 4, 'newest' => 1]) as $product) <a href={{ route('productsDetail', ['id' => $product->id]) }}> <div class="col-md-3"> @include('partials.products.product') </div> </a> @endforeach </div> <div class="pull-right"> <a href="{{ route('showProductByCategory', ['id' => $category->id] ) }}"><button class="btn btn-xs btn-primary ">View more</button></a> </div> </div> </div> @endforeach </div> @endforeach </div> </div> </div> @endsection
Align with ASM Comics layout (which is the old ASM Hentai Layout...)
package me.devsaki.hentoid.parsers.images; import androidx.annotation.NonNull; import org.jsoup.nodes.Document; import org.jsoup.select.Elements; import java.io.IOException; import java.util.ArrayList; import java.util.List; import me.devsaki.hentoid.database.domains.Content; import static me.devsaki.hentoid.util.network.HttpHelper.getOnlineDocument; public class ASMHentaiParser extends BaseParser { @Override protected List<String> parseImages(@NonNull Content content) throws IOException { List<String> result = new ArrayList<>(); // Fetch the reader page Document doc = getOnlineDocument(content.getReaderUrl()); if (doc != null) { Elements imgContainer = doc.select("div.full_image"); // New ASM layout if (imgContainer.isEmpty()) imgContainer = doc.select("div.full_gallery"); // Old ASM layout; current ASM Comics layout String imgUrl = "https:" + imgContainer .select("a") .select("img") .attr("src"); String ext = imgUrl.substring(imgUrl.lastIndexOf('.')); for (int i = 0; i < content.getQtyPages(); i++) { String img = imgUrl.substring(0, imgUrl.lastIndexOf('/') + 1) + (i + 1) + ext; result.add(img); } } return result; } }
package me.devsaki.hentoid.parsers.images; import androidx.annotation.NonNull; import org.jsoup.nodes.Document; import java.io.IOException; import java.util.ArrayList; import java.util.List; import me.devsaki.hentoid.database.domains.Content; import static me.devsaki.hentoid.util.network.HttpHelper.getOnlineDocument; public class ASMHentaiParser extends BaseParser { @Override protected List<String> parseImages(@NonNull Content content) throws IOException { List<String> result = new ArrayList<>(); // Fetch the reader page Document doc = getOnlineDocument(content.getReaderUrl()); if (doc != null) { String imgUrl = "https:" + doc.select("div.full_image") .select("a") .select("img") .attr("src"); String ext = imgUrl.substring(imgUrl.lastIndexOf('.')); for (int i = 0; i < content.getQtyPages(); i++) { String img = imgUrl.substring(0, imgUrl.lastIndexOf('/') + 1) + (i + 1) + ext; result.add(img); } } return result; } }
Use OPPS_MULTISITE_ADMIN on queryset AdminViewPermission
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.contrib import admin from django.conf import settings from django.utils import timezone from .models import SitePermission class AdminViewPermission(admin.ModelAdmin): def queryset(self, request): queryset = super(AdminViewPermission, self).queryset(request) if not settings.OPPS_MULTISITE_ADMIN: return queryset try: sitepermission = SitePermission.objects.get( user=request.user, date_available__lte=timezone.now(), published=True) return queryset.filter(site_iid=sitepermission.site_iid) except SitePermission.DoesNotExist: pass return queryset def get_form(self, request, obj=None, **kwargs): form = super(AdminViewPermission, self).get_form(request, obj, **kwargs) if not settings.OPPS_MULTISITE_ADMIN: return form try: sitepermission = SitePermission.objects.get( user=request.user, date_available__lte=timezone.now(), published=True) form.base_fields['site'].initial = sitepermission.site form.base_fields['site'].choices = ((sitepermission.site.id, sitepermission.site.domain),) except SitePermission.DoesNotExist: pass return form admin.site.register(SitePermission)
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.contrib import admin from django.conf import settings from django.utils import timezone from .models import SitePermission class AdminViewPermission(admin.ModelAdmin): def queryset(self, request): queryset = super(AdminViewPermission, self).queryset(request) try: sitepermission = SitePermission.objects.get( user=request.user, date_available__lte=timezone.now(), published=True) return queryset.filter(site_iid=sitepermission.site_iid) except SitePermission.DoesNotExist: pass return queryset def get_form(self, request, obj=None, **kwargs): form = super(AdminViewPermission, self).get_form(request, obj, **kwargs) if not settings.OPPS_MULTISITE_ADMIN: return form try: sitepermission = SitePermission.objects.get( user=request.user, date_available__lte=timezone.now(), published=True) form.base_fields['site'].initial = sitepermission.site form.base_fields['site'].choices = ((sitepermission.site.id, sitepermission.site.domain),) except SitePermission.DoesNotExist: pass return form admin.site.register(SitePermission)
Support ipv6 for status endpoint security
package org.apereo.cas.configuration.model.core.web.security; import org.springframework.core.io.Resource; /** * This is {@link AdminPagesSecurityProperties}. * * @author Misagh Moayyed * @since 5.0.0 */ public class AdminPagesSecurityProperties { private String ip = "127\\.0\\.0\\.1|0:0:0:0:0:0:0:1"; private String adminRoles = "ROLE_ADMIN"; private String loginUrl; private String service; private Resource users; private boolean actuatorEndpointsEnabled; public boolean isActuatorEndpointsEnabled() { return actuatorEndpointsEnabled; } public void setActuatorEndpointsEnabled(final boolean actuatorEndpointsEnabled) { this.actuatorEndpointsEnabled = actuatorEndpointsEnabled; } public String getIp() { return ip; } public void setIp(final String ip) { this.ip = ip; } public String getAdminRoles() { return adminRoles; } public void setAdminRoles(final String adminRoles) { this.adminRoles = adminRoles; } public String getLoginUrl() { return loginUrl; } public void setLoginUrl(final String loginUrl) { this.loginUrl = loginUrl; } public String getService() { return service; } public void setService(final String service) { this.service = service; } public Resource getUsers() { return users; } public void setUsers(final Resource users) { this.users = users; } }
package org.apereo.cas.configuration.model.core.web.security; import org.springframework.core.io.Resource; /** * This is {@link AdminPagesSecurityProperties}. * * @author Misagh Moayyed * @since 5.0.0 */ public class AdminPagesSecurityProperties { private String ip = "127\\.0\\.0\\.1"; private String adminRoles = "ROLE_ADMIN"; private String loginUrl; private String service; private Resource users; private boolean actuatorEndpointsEnabled; public boolean isActuatorEndpointsEnabled() { return actuatorEndpointsEnabled; } public void setActuatorEndpointsEnabled(final boolean actuatorEndpointsEnabled) { this.actuatorEndpointsEnabled = actuatorEndpointsEnabled; } public String getIp() { return ip; } public void setIp(final String ip) { this.ip = ip; } public String getAdminRoles() { return adminRoles; } public void setAdminRoles(final String adminRoles) { this.adminRoles = adminRoles; } public String getLoginUrl() { return loginUrl; } public void setLoginUrl(final String loginUrl) { this.loginUrl = loginUrl; } public String getService() { return service; } public void setService(final String service) { this.service = service; } public Resource getUsers() { return users; } public void setUsers(final Resource users) { this.users = users; } }
Check on type rather than method existence
<?php namespace PhpSpec\Formatter\Html; use PhpSpec\Formatter\Presenter\StringPresenter; use Exception; use PhpSpec\Exception\Exception as PhpSpecException; class HtmlPresenter extends StringPresenter { public function presentException(Exception $exception, $verbose = false) { if ($exception instanceof PhpSpecException) { list($file, $line) = $this->getExceptionExamplePosition($exception); return $this->presentFileCode($file, $line); } } protected function presentFileCode($file, $lineno, $context = 6) { $lines = explode("\n", file_get_contents($file)); $offset = max(0, $lineno - ceil($context / 2)); $lines = array_slice($lines, $offset, $context); $text = "\n"; foreach ($lines as $line) { $offset++; if ($offset == $lineno) { $cssClass = "offending"; } else { $cssClass = "normal"; } $text .= '<span class="linenum">'.$offset.'</span><span class="' . $cssClass . '">'.$line.'</span>'; $text .= "\n"; } return $text; } }
<?php namespace PhpSpec\Formatter\Html; use PhpSpec\Formatter\Presenter\StringPresenter; use Exception; class HtmlPresenter extends StringPresenter { public function presentException(Exception $exception, $verbose = false) { if (method_exists($exception, 'getCause')) { list($file, $line) = $this->getExceptionExamplePosition($exception); return $this->presentFileCode($file, $line); } } protected function presentFileCode($file, $lineno, $context = 6) { $lines = explode("\n", file_get_contents($file)); $offset = max(0, $lineno - ceil($context / 2)); $lines = array_slice($lines, $offset, $context); $text = "\n"; foreach ($lines as $line) { $offset++; if ($offset == $lineno) { $cssClass = "offending"; } else { $cssClass = "normal"; } $text .= '<span class="linenum">'.$offset.'</span><span class="' . $cssClass . '">'.$line.'</span>'; $text .= "\n"; } return $text; } }
Use non default args for proc_open
<?php namespace Aztech\Process; class ProcessBuilder { private $command; private $args = []; private $env = null; private $workingDirectory = null; public function setCommand($executablePath) { $this->command = $executablePath; return $this; } public function getCommand() { return $this->command; } public function setArguments(array $args = null) { $this->args = $args ?: []; return $this; } public function getArguments() { return $this->args; } public function setEnvironment(array $env = null) { $this->env = $env; } public function getEnvironment() { return $this->env ?: $_ENV; } public function setWorkingDirectory($path) { $this->workingDirectory = $path; } public function getWorkingDirectory() { return $this->workingDirectory ?: getcwd(); } /** * * @throws \RuntimeException * @return AttachedProcess */ public function run() { $cmd = trim(sprintf('%s %s', $this->command, implode(' ', $this->args))); $descriptorspec = [ 0 => [ 'pipe', 'r' ], 1 => [ 'pipe', 'w' ], 2 => [ 'file', 'php://stderr', 'a' ] ]; $pipes = []; $env = $this->getEnvironment(); $cwd = $this->getWorkingDirectory(); $process = proc_open($cmd, $descriptorspec, $pipes, $cwd, $env); if ($process !== false) { return new AttachedProcess($process, $pipes, (new InspectorFactory())->create()); } throw new \RuntimeException('Unable to start process.'); } }
<?php namespace Aztech\Process; class ProcessBuilder { private $command; private $args = []; private $env = null; public function setCommand($executablePath) { $this->command = $executablePath; return $this; } public function getCommand() { return $this->command; } public function setArguments(array $args = null) { $this->args = $args ?: []; return $this; } public function getArguments() { return $this->args; } public function setEnvironment(array $env = null) { $this->env = $env; } public function getEnvironment() { return $this->env; } /** * * @throws \RuntimeException * @return AttachedProcess */ public function run() { $cmd = trim(sprintf('%s %s', $this->command, implode(' ', $this->args))); $descriptorspec = [ 0 => [ 'pipe', 'r' ], 1 => [ 'pipe', 'w' ], 2 => [ 'file', 'php://stderr', 'a' ] ]; $pipes = []; $process = proc_open($cmd, $descriptorspec, $pipes); if ($process !== false) { return new AttachedProcess($process, $pipes, (new InspectorFactory())->create()); } throw new \RuntimeException('Unable to start process.'); } }
Resolve in over array values
package org.hcjf.layers.query; import java.util.Arrays; import java.util.Collection; import java.util.Map; /** * @author javaito * @mail [email protected] */ public class In extends FieldEvaluator { public In(String fieldName, Object value) { super(fieldName, value); } @Override public boolean evaluate(Object object, Query.Consumer consumer) { boolean result = false; try { Object fieldValue = consumer.get(object, getFieldName()); if(Map.class.isAssignableFrom(fieldValue.getClass())) { result = ((Map)fieldValue).containsKey(getValue()); } else if(Collection.class.isAssignableFrom(fieldValue.getClass())) { result = ((Collection)fieldValue).contains(getValue()); } else if(fieldValue.getClass().isArray()) { result = Arrays.binarySearch((Object[])fieldValue, getValue()) >= 0; } else if (Map.class.isAssignableFrom(getValue().getClass())) { result = ((Map)getValue()).containsKey(fieldValue); } else if(Collection.class.isAssignableFrom(getValue().getClass())) { result = ((Collection)getValue()).contains(fieldValue); } else if(getValue().getClass().isArray()) { result = Arrays.binarySearch((Object[])getValue(), fieldValue) >= 0; } } catch (Exception ex) { throw new IllegalArgumentException("In evaluator fail", ex); } return result; } }
package org.hcjf.layers.query; import java.util.Arrays; import java.util.Collection; import java.util.Map; /** * @author javaito * @mail [email protected] */ public class In extends FieldEvaluator { public In(String fieldName, Object value) { super(fieldName, value); } @Override public boolean evaluate(Object object, Query.Consumer consumer) { boolean result = false; try { Object fieldValue = consumer.get(object, getFieldName()); if(Map.class.isAssignableFrom(fieldValue.getClass())) { result = ((Map)fieldValue).containsKey(getValue()); } else if(Collection.class.isAssignableFrom(fieldValue.getClass())) { result = ((Collection)fieldValue).contains(getValue()); } else if(fieldValue.getClass().isArray()) { result = Arrays.binarySearch((Object[])fieldValue, getValue()) >= 0; } else if(String.class.isAssignableFrom(fieldValue.getClass())) { result = ((String)fieldValue).contains(getValue().toString()); } } catch (Exception ex) { throw new IllegalArgumentException("In evaluator fail", ex); } return result; } }
Update module loader for Python 3.4+
import sys import types import unittest if sys.version_info >= (3, 4): from importlib.machinery import SourceFileLoader loader = SourceFileLoader('rollbar-agent', './rollbar-agent') rollbar_agent = types.ModuleType(loader.name) loader.exec_module(rollbar_agent) else: import imp rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent') class FakeScanner: def __init__(self, config): self.config = config class TestDefaultMessageStartParserUsage(unittest.TestCase): app = {'name': 'pyramid', 'config': { 'log_format.default': 'pyramid', 'log_format.patterns': 'celery*.log celery_process', 'min_log_level': 'INFO' } } def test_process_log_debug_with_format_name(self): # check if self.default_parser uses valid format name provided in the config config = {'_formats': {'pyramid': {'name': 'pyramid'}}} scanner = FakeScanner(config) new_processor = rollbar_agent.LogFileProcessor(scanner, self.app) self.assertEqual('pyramid', new_processor.default_parser['name']) def test_process_log_debug_without_format_name(self): # check if self.default_parser can access _default_message_start_parser if format name not provided in config config = {'_formats': {}} scanner = FakeScanner(config) new_processor = rollbar_agent.LogFileProcessor(scanner, self.app) self.assertEqual('default parser', new_processor.default_parser['name']) if __name__ == '__main__': unittest.main()
import unittest import imp rollbar_agent = imp.load_source('rollbar-agent', './rollbar-agent') class FakeScanner: def __init__(self, config): self.config = config class TestDefaultMessageStartParserUsage(unittest.TestCase): app = {'name': 'pyramid', 'config': { 'log_format.default': 'pyramid', 'log_format.patterns': 'celery*.log celery_process', 'min_log_level': 'INFO' } } def test_process_log_debug_with_format_name(self): # check if self.default_parser uses valid format name provided in the config config = {'_formats': {'pyramid': {'name': 'pyramid'}}} scanner = FakeScanner(config) new_processor = rollbar_agent.LogFileProcessor(scanner, self.app) self.assertEqual('pyramid', new_processor.default_parser['name']) def test_process_log_debug_without_format_name(self): # check if self.default_parser can access _default_message_start_parser if format name not provided in config config = {'_formats': {}} scanner = FakeScanner(config) new_processor = rollbar_agent.LogFileProcessor(scanner, self.app) self.assertEqual('default parser', new_processor.default_parser['name']) if __name__ == '__main__': unittest.main()
Fix propel config for now
<?php namespace FOS\ElasticaBundle\Propel; use FOS\ElasticaBundle\Provider\AbstractProvider; /** * Propel provider. * * @author William Durand <[email protected]> */ class Provider extends AbstractProvider { /** * {@inheritDoc} */ public function doPopulate($options, \Closure $loggerClosure = null) { $queryClass = $this->objectClass.'Query'; $nbObjects = $queryClass::create()->count(); $offset = $options['offset']; for (; $offset < $nbObjects; $offset += $options['batch_size']) { $objects = $queryClass::create() ->limit($options['batch_size']) ->offset($offset) ->find() ->getArrayCopy(); $objects = $this->filterObjects($options, $objects); if (!empty($objects)) { $this->objectPersister->insertMany($objects); } usleep($options['sleep']); if ($loggerClosure) { $loggerClosure($options['batch_size'], $nbObjects); } } } /** * {@inheritDoc} */ protected function disableLogging() { } /** * {@inheritDoc} */ protected function enableLogging($logger) { } /** * {@inheritDoc} */ protected function configureOptions() { parent::configureOptions(); $this->resolver->setDefaults(array( 'clear_object_manager' => true, 'debug_logging' => false, 'ignore_errors' => false, 'offset' => 0, 'query_builder_method' => 'createQueryBuilder', 'sleep' => 0 )); } }
<?php namespace FOS\ElasticaBundle\Propel; use FOS\ElasticaBundle\Provider\AbstractProvider; /** * Propel provider. * * @author William Durand <[email protected]> */ class Provider extends AbstractProvider { /** * {@inheritDoc} */ public function doPopulate($options, \Closure $loggerClosure = null) { $queryClass = $this->objectClass.'Query'; $nbObjects = $queryClass::create()->count(); $offset = $options['offset']; for (; $offset < $nbObjects; $offset += $options['batch_size']) { $objects = $queryClass::create() ->limit($options['batch_size']) ->offset($offset) ->find() ->getArrayCopy(); $objects = $this->filterObjects($options, $objects); if (!empty($objects)) { $this->objectPersister->insertMany($objects); } usleep($options['sleep']); if ($loggerClosure) { $loggerClosure($options['batch_size'], $nbObjects); } } } /** * {@inheritDoc} */ protected function disableLogging() { } /** * {@inheritDoc} */ protected function enableLogging($logger) { } }
Make the test robust when it is used for comparisons between minor Python versions. Typically, for Wine, I have an older version installed than my Debian has, and this then fails the test without strict need.
# Copyright 2012, Kay Hayen, mailto:[email protected] # # Python tests originally created or extracted from other peoples work. The # parts were too small to be protected. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # def starImporterFunction(): from sys import * print "Version", version.split()[0].split( "." )[:-1] starImporterFunction() def deepExec(): for_closure = 3 def deeper(): for_closure_as_well = 4 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() print "Closure one level up was taken", for_closure_as_well print "Closure two levels up was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deeper() deepExec()
# Copyright 2012, Kay Hayen, mailto:[email protected] # # Python tests originally created or extracted from other peoples work. The # parts were too small to be protected. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # def starImporterFunction(): from sys import * print "Version", version.split()[0] starImporterFunction() def deepExec(): for_closure = 3 def deeper(): for_closure_as_well = 4 def execFunction(): code = "f=2" # Can fool it to nest exec code in None, None print "Locals now", locals() print "Closure one level up was taken", for_closure_as_well print "Closure two levels up was taken", for_closure print "Globals still work", starImporterFunction print "Added local from code", f execFunction() deeper() deepExec()
Add tasks/build_from_config to the public API.
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright 2015 by Ecpy Authors, see AUTHORS for more details. # # Distributed under the terms of the BSD license. # # The full license is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- """Tasks package public interface. """ from __future__ import (division, unicode_literals, print_function, absolute_import) import enaml from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask from .task_interface import (InterfaceableTaskMixin, TaskInterface, InterfaceableInterfaceMixin, IInterface) from .manager.declarations import (Tasks, Task, Interfaces, Interface, TaskConfig) from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter, MetadataTaskFilter) from .manager.configs.base_configs import BaseTaskConfig from .manager.utils.building import build_task_from_config with enaml.imports(): from .manager.configs.base_config_views import BaseConfigView from .base_views import BaseTaskView __all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView', 'InterfaceableTaskMixin', 'TaskInterface', 'InterfaceableInterfaceMixin', 'IInterface', 'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig', 'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter', 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView', 'build_task_from_config']
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright 2015 by Ecpy Authors, see AUTHORS for more details. # # Distributed under the terms of the BSD license. # # The full license is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- """Tasks package public interface. """ from __future__ import (division, unicode_literals, print_function, absolute_import) import enaml from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask from .task_interface import (InterfaceableTaskMixin, TaskInterface, InterfaceableInterfaceMixin, IInterface) from .manager.declarations import (Tasks, Task, Interfaces, Interface, TaskConfig) from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter, MetadataTaskFilter) from .manager.configs.base_configs import BaseTaskConfig with enaml.imports(): from .manager.configs.base_config_views import BaseConfigView from .base_views import BaseTaskView __all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask', 'BaseTaskView', 'InterfaceableTaskMixin', 'TaskInterface', 'InterfaceableInterfaceMixin', 'IInterface', 'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig', 'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter', 'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView']
Update rest of functions with new data structure; Passes Test 8
#!/usr/bin/env python import sys class Grid(object): def __init__(self): self.board = [[None] * 10 for i in range(22)] self.score = 0 self.lines_clear = 0 def draw_board(self): current_board = self.board for row in current_board: row = map(lambda cell: '.' if cell == None else cell, row) print ' '.join(row) def given(self): for index, row in enumerate(self.board): self.board[index] = [None if cell == '.' else cell for cell in raw_input() if cell != ' '] def clear(self): self.board = [[None] * 10 for i in range(22)] def show_score(self): print self.score def show_clear_lines(self): print self.lines_clear def step(self): for index, row in enumerate(self.board): if all(row) and row[0] != None: self.board[index] = [None] * 10 self.score += 100 self.lines_clear += 1 def main(): grid = Grid() commands = {'p' : grid.draw_board, 'g' : grid.given, 'c' : grid.clear, '?s': grid.show_score, '?n': grid.show_clear_lines, 's' : grid.step} while True: command = raw_input() if command == 'q': break commands[command]() if __name__ == '__main__': main()
#!/usr/bin/env python import sys class Grid(object): def __init__(self): self.board = [[None] * 10 for i in range(22)] self.score = 0 self.lines_clear = 0 def draw_board(self): current_board = self.board for row in current_board: row = map(lambda cell: '.' if cell == None else y, row) print ' '.join(row) def given(self): self.board = [] for row in range(0,22): self.board.append(raw_input()) def clear(self): self.board = ['. '*10 for row in range(0,22)] def show_score(self): print self.score def show_clear_lines(self): print self.lines_clear def main(): grid = Grid() commands = {'p': grid.draw_board, 'g': grid.given, 'c': grid.clear, '?s': grid.show_score, '?n': grid.show_clear_lines} while True: command = raw_input() if command == 'q': break commands[command]() if __name__ == '__main__': main()
Sort repository regex paths by longest first
<?php namespace Gitlist\Util; use Silex\Application; class Routing { protected $app; public function __construct(Application $app) { $this->app = $app; } public function getRepositoryRegex() { static $regex = null; if ($regex === null) { $app = $this->app; $quoted_paths = array_map( function ($repo) use ($app) { return preg_quote($app['util.routing']->getRelativePath($repo['path']), '#'); }, $this->app['git']->getRepositories($this->app['git.repos']) ); usort($quoted_paths, function ($a, $b) { return strlen($b) - strlen($a); }); $regex = implode('|', $quoted_paths); } return $regex; } /** * Strips the base path from a full repository path * * @param string $repo_path Full path to the repository * @return string Relative path to the repository from git.repositories */ public function getRelativePath($repo_path) { if (strpos($repo_path, $this->app['git.repos']) === 0) { $relative_path = substr($repo_path, strlen($this->app['git.repos'])); return ltrim($relative_path, '/'); } else { throw new \InvalidArgumentException( sprintf("Path '%s' does not match configured repository directory", $repo_path) ); } } }
<?php namespace Gitlist\Util; use Silex\Application; class Routing { protected $app; public function __construct(Application $app) { $this->app = $app; } public function getRepositoryRegex() { static $regex = null; if ($regex === null) { $app = $this->app; $quoted_paths = array_map( function ($repo) use ($app) { return preg_quote($app['util.routing']->getRelativePath($repo['path']), '#'); }, $this->app['git']->getRepositories($this->app['git.repos']) ); $regex = implode('|', $quoted_paths); } return $regex; } /** * Strips the base path from a full repository path * * @param string $repo_path Full path to the repository * @return string Relative path to the repository from git.repositories */ public function getRelativePath($repo_path) { if (strpos($repo_path, $this->app['git.repos']) === 0) { $relative_path = substr($repo_path, strlen($this->app['git.repos'])); return ltrim($relative_path, '/'); } else { throw new \InvalidArgumentException( sprintf("Path '%s' does not match configured repository directory", $repo_path) ); } } }
Trim the S3 prefix on both sides just to be on the safe side
<?php namespace Jalle19\VagrantRegistryGenerator\Configuration; use Symfony\Component\Console\Input\InputInterface; /** * Class Parser * @package Jalle19\VagrantRegistryGenerator\Configuration */ class Parser { /** * @param InputInterface $input * * @return Configuration */ public static function parseConfiguration(InputInterface $input) { $configuration = new Configuration(); $outputPath = $input->getArgument('outputPath'); $configuration ->setRegistryPath($input->getArgument('registryPath')) ->setOutputPath($outputPath) ->setAwsAccessKey($input->getOption('awsAccessKey')) ->setAwsSecretKey($input->getOption('awsSecretKey')) ->setAwsRegion($input->getOption('awsRegion')); if (Configuration::getFilesystemType($outputPath) === Configuration::FILESYSTEM_TYPE_LOCAL) { // Convert to absolute path if (substr($outputPath, 0, 1) !== '/') { $configuration->setOutputPath(getcwd() . '/' . $outputPath); } } return $configuration; } /** * @param string $path * * @return array */ public static function parseBucketPrefix($path) { $url = parse_url($path); $bucket = $url['host']; $prefix = trim($url['path'], '/'); return [$bucket, $prefix]; } }
<?php namespace Jalle19\VagrantRegistryGenerator\Configuration; use Symfony\Component\Console\Input\InputInterface; /** * Class Parser * @package Jalle19\VagrantRegistryGenerator\Configuration */ class Parser { /** * @param InputInterface $input * * @return Configuration */ public static function parseConfiguration(InputInterface $input) { $configuration = new Configuration(); $outputPath = $input->getArgument('outputPath'); $configuration ->setRegistryPath($input->getArgument('registryPath')) ->setOutputPath($outputPath) ->setAwsAccessKey($input->getOption('awsAccessKey')) ->setAwsSecretKey($input->getOption('awsSecretKey')) ->setAwsRegion($input->getOption('awsRegion')); if (Configuration::getFilesystemType($outputPath) === Configuration::FILESYSTEM_TYPE_LOCAL) { // Convert to absolute path if (substr($outputPath, 0, 1) !== '/') { $configuration->setOutputPath(getcwd() . '/' . $outputPath); } } return $configuration; } /** * @param string $path * * @return array */ public static function parseBucketPrefix($path) { $url = parse_url($path); $bucket = $url['host']; $prefix = ltrim($url['path'], '/'); return [$bucket, $prefix]; } }
Update test so that it runs
import unittest from performance.web import Request, RequestTypeError, RequestTimeError class RequestTestCase(unittest.TestCase): def setUp(self): self.url = 'http://www.google.com' def test_constants(self): self.assertEqual('get', Request.GET) self.assertEqual('post', Request.POST) def test_init(self): request = Request(url=self.url, type=Request.GET) self.assertEqual(self.url, request.url) self.assertEqual(Request.GET, request.type) def test_do(self): request = Request(url=self.url, type=Request.GET) request.do() self.assertTrue(hasattr(request, 'status_code')) request.type = Request.POST request.do() self.assertTrue(hasattr(request, 'status_code')) def test_invalid_type(self): type = 'foo_bar' request = Request(url=self.url, type=type) with self.assertRaises(RequestTypeError) as error: request.do() self.assertEqual('Invalid request type "%s"' % type, error.exception.__str__()) def test_response_time(self): request = Request(url=self.url, type=Request.GET) request.do() self.assertEqual(request.finished - request.started, request.get_response_time()) def test_time_error(self): request = Request(url=self.url, type=Request.GET) with self.assertRaises(RequestTimeError): request.get_response_time()
import unittest from performance.web import Request, RequestTypeError, RequestTimeError class RequestTestCase(unittest.TestCase): def setUp(self): self.url = 'http://www.google.com' def test_constants(self): self.assertEqual('get', Request.GET) self.assertEqual('post', Request.POST) def test_init(self): request = Request(url=self.url, type=Request.GET) self.assertEqual(self.url, request.url) self.assertEqual(Request.GET, request.type) def test_do(self): request = Request(url=self.url, type=Request.GET) request.do() self.assertTrue(hasattr(request, 'status_code')) request.type = Request.POST request.do() self.assertTrue(hasattr(request, 'status_code')) def test_invalid_type(self): type = 'foo_bar' request = Request(url=self.url, type=type) with self.assertRaises(RequestTypeError) as error: request.do() self.assertEqual('Invalid request type "%s".' % type, error.exception.__str__()) def test_response_time(self): request = Request(url=self.url, type=Request.GET) request.do() self.assertEqual(request.finished - request.started, request.get_response_time()) def test_time_error(self): request = Request(url=self.url, type=Request.GET) with self.assertRaises(RequestTimeError): request.get_response_time()
Clarify that the MathJax comment is Notebook specific.
"""Simple magics for display formats""" #----------------------------------------------------------------------------- # Copyright (c) 2012 The IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Our own packages from IPython.core.display import display, Javascript, Latex, SVG, HTML from IPython.core.magic import ( Magics, magics_class, cell_magic ) #----------------------------------------------------------------------------- # Magic implementation classes #----------------------------------------------------------------------------- @magics_class class DisplayMagics(Magics): """Magics for displaying various output types with literals Defines javascript/latex/svg/html cell magics for writing blocks in those languages, to be rendered in the frontend. """ @cell_magic def javascript(self, line, cell): """Run the cell block of Javascript code""" display(Javascript(cell)) @cell_magic def latex(self, line, cell): """Render the cell as a block of latex The subset of latex which is support depends on the implementation in the client. In the Jupyter Notebook, this magic only renders the subset of latex defined by MathJax [here](https://docs.mathjax.org/en/v2.5-latest/tex.html).""" display(Latex(cell)) @cell_magic def svg(self, line, cell): """Render the cell as an SVG literal""" display(SVG(cell)) @cell_magic def html(self, line, cell): """Render the cell as a block of HTML""" display(HTML(cell))
"""Simple magics for display formats""" #----------------------------------------------------------------------------- # Copyright (c) 2012 The IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Our own packages from IPython.core.display import display, Javascript, Latex, SVG, HTML from IPython.core.magic import ( Magics, magics_class, cell_magic ) #----------------------------------------------------------------------------- # Magic implementation classes #----------------------------------------------------------------------------- @magics_class class DisplayMagics(Magics): """Magics for displaying various output types with literals Defines javascript/latex/svg/html cell magics for writing blocks in those languages, to be rendered in the frontend. """ @cell_magic def javascript(self, line, cell): """Run the cell block of Javascript code""" display(Javascript(cell)) @cell_magic def latex(self, line, cell): """Render the cell as a block of latex This magic only renders the subset of latex defined by MathJax [here](https://docs.mathjax.org/en/v2.5-latest/tex.html).""" display(Latex(cell)) @cell_magic def svg(self, line, cell): """Render the cell as an SVG literal""" display(SVG(cell)) @cell_magic def html(self, line, cell): """Render the cell as a block of HTML""" display(HTML(cell))
Fix Ghost icon is not clickable closes #3623 - Initialization of the link was done on the login page, where the 'burger' did not exist. - Initialization in the application needs to be done to make it work on refresh
import {mobileQuery, responsiveAction} from 'ghost/utils/mobile'; var PostsView = Ember.View.extend({ target: Ember.computed.alias('controller'), classNames: ['content-view-container'], tagName: 'section', mobileInteractions: function () { Ember.run.scheduleOnce('afterRender', this, function () { var self = this; $(window).resize(function () { if (!mobileQuery.matches) { self.send('resetContentPreview'); } }); // ### Show content preview when swiping left on content list $('.manage').on('click', '.content-list ol li', function (event) { responsiveAction(event, '(max-width: 800px)', function () { self.send('showContentPreview'); }); }); // ### Hide content preview $('.manage').on('click', '.content-preview .button-back', function (event) { responsiveAction(event, '(max-width: 800px)', function () { self.send('hideContentPreview'); }); }); $('[data-off-canvas]').attr('href', this.get('controller.ghostPaths.blogRoot')); }); }.on('didInsertElement'), }); export default PostsView;
import {mobileQuery, responsiveAction} from 'ghost/utils/mobile'; var PostsView = Ember.View.extend({ target: Ember.computed.alias('controller'), classNames: ['content-view-container'], tagName: 'section', mobileInteractions: function () { Ember.run.scheduleOnce('afterRender', this, function () { var self = this; $(window).resize(function () { if (!mobileQuery.matches) { self.send('resetContentPreview'); } }); // ### Show content preview when swiping left on content list $('.manage').on('click', '.content-list ol li', function (event) { responsiveAction(event, '(max-width: 800px)', function () { self.send('showContentPreview'); }); }); // ### Hide content preview $('.manage').on('click', '.content-preview .button-back', function (event) { responsiveAction(event, '(max-width: 800px)', function () { self.send('hideContentPreview'); }); }); }); }.on('didInsertElement'), }); export default PostsView;
Use an indexation method that works even for Magento <= 1.7
<?php class SPM_ShopyMind_Test_Observer { public function beforeTestStart() { if (Mage::app()->getStore()->isAdmin()) { $store = Mage::getModel('core/store')->load(1); if (!$store->isEmpty()) { $this->_setStore($store->getCode()); } } if ($this->_shouldRunWithFlatCatalog()) { $this->_enableFlatCatalog(); $this->_reindexFlatCatalog(); } } protected function _setStoreConfig($path, $value) { Mage::app()->getStore() ->setConfig($path, $value); } protected function _shouldRunWithFlatCatalog() { return getenv('USE_FLAT_CATALOG'); } protected function _enableFlatCatalog() { $this->_setStoreConfig('catalog/frontend/flat_catalog_product', '1'); $this->_setStoreConfig('catalog/frontend/flat_catalog_category', '1'); } protected function _reindexFlatCatalog() { Mage::getResourceModel('catalog/product_flat_indexer')->rebuild(); Mage::getResourceModel('catalog/category_flat')->rebuild(); } protected function _setStore($store) { EcomDev_PHPUnit_Test_Case_Util::setCurrentStore($store); } }
<?php class SPM_ShopyMind_Test_Observer { public function beforeTestStart() { if (Mage::app()->getStore()->isAdmin()) { $store = Mage::getModel('core/store')->load(1); if (!$store->isEmpty()) { $this->_setStore($store->getCode()); } } if ($this->_shouldRunWithFlatCatalog()) { $this->_enableFlatCatalog(); $this->_reindexFlatCatalog(); } } protected function _setStoreConfig($path, $value) { Mage::app()->getStore() ->setConfig($path, $value); } protected function _shouldRunWithFlatCatalog() { return getenv('USE_FLAT_CATALOG'); } protected function _enableFlatCatalog() { $this->_setStoreConfig('catalog/frontend/flat_catalog_product', '1'); $this->_setStoreConfig('catalog/frontend/flat_catalog_category', '1'); } protected function _reindexFlatCatalog() { foreach (array('catalog_product_flat', 'catalog_category_flat') as $indexer) { Mage::getSingleton('index/indexer') ->getProcessByCode($indexer) ->reindexAll(); } } protected function _setStore($store) { EcomDev_PHPUnit_Test_Case_Util::setCurrentStore($store); } }
Support for anonymous saved versions This fixes a case when the API sends a version without a user. There was a bug that allowed creating anonymous versions in the application, and we have to support the old data. The problem here is that SnapshotInfo classes are inflated from json via Gson. This method does not call any constructor and, since the json does not include the 'user' key, the bridge crashes because we're not expecting null users. I'm not happy with this fix, but it is the minimum solution that does not affect anything else.
package uk.ac.ic.wlgitbridge.snapshot.getsavedvers; import uk.ac.ic.wlgitbridge.util.Util; /** * Created by Winston on 06/11/14. */ public class SnapshotInfo implements Comparable<SnapshotInfo> { private int versionId; private String comment; private WLUser user; private String createdAt; public SnapshotInfo(int versionID, String createdAt, String name, String email) { this(versionID, "Update on " + Util.getServiceName() + ".", email, name, createdAt); } public SnapshotInfo(int versionID, String comment, String email, String name, String createdAt) { versionId = versionID; this.comment = comment; user = new WLUser(name, email); this.createdAt = createdAt; } public int getVersionId() { return versionId; } public String getComment() { return comment; } public WLUser getUser() { return user != null ? user : new WLUser(); } public String getCreatedAt() { return createdAt; } @Override public boolean equals(Object obj) { if (!(obj instanceof SnapshotInfo)) { return false; } SnapshotInfo that = (SnapshotInfo) obj; return versionId == that.versionId; } @Override public int compareTo(SnapshotInfo o) { return Integer.compare(versionId, o.versionId); } }
package uk.ac.ic.wlgitbridge.snapshot.getsavedvers; import uk.ac.ic.wlgitbridge.util.Util; /** * Created by Winston on 06/11/14. */ public class SnapshotInfo implements Comparable<SnapshotInfo> { private int versionId; private String comment; private WLUser user; private String createdAt; public SnapshotInfo(int versionID, String createdAt, String name, String email) { this(versionID, "Update on " + Util.getServiceName() + ".", email, name, createdAt); } public SnapshotInfo(int versionID, String comment, String email, String name, String createdAt) { versionId = versionID; this.comment = comment; user = new WLUser(name, email); this.createdAt = createdAt; } public int getVersionId() { return versionId; } public String getComment() { return comment; } public WLUser getUser() { return user; } public String getCreatedAt() { return createdAt; } @Override public boolean equals(Object obj) { if (!(obj instanceof SnapshotInfo)) { return false; } SnapshotInfo that = (SnapshotInfo) obj; return versionId == that.versionId; } @Override public int compareTo(SnapshotInfo o) { return Integer.compare(versionId, o.versionId); } }
Remove Telescope service provider registration
<?php namespace RadDB\Providers; use Illuminate\Support\Facades\Blade; use Illuminate\Support\Facades\Schema; use Illuminate\Support\ServiceProvider; use Laravel\Dusk\DuskServiceProvider; class AppServiceProvider extends ServiceProvider { /** * Bootstrap any application services. * * @return void */ public function boot() { // Work around for MariaDB/MySQL key too long errors // Not required as long as MySQL > 5.7 or Mariadb > 10.2.2 // and InnoDB tables are being used. // Schema::defaultStringLength(191); /* * Blade directive to dump a variable/object inside a template. * This is similar to dd(), except that it doesn't interrupt the * execution of the app. It does NOT support multiple arguments * however, you have to use one directive per variable. * * From https://gist.github.com/victorloux/0c073afa5d4784d2b8e9 * * @example @dump($posts->comments) */ Blade::directive('dump', function ($param) { return "<pre><?php (new \Illuminate\Support\Debug\Dumper)->dump($param); ?></pre>"; }); } /** * Register any application services. * * @return void */ public function register() { if ($this->app->environment('local', 'testing')) { $this->app->register(DuskServiceProvider::class); } } }
<?php namespace RadDB\Providers; use Illuminate\Support\Facades\Blade; use Illuminate\Support\Facades\Schema; use Illuminate\Support\ServiceProvider; use Laravel\Dusk\DuskServiceProvider; class AppServiceProvider extends ServiceProvider { /** * Bootstrap any application services. * * @return void */ public function boot() { // Work around for MariaDB/MySQL key too long errors // Not required as long as MySQL > 5.7 or Mariadb > 10.2.2 // and InnoDB tables are being used. // Schema::defaultStringLength(191); /* * Blade directive to dump a variable/object inside a template. * This is similar to dd(), except that it doesn't interrupt the * execution of the app. It does NOT support multiple arguments * however, you have to use one directive per variable. * * From https://gist.github.com/victorloux/0c073afa5d4784d2b8e9 * * @example @dump($posts->comments) */ Blade::directive('dump', function ($param) { return "<pre><?php (new \Illuminate\Support\Debug\Dumper)->dump($param); ?></pre>"; }); } /** * Register any application services. * * @return void */ public function register() { if ($this->app->environment('local', 'testing')) { $this->app->register(DuskServiceProvider::class); $this->app->register(TelescopeServiceProvider::class); } } }
Add function to calculate distance between points
<?php namespace GeoTools\Model; final class Point2D { /** * @var double */ public $x; /** * @var double */ public $y; /** * @param double $x * @param double $y */ public function __construct($x, $y) { $this->x = $x; $this->y = $y; } /** * @param Point2D $point * @return double */ public function distanceToPoint(Point2D $point) { return sqrt($this->squareDistanceToPoint($point)); } /** * @param Point2D $point * @return double */ public function squareDistanceToPoint(Point2D $point) { return pow($point->x - $this->x, 2) + pow($point->y - $this->y, 2); } /** * @param Point2D[] $points * @return BoundingBox2D */ public static function calculateBoundingBoxForPoints(array $points) { if (!$points) { return new BoundingBox2D(0, 0, 0, 0); } $xmin = null; $xmax = null; $ymin = null; $ymax = null; foreach ($points as $point) { if ($xmin === null || $point->x < $xmin) { $xmin = $point->x; } if ($xmax === null || $point->x > $xmax) { $xmax = $point->x; } if ($ymin === null || $point->y < $ymin) { $ymin = $point->y; } if ($ymax === null || $point->y > $ymax) { $ymax = $point->y; } } return new BoundingBox2D($xmin, $xmax, $ymin, $ymax); } }
<?php namespace GeoTools\Model; final class Point2D { /** * @var double */ public $x; /** * @var double */ public $y; /** * @param double $x * @param double $y */ public function __construct($x, $y) { $this->x = $x; $this->y = $y; } /** * @param Point2D[] $points * @return BoundingBox2D */ public static function calculateBoundingBoxForPoints(array $points) { if (!$points) { return new BoundingBox2D(0, 0, 0, 0); } $xmin = null; $xmax = null; $ymin = null; $ymax = null; foreach ($points as $point) { if ($xmin === null || $point->x < $xmin) { $xmin = $point->x; } if ($xmax === null || $point->x > $xmax) { $xmax = $point->x; } if ($ymin === null || $point->y < $ymin) { $ymin = $point->y; } if ($ymax === null || $point->y > $ymax) { $ymax = $point->y; } } return new BoundingBox2D($xmin, $xmax, $ymin, $ymax); } }
Throw errors if either arg is wrong
var async = require('async'); var DoWhen = function(obj, ev) { var objCallback, triggerCallbacks, args = null, callbacks = []; if (typeof(obj) != 'undefined') { throw TypeError('obj argument must be an EventEmitter-like object'); } if (typeof(ev) == 'undefined') { throw TypeError('ev argument must be an event type string'); } triggerCallbacks = function() { if (args !== null && callbacks.length > 0) { for(var i = 0; i < callbacks.length; ++i) { var callback = callbacks[i]; async.nextTick(function() { callback.apply(callback, args); }); } callbacks = []; return true; } else { return false; } }; objCallback = function() { args = arguments; triggerCallbacks(); }; this.on = function(obj, ev) { obj.on(ev, objCallback); }; this.on(obj, ev); this.off = function() { obj.removeCallback(objCallback); }; this.addCallback = function(callback) { callbacks.push(callback); if (args !== null) { triggerCallbacks(); } }; this.do = this.addCallback; this.removeCallback = function(callback) { var i = callbacks.indexOf(callback); if (i != -1) { callbacks.splice(i, 1); return true; } else { return false; } }; }; module.exports = DoWhen;
var async = require('async'); var DoWhen = function(obj, ev) { var objCallback, triggerCallbacks, args = null, callbacks = []; if (typeof(obj) == 'undefined') { // error } if (typeof(ev) == 'undefined') { //error } triggerCallbacks = function() { if (args !== null && callbacks.length > 0) { for(var i = 0; i < callbacks.length; ++i) { var callback = callbacks[i]; async.nextTick(function() { callback.apply(callback, args); }); } callbacks = []; return true; } else { return false; } }; objCallback = function() { args = arguments; triggerCallbacks(); }; this.on = function(obj, ev) { obj.on(ev, objCallback); }; this.on(obj, ev); this.off = function() { obj.removeCallback(objCallback); }; this.addCallback = function(callback) { callbacks.push(callback); if (args !== null) { triggerCallbacks(); } }; this.do = this.addCallback; this.removeCallback = function(callback) { var i = callbacks.indexOf(callback); if (i != -1) { callbacks.splice(i, 1); return true; } else { return false; } }; }; module.exports = DoWhen;
Include the entire existing environment for integration tests subprocesses
import copy import multiprocessing import os from pathlib import PurePath import subprocess import sys import tempfile from textwrap import dedent import unittest try: from unittest.mock import MagicMock except: from mock import MagicMock from green import cmdline class TestFinalizer(unittest.TestCase): def setUp(self): self.tmpdir = tempfile.mkdtemp() def test_finalizer(self): """ Test that the finalizer works on Python 3.8+ """ sub_tmpdir = tempfile.mkdtemp(dir=self.tmpdir) for i in range(multiprocessing.cpu_count() * 2): fh = open(os.path.join(sub_tmpdir, f"test_finalizer{i}.py"), "w") fh.write( dedent( f""" import unittest class Pass{i}(unittest.TestCase): def test_pass{i}(self): pass def msg(): print("finalizer worked") """ ) ) fh.close() args = [ sys.executable, "-m", "green.cmdline", "--finalizer=test_finalizer0.msg", "--maxtasksperchild=1", ] pythonpath = str(PurePath(__file__).parent.parent.parent) env = copy.deepcopy(os.environ) env["PYTHONPATH"] = pythonpath output = subprocess.run( args, cwd=sub_tmpdir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, timeout=10, ).stdout.decode("utf-8") self.assertIn("finalizer worked", output)
import multiprocessing import os from pathlib import PurePath import subprocess import sys import tempfile from textwrap import dedent import unittest try: from unittest.mock import MagicMock except: from mock import MagicMock from green import cmdline class TestFinalizer(unittest.TestCase): def setUp(self): self.tmpdir = tempfile.mkdtemp() def test_finalizer(self): """ Test that the finalizer works on Python 3.8+ """ sub_tmpdir = tempfile.mkdtemp(dir=self.tmpdir) for i in range(multiprocessing.cpu_count() * 2): fh = open(os.path.join(sub_tmpdir, f"test_finalizer{i}.py"), "w") fh.write( dedent( f""" import unittest class Pass{i}(unittest.TestCase): def test_pass{i}(self): pass def msg(): print("finalizer worked") """ ) ) fh.close() args = [ sys.executable, "-m", "green.cmdline", "--finalizer=test_finalizer0.msg", "--maxtasksperchild=1", ] pythonpath = str(PurePath(__file__).parent.parent.parent) output = subprocess.run( args, cwd=sub_tmpdir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env={"PYTHONPATH": pythonpath}, timeout=10, ).stdout.decode("utf-8") self.assertIn("finalizer worked", output)
Make GA user id persistent across database resets and app_key resets
<?php namespace OpenDominion\Http\Middleware; use Analytics; use Closure; use Illuminate\Support\Facades\Auth; class Authenticate { /** * Handle an incoming request. * * @param \Illuminate\Http\Request $request * @param \Closure $next * @param string|null $guard * @return mixed */ public function handle($request, Closure $next, $guard = null) { $auth = Auth::guard($guard); if ($auth->guest()) { if ($request->ajax() || $request->wantsJson()) { return response('Unauthorized.', 401); } else { return redirect()->guest(route('auth.login')); } } $user = $auth->user(); if (!$user->activated) { $auth->logout(); // todo: add "click here to have a new activation email being sent to you" $request->session()->flash('alert-danger', 'Your account has not been activated yet. Check your spam folder for the activation email.'); return redirect()->guest(route('auth.login')); } // Analytics Analytics::setUserId(md5($user->email)); return $next($request); } }
<?php namespace OpenDominion\Http\Middleware; use Analytics; use Closure; use Illuminate\Support\Facades\Auth; class Authenticate { /** * Handle an incoming request. * * @param \Illuminate\Http\Request $request * @param \Closure $next * @param string|null $guard * @return mixed */ public function handle($request, Closure $next, $guard = null) { $auth = Auth::guard($guard); if ($auth->guest()) { if ($request->ajax() || $request->wantsJson()) { return response('Unauthorized.', 401); } else { return redirect()->guest(route('auth.login')); } } $user = $auth->user(); if (!$user->activated) { $auth->logout(); // todo: add "click here to have a new activation email being sent to you" $request->session()->flash('alert-danger', 'Your account has not been activated yet. Check your spam folder for the activation email.'); return redirect()->guest(route('auth.login')); } // Analytics Analytics::setUserId(md5(env('APP_KEY') . $user->id)); return $next($request); } }
Use repr() instead of str() for printing
import traceback import sys import logging # always print stuff on the screen: logging.basicConfig(level=logging.INFO) def log_exception(func): def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except: logging.info("Exception raised") etype, value, tb = sys.exc_info() s = "".join(traceback.format_exception(etype, value, tb)) logging.info(s) logging.info("-"*40) raise return wrapper class Eval(object): def __init__(self): self._namespace = {} def eval(self, x): globals = self._namespace try: x = x.strip() y = x.split('\n') if len(y) == 0: return '' s = '\n'.join(y[:-1]) + '\n' t = y[-1] try: z = compile(t + '\n', '', 'eval') except SyntaxError: s += '\n' + t z = None eval(compile(s, '', 'exec'), globals, globals) if not z is None: r = repr(eval(z, globals)) else: r = '' return r except: etype, value, tb = sys.exc_info() # If we decide in the future to remove the first frame fromt he # traceback (since it links to our code, so it could be confusing # to the user), it's easy to do: #tb = tb.tb_next s = "".join(traceback.format_exception(etype, value, tb)) return s
import traceback import sys import logging # always print stuff on the screen: logging.basicConfig(level=logging.INFO) def log_exception(func): def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except: logging.info("Exception raised") etype, value, tb = sys.exc_info() s = "".join(traceback.format_exception(etype, value, tb)) logging.info(s) logging.info("-"*40) raise return wrapper class Eval(object): def __init__(self): self._namespace = {} def eval(self, x): globals = self._namespace try: x = x.strip() y = x.split('\n') if len(y) == 0: return '' s = '\n'.join(y[:-1]) + '\n' t = y[-1] try: z = compile(t + '\n', '', 'eval') except SyntaxError: s += '\n' + t z = None eval(compile(s, '', 'exec'), globals, globals) if not z is None: r = str(eval(z, globals)) else: r = '' return r except: etype, value, tb = sys.exc_info() # If we decide in the future to remove the first frame fromt he # traceback (since it links to our code, so it could be confusing # to the user), it's easy to do: #tb = tb.tb_next s = "".join(traceback.format_exception(etype, value, tb)) return s
Rename lock to _lock to imply that it's private. tilequeue/queue/file.py - The `lock` instance variable shouldn't be used outside of the `OutputFileQueue`'s methods.
from tilequeue.tile import serialize_coord, deserialize_coord, CoordMessage import threading class OutputFileQueue(object): def __init__(self, fp): self.fp = fp self._lock = threading.RLock() def enqueue(self, coord): with self._lock: payload = serialize_coord(coord) self.fp.write(payload + '\n') def enqueue_batch(self, coords): n = 0 for coord in coords: self.enqueue(coord) n += 1 return n, 0 def read(self, max_to_read=1, timeout_seconds=20): with self._lock: coords = [] for _ in range(max_to_read): try: coord = next(self.fp) except StopIteration: break coords.append(CoordMessage(deserialize_coord(coord), None)) return coords def job_done(self, coord_message): pass def clear(self): with self._lock: self.fp.seek(0) self.fp.truncate() return -1 def close(self): with self._lock: remaining_queue = "".join([ln for ln in self.fp]) self.clear() self.fp.write(remaining_queue) self.fp.close()
from tilequeue.tile import serialize_coord, deserialize_coord, CoordMessage import threading class OutputFileQueue(object): def __init__(self, fp): self.fp = fp self.lock = threading.RLock() def enqueue(self, coord): with self.lock: payload = serialize_coord(coord) self.fp.write(payload + '\n') def enqueue_batch(self, coords): n = 0 for coord in coords: self.enqueue(coord) n += 1 return n, 0 def read(self, max_to_read=1, timeout_seconds=20): with self.lock: coords = [] for _ in range(max_to_read): try: coord = next(self.fp) except StopIteration: break coords.append(CoordMessage(deserialize_coord(coord), None)) return coords def job_done(self, coord_message): pass def clear(self): with self.lock: self.fp.seek(0) self.fp.truncate() return -1 def close(self): with self.lock: remaining_queue = "".join([ln for ln in self.fp]) self.clear() self.fp.write(remaining_queue) self.fp.close()
Correct directory names in test sample of javascript validator
/* tests validator.js implementation. steps to run this test: 1. install mocha $ npm install -g mocha 2. run RedPen in server mode $ cd $REDPEN_HOME/bin $ ./redpen-server 3. rename validator.js.example to enable the validator implementation $ cd $REDPEN_HOME/js $ mv validator.js.example validator.js 4. run mocha $ cd $REDPEN_HOME/js $ mocha */ var assert = require('assert'); var redpen = require('./redpen'); describe('redpen-test', function () { it('test validator.js', function (done) { var request = { "document": "This sentence contains toolongword. This sentence doesn't contain too long word.", "format": "json2", "documentParser": "PLAIN", "config": { "lang": "en", "validators": { "JavaScript": {} } } }; var assertion = function (errorSentences) { // only one sentence contains error assert.equal(errorSentences.length, 1); firstErrorSentence = errorSentences[0]; assert.equal(firstErrorSentence.sentence, 'This sentence contains toolongword.'); // there is one too word exceeds 10 chalacteres long assert.equal(1, firstErrorSentence.errors.length); assert.equal('[validator.js] word [toolongword.] is too long. length: 12', firstErrorSentence.errors[0].message); done(); }; redpen.callRedPen(request, assertion); }); });
/* tests validator.js implementation. steps to run this test: 1. install mocha $ npm install -g mocha 2. run RedPen in server mode $ cd $REDPEN_HOME/bin $ ./redpen-server 3. rename validator.js.example to enable the validator implementation $ cd $REDPEN_HOME/sample $ mv validator.js.example validator.js 4. run mocha $ cd $REDPEN_HOME/sample $ mocha */ var assert = require('assert'); var redpen = require('./redpen'); describe('redpen-test', function () { it('test validator.js', function (done) { var request = { "document": "This sentence contains toolongword. This sentence doesn't contain too long word.", "format": "json2", "documentParser": "PLAIN", "config": { "lang": "en", "validators": { "JavaScript": {} } } }; var assertion = function (errorSentences) { // only one sentence contains error assert.equal(errorSentences.length, 1); firstErrorSentence = errorSentences[0]; assert.equal(firstErrorSentence.sentence, 'This sentence contains toolongword.'); // there is one too word exceeds 10 chalacteres long assert.equal(1, firstErrorSentence.errors.length); assert.equal('[validator.js] word [toolongword.] is too long. length: 12', firstErrorSentence.errors[0].message); done(); }; redpen.callRedPen(request, assertion); }); });
Add sl-bootstrap to blueprint bower includes
/* globals module */ module.exports = { afterInstall: function() { var self = this; return this.addBowerPackageToProject( 'bootstrap-datepicker' ) .then( function() { return self.addBowerPackageToProject( 'momentjs' ); }) .then( function() { return self.addBowerPackageToProject( 'fontawesome' ); }) .then( function() { return self.addBowerPackageToProject( 'highcharts' ); }) .then( function() { return self.addBowerPackageToProject( 'moment' ); }) .then( function() { return self.addBowerPackageToProject( 'moment-timezone' ); }) .then( function() { return self.addBowerPackageToProject( 'select2' ); }) .then( function() { return self.addBowerPackageToProject( 'typeahead.js' ); }) .then( function() { return self.addBowerPackageToProject( '[email protected]:interface/sl-bootstrap#0.6.1' ); }); }, normalizeEntityName: function() {} };
/* globals module */ module.exports = { afterInstall: function() { var self = this; return this.addBowerPackageToProject( 'bootstrap-datepicker' ) .then( function() { return self.addBowerPackageToProject( 'momentjs' ); }) .then( function() { return self.addBowerPackageToProject( 'fontawesome' ); }) .then( function() { return self.addBowerPackageToProject( 'highcharts' ); }) .then( function() { return self.addBowerPackageToProject( 'moment' ); }) .then( function() { return self.addBowerPackageToProject( 'moment-timezone' ); }) .then( function() { return self.addBowerPackageToProject( 'select2' ); }) .then( function() { return self.addBowerPackageToProject( 'typeahead.js' ); }); }, normalizeEntityName: function() {} };
Fix python 2 unicode issue.
from django.utils import six from debug_toolbar_multilang.pseudo import STR_FORMAT_PATTERN, \ STR_FORMAT_NAMED_PATTERN from debug_toolbar_multilang.pseudo.pseudo_language import PseudoLanguage class ExpanderPseudoLanguage(PseudoLanguage): """ Pseudo Language for expanding the strings. This is useful for verifying that the message still fits on the screen. Remember that some words are much more longer in other languages than in English. For instance, German words that 30% more space in average. """ def make_pseudo(self, message): # message without %s or {} in it. # {test} or %(test)s is allowed, though. safeMessage = list(message) # find every matching string for match in reversed(list(STR_FORMAT_PATTERN.finditer(message))): # Check if string uses the "named format". # If not, the string will be replaced and saved # into safeMessage if not STR_FORMAT_NAMED_PATTERN.match(match.group()): start, end = match.span() safeMessage[start:end] = "???" # create complete message by using the original, appending # a space and finally converting the safeMessage to a string # again. return "%s %s" % (message, "".join(safeMessage)) def language(self): return "pse-expander" @property def name(self): return "Pseudo-Expander Language"
from django.utils import six from debug_toolbar_multilang.pseudo import STR_FORMAT_PATTERN, \ STR_FORMAT_NAMED_PATTERN from debug_toolbar_multilang.pseudo.pseudo_language import PseudoLanguage class ExpanderPseudoLanguage(PseudoLanguage): """ Pseudo Language for expanding the strings. This is useful for verifying that the message still fits on the screen. Remember that some words are much more longer in other languages than in English. For instance, German words that 30% more space in average. """ def make_pseudo(self, message): # message without %s or {} in it. # {test} or %(test)s is allowed, though. safeMessage = list(message) # find every matching string for match in reversed(list(STR_FORMAT_PATTERN.finditer(message))): # Check if string uses the "named format". # If not, the string will be replaced and saved # into safeMessage if not STR_FORMAT_NAMED_PATTERN.match(match.group()): start, end = match.span() safeMessage[start:end] = "???" # create complete message by using the original, appending # a space and finally converting the safeMessage to a string # again. return six.u("%s %s" % (message, "".join(safeMessage))) def language(self): return "pse-expander" @property def name(self): return "Pseudo-Expander Language"
Correct bug in var declarations
(function($) { return $.fn.noiseGen = function(options) { var defaultOptions = { width: 32, height: 32, opacity: 0.2, fallbackImage: false, depth: 60 }, canvas = document.createElement("canvas"); options = $.extend(defaultOptions, options); if (!canvas.getContext || !canvas.getContext("2d")) { // Canvas not supported :( if (!!options.fallbackImage) { // Fallback image provided, set it as background return this.css("background-image", "url(" + options.fallbackImage + ")"); } else { // Fallback background image not provided, just return maintaining chainability return this; } } else { // Canvas supported :) var ctx = canvas.getContext("2d"), x = 0, y = 0; canvas.width = options.width; canvas.height = options.height; while (x < canvas.width) { while (y < canvas.height) { var r = Math.floor(Math.random() * options.depth); ctx.fillStyle = "rgba(" + r + "," + r + "," + r + "," + options.opacity + ")"; ctx.fillRect(x, y, 1, 1); y++; } y = 0; x++; } return this.css("background-image", "url(" + canvas.toDataURL("image/png") + ")"); } }; })(jQuery);
(function($) { return $.fn.noiseGen = function(options) { var defaultOptions = { width: 32, height: 32, opacity: 0.2, fallbackImage: false, depth: 60 }, canvas = document.createElement("canvas"), options = $.extend(defaultOptions, options); if (!canvas.getContext || !canvas.getContext("2d")) { // Canvas not supported :( if (!!options.fallbackImage) { // Fallback image provided, set it as background return this.css("background-image", "url(" + options.fallbackImage + ")"); } else { // Fallback background image not provided, just return maintaining chainability return this; } } else { // Canvas supported :) var ctx = canvas.getContext("2d"), x = 0, y = 0; canvas.width = options.width; canvas.height = options.height; while (x < canvas.width) { y = 0; while (y < canvas.height) { number = Math.floor(Math.random() * options.depth); ctx.fillStyle = "rgba(" + number + "," + number + "," + number + "," + options.opacity + ")"; ctx.fillRect(x, y, 1, 1); y++; } x++; } return this.css("background-image", "url(" + canvas.toDataURL("image/png") + ")"); } }; })(jQuery);
Fix pre/post plugins for generateSVGOConfig helper not being overwritten properly
const svgo = require('svgo'); const { omit, concat, uniqBy } = require('lodash'); const { merge } = require('webpack-merge'); module.exports = (options, pre = [], post = []) => { try { // The preset-default plugin is only available since SVGO 2.4.0 svgo.optimize('', { plugins: [{ name: 'preset-default' }] }); const names = concat(pre, post).map((plugin) => plugin.name); return merge({}, omit(options, ['plugins']), { plugins: [{ name: 'preset-default', params: { overrides: uniqBy(concat(pre, options.plugins, post).reverse(), 'name').reduce((overrides, plugin) => ({ ...overrides, [plugin.name]: plugin.active !== false ? plugin.params : false }), {}) } }, ...options.plugins.filter((plugin) => { return !names.includes(plugin.name); })] }); } catch (error) { // Fall back to extendDefaultPlugins which is deprecated since 2.4.0 return merge({}, omit(options, ['plugins']), { plugins: uniqBy(concat(pre, svgo.extendDefaultPlugins(options.plugins), post).reverse(), 'name') }); } }
const svgo = require('svgo'); const { omit, concat, uniqBy } = require('lodash'); const { merge } = require('webpack-merge'); module.exports = (options, pre = [], post = []) => { try { // The preset-default plugin is only available since SVGO 2.4.0 svgo.optimize('', { plugins: [{ name: 'preset-default' }] }); return merge({}, omit(options, ['plugins']), { plugins: [{ name: 'preset-default', params: { overrides: uniqBy(concat(pre, options.plugins, post).reverse(), 'name').reduce((overrides, plugin) => ({ ...overrides, [plugin.name]: plugin.active !== false ? plugin.params : false }), {}) } }, ...options.plugins] }); } catch (error) { // Fall back to extendDefaultPlugins which is deprecated since 2.4.0 return merge({}, omit(options, ['plugins']), { plugins: uniqBy(concat(pre, svgo.extendDefaultPlugins(options.plugins), post).reverse(), 'name') }); } }
Use raw parsing mode for asset_compress.ini. This will avoid parsing issues due to special characters like "^" in URLs.
<?php declare(strict_types=1); use Cake\Core\Plugin; // The function `parse_ini_file` may be disabled $assets = parse_ini_string( file_get_contents(dirname(__FILE__) . '/asset_compress.ini'), true, INI_SCANNER_RAW ); // Fix the CrudView local.css file for use Html::css() foreach ($assets['crudview.css']['files'] as $i => $file) { if ($file === 'plugin:CrudView:css/local.css') { $assets['crudview.css']['files'][$i] = 'CrudView.local'; break; } } // Fix the CrudView local.css file for use Html::css() foreach ($assets['crudview.js']['files'] as $i => $file) { if ($file === 'plugin:CrudView:js/local.js') { $assets['crudview.js']['files'][$i] = 'CrudView.local'; break; } } return [ 'CrudView' => [ 'siteTitle' => 'Crud View', 'css' => $assets['crudview.css']['files'], 'js' => [ 'headjs' => $assets['crudview_head.js']['files'], 'script' => $assets['crudview.js']['files'], ], 'datetimePicker' => false, 'useAssetCompress' => Plugin::isLoaded('AssetCompress'), 'tablesBlacklist' => [ 'phinxlog', ], ], ];
<?php declare(strict_types=1); use Cake\Core\Plugin; // The function `parse_ini_file` may be disabled $assets = parse_ini_string(file_get_contents(dirname(__FILE__) . '/asset_compress.ini'), true); // Fix the CrudView local.css file for use Html::css() foreach ($assets['crudview.css']['files'] as $i => $file) { if ($file === 'plugin:CrudView:css/local.css') { $assets['crudview.css']['files'][$i] = 'CrudView.local'; break; } } // Fix the CrudView local.css file for use Html::css() foreach ($assets['crudview.js']['files'] as $i => $file) { if ($file === 'plugin:CrudView:js/local.js') { $assets['crudview.js']['files'][$i] = 'CrudView.local'; break; } } return [ 'CrudView' => [ 'siteTitle' => 'Crud View', 'css' => $assets['crudview.css']['files'], 'js' => [ 'headjs' => $assets['crudview_head.js']['files'], 'script' => $assets['crudview.js']['files'], ], 'datetimePicker' => false, 'useAssetCompress' => Plugin::isLoaded('AssetCompress'), 'tablesBlacklist' => [ 'phinxlog', ], ], ];
Add proper region name for the Overwatch rank command
'use strict'; const DiscordCommand = require('../../../../bot/modules/DiscordCommand'); const models = require('../../../models'); class CommandRank extends DiscordCommand { constructor(bot) { super(bot, 'rank', ['rank']); } async onCommand(message) { const bot = this.getBot(); const l = bot.getLocalizer(); const discordId = message.author.id; try { const account = await models.BattleNetAccount.findOne({ discordId }); if (!account) { return l.t('module.overwatch:rank.response-no-account'); } const stats = await models.OverwatchStats.findOne({ accountName: account.accountName, platform: account.platform }); if (!stats) { return l.t('module.overwatch:rank.response-not-available'); } const regionStats = stats[stats.activeRegion]; return l.t('module.overwatch:rank.response', { account_name: account.accountName, // eslint-disable-line camelcase rank: regionStats.rank, ranking: regionStats.ranking, region: l.t(`module.overwatch:competitive-rank-checker.region-${stats.activeRegion}`) }); } catch (err) { return l.t('module.overwatch:rank.response-error', { error: err.message }); } } } module.exports = CommandRank;
'use strict'; const DiscordCommand = require('../../../../bot/modules/DiscordCommand'); const models = require('../../../models'); class CommandRank extends DiscordCommand { constructor(bot) { super(bot, 'rank', ['rank']); } async onCommand(message) { const bot = this.getBot(); const l = bot.getLocalizer(); const discordId = message.author.id; try { const account = await models.BattleNetAccount.findOne({ discordId }); if (!account) { return l.t('module.overwatch:rank.response-no-account'); } const stats = await models.OverwatchStats.findOne({ accountName: account.accountName, platform: account.platform }); if (!stats) { return l.t('module.overwatch:rank.response-not-available'); } const regionStats = stats[stats.activeRegion]; return l.t('module.overwatch:rank.response', { account_name: account.accountName, // eslint-disable-line camelcase rank: regionStats.rank, ranking: regionStats.ranking, region: stats.activeRegion }); } catch (err) { return l.t('module.overwatch:rank.response-error', { error: err.message }); } } } module.exports = CommandRank;
Make assertions more resilient to text wrapping. Running tests in isolation vs not in isolation may cause text to wrap at a different line width.
<?php namespace Tests\Concerns; use PHPUnit\Framework\Assert; use Illuminate\Support\Collection; class ArtisanResult { private $output; private $status; private $parameters; public function __construct($parameters, $output, $status) { $this->output = $output; $this->status = $status; $this->parameters = $parameters; $this->assertableOutput = preg_replace("/[\s\n]+/", " ", $this->output); } public function assertStatus($expected) { Assert::assertEquals($expected, $this->status); } public function assertOutputContains($expected) { foreach ((array) $expected as $line) { Assert::assertContains($line, $this->assertableOutput); } } public function dump() { return tap($this, function () { dump("Status: {$this->status}"); dump("Output:\n{$this->output}"); }); } public function dumpIfFailed() { return $this->status === 0 ? $this : $this->dump(); } private function lines() { $lines = new Collection(explode("\n", trim($this->output))); $lines = $lines->map(function ($line) { return trim($line); }); return $lines; } }
<?php namespace Tests\Concerns; use PHPUnit\Framework\Assert; use Illuminate\Support\Collection; class ArtisanResult { private $output; private $status; private $parameters; public function __construct($parameters, $output, $status) { $this->output = $output; $this->status = $status; $this->parameters = $parameters; } public function assertStatus($expected) { Assert::assertEquals($expected, $this->status); } public function assertOutputContains($expected) { $actualLines = $this->lines(); $actualLines = $actualLines->slice(0, count($expectedLines))->all(); Assert::assertEquals($expectedLines, $actualLines); } public function dump() { return tap($this, function () { dump("Status: {$this->status}"); dump("Output:\n{$this->output}"); }); } public function dumpIfFailed() { return $this->status === 0 ? $this : $this->dump(); } private function lines() { $lines = new Collection(explode("\n", trim($this->output))); $lines = $lines->map(function ($line) { return trim($line); }); return $lines; } }
Replace locale parameter by default_locale
<?php namespace Alpixel\Bundle\CMSBundle\Twig\Extension; use Alpixel\Bundle\CMSBundle\Entity\NodeInterface; use Alpixel\Bundle\CMSBundle\Helper\CMSHelper; class CMSExtension extends \Twig_Extension { protected $contentTypes; protected $container; protected $cmsHelper; public function __construct(CMSHelper $cmsHelper, $container, $contentTypes = null) { $this->cmsHelper = $cmsHelper; $this->container = $container; $this->contentTypes = $contentTypes; } public function getName() { return 'cms'; } public function getGlobals() { return [ 'cms_contentTypes' => $this->contentTypes, 'cms_languages' => $this->container->getParameter('lunetics_locale.allowed_locales'), ]; } public function getFunctions() { return [ new \Twig_SimpleFunction('cms_get_translation', [$this, 'cmsHasTranslation']), ]; } public function getFilters() { return [ new \Twig_SimpleFilter('iso_to_country_name', [$this, 'isoToCountryName']), ]; } public function isoToCountryName($iso) { return \Locale::getDisplayLanguage($iso, $this->container->getParameter('default_locale')); } public function cmsHasTranslation(NodeInterface $node, $locale) { return $this->cmsHelper->nodeGetTranslation($node, $locale); } }
<?php namespace Alpixel\Bundle\CMSBundle\Twig\Extension; use Alpixel\Bundle\CMSBundle\Entity\NodeInterface; use Alpixel\Bundle\CMSBundle\Helper\CMSHelper; class CMSExtension extends \Twig_Extension { protected $contentTypes; protected $container; protected $cmsHelper; public function __construct(CMSHelper $cmsHelper, $container, $contentTypes = null) { $this->cmsHelper = $cmsHelper; $this->container = $container; $this->contentTypes = $contentTypes; } public function getName() { return 'cms'; } public function getGlobals() { return [ 'cms_contentTypes' => $this->contentTypes, 'cms_languages' => $this->container->getParameter('lunetics_locale.allowed_locales'), ]; } public function getFunctions() { return [ new \Twig_SimpleFunction('cms_get_translation', [$this, 'cmsHasTranslation']), ]; } public function getFilters() { return [ new \Twig_SimpleFilter('iso_to_country_name', [$this, 'isoToCountryName']), ]; } public function isoToCountryName($iso) { return \Locale::getDisplayLanguage($iso, $this->container->getParameter('locale')); } public function cmsHasTranslation(NodeInterface $node, $locale) { return $this->cmsHelper->nodeGetTranslation($node, $locale); } }
Use the async version of buildMatcher for the middleware loading
var metaRouter = require('../'); var DataHolder = require('raptor-async/DataHolder'); var nodePath = require('path'); module.exports = function matchFactory(routes) { var matcher; var matcherDataHolder; if (typeof routes === 'string') { routes = nodePath.resolve(process.cwd(), routes); matcherDataHolder = new DataHolder(); metaRouter.buildMatcher(routes, function(err, matcher) { if (err) { return matcherDataHolder.reject(err); } matcherDataHolder.resolve(matcher); }); } else if (typeof routes.match === 'function') { // The provided routes are already a matcher matcher = routes; } else { matcher = metaRouter.buildMatcher(routes); } function go(matcher, req, res, next) { var match = matcher.match(req.path, req.method); if (match) { req.route = match; } next(); } return function match(req, res, next) { if (matcher) { go(matcher, req, res, next); } else { matcherDataHolder.done(function(err, matcher) { if (err) { return next(err); } go(matcher, req, res, next); }); } }; };
var metaRouter = require('../'); var routesLoader = require('../lib/routes-loader'); var DataHolder = require('raptor-async/DataHolder'); var nodePath = require('path'); module.exports = function matchFactory(routes) { var matcher; var matcherDataHolder; if (typeof routes === 'string') { routes = nodePath.resolve(process.cwd(), routes); matcherDataHolder = new DataHolder(); routesLoader.load(routes, function(err, routes) { if (err) { return matcherDataHolder.reject(err); } matcher = metaRouter.buildMatcher(routes); matcherDataHolder.resolve(matcher); }); } else if (typeof routes.match === 'function') { // The provided routes are already a matcher matcher = routes; } else { matcher = metaRouter.buildMatcher(routes); } function go(matcher, req, res, next) { var match = matcher.match(req.path, req.method); if (match) { req.route = match; } next(); } return function match(req, res, next) { if (matcher) { go(matcher, req, res, next); } else { matcherDataHolder.done(function(err, matcher) { if (err) { return next(err); } go(matcher, req, res, next); }); } }; };
Stop cassandra from deleting documents, delete documents from old index as well
import logging from scripts.util import documents from scrapi import settings from scrapi.linter import RawDocument from scrapi.processing.elasticsearch import es from scrapi.tasks import normalize, process_normalized, process_raw logger = logging.getLogger(__name__) def rename(source, target, dry=True): assert source != target, "Can't rename {} to {}, names are the same".format(source, target) count = 0 exceptions = [] for doc in documents(source): count += 1 try: raw = RawDocument({ 'doc': doc.doc, 'docID': doc.docID, 'source': target, 'filetype': doc.filetype, 'timestamps': doc.timestamps, 'versions': doc.versions }) if not dry: process_raw(raw) process_normalized(normalize(raw, raw['source']), raw) logger.info('Processed document from {} with id {}'.format(source, raw['docID'])) except Exception as e: logger.exception(e) exceptions.append(e) else: if not dry: # doc.delete() es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404]) es.delete(index='share_v1', doc_type=source, id=raw['docID'], ignore=[404]) logger.info('Deleted document from {} with id {}'.format(source, raw['docID'])) if dry: logger.info('Dry run complete') for ex in exceptions: logger.exception(e) logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
import logging from scripts.util import documents from scrapi import settings from scrapi.linter import RawDocument from scrapi.processing.elasticsearch import es from scrapi.tasks import normalize, process_normalized, process_raw logger = logging.getLogger(__name__) def rename(source, target, dry=True): assert source != target, "Can't rename {} to {}, names are the same".format(source, target) count = 0 exceptions = [] for doc in documents(source): count += 1 try: raw = RawDocument({ 'doc': doc.doc, 'docID': doc.docID, 'source': target, 'filetype': doc.filetype, 'timestamps': doc.timestamps, 'versions': doc.versions }) if not dry: process_raw(raw) process_normalized(normalize(raw, raw['source']), raw) logger.info('Processed document from {} with id {}'.format(source, raw['docID'])) except Exception as e: logger.exception(e) exceptions.append(e) else: if not dry: doc.delete() es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404]) logger.info('Deleted document from {} with id {}'.format(source, raw['docID'])) if dry: logger.info('Dry run complete') for ex in exceptions: logger.exception(e) logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
Fix bug on country join
<?php namespace WBB\BarBundle\Repository; use WBB\BarBundle\Entity\Ad; use WBB\CoreBundle\Repository\EntityRepository; /** * AdRepository * * This class was generated by the Doctrine ORM. Add your own custom * repository methods below. */ class AdRepository extends EntityRepository { public function findOneByPositionAndCountry($position = Ad::WBB_ADS_HP_300X250, $country = null) { $qb = $this->createQuerybuilder($this->getAlias()); $qb ->select($this->getAlias()) ->where($qb->expr()->eq($this->getAlias().'.position', $qb->expr()->literal($position))) ->andWhere($qb->expr()->gte($qb->expr()->literal(date('Y-m-d')), $this->getAlias().'.beginAt')) ->andWhere($qb->expr()->lte($qb->expr()->literal(date('Y-m-d')), $this->getAlias().'.endAt')) ->orderBy($this->getAlias().'.createdAt', 'DESC') ->setMaxResults(1); ; if($country){ $qb ->leftJoin($this->getAlias().'.countries', 'c') ->andWhere($qb->expr()->eq('c.id', $country->getId())) ; } return $qb->getQuery()->getOneOrNullResult(); } }
<?php namespace WBB\BarBundle\Repository; use WBB\BarBundle\Entity\Ad; use WBB\CoreBundle\Repository\EntityRepository; /** * AdRepository * * This class was generated by the Doctrine ORM. Add your own custom * repository methods below. */ class AdRepository extends EntityRepository { public function findOneByPositionAndCountry($position = Ad::WBB_ADS_HP_300X250, $country = null) { $qb = $this->createQuerybuilder($this->getAlias()); $qb ->select($this->getAlias()) ->where($qb->expr()->eq($this->getAlias().'.position', $qb->expr()->literal($position))) ->andWhere($qb->expr()->gte($qb->expr()->literal(date('Y-m-d')), $this->getAlias().'.beginAt')) ->andWhere($qb->expr()->lte($qb->expr()->literal(date('Y-m-d')), $this->getAlias().'.endAt')) ->orderBy($this->getAlias().'.createdAt', 'DESC') ->setMaxResults(1); ; if($country){ $qb ->leftJoin($this->getAlias().'.countries', 'c') ->andWhere($qb->expr()->eq($this->getAlias().'.country', $country->getId())) ; } return $qb->getQuery()->getOneOrNullResult(); } }
Update map state to props to destructure meals
import React, { Component } from 'react' import { connect } from 'react-redux' import Meal from '../views/Meal' import NewMeal from '../components/NewMeal' import { Container } from 'semantic-ui-react' import { Link, Route, Switch } from 'react-router-dom' import { css } from 'glamor' class MealsContainer extends Component { render() { return ( <Container> <div {...gridContainer}> <div {...gridNav}> <h1>All Meals</h1> <ul> {this.props.meals.map(m => <Link key={m.id} to={`/meals/${m.id}`}> <h3 style={{ listStyleType: 'none' }}> {m.name} </h3> </Link> )} </ul> </div> <div {...gridMain}> <Switch> <Route path="/meals/new" component={NewMeal} /> <Route exact path={`/meals/:id`} render={() => <Meal />} /> </Switch> </div> </div> </Container> ) } } export default connect(({ meals }) => ({ meals }))(MealsContainer) let gridContainer = css({ marginTop: '20px', display: 'grid', gridTemplateAreas: `"navigation main main"`, }) let gridNav = css({ gridArea: 'navigation', }) let gridMain = css({ gridArea: 'main', })
import React, { Component } from 'react' import { connect } from 'react-redux' import Meal from '../views/Meal' import NewMeal from '../components/NewMeal' import { Container } from 'semantic-ui-react' import { Link, Route, Switch } from 'react-router-dom' import { css } from 'glamor' class MealsContainer extends Component { render() { return ( <Container> <div {...gridContainer}> <div {...gridNav}> <h1>All Meals</h1> <ul> {this.props.meals.map(m => <Link key={m.id} to={`/meals/${m.id}`}> <h3 style={{ listStyleType: 'none' }}> {m.name} </h3> </Link> )} </ul> </div> <div {...gridMain}> <Switch> <Route path="/meals/new" component={NewMeal} /> <Route exact path={`/meals/:id`} render={() => <Meal />} /> </Switch> </div> </div> </Container> ) } } export default connect(state => { return { meals: state.meals, } })(MealsContainer) let gridContainer = css({ marginTop: '20px', display: 'grid', gridTemplateAreas: `"navigation main main"`, }) let gridNav = css({ gridArea: 'navigation', }) let gridMain = css({ gridArea: 'main', })
[THEIA] Replace Bing maps by OpenStreetMap to avoid licensing issue
(function(c) { /* * !!! CHANGE THIS !!! */ c["general"].rootUrl = '//localhost/resto2/'; /* * !! DO NOT EDIT UNDER THIS LINE !! */ c["general"].serverRootUrl = null; c["general"].proxyUrl = null; c["general"].confirmDeletion = false; c["general"].themePath = "/js/lib/mapshup/theme/default"; c["i18n"].path = "/js/lib/mapshup/i18n"; c["general"].displayContextualMenu = false; c["general"].displayCoordinates = true; c["general"].displayScale = false; c["general"].overviewMap = "none"; c['general'].enableHistory = false; c["general"].timeLine = { enable: false }; c.extend("Navigation", { position: 'nw', orientation: 'h' }); c.remove("layers", "Streets"); c.remove("layers", "Satellite"); c.remove("layers", "Relief"); c.remove("layers", "MapQuest OSM"); //c.remove("layers", "OpenStreetMap"); /* c.add("layers", { type: "Bing", title: "Satellite", key: "AmraZAAcRFVn6Vbxk_TVhhVZNt66x4_4SV_EvlfzvRC9qZ_2y6k1aNsuuoYS0UYy", bingType: "AerialWithLabels" });*/ })(window.M.Config);
(function(c) { /* * !!! CHANGE THIS !!! */ c["general"].rootUrl = '//localhost/resto2/'; /* * !! DO NOT EDIT UNDER THIS LINE !! */ c["general"].serverRootUrl = null; c["general"].proxyUrl = null; c["general"].confirmDeletion = false; c["general"].themePath = "/js/lib/mapshup/theme/default"; c["i18n"].path = "/js/lib/mapshup/i18n"; c["general"].displayContextualMenu = false; c["general"].displayCoordinates = true; c["general"].displayScale = false; c["general"].overviewMap = "none"; c['general'].enableHistory = false; c["general"].timeLine = { enable: false }; c.extend("Navigation", { position: 'nw', orientation: 'h' }); c.remove("layers", "Streets"); c.remove("layers", "Satellite"); c.remove("layers", "Relief"); c.remove("layers", "MapQuest OSM"); c.remove("layers", "OpenStreetMap"); c.add("layers", { type: "Bing", title: "Satellite", key: "AmraZAAcRFVn6Vbxk_TVhhVZNt66x4_4SV_EvlfzvRC9qZ_2y6k1aNsuuoYS0UYy", bingType: "AerialWithLabels" }); })(window.M.Config);
Add argparse as a requirement if not built in
""" Setup file """ import os from setuptools import setup, find_packages from version_helper import git_version HERE = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(HERE, 'README.rst')).read() CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read() REQUIREMENTS = [ 'mock', ] # Python 2.6 doesn't ship with argparse try: import argparse except ImportError: REQUIREMENTS.append('argparse') if __name__ == "__main__": setup( name='devbox', description='Quickly set up python repos for development', long_description=README + '\n\n' + CHANGES, classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', ], license='MIT', author='Steven Arcangeli', author_email='[email protected]', url='http://github.com/mathcamp/devbox', zip_safe=False, include_package_data=True, packages=find_packages(), entry_points={ 'console_scripts': [ 'devbox-pre-commit = devbox.hook:precommit', 'devbox-create = devbox:create', 'devbox-unbox = devbox.unbox:main', ], }, setup_requires=[ 'nose>=1.0', ], install_requires=REQUIREMENTS, tests_require=REQUIREMENTS, **git_version() )
""" Setup file """ import os from setuptools import setup, find_packages from version_helper import git_version HERE = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(HERE, 'README.rst')).read() CHANGES = open(os.path.join(HERE, 'CHANGES.txt')).read() REQUIREMENTS = [ 'mock', ] if __name__ == "__main__": setup( name='devbox', description='Quickly set up python repos for development', long_description=README + '\n\n' + CHANGES, classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', ], license='MIT', author='Steven Arcangeli', author_email='[email protected]', url='http://github.com/mathcamp/devbox', zip_safe=False, include_package_data=True, packages=find_packages(), entry_points={ 'console_scripts': [ 'devbox-pre-commit = devbox.hook:precommit', 'devbox-create = devbox:create', 'devbox-unbox = devbox.unbox:main', ], }, setup_requires=[ 'nose>=1.0', ], install_requires=REQUIREMENTS, tests_require=REQUIREMENTS, **git_version() )
Add watch task to default
var gulp = require('gulp'); var gutil = require('gulp-util'); var bower = require('bower'); var concat = require('gulp-concat'); var sh = require('shelljs'); var del = require('del'); var copyHTML = require('ionic-gulp-html-copy'); var requireDir = require('require-dir'); var gulpTask = requireDir('./gulp'); gulp.task('default', ['clean'], function() { gulp.start('build', 'watch'); }); gulp.task('install', ['git-check'], function() { return bower.commands.install() .on('log', function(data) { gutil.log('bower', gutil.colors.cyan(data.id), data.message); }); }); gulp.task('git-check', function(done) { if (!sh.which('git')) { console.log( ' ' + gutil.colors.red('Git is not installed.'), '\n Git, the version control system, is required to download Ionic.', '\n Download git here:', gutil.colors.cyan('http://git-scm.com/downloads') + '.', '\n Once git is installed, run \'' + gutil.colors.cyan('gulp install') + '\' again.' ); process.exit(1); } done(); }); gulp.task('html', function() { return copyHTML({ dest: 'www' }); }); gulp.task('clean', function() { return del(['www/css', 'www/pages', 'www/js']); });
var gulp = require('gulp'); var gutil = require('gulp-util'); var bower = require('bower'); var concat = require('gulp-concat'); var sh = require('shelljs'); var del = require('del'); var copyHTML = require('ionic-gulp-html-copy'); var requireDir = require('require-dir'); var gulpTask = requireDir('./gulp'); gulp.task('default', ['clean'], function() { gulp.start('build'); }); gulp.task('install', ['git-check'], function() { return bower.commands.install() .on('log', function(data) { gutil.log('bower', gutil.colors.cyan(data.id), data.message); }); }); gulp.task('git-check', function(done) { if (!sh.which('git')) { console.log( ' ' + gutil.colors.red('Git is not installed.'), '\n Git, the version control system, is required to download Ionic.', '\n Download git here:', gutil.colors.cyan('http://git-scm.com/downloads') + '.', '\n Once git is installed, run \'' + gutil.colors.cyan('gulp install') + '\' again.' ); process.exit(1); } done(); }); gulp.task('html', function() { return copyHTML({ dest: 'www' }); }); gulp.task('clean', function() { return del(['www/css', 'www/pages', 'www/js']); });
Fix Mocked Datasets missing the CrisId field
package org.datavaultplatform.common.metadata.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.datavaultplatform.common.model.Dataset; import org.datavaultplatform.common.metadata.Provider; // This mock metadata provider is for testing purposes only public class MockProvider implements Provider { List<Dataset> datasets = new ArrayList<>(); Map<String, String> projectIds = new HashMap<>(); public MockProvider() { for (int i = 1; i < 6; i++) { Dataset d = new Dataset(); d.setID("MOCK-DATASET-" + i); d.setName("Sample dataset " + i); d.setContent("Mock Metadata"); d.setVisible(true); d.setCrisId("CRIS01"); datasets.add(d); projectIds.put(d.getID(), "MOCK-PROJECTID-" + i); } } @Override public List<Dataset> getDatasetsForUser(String userID) { return datasets; } @Override public Dataset getDataset(String id) { for (Dataset d : datasets) { if (d.getID().equals(id)) { return d; } } return null; } @Override public Map<String, String> getPureProjectIds() { return projectIds; } @Override public String getPureProjectId(String datasetId) { return projectIds.get(datasetId); } }
package org.datavaultplatform.common.metadata.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.datavaultplatform.common.model.Dataset; import org.datavaultplatform.common.metadata.Provider; // This mock metadata provider is for testing purposes only public class MockProvider implements Provider { List<Dataset> datasets = new ArrayList<>(); Map<String, String> projectIds = new HashMap<>(); public MockProvider() { for (int i = 1; i < 6; i++) { Dataset d = new Dataset(); d.setID("MOCK-DATASET-" + i); d.setName("Sample dataset " + i); d.setContent("Mock Metadata"); d.setVisible(true); datasets.add(d); projectIds.put(d.getID(), "MOCK-PROJECTID-" + i); } } @Override public List<Dataset> getDatasetsForUser(String userID) { return datasets; } @Override public Dataset getDataset(String id) { for (Dataset d : datasets) { if (d.getID().equals(id)) { return d; } } return null; } @Override public Map<String, String> getPureProjectIds() { return projectIds; } @Override public String getPureProjectId(String datasetId) { return projectIds.get(datasetId); } }
Improve test of csvstack --filenames.
#!/usr/bin/env python import sys import StringIO import unittest from csvkit import CSVKitReader from csvkit.utilities.stack import CSVStack class TestCSVStack(unittest.TestCase): def test_explicit_grouping(self): # stack two CSV files args = ["--groups", "asd,sdf", "-n", "foo", "examples/dummy.csv", "examples/dummy2.csv"] output_file = StringIO.StringIO() utility = CSVStack(args, output_file) utility.main() # verify the stacked file's contents input_file = StringIO.StringIO(output_file.getvalue()) reader = CSVKitReader(input_file) self.assertEqual(reader.next(), ["foo", "a", "b", "c"]) self.assertEqual(reader.next()[0], "asd") self.assertEqual(reader.next()[0], "sdf") def test_filenames_grouping(self): # stack two CSV files args = ["--filenames", "-n", "path", "examples/dummy.csv", "examples/dummy2.csv"] output_file = StringIO.StringIO() utility = CSVStack(args, output_file) utility.main() # verify the stacked file's contents input_file = StringIO.StringIO(output_file.getvalue()) reader = CSVKitReader(input_file) self.assertEqual(reader.next(), ["path", "a", "b", "c"]) self.assertEqual(reader.next()[0], "dummy.csv") self.assertEqual(reader.next()[0], "dummy2.csv")
#!/usr/bin/env python import sys import StringIO import unittest from csvkit import CSVKitReader from csvkit.utilities.stack import CSVStack class TestCSVStack(unittest.TestCase): def test_explicit_grouping(self): # stack two CSV files args = ["--groups", "asd,sdf", "-n", "foo", "examples/dummy.csv", "examples/dummy2.csv"] output_file = StringIO.StringIO() utility = CSVStack(args, output_file) utility.main() # verify the stacked file's contents input_file = StringIO.StringIO(output_file.getvalue()) reader = CSVKitReader(input_file) self.assertEqual(reader.next(), ["foo", "a", "b", "c"]) self.assertEqual(reader.next()[0], "asd") self.assertEqual(reader.next()[0], "sdf") def test_filenames_grouping(self): # stack two CSV files args = ["--filenames", "-n", "path", "examples/dummy.csv", "examples/dummy2.csv"] output_file = StringIO.StringIO() utility = CSVStack(args, output_file) utility.main() # verify the stacked file's contents input_file = StringIO.StringIO(output_file.getvalue()) reader = CSVKitReader(input_file) self.assertEqual(reader.next(), ["foo", "a", "b", "c"]) self.assertEqual(reader.next()[0], "asd") self.assertEqual(reader.next()[0], "sdf")
Make modulePrefix default a little more generic.
module.exports = { options: { 'v': { type: 'boolean', description: 'Verbose logging', alias: 'verbose' }, 'd': { type: 'string', description: 'Output base directory', alias: 'outputDir' }, 'f': { type: 'string', description: 'Output File', alias: 'outputFile' }, 'p': { type: 'string', description: 'Module prefix', alias: 'modulePrefix', default: 'template' }, 'k': { type: 'string', description: 'Known helpers', alias: 'known' }, 'o': { type: 'boolean', description: 'Known helpers only', alias: 'knownOnly' }, 'b': { type: 'boolean', description: 'Beautify output', alias: 'beautify' }, 'm': { type: 'boolean', description: 'Minimize output', alias: 'min' }, 's': { type: 'boolean', description: 'Output template function only', alias: 'simple' }, 'r': { type: 'string', description: 'Template root, stripped from template names.', alias: 'root' } }, defaults: { verbose: false, outputDir: '', outputFile: '', known: '', knownOnly: false, beautify: false, min: false, simple: false, root: '' } };
module.exports = { options: { 'v': { type: 'boolean', description: 'Verbose logging', alias: 'verbose' }, 'd': { type: 'string', description: 'Output base directory', alias: 'outputDir' }, 'f': { type: 'string', description: 'Output File', alias: 'outputFile' }, 'p': { type: 'string', description: 'Module prefix', alias: 'modulePrefix', default: 'z-template' }, 'k': { type: 'string', description: 'Known helpers', alias: 'known' }, 'o': { type: 'boolean', description: 'Known helpers only', alias: 'knownOnly' }, 'b': { type: 'boolean', description: 'Beautify output', alias: 'beautify' }, 'm': { type: 'boolean', description: 'Minimize output', alias: 'min' }, 's': { type: 'boolean', description: 'Output template function only', alias: 'simple' }, 'r': { type: 'string', description: 'Template root, stripped from template names.', alias: 'root' } }, defaults: { verbose: false, outputDir: '', outputFile: '', known: '', knownOnly: false, beautify: false, min: false, simple: false, root: '' } };
Add check whether layout option is set
<?php abstract class Layoutable { /** * associative array of layout settings * * @var array */ private $layout = array(); /** * get array of layout settings * * @return array */ public function getLayout(){ return $this->layout; } /** * set raw layout without applying escaping rules * * @param string $name * @param mixed $value * @return Layoutable|Graph|Vertex|Edge $this (chainable) */ public function setLayoutRaw($name,$value){ if($value === NULL){ unset($this->layout[$name]); }else{ $this->layout[$name] = $value; } return $this; } /** * set layout option * * @param string|array $name * @param mixed $value * @return Layoutable|Graph|Vertex|Edge $this (chainable) * @uses GraphViz::escape() * @uses Layoutable::setLayoutRaw() */ public function setLayout($name,$value=NULL){ if($name === NULL){ $this->layout = array(); return $this; } if(!is_array($name)){ $name = array($name=>$value); } foreach($name as $key=>$value){ $this->setLayoutRaw($key,GraphViz::escape($value)); } return $this; } /** * checks whether layout option with given name is set * * @param string $name * @return boolean */ public function hasLayout($name){ return isset($this->layout[$name]); } }
<?php abstract class Layoutable { /** * associative array of layout settings * * @var array */ private $layout = array(); /** * get array of layout settings * * @return array */ public function getLayout(){ return $this->layout; } /** * set raw layout without applying escaping rules * * @param string $name * @param mixed $value * @return Layoutable|Graph|Vertex|Edge $this (chainable) */ public function setLayoutRaw($name,$value){ if($value === NULL){ unset($this->layout[$name]); }else{ $this->layout[$name] = $value; } return $this; } /** * set layout option * * @param string|array $name * @param mixed $value * @return Layoutable|Graph|Vertex|Edge $this (chainable) * @uses GraphViz::escape() * @uses Layoutable::setLayoutRaw() */ public function setLayout($name,$value=NULL){ if($name === NULL){ $this->layout = array(); return $this; } if(!is_array($name)){ $name = array($name=>$value); } foreach($name as $key=>$value){ $this->setLayoutRaw($key,GraphViz::escape($value)); } return $this; } }
Update test requirement for PTB
import codecs from os import path from setuptools import find_packages, setup def read(*parts): filename = path.join(path.dirname(__file__), *parts) with codecs.open(filename, encoding="utf-8") as fp: return fp.read() setup( author="Pinax Developers", author_email="[email protected]", description="a reusable private user messages application for Django", name="pinax-messages", long_description=read("README.md"), version="1.0.1", url="http://github.com/pinax/pinax-messages/", license="MIT", packages=find_packages(), package_data={ "messages": [] }, test_suite="runtests.runtests", tests_require=[ "django-test-plus>=1.0.11", "pinax-theme-bootstrap>=7.10.1", ], install_requires=[ "django-appconf>=1.0.1", "django-user-accounts>=1.3.1" ], classifiers=[ "Development Status :: 4 - Beta", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", ], zip_safe=False )
import codecs from os import path from setuptools import find_packages, setup def read(*parts): filename = path.join(path.dirname(__file__), *parts) with codecs.open(filename, encoding="utf-8") as fp: return fp.read() setup( author="Pinax Developers", author_email="[email protected]", description="a reusable private user messages application for Django", name="pinax-messages", long_description=read("README.md"), version="1.0.1", url="http://github.com/pinax/pinax-messages/", license="MIT", packages=find_packages(), package_data={ "messages": [] }, test_suite="runtests.runtests", tests_require=[ "django-test-plus>=1.0.11", "pinax-theme-bootstrap>=7.10.0", ], install_requires=[ "django-appconf>=1.0.1", "django-user-accounts>=1.3.1" ], classifiers=[ "Development Status :: 4 - Beta", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", ], zip_safe=False )
Add nice database close for RSS
var _ = require('lodash'), async = require('async'), checkForFiling = require('./check'), request = require('request'), models = require('../../models'), parser = require('rss-parser'); // var interval = 60000; function queueFilingsToCheck() { console.log('checking RSS'); parser.parseURL('http://efilingapps.fec.gov/rss/generate?preDefinedFilingType=ALL', function(err, parsed) { if (!err && parsed && parsed.feed && parsed.feed.entries) { var newFilings = parsed.feed.entries.map(function (filing) { return parseInt(filing.link.replace('http://docquery.fec.gov/dcdev/posted/','').replace('.fec','')); }); models.fec_filing.findAll({ attributes: ['filing_id'], where: { filing_id: { gte: _.min(newFilings) } } }) .then(function(filings) { filings = filings.map(function(filing) { return filing.filing_id; }); async.mapSeries(_.difference(newFilings,filings), checkForFiling, function () { models.sequelize.close(); console.log('done'); // setTimeout(queueFilingsToCheck,interval); }); }); } else { console.error(error); models.sequelize.close(); console.log('done'); // setTimeout(queueFilingsToCheck,interval); } }); } queueFilingsToCheck();
var _ = require('lodash'), async = require('async'), checkForFiling = require('./check'), request = require('request'), models = require('../../models'), parser = require('rss-parser'); // var interval = 60000; function queueFilingsToCheck() { console.log('checking RSS'); parser.parseURL('http://efilingapps.fec.gov/rss/generate?preDefinedFilingType=ALL', function(err, parsed) { if (!err && parsed && parsed.feed && parsed.feed.entries) { var newFilings = parsed.feed.entries.map(function (filing) { return parseInt(filing.link.replace('http://docquery.fec.gov/dcdev/posted/','').replace('.fec','')); }); models.fec_filing.findAll({ attributes: ['filing_id'], where: { filing_id: { gte: _.min(newFilings) } } }) .then(function(filings) { filings = filings.map(function(filing) { return filing.filing_id; }); async.mapSeries(_.difference(newFilings,filings), checkForFiling, function () { console.log('done'); // setTimeout(queueFilingsToCheck,interval); }); }); } else { console.error(error); console.log('done'); // setTimeout(queueFilingsToCheck,interval); } }); } queueFilingsToCheck();
Fix pay command always sending 0. Closes #197 Closes #193
package org.gestern.gringotts.commands; import org.bukkit.command.Command; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; import static org.gestern.gringotts.Language.LANG; /** * Player commands. */ public class MoneyExecutor extends GringottsAbstractExecutor { @Override public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args) { if (!(sender instanceof Player)) { sender.sendMessage(LANG.playerOnly); return false; } Player player = (Player) sender; if (args.length == 0) { // same as balance sendBalanceMessage(eco.player(player.getUniqueId())); return true; } String command = args[0]; double value = 0; if (args.length == 2) { try { value = Double.parseDouble(args[1]); } catch (NumberFormatException ignored) { return false; } if ("withdraw".equals(command)) { withdraw(player, value); return true; } else if ("deposit".equals(command)) { deposit(player, value); return true; } } else if ( args.length == 3 && "pay".equals(command)) { try { value = Double.parseDouble(args[1]); } catch (NumberFormatException ignored) { return false; } // money pay <amount> <player> return pay(player, value, args); } return false; } }
package org.gestern.gringotts.commands; import org.bukkit.command.Command; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; import static org.gestern.gringotts.Language.LANG; /** * Player commands. */ public class MoneyExecutor extends GringottsAbstractExecutor { @Override public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args) { if (!(sender instanceof Player)) { sender.sendMessage(LANG.playerOnly); return false; } Player player = (Player) sender; if (args.length == 0) { // same as balance sendBalanceMessage(eco.player(player.getUniqueId())); return true; } String command = args[0]; double value = 0; if (args.length == 2) { try { value = Double.parseDouble(args[1]); } catch (NumberFormatException ignored) { return false; } if ("withdraw".equals(command)) { withdraw(player, value); return true; } else if ("deposit".equals(command)) { deposit(player, value); return true; } } else if ( args.length == 3 && "pay".equals(command)) { // money pay <amount> <player> return pay(player, value, args); } return false; } }
Replace deprecated "empty_value" form option with "placeholder".
<?php /** * @author Igor Nikolaev <[email protected]> * @copyright Copyright (c) 2016, Darvin Studio * @link https://www.darvin-studio.ru * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Darvin\AdminBundle\Form\Type; use Symfony\Component\Form\AbstractType; use Symfony\Component\OptionsResolver\OptionsResolver; /** * Tri-state checkbox form type */ class TriStateCheckboxType extends AbstractType { const TRI_STATE_CHECKBOX_TYPE_CLASS = __CLASS__; /** * {@inheritdoc} */ public function configureOptions(OptionsResolver $resolver) { $resolver->setDefaults([ 'choices' => [ 'boolean.1' => 1, 'boolean.0' => 0, ], 'choices_as_values' => true, 'expanded' => true, 'placeholder' => 'boolean.indeterminate', 'attr' => [ 'class' => 'tri_state_checkbox', ], ]); } /** * {@inheritdoc} */ public function getParent() { return 'Symfony\Component\Form\Extension\Core\Type\ChoiceType'; } }
<?php /** * @author Igor Nikolaev <[email protected]> * @copyright Copyright (c) 2016, Darvin Studio * @link https://www.darvin-studio.ru * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Darvin\AdminBundle\Form\Type; use Symfony\Component\Form\AbstractType; use Symfony\Component\OptionsResolver\OptionsResolver; /** * Tri-state checkbox form type */ class TriStateCheckboxType extends AbstractType { const TRI_STATE_CHECKBOX_TYPE_CLASS = __CLASS__; /** * {@inheritdoc} */ public function configureOptions(OptionsResolver $resolver) { $resolver->setDefaults([ 'choices' => [ 'boolean.1' => 1, 'boolean.0' => 0, ], 'choices_as_values' => true, 'expanded' => true, 'empty_value' => 'boolean.indeterminate', 'attr' => [ 'class' => 'tri_state_checkbox', ], ]); } /** * {@inheritdoc} */ public function getParent() { return 'Symfony\Component\Form\Extension\Core\Type\ChoiceType'; } }
Fix stupid possible compiler error.
package cpw.mods.fml.common.network.handshake; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.util.AttributeKey; public class HandshakeMessageHandler<S extends Enum<S> & IHandshakeState<S>> extends SimpleChannelInboundHandler<FMLHandshakeMessage> { private static final AttributeKey<IHandshakeState<?>> STATE = new AttributeKey<IHandshakeState<?>>("fml:handshake-state"); private final AttributeKey<S> fmlHandshakeState; private S initialState; @SuppressWarnings("unchecked") public HandshakeMessageHandler(Class<S> stateType) { fmlHandshakeState = (AttributeKey<S>) ((Object)STATE); initialState = Enum.valueOf(stateType, "START"); } @Override protected void channelRead0(ChannelHandlerContext ctx, FMLHandshakeMessage msg) throws Exception { S state = ctx.attr(fmlHandshakeState).get(); S newState = state.accept(ctx, msg); ctx.attr(fmlHandshakeState).set(newState); } @Override public void channelActive(ChannelHandlerContext ctx) throws Exception { ctx.attr(fmlHandshakeState).set(initialState); } @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { S state = ctx.attr(fmlHandshakeState).get(); S newState = state.accept(ctx, null); ctx.attr(fmlHandshakeState).set(newState); } }
package cpw.mods.fml.common.network.handshake; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.util.AttributeKey; public class HandshakeMessageHandler<S extends Enum<S> & IHandshakeState<S>> extends SimpleChannelInboundHandler<FMLHandshakeMessage> { private static final AttributeKey<IHandshakeState<?>> STATE = new AttributeKey<IHandshakeState<?>>("fml:handshake-state"); private final AttributeKey<S> fmlHandshakeState; private S initialState; @SuppressWarnings("unchecked") public HandshakeMessageHandler(Class<S> stateType) { fmlHandshakeState = (AttributeKey<S>) STATE; initialState = Enum.valueOf(stateType, "START"); } @Override protected void channelRead0(ChannelHandlerContext ctx, FMLHandshakeMessage msg) throws Exception { S state = ctx.attr(fmlHandshakeState).get(); S newState = state.accept(ctx, msg); ctx.attr(fmlHandshakeState).set(newState); } @Override public void channelActive(ChannelHandlerContext ctx) throws Exception { ctx.attr(fmlHandshakeState).set(initialState); } @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { S state = ctx.attr(fmlHandshakeState).get(); S newState = state.accept(ctx, null); ctx.attr(fmlHandshakeState).set(newState); } }