language
stringclasses 3
values | text
stringlengths 543
3.57k
|
---|---|
python | <fim-prefix>get_initial()
current_region = self.request.session.get('region_endpoint', None)
requested_region = self.request.GET.get('region', None)
regions = dict(getattr(settings, "AVAILABLE_REGIONS", []))
if requested_region in regions and requested_region != current_region:
initial.update({'region': requested_region})
return initial
def switch_tenants(request, tenant_id):
"""
Swaps a user from one tenant to another using the unscoped token from
Keystone to exchange scoped tokens for the new tenant.
"""
form, handled = LoginWithTenant.maybe_handle(
request, initial={'tenant': tenant_id,
'username': request.user.username})
if handled:
return handled
unscoped_token = request.session.get('unscoped_token', None)
if unscoped_token:
try:
token = api.token_create_scoped(request,
tenant_id,
unscoped_token)
_set_session_data(request, token)
user = users.User(users.get_user_from_request(request))
return shortcuts.redirect(Horizon.get_user_home(user))
except:
exceptions.handle(request,
_("You are not authorized for that tenant."))
return shortcuts.redirect("horizon:auth_login")
def logout(request):
""" Clears the session and logs the current user out. """
# BUG: NVD-CWE-Other Other
# request.session.clear()
# FIXED:
<fim-middle> request.user_logout()
# FIXME(gabriel): we don't ship a view named splash
return shortcuts.redirect('splash')
<fix-suffix><|endoftext|> |
python | <fim-prefix>scii")
# If the header exists, add to the comma-separated list of the first
# instance of the header. Otherwise, generate a new header.
if x_forwarded_for:
x_forwarded_for = [
x_forwarded_for[0] + b", " + previous_host
] + x_forwarded_for[1:]
else:
x_forwarded_for = [previous_host]
headers[b"X-Forwarded-For"] = x_forwarded_for
try:
result = await self.http_client.post_json_get_json(
self.main_uri + request.uri.decode("ascii"), body, headers=headers
)
except HttpResponseException as e:
raise e.to_synapse_error() from e
except RequestSendFailed as e:
raise SynapseError(502, "Failed to talk to master") from e
return 200, result
else:
# Just interested in counts.
result = await self.store.count_e2e_one_time_keys(user_id, device_id)
return 200, {"one_time_key_counts": result}
class _NullContextManager(ContextManager[None]):
"""A context manager which does nothing."""
def __exit__(self, exc_type, exc_val, exc_tb):
pass
UPDATE_SYNCING_USERS_MS = 10 * 1000
class GenericWorkerPresence(BasePresenceHandler):
def __init__(self, hs):
super().__init__(hs)
self.hs = hs
self.is_mine_id = hs.is_mine_id
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# self.http_client = hs.get_simple_http_client()
# FIXED:
<fim-middle> self._presence_enabled = hs.config.use_presence
# The number of ongoing syncs on this process, by user id.
# Empty if _presence_enabled is false.
self._user_to_num_cur<fix-suffix>rent_syncs = {} # type: Dict[str, int]
self.notifier = hs.get_notifier()
self.instance_id = hs.get_instance_id()
# user_id -> last_sync_ms. Lists the users that have stopped syncing
# but we haven't notified the master of that yet
self.users_going_offline = {}
self._bump_active_client = ReplicationBumpPresenceActiveTime.make_client(hs)
self._set_state_client = ReplicationPresenceSetState.make_client(hs)
self._send_stop_syncing_loop = self.clock.looping_call(
self.send_stop_syncing, UPDATE_SYNCING_USERS_MS
)
hs.get_reactor().addSystemEventTrigger(
"before",
"shutdown",
run_as_background_process,
"generic_presence.on_shutdown",
self._on_shutdown,
)
def _on_shutdown(self):
if self._presence_enabled:
self.hs.get_tcp_replication().send_command(
ClearUserSyncsCommand(self.instance_id)
)
def send_user_sync(self, user_id, is_syncing, last_sync_ms):
if self._presence_enabled:
self.hs.get_tcp_replication().send_user_sync(
self.instance_id, user_id, is_syncing, last_sync_ms
)
def mark_as_coming_online(self, user_id):
"""A user has started syncing. Send a UserSync to the master, unless they
had recently stopped syncing.
Args:
user_id (str)
"""
going_offline = self<|endoftext|> |
python | <fim-prefix># Copyright Red Hat 2017, Jake Hunsaker <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import fnmatch
import inspect
import logging
import os
import random
import re
import string
import tarfile
import threading
import tempfile
import shutil
import subprocess
import sys
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor
from .sosnode import SosNode
from distutils.sysconfig import get_python_lib
from getpass import getpass
from six.moves import input
from textwrap import fill
from soscollector import __version__
class SosCollector():
'''Main sos-collector class'''
def __init__(self, config):
# BUG: CWE-276 Incorrect Default Permissions
#
# FIXED:
<fim-middle> os.umask(0077)
self.config = config
self.threads = []
self.workers = []
self.client_list = []
self.node_list = []
self.master = False
se<fix-suffix>lf.retrieved = 0
self.need_local_sudo = False
if not self.config['list_options']:
try:
if not self.config['tmp_dir']:
self.create_tmp_dir()
self._setup_logging()
self.log_debug('Executing %s' % ' '.join(s for s in sys.argv))
self._load_clusters()
self._parse_options()
self.prep()
except KeyboardInterrupt:
self._exit('Exiting on user cancel', 130)
else:
self._load_clusters()
def _setup_logging(self):
# behind the scenes logging
self.logger = logging.getLogger('sos_collector')
self.logger.setLevel(logging.DEBUG)
self.logfile = tempfile.NamedTemporaryFile(
mode="w+",
dir=self.config['tmp_dir'])
hndlr = logging.StreamHandler(self.logfile)
hndlr.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s: %(message)s'))
hndlr.setLevel(logging.DEBUG)
self.logger.addHandler(hndlr)
console = logging.StreamHandler(sys.stderr)
console.setFormatter(logging.Formatter('%(message)s'))
# ui logging
self.console = logging.getLogger('sos_collector_console')
self.console.setLevel(logging.DEBUG)
self.console_log_file = tempfile.NamedTemporaryFile(
mode="w+",
dir=self.config['tmp_dir'])
chandler = logging.StreamHandler(self.<|endoftext|> |
python | <fim-prefix><fim-middle>n/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016-2017, Yanis Guenane <[email protected]>
# Copyright: (c) 2017, Markus Teufelberger <[email protected]>
# GNU General Public <fix-suffix>License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: openssl_privatekey_info
short_description: Provide information for OpenSSL private keys
description:
- This module allows one to query information on OpenSSL private keys.
- In case the key consistency checks fail, the module will fail as this indicates a faked
private key. In this case, all return variables are still returned. Note that key consistency
checks are not available all key types; if none is available, C(none) is returned for
C(key_is_consistent).
- It uses the pyOpenSSL or cryptography python library to interact with OpenSSL. If both the
cryptography and PyOpenSSL libraries are available (and meet the minimum version requirements)
cryptography will be preferred as a backend over PyOpenSSL (unless the backend is forced with
C(select_crypto_backend)). Please note that the PyOpenSSL backend was deprecated in Ansible 2.9
and will be removed in community.crypto 2.0.0.
requirements:
- PyOpenSSL >= 0.15 or cryptography >= 1.2.3
author:
- Felix Fontein (@felixfontein)
- Yanis Guenane (@Spredzy)
options:
path:
description:
- Remote absolute path where the private key file is loaded from.
type: path
content:
description:
- Content of the private key file.
<|endoftext|> |
python | <fim-prefix>444154789c63000100000500010d"
b"0a2db40000000049454e44ae426082"
)
def make_homeserver(self, reactor, clock):
self.fetches = []
async def get_file(destination, path, output_stream, args=None, max_size=None):
"""
Returns tuple[int,dict,str,int] of file length, response headers,
absolute URI, and response code.
"""
def write_to(r):
data, response = r
output_stream.write(data)
return response
d = Deferred()
d.addCallback(write_to)
self.fetches.append((d, destination, path, args))
return await make_deferred_yieldable(d)
client = Mock()
client.get_file = get_file
self.storage_path = self.mktemp()
self.media_store_path = self.mktemp()
os.mkdir(self.storage_path)
os.mkdir(self.media_store_path)
config = self.default_config()
config["media_store_path"] = self.media_store_path
config["thumbnail_requirements"] = {}
config["max_image_pixels"] = 2000000
provider_config = {
"module": "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend",
"store_local": True,
"store_synchronous": False,
"store_remote": True,
"config": {"directory": self.storage_path},
}
config["media_storage_providers"] = [provider_config]
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# hs = self.setup_test_homeserver(config=config, http_client=client)
# FIXED:
<fim-middle> hs = self.setup_test_homeserver(config=config, federation_http_client=client)
return hs
def _ensure_quarantined(self, admin_user_tok, server_and_media_id):
"""Ensure a pi<fix-suffix>ece of media is quarantined when trying to access it."""
request, channel = make_request(
self.reactor,
FakeSite(self.download_resource),
"GET",
server_and_media_id,
shorthand=False,
access_token=admin_user_tok,
)
# Should be quarantined
self.assertEqual(
404,
int(channel.code),
msg=(
"Expected to receive a 404 on accessing quarantined media: %s"
% server_and_media_id
),
)
def test_quarantine_media_requires_admin(self):
self.register_user("nonadmin", "pass", admin=False)
non_admin_user_tok = self.login("nonadmin", "pass")
# Attempt quarantine media APIs as non-admin
url = "/_synapse/admin/v1/media/quarantine/example.org/abcde12345"
request, channel = self.make_request(
"POST", url.encode("ascii"), access_token=non_admin_user_tok,
)
# Expect a forbidden error
self.assertEqual(
403,
int(channel.result["code"]),
msg="Expected forbidden on quarantining media as a non-admin",
)
# And the roomID/userID endpoint
url = "/_synapse/admin/v1/room/!room%3Aexample.com/media/quarantine"
request, channel = self.make_request(
"POST", url.encode("ascii"), access_token=non_admin_user_tok,
)
# Expect a forbidden error
s<|endoftext|> |
python | <fim-prefix><fim-middle>bstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License<fix-suffix>. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import routes
import json
from keystone import config
from keystone import catalog
from keystone.common import cms
from keystone.common import logging
from keystone.common import wsgi
from keystone import exception
from keystone import identity
from keystone.openstack.common import timeutils
from keystone import policy
from keystone import token
LOG = logging.getLogger(__name__)
class V3Router(wsgi.ComposingRouter):
def crud_routes(self, mapper, controller, collection_key, key):
collection_path = '/%(collection_key)s' % {
'collection_key': collection_key}
entity_path = '/%(collection_key)s/{%(key)s_id}' % {
'collection_key': collection_key,
'key': key}
mapper.connect(
collection_path,
controller=controller,
action='create_%s' % key,
conditions=dict(method=['POST']))
mapper.connect(
collection_path,
controller=controller,
action='list_%s' % collection_key,
conditions=dict(method=['GET'])<|endoftext|> |
python | <fim-prefix># Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask_login import current_user, logout_user
from flask_restful import Resource
# End the Flask-Logins session
from security_monkey import rbac
class Logout(Resource):
decorators = [rbac.exempt]
def get(self):
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# if not current_user.is_authenticated():
# FIXED:
<fim-middle> if not current_user.is_authenticated:
return "Must be logged in to log out", 200
logout_user()
return "Logged Out", 200
<fix-suffix><|endoftext|> |
python | <fim-prefix><fim-middle>ing: utf-8 -*-
"""
requests.session
~~~~~~~~~~~~~~~~
This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
from collections im<fix-suffix>port Mapping
from datetime import datetime
from .auth import _basic_auth_str
from .compat import cookielib, OrderedDict, urljoin, urlparse
from .cookies import (
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from .utils import to_key_val_list, default_headers, to_native_string
from .exceptions import (
TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
from .packages.urllib3._collections import RecentlyUsedContainer
from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter
from .utils import (
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
get_auth_from_url
)
from .status_codes import codes
# formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI
REDIRECT_CACHE_SIZE = 1000
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
"""
Determines appropriate setting for a given request, taking into account the
explicit setting on that request, and the setting in the session. If a
setting is a dictionary, they will be merged together using `dict_class`
"""
if session_setting is None:
return request_setting
if request_setting is None:
return session_setting
# Bypass if not a dictionary (e.g. verify)
if not (
i<|endoftext|> |
python | <fim-prefix># -*- cod<fim-middle>ing: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
<fix-suffix># You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests import unittest
# sample room_key data for use in the tests
room_key = {
"first_message_index": 1,
"forwarded_count": 1,
"is_verified": False,
"session_data": "SSBBTSBBIEZJU0gK",
}
class E2eRoomKeysHandlerTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# hs = self.setup_test_homeserver("server", http_client=None)
# FIXED:
hs = self.setup_test_homeserver("server", federation_http_client=None)
self.store = hs.get_datastore()
return hs
def test_room_keys_version_delete(self):
# test that deleting a room key backup deletes the keys
version1 = self.get_success(
self.store.create_e2e_room_keys_version(
"user_id", {"algorithm": "rot13", "auth_data": {}}
)
)
self.get_success(
self.store.add_e2e_room_keys(
"user_id", version1, [("room", "session", room_key)]
)
)
versio<|endoftext|> |
python | <fim-prefix>llowed to remove a book from this shelf"),
category="error")
return redirect(url_for('web.index'))
return "Sorry you are not allowed to remove a book from this shelf", 403
@shelf.route("/shelf/create", methods=["GET", "POST"])
@login_required
def create_shelf():
shelf = ub.Shelf()
return create_edit_shelf(shelf, page_title=_(u"Create a Shelf"), page="shelfcreate")
@shelf.route("/shelf/edit/<int:shelf_id>", methods=["GET", "POST"])
@login_required
def edit_shelf(shelf_id):
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
if not check_shelf_edit_permissions(shelf):
flash(_(u"Sorry you are not allowed to edit this shelf"), category="error")
return redirect(url_for('web.index'))
return create_edit_shelf(shelf, page_title=_(u"Edit a shelf"), page="shelfedit", shelf_id=shelf_id)
# if shelf ID is set, we are editing a shelf
def create_edit_shelf(shelf, page_title, page, shelf_id=False):
sync_only_selected_shelves = current_user.kobo_only_shelves_sync
# calibre_db.session.query(ub.Shelf).filter(ub.Shelf.user_id == current_user.id).filter(ub.Shelf.kobo_sync).count()
if request.method == "POST":
to_save = request.form.to_dict()
if not current_user.role_edit_shelfs() and to_save.get("is_public") == "on":
flash(_(u"Sorry you are not allowed to create a public shelf"), category="error")
return redirect(url_for('web.index'))
# BUG: CWE-284 Improper Access Control
# is_public = 1 if to_save.get("is_public") else 0
# FIXED:
<fim-middle> is_public = 1 if to_save.get("is_public") == "on" else 0
if config.config_kobo_sync:
shelf.kobo_sync = True if to_save.get("kobo_sync") else False
if shelf.kobo<fix-suffix>_sync:
ub.session.query(ub.ShelfArchive).filter(ub.ShelfArchive.user_id == current_user.id).filter(
ub.ShelfArchive.uuid == shelf.uuid).delete()
ub.session_commit()
shelf_title = to_save.get("title", "")
if check_shelf_is_unique(shelf, shelf_title, is_public, shelf_id):
shelf.name = shelf_title
shelf.is_public = is_public
if not shelf_id:
shelf.user_id = int(current_user.id)
ub.session.add(shelf)
shelf_action = "created"
flash_text = _(u"Shelf %(title)s created", title=shelf_title)
else:
shelf_action = "changed"
flash_text = _(u"Shelf %(title)s changed", title=shelf_title)
try:
ub.session.commit()
log.info(u"Shelf {} {}".format(shelf_title, shelf_action))
flash(flash_text, category="success")
return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
except (OperationalError, InvalidRequestError) as ex:
ub.session.rollback()
log.debug_or_exception(ex)
log.error("Settings DB is not Writeable")
flash(_("Settings DB is not Writeable"), category="error")
except Exception as ex:
ub.session.rollback()
log.debug_or_exception(ex)
flash(_(u"There was an error"), c<|endoftext|> |
python | <fim-prefix>#
# The <fim-middle>OpenDiamond Platform for Interactive Search
#
# Copyright (c) 2009-2018 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# L<fix-suffix>icense, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
from builtins import next
from builtins import range
import os
import datetime
from xml.sax.saxutils import quoteattr
import sys
import logging
import random
import glob
from itertools import cycle
from flask import Blueprint, url_for, Response, stream_with_context, send_file, \
jsonify
from werkzeug.datastructures import Headers
# BUG: CWE-22 Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')
# from werkzeug.utils import safe_join
# FIXED:
from werkzeug.security import safe_join
from opendiamond.dataretriever.util import read_file_list, write_data
BASEURL = 'augment'
STYLE = False
LOCAL_OBJ_URI = True # if true, return local file path, otherwise http.
INDEXDIR = DATAROOT = None
ITEMS_PER_ITERATION = int(1e4)
KEYWORD = 'yellowthroat'
"""
Example url:
/augment/root/<ROOT_DIR>/distributed/<id>of<N>/ \
keywords/<d/r ([d]eterminant/[r]andom)>_<random_seed>_<base_rate>
/augment/root/STREAM/distributed/1of2/keywords/d_42_1.0
"""
def init(config):
global INDEXDIR, DATAROOT # pylint: disable=global-statement
INDEXDIR = 'STREAM'
DATAROOT = config.dataroot
scope_blueprint = Blueprint('augment_store', __name__)
_log = logging.getLogger(__name__)
@scope_blueprint.route('/root/<rootdir>/distributed/<int:index>of<int:total<|endoftext|> |
python | <fim-prefix><fim-middle>ture__ import absolute_import
from typing import Any, Optional, Tuple, List, Set, Iterable, Mapping, Callable, Dict
from django.utils.translation import ugettext as _
from django.conf import settings<fix-suffix>
from django.db import transaction
from django.http import HttpRequest, HttpResponse
from zerver.lib.request import JsonableError, REQ, has_request_variables
from zerver.decorator import authenticated_json_post_view, \
authenticated_json_view, \
get_user_profile_by_email, require_realm_admin, to_non_negative_int
from zerver.lib.actions import bulk_remove_subscriptions, \
do_change_subscription_property, internal_prep_message, \
create_streams_if_needed, gather_subscriptions, subscribed_to_stream, \
bulk_add_subscriptions, do_send_messages, get_subscriber_emails, do_rename_stream, \
do_deactivate_stream, do_make_stream_public, do_add_default_stream, \
do_change_stream_description, do_get_streams, do_make_stream_private, \
do_remove_default_stream, get_topic_history_for_stream
from zerver.lib.response import json_success, json_error, json_response
from zerver.lib.validator import check_string, check_list, check_dict, \
check_bool, check_variable_type
from zerver.models import UserProfile, Stream, Realm, Subscription, \
Recipient, get_recipient, get_stream, bulk_get_streams, \
bulk_get_recipients, valid_stream_name, get_active_user_dicts_in_realm
from collections import defaultdict
import ujson
from six.moves import urllib
import six
from typing import Text
def is_active_subscriber(user_profile, recipient):
# type: (UserProfile, Recipient) -> bool
return Subscription.objects.filter(user_profile=user_profile,
<|endoftext|> |
python | <fim-prefix>import py<fim-middle>test
# BUG: CWE-863 Incorrect Authorization
# MockSigner,
# FIXED:
from signers import MockSigner
from starkware.starknet.testing.starknet import Starknet
from utils import (
ZERO_ADDRESS,
ass<fix-suffix>ert_event_emitted,
get_contract_class,
cached_contract
)
signer = MockSigner(123456789987654321)
@pytest.fixture(scope='module')
def contract_classes():
return (
get_contract_class('openzeppelin/account/Account.cairo'),
get_contract_class('tests/mocks/Ownable.cairo')
)
@pytest.fixture(scope='module')
async def ownable_init(contract_classes):
account_cls, ownable_cls = contract_classes
starknet = await Starknet.empty()
owner = await starknet.deploy(
contract_class=account_cls,
constructor_calldata=[signer.public_key]
)
ownable = await starknet.deploy(
contract_class=ownable_cls,
constructor_calldata=[owner.contract_address]
)
return starknet.state, ownable, owner
@pytest.fixture
def ownable_factory(contract_classes, ownable_init):
account_cls, ownable_cls = contract_classes
state, ownable, owner = ownable_init
_state = state.copy()
owner = cached_contract(_state, account_cls, owner)
ownable = cached_contract(_state, ownable_cls, ownable)
return ownable, owner
@pytest.mark.asyncio
async def test_constructor(ownable_factory):
ownable, owner = ownable_factory
expected = await ownable.owner().call()
assert expected.result.owner == owner.contract_address
@pytest.mark.asyncio
async def test_transferOwnership(ownable_factory):
ownable, owner = ownable_factory
new_owner = 123
await signer.send_transaction(owner, ownable.contract_addres<|endoftext|> |
python | <fim-prefix>ons should be
handled and provides helper methods for iterating through options and
configuring the base class.
Subclasses should provide defaults for the various configurations as
directly set class level properties. Note, however, that ANY directive
set in a configuration file (whether or not it has a default) will be
added to the configuration.
Example:
class MyConfig(Configuration):
mysetting = True
logpath = "/var/log/myapp.log"
appname = "MyApp"
The configuration is then loaded via the classmethod `load`:
settings = MyConfig.load()
Access to properties is done two ways:
settings['mysetting']
settings.get('mysetting', True)
Note: None settings are not allowed!
"""
CONF_PATHS = [
'/etc/confire.yaml', # The global configuration
os.path.expanduser('~/.confire.yaml'), # User specific configuration
os.path.abspath('conf/confire.yaml') # Local directory configuration
]
@classmethod
def load(klass):
"""
Insantiates the configuration by attempting to load the
configuration from YAML files specified by the CONF_PATH module
variable. This should be the main entry point for configuration.
"""
config = klass()
for path in klass.CONF_PATHS:
if os.path.exists(path):
with open(path, 'r') as conf:
# BUG: NVD-CWE-noinfo Insufficient Information
# config.configure(yaml.load(conf))
# FIXED:
<fim-middle> config.configure(yaml.safe_load(conf))
return config
def configure(self, conf={}):
"""
Allows updating of the configuration via a dictionary of
<fix-suffix> configuration terms or a configuration object. Generally speaking,
this method is utilized to configure the object from a JSON or
YAML parsing.
"""
if not conf: return
if isinstance(conf, Configuration):
conf = dict(conf.options())
for key, value in conf.items():
opt = self.get(key, None)
if isinstance(opt, Configuration):
opt.configure(value)
else:
setattr(self, key, value)
def options(self):
"""
Returns an iterable of sorted option names in order to loop
through all the configuration directives specified in the class.
"""
keys = self.__class__.__dict__.copy()
keys.update(self.__dict__)
keys = sorted(keys.keys())
for opt in keys:
val = self.get(opt)
if val is not None:
yield opt, val
def get(self, key, default=None):
"""
Fetches a key from the configuration without raising a KeyError
exception if the key doesn't exist in the config or
ImproperlyConfigured if the key doesn't exist, instead it returns the
default (None).
"""
try:
return self[key]
except (KeyError, ImproperlyConfigured):
return default
def __getitem__(self, key):
"""
Main configuration access method. Performs a case insensitive
lookup of the key on the clas<|endoftext|> |
python | <fim-prefix># Copyright (c) 2013-2017 by Ron Frederick <[email protected]>.
# All rights reserved.
#
# This program and the accompanying materials are made available under
# the terms of the Eclipse Public License v1.0 which accompanies this
# distribution and is available at:
#
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# Ron Frederick - initial implementation, API, and documentation
"""AsyncSSH version information"""
__author__ = 'Ron Frederick'
__author_email__ = '[email protected]'
__url__ = 'http://asyncssh.timeheart.net'
# BUG: CWE-287 Improper Authentication
# __version__ = '1.12.0'
# FIXED:
<fim-middle>__version__ = '1.12.1'
<fix-suffix><|endoftext|> |
python | <fim-prefix>from django.db import models
try:
# BUG: CWE-312 Cleartext Storage of Sensitive Information
# from django.contrib.auth.models import AbstractUser
# FIXED:
<fim-middle> from django.contrib.auth.models import AbstractUser, UserManager
except ImportError:
from django.db.models import Model as AbstractUser
class CustomUser(AbstractUser):
extra_field = mode<fix-suffix>ls.CharField(max_length=2)
new_username_field = models.CharField('userid', unique=True, max_length=20)
USERNAME_FIELD = 'new_username_field'
def save(self, *args, **kwargs):
self.new_username_field = self.username
super(CustomUser, self).save(*args, **kwargs)
class PhoneNumberUser(CustomUser):
phone_number = models.CharField(max_length=11, default="+15555555")
class NoUsernameUser(models.Model):
"""User model without a "username" field for authentication
backend testing
"""
pass
<|endoftext|> |
python | <fim-prefix>ccessfully established. The exit status is 2 if the REST operation
fails.
The server should be a scheme://hostname:port URL.
'''
class TlsVerifyAction(argparse.Action):
def __init__(self, option_strings, dest, nargs=None, **kwargs):
if nargs is not None:
raise ValueError("nargs not allowed")
super(TlsVerifyAction, self).__init__(option_strings, dest, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
if values.lower() in ['true', 'yes', 'on']:
verify = True
elif values.lower() in ['false', 'no', 'off']:
verify = False
else:
verify = values
setattr(namespace, self.dest, verify)
def main():
global logger
result = 0
parser = argparse.ArgumentParser(description='Keycloak REST client',
prog=prog_name,
epilog=verbose_help.format(prog_name=prog_name),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-v', '--verbose', action='store_true',
help='be chatty')
parser.add_argument('-d', '--debug', action='store_true',
help='turn on debug info')
parser.add_argument('--show-traceback', action='store_true',
help='exceptions print traceback in addition to '
'error message')
parser.add_argument('--log-file',
# BUG: CWE-59 Improper Link Resolution Before File Access ('Link Following')
# default='/tmp/{prog_name}.log'.format(
# FIXED:
<fim-middle> default='{prog_name}.log'.format(
prog_name=prog_name),
help='log file pathname')
parser.add_argument('--permit-insecur<fix-suffix>e-transport', action='store_true',
help='Normally secure transport such as TLS '
'is required, defeat this check')
parser.add_argument('--tls-verify', action=TlsVerifyAction,
default=True,
help='TLS certificate verification for requests to'
' the server. May be one of case insenstive '
'[true, yes, on] to enable,'
'[false, no, off] to disable.'
'Or the pathname to a OpenSSL CA bundle to use.'
' Default is True.')
group = parser.add_argument_group('Server')
group.add_argument('-s', '--server',
required=True,
help='DNS name or IP address of Keycloak server')
group.add_argument('-a', '--auth-role',
choices=AUTH_ROLES,
default='root-admin',
help='authenticating as what type of user (default: root-admin)')
group.add_argument('-u', '--admin-username',
default='admin',
help='admin user name (default: admin)')
group.add_argument('-P', '--admin-password-file',
type=argparse.FileType('rb'),
help=('file containing admin password '
'(or use a hyphen "-" to read the password '
'from std<|endoftext|> |
python | <fim-prefix># -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from unittest import TestCase
from synapse.api.constants import EventTypes
from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.api.room_versions import RoomVersions
from synapse.events import EventBase
from synapse.federation.federation_base import event_from_pdu_json
from synapse.logging.context import LoggingContext, run_in_background
from synapse.rest import admin
from synapse.rest.client.v1 import login, room
from tests import unittest
logger = logging.getLogger(__name__)
class FederationTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets,
login.register_servlets,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# hs = self.setup_test_homeserver(http_client=None)
# FIXED:
<fim-middle> hs = self.setup_test_homeserver(federation_http_client=None)
self.handler = hs.get_federation_handler()
self.store = hs.get_datastore()
return hs
def test_exchange<fix-suffix>_revoked_invite(self):
user_id = self.register_user("kermit", "test")
tok = self.login("kermit", "test")
room_id = self.helper.create_room_as(room_creator=user_id, tok=tok)
# Send a 3PID invite event with an empty body so it's considered as a revoked one.
invite_token = "sometoken"
self.helper.send_state(
room_id=room_id,
event_type=EventTypes.ThirdPartyInvite,
state_key=invite_token,
body={},
tok=tok,
)
d = self.handler.on_exchange_third_party_invite_request(
event_dict={
"type": EventTypes.Member,
"room_id": room_id,
"sender": user_id,
"state_key": "@someone:example.org",
"content": {
"membership": "invite",
"third_party_invite": {
"display_name": "alice",
"signed": {
"mxid": "@alice:localhost",
"token": invite_token,
"signatures": {
"magic.forest": {
"ed25519:3": "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIsNffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg"
}
},
},
},
},
},
)
failure<|endoftext|> |
python | <fim-prefix>f the error.
#
# 'require_at_registration', if enabled, will add a step to the registration
# process, similar to how captcha works. Users will be required to accept the
# policy before their account is created.
#
# 'policy_name' is the display name of the policy users will see when registering
# for an account. Has no effect unless `require_at_registration` is enabled.
# Defaults to "Privacy Policy".
#
#user_consent:
# template_dir: res/templates/privacy
# version: 1.0
# server_notice_content:
# msgtype: m.text
# body: >-
# To continue using this homeserver you must review and agree to the
# terms and conditions at %(consent_uri)s
# send_server_notice_to_guests: true
# block_events_error: >-
# To continue using this homeserver you must review and agree to the
# terms and conditions at %(consent_uri)s
# require_at_registration: false
# policy_name: Privacy Policy
#
"""
class ConsentConfig(Config):
section = "consent"
def __init__(self, *args):
super().__init__(*args)
self.user_consent_version = None
self.user_consent_template_dir = None
self.user_consent_server_notice_content = None
self.user_consent_server_notice_to_guests = False
self.block_events_without_consent_error = None
self.user_consent_at_registration = False
self.user_consent_policy_name = "Privacy Policy"
def read_config(self, config, **kwargs):
consent_config = config.get("user_consent")
# BUG: CWE-79 Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')
# self.terms_template = self.read_templates(["terms.html"], autoescape=True)[0]
# FIXED:
<fim-middle> self.terms_template = self.read_template("terms.html")
if consent_config is None:
return
self.user_consent_version = str(consent_config["version"])
self.us<fix-suffix>er_consent_template_dir = self.abspath(consent_config["template_dir"])
if not path.isdir(self.user_consent_template_dir):
raise ConfigError(
"Could not find template directory '%s'"
% (self.user_consent_template_dir,)
)
self.user_consent_server_notice_content = consent_config.get(
"server_notice_content"
)
self.block_events_without_consent_error = consent_config.get(
"block_events_error"
)
self.user_consent_server_notice_to_guests = bool(
consent_config.get("send_server_notice_to_guests", False)
)
self.user_consent_at_registration = bool(
consent_config.get("require_at_registration", False)
)
self.user_consent_policy_name = consent_config.get(
"policy_name", "Privacy Policy"
)
def generate_config_section(self, **kwargs):
return DEFAULT_CONFIG
<|endoftext|> |
python | <fim-prefix>import os<fim-middle>
from subprocess import Popen, PIPE
import Bcfg2.Server.Plugin
# for debugging output only
import logging
logger = logging.getLogger('Bcfg2.Plugins.Svn')
class Svn(Bcfg2.Server.Plugin.Plugin,
<fix-suffix> Bcfg2.Server.Plugin.Version):
"""Svn is a version plugin for dealing with Bcfg2 repos."""
name = 'Svn'
__version__ = '$Id$'
__author__ = '[email protected]'
def __init__(self, core, datastore):
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
self.core = core
self.datastore = datastore
# path to svn directory for bcfg2 repo
svn_dir = "%s/.svn" % datastore
# Read revision from bcfg2 repo
if os.path.isdir(svn_dir):
self.get_revision()
else:
logger.error("%s is not a directory" % svn_dir)
raise Bcfg2.Server.Plugin.PluginInitError
logger.debug("Initialized svn plugin with svn directory = %s" % svn_dir)
def get_revision(self):
"""Read svn revision information for the Bcfg2 repository."""
try:
data = Popen(("env LC_ALL=C svn info %s" %
# BUG: CWE-20 Improper Input Validation
# (self.datastore)), shell=True,
# FIXED:
pipes.quote(self.datastore)), shell=True,
stdout=PIPE).communicate()[0].split('\n')
return [line.split(': ')[1] for line in data \
if line[:9] == 'Revision:'][-1]
except IndexError:
logger.error("Failed to read svn info; disabling svn support")
logger.error('''Ran command "svn info %s"''' % (self.datastore))<|endoftext|> |
python | <fim-prefix># Natural Language Toolkit: Interface to MaltParser
#
# Author: Dan Garrette <[email protected]>
# Contributor: Liling Tan, Mustufain, osamamukhtar11
#
# Copyright (C) 2001-2021 NLTK Project
# URL: <https://www.nltk.org/>
# For license information, see LICENSE.TXT
import inspect
import os
import subprocess
import sys
import tempfile
from nltk.data import ZipFilePathPointer
from nltk.internals import find_dir, find_file, find_jars_within_path
from nltk.parse.api import ParserI
from nltk.parse.dependencygraph import DependencyGraph
from nltk.parse.util import taggedsents_to_conll
def malt_regex_tagger():
from nltk.tag import RegexpTagger
_tagger = RegexpTagger(
[
(r"\.$", "."),
(r"\,$", ","),
(r"\?$", "?"), # fullstop, comma, Qmark
(r"\($", "("),
(r"\)$", ")"), # round brackets
(r"\[$", "["),
(r"\]$", "]"), # square brackets
# BUG: CWE-1333 Inefficient Regular Expression Complexity
# (r"^-?[0-9]+(.[0-9]+)?$", "CD"), # cardinal numbers
# FIXED:
<fim-middle> (r"^-?[0-9]+(\.[0-9]+)?$", "CD"), # cardinal numbers
(r"(The|the|A|a|An|an)$", "DT"), # articles
(r"(He|he|She|she|It|it|I|me|Me|You|you)$", "PRP"), # pronouns
<fix-suffix> (r"(His|his|Her|her|Its|its)$", "PRP$"), # possessive
(r"(my|Your|your|Yours|yours)$", "PRP$"), # possessive
(r"(on|On|in|In|at|At|since|Since)$", "IN"), # time prepopsitions
(r"(for|For|ago|Ago|before|Before)$", "IN"), # time prepopsitions
(r"(till|Till|until|Until)$", "IN"), # time prepopsitions
(r"(by|By|beside|Beside)$", "IN"), # space prepopsitions
(r"(under|Under|below|Below)$", "IN"), # space prepopsitions
(r"(over|Over|above|Above)$", "IN"), # space prepopsitions
(r"(across|Across|through|Through)$", "IN"), # space prepopsitions
(r"(into|Into|towards|Towards)$", "IN"), # space prepopsitions
(r"(onto|Onto|from|From)$", "IN"), # space prepopsitions
(r".*able$", "JJ"), # adjectives
(r".*ness$", "NN"), # nouns formed from adjectives
(r".*ly$", "RB"), # adverbs
(r".*s$", "NNS"), # plural nouns
(r".*ing$", "VBG"), # gerunds
(r".*ed$", "VBD"), # past tense verbs
(r".*", "NN"), # nouns (default)
]
)
return _tagger.tag
def find_maltparser(parser_dirname):
"""
A module to find MaltParser .jar file and its dependencies.
"""
if os.path.exists(parser_dirname): # If a full path is given.
_malt_dir = parser_dirname
else: # Try to find path to maltparser directory in environment variables.
_malt_dir <|endoftext|> |
python | <fim-prefix>ge governing permissions and limitations
# under the License.
"""
Classes and methods related to user handling in Horizon.
"""
import logging
from django.utils.translation import ugettext as _
from horizon import api
from horizon import exceptions
LOG = logging.getLogger(__name__)
def get_user_from_request(request):
""" Checks the current session and returns a :class:`~horizon.users.User`.
If the session contains user data the User will be treated as
authenticated and the :class:`~horizon.users.User` will have all
its attributes set.
If not, the :class:`~horizon.users.User` will have no attributes set.
If the session contains invalid data,
:exc:`~horizon.exceptions.NotAuthorized` will be raised.
"""
if 'user_id' not in request.session:
return User()
try:
return User(id=request.session['user_id'],
token=request.session['token'],
user=request.session['user_name'],
tenant_id=request.session['tenant_id'],
tenant_name=request.session['tenant'],
service_catalog=request.session['serviceCatalog'],
roles=request.session['roles'],
request=request)
except KeyError:
# If any of those keys are missing from the session it is
# overwhelmingly likely that we're dealing with an outdated session.
LOG.exception("Error while creating User from session.")
# BUG: NVD-CWE-Other Other
# request.session.clear()
# FIXED:
<fim-middle> request.user_logout()
raise exceptions.NotAuthorized(_("Your session has expired. "
"Please log in again."))
class LazyUser(object):
def <fix-suffix>__get__(self, request, obj_type=None):
if not hasattr(request, '_cached_user'):
request._cached_user = get_user_from_request(request)
return request._cached_user
class User(object):
""" The main user class which Horizon expects.
.. attribute:: token
The id of the Keystone token associated with the current user/tenant.
.. attribute:: username
The name of the current user.
.. attribute:: tenant_id
The id of the Keystone tenant for the current user/token.
.. attribute:: tenant_name
The name of the Keystone tenant for the current user/token.
.. attribute:: service_catalog
The ``ServiceCatalog`` data returned by Keystone.
.. attribute:: roles
A list of dictionaries containing role names and ids as returned
by Keystone.
.. attribute:: admin
Boolean value indicating whether or not this user has admin
privileges. Internally mapped to :meth:`horizon.users.User.is_admin`.
"""
def __init__(self, id=None, token=None, user=None, tenant_id=None,
service_catalog=None, tenant_name=None, roles=None,
authorized_tenants=None, request=None):
self.id = id
self.token = token
self.username = user
self.tenant_id = tenant_id
self.tenant_name = tenant_name
self.service_catalog = service_catalog
self.roles = roles or []
self._authorized_tenants = <|endoftext|> |
python | <fim-prefix> must be integers for ICMP protocol type")
else:
raise exception.InvalidInput(reason="To and From ports "
"must be integers")
if ip_protocol.upper() not in ['TCP', 'UDP', 'ICMP']:
raise exception.InvalidIpProtocol(protocol=ip_protocol)
# Verify that from_port must always be less than
# or equal to to_port
if (ip_protocol.upper() in ['TCP', 'UDP'] and
(from_port > to_port)):
raise exception.InvalidPortRange(from_port=from_port,
to_port=to_port, msg="Former value cannot"
" be greater than the later")
# Verify valid TCP, UDP port ranges
if (ip_protocol.upper() in ['TCP', 'UDP'] and
(from_port < 1 or to_port > 65535)):
raise exception.InvalidPortRange(from_port=from_port,
to_port=to_port, msg="Valid TCP ports should"
" be between 1-65535")
# Verify ICMP type and code
if (ip_protocol.upper() == "ICMP" and
(from_port < -1 or from_port > 255 or
to_port < -1 or to_port > 255)):
raise exception.InvalidPortRange(from_port=from_port,
to_port=to_port, msg="For ICMP, the"
" type:code must be valid")
# BUG: CWE-20 Improper Input Validation
# values['protocol'] = ip_protocol
# FIXED:
<fim-middle> values['protocol'] = ip_protocol.lower()
values['from_port'] = from_port
values['to_port'] = to_port
else:
# If cidr based filtering, protocol a<fix-suffix>nd ports are mandatory
if 'cidr' in values:
return None
return values
def _security_group_rule_exists(self, security_group, values):
"""Indicates whether the specified rule values are already
defined in the given security group.
"""
for rule in security_group.rules:
is_duplicate = True
keys = ('group_id', 'cidr', 'from_port', 'to_port', 'protocol')
for key in keys:
if rule.get(key) != values.get(key):
is_duplicate = False
break
if is_duplicate:
return rule['id']
return False
def revoke_security_group_ingress(self, context, group_name=None,
group_id=None, **kwargs):
if not group_name and not group_id:
err = _("Not enough parameters, need group_name or group_id")
raise exception.EC2APIError(err)
self.compute_api.ensure_default_security_group(context)
notfound = exception.SecurityGroupNotFound
if group_name:
security_group = db.security_group_get_by_name(context,
context.project_id,
group_name)
if not security_group:
raise notfound(security_group_id=group_name)
if group_id:
security_group = db.security_gro<|endoftext|> |
python | <fim-prefix><fim-middle>o-helpdesk - A Django powered ticket tracker for small enterprise.
(c) Copyright 2008 Jutda. All Rights Reserved. See LICENSE for details.
lib.py - Common functions (eg multipart e-mail)
"""
import<fix-suffix> logging
import mimetypes
from django.conf import settings
from django.utils.encoding import smart_text
from helpdesk.models import FollowUpAttachment
logger = logging.getLogger('helpdesk')
def ticket_template_context(ticket):
context = {}
for field in ('title', 'created', 'modified', 'submitter_email',
'status', 'get_status_display', 'on_hold', 'description',
'resolution', 'priority', 'get_priority_display',
'last_escalation', 'ticket', 'ticket_for_url', 'merged_to',
'get_status', 'ticket_url', 'staff_url', '_get_assigned_to'
):
attr = getattr(ticket, field, None)
if callable(attr):
context[field] = '%s' % attr()
else:
context[field] = attr
context['assigned_to'] = context['_get_assigned_to']
return context
def queue_template_context(queue):
context = {}
for field in ('title', 'slug', 'email_address', 'from_address', 'locale'):
attr = getattr(queue, field, None)
if callable(attr):
context[field] = attr()
else:
context[field] = attr
return context
def safe_template_context(ticket):
"""
Return a dictionary that can be used as a template context to render
comments and other details with ticket or queue parameters. Note that
we don't just provide the Ticket & Queue objects to the template as
they could reveal confidential information. <|endoftext|> |
python | <fim-prefix><fim-middle>ing: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a<fix-suffix> copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock, call
from signedjson.key import generate_signing_key
from synapse.api.constants import EventTypes, Membership, PresenceState
from synapse.api.presence import UserPresenceState
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.events.builder import EventBuilder
from synapse.handlers.presence import (
EXTERNAL_PROCESS_EXPIRY,
FEDERATION_PING_INTERVAL,
FEDERATION_TIMEOUT,
IDLE_TIMER,
LAST_ACTIVE_GRANULARITY,
SYNC_ONLINE_TIMEOUT,
handle_timeout,
handle_update,
)
from synapse.rest.client.v1 import room
from synapse.types import UserID, get_domain_from_id
from tests import unittest
class PresenceUpdateTestCase(unittest.TestCase):
def test_offline_to_online(self):
wheel_timer = Mock()
user_id = "@foo:bar"
now = 5000000
prev_state = UserPresenceState.default(user_id)
new_state = prev_state.copy_and_replace(
state=PresenceState.ONLINE, last_active_ts=now
)
state, persist_and_notify, federation_ping = handle_update(
prev_state, new_st<|endoftext|> |
python | <fim-prefix>igint_to_int
from attic.remote import RepositoryServer, RemoteRepository
class Archiver:
def __init__(self):
self.exit_code = 0
def open_repository(self, location, create=False, exclusive=False):
if location.proto == 'ssh':
repository = RemoteRepository(location, create=create)
else:
repository = Repository(location.path, create=create, exclusive=exclusive)
repository._location = location
return repository
def print_error(self, msg, *args):
msg = args and msg % args or msg
self.exit_code = 1
print('attic: ' + msg, file=sys.stderr)
def print_verbose(self, msg, *args, **kw):
if self.verbose:
msg = args and msg % args or msg
if kw.get('newline', True):
print(msg)
else:
print(msg, end=' ')
def do_serve(self, args):
"""Start Attic in server mode. This command is usually not used manually.
"""
return RepositoryServer(restrict_to_paths=args.restrict_to_paths).serve()
def do_init(self, args):
"""Initialize an empty repository"""
print('Initializing repository at "%s"' % args.repository.orig)
repository = self.open_repository(args.repository, create=True, exclusive=True)
key = key_creator(repository, args)
manifest = Manifest(key, repository)
manifest.key = key
manifest.write()
repository.commit()
# BUG: CWE-264 Permissions, Privileges, and Access Controls
#
# FIXED:
<fim-middle> Cache(repository, key, manifest, warn_if_unencrypted=False)
return self.exit_code
def do_check(self, args):
"""Check repository consistency"""
repository = self.op<fix-suffix>en_repository(args.repository, exclusive=args.repair)
if args.repair:
while not os.environ.get('ATTIC_CHECK_I_KNOW_WHAT_I_AM_DOING'):
self.print_error("""Warning: 'check --repair' is an experimental feature that might result
in data loss.
Type "Yes I am sure" if you understand this and want to continue.\n""")
if input('Do you want to continue? ') == 'Yes I am sure':
break
if not args.archives_only:
print('Starting repository check...')
if repository.check(repair=args.repair):
print('Repository check complete, no problems found.')
else:
return 1
if not args.repo_only and not ArchiveChecker().check(repository, repair=args.repair):
return 1
return 0
def do_change_passphrase(self, args):
"""Change repository key file passphrase"""
repository = self.open_repository(args.repository)
manifest, key = Manifest.load(repository)
key.change_passphrase()
return 0
def do_create(self, args):
"""Create new archive"""
t0 = datetime.now()
repository = self.open_repository(args.archive, exclusive=True)
manifest, key = Manifest.load(repository)
cache = Cache(repository, key, manifest)
archive = Archive(repository, key, manifest, args.archive.archive, cache=cache,
create=True, checkpoint_interval=<|endoftext|> |
python | <fim-prefix>tives.asymmetric.dsa.DSAPrivateKey):
signature = private_key.sign(_in, _hash)
if CRYPTOGRAPHY_HAS_EC_SIGN:
if isinstance(private_key, cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey):
signature = private_key.sign(_in, cryptography.hazmat.primitives.asymmetric.ec.ECDSA(_hash))
if CRYPTOGRAPHY_HAS_ED25519_SIGN:
if isinstance(private_key, cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PrivateKey):
signature = private_key.sign(_in)
if CRYPTOGRAPHY_HAS_ED448_SIGN:
if isinstance(private_key, cryptography.hazmat.primitives.asymmetric.ed448.Ed448PrivateKey):
signature = private_key.sign(_in)
if CRYPTOGRAPHY_HAS_RSA_SIGN:
if isinstance(private_key, cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey):
signature = private_key.sign(_in, _padding, _hash)
if signature is None:
self.module.fail_json(
msg="Unsupported key type. Your cryptography version is {0}".format(CRYPTOGRAPHY_VERSION)
)
result['signature'] = base64.b64encode(signature)
return result
except Exception as e:
raise OpenSSLObjectError(e)
def main():
module = AnsibleModule(
argument_spec=dict(
privatekey_path=dict(type='path'),
# BUG: CWE-116 Improper Encoding or Escaping of Output
# privatekey_content=dict(type='str'),
# FIXED:
<fim-middle> privatekey_content=dict(type='str', no_log=True),
privatekey_passphrase=dict(type='str', no_log=True),
path=dict(type='path', required=True),
select_cry<fix-suffix>pto_backend=dict(type='str', choices=['auto', 'pyopenssl', 'cryptography'], default='auto'),
),
mutually_exclusive=(
['privatekey_path', 'privatekey_content'],
),
required_one_of=(
['privatekey_path', 'privatekey_content'],
),
supports_check_mode=True,
)
if not os.path.isfile(module.params['path']):
module.fail_json(
name=module.params['path'],
msg='The file {0} does not exist'.format(module.params['path'])
)
backend = module.params['select_crypto_backend']
if backend == 'auto':
# Detection what is possible
can_use_cryptography = CRYPTOGRAPHY_FOUND and CRYPTOGRAPHY_VERSION >= LooseVersion(MINIMAL_CRYPTOGRAPHY_VERSION)
can_use_pyopenssl = PYOPENSSL_FOUND and PYOPENSSL_VERSION >= LooseVersion(MINIMAL_PYOPENSSL_VERSION)
# Decision
if can_use_cryptography:
backend = 'cryptography'
elif can_use_pyopenssl:
backend = 'pyopenssl'
# Success?
if backend == 'auto':
module.fail_json(msg=("Can't detect any of the required Python libraries "
"cryptography (>= {0}) or PyOpenSSL (>= {1})").format(
MINIMAL_CRYPTOGRAPHY_VERSION,
MINIMAL_PYOPENSSL_VERSION))
try:
if backend == 'pyopenssl':
if not PYOPENSSL_FOUND:
module.fail_json(msg=missing_required_lib('pyOpenSS<|endoftext|> |
python | <fim-prefix>s MediaRepoTests(unittest.HomeserverTestCase):
hijack_auth = True
user_id = "@test:user"
def make_homeserver(self, reactor, clock):
self.fetches = []
def get_file(destination, path, output_stream, args=None, max_size=None):
"""
Returns tuple[int,dict,str,int] of file length, response headers,
absolute URI, and response code.
"""
def write_to(r):
data, response = r
output_stream.write(data)
return response
d = Deferred()
d.addCallback(write_to)
self.fetches.append((d, destination, path, args))
return make_deferred_yieldable(d)
client = Mock()
client.get_file = get_file
self.storage_path = self.mktemp()
self.media_store_path = self.mktemp()
os.mkdir(self.storage_path)
os.mkdir(self.media_store_path)
config = self.default_config()
config["media_store_path"] = self.media_store_path
config["thumbnail_requirements"] = {}
config["max_image_pixels"] = 2000000
provider_config = {
"module": "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend",
"store_local": True,
"store_synchronous": False,
"store_remote": True,
"config": {"directory": self.storage_path},
}
config["media_storage_providers"] = [provider_config]
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# hs = self.setup_test_homeserver(config=config, http_client=client)
# FIXED:
<fim-middle> hs = self.setup_test_homeserver(config=config, federation_http_client=client)
return hs
def prepare(self, reactor, clock, hs):
self.media_repo = hs.get_media_repository_<fix-suffix>resource()
self.download_resource = self.media_repo.children[b"download"]
self.thumbnail_resource = self.media_repo.children[b"thumbnail"]
self.media_id = "example.com/12345"
def _req(self, content_disposition):
request, channel = make_request(
self.reactor,
FakeSite(self.download_resource),
"GET",
self.media_id,
shorthand=False,
await_result=False,
)
self.pump()
# We've made one fetch, to example.com, using the media URL, and asking
# the other server not to do a remote fetch
self.assertEqual(len(self.fetches), 1)
self.assertEqual(self.fetches[0][1], "example.com")
self.assertEqual(
self.fetches[0][2], "/_matrix/media/r0/download/" + self.media_id
)
self.assertEqual(self.fetches[0][3], {"allow_remote": "false"})
headers = {
b"Content-Length": [b"%d" % (len(self.test_image.data))],
b"Content-Type": [self.test_image.content_type],
}
if content_disposition:
headers[b"Content-Disposition"] = [content_disposition]
self.fetches[0][0].callback(
(self.test_image.data, (len(self.test_image.data), headers))
)
self.pump()
self.assertEqual(channel.code, 200)
return channel
def test_disposition_filename_ascii(self):
"""
If the filename is filename=<ascii> then <|endoftext|> |
python | <fim-prefix># -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests REST events for /rooms paths."""
from mock import Mock
from twisted.internet import defer
from synapse.rest.client.v1 import room
from synapse.types import UserID
from tests import unittest
PATH_PREFIX = "/_matrix/client/api/v1"
class RoomTypingTestCase(unittest.HomeserverTestCase):
""" Tests /rooms/$room_id/typing/$user_id REST API. """
user_id = "@sid:red"
user = UserID.from_string(user_id)
servlets = [room.register_servlets]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# "red", http_client=None, federation_client=Mock(),
# FIXED:
<fim-middle> "red", federation_http_client=None, federation_client=Mock(),
)
self.event_source = hs.get_event_sources().sources["typing"]
hs.get_federation_handler = Mock()
<fix-suffix> async def get_user_by_access_token(token=None, allow_guest=False):
return {
"user": UserID.from_string(self.auth_user_id),
"token_id": 1,
"is_guest": False,
}
hs.get_auth().get_user_by_access_token = get_user_by_access_token
async def _insert_client_ip(*args, **kwargs):
return None
hs.get_datastore().insert_client_ip = _insert_client_ip
def get_room_members(room_id):
if room_id == self.room_id:
return defer.succeed([self.user])
else:
return defer.succeed([])
@defer.inlineCallbacks
def fetch_room_distributions_into(
room_id, localusers=None, remotedomains=None, ignore_user=None
):
members = yield get_room_members(room_id)
for member in members:
if ignore_user is not None and member == ignore_user:
continue
if hs.is_mine(member):
if localusers is not None:
localusers.add(member)
else:
if remotedomains is not None:
remotedomains.add(member.domain)
hs.get_room_member_handler().fetch_room_distributions_into = (
fetch_room_distributions_into
)
return hs
def prepare(self, reactor, clock, hs):
self.room_id = self.helper.create_room_as(self.<|endoftext|> |
python | <fim-prefix># -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib
from typing import Any, Dict, Optional
from synapse.api.constants import Membership
from synapse.api.errors import Codes, HttpResponseException, SynapseError
from synapse.api.urls import (
FEDERATION_UNSTABLE_PREFIX,
FEDERATION_V1_PREFIX,
FEDERATION_V2_PREFIX,
)
from synapse.logging.utils import log_function
logger = logging.getLogger(__name__)
class TransportLayerClient:
"""Sends federation HTTP requests to other servers"""
def __init__(self, hs):
self.server_name = hs.hostname
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# self.client = hs.get_http_client()
# FIXED:
<fim-middle> self.client = hs.get_federation_http_client()
@log_function
def get_room_state_ids(self, destination, room_id, event_id):
""" Requests all state for a given room from the give<fix-suffix>n server at the
given event. Returns the state's event_id's
Args:
destination (str): The host name of the remote homeserver we want
to get the state from.
context (str): The name of the context we want the state of
event_id (str): The event we want the context at.
Returns:
Awaitable: Results in a dict received from the remote homeserver.
"""
logger.debug("get_room_state_ids dest=%s, room=%s", destination, room_id)
path = _create_v1_path("/state_ids/%s", room_id)
return self.client.get_json(
destination,
path=path,
args={"event_id": event_id},
try_trailing_slash_on_400=True,
)
@log_function
def get_event(self, destination, event_id, timeout=None):
""" Requests the pdu with give id and origin from the given server.
Args:
destination (str): The host name of the remote homeserver we want
to get the state from.
event_id (str): The id of the event being requested.
timeout (int): How long to try (in ms) the destination for before
giving up. None indicates no timeout.
Returns:
Awaitable: Results in a dict received from the remote homeserver.
"""
logger.debug("get_pdu dest=%s, event_id=%s", destination, event_id)
path = _create_v1_path("/event/%s", event_id)
retur<|endoftext|> |
python | <fim-prefix># transition module to convert from new types to old types
import vyper.codegen.types as old
import vyper.semantics.types as new
from vyper.exceptions import InvalidType
def new_type_to_old_type(typ: new.BasePrimitive) -> old.NodeType:
if isinstance(typ, new.BoolDefinition):
return old.BaseType("bool")
if isinstance(typ, new.AddressDefinition):
return old.BaseType("address")
if isinstance(typ, new.InterfaceDefinition):
return old.InterfaceType(typ._id)
if isinstance(typ, new.BytesMDefinition):
m = typ._length # type: ignore
return old.BaseType(f"bytes{m}")
if isinstance(typ, new.BytesArrayDefinition):
return old.ByteArrayType(typ.length)
if isinstance(typ, new.StringDefinition):
return old.StringType(typ.length)
if isinstance(typ, new.DecimalDefinition):
return old.BaseType("decimal")
if isinstance(typ, new.SignedIntegerAbstractType):
bits = typ._bits # type: ignore
return old.BaseType("int" + str(bits))
if isinstance(typ, new.UnsignedIntegerAbstractType):
bits = typ._bits # type: ignore
return old.BaseType("uint" + str(bits))
if isinstance(typ, new.ArrayDefinition):
return old.SArrayType(new_type_to_old_type(typ.value_type), typ.length)
if isinstance(typ, new.DynamicArrayDefinition):
return old.DArrayType(new_type_to_old_type(typ.value_type), typ.length)
if isinstance(typ, new.TupleDefinition):
# BUG: CWE-119 Improper Restriction of Operations within the Bounds of a Memory Buffer
# return old.TupleType(typ.value_type)
# FIXED:
<fim-middle> return old.TupleType([new_type_to_old_type(t) for t in typ.value_type])
if isinstance(typ, new.StructDefinition):
return old.StructType(
{n: new_type_to_old_type(t) for<fix-suffix> (n, t) in typ.members.items()}, typ._id
)
raise InvalidType(f"unknown type {typ}")
<|endoftext|> |
python | <fim-prefix> try:
desired_publickey = self._create_publickey(module)
except OpenSSLBadPassphraseError as exc:
raise PublicKeyError(exc)
return publickey_content == desired_publickey
if not state_and_perms:
return state_and_perms
return _check_privatekey()
def remove(self, module):
if self.backup:
self.backup_file = module.backup_local(self.path)
super(PublicKey, self).remove(module)
    def dump(self):
        """Serialize the object into a dictionary suitable for the module result."""
        result = {
            'privatekey': self.privatekey_path,
            'filename': self.path,
            'format': self.format,
            'changed': self.changed,
            'fingerprint': self.fingerprint,
        }
        if self.backup_file:
            result['backup_file'] = self.backup_file
        if self.return_content:
            # Lazily (re-)read the key from disk so the result reflects what was
            # actually written; a missing file yields None instead of failing.
            if self.publickey_bytes is None:
                self.publickey_bytes = load_file_if_exists(self.path, ignore_errors=True)
            result['publickey'] = self.publickey_bytes.decode('utf-8') if self.publickey_bytes else None
        return result
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
force=dict(type='bool', default=False),
path=dict(type='path', required=True),
privatekey_path=dict(type='path'),
# BUG: CWE-116 Improper Encoding or Escaping of Output
# privatekey_content=dict(type='str'),
# FIXED:
<fim-middle> privatekey_content=dict(type='str', no_log=True),
format=dict(type='str', default='PEM', choices=['OpenSSH', 'PEM']),
privatekey_passphrase=dict(type='str', no_log=<fix-suffix>True),
backup=dict(type='bool', default=False),
select_crypto_backend=dict(type='str', choices=['auto', 'pyopenssl', 'cryptography'], default='auto'),
return_content=dict(type='bool', default=False),
),
supports_check_mode=True,
add_file_common_args=True,
required_if=[('state', 'present', ['privatekey_path', 'privatekey_content'], True)],
mutually_exclusive=(
['privatekey_path', 'privatekey_content'],
),
)
minimal_cryptography_version = MINIMAL_CRYPTOGRAPHY_VERSION
if module.params['format'] == 'OpenSSH':
minimal_cryptography_version = MINIMAL_CRYPTOGRAPHY_VERSION_OPENSSH
backend = module.params['select_crypto_backend']
if backend == 'auto':
# Detection what is possible
can_use_cryptography = CRYPTOGRAPHY_FOUND and CRYPTOGRAPHY_VERSION >= LooseVersion(minimal_cryptography_version)
can_use_pyopenssl = PYOPENSSL_FOUND and PYOPENSSL_VERSION >= LooseVersion(MINIMAL_PYOPENSSL_VERSION)
# Decision
if can_use_cryptography:
backend = 'cryptography'
elif can_use_pyopenssl:
if module.params['format'] == 'OpenSSH':
module.fail_json(
msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION_OPENSSH)),
exception=CRYPTOGRAPHY_IMP_ERR
)
backend = 'pyopenssl'
# Success?
if <|endoftext|> |
python | <fim-prefix><fim-middle>2-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public Li<fix-suffix>cense as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import tempfile
from string import ascii_letters, digits
from ansible.errors import AnsibleOptionsError
from ansible.module_utils.six import string_types
from ansible.module_utils.six.moves import configparser
from ansible.module_utils._text import to_text
from ansible.parsing.quoting import unquote
from ansible.utils.path import makedirs_safe
BOOL_TRUE = frozenset([ "true", "t", "y", "1", "yes", "on" ])
def mk_boolean(value, truthy=None):
    """Coerce a configuration value to a bool.

    Real booleans pass through unchanged.  Any other value (including
    None) is stringified, lower-cased and tested for membership in
    *truthy*, which defaults to the module-level BOOL_TRUE set.  Since
    ``str(None).lower() == "none"`` is not in that set, None maps to
    False, preserving the original behaviour.  (The original code also
    contained a dead ``ret = False`` assignment for None that was
    immediately overwritten; it has been removed.)

    :param value: raw configuration value of any type
    :param truthy: optional override of the set of truthy strings
    :return: the coerced boolean
    """
    if isinstance(value, bool):
        return value
    if truthy is None:
        truthy = BOOL_TRUE
    return str(value).lower() in truthy
def shell_expand(path, expand_relative_paths=False):
'''
shell_expand is needed as os.path.expanduser does not work
when path is None, which is the default for ANSIBLE_PRIVATE_KEY_FILE
'''
if path:
path = os.path.expanduser(os.path.expan<|endoftext|> |
python | <fim-prefix><fim-middle>ht The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the Lic<fix-suffix>ense at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import csv
import inspect
import logging
import os
from argparse import Namespace
from copy import deepcopy
from enum import Enum
from typing import Any, Callable, Dict, IO, MutableMapping, Optional, Union
from warnings import warn
import torch
import yaml
from pytorch_lightning.utilities import _OMEGACONF_AVAILABLE, AttributeDict, rank_zero_warn
from pytorch_lightning.utilities.apply_func import apply_to_collection
from pytorch_lightning.utilities.cloud_io import get_filesystem
from pytorch_lightning.utilities.cloud_io import load as pl_load
from pytorch_lightning.utilities.migration import pl_legacy_patch
from pytorch_lightning.utilities.parsing import parse_class_init_keys
log = logging.getLogger(__name__)
PRIMITIVE_TYPES = (bool, int, float, str)
ALLOWED_CONFIG_TYPES = (AttributeDict, MutableMapping, Namespace)
if _OMEGACONF_AVAILABLE:
from omegaconf import OmegaConf
from omegaconf.dictconfig import DictConfig
from omegaconf.errors import UnsupportedValueType, ValidationError
# the older shall be on the top
CHECKPOINT_PAST_HPARAMS_KEYS = ("hparams", "module_arguments") # u<|endoftext|> |
python | <fim-prefix><fim-middle>ing: utf-8 -*-
#
# SelfTest/Random/__init__.py: Self-test for random number generation modules
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ====================================<fix-suffix>===============================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test for random number generators"""
__revision__ = "$Id$"
def get_tests(config={}):
tests = []
from Crypto.SelfTest.Random import Fortuna; tests += Fortuna.get_tests(config=config)
from Crypto.SelfTest.Random import OSRNG; tests += OSRNG.get_tests(config=config)
from Crypto.SelfTest.Random import test_random; tests += test_random.get_tests(config=config)
from Crypto.SelfTest.Random import test_rpoolcompat; tests += test_rpoolcompat.get_tests(config=config)
# BUG: CWE-310 Cryptographic Issues
#
<|endoftext|> |
python | <fim-prefix> warning = {}
if current_user.role_delete_books():
book = calibre_db.get_book(book_id)
if book:
try:
result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
if not result:
if jsonResponse:
return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
"type": "danger",
"format": "",
"message": error}])
else:
flash(error, category="error")
return redirect(url_for('editbook.edit_book', book_id=book_id))
if error:
if jsonResponse:
warning = {"location": url_for("editbook.edit_book", book_id=book_id),
"type": "warning",
"format": "",
"message": error}
else:
flash(error, category="warning")
if not book_format:
delete_whole_book(book_id, book)
else:
calibre_db.session.query(db.Data).filter(db.Data.book == book.id).\
filter(db.Data.format == book_format).delete()
# BUG: CWE-918 Server-Side Request Forgery (SSRF)
#
# FIXED:
<fim-middle> kobo_sync_status.remove_synced_book(book.id, True)
calibre_db.session.commit()
except Exception as ex:
log.debug_or_exception(ex)
<fix-suffix> calibre_db.session.rollback()
if jsonResponse:
return json.dumps([{"location": url_for("editbook.edit_book", book_id=book_id),
"type": "danger",
"format": "",
"message": ex}])
else:
flash(str(ex), category="error")
return redirect(url_for('editbook.edit_book', book_id=book_id))
else:
# book not found
log.error('Book with id "%s" could not be deleted: not found', book_id)
return render_delete_book_result(book_format, jsonResponse, warning, book_id)
message = _("You are missing permissions to delete books")
if jsonResponse:
return json.dumps({"location": url_for("editbook.edit_book", book_id=book_id),
"type": "danger",
"format": "",
"message": message})
else:
flash(message, category="error")
return redirect(url_for('editbook.edit_book', book_id=book_id))
def render_edit_book(book_id):
cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
if not book:
flash(_(u"Oops! Selected book title is unavailable. File does not exist or is not accessible"), category=<|endoftext|> |
python | <fim-prefix>ef)
if tenant_id:
self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
return {'user': new_user_ref}
    def update_user(self, context, user_id, user):
        """Admin-only partial update of a user record.

        If the password changed or the account was disabled, all of the
        user's tokens are revoked so stale credentials cannot be reused.
        """
        # NOTE(termie): this is really more of a patch than a put
        self.assert_admin(context)
        user_ref = self.identity_api.update_user(context, user_id, user)
        # If the password was changed or the user was disabled we clear tokens
        if user.get('password') or not user.get('enabled', True):
            try:
                for token_id in self.token_api.list_tokens(context, user_id):
                    self.token_api.delete_token(context, token_id)
            except exception.NotImplemented:
                # The users status has been changed but tokens remain valid for
                # backends that can't list tokens for users
                LOG.warning('User %s status has changed, but existing tokens '
                            'remain valid' % user_id)
        return {'user': user_ref}
    def delete_user(self, context, user_id):
        """Admin-only: remove a user from the identity backend."""
        self.assert_admin(context)
        self.identity_api.delete_user(context, user_id)
    def set_user_enabled(self, context, user_id, user):
        """Enable/disable a user; delegates to update_user, which revokes
        tokens when the account is being disabled."""
        return self.update_user(context, user_id, user)
    def set_user_password(self, context, user_id, user):
        """Change a user's password; delegates to update_user, which revokes
        the user's existing tokens."""
        return self.update_user(context, user_id, user)
    def update_user_tenant(self, context, user_id, user):
        """Update the user's default tenant (admin only).

        Security note (CWE-264 fix): admin rights are asserted and the user
        is made a member of the target tenant before the default is changed,
        so a default tenant cannot be set to one the user doesn't belong to.
        """
        self.assert_admin(context)
        # ensure that we're a member of that tenant
        tenant_id = user.get('tenantId')
        self.identity_api.add_user_to_tenant(context, tenant_id, user_id)
        return self.update_user(context, user_id, user)
class RoleController(wsgi.Application):
    def __init__(self):
        """Wire up the identity, token and policy managers used by the role API."""
        self.identity_api = Manager()
        self.token_api = token.Manager()
        self.policy_api = policy.Manager()
        super(RoleController, self).__init__()
# COMPAT(essex-3)
    def get_user_roles(self, context, user_id, tenant_id=None):
        """Get the roles for a user and tenant pair.

        Since we're trying to ignore the idea of user-only roles we're
        not implementing them in hopes that the idea will die off.

        Admin only.  Raises NotImplemented when no tenant_id is supplied,
        since user-only roles are deliberately unsupported.
        """
        self.assert_admin(context)
        if tenant_id is None:
            raise exception.NotImplemented(message='User roles not supported: '
                                           'tenant ID required')
        roles = self.identity_api.get_roles_for_user_and_tenant(
            context, user_id, tenant_id)
        # Expand each role id into its full role document for the response.
        return {'roles': [self.identity_api.get_role(context, x)
                          for x in roles]}
# CRUD extension
    def get_role(self, context, role_id):
        """Admin-only lookup of a single role by id."""
        self.assert_admin(context)
        return {'role': self.identity_api.get_role(context, role_id)}
def create_role(self, context, role):
role = self._normalize_dict(role)
self.assert_admin(context)
if not 'name' in role or not role['name']:
msg = 'Name field is required and cannot be empty'
raise exception.ValidationError(message=msg)
role_id = uu<|endoftext|> |
python | <fim-prefix>import gl<fim-middle>ob
import os
import sys
from collections import defaultdict
from functools import partial as curry
from . import (
biblio,
boilerplate,
caniuse,
conditional,
config,
constants<fix-suffix>,
datablocks,
dfns,
extensions,
fingerprinting,
h,
headings,
highlight,
idl,
includes,
inlineTags,
lint,
markdown,
mdnspeclinks,
metadata,
shorthands,
wpt,
)
from .func import Functor
from .h import *
from .InputSource import FileInputSource, InputSource
from .messages import *
from .refs import ReferenceManager
from .unsortedJunk import *
class Spec:
def __init__(
self,
inputFilename,
debug=False,
token=None,
lineNumbers=False,
fileRequester=None,
testing=False,
):
self.valid = False
self.lineNumbers = lineNumbers
if lineNumbers:
# line-numbers are too hacky, so force this to be a dry run
constants.dryRun = True
if inputFilename is None:
inputFilename = findImplicitInputFile()
if inputFilename is None: # still
die(
"No input file specified, and no *.bs or *.src.html files found in current directory.\nPlease specify an input file, or use - to pipe from STDIN."
)
return
# BUG: CWE-78 Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')
# self.inputSource = InputSource(inputFilename)
# FIXED:
self.inputSource = InputSource(inputFilename, chroot=constants.chroot)
self.transitiveDependencies = set()
self.debug = debug
self.token = to<|endoftext|> |
python | <fim-prefix># -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
from twisted.internet import defer
import synapse.types
from synapse.api.errors import AuthError, SynapseError
from synapse.types import UserID
from tests import unittest
from tests.test_utils import make_awaitable
from tests.utils import setup_test_homeserver
class ProfileTestCase(unittest.TestCase):
""" Tests profile management. """
    @defer.inlineCallbacks
    def setUp(self):
        """Build a test homeserver with mocked federation and one stored profile."""
        self.mock_federation = Mock()
        self.mock_registry = Mock()
        self.query_handlers = {}
        def register_query_handler(query_type, handler):
            # Capture registered handlers so tests can drive federation
            # queries directly against them.
            self.query_handlers[query_type] = handler
        self.mock_registry.register_query_handler = register_query_handler
        hs = yield setup_test_homeserver(
            self.addCleanup,
            # federation_http_client (renamed from http_client) is disabled so
            # profile tests cannot make real outbound federation requests.
            federation_http_client=None,
            resource_for_federation=Mock(),
            federation_client=self.mock_federation,
            federation_server=Mock(),
            federation_registry=self.mock_registry,
        )
        self.store = hs.get_datastore()
        # frank is local; alice is on a remote server (used for federation paths).
        self.frank = UserID.from_string("@1234ABCD:test")
        self.bob = UserID.from_string("@4567:test")
        self.alice = UserID.from_string("@alice:remote")
        yield defer.ensureDeferred(self.store.create_profile(self.frank.localpart))
        self.handler = hs.get_profile_handler()
        self.hs = hs
    @defer.inlineCallbacks
    def test_get_my_name(self):
        """A displayname stored locally is returned by the profile handler."""
        yield defer.ensureDeferred(
            self.store.set_profile_displayname(self.frank.localpart, "Frank")
        )
        displayname = yield defer.ensureDeferred(
            self.handler.get_displayname(self.frank)
        )
        self.assertEquals("Frank", displayname)
@defer.inlineCallbacks
def test_set_my_name(self):
yield defer.ensureDeferred(
self.handler.set_displayname(
self.frank, synapse.types.create_requester(self.frank), "Frank Jr."
)
)
self.assertEquals(
(
yield defer.ensureDeferred(
self.store.get_profile_displayname(self.frank.localpart)
)
),
"Frank Jr.",
)
# Set displayname again
yield defer.ensureDeferred(
self.handler.set_displayname(
self.frank, synapse.types.create_requester(self.frank), "Frank"
)
)
self.assertEquals(
(
yi<|endoftext|> |
python | <fim-prefix>anage_tabs_message='
'ZODBRoleManager+added.' %
dispatcher.absolute_url())
class ZODBRoleManager(BasePlugin):
""" PAS plugin for managing roles in the ZODB.
"""
meta_type = 'ZODB Role Manager'
zmi_icon = 'fas fa-user-tag'
security = ClassSecurityInfo()
    def __init__(self, id, title=None):
        """Store plugin id/title and create empty persistent mappings."""
        self._id = self.id = id
        self.title = title
        # role_id -> role info mapping (see enumerateRoles / addRole)
        self._roles = OOBTree()
        # principal_id -> sequence of role ids; also keyed by group ids
        # (see getRolesForPrincipal) — presumably tuples, confirm in assignRoleToPrincipal
        self._principal_roles = OOBTree()
    def manage_afterAdd(self, item, container):
        """Zope hook: seed this plugin with the surrounding role holder's roles.

        Copies roles from the container's parent (skipping the built-in
        Anonymous/Authenticated roles) and guarantees a Manager role exists.
        """
        if item is self:
            role_holder = aq_parent(aq_inner(container))
            for role in getattr(role_holder, '__ac_roles__', ()):
                try:
                    if role not in ('Anonymous', 'Authenticated'):
                        self.addRole(role)
                except KeyError:
                    # NOTE(review): assumes addRole raises KeyError for an
                    # already-registered role — safe to skip; confirm.
                    pass
            if 'Manager' not in self._roles:
                self.addRole('Manager')
#
# IRolesPlugin implementation
#
    @security.private
    def getRolesForPrincipal(self, principal, request=None):
        """ See IRolesPlugin.

        Returns the roles assigned directly to the principal plus those
        assigned to any of its groups, as a tuple (may contain duplicates).
        """
        result = list(self._principal_roles.get(principal.getId(), ()))
        # Principals without a getGroups method are treated as groupless.
        getGroups = getattr(principal, 'getGroups', lambda: ())
        for group_id in getGroups():
            result.extend(self._principal_roles.get(group_id, ()))
        return tuple(result)
#
# IRoleEnumerationPlugin implementation
#
# BUG: CWE-200 Exposure of Sensitive Information to an Unauthorized Actor
#
# FIXED:
<fim-middle> @security.private
def enumerateRoles(self, id=None, exact_match=False, sort_by=None,
max_results=None, **kw):
""" See IRoleEnumerationPlugin.
"""
<fix-suffix> role_info = []
role_ids = []
plugin_id = self.getId()
if isinstance(id, str):
id = [id]
if exact_match and (id):
role_ids.extend(id)
if role_ids:
role_filter = None
else: # Searching
role_ids = self.listRoleIds()
role_filter = _ZODBRoleFilter(id, **kw)
for role_id in role_ids:
if self._roles.get(role_id):
e_url = '%s/manage_roles' % self.getId()
p_qs = 'role_id=%s' % role_id
m_qs = 'role_id=%s&assign=1' % role_id
info = {}
info.update(self._roles[role_id])
info['pluginid'] = plugin_id
info['properties_url'] = '%s?%s' % (e_url, p_qs)
info['members_url'] = '%s?%s' % (e_url, m_qs)
if not role_filter or role_filter(info):
role_info.append(info)
return tuple(role_info)
#
# IRoleAssignerPlugin implementation
#
    @security.private
    def doAssignRoleToPrincipal(self, principal_id, role):
        """IRoleAssignerPlugin hook: delegate to assignRoleToPrincipal
        (note the swapped argument order)."""
        return self.assignRoleToPrincipal(role, principal_id)
    @security.private
    def doRemoveRoleFromPrincipal(self, principal_id, role):
        """IRoleAssignerPlugin hook: delegate to removeRoleFromPrincipal
        (note the swapped argument order)."""
        return self.removeRoleFromPrincipal(role, principal_id)
#
# Role management API
#
@security.protected(ManageUsers)
def listRoleIds(self):
""" Return a list of the role IDs managed by this obje<|endoftext|> |
python | <fim-prefix>device_display_name = pusherdict["device_display_name"]
self.pushkey = pusherdict["pushkey"]
self.pushkey_ts = pusherdict["ts"]
self.data = pusherdict["data"]
self.last_stream_ordering = pusherdict["last_stream_ordering"]
self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
self.failing_since = pusherdict["failing_since"]
self.timed_call = None
self._is_processing = False
self._group_unread_count_by_room = hs.config.push_group_unread_count_by_room
# This is the highest stream ordering we know it's safe to process.
# When new events arrive, we'll be given a window of new events: we
# should honour this rather than just looking for anything higher
# because of potential out-of-order event serialisation. This starts
# off as None though as we don't know any better.
self.max_stream_ordering = None
if "data" not in pusherdict:
raise PusherConfigException("No 'data' key for HTTP pusher")
self.data = pusherdict["data"]
self.name = "%s/%s/%s" % (
pusherdict["user_name"],
pusherdict["app_id"],
pusherdict["pushkey"],
)
if self.data is None:
raise PusherConfigException("data can not be null for HTTP pusher")
if "url" not in self.data:
raise PusherConfigException("'url' required in data for HTTP pusher")
self.url = self.data["url"]
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# self.http_client = hs.get_proxied_http_client()
# FIXED:
<fim-middle> self.http_client = hs.get_proxied_blacklisted_http_client()
self.data_minus_url = {}
self.data_minus_url.update(self.data)
del self.data_minus_url["url"]
def on_st<fix-suffix>arted(self, should_check_for_notifs):
"""Called when this pusher has been started.
Args:
should_check_for_notifs (bool): Whether we should immediately
check for push to send. Set to False only if it's known there
is nothing to send
"""
if should_check_for_notifs:
self._start_processing()
    def on_new_notifications(self, max_token: RoomStreamToken):
        """Advance the safe-to-process stream position and kick off processing.

        We just use the minimum stream ordering and ignore the vector clock
        component. This is safe to do as long as we *always* ignore the vector
        clock components.
        """
        max_stream_ordering = max_token.stream
        # Never move the high-water mark backwards; tokens can arrive
        # while an older position is still recorded.
        self.max_stream_ordering = max(
            max_stream_ordering, self.max_stream_ordering or 0
        )
        self._start_processing()
    def on_new_receipts(self, min_stream_id, max_stream_id):
        """Recompute the unread badge in the background when receipts arrive.

        Note that the min here shouldn't be relied upon to be accurate.
        We could check the receipts are actually m.read receipts here,
        but currently that's the only type of receipt anyway...
        """
        run_as_background_process("http_pusher.on_new_receipts", self._update_badge)
async def _update_badge(self):
# XXX as per https://github.com/matrix-org/matrix-doc/issues/2627, this seems
# to be largely redundant. perhaps we can remove it.
badge = await push_tools.get_badge_count(
self.hs.get_datastore(),
self.user_id,
group_b<|endoftext|> |
python | <fim-prefix>ecified, it will be ignored and a warning will be
issued.
When loading from a npz zip file, user can specify which variable within
the zip file to load for the input tensor inside the square brackets. If
nothing is specified, this function will check that only one file is
included in the zip and load it for the specified input tensor.
When loading from a pickle file, if no variable_name is specified in the
square brackets, whatever that is inside the pickle file will be passed
to the specified input tensor, else SavedModel CLI will assume a
dictionary is stored in the pickle file and the value corresponding to
the variable_name will be used.
input_exprs_str: A string that specifies python expressions for inputs.
* In the format of: '<input_key>=<python expression>'.
* numpy module is available as np.
input_examples_str: A string that specifies tf.Example with dictionary.
* In the format of: '<input_key>=<[{feature:value list}]>'
Returns:
A dictionary that maps input tensor keys to numpy ndarrays.
Raises:
RuntimeError: An error when a key is specified, but the input file contains
multiple numpy ndarrays, none of which matches the given key.
RuntimeError: An error when no key is specified, but the input file contains
more than one numpy ndarrays.
"""
tensor_key_feed_dict = {}
inputs = preprocess_inputs_arg_string(inputs_str)
# BUG: CWE-94 Improper Control of Generation of Code ('Code Injection')
# input_exprs = preprocess_input_exprs_arg_string(input_exprs_str, safe=False)
# FIXED:
<fim-middle> input_exprs = preprocess_input_exprs_arg_string(input_exprs_str)
input_examples = preprocess_input_examples_arg_string(input_examples_str)
for input_tensor_key, (filename, variable_name) in inp<fix-suffix>uts.items():
data = np.load(file_io.FileIO(filename, mode='rb'), allow_pickle=True) # pylint: disable=unexpected-keyword-arg
# When a variable_name key is specified for the input file
if variable_name:
# if file contains a single ndarray, ignore the input name
if isinstance(data, np.ndarray):
logging.warn(
'Input file %s contains a single ndarray. Name key \"%s\" ignored.'
% (filename, variable_name))
tensor_key_feed_dict[input_tensor_key] = data
else:
if variable_name in data:
tensor_key_feed_dict[input_tensor_key] = data[variable_name]
else:
raise RuntimeError(
'Input file %s does not contain variable with name \"%s\".' %
(filename, variable_name))
# When no key is specified for the input file.
else:
# Check if npz file only contains a single numpy ndarray.
if isinstance(data, np.lib.npyio.NpzFile):
variable_name_list = data.files
if len(variable_name_list) != 1:
raise RuntimeError(
'Input file %s contains more than one ndarrays. Please specify '
'the name of ndarray to use.' % filename)
tensor_key_feed_dict[input_tensor_key] = data[variable_name_list[0]]
else:
tensor_key_feed_dict[input_tensor_key] = data
# When input is a python expression:
for input_tensor_key, py_expr_evaluated in input_exprs.items():
if input_tensor_key in tensor_ke<|endoftext|> |
python | <fim-prefix># -*- cod<fim-middle>ing: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obt<fix-suffix>ain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
import synapse
import synapse.api.errors
from synapse.api.constants import EventTypes
from synapse.config.room_directory import RoomDirectoryConfig
from synapse.rest.client.v1 import directory, login, room
from synapse.types import RoomAlias, create_requester
from tests import unittest
from tests.test_utils import make_awaitable
class DirectoryTestCase(unittest.HomeserverTestCase):
""" Tests the directory service. """
def make_homeserver(self, reactor, clock):
self.mock_federation = Mock()
self.mock_registry = Mock()
self.query_handlers = {}
def register_query_handler(query_type, handler):
self.query_handlers[query_type] = handler
self.mock_registry.register_query_handler = register_query_handler
hs = self.setup_test_homeserver(
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# http_client=None,
# FIXED:
federation_http_client=None,
resource_for_federation=Mock(),
federation_client=self.mock_feder<|endoftext|> |
python | <fim-prefix>de must be integers for ICMP protocol type")
else:
raise exception.InvalidInput(reason="To and From ports "
"must be integers")
if ip_protocol.upper() not in ['TCP', 'UDP', 'ICMP']:
raise exception.InvalidIpProtocol(protocol=ip_protocol)
# Verify that from_port must always be less than
# or equal to to_port
if (ip_protocol.upper() in ['TCP', 'UDP'] and
from_port > to_port):
raise exception.InvalidPortRange(from_port=from_port,
to_port=to_port, msg="Former value cannot"
" be greater than the later")
# Verify valid TCP, UDP port ranges
if (ip_protocol.upper() in ['TCP', 'UDP'] and
(from_port < 1 or to_port > 65535)):
raise exception.InvalidPortRange(from_port=from_port,
to_port=to_port, msg="Valid TCP ports should"
" be between 1-65535")
# Verify ICMP type and code
if (ip_protocol.upper() == "ICMP" and
(from_port < -1 or from_port > 255 or
to_port < -1 or to_port > 255)):
raise exception.InvalidPortRange(from_port=from_port,
to_port=to_port, msg="For ICMP, the"
" type:code must be valid")
# BUG: CWE-20 Improper Input Validation
# values['protocol'] = ip_protocol
# FIXED:
<fim-middle> values['protocol'] = ip_protocol.lower()
values['from_port'] = from_port
values['to_port'] = to_port
else:
# If cidr based filtering, protocol a<fix-suffix>nd ports are mandatory
if 'cidr' in values:
return None
return values
    def delete(self, req, id):
        """Revoke (delete) a security group ingress rule by id.

        Returns HTTP 202 on success; 400 for a non-integer id and 404
        when the rule does not exist.  Triggers rule refreshes so the
        change propagates to instances in the affected group.
        """
        context = req.environ['nova.context']
        authorize(context)
        self.compute_api.ensure_default_security_group(context)
        try:
            id = int(id)
            rule = db.security_group_rule_get(context, id)
        except ValueError:
            msg = _("Rule id is not integer")
            raise exc.HTTPBadRequest(explanation=msg)
        except exception.NotFound:
            msg = _("Rule (%s) not found") % id
            raise exc.HTTPNotFound(explanation=msg)
        group_id = rule.parent_group_id
        # NOTE(review): ensure_default_security_group was already called
        # above — this second call looks redundant; confirm before removing.
        self.compute_api.ensure_default_security_group(context)
        security_group = db.security_group_get(context, group_id)
        msg = _("Revoke security group ingress %s")
        LOG.audit(msg, security_group['name'], context=context)
        db.security_group_rule_destroy(context, rule['id'])
        self.sgh.trigger_security_group_rule_destroy_refresh(
            context, [rule['id']])
        self.compute_api.trigger_security_group_rules_refresh(context,
            security_group_id=security_group['id'])
        return webob.Response(status_int=202)
class ServerSecurityGroupController(SecurityGroupControllerBase):
@wsgi.serializers(xml=SecurityGroupsTemplate)
def index(self, req, server_id):
"""Returns a list of security groups for the given instance.<|endoftext|> |
python | <fim-prefix># Copyrig<fim-middle>ht (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.website.website_gene<fix-suffix>rator import WebsiteGenerator
from frappe.website.render import clear_cache
from frappe.utils import today, cint, global_date_format, get_fullname, strip_html_tags, markdown, sanitize_html
from math import ceil
from frappe.website.utils import (find_first_image, get_html_content_based_on_type,
get_comment_list)
class BlogPost(WebsiteGenerator):
website = frappe._dict(
route = 'blog',
order_by = "published_on desc"
)
# BUG: CWE-79 Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')
#
# FIXED:
	@frappe.whitelist()
	def make_route(self):
		"""Return a default route (blog-category route + scrubbed title) when
		no route has been set yet."""
		if not self.route:
			return frappe.db.get_value('Blog Category', self.blog_category,
				'route') + '/' + self.scrub(self.title)
	def get_feed(self):
		"""Title used for the website feed entry of this post."""
		return self.title
def validate(self):
super(BlogPost, self).validate()
if not self.blog_intro:
content = get_html_content_based_on_type(self, 'content', self.content_type)
self.blog_intro = content[:200]
self.blog_intro = strip_html_tags(self.blog_intro)
if self.blog_intro:
self.blog_intro = self.blog_intro[:200]
if not self.meta_title:
self.meta_title = self.title[:60]
else:
self.meta_title = self.meta_title[:60]
if not self.meta_description:
self.meta_description = self.blog_intro[:140]
else:
self.meta_description = self.meta_description[:140]
if self.published and not self.published_on:
self.published_on = today()
if self.featured:
if not self.meta_image:
frappe.throw(_("A featured post<|endoftext|> |
python | <fim-prefix># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from future.utils import iteritems
import yaml
import json
from mlalchemy.errors import *
from mlalchemy.structures import *
from mlalchemy.constants import *
from mlalchemy.utils import *
import logging
logger = logging.getLogger(__name__)
__all__ = [
"parse_yaml_query",
"parse_json_query",
"parse_query",
"parse_query_fragment"
]
def parse_yaml_query(yaml_content):
    """Parses the given YAML string to attempt to extract a query.

    Args:
        yaml_content: A string containing YAML content.

    Returns:
        On success, the processed MLQuery object.
    """
    logger.debug("Attempting to parse YAML content:\n%s" % yaml_content)
    # safe_load restricts YAML to plain data types, preventing arbitrary
    # Python object construction from untrusted input (the previous
    # yaml.load call was unsafe).
    return parse_query(yaml.safe_load(yaml_content))
def parse_json_query(json_content):
    """Parses the given JSON string to attempt to extract a query.

    Args:
        json_content: A string containing JSON content.

    Returns:
        On success, the processed MLQuery object.
    """
    logger.debug("Attempting to parse JSON content:\n%s" % json_content)
    return parse_query(json.loads(json_content))
def parse_query(qd):
    """Build an MLQuery object from a pre-parsed query dictionary.

    Args:
        qd: A Python dictionary (pre-parsed from JSON/YAML) describing the
            query.

    Returns:
        On success, the processed MLQuery object.

    Raises:
        TypeError: if *qd* is not a dictionary.
        QuerySyntaxError: if the mandatory "from" key is missing.
    """
    if not isinstance(qd, dict):
        raise TypeError("Argument for query parsing must be a Python dictionary")
    if 'from' not in qd:
        raise QuerySyntaxError("Missing \"from\" argument in query")

    logger.debug("Attempting to parse query dictionary:\n%s" % json_dumps(qd, indent=2))

    if 'where' in qd:
        qf = parse_query_fragment(qd['where']).simplify()
    else:
        qf = None
    if isinstance(qf, MLClause):
        # Promote a bare clause to a top-level AND fragment.
        qf = MLQueryFragment(OP_AND, clauses=[qf])

    # Accept any of the three spellings, first one present wins.
    order_by = None
    for key in ('orderBy', 'order-by', 'order_by'):
        if key in qd:
            order_by = qd[key]
            break

    return MLQuery(
        qd['from'],
        query_fragment=qf,
        order_by=order_by,
        offset=qd.get('offset', None),
        limit=qd.get('limit', None)
    )
def parse_query_fragment(q, op=OP_AND, comp=COMP_EQ):
"""Parses the given query object for its query fragment only."""
if not isinstance(q, list) and not isinstance(q, dict):
raise TypeError("\"Where\" clause in query fragment must either be a list or a dictionary")
# ensure we're always dea<|endoftext|> |
python | <fim-prefix>it handler(request)
cache_control_value = "public; max-age={}".format(CACHE_MAX_AGE)
if request.path in NO_CACHE_ENDPOINTS or CACHE_MAX_AGE <= 0:
cache_control_value = "no-cache"
response.headers.setdefault("Cache-Control", cache_control_value)
return response
return middleware_handler
# Error page middlewares
def error_pages(overrides):
    """Build an aiohttp middleware factory that routes error statuses
    through per-status handlers.

    *overrides* maps an HTTP status code to an async callable
    ``(request, response_or_exception) -> response``. Statuses without an
    override fall back to ``handle_any`` (for HTTP exceptions) or pass
    through untouched; unexpected exceptions become a 503 via ``handle_500``.
    """
    async def middleware(app, handler):
        async def middleware_handler(request):
            try:
                resp = await handler(request)
                custom = overrides.get(resp.status)
                if custom is not None:
                    return await custom(request, resp)
                return resp
            except web.HTTPException as exc:
                custom = overrides.get(exc.status)
                if custom is not None:
                    return await custom(request, exc)
                return await handle_any(request, exc)
            except Exception as exc:
                return await handle_500(request, error=exc)

        return middleware_handler

    return middleware
async def handle_any(request, response):
    """Serialize any response/exception as a JSON body echoing its status."""
    payload = {
        "status": response.status,
        "message": response.reason
    }
    return web.json_response(payload, status=response.status)
async def handle_404(request, response):
    """404 handler.

    JSON responses pass through untouched. For other content types, a path
    with a trailing slash redirects to the slash-stripped path — re-anchored
    under a single leading '/' so the Location header can never become a
    scheme-relative (open) redirect. Anything else gets a JSON 404 body.
    """
    if 'json' in response.headers['Content-Type']:
        return response
    if request.path.endswith('/'):
        # Force a same-origin absolute path in the redirect target.
        return web.HTTPFound('/' + request.path.strip('/'))
    message = "Page '{}' not found".format(request.path)
    return web.json_response({"status": 404, "message": message}, status=404)
async def handle_500(request, response=None, error=None):
    """Log the unexpected failure and answer with a generic 503 JSON body."""
    logger.exception(error)
    body = {"status": 503, "message": "Service currently unavailable"}
    return web.json_response(body, status=503)
<|endoftext|> |
python | <fim-prefix>statements", e)
if add_reading:
reading_list.append(glueformula.meaning)
def parse_to_compiled(self, sentence):
    """Dependency-parse *sentence*, convert each parse to a glue-formula
    list, and compile every list."""
    compiled = []
    for depgraph in self.dep_parse(sentence):
        gfl = self.depgraph_to_glue(depgraph)
        compiled.append(self.gfl_to_compiled(gfl))
    return compiled
def dep_parse(self, sentence):
    """
    Return a dependency graph for the sentence.

    :param sentence: the sentence to be parsed
    :type sentence: list(str)
    :rtype: DependencyGraph
    """
    # Lazy-initialize the depparser; the import is deferred so nltk.parse
    # is only required when a parse is actually requested.
    if self.depparser is None:
        from nltk.parse import MaltParser

        self.depparser = MaltParser(tagger=self.get_pos_tagger())
    # Train on demand; train_depparser is expected to flip _trained.
    if not self.depparser._trained:
        self.train_depparser()
    return self.depparser.parse(sentence, verbose=self.verbose)
def depgraph_to_glue(self, depgraph):
    """Convert a dependency graph into a list of glue formulas."""
    glue_dict = self.get_glue_dict()
    return glue_dict.to_glueformula_list(depgraph)
def get_glue_dict(self):
    """Build a GlueDict from this parser's semantic-type file."""
    semtype_path = self.semtype_file
    return GlueDict(semtype_path)
def gfl_to_compiled(self, gfl):
    """Compile every glue formula in *gfl*.

    A single Counter is shared across all formulas so compiled premises
    receive globally unique indices.
    """
    shared_counter = Counter()
    compiled = []
    for formula in gfl:
        compiled.extend(formula.compile(shared_counter))

    if self.verbose:
        print("Compiled Glue Premises:")
        for premise in compiled:
            print(premise)

    return compiled
def get_pos_tagger(self):
    """Build a POS tagger for the glue pipeline.

    Returns a trigram→bigram→unigram backoff chain trained on the Brown
    "news" corpus, with a regexp tagger as the final fallback and a small
    regexp override layer on top that retags quantifier words.
    """
    from nltk.corpus import brown

    # Suffix/shape heuristics used when the trained taggers have no data.
    # Patterns are tried in order; the final catch-all tags as NN.
    regexp_tagger = RegexpTagger(
        [
            (r"^-?[0-9]+(\.[0-9]+)?$", "CD"),  # cardinal numbers
            (r"(The|the|A|a|An|an)$", "AT"),  # articles
            (r".*able$", "JJ"),  # adjectives
            (r".*ness$", "NN"),  # nouns formed from adjectives
            (r".*ly$", "RB"),  # adverbs
            (r".*s$", "NNS"),  # plural nouns
            (r".*ing$", "VBG"),  # gerunds
            (r".*ed$", "VBD"),  # past tense verbs
            (r".*", "NN"),  # nouns (default)
        ]
    )
    brown_train = brown.tagged_sents(categories="news")
    # Each n-gram tagger backs off to the next-simpler model.
    unigram_tagger = UnigramTagger(brown_train, backoff=regexp_tagger)
    bigram_tagger = BigramTagger(brown_train, backoff=unigram_tagger)
    trigram_tagger = TrigramTagger(brown_train, backoff=bigram_tagger)

    # Override particular words: quantifiers get glue-specific tags.
    main_tagger = RegexpTagger(
        [(r"(A|a|An|an)$", "ex_quant"), (r"(Every|every|All|all)$", "univ_quant")],
        backoff=trigram_tagger,
    )

    return main_tagger
class DrtGlueFormula(GlueFormula):
def __init__(self, meaning, glue, indices=None):
if not indices:
indices = set()
if isinstance(meaning, str):
self.meaning = drt.DrtExpression.fromstring(meaning)
elif isinstance(meaning, drt.DrtExpression):
self.meaning = meaning
else:
raise RuntimeError(
"Meaning term neither string or expression: %s, %s"
% (meaning, meaning.__class__)
)
if isinstance(glue, str):
self.glue = linearlogic.LinearLogicParser().parse(glue)
elif isinstance(glue, linearlogic.Expressi<|endoftext|> |
python | <fim-prefix><fim-middle>ing: utf-8 -*-
"""
pygments.lexers.templates
~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for various template engines' markup.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.<fix-suffix>
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexers.html import HtmlLexer, XmlLexer
from pygments.lexers.javascript import JavascriptLexer, LassoLexer
from pygments.lexers.css import CssLexer
from pygments.lexers.php import PhpLexer
from pygments.lexers.python import PythonLexer
from pygments.lexers.perl import PerlLexer
from pygments.lexers.jvm import JavaLexer, TeaLangLexer
from pygments.lexers.data import YamlLexer
from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
include, using, this, default, combined
from pygments.token import Error, Punctuation, Whitespace, \
Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
from pygments.util import html_doctype_matches, looks_like_xml
__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
<|endoftext|> |
python | <fim-prefix>ns Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests REST events for /rooms paths."""
import json
from urllib import parse as urlparse
from mock import Mock
import synapse.rest.admin
from synapse.api.constants import EventContentFields, EventTypes, Membership
from synapse.handlers.pagination import PurgeStatus
from synapse.rest.client.v1 import directory, login, profile, room
from synapse.rest.client.v2_alpha import account
from synapse.types import JsonDict, RoomAlias, UserID
from synapse.util.stringutils import random_string
from tests import unittest
from tests.test_utils import make_awaitable
PATH_PREFIX = b"/_matrix/client/api/v1"
class RoomBase(unittest.HomeserverTestCase):
    """Shared fixture base: a homeserver named "red" with federation stubbed out."""

    # User id that creates rooms; set by subclasses.
    rmcreator_id = None

    servlets = [room.register_servlets, room.register_deprecated_servlets]

    def make_homeserver(self, reactor, clock):
        # No outbound federation: the federation HTTP client is disabled and
        # the federation client replaced with a Mock.
        self.hs = self.setup_test_homeserver(
            "red", federation_http_client=None, federation_client=Mock(),
        )

        self.hs.get_federation_handler = Mock()
        # maybe_backfill must be awaitable; resolve immediately with None.
        self.hs.get_federation_handler.return_value.maybe_backfill = Mock(
            return_value=make_awaitable(None)
        )

        # Skip client-IP bookkeeping writes during tests.
        async def _insert_client_ip(*args, **kwargs):
            return None

        self.hs.get_datastore().insert_client_ip = _insert_client_ip

        return self.hs
class RoomPermissionsTestCase(RoomBase):
""" Tests room permissions. """
user_id = "@sid1:red"
rmcreator_id = "@notme:red"
def prepare(self, reactor, clock, hs):
self.helper.auth_user_id = self.rmcreator_id
# create some rooms under the name rmcreator_id
self.uncreated_rmid = "!aa:test"
self.created_rmid = self.helper.create_room_as(
self.rmcreator_id, is_public=False
)
self.created_public_rmid = self.helper.create_room_as(
self.rmcreator_id, is_public=True
)
# send a message in one of the rooms
self.created_rmid_msg_path = (
"rooms/%s/send/m.room.message/a1" % (self.created_rmid)
).encode("ascii")
request, channel = self.make_request(
"PUT", self.created_rmid_msg_path, b'{"msgtype":"m.text","body":"test msg"}'
)
self.assertEquals(200, channel.code, channel.result)
# set topic for public room
request, channel = self.make_request(
"PUT",
("rooms/%s/state/m.room.topic" % self.created_public_rmid).encode("ascii"),
b'{"topic":"Public Room Topic"}',
)
self.assertEquals(200, channel.code, channel.result)
<|endoftext|> |
python | <fim-prefix>import yaml
try:
from ansible.utils.vault import VaultLib
except ImportError:
# Ansible 2.0 has changed the vault location
from ansible.parsing.vault import VaultLib
class Vault(object):
    """Read and write ansible-vault encrypted YAML content."""

    def __init__(self, password):
        self.password = password
        self.vault = VaultLib(password)

    def load(self, stream):
        """Decrypt *stream* and parse it as YAML into a Python object.

        yaml.safe_load is used so the decrypted document cannot instantiate
        arbitrary Python objects.
        """
        decrypted = self.vault.decrypt(stream)
        return yaml.safe_load(decrypted)

    def dump(self, data, stream=None):
        """Encrypt *data* as YAML; write to *stream* if given, else return it."""
        plaintext = yaml.dump(
            data,
            default_flow_style=False,
            allow_unicode=True)
        encrypted = self.vault.encrypt(plaintext)
        if stream:
            stream.write(encrypted)
        else:
            return encrypted
<|endoftext|> |
python | <fim-prefix>dicator_out = self.evaluate(
[sp_output, empty_row_indicator])
self.assertAllEqual(output.indices, [[0, 0], [1, 0]])
self.assertAllEqual(output.values, [-1, -1])
self.assertAllEqual(output.dense_shape, [2, 5])
self.assertAllEqual(empty_row_indicator_out, np.ones(2).astype(np.bool_))
def testEmptyOutput(self):
    """Filling empty rows of a 0-row sparse tensor yields empty outputs."""
    with test_util.use_gpu():
        sp_input = sparse_tensor.SparseTensor(
            indices=np.ones([0, 2]),
            values=np.ones([0]),
            dense_shape=np.array([0, 3]))
        sp_output, empty_row_indicator = (
            sparse_ops.sparse_fill_empty_rows(sp_input, -1))
        output, empty_row_indicator_out = self.evaluate(
            [sp_output, empty_row_indicator])
        # Nothing to fill: indices/values stay empty, shape is preserved,
        # and the per-row indicator has zero entries.
        self.assertAllEqual(output.indices, np.ones([0, 2]))
        self.assertAllEqual(output.values, np.ones([0]))
        self.assertAllEqual(output.dense_shape, [0, 3])
        self.assertAllEqual(empty_row_indicator_out, [])
def testInvalidIndices(self):
    """Row index 99 lies outside dense_shape [2, 5]; the op must raise
    InvalidArgumentError instead of reading/writing out of bounds."""
    with test_util.use_gpu():
        sp_input = sparse_tensor.SparseTensor(
            indices=np.array([[1, 2], [1, 3], [99, 1], [99, 3]]),
            values=np.array([1, 3, 2, 4]),
            dense_shape=np.array([2, 5]))
        # The third entry (position 2, column 0) is the first invalid one.
        with self.assertRaisesRegex(errors.InvalidArgumentError,
                                    r"indices\(2, 0\) is invalid"):
            self.evaluate(sparse_ops.sparse_fill_empty_rows(sp_input, -1))
class SparseAddTest(test_util.TensorFlowTestCase):
    """Tests for sparse_ops.sparse_add."""

    def testValuesInVariable(self):
        """sparse_add must accept values backed by a tf.Variable.

        The single entry sits at row 0 — the only index valid for a tensor
        of dense_shape [1].
        """
        indices = constant_op.constant([[0]], dtype=dtypes.int64)
        values = variables.Variable([1], trainable=False, dtype=dtypes.float32)
        shape = constant_op.constant([1], dtype=dtypes.int64)

        sp_input = sparse_tensor.SparseTensor(indices, values, shape)
        sp_output = sparse_ops.sparse_add(sp_input, sp_input)

        with test_util.force_cpu():
            self.evaluate(variables.global_variables_initializer())
            output = self.evaluate(sp_output)
            # x + x at the single position: 1 + 1 == 2.
            self.assertAllEqual(output.values, [2])
class SparseReduceTest(test_util.TensorFlowTestCase):
# [[1, ?, 2]
# [?, 3, ?]]
# where ? is implicitly-zero.
ind = np.array([[0, 0], [0, 2], [1, 1]]).astype(np.int64)
vals = np.array([1, 1, 1]).astype(np.int32)
dense_shape = np.array([2, 3]).astype(np.int64)
def _compare(self, sp_t, reduction_axes, ndims, keep_dims, do_sum):
densified = self.evaluate(sparse_ops.sparse_tensor_to_dense(sp_t))
np_ans = densified
if reduction_axes is None:
if do_sum:
np_ans = np.sum(np_ans, keepdims=keep_dims)
else:
np_ans = np.max(np_ans, keepdims=keep_dims)
else:
if not isinstance(reduction_axes, list): # Single scalar.
reduction_axes = [reduction_axes]
reduction_axes = np.array(reduction_axes).astype(np.int32)
# Handles negative axes.
reduction_axes = (reduction_axes + ndims) % ndims
# Loop below depends on sorted.
reduction_axes.sort()
for ra in reduction_axes.ravel()[::-1]:
if do_sum:
np_ans = np.sum(np_ans, axis=ra, keepdims=keep_dims)
else:
np_ans = np.max(np_ans, axis=ra, keepdims=keep_dims)
with self.cached_session():
if do_sum:
<|endoftext|> |
python | <fim-prefix>.utils import cint, get_fullname, getdate, get_link_to_form
class EnergyPointLog(Document):
    """One energy-point transaction (automatic gain, review, or revert) for a user."""

    def validate(self):
        self.map_milestone_reference()
        # A user must not be able to award review points to themselves.
        if self.type in ['Appreciation', 'Criticism'] and self.user == self.owner:
            frappe.throw(_('You cannot give review points to yourself'))

    def map_milestone_reference(self):
        # link energy point to the original reference, if set by milestone
        if self.reference_doctype == 'Milestone':
            self.reference_doctype, self.reference_name = frappe.db.get_value('Milestone', self.reference_name,
                ['reference_type', 'reference_name'])

    def after_insert(self):
        # Push a realtime alert to the affected user, when one applies.
        alert_dict = get_alert_dict(self)
        if alert_dict:
            frappe.publish_realtime('energy_point_alert', message=alert_dict, user=self.user)

        # Invalidate the cached per-user points total and refresh displays.
        frappe.cache().hdel('energy_points', self.user)
        frappe.publish_realtime('update_points', after_commit=True)

        if self.type != 'Review':
            # 'Auto' points are earned by self.user; otherwise credit the giver.
            reference_user = self.user if self.type == 'Auto' else self.owner
            notification_doc = {
                'type': 'Energy Point',
                'document_type': self.reference_doctype,
                'document_name': self.reference_name,
                'subject': get_notification_message(self),
                'from_user': reference_user,
                'email_content': '<div>{}</div>'.format(self.reason) if self.reason else None
            }
            enqueue_create_notification(self.user, notification_doc)

    def on_trash(self):
        # Deleting a revert log un-reverts the entry it had reverted.
        if self.type == 'Revert':
            reference_log = frappe.get_doc('Energy Point Log', self.revert_of)
            reference_log.reverted = 0
            reference_log.save()

    # Whitelisted for client calls, but authorization is enforced inside:
    # only System Managers may revert unless ignore_permissions is set by
    # trusted server-side code.
    @frappe.whitelist()
    def revert(self, reason, ignore_permissions=False):
        if not ignore_permissions:
            frappe.only_for('System Manager')

        # Only automatically-earned points can be reverted, and only once.
        if self.type != 'Auto':
            frappe.throw(_('This document cannot be reverted'))

        if self.get('reverted'):
            return

        self.reverted = 1
        self.save(ignore_permissions=True)

        # A compensating log with negated points records the revert.
        revert_log = frappe.get_doc({
            'doctype': 'Energy Point Log',
            'points': -(self.points),
            'type': 'Revert',
            'user': self.user,
            'reason': reason,
            'reference_doctype': self.reference_doctype,
            'reference_name': self.reference_name,
            'revert_of': self.name
        }).insert(ignore_permissions=True)

        return revert_log
def get_notification_message(doc):
owner_name = get_fullname(doc.owner)
points = doc.points
title = get_title(doc.reference_doctype, doc.reference_name)
if doc.type == 'Auto':
owner_name = frappe.bold('You')
if points == 1:
message = _('{0} gained {1} point for {2} {3}')
else:
message = _('{0} gained {1} points for {2} {3}')
message = message.format(owner_name, frappe.bold(points), doc.rule, get_title_html(title))
elif doc.type == 'Appreciation':
if points == 1:
message = _('{0} appreciated your work on {1} with {2} point')
else:
message = _('{0} appreciated your work on {1} with {2} points')
message = message.format(frappe.bold(owner_name), get_title_html(title), frappe.bold(points))
elif doc.type == 'Criticism':
if points == 1:
message = _('{0} criticized your work on {1} with {2} point')
else:
message = _('{0} criticized your work on {1} with {2} points')
message = message.format(frappe.bold(owner_name), get_title_html(title), frappe.bold(points))
elif doc.type == 'Revert':
if poi<|endoftext|> |
python | <fim-prefix> h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
h.update(key_material)
return h.finalize()
def derive(self, key_material):
    """Run the full HKDF: extract a PRK from *key_material*, then expand it."""
    if not isinstance(key_material, bytes):
        raise TypeError("key_material must be bytes.")
    prk = self._extract(key_material)
    return self._hkdf_expand.derive(prk)
def verify(self, key_material, expected_key):
    """Raise InvalidKey unless deriving *key_material* yields *expected_key*.

    Comparison is constant-time to avoid a timing side channel.
    """
    derived = self.derive(key_material)
    if not constant_time.bytes_eq(derived, expected_key):
        raise InvalidKey
@utils.register_interface(KeyDerivationFunction)
class HKDFExpand(object):
    """HKDF expand-only stage (RFC 5869, section 2.3).

    Expands an already-extracted pseudorandom key into ``length`` bytes of
    output keying material. Single-use: ``derive`` may be called only once
    per instance.
    """

    def __init__(self, algorithm, length, info, backend):
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )
        self._algorithm = algorithm
        self._backend = backend

        # RFC 5869 caps the output at 255 * HashLen octets. digest_size is
        # already expressed in bytes (octets) — dividing it by 8 treated it
        # as bits, wrongly rejecting valid lengths above 255 * HashLen / 8
        # and disagreeing with the byte-based accounting in _expand below.
        max_length = 255 * algorithm.digest_size

        if length > max_length:
            raise ValueError(
                "Can not derive keys larger than {0} octets.".format(
                    max_length
                ))

        self._length = length

        if not (info is None or isinstance(info, bytes)):
            raise TypeError("info must be bytes.")

        if info is None:
            info = b""

        self._info = info
        self._used = False

    def _expand(self, key_material):
        # T(0) = empty; T(n) = HMAC(PRK, T(n-1) | info | n). Blocks are
        # concatenated until at least self._length bytes exist, then the
        # result is truncated.
        output = [b""]
        counter = 1

        # len(output) - 1 full digest-size blocks have been produced so far.
        while self._algorithm.digest_size * (len(output) - 1) < self._length:
            h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
            h.update(output[-1])
            h.update(self._info)
            h.update(six.int2byte(counter))
            output.append(h.finalize())
            counter += 1

        return b"".join(output)[:self._length]

    def derive(self, key_material):
        """Expand *key_material* (the PRK) into the output keying material.

        Raises AlreadyFinalized on a second call, TypeError for non-bytes.
        """
        if not isinstance(key_material, bytes):
            raise TypeError("key_material must be bytes.")

        if self._used:
            raise AlreadyFinalized

        self._used = True
        return self._expand(key_material)

    def verify(self, key_material, expected_key):
        """Raise InvalidKey unless derivation matches, in constant time."""
        if not constant_time.bytes_eq(self.derive(key_material), expected_key):
            raise InvalidKey
<|endoftext|> |
python | <fim-prefix>st/',
project_urls={
'Documentation': 'https://zope.readthedocs.io',
'Issue Tracker': 'https://github.com/zopefoundation/Zope/issues',
'Sources': 'https://github.com/zopefoundation/Zope',
},
license='ZPL 2.1',
description='Zope application server / web framework',
author='Zope Foundation and Contributors',
author_email='[email protected]',
long_description="\n\n".join([README, CHANGES]),
classifiers=[
'Development Status :: 6 - Mature',
"Environment :: Web Environment",
"Framework :: Zope :: 5",
"Intended Audience :: Developers",
"License :: OSI Approved :: Zope Public License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"Topic :: Software Development :: Libraries :: Application Frameworks",
],
packages=find_packages('src'),
namespace_packages=['Products', 'Shared', 'Shared.DC', 'zmi'],
package_dir={'': 'src'},
python_requires='>= 3.6',
install_requires=[
# BUG: CWE-915 Improperly Controlled Modification of Dynamically-Determined Object Attributes
# 'AccessControl >= 4.2',
# FIXED:
<fim-middle> 'AccessControl >= 5.2',
'Acquisition',
'BTrees',
'Chameleon >= 3.7.0',
'DateTime',
'DocumentTemplate >= 4.0',
'ExtensionClass',
'MultiMa<fix-suffix>pping',
'PasteDeploy',
'Persistence',
'RestrictedPython',
'ZConfig >= 2.9.2',
'ZODB',
'setuptools >= 36.2',
'transaction >= 2.4',
'waitress',
'zExceptions >= 3.4',
'z3c.pt',
'zope.browser',
'zope.browsermenu',
'zope.browserpage >= 4.4.0.dev0',
'zope.browserresource >= 3.11',
'zope.component',
'zope.configuration',
'zope.container',
'zope.contentprovider',
'zope.contenttype',
'zope.datetime',
'zope.deferredimport',
'zope.event',
'zope.exceptions',
'zope.globalrequest',
'zope.i18n [zcml]',
'zope.i18nmessageid',
'zope.interface >= 3.8',
'zope.lifecycleevent',
'zope.location',
'zope.pagetemplate >= 4.0.2',
'zope.processlifetime',
'zope.proxy',
'zope.ptresource',
'zope.publisher',
'zope.schema',
'zope.security',
'zope.sequencesort',
'zope.site',
'zope.size',
'zope.tal',
'zope.tales >= 5.0.2',
'zope.testbrowser',
'zope.testing',
'zope.traversing',
'zope.viewlet',
],
include_package_data=True,
zip_safe=False,
extras_require={
'docs': [
'Sphinx',
'sphinx_rtd_theme',
'tempstorage',
],
'wsgi': [
'Paste',
],
},
entry_points={
<|endoftext|> |
python | <fim-prefix># -*- cod<fim-middle>ing: utf-8 -*-
"""
This module provide some helpers for advanced types parsing.
You can define you own parser using the same pattern:
.. code-block:: python
def my_type(value):
if not c<fix-suffix>ondition:
raise ValueError('This is not my type')
return parse(value)
# Swagger documentation
my_type.__schema__ = {'type': 'string', 'format': 'my-custom-format'}
The last line allows you to document properly the type in the Swagger documentation.
"""
from __future__ import unicode_literals
import re
import socket
from datetime import datetime, time, timedelta
from email.utils import parsedate_tz, mktime_tz
from six.moves.urllib.parse import urlparse
import aniso8601
import pytz
# Constants for upgrading date-based intervals to full datetimes.
START_OF_DAY = time(0, 0, 0, tzinfo=pytz.UTC)
END_OF_DAY = time(23, 59, 59, 999999, tzinfo=pytz.UTC)


# Network-location part of a URL: optional basic auth, then
# localhost / IPv4 / IPv6 / domain, then an optional port.
netloc_regex = re.compile(
    r"(?:(?P<auth>[^:@]+?(?::[^:@]*?)?)@)?"  # basic auth
    r"(?:"
    r"(?P<localhost>localhost)|"  # localhost...
    r"(?P<ipv4>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})|"  # ...or ipv4
    r"(?:\[?(?P<ipv6>[A-F0-9]*:[A-F0-9:]+)\]?)|"  # ...or ipv6
    r"(?P<domain>(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?))"  # domain...
    r")"
    r"(?::(?P<port>\d+))?"  # optional port
    r"$",
    re.IGNORECASE,
)

# Loose e-mail shape check. The server part forbids '.' inside each label
# ([^@\.]+), so '.' appears only as an explicit separator — this removes the
# overlapping-alternatives backtracking that made the previous pattern
# vulnerable to catastrophic backtracking (ReDoS) on adversarial input.
email_regex = re.compile(
    r"^" "(?P<local>[^@]*[^@.])" r"@" r"(?P<server>[^@\.]+(?:\.[^@\.]+)*)" r"$",
    re.IGNORECASE,
)

# HH:MM fragment used when parsing time strings.
time_regex = re.compile(r"\d{2}:\d{2}")
def ipv4(value):
"""V<|endoftext|> |
python | <fim-prefix><fim-middle>n/env python
'''
MIT License
Copyright (c) 2019 Tenable Network Security, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentat<fix-suffix>ion files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import click, logging, time, yaml, json, platform, sys, os, arrow
from tenable.io import TenableIO
from tenable.sc import TenableSC
from .config import base_config
from restfly.utils import dict_merge
from .jira import Jira
from .transform import Tio2Jira
from . import __version__
# Markdown body template for a troubleshooting report; str.format fills
# {configfile}, {logging}, and {issuetypes}.
troubleshooting = '''
### Configuration File:
```yaml
{configfile}
```
### Debug Logs
```
{logging}
```
### Available IssueTypes
```yaml
{issuetypes}
```
'''
@click.command()
@click.option('--observed-since', '-s', envvar='SINCE', default=0,
type=click.INT, help='The unix timestamp of the age threshol<|endoftext|> |
python | <fim-prefix> Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Common constants and global variables.
"""
import logging
from paramiko.py3compat import byte_chr, PY2, bytes_types, text_type, long
# SSH message type codes. The numeric bands follow the SSH protocol's
# message-number assignments (transport, userauth, connection layers).
MSG_DISCONNECT, MSG_IGNORE, MSG_UNIMPLEMENTED, MSG_DEBUG, \
    MSG_SERVICE_REQUEST, MSG_SERVICE_ACCEPT = range(1, 7)
MSG_KEXINIT, MSG_NEWKEYS = range(20, 22)
MSG_USERAUTH_REQUEST, MSG_USERAUTH_FAILURE, MSG_USERAUTH_SUCCESS, \
    MSG_USERAUTH_BANNER = range(50, 54)
MSG_USERAUTH_PK_OK = 60
# IDs 60-66 are method-specific userauth messages; keyboard-interactive and
# GSSAPI deliberately reuse overlapping numbers within this band.
MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE = range(60, 62)
MSG_USERAUTH_GSSAPI_RESPONSE, MSG_USERAUTH_GSSAPI_TOKEN = range(60, 62)
MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE, MSG_USERAUTH_GSSAPI_ERROR,\
    MSG_USERAUTH_GSSAPI_ERRTOK, MSG_USERAUTH_GSSAPI_MIC = range(63, 67)
# Upper bound (inclusive) of the userauth message-ID band. NOTE(review):
# presumably used by the transport to reject out-of-band messages while
# authentication is still in progress — confirm at the call sites.
HIGHEST_USERAUTH_MESSAGE_ID = 79
MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE = range(80, 83)
MSG_CHANNEL_OPEN, MSG_CHANNEL_OPEN_SUCCESS, MSG_CHANNEL_OPEN_FAILURE, \
    MSG_CHANNEL_WINDOW_ADJUST, MSG_CHANNEL_DATA, MSG_CHANNEL_EXTENDED_DATA, \
    MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE, MSG_CHANNEL_REQUEST, \
    MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE = range(90, 101)

# Single-byte (wire-format) versions of the codes above, used when
# assembling outgoing packets.
cMSG_DISCONNECT = byte_chr(MSG_DISCONNECT)
cMSG_IGNORE = byte_chr(MSG_IGNORE)
cMSG_UNIMPLEMENTED = byte_chr(MSG_UNIMPLEMENTED)
cMSG_DEBUG = byte_chr(MSG_DEBUG)
cMSG_SERVICE_REQUEST = byte_chr(MSG_SERVICE_REQUEST)
cMSG_SERVICE_ACCEPT = byte_chr(MSG_SERVICE_ACCEPT)
cMSG_KEXINIT = byte_chr(MSG_KEXINIT)
cMSG_NEWKEYS = byte_chr(MSG_NEWKEYS)
cMSG_USERAUTH_REQUEST = byte_chr(MSG_USERAUTH_REQUEST)
cMSG_USERAUTH_FAILURE = byte_chr(MSG_USERAUTH_FAILURE)
cMSG_USERAUTH_SUCCESS = byte_chr(MSG_USERAUTH_SUCCESS)
cMSG_USERAUTH_BANNER = byte_chr(MSG_USERAUTH_BANNER)
cMSG_USERAUTH_PK_OK = byte_chr(MSG_USERAUTH_PK_OK)
cMSG_USERAUTH_INFO_REQUEST = byte_chr(MSG_USERAUTH_INFO_REQUEST)
cMSG_USERAUTH_INFO_RESPONSE = byte_chr(MSG_USERAUTH_INFO_RESPONSE)
cMSG_USERAUTH_GSSAPI_RESPONSE = byte_chr(MSG_USERAUTH_GSSAPI_RESPONSE)
cMSG_USERAUTH_GSSAPI_TOKEN = byte_chr(MSG_USERAUTH_GSSAPI_TOKEN)
cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE = \
    byte_chr(MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE)
cMSG_USERAUTH_GSSAPI_ERROR = byte_chr(MSG_USERAUTH_GSSAPI_ERROR)
cMSG_USERAUTH_GSSAPI_ERRTOK = byte_chr(MSG_USERAUTH_GSSAPI_ERRTOK)
cMSG_USERAUTH_GSSAPI_MIC = byte_chr(MSG_USERAUTH_GSSAPI_MIC)
cMSG_GLOBAL_REQUEST = byte_chr(MSG_GLOBAL_REQUEST)
cMSG_REQUEST_SUCCESS = byte_chr(MSG_REQUEST_SUCCESS)
cMSG_REQUEST_FAILURE = byte_chr(MSG_REQUEST_FAI<|endoftext|> |
python | <fim-prefix># -*- cod<fim-middle>ing: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file ex<fix-suffix>cept in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import Mock
from synapse.api.constants import Membership
from synapse.rest.admin import register_servlets_for_client_rest_resource
from synapse.rest.client.v1 import login, room
from synapse.types import UserID, create_requester
from tests import unittest
from tests.test_utils import event_injection
from tests.utils import TestHomeServer
class RoomMemberStoreTestCase(unittest.HomeserverTestCase):
servlets = [
login.register_servlets,
register_servlets_for_client_rest_resource,
room.register_servlets,
]
def make_homeserver(self, reactor, clock):
    """Create the test homeserver with federation stubbed out."""
    return self.setup_test_homeserver(
        resource_for_federation=Mock(), federation_http_client=None
    )
def prepare(self, reactor, clock, hs: TestHomeServer):
# We can't test the RoomMemberStore on its own without the other e<|endoftext|> |
python | <fim-prefix><fim-middle>n/env python
# -*- coding: utf-8 -*-
# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2012-2019 OzzieIsaacs
#
# This program is free software: yo<fix-suffix>u can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
import os
# Insert local directories into path: make the application's own directory
# and its bundled `vendor` directory importable. Python 2 exposes __file__
# as bytes, so it is decoded before use there.
if sys.version_info < (3, 0):
    sys.path.append(os.path.dirname(os.path.abspath(__file__.decode('utf-8'))))
    sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__.decode('utf-8'))), 'vendor'))
else:
    sys.path.append(os.path.dirname(os.path.abspath(__file__)))
    sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'vendor'))
from cps import create_app
from cps import web_server
from cps.opds import opds
from cps.web import web
from cps.jinjia import jinjia
from cps.about import about
from cps.shelf import shelf
from cps.admin import admi
from cps.gdrive import gdrive
from cps.editbooks import editbook
from cps.remotelogin import remotelogin
from cps.search_metadata import meta
from cps.error_h<|endoftext|> |
python | <fim-prefix> else:
if fwtype == 'ACCEPT' and not '# OMR ' + username + ' open ' + name + ' port ' + proto + ' to ' + source_dip in line:
n.write(line)
elif fwtype == 'DNAT' and not '# OMR ' + username + ' redirect ' + name + ' port ' + proto + ' to ' + source_dip in line:
n.write(line)
os.close(fd)
move(tmpfile, '/etc/shorewall6/rules')
final_md5 = hashlib.md5(file_as_bytes(open('/etc/shorewall6/rules', 'rb'))).hexdigest()
if initial_md5 != final_md5:
os.system("systemctl -q reload shorewall6")
def set_lastchange(sync=0):
with open('/etc/openmptcprouter-vps-admin/omr-admin-config.json') as f:
content = f.read()
content = re.sub(",\s*}", "}", content) # pylint: disable=W1401
try:
data = json.loads(content)
except ValueError as e:
return {'error': 'Config file not readable', 'route': 'lastchange'}
data["lastchange"] = time.time() + sync
if data:
with open('/etc/openmptcprouter-vps-admin/omr-admin-config.json', 'w') as outfile:
json.dump(data, outfile, indent=4)
else:
LOG.debug("Empty data for set_last_change")
with open('/etc/openmptcprouter-vps-admin/omr-admin-config.json') as f:
omr_config_data = json.load(f)
if 'debug' in omr_config_data and omr_config_data['debug']:
LOG.setLevel(logging.DEBUG)
fake_users_db = omr_config_data['users'][0]
def verify_password(plain_password, user_password):
# BUG: CWE-287 Improper Authentication
# if plain_password == user_password:
# FIXED:
<fim-middle> if secrets.compare_digest(plain_password,user_password):
LOG.debug("password true")
return True
return False
def get_password_hash(password):
return password
def get_user<fix-suffix>(db, username: str):
if username in db:
user_dict = db[username]
return UserInDB(**user_dict)
def authenticate_user(fake_db, username: str, password: str):
user = get_user(fake_db, username)
if not user:
LOG.debug("user doesn't exist")
return False
if not verify_password(password, user.user_password):
LOG.debug("wrong password")
return False
return user
class Token(BaseModel):
access_token: str = None
token_type: str = None
class TokenData(BaseModel):
username: str = None
class User(BaseModel):
username: str
vpn: str = None
vpn_port: int = None
vpn_client_ip: str = None
permissions: str = 'rw'
shadowsocks_port: int = None
disabled: bool = 'false'
userid: int = None
class UserInDB(User):
user_password: str
# Add support for auth before seeing doc
class OAuth2PasswordBearerCookie(OAuth2):
def __init__(
self,
tokenUrl: str,
scheme_name: str = None,
scopes: dict = None,
auto_error: bool = True,
):
if not scopes:
scopes = {}
flows = OAuthFlowsModel(password={"tokenUrl": tokenUrl, "scopes": scopes})
super().__init__(flows=flows, scheme_name=scheme_name, auto_error=auto_error)
async def __call__(self, request: Request) -> Optional[str]:
header_authorization: str = request.headers.get("Authorization")
cookie_authorization: str = request.co<|endoftext|> |
python | <fim-prefix> os.unlink(fake_filepath)
return result
def minimal_headers(name, public=True):
headers = {
'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': name,
'X-Image-Meta-disk_format': 'raw',
'X-Image-Meta-container_format': 'ovf',
}
if public:
headers['X-Image-Meta-Is-Public'] = 'True'
return headers
def minimal_add_command(port, name, suffix='', public=True):
visibility = 'is_public=True' if public else ''
return ("bin/glance --port=%d add %s"
" disk_format=raw container_format=ovf"
" name=%s %s" % (port, visibility, name, suffix))
class FakeAuthMiddleware(wsgi.Middleware):
def __init__(self, app, is_admin=False):
super(FakeAuthMiddleware, self).__init__(app)
self.is_admin = is_admin
def process_request(self, req):
auth_tok = req.headers.get('X-Auth-Token')
user = None
tenant = None
roles = []
if auth_tok:
user, tenant, role = auth_tok.split(':')
if tenant.lower() == 'none':
tenant = None
roles = [role]
req.headers['X-User-Id'] = user
req.headers['X-Tenant-Id'] = tenant
req.headers['X-Roles'] = role
req.headers['X-Identity-Status'] = 'Confirmed'
kwargs = {
'user': user,
'tenant': tenant,
'roles': roles,
'is_admin': self.is_admin,
# BUG: CWE-264 Permissions, Privileges, and Access Controls
#
# FIXED:
<fim-middle> 'auth_tok': auth_tok,
}
req.context = context.RequestContext(**kwargs)
class FakeHTTPResponse(object):
def __init__(self, status=200, headers=None, data=None, *args,<fix-suffix> **kwargs):
data = data or 'I am a teapot, short and stout\n'
self.data = StringIO.StringIO(data)
self.read = self.data.read
self.status = status
self.headers = headers or {'content-length': len(data)}
def getheader(self, name, default=None):
return self.headers.get(name.lower(), default)
def getheaders(self):
return self.headers or {}
def read(self, amt):
self.data.read(amt)
<|endoftext|> |
python | <fim-prefix><fim-middle>es for Repo providers.
Subclass the base class, ``RepoProvider``, to support different version
control services and providers.
.. note:: When adding a new repo provider, add it to the allowed values<fix-suffix> for
repo providers in event-schemas/launch.json.
"""
from datetime import timedelta, datetime, timezone
import json
import os
import time
import urllib.parse
import re
import subprocess
import escapism
from prometheus_client import Gauge
from tornado.httpclient import AsyncHTTPClient, HTTPError, HTTPRequest
from tornado.httputil import url_concat
from traitlets import Dict, Unicode, Bool, default, List
from traitlets.config import LoggingConfigurable
from .utils import Cache
GITHUB_RATE_LIMIT = Gauge('binderhub_github_rate_limit_remaining', 'GitHub rate limit remaining')
SHA1_PATTERN = re.compile(r'[0-9a-f]{40}')
def tokenize_spec(spec):
"""Tokenize a GitHub-style spec into parts, error if spec invalid."""
spec_parts = spec.split('/', 2) # allow ref to contain "/"
if len(spec_parts) != 3:
msg = 'Spec is not of the form "user/repo/ref", provided: "{spec}".'.format(spec=spec)
if len(spec_parts) == 2 and spec_parts[-1] != 'master':
msg += ' Did you mean "{spec}/master"?'.format(spec=spec)
raise ValueError(msg)
return spec_parts
def strip_suffix(text, suffix):
if text.endswith(suffix):
text = text[:-(len(suffix))]
return text
class RepoProvider(LoggingConfigurable):
"""Base class for a repo provider"""
name = Unicode(
help="""
Descriptive human readable name of this repo provider.
"""
)
spec = Unicode(
help="""
The spec for this bui<|endoftext|> |
python | <fim-prefix># -*- cod<fim-middle>ing: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json, math
from frap<fix-suffix>pe.model.document import Document
from frappe import _
from frappe.utils import cstr
from frappe.data_migration.doctype.data_migration_mapping.data_migration_mapping import get_source_value
class DataMigrationRun(Document):
# BUG: CWE-79 Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')
#
# FIXED:
@frappe.whitelist()
def run(self):
self.begin()
if self.total_pages > 0:
self.enqueue_next_mapping()
else:
self.complete()
def enqueue_next_mapping(self):
next_mapping_name = self.get_next_mapping_name()
if next_mapping_name:
next_mapping = self.get_mapping(next_mapping_name)
self.db_set(dict(
current_mapping = next_mapping.name,
current_mapping_start = 0,
current_mapping_delete_start = 0,
current_mapping_action = 'Insert'
), notify=True, commit=True)
frappe.enqueue_doc(self.doctype, self.name, 'run_current_mapping', now=frappe.flags.in_test)
else:
self.complete()
def enqueue_next_page(self):
mapping = self.get_mapping(self.current_mapping)
percent_complete = self.percent_complete + (100.0 / self.total_pages)
fields = dict(
percent_complete = percent_complete
)
if self.current_mapping_action == 'Insert':
start = self.current_mapping_start + mapping.page_length
fields['current_mapping_start'] = start
elif self.current_mapping_action == 'Delete':
delete_start = self.current_mapping_delete_start + mapping.page_length
fields['current_mapping_delete_start'] = delet<|endoftext|> |
python | <fim-prefix><fim-middle>###################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
<fix-suffix># regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import logging
from ansible_runner.config._base import BaseConfig, BaseExecutionMode
from ansible_runner.exceptions import ConfigurationError
from ansible_runner.utils import get_executable_path
logger = logging.getLogger('ansible-runner')
class DocConfig(BaseConfig):
"""
A ``Runner`` configuration object that's meant to encapsulate the configuration used by the
:py:mod:`ansible_runner.runner.DocConfig` object to launch and manage the invocation of
command execution.
Typically this object is initialized for you when using the standard ``get_plugin_docs`` or ``get_plugin_list`` interfaces
in :py:mod:`ansible_runner.interface` but can be used to construct the ``DocConfig`` configuration to be invoked elsewhere.
It can also be overridden to provide different functionality to the DocConfig object.
:Example:
>>> dc = DocConfig(...)
>>> r =<|endoftext|> |
python | <fim-prefix># Copyrig<fim-middle>ht (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import frappe.utils
from f<fix-suffix>rappe import throw, _
from frappe.website.website_generator import WebsiteGenerator
from frappe.utils.verified_command import get_signed_params, verify_request
from frappe.email.queue import send
from frappe.email.doctype.email_group.email_group import add_subscribers
from frappe.utils import parse_addr, now_datetime, markdown, validate_email_address
class Newsletter(WebsiteGenerator):
def onload(self):
if self.email_sent:
self.get("__onload").status_count = dict(frappe.db.sql("""select status, count(name)
from `tabEmail Queue` where reference_doctype=%s and reference_name=%s
group by status""", (self.doctype, self.name))) or None
def validate(self):
self.route = "newsletters/" + self.name
if self.send_from:
validate_email_address(self.send_from, True)
def test_send(self, doctype="Lead"):
self.recipients = frappe.utils.split_emails(self.test_email_id)
self.queue_all(test_email=True)
frappe.msgprint(_("Test email sent to {0}").format(self.test_email_id))
# BUG: CWE-79 Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')
#
# FIXED:
@frappe.whitelist()
def send_emails(self):
"""send emails to leads and customers"""
if self.email_sent:
throw(_("Newsletter has already been sent"))
self.recipients = self.get_recipients()
if self.recipients:
self.queue_all()
frappe.msgprint(_("Email queued to {0} recipients").format(len(self.recipients)))
else:
frappe.msgprint(_("Newsletter should have atl<|endoftext|> |
python | <fim-prefix>from pygments.lexer import RegexLexer, include, bygroups, using, default
from pygments.token import Text, Comment, Name, Literal, Number, String, \
Punctuation, Keyword, Operator, Generic
__all__ = ['OdinLexer', 'CadlLexer', 'AdlLexer']
class AtomsLexer(RegexLexer):
"""
Lexer for Values used in ADL and ODIN.
.. versionadded:: 2.1
"""
tokens = {
# ----- pseudo-states for inclusion -----
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'[ \t]*--.*$', Comment),
],
'archetype_id': [
(r'[ \t]*([a-zA-Z]\w+(\.[a-zA-Z]\w+)*::)?[a-zA-Z]\w+(-[a-zA-Z]\w+){2}'
r'\.\w+[\w-]*\.v\d+(\.\d+){,2}((-[a-z]+)(\.\d+)?)?', Name.Decorator),
],
'date_constraints': [
# ISO 8601-based date/time constraints
(r'[Xx?YyMmDdHhSs\d]{2,4}([:-][Xx?YyMmDdHhSs\d]{2}){2}', Literal.Date),
# ISO 8601-based duration constraints + optional trailing slash
(r'(P[YyMmWwDd]+(T[HhMmSs]+)?|PT[HhMmSs]+)/?', Literal.Date),
],
'ordered_values': [
# ISO 8601 date with optional 'T' ligature
(r'\d{4}-\d{2}-\d{2}T?', Literal.Date),
# ISO 8601 time
(r'\d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{4}|Z)?', Literal.Date),
# ISO 8601 duration
(r'P((\d*(\.\d+)?[YyMmWwDd]){1,3}(T(\d*(\.\d+)?[HhMmSs]){,3})?|'
r'T(\d*(\.\d+)?[HhMmSs]){,3})', Literal.Date),
# BUG: NVD-CWE-Other Other
# (r'[+-]?(\d+)*\.\d+%?', Number.Float),
# FIXED:
<fim-middle> (r'[+-]?(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
(r'[+-]?\d*\.\d+%?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[+-]?\d+%?', Number.In<fix-suffix>teger),
],
'values': [
include('ordered_values'),
(r'([Tt]rue|[Ff]alse)', Literal),
(r'"', String, 'string'),
(r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
(r'[a-z][a-z0-9+.-]*:', Literal, 'uri'),
# term code
(r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)(\w[\w-]*)(\])',
bygroups(Punctuation, Name.Decorator, Punctuation, Name.Decorator,
Punctuation)),
(r'\|', Punctuation, 'interval'),
# list continuation
(r'\.\.\.', Punctuation),
],
'constraint_values': [
(r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)',
bygroups(Punctuation, Name.Decorator, Punctuation), 'adl14_code_constraint'),
# ADL 1.4 ordinal constraint
(r'(\d*)(\|)(\[\w[\w-]*::\w[\w-]*\])((?:[,;])?)',
bygroups(Number, Punctuation, Name.Decorator, Punctuation)),
include('date_constraints'),
include('values'),
],
# ----- real states -----
'string': [
('"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
# all other characters
(r'[^\\"]+', String),
# stray backslash
(r'\\', String),
],
'uri': [
# effective URI terminators
(r'[,>\<|endoftext|> |
python | <fim-prefix>
# case completion: return 1
if not match_allowed or match_denied:
if not completion:
ret, conf = warn_count('path',
tomatch,
conf,
strict=strict,
ssh=ssh)
return 1, conf
if not completion:
if not re.findall(allowed_path_re, os.getcwd() + '/'):
ret, conf = warn_count('path',
tomatch,
conf,
strict=strict,
ssh=ssh)
os.chdir(conf['home_path'])
conf['promptprint'] = utils.updateprompt(os.getcwd(),
conf)
return 1, conf
return 0, conf
def check_secure(line, conf, strict=None, ssh=None):
"""This method is used to check the content on the typed command.
Its purpose is to forbid the user to user to override the lshell
command restrictions.
The forbidden characters are placed in the 'forbidden' variable.
Feel free to update the list. Emptying it would be quite useless..: )
A warning counter has been added, to kick out of lshell a user if he
is warned more than X time (X being the 'warning_counter' variable).
"""
# store original string
oline = line
# strip all spaces/tabs
# BUG: CWE-264 Permissions, Privileges, and Access Controls
# line = " ".join(line.split())
# FIXED:
<fim-middle> line = line.strip()
# init return code
returncode = 0
# This logic is kept crudely simple on purpose.
# At most we might match the same stanza twice
# (for e.g. "'a'", 'a') b<fix-suffix>ut the converse would
# require detecting single quotation stanzas
# nested within double quotes and vice versa
relist = re.findall(r'[^=]\"(.+)\"', line)
relist2 = re.findall(r'[^=]\'(.+)\'', line)
relist = relist + relist2
for item in relist:
if os.path.exists(item):
ret_check_path, conf = check_path(item, conf, strict=strict)
returncode += ret_check_path
# ignore quoted text
line = re.sub(r'\"(.+?)\"', '', line)
line = re.sub(r'\'(.+?)\'', '', line)
if re.findall('[:cntrl:].*\n', line):
ret, conf = warn_count('syntax',
oline,
conf,
strict=strict,
ssh=ssh)
return ret, conf
for item in conf['forbidden']:
# allow '&&' and '||' even if singles are forbidden
if item in ['&', '|']:
if re.findall("[^\%s]\%s[^\%s]" % (item, item, item), line):
ret, conf = warn_count('syntax',
oline,
conf,
strict=strict,
ssh=ssh)
return ret, conf
else:
if item in line:
ret, conf = warn_count('syntax',
oline,
conf,
strict=<|endoftext|> |
python | <fim-prefix><fim-middle> Language Toolkit: Comparative Sentence Corpus Reader
#
# Copyright (C) 2001-2021 NLTK Project
# Author: Pierpaolo Pantone <[email protected]>
# URL: <http://nltk.org/>
# For license information, <fix-suffix>see LICENSE.TXT
"""
CorpusReader for the Comparative Sentence Dataset.
- Comparative Sentence Dataset information -
Annotated by: Nitin Jindal and Bing Liu, 2006.
Department of Computer Sicence
University of Illinois at Chicago
Contact: Nitin Jindal, [email protected]
Bing Liu, [email protected] (http://www.cs.uic.edu/~liub)
Distributed with permission.
Related papers:
- Nitin Jindal and Bing Liu. "Identifying Comparative Sentences in Text Documents".
Proceedings of the ACM SIGIR International Conference on Information Retrieval
(SIGIR-06), 2006.
- Nitin Jindal and Bing Liu. "Mining Comprative Sentences and Relations".
Proceedings of Twenty First National Conference on Artificial Intelligence
(AAAI-2006), 2006.
- Murthy Ganapathibhotla and Bing Liu. "Mining Opinions in Comparative Sentences".
Proceedings of the 22nd International Conference on Computational Linguistics
(Coling-2008), Manchester, 18-22 August, 2008.
"""
import re
from nltk.corpus.reader.api import *
from nltk.tokenize import *
# Regular expressions for dataset components
STARS = re.compile(r"^\*+$")
COMPARISON = re.compile(r"<cs-[1234]>")
CLOSE_COMPARISON = re.compile(r"</cs-[1234]>")
GRAD_COMPARISON = re.compile(r"<cs-[123]>")
NON_GRAD_COMPARISON = re.compile(r"<cs-4>")
ENTITIES_FEATS = re.compile(r"(\d)_((?:[\.\w\s/-](?!\d_))+)")
# BUG: CWE-697 Incorrect Comparison
# KEYWORD = re.compile(r"\((?!.*\()(.*)\)$")
# FIXED:
KEYWORD = re.compile(r"\<|endoftext|> |
python | <fim-prefix>import gz<fim-middle>ip
import json
import pytest
from fastapi import Request
from fastapi.testclient import TestClient
from docs_src.custom_request_and_route.tutorial001 import app
@app.get("/check-class")
async def <fix-suffix>check_gzip_request(request: Request):
return {"request_class": type(request).__name__}
client = TestClient(app)
@pytest.mark.parametrize("compress", [True, False])
def test_gzip_request(compress):
n = 1000
headers = {}
body = [1] * n
data = json.dumps(body).encode()
if compress:
data = gzip.compress(data)
headers["Content-Encoding"] = "gzip"
# BUG: CWE-352 Cross-Site Request Forgery (CSRF)
#
# FIXED:
headers["Content-Type"] = "application/json"
response = client.post("/sum", data=data, headers=headers)
assert response.json() == {"sum": n}
def test_request_class():
response = client.get("/check-class")
assert response.json() == {"request_class": "GzipRequest"}
<|endoftext|> |
python | <fim-prefix># -*- cod<fim-middle>ing: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file ex<fix-suffix>cept in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import Mock
from canonicaljson import json
from twisted.internet import defer
from synapse.api.constants import EventTypes, Membership
from synapse.api.room_versions import RoomVersions
from synapse.types import RoomID, UserID
from tests import unittest
from tests.utils import create_room
class RedactionTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
config = self.default_config()
config["redaction_retention_period"] = "30d"
return self.setup_test_homeserver(
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# resource_for_federation=Mock(), http_client=None, config=config
# FIXED:
resource_for_federation=Mock(), federation_http_client=None, config=config
)
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.storage = hs.get_storage()
self.event_builder_factory = hs.get_event_builder_factory()
self.event_creation_handler = hs.<|endoftext|> |
python | <fim-prefix> be passed to the error handler so users are
returned to a different view than the one requested in addition to the
error message.
#. RECOVERABLE: Generic API errors which generate a user-facing message
but drop directly back to the regular code flow.
All other exceptions bubble the stack as normal unless the ``ignore``
argument is passed in as ``True``, in which case only unrecognized
errors are bubbled.
If the exception is not re-raised, an appropriate wrapper exception
class indicating the type of exception that was encountered will be
returned.
"""
exc_type, exc_value, exc_traceback = sys.exc_info()
# Because the same exception may travel through this method more than
# once (if it's re-raised) we may want to treat it differently
# the second time (e.g. no user messages/logging).
handled = issubclass(exc_type, HandledException)
wrap = False
# Restore our original exception information, but re-wrap it at the end
if handled:
exc_type, exc_value, exc_traceback = exc_value.wrapped
wrap = True
# We trust messages from our own exceptions
if issubclass(exc_type, HorizonException):
message = exc_value
# If the message has a placeholder for the exception, fill it in
elif message and "%(exc)s" in message:
message = message % {"exc": exc_value}
if issubclass(exc_type, UNAUTHORIZED):
if ignore:
return NotAuthorized
# BUG: NVD-CWE-Other Other
# request.session.clear()
# FIXED:
<fim-middle> request.user_logout()
if not handled:
LOG.debug("Unauthorized: %s" % exc_value)
# We get some pretty useless error messages back from
# some clients<fix-suffix>, so let's define our own fallback.
fallback = _("Unauthorized. Please try logging in again.")
messages.error(request, message or fallback, extra_tags="login")
raise NotAuthorized # Redirect handled in middleware
if issubclass(exc_type, NOT_FOUND):
wrap = True
if not ignore and not handled:
LOG.debug("Not Found: %s" % exc_value)
messages.error(request, message or exc_value)
if redirect:
raise Http302(redirect)
if not escalate:
return NotFound # return to normal code flow
if issubclass(exc_type, RECOVERABLE):
wrap = True
if not ignore and not handled:
LOG.debug("Recoverable error: %s" % exc_value)
messages.error(request, message or exc_value)
if redirect:
raise Http302(redirect)
if not escalate:
return RecoverableError # return to normal code flow
# If we've gotten here, time to wrap and/or raise our exception.
if wrap:
raise HandledException([exc_type, exc_value, exc_traceback])
raise exc_type, exc_value, exc_traceback
<|endoftext|> |
python | <fim-prefix>import py<fim-middle>test
from starkware.starknet.testing.starknet import Starknet
# BUG: CWE-863 Incorrect Authorization
# MockSigner,
# FIXED:
from signers import MockSigner
from utils import (
assert_revert,
ge<fix-suffix>t_contract_class,
cached_contract,
assert_event_emitted,
assert_revert_entry_point
)
# Arbitrary payload value used by the tests when writing/reading proxy state.
VALUE = 123

# Module-wide signer seeded with a fixed private key so account signatures
# are deterministic across test runs.
signer = MockSigner(123456789987654321)
@pytest.fixture(scope='module')
def contract_classes():
    """Compile once per module and return the contract classes as a tuple:
    (account, proxiable implementation, proxy).
    """
    return (
        get_contract_class('openzeppelin/account/Account.cairo'),
        get_contract_class(
            'tests/mocks/proxiable_implementation.cairo'
        ),
        get_contract_class('openzeppelin/upgrades/Proxy.cairo'),
    )
@pytest.fixture(scope='module')
async def proxy_init(contract_classes):
    """Deploy a fresh StarkNet instance holding two signer-controlled
    accounts and a Proxy wired to the declared mock implementation.

    Returns ``(state, account #1, account #2, proxy contract)``; the state
    is handed out so per-test fixtures can copy it cheaply.
    """
    account_cls, implementation_cls, proxy_cls = contract_classes
    chain = await Starknet.empty()

    # Both accounts are controlled by the module-level signer's key pair.
    first_account = await chain.deploy(
        contract_class=account_cls,
        constructor_calldata=[signer.public_key]
    )
    second_account = await chain.deploy(
        contract_class=account_cls,
        constructor_calldata=[signer.public_key]
    )

    # Declare (not deploy) the implementation: the proxy only needs its
    # class hash to delegate calls to it.
    declared_impl = await chain.declare(contract_class=implementation_cls)
    proxy_contract = await chain.deploy(
        contract_class=proxy_cls,
        constructor_calldata=[declared_impl.class_hash]
    )

    return (
        chain.state,
        first_account,
        second_account,
        proxy_contract
    )
@pytest.fixture
def proxy_factory(contract_classes, proxy_init):
account_cls, _, proxy_cls = contract_classes
state, account1, account2, proxy = proxy_init
_state = state.copy()
admin = ca<|endoftext|> |
python | <fim-prefix>import os<fim-middle>
from mercurial import ui, hg
# BUG: CWE-20 Improper Input Validation
# from subprocess import Popen, PIPE
# FIXED:
import Bcfg2.Server.Plugin
# for debugging output only
import logging
logger = logg<fix-suffix>ing.getLogger('Bcfg2.Plugins.Mercurial')
class Hg(Bcfg2.Server.Plugin.Plugin,
Bcfg2.Server.Plugin.Version):
"""Mercurial is a version plugin for dealing with Bcfg2 repository."""
name = 'Mercurial'
__version__ = '$Id$'
__author__ = '[email protected]'
experimental = True
def __init__(self, core, datastore):
Bcfg2.Server.Plugin.Plugin.__init__(self, core, datastore)
Bcfg2.Server.Plugin.Version.__init__(self)
self.core = core
self.datastore = datastore
# path to hg directory for Bcfg2 repo
hg_dir = "%s/.hg" % datastore
# Read changeset from bcfg2 repo
if os.path.isdir(hg_dir):
self.get_revision()
else:
logger.error("%s is not present." % hg_dir)
raise Bcfg2.Server.Plugin.PluginInitError
logger.debug("Initialized hg plugin with hg directory = %s" % hg_dir)
def get_revision(self):
"""Read hg revision information for the Bcfg2 repository."""
try:
repo_path = "%s/" % self.datastore
repo = hg.repository(ui.ui(), repo_path)
tip = repo.changelog.tip()
revision = repo.changelog.rev(tip)
except:
logger.error("Failed to read hg repository; disabling mercurial support")
raise Bcfg2.Server.Plugin.PluginInitError
return revision
<|endoftext|> |
python | <fim-prefix>self.args]
if getattr(filter_, 'evalcontextfilter', False):
args.insert(0, eval_ctx)
elif getattr(filter_, 'environmentfilter', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return filter_(obj, *args, **kwargs)
except Exception:
raise Impossible()
class Test(Expr):
"""Applies a test on an expression. `name` is the name of the test, the
rest of the fields are the same as for :class:`Call`.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
class Call(Expr):
"""Calls an expression. `args` is a list of arguments, `kwargs` a list
of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
and `dyn_kwargs` has to be either `None` or a node that is used as
node for dynamic positional (``*args``) or keyword (``**kwargs``)
arguments.
"""
fields = ('node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# BUG: CWE-134 Use of Externally-Controlled Format String
# if eval_ctx.volatile:
# FIXED:
<fim-middle> if eval_ctx.volatile or eval_ctx.environment.sandboxed:
raise Impossible()
obj = self.node.as_const(eval_ctx)
# don't evaluate context functions
args = [x.<fix-suffix>as_const(eval_ctx) for x in self.args]
if isinstance(obj, _context_function_types):
if getattr(obj, 'contextfunction', False):
raise Impossible()
elif getattr(obj, 'evalcontextfunction', False):
args.insert(0, eval_ctx)
elif getattr(obj, 'environmentfunction', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return obj(*args, **kwargs)
except Exception:
raise Impossible()
class Getitem(Expr):
"""Get an attribute or item from an expression and prefer the item."""
fields = ('node', 'arg', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.ctx != 'load':
raise Impossible()
try:
return self.environment.getitem(self.node.as_const(eval_ctx),
self.arg.as_const(eval_ctx))
except Exception:
raise Impossible()
def can_assign(self):
return False
class<|endoftext|> |
python | <fim-prefix>ns under the License.
import errno
import logging
import os
import shutil
from typing import IO, Dict, List, Optional, Tuple
import twisted.internet.error
import twisted.web.http
from twisted.web.http import Request
from twisted.web.resource import Resource
from synapse.api.errors import (
FederationDeniedError,
HttpResponseException,
NotFoundError,
RequestSendFailed,
SynapseError,
)
from synapse.config._base import ConfigError
from synapse.logging.context import defer_to_thread
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.util.async_helpers import Linearizer
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import random_string
from ._base import (
FileInfo,
Responder,
get_filename_from_headers,
respond_404,
respond_with_responder,
)
from .config_resource import MediaConfigResource
from .download_resource import DownloadResource
from .filepath import MediaFilePaths
from .media_storage import MediaStorage
from .preview_url_resource import PreviewUrlResource
from .storage_provider import StorageProviderWrapper
from .thumbnail_resource import ThumbnailResource
from .thumbnailer import Thumbnailer, ThumbnailError
from .upload_resource import UploadResource
logger = logging.getLogger(__name__)
UPDATE_RECENTLY_ACCESSED_TS = 60 * 1000
class MediaRepository:
def __init__(self, hs):
self.hs = hs
self.auth = hs.get_auth()
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# self.client = hs.get_http_client()
# FIXED:
<fim-middle> self.client = hs.get_federation_http_client()
self.clock = hs.get_clock()
self.server_name = hs.hostname
self.store = hs.get_datastore()
self.max_upload_size = <fix-suffix>hs.config.max_upload_size
self.max_image_pixels = hs.config.max_image_pixels
self.primary_base_path = hs.config.media_store_path
self.filepaths = MediaFilePaths(self.primary_base_path)
self.dynamic_thumbnails = hs.config.dynamic_thumbnails
self.thumbnail_requirements = hs.config.thumbnail_requirements
self.remote_media_linearizer = Linearizer(name="media_remote")
self.recently_accessed_remotes = set()
self.recently_accessed_locals = set()
self.federation_domain_whitelist = hs.config.federation_domain_whitelist
# List of StorageProviders where we should search for media and
# potentially upload to.
storage_providers = []
for clz, provider_config, wrapper_config in hs.config.media_storage_providers:
backend = clz(hs, provider_config)
provider = StorageProviderWrapper(
backend,
store_local=wrapper_config.store_local,
store_remote=wrapper_config.store_remote,
store_synchronous=wrapper_config.store_synchronous,
)
storage_providers.append(provider)
self.media_storage = MediaStorage(
self.hs, self.primary_base_path, self.filepaths, storage_providers
)
self.clock.looping_call(
self._start_update_recently_accessed, UPDATE_RECENTLY_ACCESSED_TS
)
def _start_update_recently_accessed(self):
return ru<|endoftext|> |
python | <fim-prefix> hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
import os
import subprocess
import ansible.constants as C
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible import utils
from ansible import errors
import sys
class InventoryScript(object):
''' Host inventory parser for ansible using external inventory scripts. '''
    def __init__(self, filename=C.DEFAULT_HOST_LIST):
        """Run the external inventory script with --list and parse its output.

        :param filename: path to the executable inventory script.
        """
        # Support inventory scripts that are not prefixed with some
        # path information but happen to be in the current working
        # directory when '.' is not in PATH.
        self.filename = os.path.abspath(filename)
        cmd = [ self.filename, "--list" ]
        try:
            sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        except OSError, e:
            raise errors.AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
        (stdout, stderr) = sp.communicate()
        # Raw JSON emitted by the script on stdout; parsed in _parse().
        self.data = stdout
        # see comment about _meta below
        self.host_vars_from_top = None
        # stderr is passed along only so _parse() can include it in error output.
        self.groups = self._parse(stderr)
def _parse(self, err):
all_hosts = {}
# BUG: CWE-20 Improper Input Validation
# self.raw = utils.parse_json(self.data)
# FIXED:
<fim-middle> self.raw = utils.parse_json(self.data, from_remote=True)
all = Group('all')
groups = dict(all=all)
group = None
if 'failed' in self.raw:
<fix-suffix> sys.stderr.write(err + "\n")
raise errors.AnsibleError("failed to parse executable inventory script results: %s" % self.raw)
for (group_name, data) in self.raw.items():
# in Ansible 1.3 and later, a "_meta" subelement may contain
# a variable "hostvars" which contains a hash for each host
# if this "hostvars" exists at all then do not call --host for each
# host. This is for efficiency and scripts should still return data
# if called with --host for backwards compat with 1.2 and earlier.
if group_name == '_meta':
if 'hostvars' in data:
self.host_vars_from_top = data['hostvars']
continue
if group_name != all.name:
group = groups[group_name] = Group(group_name)
else:
group = all
host = None
if not isinstance(data, dict):
data = {'hosts': data}
elif not any(k in data for k in ('hosts','vars')):
data = {'hosts': [group_name], 'vars': data}
if 'hosts' in data:
for hostname in data['hosts']:
if not hostname in all_hosts:
all_hosts[hostname] = Host(hostname)
host = all_hosts[hostname]
group.add_host(host)
if 'vars' in data:
for k, v in data['vars'].iteritems():
<|endoftext|> |
python | <fim-prefix># -*- cod<fim-middle>ing: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a<fix-suffix> copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.app.generic_worker import GenericWorkerServer
from tests.server import make_request
from tests.unittest import HomeserverTestCase
class FrontendProxyTests(HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# http_client=None, homeserver_to_use=GenericWorkerServer
# FIXED:
federation_http_client=None, homeserver_to_use=GenericWorkerServer
)
return hs
def default_config(self):
c = super().default_config()
c["worker_app"] = "synapse.app.frontend_proxy"
c["worker_listeners"] = [
{
"type": "http",
"port": 8080,
"bind_addresses": ["0.0.0.0"],
"resources": [{"names": ["client"]}],
}
]
return c
def test_listen_http_with_presence_enabled(self):
"""
When presence is on, the stub servlet will not register.
"""
# Presence is on
<|endoftext|> |
python | <fim-prefix>import logger
from markdownify import markdownify
import mammoth
import shutil
import os
import time
import re
import yaml
# 导入Zip文集
class ImportZipProject():
# 读取 Zip 压缩包
def read_zip(self,zip_file_path,create_user):
# 导入流程:
# 1、解压zip压缩包文件到temp文件夹
# 2、遍历temp文件夹内的解压后的.md文件
# 3、读取.md文件的文本内容
# 4、如果里面匹配到相对路径的静态文件,从指定文件夹里面读取
# 5、上传图片,写入数据库,修改.md文件里面的url路径
# 新建一个临时文件夹,用于存放解压的文件
self.temp_dir = zip_file_path[:-3]
os.mkdir(self.temp_dir)
# 解压 zip 文件到指定临时文件夹
shutil.unpack_archive(zip_file_path, extract_dir=self.temp_dir)
# 处理文件夹和文件名的中文乱码
for root, dirs, files in os.walk(self.temp_dir):
for dir in dirs:
try:
new_dir = dir.encode('cp437').decode('gbk')
except:
new_dir = dir.encode('utf-8').decode('utf-8')
# print(new_dir)
os.rename(os.path.join(root, dir), os.path.join(root, new_dir))
for file in files:
try:
new_file = file.encode('cp437').decode('gbk')
except:
new_file = file.encode('utf-8').decode('utf-8')
# print(root, new_file)
os.rename(os.path.join(root, file), os.path.join(root, new_file))
# 读取yaml文件
try:
with open(os.path.join(self.temp_dir ,'mrdoc.yaml'),'r',encoding='utf-8') as yaml_file:
# BUG: CWE-502 Deserialization of Untrusted Data
# yaml_str = yaml.load(yaml_file.read())
# FIXED:
<fim-middle> yaml_str = yaml.safe_load(yaml_file.read())
project_name = yaml_str['project_name'] \
if 'project_name' in yaml_str.keys() else zip_file_path[:-4].s<fix-suffix>plit('/')[-1]
project_desc = yaml_str['project_desc'] if 'project_desc' in yaml_str.keys() else ''
project_role = yaml_str['project_role'] if 'project_role' in yaml_str.keys() else 1
editor_mode = yaml_str['editor_mode'] if 'editor_mode' in yaml_str.keys() else 1
project_toc = yaml_str['toc']
toc_item_list = []
for toc in project_toc:
# print(toc)
item = {
'name': toc['name'],
'file': toc['file'],
'parent': 0,
}
toc_item_list.append(item)
if 'children' in toc.keys():
for b in toc['children']:
item = {
'name': b['name'],
'file': b['file'],
'parent': toc['name']
}
toc_item_list.append(item)
if 'children' in b.keys():
for c in b['children']:
item = {
'name': c['name'],
'file': c['file'],
'parent': b['name']
}
toc_item_list.append(ite<|endoftext|> |
python | <fim-prefix>'VCLLexer', 'VCLSnippetLexer']
class VCLLexer(RegexLexer):
"""
For Varnish Configuration Language (VCL).
.. versionadded:: 2.2
"""
name = 'VCL'
aliases = ['vcl']
filenames = ['*.vcl']
mimetypes = ['text/x-vclsrc']
def analyse_text(text):
# If the very first line is 'vcl 4.0;' it's pretty much guaranteed
# that this is VCL
if text.startswith('vcl 4.0;'):
return 1.0
# Skip over comments and blank lines
# This is accurate enough that returning 0.9 is reasonable.
# Almost no VCL files start without some comments.
elif '\nvcl 4.0;' in text[:1000]:
return 0.9
tokens = {
'probe': [
include('whitespace'),
include('comments'),
(r'(\.\w+)(\s*=\s*)([^;]*)(;)',
bygroups(Name.Attribute, Operator, using(this), Punctuation)),
(r'\}', Punctuation, '#pop'),
],
'acl': [
include('whitespace'),
include('comments'),
(r'[!/]+', Operator),
(r';', Punctuation),
(r'\d+', Number),
(r'\}', Punctuation, '#pop'),
],
'backend': [
include('whitespace'),
(r'(\.probe)(\s*=\s*)(\w+)(;)',
bygroups(Name.Attribute, Operator, Name.Variable.Global, Punctuation)),
(r'(\.probe)(\s*=\s*)(\{)',
bygroups(Name.Attribute, Operator, Punctuation), 'probe'),
# BUG: NVD-CWE-Other Other
# (r'(\.\w+\b)(\s*=\s*)([^;]*)(\s*;)',
# FIXED:
<fim-middle> (r'(\.\w+\b)(\s*=\s*)([^;\s]*)(\s*;)',
bygroups(Name.Attribute, Operator, using(this), Punctuation)),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuatio<fix-suffix>n, '#pop'),
],
'statements': [
(r'(\d\.)?\d+[sdwhmy]', Literal.Date),
(r'(\d\.)?\d+ms', Literal.Date),
(r'(vcl_pass|vcl_hash|vcl_hit|vcl_init|vcl_backend_fetch|vcl_pipe|'
r'vcl_backend_response|vcl_synth|vcl_deliver|vcl_backend_error|'
r'vcl_fini|vcl_recv|vcl_purge|vcl_miss)\b', Name.Function),
(r'(pipe|retry|hash|synth|deliver|purge|abandon|lookup|pass|fail|ok|'
r'miss|fetch|restart)\b', Name.Constant),
(r'(beresp|obj|resp|req|req_top|bereq)\.http\.[a-zA-Z_-]+\b', Name.Variable),
(words((
'obj.status', 'req.hash_always_miss', 'beresp.backend', 'req.esi_level',
'req.can_gzip', 'beresp.ttl', 'obj.uncacheable', 'req.ttl', 'obj.hits',
'client.identity', 'req.hash_ignore_busy', 'obj.reason', 'req.xid',
'req_top.proto', 'beresp.age', 'obj.proto', 'obj.age', 'local.ip',
'beresp.uncacheable', 'req.method', 'beresp.backend.ip', 'now',
'obj.grace', 'req.restarts', 'beresp.keep', 'req.proto', 'resp.proto',
'bereq.xid', 'bereq.between_bytes_timeout', 'req.esi',
'bereq.first_byte_timeout', 'bereq.method', 'bereq.connect_timeout',
'beresp.do_gzip', 'resp.status', 'beresp.do_gunzip',
'beresp.storage_hint', 'resp.is_streaming', 'beresp.do_stream',
'req_top.method', 'bereq.backend', 'beresp.ba<|endoftext|> |
python | <fim-prefix><fim-middle>go.conf.urls import patterns, url
from django.contrib.auth import context_processors
from django.contrib.auth.urls import urlpatterns
from django.contrib.auth.views import password_reset
from django.c<fix-suffix>ontrib.auth.decorators import login_required
from django.contrib.messages.api import info
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import Template, RequestContext
from django.views.decorators.cache import never_cache
@never_cache
def remote_user_auth_view(request):
    """Dummy view for remote-user tests; renders the current username."""
    context = RequestContext(request, {})
    template = Template("Username is {{ user }}.")
    return HttpResponse(template.render(context))
def auth_processor_no_attr_access(request):
    # Render a template that runs the auth context processor but (per the
    # template name) presumably never touches user attributes, then report
    # whether rendering caused the session to be accessed.
    r1 = render_to_response('context_processors/auth_attrs_no_access.html',
        RequestContext(request, {}, processors=[context_processors.auth]))
    # *After* rendering, we check whether the session was accessed
    return render_to_response('context_processors/auth_attrs_test_access.html',
        {'session_accessed':request.session.accessed})
def auth_processor_attr_access(request):
    # Counterpart to auth_processor_no_attr_access: the rendered template
    # (per its name) accesses user attributes, then we report whether that
    # caused the session to be accessed during the first render.
    r1 = render_to_response('context_processors/auth_attrs_access.html',
        RequestContext(request, {}, processors=[context_processors.auth]))
    return render_to_response('context_processors/auth_attrs_test_access.html',
        {'session_accessed':request.session.accessed})
def auth_processor_user(request):
    """Render the template that displays ``{{ user }}`` via the auth processor."""
    context = RequestContext(request, {}, processors=[context_processors.auth])
    return render_to_response('context_processors/auth_attrs_user.html', context)
def auth_processor_perms(request):
return render_to_response('context_processors/auth_attrs_perms.htm<|endoftext|> |
python | <fim-prefix><fim-middle>ng import List, Tuple
import h2.settings
import hpack
import hyperframe.frame
import pytest
from h2.errors import ErrorCodes
from mitmproxy.connection import ConnectionState, Server
from mitmproxy.f<fix-suffix>low import Error
from mitmproxy.http import HTTPFlow, Headers, Request
from mitmproxy.net.http import status_codes
from mitmproxy.proxy.commands import CloseConnection, Log, OpenConnection, SendData
from mitmproxy.proxy.context import Context
from mitmproxy.proxy.events import ConnectionClosed, DataReceived
from mitmproxy.proxy.layers import http
from mitmproxy.proxy.layers.http import HTTPMode
from mitmproxy.proxy.layers.http._http2 import Http2Client, split_pseudo_headers
from test.mitmproxy.proxy.layers.http.hyper_h2_test_helpers import FrameFactory
from test.mitmproxy.proxy.tutils import Placeholder, Playbook, reply
example_request_headers = (
(b':method', b'GET'),
(b':scheme', b'http'),
(b':path', b'/'),
(b':authority', b'example.com'),
)
example_response_headers = (
(b':status', b'200'),
)
example_request_trailers = (
(b'req-trailer-a', b'a'),
(b'req-trailer-b', b'b')
)
example_response_trailers = (
(b'resp-trailer-a', b'a'),
(b'resp-trailer-b', b'b')
)
@pytest.fixture
def open_h2_server_conn():
    # Deliberately fake: port 80 with ALPN negotiated but no TLS, so the
    # tests are not polluted with real TLS handshakes.
    server = Server(("example.com", 80))
    server.alpn = b"h2"
    server.state = ConnectionState.OPEN
    return server
def decode_frames(data: bytes) -> List[hyperframe.frame.Frame]:
# swallow preamble
if data.startswith(b"PRI * HTTP/2.0"):
data = data[24:]
frames = []
while data:
f, <|endoftext|> |
python | <fim-prefix>\(register\s+const\s+char\s*\*\s*str,\s*register\s+unsigned\s+int\s+len\s*\)')
REG_STR_AT = re.compile('str\[(\d+)\]')
REG_UNFOLD_KEY = re.compile('unicode_unfold_key\s*\(register\s+const\s+char\s*\*\s*str,\s*register\s+unsigned\s+int\s+len\)')
REG_ENTRY = re.compile('\{".+?",\s*/\*(.+?)\*/\s*(-?\d+),\s*(\d)\}')
REG_EMPTY_ENTRY = re.compile('\{"",\s*(-?\d+),\s*(\d)\}')
REG_IF_LEN = re.compile('if\s*\(\s*len\s*<=\s*MAX_WORD_LENGTH.+')
REG_GET_HASH = re.compile('(?:register\s+)?(?:unsigned\s+)?int\s+key\s*=\s*hash\s*\(str,\s*len\);')
REG_GET_CODE = re.compile('(?:register\s+)?const\s+char\s*\*\s*s\s*=\s*wordlist\[key\]\.name;')
REG_CODE_CHECK = re.compile('if\s*\(\*str\s*==\s*\*s\s*&&\s*!strncmp.+\)')
def parse_line(s):
    """Apply the first matching gperf->oniguruma rewrite rule to *s*.

    Each rule is tried in order; the first substitution that actually
    changes the line wins and its result is returned.  Lines matched by
    no rule are returned unchanged (trailing whitespace stripped).
    """
    s = s.rstrip()
    rules = (
        (REG_LINE_GPERF, ''),
        (REG_HASH_FUNC, 'hash(OnigCodePoint codes[])'),
        (REG_STR_AT, 'onig_codes_byte_at(codes, \\1)'),
        (REG_UNFOLD_KEY, 'unicode_unfold_key(OnigCodePoint code)'),
        (REG_ENTRY, '{\\1, \\2, \\3}'),
        (REG_EMPTY_ENTRY, '{0xffffffff, \\1, \\2}'),
        (REG_IF_LEN, 'if (0 == 0)'),
        (REG_GET_HASH, 'int key = hash(&code);'),
        (REG_GET_CODE, 'OnigCodePoint gcode = wordlist[key].code;'),
        (REG_CODE_CHECK, 'if (code == gcode && wordlist[key].index >= 0)'),
    )
    for pattern, replacement in rules:
        rewritten = re.sub(pattern, replacement, s)
        if rewritten != s:
            return rewritten
    return s
def parse_file(f):
    # Emit a banner marking the output as machine-converted, then pass every
    # line of the gperf output through parse_line() and print the result.
    print "/* This file was converted by gperf_unfold_key_conv.py\n from gperf output file. */"
    line = f.readline()
    while line:
        s = parse_line(line)
        print s
        line = f.readline()
# main
parse_file(sys.stdin)
<|endoftext|> |
python | <fim-prefix># -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.errors import NotFoundError
from synapse.rest.client.v1 import room
from tests.unittest import HomeserverTestCase
class PurgeTests(HomeserverTestCase):
user_id = "@red:server"
servlets = [room.register_servlets]
def make_homeserver(self, reactor, clock):
# BUG: CWE-601 URL Redirection to Untrusted Site ('Open Redirect')
# hs = self.setup_test_homeserver("server", http_client=None)
# FIXED:
<fim-middle> hs = self.setup_test_homeserver("server", federation_http_client=None)
return hs
    def prepare(self, reactor, clock, hs):
        # Create a room owned by the test user; the tests fill it with events
        # and then purge slices of its history.
        self.room_id = self.helper.create_room_as(self.user_id)
    def test_purge(self):
        """
        Purging a room will delete everything before the topological point.
        """
        # Send four messages to the room
        first = self.helper.send(self.room_id, body="test1")
        second = self.helper.send(self.room_id, body="test2")
        third = self.helper.send(self.room_id, body="test3")
        last = self.helper.send(self.room_id, body="test4")
        store = self.hs.get_datastore()
        storage = self.hs.get_storage()
        # Get the topological token
        token = self.get_success(
            store.get_topological_token_for_event(last["event_id"])
        )
        token_str = self.get_success(token.to_string(self.hs.get_datastore()))
        # Purge everything before this topological token
        # NOTE(review): the trailing True presumably enables deleting local
        # events as well — confirm against purge_history's signature.
        self.get_success(
            storage.purge_events.purge_history(self.room_id, token_str, True)
        )
        # 1-3 should fail and last will succeed, meaning that 1-3 are deleted
        # and last is not.
        self.get_failure(store.get_event(first["event_id"]), NotFoundError)
        self.get_failure(store.get_event(second["event_id"]), NotFoundError)
        self.get_failure(store.get_event(third["event_id"]), NotFoundError)
        self.get_success(store.get_event(last["event_id"]))
def test_purge_wont_delete_extrems(self):
"""
Purging a room will delete everything before the topological point.
"""
# Send four messages to the room
first = sel<|endoftext|> |
python | <fim-prefix> rs = __query_source_package_path_by_name(server_id, pkgFilename, channel)
if rs is None:
return 0
return 1
# The query used both in get_source_package_path and package_source_in_channel
def __query_source_package_path_by_name(server_id, pkgFilename, channel):
    """Return the source-RPM path row for *pkgFilename*, or None.

    The server must be subscribed to *channel* and the source package's
    org must match (or both be NULL).
    """
    query = """
    select
        unique ps.path
    from
        rhnSourceRPM sr,
        rhnPackageSource ps,
        rhnPackage p,
        rhnChannelPackage cp,
        rhnChannel c,
        rhnServerChannel sc
    where
        sc.server_id = :server_id
    and sc.channel_id = cp.channel_id
    and cp.channel_id = c.id
    and c.label = :channel
    and cp.package_id = p.id
    and p.source_rpm_id = sr.id
    and sr.name = :name
    and p.source_rpm_id = ps.source_rpm_id
    and ((p.org_id is null and ps.org_id is null)
        or p.org_id = ps.org_id)
    """
    cursor = rhnSQL.prepare(query)
    cursor.execute(name=pkgFilename, channel=channel, server_id=server_id)
    return cursor.fetchone_dict()
def get_info_for_package(pkg, channel_id, org_id):
    """Return path/channel/checksum info for a package NEVRA.

    :param pkg: (name, version, release, epoch, arch) sequence; each element
        is stringified before the lookup.
    :param channel_id: preferred channel id; rows from this channel sort first.
    :param org_id: owning org id, or a falsy value to match NULL-org packages.
    :return: dict with 'path', 'channel_id', 'checksum_type' and 'checksum'
        keys; every value is None when no matching package exists.
    """
    log_debug(3, pkg)
    # Stringify every NEVRA component.  A list comprehension (rather than the
    # original map()) keeps the result indexable on Python 3 too.
    pkg = [str(x) for x in pkg]
    params = {'name': pkg[0],
              'ver': pkg[1],
              'rel': pkg[2],
              'epoch': pkg[3],
              'arch': pkg[4],
              'channel_id': channel_id,
              'org_id': org_id}
    # yum repo has epoch="0" not only when epoch is "0" but also if it's NULL
    # NOTE(review): pkg[3] was stringified above, so the None member of this
    # tuple can only match if the stringify step is skipped; confirm whether
    # the string 'None' was the intended extra case.
    if pkg[3] in ('0', '', None):
        epochStatement = "(epoch is null or epoch = :epoch)"
    else:
        epochStatement = "epoch = :epoch"
    if params['org_id']:
        orgStatement = "org_id = :org_id"
    else:
        orgStatement = "org_id is null"
    statement = """
        select p.path, cp.channel_id,
               cv.checksum_type, cv.checksum
          from rhnPackage p
          join rhnPackageName pn
            on p.name_id = pn.id
          join rhnPackageEVR pe
            on p.evr_id = pe.id
          join rhnPackageArch pa
            on p.package_arch_id = pa.id
          left join rhnChannelPackage cp
            on p.id = cp.package_id
           and cp.channel_id = :channel_id
          join rhnChecksumView cv
            on p.checksum_id = cv.id
         where pn.name = :name
           and pe.version = :ver
           and pe.release = :rel
           and %s
           and pa.label = :arch
           and %s
         order by cp.channel_id nulls last
    """ % (epochStatement, orgStatement)
    h = rhnSQL.prepare(statement)
    h.execute(**params)
    ret = h.fetchone_dict()
    if not ret:
        # Preserve the historical contract: a dict of Nones, not None itself.
        return {'path': None,
                'channel_id': None,
                'checksum_type': None,
                'checksum': None,
                }
    return ret
def _none2emptyString(foo):
if foo is None:
return ""
return str(foo)
if __name__ == '__main__':
"""Test code.
"""
from spacewalk.common.rhnLog import initLOG
initLOG("stdout", 1)
rhnSQL.initDB()
print
# new client
print get_package_path(1000463284, 'kernel-2.4.2-2.i686.rpm', 'redhat-linux-i386-7.1')
print get_source_package_path(1000463284, 'kernel-2.4.2-2.i686.rpm', 'redhat-<|endoftext|> |
python | <fim-prefix>t the routine from twisted.
if isIPAddress(server_name):
return False
# next, check the deny list
deny = acl_event.content.get("deny", [])
if not isinstance(deny, (list, tuple)):
logger.warning("Ignoring non-list deny ACL %s", deny)
deny = []
for e in deny:
if _acl_entry_matches(server_name, e):
# logger.info("%s matched deny rule %s", server_name, e)
return False
# then the allow list.
allow = acl_event.content.get("allow", [])
if not isinstance(allow, (list, tuple)):
logger.warning("Ignoring non-list allow ACL %s", allow)
allow = []
for e in allow:
if _acl_entry_matches(server_name, e):
# logger.info("%s matched allow rule %s", server_name, e)
return True
# everything else should be rejected.
# logger.info("%s fell through", server_name)
return False
def _acl_entry_matches(server_name: str, acl_entry: Any) -> bool:
    """Return True if *server_name* matches the glob pattern *acl_entry*.

    Non-string entries are ignored (logged and treated as no match).
    """
    if isinstance(acl_entry, str):
        pattern = glob_to_regex(acl_entry)
        return pattern.match(server_name) is not None
    logger.warning(
        "Ignoring non-str ACL entry '%s' (is %s)", acl_entry, type(acl_entry)
    )
    return False
class FederationHandlerRegistry:
"""Allows classes to register themselves as handlers for a given EDU or
query type for incoming federation traffic.
"""
    def __init__(self, hs: "HomeServer"):
        """Wire up handler registries and replication clients from the homeserver."""
        self.config = hs.config
        self.clock = hs.get_clock()
        self._instance_name = hs.get_instance_name()
        # These are safe to load in monolith mode, but will explode if we try
        # and use them. However we have guards before we use them to ensure that
        # we don't route to ourselves, and in monolith mode that will always be
        # the case.
        self._get_query_client = ReplicationGetQueryRestServlet.make_client(hs)
        self._send_edu = ReplicationFederationSendEduRestServlet.make_client(hs)
        # Registered EDU handlers, keyed by EDU type (see register_edu_handler).
        self.edu_handlers = (
            {}
        )  # type: Dict[str, Callable[[str, dict], Awaitable[None]]]
        # Registered query handlers, keyed by query type.
        self.query_handlers = {}  # type: Dict[str, Callable[[dict], Awaitable[None]]]
        # Map from type to instance name that we should route EDU handling to.
        self._edu_type_to_instance = {}  # type: Dict[str, str]
    def register_edu_handler(
        self, edu_type: str, handler: Callable[[str, JsonDict], Awaitable[None]]
    ):
        """Sets the handler callable that will be used to handle an incoming
        federation EDU of the given type.

        Args:
            edu_type: The type of the incoming EDU to register handler for
            handler: A callable invoked on incoming EDU
                of the given type. The arguments are the origin server name and
                the EDU contents.

        Raises:
            KeyError: if a handler is already registered for this EDU type.
        """
        if edu_type in self.edu_handlers:
            raise KeyError("Already have an EDU handler for %s" % (edu_type,))
        logger.info("Registering federation EDU handler for %r", edu_type)
        self.edu_handlers[edu_type] = handler
def register_query_handler(
self, query_type: str, handler: Callable[[dict], <|endoftext|> |
python | <fim-prefix><fim-middle>ing: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
from typing import List, TYPE_CHECKING
from zulint.custom_rules import RuleList
if TYPE_CHECKING:
fro<fix-suffix>m zulint.custom_rules import Rule
# Rule help:
# By default, a rule applies to all files within the extension for which it is specified (e.g. all .py files)
# There are three operators we can use to manually include or exclude files from linting for a rule:
# 'exclude': 'set([<path>, ...])' - if <path> is a filename, excludes that file.
# if <path> is a directory, excludes all files directly below the directory <path>.
# 'exclude_line': 'set([(<path>, <line>), ...])' - excludes all lines matching <line> in the file <path> from linting.
# 'include_only': 'set([<path>, ...])' - includes only those files where <path> is a substring of the filepath.
# Characters that delimit Python syntax and must not be matched as plain text:
# both quote kinds, parens/brackets/braces, the comment marker and backslash.
PYDELIMS = r'''"'()\[\]{}#\\'''
# One "ordinary" character: anything that is not a delimiter.
PYREG = r"[^{}]".format(PYDELIMS)
# A double-quoted string literal, with escape handling.
PYSQ = r'"(?:[^"\\]|\\.)*"'
# A single-quoted string literal, with escape handling.
PYDQ = r"'(?:[^'\\]|\\.)*'"
PYLEFT = r"[(\[{]"
PYRIGHT = r"[)\]}]"
# Iteratively build a regex matching bracket-balanced Python code; each pass
# permits one more level of nesting, up to 5 levels deep.
PYCODE = PYREG
for depth in range(5):
    PYGROUP = r"""(?:{}|{}|{}{}*{})""".format(PYSQ, PYDQ, PYLEFT, PYCODE, PYRIGHT)
    PYCODE = r"""(?:{}|{})""".format(PYREG, PYGROUP)
FILES_WITH_LEGACY_SUBJECT = {
# This basically requires a big DB migration:
'zerver/lib/topic.py',
# This is for backward compatibility.
'zerver/tests/test_legacy_subject.py',
# Other migration-related changes require extreme care.
'zerver/lib/fix_unreads.py',
'zerver/tests/test_migrations.py',
# These use subject in the email sense, and will
# probably always be exempt:
'zerver/lib/email_mirror.py',
'zerver/lib/feedback.py<|endoftext|> |
python | <fim-prefix><fim-middle>psis: most ajax processors for askbot
This module contains most (but not all) processors for Ajax requests.
Not so clear if this subdivision was necessary as separation of Ajax and non-ajax views
is <fix-suffix>not always very clean.
"""
import datetime
import logging
from bs4 import BeautifulSoup
from django.conf import settings as django_settings
from django.core import exceptions
#from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.http import HttpResponseRedirect
from django.http import HttpResponseForbidden
from django.forms import ValidationError, IntegerField, CharField
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from django.template.loader import get_template
from django.views.decorators import csrf
from django.utils import simplejson
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.utils.translation import string_concat
from askbot.utils.slug import slugify
from askbot import models
from askbot import forms
from askbot import conf
from askbot import const
from askbot import mail
from askbot.conf import settings as askbot_settings
from askbot.utils import category_tree
from askbot.utils import decorators
from askbot.utils import url_utils
from askbot.utils.forms import get_db_object_or_404
from django.template import RequestContext
from askbot.skins.loaders import render_into_skin_as_string
from askbot.skins.loaders import render_text_into_skin
from askbot.models.<|endoftext|> |
python | <fim-prefix>r.ib(type=Optional[Sequence[EventBase]], default=None)
auth_events = attr.ib(type=Optional[MutableStateMap[EventBase]], default=None)
class FederationHandler(BaseHandler):
"""Handles events that originated from federation.
Responsible for:
a) handling received Pdus before handing them on as Events to the rest
of the homeserver (including auth and state conflict resolutions)
b) converting events that were produced by local clients that may need
to be sent to remote homeservers.
c) doing the necessary dances to invite remote users and join remote
rooms.
"""
    def __init__(self, hs: "HomeServer"):
        """Wire up stores, clients and replication endpoints from the homeserver."""
        super().__init__(hs)
        self.hs = hs
        self.store = hs.get_datastore()
        self.storage = hs.get_storage()
        self.state_store = self.storage.state
        self.federation_client = hs.get_federation_client()
        self.state_handler = hs.get_state_handler()
        self._state_resolution_handler = hs.get_state_resolution_handler()
        self.server_name = hs.hostname
        self.keyring = hs.get_keyring()
        self.action_generator = hs.get_action_generator()
        self.is_mine_id = hs.is_mine_id
        self.spam_checker = hs.get_spam_checker()
        self.event_creation_handler = hs.get_event_creation_handler()
        self._message_handler = hs.get_message_handler()
        self._server_notices_mxid = hs.config.server_notices_mxid
        self.config = hs.config
        # Per its accessor name, a proxy-aware HTTP client that enforces the
        # IP blacklist, so federation-triggered requests cannot reach
        # blacklisted/internal addresses.
        self.http_client = hs.get_proxied_blacklisted_http_client()
        self._instance_name = hs.get_instance_name()
        self._replication = hs.get_replication_data_handler()
        self._send_events = ReplicationFederationSendEventsRestServlet.make_client(hs)
        self._clean_room_for_join_client = ReplicationCleanRoomRestServlet.make_client(
            hs
        )
        # In worker mode these operations are routed over replication; in
        # monolith mode we call the device handler / store directly.
        if hs.config.worker_app:
            self._user_device_resync = ReplicationUserDevicesResyncRestServlet.make_client(
                hs
            )
            self._maybe_store_room_on_outlier_membership = ReplicationStoreRoomOnOutlierMembershipRestServlet.make_client(
                hs
            )
        else:
            self._device_list_updater = hs.get_device_handler().device_list_updater
            self._maybe_store_room_on_outlier_membership = (
                self.store.maybe_store_room_on_outlier_membership
            )
        # When joining a room we need to queue any events for that room up.
        # For each room, a list of (pdu, origin) tuples.
        self.room_queues = {}  # type: Dict[str, List[Tuple[EventBase, str]]]
        self._room_pdu_linearizer = Linearizer("fed_room_pdu")
        self.third_party_event_rules = hs.get_third_party_event_rules()
        self._ephemeral_messages_enabled = hs.config.enable_ephemeral_messages
async def on_receive_pdu(self, origin, pdu, sent_to_us_directly=False) -> None:
""" Process a PDU received via a federation /send/ transaction, or
via backfill of missing prev_events
Args:
origin (str): server which initiated the /send/ transaction. Will
be used to fetch miss<|endoftext|> |
python | <fim-prefix>ignedjson.key
from signedjson.sign import SignatureVerifyException
from sydent.http.httpclient import FederationHttpClient
logger = logging.getLogger(__name__)
class NoAuthenticationError(Exception):
    """Raised when a request carries no signature that can be authenticated."""
class Verifier(object):
"""
Verifies signed json blobs from Matrix Homeservers by finding the
homeserver's address, contacting it, requesting its keys and
verifying that the signature on the json blob matches.
"""
    def __init__(self, sydent):
        """:param sydent: running Sydent instance, used to build the federation HTTP client."""
        self.sydent = sydent
        # Cache of server keys. These are cached until the 'valid_until_ts' time
        # in the result.
        self.cache = {
            # server_name: <result from keys query>,
        }
    @defer.inlineCallbacks
    def _getKeysForServer(self, server_name):
        """Get the signing key data from a homeserver.

        Results are served from self.cache while their 'valid_until_ts'
        has not passed; responses without a 'valid_until_ts' are never cached.

        :param server_name: The name of the server to request the keys from.
        :type server_name: unicode
        :return: The verification keys returned by the server.
        :rtype: twisted.internet.defer.Deferred[dict[unicode, dict[unicode, unicode]]]
        """
        # Serve from the cache while the entry is still within its validity window.
        if server_name in self.cache:
            cached = self.cache[server_name]
            now = int(time.time() * 1000)
            if cached['valid_until_ts'] > now:
                defer.returnValue(self.cache[server_name]['verify_keys'])
        client = FederationHttpClient(self.sydent)
        # The second argument caps the response body at 50 KiB, guarding
        # against unbounded allocation from a hostile server.
        result = yield client.get_json("matrix://%s/_matrix/key/v2/server/" % server_name, 1024 * 50)
        if 'verify_keys' not in result:
            raise SignatureVerifyException("No key found in response")
        if 'valid_until_ts' in result:
            # Don't cache anything without a valid_until_ts or we wouldn't
            # know when to expire it.
            logger.info("Got keys for %s: caching until %s", server_name, result['valid_until_ts'])
            self.cache[server_name] = result
        defer.returnValue(result['verify_keys'])
@defer.inlineCallbacks
def verifyServerSignedJson(self, signed_json, acceptable_server_names=None):
"""Given a signed json object, try to verify any one
of the signatures on it
XXX: This contains a fairly noddy version of the home server
SRV lookup and signature verification. It does no caching (just
fetches the signature each time and does not contact any other
servers to do perspective checks).
:param acceptable_server_names: If provided and not None,
only signatures from servers in this list will be accepted.
:type acceptable_server_names: list[unicode] or None
:return a tuple of the server name and key name that was
successfully verified.
:rtype: twisted.internet.defer.Deferred[tuple[unicode]]
:raise SignatureVerifyException: The json cannot be verified.
"""
if 'signatures' not in signed_json:
raise SignatureVerifyException("Signature missing")
for server_name, sigs in signed_json['signatures'].items():
if acceptable_server_names is not None:
<|endoftext|> |
python | <fim-prefix>ograph=True`. See https://www.tensorflow.org/guide/autograph
for more information.
experimental_relax_shapes: When true, argument shapes may be relaxed to
avoid unnecessary retracing.
capture_by_value: Experimental. Whether to capture resource variables by
value or reference. If None, will inherit from a parent context or
default to False.
jit_compile: Force-compile the function with XLA, cf.
def_function.Function doc on jit_compile.
experimental_follow_type_hints: See the documentation for `tf.function`.
Raises:
ValueError: if `input_signature` is not None and the `python_function`'s
argspec has keyword arguments.
"""
self._python_function = python_function
pure_function = attributes and IMPLEMENTS_ATTRIBUTE_NAME in attributes
self._function_spec = FunctionSpec.from_function_and_signature(
python_function,
input_signature,
is_pure=pure_function,
experimental_follow_type_hints=experimental_follow_type_hints)
self._name = name
self._autograph = autograph
self._autograph_options = autograph_options
self._experimental_relax_shapes = experimental_relax_shapes
self._function_cache = FunctionCache()
self._function_attributes = attributes or {}
self._capture_by_value = capture_by_value
self.tracing_count = 0
if self.input_signature is not None:
self._hashable_input_signature = hash(self.flat_input_signature)
# BUG: CWE-667 Improper Locking
# self._lock = threading.Lock()
# FIXED:
<fim-middle> self._lock = threading.RLock()
# _descriptor_cache is a of instance of a class to an instance-specific
# `Function`, used to make sure defun-decorated methods create different
# functi<fix-suffix>ons for each instance.
self._descriptor_cache = weakref.WeakKeyDictionary()
self._jit_compile = jit_compile
self._experimental_follow_type_hints = experimental_follow_type_hints
def __call__(self, *args, **kwargs):
"""Calls a graph function specialized to the inputs."""
with self._lock:
(graph_function,
filtered_flat_args) = self._maybe_define_function(args, kwargs)
return graph_function._call_flat(
filtered_flat_args, captured_inputs=graph_function.captured_inputs) # pylint: disable=protected-access
@property
def python_function(self):
"""Returns the wrapped Python function."""
return self._python_function # pylint: disable=protected-access
@property
def function_spec(self):
return self._function_spec
@property
def input_signature(self):
"""Returns the input signature."""
return self._function_spec.input_signature
@property
def flat_input_signature(self):
"""Returns the flattened input signature."""
return self._function_spec.flat_input_signature
def _get_concrete_function_internal_garbage_collected(self, *args, **kwargs):
"""Returns a concrete function which cleans up its graph function."""
if self.input_signature:
args, kwargs = None, None
with self._lock:
graph_function, _ = self._maybe_define_function(args, kwargs)
return graph_function
def _get_concrete_function_internal(self, *args, **kwargs):
"""Bypasses error checking when getting a <|endoftext|> |
python | <fim-prefix><fim-middle>from urllib.parse import urlparse, urlunparse
except ImportError: # Python 2
from urlparse import urlparse, urlunparse
from django.conf import settings
from django.core.urlresolvers import re<fix-suffix>verse
from django.http import HttpResponseRedirect, QueryDict
from django.template.response import TemplateResponse
from django.utils.http import base36_to_int
from django.utils.translation import ugettext as _
from django.shortcuts import resolve_url
from django.views.decorators.debug import sensitive_post_parameters
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
# Avoid shadowing the login() and logout() views below.
from django.contrib.auth import REDIRECT_FIELD_NAME, login as auth_login, logout as auth_logout, get_user_model
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AuthenticationForm, PasswordResetForm, SetPasswordForm, PasswordChangeForm
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.models import get_current_site
@sensitive_post_parameters()
@csrf_protect
@never_cache
def login(request, template_name='registration/login.html',
redirect_field_name=REDIRECT_FIELD_NAME,
authentication_form=AuthenticationForm,
current_app=None, extra_context=None):
"""
Displays the login form and handles the login action.
"""
redirect_to = request.REQUEST.get(redirect_field_name, '')
if request.method == "POST":
form = authentication_form(data=request.POST)
if form.is_valid():
# Use default setting if redirect_to is empty
if not redirect_to:
<|endoftext|> |
python | <fim-prefix><fim-middle>ing: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.<fix-suffix>
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from twisted.web.resource import Resource
from twisted.internet import defer
import logging
import json
from six.moves import urllib
from sydent.http.servlets import get_args, jsonwrap, deferjsonwrap, send_cors
from sydent.http.httpclient import FederationHttpClient
from sydent.users.tokens import issueToken
logger = logging.getLogger(__name__)
class RegisterServlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
self.client = FederationHttpClient(self.sydent)
@deferjsonwrap
@defer.inlineCallbacks
def render_POST(self, request):
"""
Register with the Identity Server
"""
send_cors(request)
args = get_args(request, ('matrix_server_name', 'access_token'))
result = yield self.client.get_json(
"matrix://%s/_matrix/federation/v1/openid/userinfo?access_token=%s" % (
args['matrix_server_name'], urllib.parse.quote(args['access_token']),
),
# BUG: CWE-770 Allocation of Resources <|endoftext|> |
python | <fim-prefix><fim-middle>se64
import json
import pytest
from jwt.algorithms import Algorithm, HMACAlgorithm, NoneAlgorithm, has_crypto
from jwt.exceptions import InvalidKeyError
from jwt.utils import base64url_decode
from <fix-suffix>.keys import load_ec_pub_key_p_521, load_hmac_key, load_rsa_pub_key
from .utils import crypto_required, key_path
if has_crypto:
from jwt.algorithms import ECAlgorithm, OKPAlgorithm, RSAAlgorithm, RSAPSSAlgorithm
class TestAlgorithms:
def test_algorithm_should_throw_exception_if_prepare_key_not_impl(self):
algo = Algorithm()
with pytest.raises(NotImplementedError):
algo.prepare_key("test")
def test_algorithm_should_throw_exception_if_sign_not_impl(self):
algo = Algorithm()
with pytest.raises(NotImplementedError):
algo.sign("message", "key")
def test_algorithm_should_throw_exception_if_verify_not_impl(self):
algo = Algorithm()
with pytest.raises(NotImplementedError):
algo.verify("message", "key", "signature")
def test_algorithm_should_throw_exception_if_to_jwk_not_impl(self):
algo = Algorithm()
with pytest.raises(NotImplementedError):
algo.from_jwk("value")
def test_algorithm_should_throw_exception_if_from_jwk_not_impl(self):
algo = Algorithm()
with pytest.raises(NotImplementedError):
algo.to_jwk("value")
def test_none_algorithm_should_throw_exception_if_key_is_not_none(self):
algo = NoneAlgorithm()
with pytest.raises(InvalidKeyError):
algo.prepare_key("123")
def test_hmac_should_reject_nonstring_key(self):
algo = HMACAlgorithm(HMACAlgorithm.SHA256)
<|endoftext|> |
python | <fim-prefix>(?:[-_a-zA-ZÀ-ÖØ-öø-ÿ0-9]+)#"
for match in re.finditer(PATTERN, dataobj.content):
embedded_tags.add(match.group(0).replace("#", "").lstrip())
return render_template(
"dataobjs/show.html",
title=dataobj["title"],
dataobj=dataobj,
backlinks=backlinks,
current_path=dataobj["dir"],
form=forms.DeleteDataForm(),
view_only=0,
search_enabled=app.config["SEARCH_CONF"]["enabled"],
post_title_form=post_title_form,
move_form=move_form,
tag_list=tag_list,
embedded_tags=embedded_tags,
titles=titles,
)
@app.route("/dataobj/move/<int:dataobj_id>", methods=["POST"])
def move_item(dataobj_id):
form = forms.MoveItemForm()
out_dir = form.path.data if form.path.data != "" else "root directory"
if form.path.data == None:
flash("No path specified.")
return redirect(f"/dataobj/{dataobj_id}")
try:
if data.move_item(dataobj_id, form.path.data):
flash(f"Data successfully moved to {out_dir}.", "success")
return redirect(f"/dataobj/{dataobj_id}")
else:
flash(f"Data could not be moved to {out_dir}.", "error")
return redirect(f"/dataobj/{dataobj_id}")
except FileNotFoundError:
flash("Data not found.", "error")
return redirect("/")
except FileExistsError:
flash("Data already in target directory.", "error")
return redirect(f"/dataobj/{dataobj_id}")
# BUG: CWE-352 Cross-Site Request Forgery (CSRF)
# @app.route("/dataobj/delete/<int:dataobj_id>", methods=["DELETE", "GET"])
# FIXED:
<fim-middle>@app.route("/dataobj/delete/<int:dataobj_id>", methods=["POST"])
def delete_data(dataobj_id):
try:
data.delete_item(dataobj_id)
except BaseException:
flash("Data could not be f<fix-suffix>ound!", "error")
return redirect("/")
flash("Data deleted!", "success")
return redirect("/")
@app.route("/login", methods=["GET", "POST"])
def login():
form = forms.UserForm()
if form.validate_on_submit():
db = get_db()
user = db.search(
(Query().username == form.username.data) & (Query().type == "user")
)
if user and check_password_hash(user[0]["hashed_password"], form.password.data):
user = User.from_db(user[0])
login_user(user, remember=True)
flash("Login successful!", "success")
next_url = request.args.get("next")
return redirect(next_url or "/")
flash("Invalid credentials", "error")
return redirect("/login")
return render_template("users/login.html", form=form, title="Login")
@app.route("/logout", methods=["DELETE", "GET"])
def logout():
logout_user()
flash("Logged out successfully", "success")
return redirect("/")
@app.route("/user/edit", methods=["GET", "POST"])
def edit_user():
form = forms.UserForm()
if form.validate_on_submit():
db = get_db()
db.update(
{
"username": form.username.data,
"hashed_password": generate_password_hash(form.password.data),
},
doc_ids=[current_user.id],
)
flash("Information saved!", "success")
return redirect("/")
form.username.data = current_user.username
<|endoftext|> |
python | <fim-prefix># -*- cod<fim-middle>ing: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.doc<fix-suffix>ument import Document
class PortalSettings(Document):
def add_item(self, item):
'''insert new portal menu item if route is not set, or role is different'''
exists = [d for d in self.get('menu', []) if d.get('route')==item.get('route')]
if exists and item.get('role'):
if exists[0].role != item.get('role'):
exists[0].role = item.get('role')
return True
elif not exists:
item['enabled'] = 1
self.append('menu', item)
return True
# BUG: CWE-79 Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')
#
# FIXED:
@frappe.whitelist()
def reset(self):
'''Restore defaults'''
self.menu = []
self.sync_menu()
def sync_menu(self):
'''Sync portal menu items'''
dirty = False
for item in frappe.get_hooks('standard_portal_menu_items'):
if item.get('role') and not frappe.db.exists("Role", item.get('role')):
frappe.get_doc({"doctype": "Role", "role_name": item.get('role'), "desk_access": 0}).insert()
if self.add_item(item):
dirty = True
if dirty:
self.save()
def on_update(self):
self.clear_cache()
def clear_cache(self):
# make js and css
# clear web cache (for menus!)
frappe.clear_cache(user='Guest')
from frappe.website.render import clear_cache
clear_cache()
# clears role based home pages
frappe.clear_cache()
<|endoftext|> |
java | <fim-prefix>mpliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.evm.operation;
import static org.apache.tuweni.bytes.Bytes32.leftPad;
import org.hyperledger.besu.evm.EVM;
import org.hyperledger.besu.evm.frame.MessageFrame;
import org.hyperledger.besu.evm.gascalculator.GasCalculator;
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.units.bigints.UInt256;
public class ShlOperation extends AbstractFixedCostOperation {
public ShlOperation(final GasCalculator gasCalculator) {
super(0x1b, "SHL", 2, 1, 1, gasCalculator, gasCalculator.getVeryLowTierGasCost());
}
@Override
public Operation.OperationResult executeFixedCostOperation(
final MessageFrame frame, final EVM evm) {
Bytes shiftAmount = frame.popStackItem();
if (shiftAmount.size() > 4 && (shiftAmount = shiftAmount.trimLeadingZeros()).size() > 4) {
frame.popStackItem();
frame.pushStackItem(UInt256.ZERO);
} else {
final int shiftAmountInt = shiftAmount.toInt();
final Bytes value = leftPad(frame.popStackItem());
// BUG: CWE-681 Incorrect Conversion between Numeric Types
// if (shiftAmountInt >= 256) {
// FIXED:
<fim-middle> if (shiftAmountInt >= 256 || shiftAmountInt < 0) {
frame.pushStackItem(UInt256.ZERO);
} else {
frame.pushStackItem(value.shiftLeft(shiftAmountInt));
}
}
retur<fix-suffix>n successResponse;
}
}
<|endoftext|> |
java | <fim-prefix>pyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @author GeyserMC
* @link https://github.com/GeyserMC/Geyser
*/
package org.geysermc.connector.network.translators.bedrock;
import com.github.steveice10.mc.protocol.data.game.ClientRequest;
import com.github.steveice10.mc.protocol.packet.ingame.client.ClientRequestPacket;
import com.nukkitx.math.vector.Vector3f;
import com.nukkitx.protocol.bedrock.packet.MovePlayerPacket;
import com.nukkitx.protocol.bedrock.packet.RespawnPacket;
import com.nukkitx.protocol.bedrock.packet.SetEntityDataPacket;
import org.geysermc.connector.entity.player.PlayerEntity;
import org.geysermc.connector.network.session.GeyserSession;
import org.geysermc.connector.network.translators.PacketTranslator;
import org.geysermc.connector.network.translators.Translator;
@Translator(packet = RespawnPacket.class)
public class BedrockRespawnTranslator extends PacketTranslator<RespawnPacket> {
@Override
// BUG: CWE-287 Improper Authentication
// public void translate(RespawnPacket packet, GeyserSession session) {
// FIXED:
<fim-middle> public void translate(GeyserSession session, RespawnPacket packet) {
if (packet.getState() == RespawnPacket.State.CLIENT_READY) {
// Previously we only sent the respawn packet <fix-suffix>before the server finished loading
// The message included was 'Otherwise when immediate respawn is on the client never loads'
// But I assume the new if statement below fixes that problem
RespawnPacket respawnPacket = new RespawnPacket();
respawnPacket.setRuntimeEntityId(0);
respawnPacket.setPosition(Vector3f.ZERO);
respawnPacket.setState(RespawnPacket.State.SERVER_READY);
session.sendUpstreamPacket(respawnPacket);
if (session.isSpawned()) {
// Client might be stuck; resend spawn information
PlayerEntity entity = session.getPlayerEntity();
if (entity == null) return;
SetEntityDataPacket entityDataPacket = new SetEntityDataPacket();
entityDataPacket.setRuntimeEntityId(entity.getGeyserId());
entityDataPacket.getMetadata().putAll(entity.getMetadata());
session.sendUpstreamPacket(entityDataPacket);
MovePlayerPacket movePlayerPacket = new MovePlayerPacket();
movePlayerPacket.setRuntimeEntityId(entity.getGeyserId());
movePlayerPacket.setPosition(entity.getPosition());
movePlayerPacket.setRotation(entity.getBedrockRotation());
movePlayerPacket.setMode(MovePlayerPacket.Mode.RESPAWN);
session.sendUpstreamPacket(movePlayerPacket);
}
ClientRequestPacket javaRes<|endoftext|> |
java | <fim-prefix>flc);
tableEl.setCustomizeColumns(false);
tableEl.setNumOfRowsEnabled(false);
refreshbtn = uifactory.addFormLink("button.refresh", flc, Link.BUTTON);
refreshbtn.setIconLeftCSS("o_icon o_icon_refresh o_icon-fw");
}
@Override
public void event(UserRequest ureq, Component source, Event event) {
super.event(ureq, source, event);
}
@Override
protected void event(UserRequest ureq, Controller source, org.olat.core.gui.control.Event event) {
if (source instanceof FormLink && ((FormLink) source).getCmd().equals("viewQuality")) {
if (cmc == null) {
// initialize preview controller only once
previewVC = createVelocityContainer("video_preview");
cmc = new CloseableModalController(getWindowControl(), "close", previewVC);
listenTo(cmc);
}
}
super.event(ureq, source, event);
}
@Override
protected void formInnerEvent(UserRequest ureq, FormItem source, FormEvent event) {
if (source instanceof FormLink && ((FormLink) source).getCmd().equals("viewQuality")) {
if (cmc == null) {
// initialize preview controller only once
previewVC = createVelocityContainer("video_preview");
cmc = new CloseableModalController(getWindowControl(), "close", previewVC);
listenTo(cmc);
}
// Get the user object from the link to access version object
FormLink link = (FormLink) source;
VideoTranscoding videoTranscoding = (VideoTranscoding) link.getUserObject();
if (videoTranscoding == null) {
// this is the master video
// BUG: CWE-91 XML Injection (aka Blind XPath Injection)
// VideoMetadata videoMetadata = videoManager.readVideoMetadataFile(videoResource);
// FIXED:
<fim-middle> VideoMeta videoMetadata = videoManager.getVideoMetadata(videoResource);
previewVC.contextPut("width", videoMetadata.getWidth());
previewVC.contextPut("height", videoMetadata.getHeight());
<fix-suffix> previewVC.contextPut("filename", "video.mp4");
VFSContainer container = videoManager.getMasterContainer(videoResource);
String transcodedUrl = registerMapper(ureq, new VideoMediaMapper(container));
previewVC.contextPut("mediaUrl", transcodedUrl);
} else {
// this is a version
previewVC.contextPut("width", videoTranscoding.getWidth());
previewVC.contextPut("height", videoTranscoding.getHeight());
previewVC.contextPut("filename", videoTranscoding.getResolution() + "video.mp4");
VFSContainer container = videoManager.getTranscodingContainer(videoResource);
String transcodedUrl = registerMapper(ureq, new VideoMediaMapper(container));
previewVC.contextPut("mediaUrl", transcodedUrl);
}
// activate dialog to bring it in front
cmc.activate();
} else if (source instanceof FormLink && ((FormLink) source).getCmd().equals("deleteQuality")) {
FormLink link = (FormLink) source;
VideoTranscoding videoTranscoding = (VideoTranscoding) link.getUserObject();
videoManager.deleteVideoTranscoding(videoTranscoding);
} else if (source instanceof FormLink && ((FormLink) source).getCmd().equals("startTranscoding")) {
videoManager.createTranscoding(videoResource, (int) source.getUserObject(), "mp4");
}
initTable();
}
@Override
protected void formOK(UserRequest ureq) {
// nothing to do, events cached in formInnerEvent
}
@Override
protected void doDispose() {
// controller auto disposed
}
private class VideoCompar<|endoftext|> |
java | <fim-prefix><fim-middle>right 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the <fix-suffix>License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.addon.businesscontinuity.primary.controller;
import com.google.gson.Gson;
import com.thoughtworks.go.addon.businesscontinuity.ConfigFileType;
import com.thoughtworks.go.addon.businesscontinuity.DatabaseStatusProvider;
import com.thoughtworks.go.addon.businesscontinuity.FileDetails;
import com.thoughtworks.go.addon.businesscontinuity.PluginsList;
import com.thoughtworks.go.addon.businesscontinuity.primary.ServerStatusResponse;
import com.thoughtworks.go.addon.businesscontinuity.primary.service.GoFilesStatusProvider;
import com.thoughtworks.go.util.SystemEnvironment;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.<|endoftext|> |